diff --git a/.coveragerc b/.coveragerc
index ec75678..304abe3 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -2,7 +2,7 @@
 [run]
 branch = True
 source = gldas
-# omit = bad_file.py
+omit = */_version.py
 
 [paths]
 source =
diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 0000000..b4c9529
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1 @@
+src/gldas/_version.py export-subst
\ No newline at end of file
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 0000000..acf1665
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,119 @@
+
+# This workflow will install Python dependencies and run tests on
+# Windows and Linux systems with a variety of Python versions
+
+# For more information see:
+# https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
+
+name: Automated Tests
+
+on:
+  push:
+  pull_request:
+  schedule:
+    - cron: '0 0 * * *' # daily
+
+jobs:
+  build:
+    name: Build py${{ matrix.python-version }} @ ${{ matrix.os }} 🐍
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        python-version: ['3.8', '3.9', '3.10']
+        os: ["ubuntu-latest", "windows-latest"]
+
+    steps:
+      - uses: actions/checkout@v2
+        with:
+          submodules: true
+      - uses: conda-incubator/setup-miniconda@v2.0.1
+        with:
+          miniconda-version: "latest"
+          auto-update-conda: true
+          python-version: ${{ matrix.python-version }}
+          environment-file: environment.yml
+          activate-environment: gldas
+          auto-activate-base: false
+      - name: Print environment info
+        shell: bash -l {0}
+        run: |
+          conda info -a
+          conda list
+          pip list
+          which pip
+          which python
+      - name: Export Environment
+        shell: bash -l {0}
+        run: |
+          mkdir -p .artifacts
+          filename=env_py${{ matrix.python-version }}_${{ matrix.os }}.yml
+          conda env export --no-builds | grep -v "prefix" > .artifacts/$filename
+      - name: Install package and test
+        shell: bash -l {0}
+        run: |
+          pip install -e .
+          pytest --cache-clear
+      - name: Upload Coverage
+        shell: bash -l {0}
+        run: |
+          pip install coveralls && coveralls --service=github
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          COVERALLS_FLAG_NAME: ${{ matrix.python-version }}
+          COVERALLS_PARALLEL: true
+      - name: Create wheel and dist package
+        shell: bash -l {0}
+        run: |
+          git status
+          pip install setuptools_scm
+          if [ ${{ matrix.os }} == "windows-latest" ]
+          then
+            # build whls on windows
+            pip install wheel
+            python setup.py bdist_wheel --dist-dir .artifacts/dist
+          else
+            # build dist on linux
+            python setup.py sdist --dist-dir .artifacts/dist
+          fi
+          ls .artifacts/dist
+      - name: Upload Artifacts
+        uses: actions/upload-artifact@v2
+        with:
+          name: Artifacts
+          path: .artifacts/*
+  coveralls:
+    name: Submit Coveralls 👚
+    needs: build
+    runs-on: ubuntu-latest
+    container: python:3-slim
+    steps:
+      - name: Finished
+        run: |
+          pip3 install --upgrade coveralls && coveralls --service=github --finish
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+  publish:
+    name: Upload to PyPI
+    if: |
+      startsWith(github.ref, 'refs/tags/v') &&
+      startsWith(github.repository, 'TUW-GEO')
+    needs: build
+    runs-on: ubuntu-latest
+    steps:
+      - name: Print environment variables
+        run: |
+          echo "GITHUB_REF = $GITHUB_REF"
+          echo "GITHUB_REPOSITORY = $GITHUB_REPOSITORY"
+      - name: Download Artifacts
+        uses: actions/download-artifact@v2
+      - name: Display downloaded files
+        run: ls -aR
+      - name: Upload to PyPI
+        uses: pypa/gh-action-pypi-publish@v1.4.1
+        with:
+          skip_existing: true
+          verbose: true
+          verify_metadata: true
+          packages_dir: Artifacts/dist/
+          user: __token__
+          password: ${{ secrets.PYPI_API_TOKEN }} # this needs to be uploaded to github actions secrets
diff --git a/.gitignore b/.gitignore
index 2df076b..c1e4ac3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -42,3 +42,4 @@
 cover/*
 MANIFEST
 */local_scripts/*
+.artifacts/*
diff --git a/.readthedocs.yml b/.readthedocs.yml
index aee7b78..f791926 100644
--- a/.readthedocs.yml
+++ b/.readthedocs.yml
@@ -3,10 +3,11 @@
 version: 2
 
 sphinx:
   configuration: docs/conf.py
 
-formats:
-  - pdf
+conda:
+  environment: environment.yml
 
 python:
   version: 3.7
   install:
-    - requirements: docs/requirements.txt
\ No newline at end of file
+    - method: pip
+      path: .
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index d76db99..0000000
--- a/.travis.yml
+++ /dev/null
@@ -1,53 +0,0 @@
-dist: xenial
-language: python
-sudo: false
-addons:
-  apt:
-    packages:
-      - gcc
-      - libgrib-api-dev
-notifications:
-  email: false
-python:
-  # We don't actually use the Travis Python, but this keeps it organized.
-  - "3.6"
-  - "3.7"
-  - "3.8"
-install:
-  # You may want to periodically update this, although the conda update
-  # conda line below will keep everything up-to-date. We do this
-  # conditionally because it saves us some downloading if the version is
-  # the same.
-  - wget http://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh;
-  - bash miniconda.sh -b -p $HOME/miniconda
-  - export PATH="$HOME/miniconda/bin:$PATH"
-  - hash -r
-  - conda config --set always_yes yes --set changeps1 no
-  - conda update -q conda
-  # Useful for debugging any issues with conda
-  - conda info -a
-
-  - conda create -n gldas python=$TRAVIS_PYTHON_VERSION
-  - source activate gldas
-  - conda env update -f environment.yml
-  - conda list
-  - pip install .
-  - pip list
-  - which pip
-  - which python
-
-script:
-  - python setup.py test
-after_success:
-  # report coverage results to coveralls.io
-  - pip install coveralls
-  - coveralls
-deploy:
-  provider: pypi
-  # better then hiding the token would be to encrypt it with travis...
-  username: __token__
-  password: $PYPI_TOKEN
-  skip_existing: true
-  on:
-    repo: TUW-GEO/gldas
-    tags: true
\ No newline at end of file
diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index 65aa7fb..40dea08 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -7,6 +7,20 @@
 Unreleased
 
 -
 
+Version 0.7.0
+=============
+
+- Test the download function with real data.
+- Support GLDAS Noah EP products
+- Replace Travis CI with GitHub Actions
+- Update docs
+- Update formatting
+
+Version 0.6.1
+=============
+
+- Travis uploads releases to PyPI
+
 Version 0.6
 ===========
diff --git a/README.rst b/README.rst
index b1f92cd..389d978 100644
--- a/README.rst
+++ b/README.rst
@@ -2,8 +2,8 @@
 gldas
 =====
 
-.. image:: https://travis-ci.org/TUW-GEO/gldas.svg?branch=master
-    :target: https://travis-ci.org/TUW-GEO/gldas
+.. image:: https://github.com/TUW-GEO/gldas/workflows/Automated%20Tests/badge.svg?branch=master
+    :target: https://github.com/TUW-GEO/gldas/actions
 
 .. image:: https://coveralls.io/repos/github/TUW-GEO/gldas/badge.svg?branch=master
     :target: https://coveralls.io/github/TUW-GEO/gldas?branch=master
@@ -43,7 +43,7 @@
 commands:
 
 .. code-block:: shell
 
-    conda create -n gldas python=3.6 # or any other supported python version
+    conda create -n gldas python=3.9 # or any other supported python version
     source activate gldas
 
 .. code-block:: shell
 
@@ -66,23 +66,6 @@
 format (reading, time series creation) and GLDAS Noah data version 2.0 and version 2.1.
 
 It should be easy to extend the package to support other GLDAS based products.
 This will be done as need arises.
 
-Downloading Products
-====================
-
-In order to download GLDAS NOAH products you have to register an account with
-NASA's Earthdata portal. Instructions can be found `here `_.
-
-After that you can use the command line program ``gldas_download``.
-
-.. code::
-
-    mkdir ~/workspace/gldas_data
-    gldas_download ~/workspace/gldas_data
-
-would download GLDAS Noah version 2.0 in 0.25 degree sampling into the folder
-``~/workspace/gldas_data``. For more options run ``gldas_download -h``.
-
 Contribute
 ==========
diff --git a/docs/Makefile b/docs/Makefile
index f086e5e..fa2cc35 100644
--- a/docs/Makefile
+++ b/docs/Makefile
@@ -1,177 +1,29 @@
 # Makefile for Sphinx documentation
 #
 
-# You can set these variables from the command line.
-SPHINXOPTS    =
-SPHINXBUILD   = sphinx-build
-PAPER         =
+# You can set these variables from the command line, and also
+# from the environment for the first two.
+SPHINXOPTS    ?=
+SPHINXBUILD   ?= sphinx-build
+SOURCEDIR     = .
 BUILDDIR      = _build
+AUTODOCDIR    = api
 
 # User-friendly check for sphinx-build
 ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $?), 1)
-$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
+$(error "The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/")
 endif
 
-# Internal variables.
-PAPEROPT_a4     = -D latex_paper_size=a4
-PAPEROPT_letter = -D latex_paper_size=letter
-ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
-# the i18n builder cannot share the environment and doctrees with the others
-I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
-
-.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+.PHONY: help clean Makefile
 
+# Put it first so that "make" without argument is like "make help".
 help:
-	@echo "Please use \`make <target>' where <target> is one of"
-	@echo "  html       to make standalone HTML files"
-	@echo "  dirhtml    to make HTML files named index.html in directories"
-	@echo "  singlehtml to make a single large HTML file"
-	@echo "  pickle     to make pickle files"
-	@echo "  json       to make JSON files"
-	@echo "  htmlhelp   to make HTML files and a HTML help project"
-	@echo "  qthelp     to make HTML files and a qthelp project"
-	@echo "  devhelp    to make HTML files and a Devhelp project"
-	@echo "  epub       to make an epub"
-	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
-	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
-	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
-	@echo "  text       to make text files"
-	@echo "  man        to make manual pages"
-	@echo "  texinfo    to make Texinfo files"
-	@echo "  info       to make Texinfo files and run them through makeinfo"
-	@echo "  gettext    to make PO message catalogs"
-	@echo "  changes    to make an overview of all changed/added/deprecated items"
-	@echo "  xml        to make Docutils-native XML files"
-	@echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
-	@echo "  linkcheck  to check all external links for integrity"
-	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
+	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
 
 clean:
-	rm -rf $(BUILDDIR)/*
-
-html:
-	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
-	@echo
-	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
-
-dirhtml:
-	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
-	@echo
-	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
-
-singlehtml:
-	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
-	@echo
-	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
-
-pickle:
-	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
-	@echo
-	@echo "Build finished; now you can process the pickle files."
-
-json:
-	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
-	@echo
-	@echo "Build finished; now you can process the JSON files."
-
-htmlhelp:
-	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
-	@echo
-	@echo "Build finished; now you can run HTML Help Workshop with the" \
-	      ".hhp project file in $(BUILDDIR)/htmlhelp."
-
-qthelp:
-	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
-	@echo
-	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
-	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
-	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/gldas.qhcp"
-	@echo "To view the help file:"
-	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/gldas.qhc"
-
-devhelp:
-	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
-	@echo
-	@echo "Build finished."
- @echo "To view the help file:" - @echo "# mkdir -p $HOME/.local/share/devhelp/gldas" - @echo "# ln -s $(BUILDDIR)/devhelp $HOME/.local/share/devhelp/gldas" - @echo "# devhelp" - -epub: - $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub - @echo - @echo "Build finished. The epub file is in $(BUILDDIR)/epub." - -latex: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo - @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." - @echo "Run \`make' in that directory to run these through (pdf)latex" \ - "(use \`make latexpdf' here to do that automatically)." - -latexpdf: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through pdflatex..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -latexpdfja: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through platex and dvipdfmx..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -text: - $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text - @echo - @echo "Build finished. The text files are in $(BUILDDIR)/text." - -man: - $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man - @echo - @echo "Build finished. The manual pages are in $(BUILDDIR)/man." - -texinfo: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo - @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." - @echo "Run \`make' in that directory to run these through makeinfo" \ - "(use \`make info' here to do that automatically)." - -info: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo "Running Texinfo files through makeinfo..." - make -C $(BUILDDIR)/texinfo info - @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." - -gettext: - $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale - @echo - @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." - -changes: - $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes - @echo - @echo "The overview file is in $(BUILDDIR)/changes." - -linkcheck: - $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck - @echo - @echo "Link check complete; look for any errors in the above output " \ - "or in $(BUILDDIR)/linkcheck/output.txt." - -doctest: - $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest - @echo "Testing of doctests in the sources finished, look at the " \ - "results in $(BUILDDIR)/doctest/output.txt." - -xml: - $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml - @echo - @echo "Build finished. The XML files are in $(BUILDDIR)/xml." + rm -rf $(BUILDDIR)/* $(AUTODOCDIR) -pseudoxml: - $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml - @echo - @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/conf.py b/docs/conf.py index dd5f081..69af94e 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,9 +1,8 @@ -# -*- coding: utf-8 -*- -# # This file is execfile()d with the current directory set to its containing dir. # -# Note that not all possible configuration values are present in this -# autogenerated file. +# This file only contains a selection of the most common options. 
+# This file only contains a selection of the most common options. For a full
+# list see the documentation:
+# https://www.sphinx-doc.org/en/master/usage/configuration.html
 #
 # All configuration values have a default; values that are commented out
 # serve to show the default.
@@ -13,15 +12,18 @@
 import inspect
 import shutil
 
-__location__ = os.path.join(os.getcwd(), os.path.dirname(
-    inspect.getfile(inspect.currentframe())))
+# -- Path setup --------------------------------------------------------------
+
+__location__ = os.path.join(
+    os.getcwd(), os.path.dirname(inspect.getfile(inspect.currentframe()))
+)
 
 # If extensions (or modules to document with autodoc) are in another directory,
 # add these directories to sys.path here. If the directory is relative to the
 # documentation root, use os.path.abspath to make it absolute, like shown here.
-sys.path.insert(0, os.path.join(__location__, '../src'))
+sys.path.insert(0, os.path.join(__location__, "../src"))
 
-# -- Run sphinx-apidoc ------------------------------------------------------
+# -- Run sphinx-apidoc -------------------------------------------------------
 # This hack is necessary since RTD does not issue `sphinx-apidoc` before running
 # `sphinx-build -b html . _build/html`. See Issue:
 # https://github.com/rtfd/readthedocs.org/issues/1139
@@ -43,55 +45,65 @@
 try:
     import sphinx
-    from pkg_resources import parse_version
 
-    cmd_line_template = "sphinx-apidoc -f -o {outputdir} {moduledir}"
+    cmd_line_template = (
+        "sphinx-apidoc --implicit-namespaces -f -o {outputdir} {moduledir}"
+    )
     cmd_line = cmd_line_template.format(outputdir=output_dir, moduledir=module_dir)
     args = cmd_line.split(" ")
-    if parse_version(sphinx.__version__) >= parse_version('1.7'):
+    # This is a rudimentary parse_version to avoid external dependencies
+    if tuple(sphinx.__version__.split(".")) >= ("1", "7"):
         args = args[1:]
 
     apidoc.main(args)
 except Exception as e:
     print("Running `sphinx-apidoc` failed!\n{}".format(e))
 
-# -- General configuration -----------------------------------------------------
+# -- General configuration ---------------------------------------------------
 
 # If your documentation needs a minimal Sphinx version, state it here.
 # needs_sphinx = '1.0'
 
 # Add any Sphinx extension module names here, as strings. They can be extensions
 # coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
-extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.todo',
-              'sphinx.ext.autosummary', 'sphinx.ext.viewcode', 'sphinx.ext.coverage',
-              'sphinx.ext.doctest', 'sphinx.ext.ifconfig', 'sphinx.ext.mathjax',
-              'sphinx.ext.napoleon']
+extensions = [
+    "sphinx.ext.autodoc",
+    "sphinx.ext.intersphinx",
+    "sphinx.ext.todo",
+    "sphinx.ext.autosummary",
+    "sphinx.ext.viewcode",
+    "sphinx.ext.coverage",
+    "sphinx.ext.doctest",
+    "sphinx.ext.ifconfig",
+    "sphinx.ext.mathjax",
+    "sphinx.ext.napoleon",
+]
 
 # Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = ["_templates"]
 
 # The suffix of source filenames.
-source_suffix = '.rst'
+source_suffix = ".rst"
 
 # The encoding of source files.
 # source_encoding = 'utf-8-sig'
 
 # The master toctree document.
-master_doc = 'index'
+master_doc = "index"
 
 # General information about the project.
-project = u'gldas'
-copyright = u'2020, TU Wien'
+project = "gldas"
+copyright = "2021, TU Wien"
 
 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
 # built documents.
 #
 # The short X.Y version.
-version = ''  # Is set by calling `setup.py docs`
+version = ""  # Is set by calling `setup.py docs`
 # The full version, including alpha/beta/rc tags.
-release = ''  # Is set by calling `setup.py docs`
+release = ""  # Is set by calling `setup.py docs`
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
@@ -105,7 +117,7 @@
 
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
-exclude_patterns = ['_build']
+exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", ".venv"]
 
 # The reST default role (used for this markup: `text`) to use for all documents.
 # default_role = None
@@ -122,7 +134,7 @@
 # show_authors = False
 
 # The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
+pygments_style = "sphinx"
 
 # A list of ignored prefixes for module index sorting.
 # modindex_common_prefix = []
@@ -131,18 +143,18 @@
 # keep_warnings = False
 
-# -- Options for HTML output ---------------------------------------------------
+# -- Options for HTML output -------------------------------------------------
 
 # The theme to use for HTML and HTML Help pages. See the documentation for
 # a list of builtin themes.
-html_theme = 'sphinx_rtd_theme'
+html_theme = "alabaster"
 
 # Theme options are theme-specific and customize the look and feel of a theme
 # further. For a list of options available for each theme, see the
 # documentation.
 html_theme_options = {
-    'sidebar_width': '300px',
-    'page_width': '1200px'
+    "sidebar_width": "300px",
+    "page_width": "1200px"
 }
 
 # Add any paths that contain custom themes here, relative to this directory.
@@ -172,7 +184,7 @@
 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
+html_static_path = ["_static"]
 
 # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
 # using the given strftime format.
@@ -216,27 +228,24 @@
 # html_file_suffix = None
 
 # Output file base name for HTML help builder.
-htmlhelp_basename = 'gldas-doc'
+htmlhelp_basename = "gldas-doc"
 
-# -- Options for LaTeX output --------------------------------------------------
+# -- Options for LaTeX output ------------------------------------------------
 
 latex_elements = {
-# The paper size ('letterpaper' or 'a4paper').
-# 'papersize': 'letterpaper',
-
-# The font size ('10pt', '11pt' or '12pt').
-# 'pointsize': '10pt',
-
-# Additional stuff for the LaTeX preamble.
-# 'preamble': '',
+    # The paper size ("letterpaper" or "a4paper").
+    # "papersize": "letterpaper",
+    # The font size ("10pt", "11pt" or "12pt").
+    # "pointsize": "10pt",
+    # Additional stuff for the LaTeX preamble.
+    # "preamble": "",
}
 
 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title, author, documentclass [howto/manual]).
 latex_documents = [
-    ('index', 'user_guide.tex', u'gldas Documentation',
-     u'TU Wien', 'manual'),
+    ("index", "user_guide.tex", "gldas Documentation", "TU Wien", "manual")
 ]
 
 # The name of an image file (relative to this directory) to place at the top of
@@ -259,14 +268,15 @@
 
 # If false, no module index is generated.
 # latex_domain_indices = True
 
-# -- External mapping ------------------------------------------------------------
-python_version = '.'.join(map(str, sys.version_info[0:2]))
+# -- External mapping --------------------------------------------------------
+python_version = ".".join(map(str, sys.version_info[0:2]))
 intersphinx_mapping = {
-    'sphinx': ('http://www.sphinx-doc.org/en/stable', None),
-    'python': ('https://docs.python.org/' + python_version, None),
-    'matplotlib': ('https://matplotlib.org', None),
-    'numpy': ('https://docs.scipy.org/doc/numpy', None),
-    'sklearn': ('http://scikit-learn.org/stable', None),
-    'pandas': ('http://pandas.pydata.org/pandas-docs/stable', None),
-    'scipy': ('https://docs.scipy.org/doc/scipy/reference', None),
-}
\ No newline at end of file
+    "sphinx": ("http://www.sphinx-doc.org/en/stable", None),
+    "python": ("https://docs.python.org/" + python_version, None),
+    "matplotlib": ("https://matplotlib.org", None),
+    "numpy": ("https://docs.scipy.org/doc/numpy", None),
+    "sklearn": ("https://scikit-learn.org/stable", None),
+    "pandas": ("https://pandas.pydata.org/pandas-docs/stable", None),
+    "scipy": ("https://docs.scipy.org/doc/scipy/reference", None),
+    "pyscaffold": ("https://pyscaffold.org/en/stable", None),
+}
diff --git a/docs/download.rst b/docs/download.rst
new file mode 100644
index 0000000..20a4f80
--- /dev/null
+++ b/docs/download.rst
@@ -0,0 +1,21 @@
+Downloading Products
+====================
+
+In order to download GLDAS NOAH products you have to register an account with
+NASA's Earthdata portal at `https://urs.earthdata.nasa.gov/ <https://urs.earthdata.nasa.gov/>`_.
+
+After that you can use the command line program ``gldas_download`` together
+with your username and password.
+
+For example, to download all GLDAS Noah v2.1 images (3-hourly):
+
+.. code::
+
+    gldas_download /tmp -s 2018-06-03 -e 2018-06-05 --product GLDAS_Noah_v21_025 --username **USERNAME** --password **PASSWORD**
+
+would download GLDAS Noah version 2.1 data from the selected start to the
+selected end date into the ``/tmp`` folder.
+
+For a description of the download function and all options run
+
+.. code::
+
+    gldas_download -h
diff --git a/docs/img2ts.rst b/docs/img2ts.rst
index 15b226f..2ebd47c 100755
--- a/docs/img2ts.rst
+++ b/docs/img2ts.rst
@@ -25,7 +25,7 @@
 program. An example would be:
 
 .. code-block:: shell
 
-    gldas_repurpose /gldas_data /timeseries/data 2000-01-01 2001-01-01 SoilMoi0_10cm_inst SoilMoi10_40cm_inst
+    gldas_repurpose /download/image/path /output/timeseries/path 2000-01-01 2001-01-01 SoilMoi0_10cm_inst SoilMoi10_40cm_inst
 
 Which would take GLDAS Noah data stored in ``/gldas_data`` from January 1st 2000
 to January 1st 2001 and store the parameters for the top 2 layers of soil moisture as time
diff --git a/docs/index.rst b/docs/index.rst
index af19a94..77a168e 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -12,9 +12,10 @@
 .. toctree::
    :maxdepth: 2
 
-   Reading <reading>
+   Downloading <download>
+   Image Reading <reading>
    Variable names <varnames>
-   Conversion to time series <img2ts>
+   Conversion to time series and Reading <img2ts>
    License <license>
    Authors <authors>
    Changelog <changelog>
diff --git a/docs/license.rst b/docs/license.rst
index 6437528..3989c51 100644
--- a/docs/license.rst
+++ b/docs/license.rst
@@ -4,4 +4,4 @@
 License
 =======
 
-.. literalinclude:: ../LICENSE.txt
+.. include:: ../LICENSE.txt
diff --git a/docs/readme.rst b/docs/readme.rst
new file mode 100644
index 0000000..81995ef
--- /dev/null
+++ b/docs/readme.rst
@@ -0,0 +1,2 @@
+.. _readme:
+.. include:: ../README.rst
diff --git a/docs/varnames.rst b/docs/varnames.rst
index 25457eb..e5f039f 100644
--- a/docs/varnames.rst
+++ b/docs/varnames.rst
@@ -2,8 +2,7 @@
 Variable naming for different versions of GLDAS NOAH
 ====================================================
 
 For GLDAS Noah 1.0 parameters are called using their PDS IDs from the table below.
-A full list of PDS IDs can be found in the `GLDAS 1.0 README `_
-
+
 For GLDAS Noah 2.0 and GLDAS Noah 2.1 parameters are called using Variable Names from the table below.
 A full list of variable names can be found in the `GLDAS 2.x README `_
diff --git a/environment.yml b/environment.yml
index 7bff8d5..d6f09ec 100644
--- a/environment.yml
+++ b/environment.yml
@@ -15,6 +15,6 @@
   - repurpose
   - datedown>=0.3
   - trollsift
-  - pytest-cov<=2.9
-  - coverage==4.5.2
-  - pytest==3.8.2
\ No newline at end of file
+  - pytest-cov
+  - coverage
+  - pytest
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..2c63dbb
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,8 @@
+[build-system]
+# AVOID CHANGING REQUIRES: IT WILL BE UPDATED BY PYSCAFFOLD!
+requires = ["setuptools>=46.1.0", "setuptools_scm[toml]>=5", "wheel"]
+build-backend = "setuptools.build_meta"
+
+[tool.setuptools_scm]
+# See configuration details in https://github.com/pypa/setuptools_scm
+version_scheme = "no-guess-dev"
diff --git a/setup.cfg b/setup.cfg
index cb1d642..cfa76fc 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -6,12 +6,12 @@
 name = gldas
 description = Readers and converters for data from the GLDAS Noah Land Surface Model.
 author = TU Wien
-author-email = remote.sensing@geo.tuwien.ac.at
+author_email = remote.sensing@geo.tuwien.ac.at
 license = mit
-long-description = file: README.rst
-long-description-content-type = text/x-rst; charset=UTF-8
+long_description = file: README.rst
+long_description_content_type = text/x-rst; charset=UTF-8
 url = https://github.com/TUW-GEO/gldas
-project-urls =
+project_urls =
     Documentation = https://gldas.readthedocs.io/en/latest/
 # Change if running only on Windows, Mac or Linux (comma-separated)
 platforms = any
@@ -23,24 +23,25 @@
 
 [options]
 zip_safe = False
-packages = find:
+packages = find_namespace:
 include_package_data = True
 package_dir =
     =src
-# DON'T CHANGE THE FOLLOWING LINE! IT WILL BE UPDATED BY PYSCAFFOLD!
-setup_requires = pyscaffold>=3.2a0,<3.3a0
 # Add here dependencies of your project (semicolon/line-separated), e.g.
-install_requires = pyproj
-    pygeogrids
-    pygrib
-    numpy
-    pygeobase
-    datedown>=0.3
-    trollsift
-    netcdf4
-    pyresample
-    repurpose
-    pynetcf
+install_requires =
+    importlib-metadata; python_version<"3.8"
+    pyproj
+    pygeogrids
+    pygrib
+    numpy
+    pygeobase
+    datedown>=0.3
+    trollsift
+    netcdf4
+    pyresample
+    repurpose
+    pynetcf
 # The usage of test_requires is discouraged, see `Dependency Management` docs
 # tests_require = pytest-cov; coverage; pytest;
@@ -58,9 +59,9 @@
 # PDF = ReportLab; RXP
 # Add here test requirements (semicolon/line-separated)
 testing =
-    pytest-cov<=2.9
-    coverage==4.5.2
-    pytest==3.8.2
+    pytest-cov
+    coverage
+    pytest
 
 [options.entry_points]
 # Add here console scripts like:
@@ -107,7 +108,7 @@
 build_dir = build/sphinx
 
 [devpi:upload]
 # Options for the devpi: PyPI server and packaging tool
 # VCS export must be deactivated since we are using setuptools-scm
-no-vcs = 1
+no_vcs = 1
 formats = bdist_wheel
 
 [flake8]
@@ -122,7 +123,7 @@
 exclude =
 
 [pyscaffold]
 # PyScaffold's parameters when the project was created.
 # This will be used when updating. Do not change!
-version = 3.2.3
+version = 4.0
 package = gldas
 extensions =
     no_skeleton
diff --git a/setup.py b/setup.py
index b91dedf..6d96379 100644
--- a/setup.py
+++ b/setup.py
@@ -1,23 +1,21 @@
-# -*- coding: utf-8 -*-
 """
     Setup file for gldas.
     Use setup.cfg to configure your project.
 
-    This file was generated with PyScaffold 3.2.3.
+    This file was generated with PyScaffold 4.0.
     PyScaffold helps you to put up the scaffold of your new Python project.
     Learn more under: https://pyscaffold.org/
 """
-import sys
-
-from pkg_resources import VersionConflict, require
 from setuptools import setup
 
-try:
-    require('setuptools>=38.3')
-except VersionConflict:
-    print("Error: version of setuptools is too old (<38.3)!")
-    sys.exit(1)
-
-
 if __name__ == "__main__":
-    setup(use_pyscaffold=True)
+    try:
+        setup(use_scm_version={"version_scheme": "no-guess-dev"})
+    except:  # noqa
+        print(
+            "\n\nAn error occurred while building the project, "
+            "please ensure you have the most updated version of setuptools, "
+            "setuptools_scm and wheel with:\n"
+            "   pip install -U setuptools setuptools_scm wheel\n\n"
+        )
+        raise
diff --git a/src/gldas/__init__.py b/src/gldas/__init__.py
index b5e9415..e451f10 100644
--- a/src/gldas/__init__.py
+++ b/src/gldas/__init__.py
@@ -1,11 +1,16 @@
-# -*- coding: utf-8 -*-
-from pkg_resources import get_distribution, DistributionNotFound
+import sys
+
+if sys.version_info[:2] >= (3, 8):
+    # TODO: Import directly (no need for conditional) when `python_requires = >= 3.8`
+    from importlib.metadata import PackageNotFoundError, version  # pragma: no cover
+else:
+    from importlib_metadata import PackageNotFoundError, version  # pragma: no cover
 
 try:
     # Change here if project is renamed and does not equal the package name
     dist_name = __name__
-    __version__ = get_distribution(dist_name).version
-except DistributionNotFound:
-    __version__ = 'unknown'
+    __version__ = version(dist_name)
+except PackageNotFoundError:  # pragma: no cover
+    __version__ = "unknown"
 finally:
-    del get_distribution, DistributionNotFound
\ No newline at end of file
+    del version, PackageNotFoundError
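Note: the block above switches version lookup from pkg_resources to importlib.metadata; a minimal sketch of checking it, assuming the package is installed (e.g. via pip install -e .):

    import gldas
    # prints the installed version string, or "unknown" if the
    # distribution metadata cannot be found
    print(gldas.__version__)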
""" @@ -40,8 +18,9 @@ from datedown.down import download -def gldas_folder_get_version_first_last(root, fmt=None, - subpaths=['{time:%Y}', '{time:%j}']): +def gldas_folder_get_version_first_last( + root, fmt=None, subpaths=["{time:%Y}", "{time:%j}"] +): """ Get product version and first and last product which exists under the root folder. @@ -65,7 +44,7 @@ def gldas_folder_get_version_first_last(root, fmt=None, Last found product datetime """ if fmt is None: - fmt = "GLDAS_NOAH025_3H.A{time:%Y%m%d.%H%M}.0{version:2s}.nc4" + fmt = "GLDAS_NOAH025_3H{ep}.A{time:%Y%m%d.%H%M}.0{version:2s}.nc4" start = None end = None @@ -74,17 +53,16 @@ def gldas_folder_get_version_first_last(root, fmt=None, last_folder = get_last_gldas_folder(root, subpaths) if first_folder is not None: - files = sorted( - glob.glob(os.path.join(first_folder, globify(fmt)))) + files = sorted(glob.glob(os.path.join(first_folder, globify(fmt)))) data = parse(fmt, os.path.split(files[0])[1]) - start = data['time'] - version = 'GLDAS_Noah_v%s_025' % data['version'] + start = data["time"] + ep = data["ep"] + version = f"GLDAS_Noah_v{data['version']}_025{data['ep']}" if last_folder is not None: - files = sorted( - glob.glob(os.path.join(last_folder, globify(fmt)))) + files = sorted(glob.glob(os.path.join(last_folder, globify(fmt)))) data = parse(fmt, os.path.split(files[-1])[1]) - end = data['time'] + end = data["time"] return version, start, end @@ -213,8 +191,11 @@ def get_gldas_start_date(product): start_date : datetime Start date of NOAH GLDAS product. """ - dt_dict = {'GLDAS_Noah_v20_025': datetime(1948, 1, 1, 3), - 'GLDAS_Noah_v21_025': datetime(2000, 1, 1, 3)} + dt_dict = { + "GLDAS_Noah_v20_025": datetime(1948, 1, 1, 3), + "GLDAS_Noah_v21_025": datetime(2000, 1, 1, 3), + "GLDAS_Noah_v21_025_EP": datetime(2000, 1, 1, 3), + } return dt_dict[product] @@ -235,45 +216,72 @@ def parse_args(args): """ parser = argparse.ArgumentParser( description="Download GLDAS data.", - formatter_class=argparse.RawTextHelpFormatter) - - parser.add_argument("localroot", - help='Root of local filesystem where' - 'the data is stored.') + formatter_class=argparse.RawTextHelpFormatter, + ) - parser.add_argument("-s", "--start", type=mkdate, - help=("Startdate. Either in format YYYY-MM-DD or " - "YYYY-MM-DDTHH:MM. If not given then the target" - "folder is scanned for a start date. If no data" - "is found there then the first available date " - "of the product is used.")) + parser.add_argument( + "localroot", + help="Root of local filesystem where" "the data is stored.", + ) - parser.add_argument("-e", "--end", type=mkdate, - help=("Enddate. Either in format YYYY-MM-DD or " - "YYYY-MM-DDTHH:MM. If not given then the " - "current date is used.")) + parser.add_argument( + "-s", + "--start", + type=mkdate, + help=( + "Startdate as YYYY-MM-DD. " + "If not given then the target" + "folder is scanned for a start date. If no data" + "is found there then the first available date " + "of the product is used." + ), + ) - help_string = '\n'.join(['GLDAS product to download.', - 'GLDAS_Noah_v20_025 available from {} to 2010-12-31', - 'GLDAS_Noah_v21_025 available from {}']) + parser.add_argument( + "-e", + "--end", + type=mkdate, + help=( + "Enddate. In format YYYY-MM-DD.If not given then the " + "current date is used." 
+        ),
+    )
+
+    help_string = "\n".join(
+        [
+            "GLDAS product to download.",
+            "GLDAS_Noah_v20_025 available from {} to 2014-12-31",
+            "GLDAS_Noah_v21_025 available from {}",
+            "GLDAS_Noah_v21_025_EP available after GLDAS_Noah_v21_025",
+        ]
+    )
     help_string = help_string.format(
-        get_gldas_start_date('GLDAS_Noah_v20_025'),
-        get_gldas_start_date('GLDAS_Noah_v21_025'))
+        get_gldas_start_date("GLDAS_Noah_v20_025"),
+        get_gldas_start_date("GLDAS_Noah_v21_025"),
+    )
 
     parser.add_argument(
-        "--product", choices=["GLDAS_Noah_v20_025", "GLDAS_Noah_v21_025"],
-        default="GLDAS_Noah_v20_025", help=help_string)
+        "--product",
+        choices=[
+            "GLDAS_Noah_v20_025",
+            "GLDAS_Noah_v21_025",
+            "GLDAS_Noah_v21_025_EP",
+        ],
+        default="GLDAS_Noah_v21_025",
+        help=help_string,
+    )
 
-    parser.add_argument("--username",
-                        help='Username to use for download.')
+    parser.add_argument("--username", help="Username to use for download.")
 
-    parser.add_argument("--password",
-                        help='password to use for download.')
+    parser.add_argument("--password", help="Password to use for download.")
 
-    parser.add_argument("--n_proc", default=1, type=int,
-                        help='Number of parallel processes to use for'
-                             'downloading.')
+    parser.add_argument(
+        "--n_proc",
+        default=1,
+        type=int,
+        help="Number of parallel processes to use for downloading.",
+    )
 
     args = parser.parse_args(args)
     # set defaults that can not be handled by argparse
 
     # Compare versions to prevent mixing data sets
     version, first, last = gldas_folder_get_version_first_last(args.localroot)
     if args.product and version and (args.product != version):
-        raise Exception('Error: Found products of different version ({}) '
-                        'in {}. Abort download!'.format(version,
-                                                        args.localroot))
+        raise Exception(
+            "Error: Found products of different version ({}) "
+            "in {}. Abort download!".format(version, args.localroot)
+        )
 
     if args.start is None or args.end is None:
         if not args.product:
@@ -294,30 +303,38 @@
             args.start = get_gldas_start_date(args.product)
         else:
             # In case of no indication if version, use GLDAS Noah 2.0
-            # start time
-            args.start = get_gldas_start_date('GLDAS_Noah_v20_025')
+            # start time, because it has the longest time span
+            args.start = get_gldas_start_date("GLDAS_Noah_v20_025")
         else:
             args.start = last
     if args.end is None:
         args.end = datetime.now()
 
-    prod_urls = {'GLDAS_Noah_v20_025':
-                 {'root': 'hydro1.gesdisc.eosdis.nasa.gov',
-                  'dirs': ['data', 'GLDAS', 'GLDAS_NOAH025_3H.2.0',
-                           '%Y', '%j']},
-                 'GLDAS_Noah_v21_025':
-                 {'root': 'hydro1.gesdisc.eosdis.nasa.gov',
-                  'dirs': ['data', 'GLDAS', 'GLDAS_NOAH025_3H.2.1',
-                           '%Y', '%j']}}
-
-    args.urlroot = prod_urls[args.product]['root']
-    args.urlsubdirs = prod_urls[args.product]['dirs']
-    args.localsubdirs = ['%Y', '%j']
-
-    print("Downloading data from {} to {} "
-          "into folder {}.".
-          format(args.start.isoformat(),
-                 args.end.isoformat(),
-                 args.localroot))
+    prod_urls = {
+        "GLDAS_Noah_v20_025": {
+            "root": "hydro1.gesdisc.eosdis.nasa.gov",
+            "dirs": ["data", "GLDAS", "GLDAS_NOAH025_3H.2.0", "%Y", "%j"],
+        },
+        "GLDAS_Noah_v21_025": {
+            "root": "hydro1.gesdisc.eosdis.nasa.gov",
+            "dirs": ["data", "GLDAS", "GLDAS_NOAH025_3H.2.1", "%Y", "%j"],
+        },
+        "GLDAS_Noah_v21_025_EP": {
+            "root": "hydro1.gesdisc.eosdis.nasa.gov",
+            "dirs": ["data", "GLDAS", "GLDAS_NOAH025_3H_EP.2.1", "%Y", "%j"],
+        },
+    }
+
+    args.urlroot = prod_urls[args.product]["root"]
+    args.urlsubdirs = prod_urls[args.product]["dirs"]
+    args.localsubdirs = ["%Y", "%j"]
+
+    print(
+        "Downloading data from {} to {} "
+        "into folder {}.".format(
+            args.start.isoformat(), args.end.isoformat(), args.localroot
+        )
+    )
 
     return args
 
@@ -333,22 +350,28 @@
     args = parse_args(args)
 
     dts = list(daily(args.start, args.end))
-    url_create_fn = partial(create_dt_url, root=args.urlroot,
-                            fname='', subdirs=args.urlsubdirs)
-    fname_create_fn = partial(create_dt_fpath, root=args.localroot,
-                              fname='', subdirs=args.localsubdirs)
-
-    down_func = partial(download,
-                        num_proc=args.n_proc,
-                        username=args.username,
-                        password="'" + args.password + "'",
-                        recursive=True,
-                        filetypes=['nc4', 'nc4.xml'])
-    download_by_dt(dts, url_create_fn,
-                   fname_create_fn, down_func,
-                   recursive=True)
+    url_create_fn = partial(
+        create_dt_url, root=args.urlroot, fname="", subdirs=args.urlsubdirs
+    )
+    fname_create_fn = partial(
+        create_dt_fpath,
+        root=args.localroot,
+        fname="",
+        subdirs=args.localsubdirs,
+    )
+
+    down_func = partial(
+        download,
+        num_proc=args.n_proc,
+        username=args.username,
+        password="'" + args.password + "'",
+        recursive=True,
+        filetypes=["nc4", "nc4.xml"],
+    )
+    download_by_dt(
+        dts, url_create_fn, fname_create_fn, down_func, recursive=True
+    )
 
 
 def run():
     main(sys.argv[1:])
-
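Note: the downloader above can also be driven programmatically through main(); a minimal sketch mirroring the CLI call documented in docs/download.rst, with placeholder credentials (USERNAME/PASSWORD are not real values):

    from gldas.download import main

    # equivalent to:
    # gldas_download /tmp -s 2018-06-03 -e 2018-06-05 --product GLDAS_Noah_v21_025 ...
    main([
        "/tmp",
        "-s", "2018-06-03",
        "-e", "2018-06-05",
        "--product", "GLDAS_Noah_v21_025",
        "--username", "USERNAME",  # placeholder, use your Earthdata login
        "--password", "PASSWORD",  # placeholder
    ])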
diff --git a/src/gldas/grid.py b/src/gldas/grid.py
index 7251be5..ab47748 100644
--- a/src/gldas/grid.py
+++ b/src/gldas/grid.py
@@ -1,36 +1,40 @@
-# The MIT License (MIT)
-#
-# Copyright (c) 2018, TU Wien
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the "Software"), to deal
-# in the Software without restriction, including without limitation the rights
-# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in
-# all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-# SOFTWARE.
-
 import numpy as np
-
 from pygeogrids.grids import BasicGrid
 from netCDF4 import Dataset
 import os
 
 
 def subgrid4bbox(grid, min_lon, min_lat, max_lon, max_lat):
+    """
+    Select a spatial subset of the grid by bounding box corner points.
+
+    Parameters
+    ----------
+    grid: BasicGrid or CellGrid
+        Grid object to trim.
+    min_lon: float
+        Lower left corner longitude
+    min_lat: float
+        Lower left corner latitude
+    max_lon: float
+        Upper right corner longitude
+    max_lat: float
+        Upper right corner latitude
+
+    Returns
+    -------
+    subgrid: BasicGrid or CellGrid
+        Subset of the input grid.
+    """
     gpis, lons, lats, _ = grid.get_grid_points()
     assert len(gpis) == len(lats) == len(lons)
-    bbox_gpis = gpis[np.where((lons <= max_lon) & (lons >= min_lon) &
-                              (lats <= max_lat) & (lats >= min_lat))]
+    bbox_gpis = gpis[
+        np.where(
+            (lons <= max_lon)
+            & (lons >= min_lon)
+            & (lats <= max_lat)
+            & (lats >= min_lat)
+        )
+    ]
 
     return grid.subgrid_from_gpis(bbox_gpis)
 
@@ -52,39 +56,50 @@
     """
     resolution = 0.25
-    glob_lons = np.arange(-180 + resolution / 2, 180 + resolution / 2, resolution)
-    glob_lats = np.arange(-90 + resolution / 2, 90 + resolution / 2, resolution)
+    glob_lons = np.arange(
+        -180 + resolution / 2, 180 + resolution / 2, resolution
+    )
+    glob_lats = np.arange(
+        -90 + resolution / 2, 90 + resolution / 2, resolution
+    )
     lon, lat = np.meshgrid(glob_lons, glob_lats)
-    glob_grid = BasicGrid(lon.flatten(), lat.flatten()).to_cell_grid(cellsize=5.)
+    glob_grid = BasicGrid(lon.flatten(), lat.flatten()).to_cell_grid(
+        cellsize=5.0
+    )
 
     if only_land:
-        ds = Dataset(os.path.join(os.path.abspath(os.path.dirname(__file__)),
-                                  'GLDASp4_landmask_025d.nc4'))
-        land_lats = ds.variables['lat'][:]
-        land_mask = ds.variables['GLDAS_mask'][:].flatten().filled() == 0.
+        ds = Dataset(
+            os.path.join(
+                os.path.abspath(os.path.dirname(__file__)),
+                "GLDASp4_landmask_025d.nc4",
+            )
+        )
+        land_lats = ds.variables["lat"][:]
+        land_mask = ds.variables["GLDAS_mask"][:].flatten().filled() == 0.0
         dlat = glob_lats.size - land_lats.size
 
         land_mask = np.concatenate((np.ones(dlat * glob_lons.size), land_mask))
-        land_points = np.ma.masked_array(glob_grid.get_grid_points()[0], land_mask)
+        land_points = np.ma.masked_array(
+            glob_grid.get_grid_points()[0], land_mask
+        )
 
-        land_grid = glob_grid.subgrid_from_gpis(land_points[~land_points.mask].filled())
+        land_grid = glob_grid.subgrid_from_gpis(
+            land_points[~land_points.mask].filled()
+        )
         return land_grid
     else:
         return glob_grid
 
 
 def GLDAS025Cellgrid():
+    """Alias for the global 0.25 DEG grid without gaps, with 5 DEG cells."""
     return GLDAS025Grids(only_land=False)
 
 
 def GLDAS025LandGrid():
+    """Alias for the global 0.25 DEG grid over land only, with 5 DEG cells."""
     return GLDAS025Grids(only_land=True)
 
-
-if __name__ == '__main__':
-    GLDAS025LandGrid()
-
-
 def load_grid(land_points=True, bbox=None):
     """
     Load gldas grid.
@@ -107,4 +122,4 @@
     else:
         subgrid = None
 
-    return subgrid
\ No newline at end of file
+    return subgrid
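Note: a short usage sketch of the grid helpers above; the bounding box values are arbitrary example coordinates, not taken from the repository:

    from gldas.grid import GLDAS025LandGrid, subgrid4bbox

    land = GLDAS025LandGrid()  # 0.25 deg land points, organized in 5 deg cells
    box = subgrid4bbox(land, -11.0, 34.0, 40.0, 71.0)  # min_lon, min_lat, max_lon, max_lat
    print(box.activegpis.size)  # number of grid points inside the box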
+ """ - super(GLDAS_Noah_v21_025Img, self).__init__(filename, mode=mode) + super(GLDAS_Noah_v2_025Img, self).__init__(filename, mode=mode) if type(parameter) != list: parameter = [parameter] self.parameters = parameter - self.fill_values = np.repeat(9999., 1440 * 120) + self.fill_values = np.repeat(9999.0, 1440 * 120) self.grid = GLDAS025Cellgrid() if not subgrid else subgrid self.array_1D = array_1D @@ -84,10 +70,8 @@ def read(self, timestamp=None): try: dataset = Dataset(self.filename) - except IOError as e: - print(e) - print(" ".join([self.filename, "can not be opened"])) - raise e + except IOError: + raise IOError(f"Error opening file {self.filename}") param_names = [] for parameter in self.parameters: @@ -98,18 +82,23 @@ def read(self, timestamp=None): param_metadata = {} param_data = {} for attrname in variable.ncattrs(): - if attrname in ['long_name', 'units']: + if attrname in ["long_name", "units"]: param_metadata.update( - {str(attrname): getattr(variable, attrname)}) + {str(attrname): getattr(variable, attrname)} + ) param_data = dataset.variables[parameter][:] np.ma.set_fill_value(param_data, 9999) - param_data = np.concatenate(( - self.fill_values, - np.ma.getdata(param_data.filled()).flatten())) + param_data = np.concatenate( + ( + self.fill_values, + np.ma.getdata(param_data.filled()).flatten(), + ) + ) return_img.update( - {str(parameter): param_data[self.grid.activegpis]}) + {str(parameter): param_data[self.grid.activegpis]} + ) return_metadata.update({str(parameter): param_metadata}) @@ -118,28 +107,39 @@ def read(self, timestamp=None): return_img[parameter] except KeyError: path, thefile = os.path.split(self.filename) - print ('%s in %s is corrupt - filling' - 'image with NaN values' % (parameter, thefile)) - return_img[parameter] = np.empty( - self.grid.n_gpi).fill(np.nan) + print( + "%s in %s is corrupt - filling" + "image with NaN values" % (parameter, thefile) + ) + return_img[parameter] = np.empty(self.grid.n_gpi).fill( + np.nan + ) - return_metadata['corrupt_parameters'].append() + return_metadata["corrupt_parameters"].append() dataset.close() if self.array_1D: - return Image(self.grid.activearrlon, self.grid.activearrlat, - return_img, return_metadata, timestamp) + return Image( + self.grid.activearrlon, + self.grid.activearrlat, + return_img, + return_metadata, + timestamp, + ) else: for key in return_img: return_img[key] = np.flipud( - return_img[key].reshape((720, 1440))) + return_img[key].reshape((720, 1440)) + ) - return Image(np.flipud(self.grid.activearrlon.reshape((720, 1440))), - np.flipud(self.grid.activearrlat.reshape((720, 1440))), - return_img, - return_metadata, - timestamp) + return Image( + np.flipud(self.grid.activearrlon.reshape((720, 1440))), + np.flipud(self.grid.activearrlat.reshape((720, 1440))), + return_img, + return_metadata, + timestamp, + ) def write(self, data): raise NotImplementedError() @@ -151,6 +151,33 @@ def close(self): pass +class GLDAS_Noah_v21_025Img(GLDAS_Noah_v2_025Img): + def __init__( + self, + filename, + mode="r", + parameter="SoilMoi0_10cm_inst", + subgrid=None, + array_1D=False, + ): + + warnings.warn( + "GLDAS_Noah_v21_025Img is outdated and replaced by the general" + "GLDAS_Noah_v2_025Img class to read gldas v2.0 and v2.1 " + "0.25 DEG netcdf files." 
+ "The old class will be removed soon.", + category=DeprecationWarning, + ) + + super(GLDAS_Noah_v21_025Img, self).__init__( + filename=filename, + mode=mode, + parameter=parameter, + subgrid=subgrid, + array_1D=array_1D, + ) + + class GLDAS_Noah_v1_025Img(ImageBase): """ Class for reading one GLDAS Noah v1 grib file in 0.25 deg grid. @@ -177,13 +204,21 @@ class GLDAS_Noah_v1_025Img(ImageBase): Needed for some legacy code. """ - def __init__(self, filename, mode='r', parameter='086_L1', subgrid=None, array_1D=False): + @deprecated(message="GLDAS Noah v1 data is deprecated, v2 should be used.") + def __init__( + self, + filename, + mode="r", + parameter="086_L1", + subgrid=None, + array_1D=False, + ): super(GLDAS_Noah_v1_025Img, self).__init__(filename, mode=mode) if type(parameter) != list: parameter = [parameter] self.parameters = parameter - self.fill_values = np.repeat(9999., 1440 * 120) + self.fill_values = np.repeat(9999.0, 1440 * 120) self.grid = subgrid if subgrid else GLDAS025Cellgrid() self.array_1D = array_1D @@ -191,7 +226,7 @@ def read(self, timestamp=None): return_img = {} return_metadata = {} - layers = {'085': 1, '086': 1} + layers = {"085": 1, "086": 1} try: grbs = pygrib.open(self.filename) @@ -202,37 +237,45 @@ def read(self, timestamp=None): ids = [] for parameter in self.parameters: - ids.append(int(parameter.split('_')[0])) + ids.append(int(parameter.split("_")[0])) parameter_ids = np.unique(np.array(ids)) for message in grbs: - if message['indicatorOfParameter'] in parameter_ids: - parameter_id = '{:03d}'.format(message['indicatorOfParameter']) + if message["indicatorOfParameter"] in parameter_ids: + parameter_id = "{:03d}".format(message["indicatorOfParameter"]) param_metadata = {} # read metadata in any case - param_metadata['units'] = message['units'] - param_metadata['long_name'] = message['parameterName'] + param_metadata["units"] = message["units"] + param_metadata["long_name"] = message["parameterName"] if parameter_id in layers.keys(): - parameter = '_'.join((parameter_id, 'L' + - str(layers[parameter_id]))) + parameter = "_".join( + (parameter_id, "L" + str(layers[parameter_id])) + ) if parameter in self.parameters: - param_data = np.concatenate(( - self.fill_values, - np.ma.getdata(message['values']).flatten())) + param_data = np.concatenate( + ( + self.fill_values, + np.ma.getdata(message["values"]).flatten(), + ) + ) return_img[parameter] = param_data[ - self.grid.activegpis] + self.grid.activegpis + ] return_metadata[parameter] = param_metadata layers[parameter_id] += 1 else: parameter = parameter_id - param_data = np.concatenate(( - self.fill_values, - np.ma.getdata(message['values']).flatten())) + param_data = np.concatenate( + ( + self.fill_values, + np.ma.getdata(message["values"]).flatten(), + ) + ) return_img[parameter] = param_data[self.grid.activegpis] return_metadata[parameter] = param_metadata @@ -241,21 +284,26 @@ def read(self, timestamp=None): try: return_img[parameter] except KeyError: - print(self.filename[self.filename.rfind('GLDAS'):], - 'corrupt file - filling image with nan values') + print( + self.filename[self.filename.rfind("GLDAS") :], + "corrupt file - filling image with nan values", + ) return_img[parameter] = np.empty(self.grid.n_gpi) return_img[parameter].fill(np.nan) if self.array_1D: - return Image(self.grid.activearrlon, - self.grid.activearrlat, - return_img, - return_metadata, - timestamp) + return Image( + self.grid.activearrlon, + self.grid.activearrlat, + return_img, + return_metadata, + timestamp, + ) else: for key in 
             for key in return_img:
                 return_img[key] = np.flipud(
-                    return_img[key].reshape((720, 1440)))
+                    return_img[key].reshape((720, 1440))
+                )
 
             lons = np.flipud(self.grid.activearrlon.reshape((720, 1440)))
             lats = np.flipud(self.grid.activearrlat.reshape((720, 1440)))
 
@@ -271,7 +319,6 @@
     def flush(self):
         pass
 
     def close(self):
         pass
 
-
 class GLDAS_Noah_v21_025Ds(MultiTemporalImageBase):
     """
     Class for reading GLDAS v2.1 images in nc format.
@@ -281,7 +328,7 @@
     data_path : string
         Path to the nc files
     parameter : string or list, optional
-        one or list of parameters to read, see GLDAS v2.1 documentation 
+        one or list of parameters to read, see GLDAS v2.1 documentation
         for more information (default: 'SoilMoi0_10cm_inst').
     subgrid : Cell Grid
         Subgrid of the global GLDAS Grid to use for reading image data (e.g only land points)
@@ -290,21 +337,31 @@
         Needed for some legacy code.
     """
 
-    def __init__(self, data_path, parameter='SoilMoi0_10cm_inst',
-                 subgrid=None, array_1D=False):
-        ioclass_kws = {'parameter': parameter,
-                       'subgrid': subgrid,
-                       'array_1D': array_1D}
-
-        sub_path = ['%Y', '%j']
-        filename_templ = "GLDAS_NOAH025_3H.A{datetime}.*.nc4"
-
-        super(GLDAS_Noah_v21_025Ds, self).__init__(data_path, GLDAS_Noah_v21_025Img,
-                                                   fname_templ=filename_templ,
-                                                   datetime_format="%Y%m%d.%H%M",
-                                                   subpath_templ=sub_path,
-                                                   exact_templ=False,
-                                                   ioclass_kws=ioclass_kws)
+    def __init__(
+        self,
+        data_path,
+        parameter="SoilMoi0_10cm_inst",
+        subgrid=None,
+        array_1D=False,
+    ):
+        ioclass_kws = {
+            "parameter": parameter,
+            "subgrid": subgrid,
+            "array_1D": array_1D,
+        }
+
+        sub_path = ["%Y", "%j"]
+        filename_templ = "GLDAS_NOAH025_3H*.A{datetime}.*.nc4"
+
+        super(GLDAS_Noah_v21_025Ds, self).__init__(
+            data_path,
+            GLDAS_Noah_v21_025Img,
+            fname_templ=filename_templ,
+            datetime_format="%Y%m%d.%H%M",
+            subpath_templ=sub_path,
+            exact_templ=False,
+            ioclass_kws=ioclass_kws,
+        )
 
     def tstamps_for_daterange(self, start_date, end_date):
         """
@@ -323,10 +380,18 @@ def tstamps_for_daterange(self, start_date, end_date):
             list of datetime objects of each available image between
             start_date and end_date
         """
-        img_offsets = np.array([timedelta(hours=0), timedelta(hours=3),
-                                timedelta(hours=6), timedelta(hours=9),
-                                timedelta(hours=12), timedelta(hours=15),
-                                timedelta(hours=18), timedelta(hours=21)])
+        img_offsets = np.array(
+            [
+                timedelta(hours=0),
+                timedelta(hours=3),
+                timedelta(hours=6),
+                timedelta(hours=9),
+                timedelta(hours=12),
+                timedelta(hours=15),
+                timedelta(hours=18),
+                timedelta(hours=21),
+            ]
+        )
 
         timestamps = []
         diff = end_date - start_date
@@ -360,18 +425,28 @@ class GLDAS_Noah_v1_025Ds(MultiTemporalImageBase):
         Needed for some legacy code.
""" - def __init__(self, data_path, parameter='086_L1', subgrid=None, array_1D=False): - ioclass_kws = {'parameter': parameter, - 'subgrid': subgrid, - 'array_1D': array_1D} - - sub_path = ['%Y', '%j'] + @deprecated("GLDAS Noah v1 data is deprecated, v2 should be used.") + def __init__( + self, data_path, parameter="086_L1", subgrid=None, array_1D=False + ): + ioclass_kws = { + "parameter": parameter, + "subgrid": subgrid, + "array_1D": array_1D, + } + + sub_path = ["%Y", "%j"] filename_templ = "GLDAS_NOAH025SUBP_3H.A{datetime}.001.*.grb" super(GLDAS_Noah_v1_025Ds, self).__init__( - data_path, GLDAS_Noah_v1_025Img, fname_templ=filename_templ, - datetime_format="%Y%j.%H%M", subpath_templ=sub_path, - exact_templ=False, ioclass_kws=ioclass_kws) + data_path, + GLDAS_Noah_v1_025Img, + fname_templ=filename_templ, + datetime_format="%Y%j.%H%M", + subpath_templ=sub_path, + exact_templ=False, + ioclass_kws=ioclass_kws, + ) def tstamps_for_daterange(self, start_date, end_date): """ @@ -390,10 +465,18 @@ def tstamps_for_daterange(self, start_date, end_date): list of datetime objects of each available image between start_date and end_date """ - img_offsets = np.array([timedelta(hours=0), timedelta(hours=3), - timedelta(hours=6), timedelta(hours=9), - timedelta(hours=12), timedelta(hours=15), - timedelta(hours=18), timedelta(hours=21)]) + img_offsets = np.array( + [ + timedelta(hours=0), + timedelta(hours=3), + timedelta(hours=6), + timedelta(hours=9), + timedelta(hours=12), + timedelta(hours=15), + timedelta(hours=18), + timedelta(hours=21), + ] + ) timestamps = [] diff = end_date - start_date @@ -406,7 +489,7 @@ def tstamps_for_daterange(self, start_date, end_date): class GLDASTs(GriddedNcOrthoMultiTs): def __init__(self, ts_path, grid_path=None, **kwargs): - ''' + """ Class for reading GLDAS time series after reshuffling. Parameters @@ -437,9 +520,9 @@ def __init__(self, ts_path, grid_path=None, **kwargs): if false dates will not be read automatically but only on specific request useable for bulk reading because currently the netCDF num2date routine is very slow for big datasets - ''' + """ if grid_path is None: grid_path = os.path.join(ts_path, "grid.nc") grid = load_grid(grid_path) - super(GLDASTs, self).__init__(ts_path, grid, **kwargs) \ No newline at end of file + super(GLDASTs, self).__init__(ts_path, grid, **kwargs) diff --git a/src/gldas/reshuffle.py b/src/gldas/reshuffle.py index 05bdeaf..14b11d7 100644 --- a/src/gldas/reshuffle.py +++ b/src/gldas/reshuffle.py @@ -37,6 +37,7 @@ from gldas.grid import load_grid import warnings + def get_filetype(inpath): """ Tries to find out the file type by searching for @@ -62,13 +63,13 @@ def get_filetype(inpath): filename, extension = os.path.splitext(name) filelist.append(extension) - if '.nc4' in filelist and '.grb' not in filelist: - return 'netCDF' - elif '.grb' in filelist and '.nc4' not in filelist: - return 'grib' + if ".nc4" in filelist and ".grb" not in filelist: + return "netCDF" + elif ".grb" in filelist and ".nc4" not in filelist: + return "grib" else: # if file type cannot be detected, guess netCDF - return 'netCDF' + return "netCDF" def mkdate(datestring): @@ -86,20 +87,27 @@ def mkdate(datestring): Date string as datetime. 
""" if len(datestring) == 10: - return datetime.strptime(datestring, '%Y-%m-%d') + return datetime.strptime(datestring, "%Y-%m-%d") if len(datestring) == 16: - return datetime.strptime(datestring, '%Y-%m-%dT%H:%M') + return datetime.strptime(datestring, "%Y-%m-%dT%H:%M") + def str2bool(val): - if val in ['True', 'true', 't', 'T', '1']: + if val in ["True", "true", "t", "T", "1"]: return True else: return False -def reshuffle(input_root, outputpath, - startdate, enddate, - parameters, input_grid=None, - imgbuffer=50): + +def reshuffle( + input_root, + outputpath, + startdate, + enddate, + parameters, + input_grid=None, + imgbuffer=50, +): """ Reshuffle method applied to GLDAS data. @@ -122,20 +130,22 @@ def reshuffle(input_root, outputpath, How many images to read at once before writing time series. """ - if get_filetype(input_root) == 'grib': + if get_filetype(input_root) == "grib": if input_grid is not None: - warnings.warn('Land Grid is fit to GLDAS 2.x netCDF data') + warnings.warn("Land Grid is fit to GLDAS 2.x netCDF data") - input_dataset = GLDAS_Noah_v1_025Ds(input_root, parameters, - subgrid=input_grid, array_1D=True) + input_dataset = GLDAS_Noah_v1_025Ds( + input_root, parameters, subgrid=input_grid, array_1D=True + ) else: - input_dataset = GLDAS_Noah_v21_025Ds(input_root, parameters, - subgrid=input_grid, array_1D=True) + input_dataset = GLDAS_Noah_v21_025Ds( + input_root, parameters, subgrid=input_grid, array_1D=True + ) if not os.path.exists(outputpath): os.makedirs(outputpath) - global_attr = {'product': 'GLDAS'} + global_attr = {"product": "GLDAS"} # get time series attributes from first day of data. data = input_dataset.read(startdate) @@ -145,11 +155,20 @@ def reshuffle(input_root, outputpath, else: grid = input_grid - reshuffler = Img2Ts(input_dataset=input_dataset, outputpath=outputpath, - startdate=startdate, enddate=enddate, input_grid=grid, - imgbuffer=imgbuffer, cellsize_lat=5.0, - cellsize_lon=5.0, global_attr=global_attr, zlib=True, - unlim_chunksize=1000, ts_attributes=ts_attributes) + reshuffler = Img2Ts( + input_dataset=input_dataset, + outputpath=outputpath, + startdate=startdate, + enddate=enddate, + input_grid=grid, + imgbuffer=imgbuffer, + cellsize_lat=5.0, + cellsize_lon=5.0, + global_attr=global_attr, + zlib=True, + unlim_chunksize=1000, + ts_attributes=ts_attributes, + ) reshuffler.calc() @@ -168,50 +187,86 @@ def parse_args(args): Command line arguments. """ parser = argparse.ArgumentParser( - description="Convert GLDAS data to time series format.") - parser.add_argument("dataset_root", - help='Root of local filesystem where the ' - 'data is stored.') - - parser.add_argument("timeseries_root", - help='Root of local filesystem where the timeseries ' - 'should be stored.') - - parser.add_argument("start", type=mkdate, - help=("Startdate. Either in format YYYY-MM-DD or " - "YYYY-MM-DDTHH:MM.")) - - parser.add_argument("end", type=mkdate, - help=("Enddate. Either in format YYYY-MM-DD or " - "YYYY-MM-DDTHH:MM.")) - - parser.add_argument("parameters", metavar="parameters", - nargs="+", - help=("Parameters to reshuffle into time series format. " - "e.g. SoilMoi0_10cm_inst SoilMoi10_40cm_inst for " - "Volumetric soil water layers 1 to 2.")) - - parser.add_argument("--land_points", type=str2bool, default='False', - help=("Set True to convert only land points as defined" - " in the GLDAS land mask (faster and less/smaller files)")) - - parser.add_argument("--bbox", type=float, default=None, nargs=4, - help=("min_lon min_lat max_lon max_lat. 
" - "Bounding Box (lower left and upper right corner) " - "of area to reshuffle (WGS84)")) - - parser.add_argument("--imgbuffer", type=int, default=50, - help=("How many images to read at once. Bigger " - "numbers make the conversion faster but " - "consume more memory.")) + description="Convert GLDAS data to time series format." + ) + parser.add_argument( + "dataset_root", + help="Root of local filesystem where the " "data is stored.", + ) + + parser.add_argument( + "timeseries_root", + help="Root of local filesystem where the timeseries " + "should be stored.", + ) + + parser.add_argument( + "start", + type=mkdate, + help=( + "Startdate. Either in format YYYY-MM-DD or " "YYYY-MM-DDTHH:MM." + ), + ) + + parser.add_argument( + "end", + type=mkdate, + help=("Enddate. Either in format YYYY-MM-DD or " "YYYY-MM-DDTHH:MM."), + ) + + parser.add_argument( + "parameters", + metavar="parameters", + nargs="+", + help=( + "Parameters to reshuffle into time series format. " + "e.g. SoilMoi0_10cm_inst SoilMoi10_40cm_inst for " + "Volumetric soil water layers 1 to 2." + ), + ) + + parser.add_argument( + "--land_points", + type=str2bool, + default="False", + help=( + "Set True to convert only land points as defined" + " in the GLDAS land mask (faster and less/smaller files)" + ), + ) + + parser.add_argument( + "--bbox", + type=float, + default=None, + nargs=4, + help=( + "min_lon min_lat max_lon max_lat. " + "Bounding Box (lower left and upper right corner) " + "of area to reshuffle (WGS84)" + ), + ) + + parser.add_argument( + "--imgbuffer", + type=int, + default=50, + help=( + "How many images to read at once. Bigger " + "numbers make the conversion faster but " + "consume more memory." + ), + ) args = parser.parse_args(args) # set defaults that can not be handled by argparse - print("Converting data from {} to" - " {} into folder {}.".format(args.start.isoformat(), - args.end.isoformat(), - args.timeseries_root)) + print( + "Converting data from {} to" + " {} into folder {}.".format( + args.start.isoformat(), args.end.isoformat(), args.timeseries_root + ) + ) return args @@ -227,21 +282,21 @@ def main(args): """ args = parse_args(args) - input_grid = load_grid(land_points=args.land_points, - bbox=tuple(args.bbox) if args.bbox is not None else None) - - reshuffle(args.dataset_root, - args.timeseries_root, - args.start, - args.end, - args.parameters, - input_grid=input_grid, - imgbuffer=args.imgbuffer) + input_grid = load_grid( + land_points=args.land_points, + bbox=tuple(args.bbox) if args.bbox is not None else None, + ) + reshuffle( + args.dataset_root, + args.timeseries_root, + args.start, + args.end, + args.parameters, + input_grid=input_grid, + imgbuffer=args.imgbuffer, + ) def run(): main(sys.argv[1:]) - -if __name__ == '__main__': - run() diff --git a/src/gldas/utils.py b/src/gldas/utils.py new file mode 100644 index 0000000..0cdfafd --- /dev/null +++ b/src/gldas/utils.py @@ -0,0 +1,38 @@ +import functools +import inspect +import warnings + +def deprecated(message: str = None): + """ + Decorator for classes or functions to mark them as deprecated. + If the decorator is applied without a specific message (`@deprecated()`), + the default warning is shown when using the function/class. To specify + a custom message use it like: + @deprecated('Don't use this function anymore!'). + + Parameters + ---------- + message : str, optional (default: None) + Custom message to show with the DeprecationWarning. 
+ """ + + def decorator(src): + default_msg = f"GLDAS python " \ + f"{'class' if inspect.isclass(src) else 'method'} " \ + f"'{src.__module__}.{src.__name__}' " \ + f"is deprecated and will be removed soon." + + @functools.wraps(src) + def new_func(*args, **kwargs): + warnings.simplefilter('always', DeprecationWarning) + + warnings.warn( + default_msg if message is None else message, + category=DeprecationWarning, + stacklevel=2) + warnings.simplefilter('default', DeprecationWarning) + return src(*args, **kwargs) + + return new_func + + return decorator diff --git a/tests/test_download.py b/tests/test_download.py index e16a8d6..c5533fa 100644 --- a/tests/test_download.py +++ b/tests/test_download.py @@ -2,80 +2,100 @@ Tests for the download module of GLDAS. """ import os +import unittest from datetime import datetime +import pytest +import tempfile + from gldas.download import get_last_formatted_dir_in_dir from gldas.download import get_first_formatted_dir_in_dir from gldas.download import get_last_gldas_folder from gldas.download import get_first_gldas_folder from gldas.download import gldas_folder_get_version_first_last +from gldas.download import main as main_download + +from gldas.interface import GLDAS_Noah_v21_025Ds + +try: + username = os.environ['GES_DISC_USERNAME'] + pwd = os.environ['GES_DISC_PWD'] +except KeyError: + username = pwd = None + +@pytest.mark.skipif(username is None or pwd is None, + reason="Environment variable (or GitHub Secret) expected but not found:" + "`GES_DISC_USERNAME` and/or `GES_DISC_PWD`") +class TestDownload(unittest.TestCase): + def setUp(self) -> None: + self.outpath = tempfile.mkdtemp(prefix='gldas') + + def test_download_GLDAS_Noah_v21_025(self): + args = [self.outpath, '-s', '2010-03-02', '-e' '2010-03-02', '--product', "GLDAS_Noah_v21_025", + '--username', username, '--password', pwd] + main_download(args) + assert len(os.listdir(os.path.join(self.outpath, '2010', '061'))) == 8 * 2 + 2 + + ds = GLDAS_Noah_v21_025Ds(self.outpath) + img = ds.read(datetime(2010, 3, 2, 3)) + assert list(img.data.keys()) == ['SoilMoi0_10cm_inst'] == list(img.metadata.keys()) + ds.close() + def test_get_last_dir_in_dir(): - path = os.path.join(os.path.dirname(__file__), - 'folder_test', 'success') + path = os.path.join(os.path.dirname(__file__), "folder_test", "success") last_dir = get_last_formatted_dir_in_dir(path, "{time:%Y}") - assert last_dir == '2014' + assert last_dir == "2014" + def test_get_last_dir_in_dir_failure(): - path = os.path.join(os.path.dirname(__file__), - 'folder_test', 'failure') + path = os.path.join(os.path.dirname(__file__), "folder_test", "failure") last_dir = get_last_formatted_dir_in_dir(path, "{time:%Y}") assert last_dir == None def test_get_first_dir_in_dir(): - path = os.path.join(os.path.dirname(__file__), - 'folder_test', 'success') + path = os.path.join(os.path.dirname(__file__), "folder_test", "success") last_dir = get_first_formatted_dir_in_dir(path, "{time:%Y}") - assert last_dir == '2013' + assert last_dir == "2013" + def test_get_last_gldas_folder(): - path = os.path.join(os.path.dirname(__file__), - 'folder_test', 'success') - last = get_last_gldas_folder(path, ['{time:%Y}', '{time:%j}']) + path = os.path.join(os.path.dirname(__file__), "folder_test", "success") + last = get_last_gldas_folder(path, ["{time:%Y}", "{time:%j}"]) last_should = os.path.join(path, "2014", "134") assert last == last_should + def test_get_last_gldas_folder_no_folder(): - path = os.path.join(os.path.dirname(__file__), - 'folder_test', 'failure') - last = 
get_last_gldas_folder(path, ['{time:%Y}', '{time:%j}']) + path = os.path.join(os.path.dirname(__file__), "folder_test", "failure") + last = get_last_gldas_folder(path, ["{time:%Y}", "{time:%j}"]) last_should = None assert last == last_should + def test_get_first_gldas_folder(): - path = os.path.join(os.path.dirname(__file__), - 'folder_test', 'success') - last = get_first_gldas_folder(path, ['{time:%Y}', '{time:%j}']) + path = os.path.join(os.path.dirname(__file__), "folder_test", "success") + last = get_first_gldas_folder(path, ["{time:%Y}", "{time:%j}"]) last_should = os.path.join(path, "2013", "001") assert last == last_should + def test_get_first_gldas_folder_no_folder(): - path = os.path.join(os.path.dirname(__file__), - 'folder_test', 'failure') - last = get_first_gldas_folder(path, ['{time:%Y}', '{time:%j}']) + path = os.path.join(os.path.dirname(__file__), "folder_test", "failure") + last = get_first_gldas_folder(path, ["{time:%Y}", "{time:%j}"]) last_should = None assert last == last_should + def test_gldas_get_start_end(): - path = os.path.join(os.path.dirname(__file__), - 'test-data', 'GLDAS_NOAH_image_data') + path = os.path.join( + os.path.dirname(__file__), "test-data", "GLDAS_NOAH_image_data" + ) version, start, end = gldas_folder_get_version_first_last(path) - version_should='GLDAS_Noah_v21_025' - start_should = datetime(2015,1,1) - end_should = datetime(2015,1,1) + version_should = "GLDAS_Noah_v21_025" + start_should = datetime(2015, 1, 1) + end_should = datetime(2015, 1, 1) assert version == version_should assert end == end_should assert start == start_should - - - -if __name__ == '__main__': - test_gldas_get_start_end() - test_get_last_dir_in_dir() - test_get_first_dir_in_dir() - test_get_first_gldas_folder() - test_get_first_gldas_folder_no_folder() - test_get_last_dir_in_dir() - test_get_last_gldas_folder() - test_get_last_gldas_folder_no_folder() - test_get_last_dir_in_dir_failure() \ No newline at end of file + diff --git a/tests/test_grid.py b/tests/test_grid.py index dbbd709..82f5ff7 100644 --- a/tests/test_grid.py +++ b/tests/test_grid.py @@ -1,5 +1,6 @@ from gldas.grid import GLDAS025Cellgrid, GLDAS025LandGrid, subgrid4bbox + def test_GLDAS025_cell_grid(): gldas = GLDAS025Cellgrid() assert gldas.activegpis.size == 1036800 @@ -13,11 +14,12 @@ def test_GLDAS025LandGrid(): gldas = GLDAS025LandGrid() assert gldas.activegpis.size == 243883 assert gldas.activegpis[153426] == 810230 - assert gldas.activearrcell[153426] == 1720 + assert gldas.activearrcell[153426] == 1720 assert gldas.activearrlat[153426] == 50.625 assert gldas.activearrlon[153426] == 57.625 + def test_bbox_subgrid(): - bbox = (130.125, -29.875, 134.875, -25.125) # bbox for cell 2244 + bbox = (130.125, -29.875, 134.875, -25.125) # bbox for cell 2244 subgrid = subgrid4bbox(GLDAS025Cellgrid(), *bbox) - assert subgrid == GLDAS025Cellgrid().subgrid_from_cells([2244]) \ No newline at end of file + assert subgrid == GLDAS025Cellgrid().subgrid_from_cells([2244]) diff --git a/tests/test_interface.py b/tests/test_interface.py index 0b4e04c..043f6cf 100644 --- a/tests/test_interface.py +++ b/tests/test_interface.py @@ -6,246 +6,319 @@ from gldas.interface import GLDAS_Noah_v21_025Ds, GLDAS_Noah_v21_025Img from gldas.grid import GLDAS025LandGrid -def test_GLDAS_Noah_v1_025Ds_img_reading(): - parameter = ['086_L2', '086_L1', '085_L1', '138', '132', '051'] - img = GLDAS_Noah_v1_025Ds(data_path=os.path.join(os.path.dirname(__file__), - 'test-data', - 'GLDAS_NOAH_image_data'), - parameter=parameter, - subgrid=None, - 
array_1D=True) - image = img.read( - datetime(2015, 1, 1, 0)) +def test_GLDAS_Noah_v1_025Ds_img_reading(): + parameter = ["086_L2", "086_L1", "085_L1", "138", "132", "051"] + img = GLDAS_Noah_v1_025Ds( + data_path=os.path.join( + os.path.dirname(__file__), "test-data", "GLDAS_NOAH_image_data" + ), + parameter=parameter, + subgrid=None, + array_1D=True, + ) + + image = img.read(datetime(2015, 1, 1, 0)) assert sorted(image.data.keys()) == sorted(parameter) assert image.timestamp == datetime(2015, 1, 1, 0) - assert image.data['086_L1'][998529] == 30.7344 - assert image.data['086_L2'][998529] == 93.138 - assert image.data['085_L1'][206360] == 285.19 - assert image.data['138'][998529] == 237.27 - assert image.data['051'][998529] == 0 - assert image.lon.shape == (360 * 180 * (1 / 0.25)**2,) + assert image.data["086_L1"][998529] == 30.7344 + assert image.data["086_L2"][998529] == 93.138 + assert image.data["085_L1"][206360] == 285.19 + assert image.data["138"][998529] == 237.27 + assert image.data["051"][998529] == 0 + assert image.lon.shape == (360 * 180 * (1 / 0.25) ** 2,) assert image.lon.shape == image.lat.shape assert sorted(list(image.metadata.keys())) == sorted(parameter) - assert image.metadata['085_L1']['units'] == u'K' - assert image.metadata['085_L1']['long_name'] == u'ST Surface temperature of soil K' + assert image.metadata["085_L1"]["units"] == u"K" + assert ( + image.metadata["085_L1"]["long_name"] + == u"ST Surface temperature of soil K" + ) + img.close() def test_GLDAS_Noah_v21_025Ds_img_reading(): - parameter = ['SoilMoi10_40cm_inst', 'SoilMoi0_10cm_inst', 'SoilTMP0_10cm_inst', - 'AvgSurfT_inst', 'SWE_inst'] - img = GLDAS_Noah_v21_025Ds(data_path=os.path.join(os.path.dirname(__file__), - 'test-data', - 'GLDAS_NOAH_image_data'), - parameter=parameter, - subgrid=None, - array_1D=True) - - image = img.read( - datetime(2015, 1, 1, 0)) + parameter = [ + "SoilMoi10_40cm_inst", + "SoilMoi0_10cm_inst", + "SoilTMP0_10cm_inst", + "AvgSurfT_inst", + "SWE_inst", + ] + img = GLDAS_Noah_v21_025Ds( + data_path=os.path.join( + os.path.dirname(__file__), "test-data", "GLDAS_NOAH_image_data" + ), + parameter=parameter, + subgrid=None, + array_1D=True, + ) + + image = img.read(datetime(2015, 1, 1, 0)) assert sorted(image.data.keys()) == sorted(parameter) assert image.timestamp == datetime(2015, 1, 1, 0) - assert round(image.data['SoilMoi0_10cm_inst'][998529],3) == 38.804 - assert round(image.data['SoilMoi10_40cm_inst'][998529],3) == 131.699 - assert round(image.data['SoilTMP0_10cm_inst'][998529],3) == 254.506 - assert round(image.data['AvgSurfT_inst'][998529],3) == 235.553 - assert round(image.data['SWE_inst'][998529],3) == 108.24 - assert image.lon.shape == (360 * 180 * (1 / 0.25)**2,) + assert round(image.data["SoilMoi0_10cm_inst"][998529], 3) == 38.804 + assert round(image.data["SoilMoi10_40cm_inst"][998529], 3) == 131.699 + assert round(image.data["SoilTMP0_10cm_inst"][998529], 3) == 254.506 + assert round(image.data["AvgSurfT_inst"][998529], 3) == 235.553 + assert round(image.data["SWE_inst"][998529], 3) == 108.24 + assert image.lon.shape == (360 * 180 * (1 / 0.25) ** 2,) assert image.lon.shape == image.lat.shape assert sorted(list(image.metadata.keys())) == sorted(parameter) - assert image.metadata['AvgSurfT_inst']['units'] == u'K' - assert image.metadata['AvgSurfT_inst']['long_name'] == u'Average Surface Skin temperature' + assert image.metadata["AvgSurfT_inst"]["units"] == u"K" + assert ( + image.metadata["AvgSurfT_inst"]["long_name"] + == u"Average Surface Skin temperature" + ) + 
img.close() def test_GLDAS_Noah_v21_025Ds_img_reading_landpoints(): landgrid = GLDAS025LandGrid() - parameter = ['SoilMoi10_40cm_inst', 'SoilMoi0_10cm_inst', 'SoilTMP0_10cm_inst', - 'AvgSurfT_inst', 'SWE_inst'] - - img = GLDAS_Noah_v21_025Ds(data_path=os.path.join(os.path.dirname(__file__), - 'test-data', - 'GLDAS_NOAH_image_data'), - parameter=parameter, - subgrid=landgrid, - array_1D=True) - - image = img.read( - datetime(2015, 1, 1, 0)) + parameter = [ + "SoilMoi10_40cm_inst", + "SoilMoi0_10cm_inst", + "SoilTMP0_10cm_inst", + "AvgSurfT_inst", + "SWE_inst", + ] + + img = GLDAS_Noah_v21_025Ds( + data_path=os.path.join( + os.path.dirname(__file__), "test-data", "GLDAS_NOAH_image_data" + ), + parameter=parameter, + subgrid=landgrid, + array_1D=True, + ) + + image = img.read(datetime(2015, 1, 1, 0)) assert sorted(image.data.keys()) == sorted(parameter) # gpi for testing on the land grid: 527549, lat: 1.625, lon: -52.625 assert image.timestamp == datetime(2015, 1, 1, 0) - assert round(image.data['SoilMoi0_10cm_inst'][50000],3) == 26.181 - assert round(image.data['SoilMoi10_40cm_inst'][50000],3) == 84.558 - assert round(image.data['SoilTMP0_10cm_inst'][50000],3) == 301.276 - assert round(image.data['AvgSurfT_inst'][50000],3) == 294.863 - assert round(image.data['SWE_inst'][50000],3) == 0 - assert (image.lon.size, image.lat.size) == (landgrid.activearrlat.size, - landgrid.activearrlon.size) + assert round(image.data["SoilMoi0_10cm_inst"][50000], 3) == 26.181 + assert round(image.data["SoilMoi10_40cm_inst"][50000], 3) == 84.558 + assert round(image.data["SoilTMP0_10cm_inst"][50000], 3) == 301.276 + assert round(image.data["AvgSurfT_inst"][50000], 3) == 294.863 + assert round(image.data["SWE_inst"][50000], 3) == 0 + assert (image.lon.size, image.lat.size) == ( + landgrid.activearrlat.size, + landgrid.activearrlon.size, + ) assert sorted(list(image.metadata.keys())) == sorted(parameter) - assert image.metadata['AvgSurfT_inst']['units'] == u'K' - assert image.metadata['AvgSurfT_inst']['long_name'] == u'Average Surface Skin temperature' + assert image.metadata["AvgSurfT_inst"]["units"] == u"K" + assert ( + image.metadata["AvgSurfT_inst"]["long_name"] + == u"Average Surface Skin temperature" + ) + img.close() + def test_GLDAS_Noah_v1_025Ds_timestamps_for_daterange(): landgrid = GLDAS025LandGrid() - parameter = ['086_L2', '086_L1', '085_L1', '138', '132', '051'] - img = GLDAS_Noah_v1_025Ds(data_path=os.path.join(os.path.dirname(__file__), - 'test-data', - 'GLDAS_NOAH_image_data'), - parameter=parameter, - subgrid=None, - array_1D=True) - - tstamps = img.tstamps_for_daterange(datetime(2000, 1, 1), - datetime(2000, 1, 1)) + parameter = ["086_L2", "086_L1", "085_L1", "138", "132", "051"] + img = GLDAS_Noah_v1_025Ds( + data_path=os.path.join( + os.path.dirname(__file__), "test-data", "GLDAS_NOAH_image_data" + ), + parameter=parameter, + subgrid=None, + array_1D=True, + ) + + tstamps = img.tstamps_for_daterange( + datetime(2000, 1, 1), datetime(2000, 1, 1) + ) assert len(tstamps) == 8 - assert tstamps == [datetime(2000, 1, 1, 0), - datetime(2000, 1, 1, 3), - datetime(2000, 1, 1, 6), - datetime(2000, 1, 1, 9), - datetime(2000, 1, 1, 12), - datetime(2000, 1, 1, 15), - datetime(2000, 1, 1, 18), - datetime(2000, 1, 1, 21)] + assert tstamps == [ + datetime(2000, 1, 1, 0), + datetime(2000, 1, 1, 3), + datetime(2000, 1, 1, 6), + datetime(2000, 1, 1, 9), + datetime(2000, 1, 1, 12), + datetime(2000, 1, 1, 15), + datetime(2000, 1, 1, 18), + datetime(2000, 1, 1, 21), + ] + img.close() def 
test_GLDAS_Noah_v21_025Ds_timestamps_for_daterange(): - parameter = ['SoilMoi10_40cm_inst', 'SoilMoi0_10cm_inst', 'SoilTMP0_10cm_inst', - 'AvgSurfT_inst', 'SWE_inst'] - img = GLDAS_Noah_v21_025Ds(data_path=os.path.join(os.path.dirname(__file__), - 'test-data', - 'GLDAS_NOAH_image_data'), - parameter=parameter, - subgrid=None, - array_1D=True) - - tstamps = img.tstamps_for_daterange(datetime(2000, 1, 1), - datetime(2000, 1, 1)) + parameter = [ + "SoilMoi10_40cm_inst", + "SoilMoi0_10cm_inst", + "SoilTMP0_10cm_inst", + "AvgSurfT_inst", + "SWE_inst", + ] + img = GLDAS_Noah_v21_025Ds( + data_path=os.path.join( + os.path.dirname(__file__), "test-data", "GLDAS_NOAH_image_data" + ), + parameter=parameter, + subgrid=None, + array_1D=True, + ) + + tstamps = img.tstamps_for_daterange( + datetime(2000, 1, 1), datetime(2000, 1, 1) + ) assert len(tstamps) == 8 - assert tstamps == [datetime(2000, 1, 1, 0), - datetime(2000, 1, 1, 3), - datetime(2000, 1, 1, 6), - datetime(2000, 1, 1, 9), - datetime(2000, 1, 1, 12), - datetime(2000, 1, 1, 15), - datetime(2000, 1, 1, 18), - datetime(2000, 1, 1, 21)] + assert tstamps == [ + datetime(2000, 1, 1, 0), + datetime(2000, 1, 1, 3), + datetime(2000, 1, 1, 6), + datetime(2000, 1, 1, 9), + datetime(2000, 1, 1, 12), + datetime(2000, 1, 1, 15), + datetime(2000, 1, 1, 18), + datetime(2000, 1, 1, 21), + ] + img.close() def test_GLDAS_Noah_v1_025Img_img_reading_1D(): - parameter = ['086_L2', '086_L1', '085_L1', '138', '132', '051'] - img = GLDAS_Noah_v1_025Img(os.path.join(os.path.dirname(__file__), - 'test-data', - 'GLDAS_NOAH_image_data', - '2015', - '001', - 'GLDAS_NOAH025SUBP_3H.A2015001.0000.001.2015037193230.grb'), - parameter=parameter, - subgrid=None, - array_1D=True) + parameter = ["086_L2", "086_L1", "085_L1", "138", "132", "051"] + img = GLDAS_Noah_v1_025Img( + os.path.join( + os.path.dirname(__file__), + "test-data", + "GLDAS_NOAH_image_data", + "2015", + "001", + "GLDAS_NOAH025SUBP_3H.A2015001.0000.001.2015037193230.grb", + ), + parameter=parameter, + subgrid=None, + array_1D=True, + ) image = img.read() assert sorted(image.data.keys()) == sorted(parameter) - assert image.data['086_L1'][998529] == 30.7344 - assert image.data['086_L2'][998529] == 93.138 - assert image.data['085_L1'][206360] == 285.19 - assert image.data['138'][998529] == 237.27 - assert image.data['051'][998529] == 0 - assert image.lon.shape == (360 * 180 * (1 / 0.25)**2,) + assert image.data["086_L1"][998529] == 30.7344 + assert image.data["086_L2"][998529] == 93.138 + assert image.data["085_L1"][206360] == 285.19 + assert image.data["138"][998529] == 237.27 + assert image.data["051"][998529] == 0 + assert image.lon.shape == (360 * 180 * (1 / 0.25) ** 2,) assert image.lon.shape == image.lat.shape + img.close() def test_GLDAS_Noah_v21_025Img_img_reading_1D(): landgrid = GLDAS025LandGrid() - parameter = ['SoilMoi10_40cm_inst', 'SoilMoi0_10cm_inst', 'SoilTMP0_10cm_inst', - 'AvgSurfT_inst', 'SWE_inst'] - img = GLDAS_Noah_v21_025Img(os.path.join(os.path.dirname(__file__), - 'test-data', - 'GLDAS_NOAH_image_data', - '2015', - '001', - 'GLDAS_NOAH025_3H.A20150101.0000.021.nc4'), - parameter=parameter, - subgrid=landgrid, - array_1D=True) + parameter = [ + "SoilMoi10_40cm_inst", + "SoilMoi0_10cm_inst", + "SoilTMP0_10cm_inst", + "AvgSurfT_inst", + "SWE_inst", + ] + img = GLDAS_Noah_v21_025Img( + os.path.join( + os.path.dirname(__file__), + "test-data", + "GLDAS_NOAH_image_data", + "2015", + "001", + "GLDAS_NOAH025_3H.A20150101.0000.021.nc4", + ), + parameter=parameter, + subgrid=landgrid, + 
array_1D=True, + ) image = img.read() assert sorted(image.data.keys()) == sorted(parameter) # gpi for testing on the land grid: 527549, lat: 1.625, lon: -52.625 - assert round(image.data['SoilMoi0_10cm_inst'][50000],3) == 26.181 - assert round(image.data['SoilMoi10_40cm_inst'][50000],3) == 84.558 - assert round(image.data['SoilTMP0_10cm_inst'][50000],3) == 301.276 - assert round(image.data['AvgSurfT_inst'][50000],3) == 294.863 - assert round(image.data['SWE_inst'][50000],3) == 0 - assert (image.lon.size, image.lat.size) == (landgrid.activearrlat.size, - landgrid.activearrlon.size) - + assert round(image.data["SoilMoi0_10cm_inst"][50000], 3) == 26.181 + assert round(image.data["SoilMoi10_40cm_inst"][50000], 3) == 84.558 + assert round(image.data["SoilTMP0_10cm_inst"][50000], 3) == 301.276 + assert round(image.data["AvgSurfT_inst"][50000], 3) == 294.863 + assert round(image.data["SWE_inst"][50000], 3) == 0 + assert (image.lon.size, image.lat.size) == ( + landgrid.activearrlat.size, + landgrid.activearrlon.size, + ) + img.close() def test_GLDAS_Noah_v1_025Img_img_reading_2D(): - parameter = ['086_L2', '086_L1', '085_L1', '138', '132', '051'] - img = GLDAS_Noah_v1_025Img(os.path.join(os.path.dirname(__file__), - 'test-data', - 'GLDAS_NOAH_image_data', - '2015', - '001', - 'GLDAS_NOAH025SUBP_3H.A2015001.0000.001.2015037193230.grb'), - parameter=parameter) + parameter = ["086_L2", "086_L1", "085_L1", "138", "132", "051"] + img = GLDAS_Noah_v1_025Img( + os.path.join( + os.path.dirname(__file__), + "test-data", + "GLDAS_NOAH_image_data", + "2015", + "001", + "GLDAS_NOAH025SUBP_3H.A2015001.0000.001.2015037193230.grb", + ), + parameter=parameter, + ) image = img.read() - assert image.data['086_L1'].shape == (720, 1440) + assert image.data["086_L1"].shape == (720, 1440) assert image.lon[0, 0] == -179.875 assert image.lon[0, 1439] == 179.875 assert image.lat[0, 0] == 89.875 assert image.lat[719, 0] == -89.875 assert sorted(image.data.keys()) == sorted(parameter) - assert image.data['086_L1'][26, 609] == 30.7344 - assert image.data['086_L2'][26, 609] == 93.138 - assert image.data['085_L1'][576, 440] == 285.19 - assert image.data['138'][26, 609] == 237.27 - assert image.data['051'][26, 609] == 0 + assert image.data["086_L1"][26, 609] == 30.7344 + assert image.data["086_L2"][26, 609] == 93.138 + assert image.data["085_L1"][576, 440] == 285.19 + assert image.data["138"][26, 609] == 237.27 + assert image.data["051"][26, 609] == 0 assert image.lon.shape == (720, 1440) assert image.lon.shape == image.lat.shape + img.close() def test_GLDAS_Noah_v21_025Img_img_reading_2D(): - parameter = ['SoilMoi10_40cm_inst', 'SoilMoi0_10cm_inst', 'SoilTMP0_10cm_inst', - 'AvgSurfT_inst', 'SWE_inst'] - img = GLDAS_Noah_v21_025Img(os.path.join(os.path.dirname(__file__), - 'test-data', - 'GLDAS_NOAH_image_data', - '2015', - '001', - 'GLDAS_NOAH025_3H.A20150101.0000.021.nc4'), - parameter=parameter) + parameter = [ + "SoilMoi10_40cm_inst", + "SoilMoi0_10cm_inst", + "SoilTMP0_10cm_inst", + "AvgSurfT_inst", + "SWE_inst", + ] + img = GLDAS_Noah_v21_025Img( + os.path.join( + os.path.dirname(__file__), + "test-data", + "GLDAS_NOAH_image_data", + "2015", + "001", + "GLDAS_NOAH025_3H.A20150101.0000.021.nc4", + ), + parameter=parameter, + ) image = img.read() - assert image.data['SoilMoi0_10cm_inst'].shape == (720, 1440) + assert image.data["SoilMoi0_10cm_inst"].shape == (720, 1440) assert image.lon[0, 0] == -179.875 assert image.lon[0, 1439] == 179.875 assert image.lat[0, 0] == 89.875 assert image.lat[719, 0] == -89.875 assert 
sorted(image.data.keys()) == sorted(parameter)
-    assert round(image.data['SoilMoi0_10cm_inst'][26,609],3) == 38.804
-    assert round(image.data['SoilMoi10_40cm_inst'][26,609],3) == 131.699
-    assert round(image.data['SoilTMP0_10cm_inst'][26,609],3) == 254.506
-    assert round(image.data['AvgSurfT_inst'][26,609],3) == 235.553
-    assert round(image.data['SWE_inst'][26,609],3) == 108.24
+    assert round(image.data["SoilMoi0_10cm_inst"][26, 609], 3) == 38.804
+    assert round(image.data["SoilMoi10_40cm_inst"][26, 609], 3) == 131.699
+    assert round(image.data["SoilTMP0_10cm_inst"][26, 609], 3) == 254.506
+    assert round(image.data["AvgSurfT_inst"][26, 609], 3) == 235.553
+    assert round(image.data["SWE_inst"][26, 609], 3) == 108.24
     assert image.lon.shape == (720, 1440)
     assert image.lon.shape == image.lat.shape
-
-if __name__ == '__main__':
-    test_GLDAS_Noah_v1_025Ds_img_reading()
-
+    img.close()
diff --git a/tests/test_reshuffle.py b/tests/test_reshuffle.py
index 7a95cc0..2ba1562 100755
--- a/tests/test_reshuffle.py
+++ b/tests/test_reshuffle.py
@@ -1,4 +1,3 @@
-
 import os
 import glob
 import tempfile
@@ -8,35 +7,58 @@
 from gldas.reshuffle import main
 from gldas.interface import GLDASTs
 
+from tempfile import TemporaryDirectory
+
 import pytest
 
-@pytest.mark.parametrize("landpoints,bbox,n_files_should",
-                         [(True, False, 969), (False, False, 2593), (False, True, 5)])
+
+@pytest.mark.parametrize(
    "landpoints,bbox,n_files_should",
+    # 15 cells, 4 without landpoints, 1 grid file
+    [(True, True, 15-4+1), (False, True, 15+1)],
+)
 def test_reshuffle(landpoints, bbox, n_files_should):
     if bbox is True:
-        bbox = ['44.375', '14.375', '45.875', '15.625']
-    inpath = os.path.join(os.path.dirname(os.path.abspath(__file__)),
-                          "test-data", "img2ts_test", "netcdf")
+        bbox = ["41.125", "11.125", "63.875", "23.875"]
+    inpath = os.path.join(
+        os.path.dirname(os.path.abspath(__file__)),
+        "test-data",
+        "img2ts_test",
+        "netcdf",
+    )
     startdate = "2016-01-01T03:00"
     enddate = "2016-01-01T21:00"
     parameters = ["SoilMoi0_10cm_inst", "SoilMoi10_40cm_inst"]
-    ts_path = tempfile.mkdtemp()
-    args = [inpath, ts_path, startdate, enddate] + parameters \
-        + ['--land_points', str(landpoints)]
-    if bbox:
-        args += ['--bbox', *bbox]
-    main(args)
-    assert len(glob.glob(os.path.join(ts_path, "*.nc"))) == n_files_should
-
-    ds = GLDASTs(ts_path,
-                 ioclass_kws={'read_bulk': True, 'read_dates': False},
-                 parameters=['SoilMoi0_10cm_inst', 'SoilMoi10_40cm_inst'])
-
-    ts = ds.read(45.08, 15.1)
-    ts_SM0_10_values_should = np.array([9.595, 9.593, 9.578,
-                                        9.562, 9.555, 9.555, 9.556], dtype=np.float32)
-    nptest.assert_allclose(ts['SoilMoi0_10cm_inst'].values, ts_SM0_10_values_should, rtol=1e-5)
-    ts_SM10_40_values_should = np.array([50.065, 50.064, 50.062,
-                                         50.060, 50.059, 50.059, 50.059], dtype=np.float32)
-    nptest.assert_allclose(ts['SoilMoi10_40cm_inst'].values, ts_SM10_40_values_should,rtol=1e-5)
+    with TemporaryDirectory() as ts_path:
+        args = (
+            [inpath, ts_path, startdate, enddate]
+            + parameters
+            + ["--land_points", str(landpoints)]
+        )
+        if bbox:
+            args += ["--bbox", *bbox]
+        main(args)
+        assert len(glob.glob(os.path.join(ts_path, "*.nc"))) == n_files_should
+
+        ds = GLDASTs(
+            ts_path,
+            ioclass_kws={"read_bulk": True, "read_dates": False},
+            parameters=["SoilMoi0_10cm_inst", "SoilMoi10_40cm_inst"],
+        )
+
+        ts = ds.read(45.08, 15.1)
+        ts_SM0_10_values_should = np.array(
+            [9.595, 9.593, 9.578, 9.562, 9.555, 9.555, 9.556], dtype=np.float32
+        )
+        nptest.assert_allclose(
+            ts["SoilMoi0_10cm_inst"].values, ts_SM0_10_values_should, rtol=1e-5
) + ts_SM10_40_values_should = np.array( + [50.065, 50.064, 50.062, 50.060, 50.059, 50.059, 50.059], + dtype=np.float32, + ) + nptest.assert_allclose( + ts["SoilMoi10_40cm_inst"].values, ts_SM10_40_values_should, rtol=1e-5 + ) + ds.close() diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000..580d389 --- /dev/null +++ b/tox.ini @@ -0,0 +1,68 @@ +# Tox configuration file +# Read more under https://tox.readthedocs.org/ +# THIS SCRIPT IS SUPPOSED TO BE AN EXAMPLE. MODIFY IT ACCORDING TO YOUR NEEDS! + +[tox] +minversion = 3.15 +envlist = default + + +[testenv] +description = invoke pytest to run automated tests +isolated_build = True +setenv = + TOXINIDIR = {toxinidir} +passenv = + HOME +extras = + testing +commands = + pytest {posargs} + + +[testenv:{clean,build}] +description = + Build (or clean) the package in isolation according to instructions in: + https://setuptools.readthedocs.io/en/latest/build_meta.html#how-to-use-it + https://github.com/pypa/pep517/issues/91 + https://github.com/pypa/build +# NOTE: build is still experimental, please refer to the links for updates/issues +skip_install = True +changedir = {toxinidir} +deps = + build: build +commands = + clean: python -c 'from shutil import rmtree; rmtree("build", True); rmtree("dist", True)' + build: python -m build . +# By default `build` produces wheels, you can also explicitly use the flags `--sdist` and `--wheel` + + +[testenv:{docs,doctests}] +description = invoke sphinx-build to build the docs/run doctests +setenv = + DOCSDIR = {toxinidir}/docs + BUILDDIR = {toxinidir}/docs/_build + docs: BUILD = html + doctests: BUILD = doctest +deps = + -r {toxinidir}/docs/requirements.txt + # ^ requirements.txt shared with Read The Docs +commands = + sphinx-build -b {env:BUILD} -d "{env:BUILDDIR}/doctrees" "{env:DOCSDIR}" "{env:BUILDDIR}/{env:BUILD}" {posargs} + + +[testenv:publish] +description = + Publish the package you have been developing to a package index server. + By default, it uses testpypi. If you really want to publish your package + to be publicly accessible in PyPI, use the `-- --repository pypi` option. +skip_install = True +changedir = {toxinidir} +passenv = + TWINE_USERNAME + TWINE_PASSWORD + TWINE_REPOSITORY +deps = twine +commands = + python -m twine check dist/* + python -m twine upload {posargs:--repository testpypi} dist/*
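The reworked `test_reshuffle` above also doubles as a recipe for driving the image-to-time-series conversion programmatically. A sketch under the assumption that a folder of GLDAS v2.1 nc4 images exists locally; the input path is a placeholder, and the arguments mirror the test:

```python
import tempfile

from gldas.reshuffle import main

# Placeholder: a root folder of GLDAS_NOAH025_3H nc4 images in %Y/%j subfolders.
inpath = "/data/GLDAS"

with tempfile.TemporaryDirectory() as ts_path:
    main([
        inpath,
        ts_path,
        "2016-01-01T03:00",
        "2016-01-01T21:00",
        "SoilMoi0_10cm_inst",
        "SoilMoi10_40cm_inst",
        "--land_points", "True",
        "--bbox", "41.125", "11.125", "63.875", "23.875",
    ])
    # ts_path now holds 5-degree cell files plus grid.nc, readable via GLDASTs.
```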