diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml new file mode 100644 index 00000000..c19aac9e --- /dev/null +++ b/.github/workflows/ci.yaml @@ -0,0 +1,204 @@ +name: CI + +on: + push: + branches: "*" + pull_request: + branches: main + +permissions: + contents: read + pages: write + id-token: write + +concurrency: + group: "pages" + cancel-in-progress: false + +jobs: + coverage: + name: coverage + runs-on: ubuntu-latest + timeout-minutes: 15 + strategy: + fail-fast: false + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + submodules: recursive + fetch-depth: 0 + fetch-tags: true + + - name: Setup Miniconda + uses: mamba-org/setup-micromamba@v1 + with: + cache-downloads: true + condarc: | + channels: + - conda-forge + create-args: | + python=3.12 + environment-name: pyinterp + environment-file: conda/environment.yml + init-shell: bash + + - name: Build extension with code coverage and the C++ tests + shell: bash -l {0} + run: | + python setup.py build_ext --code-coverage --build-unittests + python setup.py build + + - name: Run tests and measure coverage + shell: bash -l {0} + run: | + pytest -v -ra --cov=pyinterp --cov-report=lcov --measure-coverage + python setup.py gtest + + - name: Filter and merge coverage + shell: bash -l {0} + run: | + lcov --add-tracefile coverage_cpp.lcov --add-tracefile coverage.lcov --output-file merged_coverage.lcov + lcov -r merged_coverage.lcov "${CONDA_PREFIX}/*" "/usr/*" "*/third_party/*" --output-file filtered_merged_coverage.lcov + genhtml filtered_merged_coverage.lcov --output-directory htmllcov + + - name: Print total coverage + shell: bash -l {0} + run: | + percent=$(lcov -l filtered_merged_coverage.lcov|tail -1|grep -o '[0-9]\+\.[0-9]\+%' | head -n 1) + echo "Total coverage: $percent" + + - name: Upload HTML report as artifact + uses: actions/upload-artifact@v4 + with: + name: coverage-report + path: htmllcov + + linux: + name: ${{ matrix.python-version }}-posix + runs-on: ubuntu-latest + 
timeout-minutes: 15 + strategy: + fail-fast: false + max-parallel: 5 + matrix: + # Python 3.12 is used in all other jobs + python-version: ['3.10', '3.11', '3.13'] + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + submodules: recursive + fetch-depth: 0 + fetch-tags: true + + - name: Setup Miniconda + uses: mamba-org/setup-micromamba@v1 + with: + cache-downloads: true + condarc: | + channels: + - conda-forge + create-args: | + python=${{ matrix.python-version }} + environment-name: pyinterp + environment-file: conda/environment.yml + init-shell: bash + + - name: Build extension + shell: bash -l {0} + run: | + python setup.py build_ext + python setup.py build + + - name: Run tests + shell: bash -l {0} + run: | + pytest -v -ra + + + macos: + name: macos + runs-on: macos-latest + timeout-minutes: 15 + strategy: + fail-fast: false + max-parallel: 5 + + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + submodules: recursive + fetch-depth: 0 + fetch-tags: true + + - name: Setup Miniconda + uses: mamba-org/setup-micromamba@v1 + with: + cache-downloads: true + condarc: | + channels: + - conda-forge + create-args: | + python=3.12 + environment-name: pyinterp + environment-file: conda/environment.yml + init-shell: bash + + - name: Build extension + shell: bash -l {0} + run: | + python setup.py build_ext + python setup.py build + + - name: Run tests + shell: bash -l {0} + run: | + pytest -v -ra + + - name: Generate Documentation + shell: bash -l {0} + run: | + sphinx-build -b html docs/source docs/build + + - name: Setup Pages + uses: actions/configure-pages@v5 + + - name: Upload artifact + uses: actions/upload-pages-artifact@v3 + with: + path: docs/build + + - name: Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@v4 + + win: + name: win + runs-on: windows-2019 + + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Setup Miniconda + uses: mamba-org/setup-micromamba@v1 + with: + 
cache-downloads: true + condarc: | + channels: + - conda-forge + create-args: | + python=3.12 + environment-name: pyinterp + environment-file: conda/environment.yml + + - name: Building Testing Python Package + shell: bash -l {0} + run: | + python setup.py build_ext --generator="Visual Studio 16 2019" + python setup.py build + pytest -v -ra diff --git a/CMakeLists.txt b/CMakeLists.txt index c71a1c59..80a23861 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -243,6 +243,7 @@ else() else() message( WARNING "No BLAS library found. Eigen uses its own BLAS implementation.") + set(BLAS_LIBRARIES "") endif() endif() diff --git a/README.rst b/README.rst index 3b127aa9..bb68d61d 100644 --- a/README.rst +++ b/README.rst @@ -2,8 +2,8 @@ pangeo-pyinterp ############### -|Build Status| |Azure DevOps Coverage| |Downloads| |Platforms| -|Latest Release Date| |License| |Binder| |Documentation Status| +|Build Status| |Downloads| |Platforms| +|Latest Release Date| |License| |Binder| Python library for optimized geo-referenced interpolation. @@ -133,9 +133,8 @@ The geohash is a compact way of representing a location, and is useful for storing a location in a database, or for indexing a location in a database. -.. |Build Status| image:: https://dev.azure.com/fbriol/pangeo-pyinterp/_apis/build/status/CNES.pangeo-pyinterp?branchName=master - :target: https://dev.azure.com/fbriol/pangeo-pyinterp/_build/latest?definitionId=2&branchName=master -.. |Azure DevOps Coverage| image:: https://img.shields.io/azure-devops/coverage/fbriol/pangeo-pyinterp/2?style=flat-square +.. |Build Status| image:: https://github.com/CNES/pangeo-pyinterp/actions/workflows/ci.yaml/badge.svg + :target: https://github.com/CNES/pangeo-pyinterp/actions .. |Downloads| image:: https://anaconda.org/conda-forge/pyinterp/badges/downloads.svg?service=github :target: image:: https://www.anaconda.com/distribution/ .. 
|Platforms| image:: https://anaconda.org/conda-forge/pyinterp/badges/platforms.svg?service=github @@ -146,5 +145,3 @@ storing a location in a database, or for indexing a location in a database. :target: https://opensource.org/licenses/BSD-3-Clause .. |Binder| image:: https://mybinder.org/badge_logo.svg :target: https://mybinder.org/v2/gh/CNES/pangeo-pyinterp/master?filepath=notebooks/auto_examples/ -.. |Documentation Status| image:: https://readthedocs.org/projects/pangeo-pyinterp/badge/?version=latest - :target: https://pangeo-pyinterp.readthedocs.io/en/latest/?badge=latest diff --git a/azure-pipelines.yml b/azure-pipelines.yml deleted file mode 100644 index 0aa0c525..00000000 --- a/azure-pipelines.yml +++ /dev/null @@ -1,180 +0,0 @@ -trigger: - - master - - develop -jobs: -- job: - displayName: Ubuntu-22.04 - pool: - vmImage: 'ubuntu-22.04' - strategy: - matrix: - Python311: - python.version: '3.11' - steps: - - checkout: self - fetchDepth: 1 - - bash: git submodule update --init --recursive - displayName: Initialize the submodules - - script: | - sudo add-apt-repository ppa:ubuntu-toolchain-r/test - sudo apt-get update - sudo apt-get install -y libc++-dev libc++abi-dev libc++abi1 libstdc++-10-dev gcc-10 g++-10 - sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-10 100 - sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-10 100 - displayName: Install GCC - - bash: echo "##vso[task.prependpath]$CONDA/bin" - displayName: Add conda to PATH - - bash: conda create --yes --quiet --name Build - displayName: Create Anaconda environment - - bash: | - source activate Build - conda install --yes --quiet -c conda-forge mamba python=$PYTHON_VERSION - displayName: Install mamba - - bash: | - source activate Build - mamba install --yes --quiet -c conda-forge dask cmake coverage eigen boost-cpp mkl-devel numpy pytest-cov setuptools xarray - displayName: Install build requirements - - bash: | - source activate Build - python setup.py build_ext 
--mkl=true build - displayName: Build package - - bash: | - source activate Build - python setup.py test --pytest-args="--junitxml=junit/test-results.xml --cov=pyinterp --cov-report=xml --cov-report=html --cov-config=pytest.ini" - displayName: Test cases - - task: PublishTestResults@2 - condition: succeededOrFailed() - inputs: - testResultsFiles: '**/test-*.xml' - testRunTitle: 'Publish test results for Python $(python.version)' - - task: PublishCodeCoverageResults@2 - inputs: - codeCoverageTool: Cobertura - summaryFileLocation: '$(System.DefaultWorkingDirectory)/**/coverage.xml' - reportDirectory: '$(System.DefaultWorkingDirectory)/**/htmlcov' -- job: - displayName: Ubuntu-22.04 - pool: - vmImage: 'ubuntu-22.04' - strategy: - matrix: - Python310: - python.version: '3.10' - Python312: - python.version: '3.12' - steps: - - checkout: self - fetchDepth: 1 - - bash: git submodule update --init --recursive - displayName: Initialize the submodules - - script: | - sudo add-apt-repository ppa:ubuntu-toolchain-r/test - sudo apt-get update - sudo apt-get install -y libc++-dev libc++abi-dev libc++abi1 libstdc++-10-dev gcc-10 g++-10 - sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-10 100 - sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-10 100 - displayName: Install GCC - - bash: echo "##vso[task.prependpath]$CONDA/bin" - displayName: Add conda to PATH - - bash: conda create --yes --quiet --name Build - displayName: Create Anaconda environment - - bash: | - source activate Build - conda install --yes --quiet -c conda-forge mamba python=$PYTHON_VERSION - displayName: Install mamba - - bash: | - source activate Build - mamba install --yes --quiet -c conda-forge dask cmake coverage eigen boost-cpp mkl-devel numpy pytest-cov setuptools xarray - displayName: Install build requirements - - bash: | - source activate Build - python setup.py build_ext --mkl=true build - displayName: Build package - - bash: | - source activate Build - python setup.py 
test - displayName: Test cases -- job: - displayName: macOS-14 - pool: - vmImage: 'macOS-14' - strategy: - matrix: - Python310: - python.version: '3.10' - Python311: - python.version: '3.11' - Python312: - python.version: '3.12' - steps: - - checkout: self - fetchDepth: 1 - - bash: git submodule update --init --recursive - displayName: Initialize the submodules - - bash: echo "##vso[task.prependpath]/usr/local/bin" - displayName: Add Homebrew to PATH - - bash: brew install --cask miniconda - displayName: Install Miniconda with Homebrew - - bash: | - source /usr/local/Caskroom/miniconda/base/bin/activate - conda create --yes --quiet --name Build - displayName: Create Anaconda environment - - bash: | - source /usr/local/Caskroom/miniconda/base/bin/activate - source activate Build - conda install --yes --quiet -c conda-forge mamba python=$PYTHON_VERSION - displayName: Install mamba - - bash: | - source /usr/local/Caskroom/miniconda/base/bin/activate - source activate Build - mamba install --yes --quiet -c conda-forge dask cmake coverage eigen boost-cpp numpy pytest-cov setuptools xarray - displayName: Install build requirements - - bash: | - source /usr/local/Caskroom/miniconda/base/bin/activate - source activate Build - python setup.py build - displayName: Build package - - bash: | - source /usr/local/Caskroom/miniconda/base/bin/activate - source activate Build - python setup.py test - displayName: Test cases -- job: - displayName: windows-2022 - pool: - vmImage: 'windows-2022' - strategy: - matrix: - Python310: - python.version: '3.10' - Python311: - python.version: '3.11' - Python312: - python.version: '3.12' - steps: - - checkout: self - fetchDepth: 1 - - script: git submodule update --init --recursive - displayName: Initialize the submodules - - powershell: Write-Host "##vso[task.prependpath]$env:CONDA\Scripts" - displayName: Add conda to PATH - - bash: echo "##vso[task.prependpath]$CONDA/bin" - displayName: Add conda to PATH - - script: conda create --yes 
--quiet --name Build - displayName: Create Anaconda environment - - script: | - call activate Build - conda install --yes --quiet -c conda-forge mamba python=%PYTHON_VERSION% - displayName: Install mamba - - script: | - call activate Build - mamba install --yes --quiet -c conda-forge dask cmake eigen boost-cpp mkl-devel mkl numpy pytest setuptools xarray - displayName: Install build requirements - - script: | - call activate Build - python setup.py build_ext --generator="Visual Studio 17 2022" --mkl=true build - displayName: Build package - - script: | - call activate Build - python setup.py test - displayName: Test cases diff --git a/binder/environment.yml b/binder/environment.yml index 7b91c835..9edb69e2 100644 --- a/binder/environment.yml +++ b/binder/environment.yml @@ -9,7 +9,7 @@ dependencies: - jupyter - numcodecs - pandas - - pyinterp>=2024.3.0 + - pyinterp>=2024.11.0 - pytest - sphinx-gallery - sphinx-inline-tabs diff --git a/conda/environment.yml b/conda/environment.yml index ee43ef47..3a1f618d 100644 --- a/conda/environment.yml +++ b/conda/environment.yml @@ -1,16 +1,24 @@ -name: RTD +name: pyinterp dependencies: + - boost-cpp - cartopy>=0.20.0 + - cmake + - coverage + - dask + - eigen - furo + - gtest - numcodecs + - numpy - pandas - pip - - pyinterp>=2024.3.0 - pypandoc - pytest - - python=3.9 + - lcov + - pytest-cov + - setuptools - sphinx-gallery - sphinx-inline-tabs + - xarray channels: - - main - conda-forge diff --git a/conda/meta.yaml b/conda/meta.yaml index f505b4ca..f9a752bf 100644 --- a/conda/meta.yaml +++ b/conda/meta.yaml @@ -1,5 +1,5 @@ {% set name = "pyinterp" %} -{% set version = "2024.6.0" %} +{% set version = "2024.11.0" %} package: name: {{ name|lower }} diff --git a/conftest.py b/conftest.py index 32fa49f4..80c7416c 100644 --- a/conftest.py +++ b/conftest.py @@ -43,3 +43,4 @@ def pytest_addoption(parser): """Add command line options to pytest.""" parser.addoption('--visualize', action='store_true', default=False) 
parser.addoption('--dump', action='store_true', default=False) + parser.addoption('--measure-coverage', action='store_true', default=False) diff --git a/docs/source/changelog.rst b/docs/source/changelog.rst index b54cdfe8..3fb96d3c 100644 --- a/docs/source/changelog.rst +++ b/docs/source/changelog.rst @@ -1,6 +1,24 @@ Changelog ######### +2024.11.0 +--------- + +* `#27 `_: Introduced a new + feature to the RTree object to handle Cartesian coordinates, bypassing the + LLA/ECEF conversion . +* Introduced GitHub Actions CI workflow for coverage and testing across Linux, + macOS, and Windows. +* Added new dependencies to ``conda/environment.yml``: boost-cpp, cmake, + coverage, dask, eigen, gtest, lcov, pytest-cov, setuptools, and xarray. +* Added CMake policy CMP0167 in ``CMakeLists.txt``. +* Updated pre-commit hooks to newer versions in ``.pre-commit-config.yaml``. +* Modified ``CMakeLists.txt`` to adjust BLAS libraries configuration. +* Updated ``README.rst`` badges to use GitHub Actions instead of Azure DevOps. +* Renamed conda environment from RTD to pyinterp in ``conda/environment.yml``. +* Deleted Azure Pipelines configuration (``azure-pipelines.yml``). +* Removed docformatter pre-commit hook from ``.pre-commit-config.yaml``. 
+ 2024.6.0 -------- * Fix compatibility with Numpy 2.0.0 diff --git a/docs/source/conf.py b/docs/source/conf.py index 5fddfe44..c8cee777 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -7,8 +7,30 @@ # -- Path setup -------------------------------------------------------------- import pathlib +import sysconfig +import sys HERE = pathlib.Path(__file__).absolute().parent +ROOT_DIRECTORY = HERE.parent.parent +MAJOR = sys.version_info[0] +MINOR = sys.version_info[1] + + +def get_build_dirname(): + """Returns the name of the build directory.""" + path = pathlib.Path( + ROOT_DIRECTORY, 'build', + 'lib.%s-%d.%d' % (sysconfig.get_platform(), MAJOR, MINOR)) + if path.exists(): + return path + return pathlib.Path( + ROOT_DIRECTORY, 'build', + f'lib.{sysconfig.get_platform()}-{sys.implementation.cache_tag}') + + +build_dirname = get_build_dirname() +if build_dirname.exists(): + sys.path.insert(0, str(build_dirname)) # -- Project information ----------------------------------------------------- @@ -17,9 +39,9 @@ author = 'CNES/CLS' # The short X.Y version -version = '2024.6.0' +version = '2024.11.0' # The full version, including alpha/beta/rc tags -release = '2024.6.0' +release = '2024.11.0' # -- General configuration --------------------------------------------------- diff --git a/docs/source/setup/build.rst b/docs/source/setup/build.rst index 4bbd85d3..8e6ed167 100644 --- a/docs/source/setup/build.rst +++ b/docs/source/setup/build.rst @@ -45,16 +45,13 @@ Once you have satisfied the requirements detailed above, to build the library, type the command ``python3 setup.py build_ext`` at the root of the project. You can specify, among other things, the following options: - * ``--boost-root`` to specify the Preferred Boost installation prefix. * ``--build-unittests`` to build the unit tests of the C++ extension. - * ``--conda-forge`` to use the generation parameters of the conda-forge - package. - * ``--code-coverage`` to enable coverage reporting on the C++ extension. 
* ``--c-compiler`` to select the C compiler to use. + * ``--cmake-args`` to pass additional arguments to CMake. + * ``--code-coverage`` to enable coverage reporting on the C++ extension. * ``--cxx-compiler`` to select the C++ compiler to use. * ``--debug`` to compile the C++ library in Debug mode. - * ``--eigen-root`` to specify the Eigen3 include directory. - * ``--mkl-root`` to specify the MKL directory. + * ``--generator`` to specify the generator to use with CMake. * ``--mkl`` to use MKL as BLAS library * ``--reconfigure`` to force CMake to reconfigure the project. @@ -82,7 +79,7 @@ use the following at the root of the project: .. code-block:: bash - python setup.py test + pytest -v -ra Generating the test coverage report ----------------------------------- @@ -96,14 +93,14 @@ following steps: .. code-block:: bash python setup.py build_ext --code-coverage --build-unittests - python setup.py test --ext-coverage + python setup.py gtest + genhtml coverage_cpp.lcov --output-directory htmllcov The first command compiles the extension to generate a coverage mapping to allow -code coverage analysis. The second command performs the Python and C++ unit -tests, analyze the coverage of the C++ code, and generates the associated HTML -report with `lcov `_. The -generated report is available in the ``htmllcov`` directory located at the root -of the project. +code coverage analysis. The second command runs the C++ unit tests and generates +the coverage report. The third command generates the associated HTML report with +`lcov `_. The generated report is +available in the ``htmllcov`` directory located at the root of the project. .. note:: @@ -117,11 +114,44 @@ following step: .. code-block:: bash - python setup.py test --pytest-args="--cov=pyinterp --cov-report=html" + pytest -v -ra --cov=pyinterp --cov-report=html The HTML report is available in the ``htmlcov`` directory located at the root of the project. 
+Global coverage report +^^^^^^^^^^^^^^^^^^^^^^ + +Is it possible to generate a global coverage report by combining the two previous +reports? To do this, type the following command: + +.. code-block:: bash + + python setup.py build_ext --code-coverage --build-unittests + python setup.py build + python setup.py gtest + pytest -v -ra --cov=pyinterp --cov-report=lcov --measure-coverage + lcov --add-tracefile coverage_cpp.lcov --add-tracefile coverage.lcov --output-file merged_coverage.lcov + lcov -r merged_coverage.lcov "${CONDA_PREFIX}/*" "/usr/*" "*/third_party/*" --output-file filtered_merged_coverage.lcov + genhtml filtered_merged_coverage.lcov --output-directory htmllcov + +The steps to generate a global coverage report are as follows: + +1. Compile the extension to generate a coverage mapping for code coverage + analysis. +2. Compile the Python extension. +3. Run the C++ unit tests and generate the coverage report. +4. Run the Python unit tests and generate the coverage report. The option + ``--measure-coverage`` is used to reduce the number of data processed during + the Python test, speeding up the process as the C++ extension is compiled + without optimization. +5. Merge the two coverage reports. +6. Filter the coverage report to remove the system and third-party libraries. +7. Generate the associated HTML report with `lcov + `_. + +The generated report is available in the ``htmllcov`` directory located at the root of the project. + Automatic Documentation ======================= @@ -130,7 +160,7 @@ documentation. It is possible to generate it to produce a local mini WEB site to read and navigate it. To do this, type the following command: :: - python setup.py build_sphinx + sphinx-build -b html docs/source docs/build .. 
note:: diff --git a/examples/ex_2d.py b/examples/ex_2d.py index d77c2f86..d3eb5c6e 100644 --- a/examples/ex_2d.py +++ b/examples/ex_2d.py @@ -47,8 +47,10 @@ # %% # The grid is :py:meth:`interpolated # ` to the desired coordinates: -mss = interpolator.bivariate( - coords=dict(lon=mx.ravel(), lat=my.ravel())).reshape(mx.shape) +mss = interpolator.bivariate(coords={ + 'lon': mx.ravel(), + 'lat': my.ravel() +}).reshape(mx.shape) # %% # Let's visualize the original grid and the result of the interpolation. @@ -105,9 +107,14 @@ # The interpolation :py:meth:`bicubic ` # function has more parameters to define the data frame used by the spline # functions and how to process the edges of the regional grids: -mss = interpolator.bicubic(coords=dict(lon=mx.ravel(), lat=my.ravel()), - nx=3, - ny=3).reshape(mx.shape) +mss = interpolator.bicubic( + coords={ + 'lon': mx.ravel(), + 'lat': my.ravel() + }, + nx=3, + ny=3, +).reshape(mx.shape) # %% # .. warning:: diff --git a/examples/ex_3d.py b/examples/ex_3d.py index b93fbf68..7e42de13 100644 --- a/examples/ex_3d.py +++ b/examples/ex_3d.py @@ -62,8 +62,11 @@ # %% # We interpolate our grid using a :py:meth:`classical # `: -trivariate = interpolator.trivariate( - dict(longitude=mx.ravel(), latitude=my.ravel(), time=mz.ravel())) +trivariate = interpolator.trivariate({ + 'longitude': mx.ravel(), + 'latitude': my.ravel(), + 'time': mz.ravel() +}) # %% # Bicubic on 3D grid @@ -80,8 +83,11 @@ # We interpolate our grid using a :py:meth:`bicubic # ` interpolation in space followed by # a linear interpolation in the temporal axis: -bicubic = interpolator.bicubic( - dict(longitude=mx.ravel(), latitude=my.ravel(), time=mz.ravel())) +bicubic = interpolator.bicubic({ + 'longitude': mx.ravel(), + 'latitude': my.ravel(), + 'time': mz.ravel() +}) # %% # We transform our result cubes into a matrix. 
diff --git a/examples/ex_4d.py b/examples/ex_4d.py index 89b4703d..c28692b3 100644 --- a/examples/ex_4d.py +++ b/examples/ex_4d.py @@ -52,11 +52,12 @@ # %% # We interpolate our grid using a :py:meth:`classical # `: -quadrivariate = interpolator.quadrivariate( - dict(longitude=mx.ravel(), - latitude=my.ravel(), - time=mz.ravel(), - level=mu.ravel())).reshape(mx.shape) +quadrivariate = interpolator.quadrivariate({ + 'longitude': mx.ravel(), + 'latitude': my.ravel(), + 'time': mz.ravel(), + 'level': mu.ravel() +}).reshape(mx.shape) # %% # Bicubic on 4D grid @@ -73,12 +74,15 @@ # We interpolate our grid using a :py:meth:`bicubic # ` interpolation in space followed by # a linear interpolation in the temporal axis: -bicubic = interpolator.bicubic(dict(longitude=mx.ravel(), - latitude=my.ravel(), - time=mz.ravel(), - level=mu.ravel()), - nx=2, - ny=2).reshape(mx.shape) +bicubic = interpolator.bicubic( + { + 'longitude': mx.ravel(), + 'latitude': my.ravel(), + 'time': mz.ravel(), + 'level': mu.ravel() + }, + nx=2, + ny=2).reshape(mx.shape) # %% # We transform our result cubes into a matrix. diff --git a/examples/ex_axis.py b/examples/ex_axis.py index 930c8f80..80e8f983 100644 --- a/examples/ex_axis.py +++ b/examples/ex_axis.py @@ -27,7 +27,7 @@ import pyinterp axis = pyinterp.Axis(numpy.arange(-90, 90, 0.25)) -axis +print(axis) # %% # This object can be queried to obtain its properties. @@ -79,7 +79,7 @@ ]) axis = pyinterp.Axis(MERCATOR_LATITUDES) -axis +print(axis) # %% # Let's display its properties. @@ -98,7 +98,7 @@ # It is also possible to represent longitudes going around the earth, i.e. # making a circle. axis = pyinterp.Axis(numpy.arange(0, 360, 1), is_circle=True) -axis +print(axis) # %% # In this case, you don't have to worry about the bounds of the axis. 
@@ -125,7 +125,7 @@ dates = numpy.datetime64('2020-01-01') + numpy.arange( 10**6, step=500).astype('timedelta64[ms]') axis = pyinterp.TemporalAxis(dates) -axis +print(axis) # %% # It is possible to search for a date in this axis. @@ -138,4 +138,4 @@ # %% # This object also makes it possible to manipulate timedeltas. axis = pyinterp.TemporalAxis(dates - numpy.datetime64('2020-01-01')) -axis +print(axis) diff --git a/examples/ex_binning.py b/examples/ex_binning.py index 1187b902..b2262472 100644 --- a/examples/ex_binning.py +++ b/examples/ex_binning.py @@ -40,7 +40,7 @@ binning = pyinterp.Binning2D( pyinterp.Axis(numpy.arange(27, 42, 0.3), is_circle=True), pyinterp.Axis(numpy.arange(40, 47, 0.3))) -binning +print(binning) # %% # We push the loaded data into the different defined bins using :ref:`simple @@ -122,7 +122,7 @@ hist2d = pyinterp.Histogram2D( pyinterp.Axis(numpy.arange(27, 42, 0.3), is_circle=True), pyinterp.Axis(numpy.arange(40, 47, 0.3))) -hist2d +print(hist2d) # %% # We push the loaded data into the different defined bins using the method diff --git a/examples/ex_dateutils.py b/examples/ex_dateutils.py index bbf1ee9b..4a718677 100644 --- a/examples/ex_dateutils.py +++ b/examples/ex_dateutils.py @@ -28,7 +28,7 @@ def make_date(samples=10000): # %% dates = make_date() -dates +print(dates) # %% # Get the date part as a structured numpy array of three fields: ``year``, diff --git a/examples/ex_descriptive_statistics.py b/examples/ex_descriptive_statistics.py index 784532da..6d233033 100644 --- a/examples/ex_descriptive_statistics.py +++ b/examples/ex_descriptive_statistics.py @@ -16,7 +16,7 @@ Numerically stable, scalable formulas for parallel and online computation of higher-order multivariate central moments with arbitrary weights. 
- Comput Stat 31, 1305–1325, + Comput Stat 31, 1305-1325, 2016, https://doi.org/10.1007/s00180-015-0637-z """ @@ -28,7 +28,8 @@ # %% # Create a random array -values = numpy.random.random_sample((2, 4, 6, 8)) +generator = numpy.random.Generator(numpy.random.PCG64(0)) +values = generator.random((2, 4, 6, 8)) # %% # Create a DescriptiveStatistics object. @@ -74,6 +75,6 @@ # %% # Finally, it's possible to calculate weighted statistics. -weights = numpy.random.random_sample((2, 4, 6, 8)) +weights = generator.random((2, 4, 6, 8)) ds = pyinterp.DescriptiveStatistics(values, weights=weights, axis=(1, 2)) ds.mean() diff --git a/examples/ex_geodetic.py b/examples/ex_geodetic.py index 6c53e483..91622cd9 100644 --- a/examples/ex_geodetic.py +++ b/examples/ex_geodetic.py @@ -25,12 +25,12 @@ import pyinterp.geodetic wgs84 = pyinterp.geodetic.Spheroid() -wgs84 +print(wgs84) # %% # You can instantiate other systems. grs80 = pyinterp.geodetic.Spheroid((6378137, 1 / 298.257222101)) -grs80 +print(grs80) # %% # World Geodetic Coordinates System @@ -40,16 +40,23 @@ # longitude, and altitude (LLA) coordinates to Earth-centered Earth-fixed (ECEF) # coordinates. You can instantiate it from the Python, to do conversions or # transformations. 
-lon = numpy.random.uniform(-180.0, 180.0, 1000000) -lat = numpy.random.uniform(-90.0, 90.0, 1000000) -alt = numpy.random.uniform(-10000, 100000, 1000000) +generator = numpy.random.Generator(numpy.random.PCG64(0)) +lon = generator.uniform(-180.0, 180.0, 1000000) +lat = generator.uniform(-90.0, 90.0, 1000000) +alt = generator.uniform(-10000, 100000, 1000000) a = pyinterp.geodetic.Coordinates(wgs84) b = pyinterp.geodetic.Coordinates(grs80) elapsed = timeit.timeit('a.transform(b, lon, lat, alt, num_threads=0)', number=100, - globals=dict(a=a, b=b, lon=lon, lat=lat, alt=alt)) + globals={ + 'a': a, + 'b': b, + 'lon': lon, + 'lat': lat, + 'alt': alt + }) print('transform: %f seconds' % (float(elapsed) / 100)) # %% diff --git a/examples/ex_geohash.py b/examples/ex_geohash.py index cff515cd..f79bff57 100644 --- a/examples/ex_geohash.py +++ b/examples/ex_geohash.py @@ -20,7 +20,6 @@ import numpy import pandas -# import pyinterp @@ -146,14 +145,15 @@ def plot_geohash_grid(precision, # # Generation of dummy data SIZE = 1000000 -lon = numpy.random.uniform(-180, 180, SIZE) -lat = numpy.random.uniform(-80, 80, SIZE) -measures = numpy.random.random_sample(SIZE) +generator = numpy.random.Generator(numpy.random.PCG64(0)) +lon = generator.uniform(-180, 180, SIZE) +lat = generator.uniform(-80, 80, SIZE) +measures = generator.random(SIZE) # %% # Encoding the data codes = pyinterp.geohash.encode(lon, lat, precision=4) -codes +print(codes) # As you can see, the resulting codes are encoding as numpy byte arrays. @@ -162,7 +162,11 @@ def plot_geohash_grid(precision, # quickly. timeit.timeit('pyinterp.geohash.encode(lon, lat)', number=50, - globals=dict(pyinterp=pyinterp, lon=lon, lat=lat)) / 50 + globals={ + 'pyinterp': pyinterp, + 'lon': lon, + 'lat': lat + }) / 50 # %% # The inverse operation is also possible. @@ -172,11 +176,11 @@ def plot_geohash_grid(precision, # You can also use the :py:func:`pyinterp.geohash.transform` to transform # coordinates from one précision to another. 
codes = pyinterp.geohash.transform(codes, precision=1) -codes +print(codes) # %% codes = pyinterp.geohash.transform(codes, precision=3) -codes +print(codes) # %% # The :py:func:`pyinterp.geohash.bounding_boxes` function allows calculating the @@ -261,11 +265,12 @@ def plot_geohash_grid(precision, # %% # Density calculation # =================== -df = pandas.DataFrame( - dict(lon=lon, - lat=lat, - measures=measures, - geohash=pyinterp.geohash.encode(lon, lat, precision=3))) +df = pandas.DataFrame({ + 'lon': lon, + 'lat': lat, + 'measures': measures, + 'geohash': pyinterp.geohash.encode(lon, lat, precision=3) +}) df.set_index('geohash', inplace=True) df = df.groupby('geohash').count()['measures'].rename('count').to_frame() df['density'] = df['count'] / ( diff --git a/examples/ex_objects.py b/examples/ex_objects.py index 417c30a8..e7173c03 100644 --- a/examples/ex_objects.py +++ b/examples/ex_objects.py @@ -41,7 +41,7 @@ # used by the interpolator to search for the data to be used. Let's start with # the y-axis representing the latitude axis. y_axis = pyinterp.Axis(lat) -y_axis +print(y_axis) # %% # For example, you can search for the closest point to 0.12 degrees north @@ -52,16 +52,16 @@ # Then, the x-axis representing the longitudinal axis. In this case, the axis is # an axis representing a 360 degree circle. x_axis = pyinterp.Axis(lon, is_circle=True) -x_axis +print(x_axis) # %% # The values -180 and 180 degrees represent the same point on the axis. 
-x_axis.find_index([-180]) == x_axis.find_index([180]) +print(x_axis.find_index([-180]) == x_axis.find_index([180])) # %% # Finally, we create the time axis t_axis = pyinterp.TemporalAxis(time) -t_axis +print(t_axis) # %% # As these objects must communicate in C++ memory space, we use objects specific @@ -70,19 +70,31 @@ # indexes: values = lon[10:20] + 1 / 3 index = pandas.Index(lon) -print('pandas.Index: %f' % timeit.timeit( - 'index.searchsorted(values)', globals=dict(index=index, values=values))) -print('pyinterp.Axis %f' % timeit.timeit( - 'x_axis.find_index(values)', globals=dict(x_axis=x_axis, values=values))) +print('pandas.Index: %f' % timeit.timeit('index.searchsorted(values)', + globals={ + 'index': index, + 'values': values + })) +print('pyinterp.Axis %f' % timeit.timeit('x_axis.find_index(values)', + globals={ + 'x_axis': x_axis, + 'values': values + })) # %% # This time axis is also very efficient compared to the pandas index. index = pandas.Index(time) values = time + numpy.timedelta64(1, 'ns') -print('pandas.Index: %f' % timeit.timeit( - 'index.searchsorted(values)', globals=dict(index=index, values=values))) -print('pyinterp.Axis %f' % timeit.timeit( - 't_axis.find_index(values)', globals=dict(t_axis=t_axis, values=values))) +print('pandas.Index: %f' % timeit.timeit('index.searchsorted(values)', + globals={ + 'index': index, + 'values': values + })) +print('pyinterp.Axis %f' % timeit.timeit('t_axis.find_index(values)', + globals={ + 't_axis': t_axis, + 'values': values + })) # %% # Before constructing the tensor for pyinterp, we must begin to organize the @@ -106,7 +118,7 @@ # handled array. Axis data are copied for non-uniform axes, and only examined # for regular axes. grid_3d = pyinterp.Grid3D(x_axis, y_axis, t_axis, tcw) -grid_3d +print(grid_3d) # %% # xarray backend @@ -118,4 +130,4 @@ # `_ convention usually found in NetCDF files. 
interpolator = pyinterp.backends.xarray.RegularGridInterpolator( pyinterp.tests.load_grid3d().tcw) -interpolator.grid +print(interpolator.grid) diff --git a/examples/ex_orbit.py b/examples/ex_orbit.py new file mode 100644 index 00000000..b0492694 --- /dev/null +++ b/examples/ex_orbit.py @@ -0,0 +1,217 @@ +""" +******************* +Orbit Interpolation +******************* + +This library facilitates the interpolation of orbit ephemerides from a template +file that contains satellite positions for a single orbit cycle. It supports the +propagation of the orbit over time, making it useful for simulations and other +applications that require orbit propagation. + +To begin, we will load the orbit ephemerides from the template file. In this +example, we will create a simple function to load the data from the test file. +""" + +# %% +import pathlib + +import cartopy.crs as ccrs +import matplotlib.pyplot as plt +import numpy as np +import pandas as pd + +import pyinterp +import pyinterp.tests + + +def load_test_ephemeris( + filename: pathlib.Path +) -> tuple[float, np.ndarray, np.ndarray, np.ndarray, np.timedelta64]: + """Loads the ephemeris from a text file. + + Args: + filename: Name of the file to be loaded. + + Returns: + A tuple containing the height of the orbit, the ephemeris and the + duration of the cycle. + """ + with open(filename) as stream: + lines = stream.readlines() + + def to_dict(comments) -> dict[str, float]: + """Returns a dictionary describing the parameters of the orbit.""" + result = {} + for item in comments: + assert item.startswith('#'), 'Comments must start with #' + key, value = item[1:].split('=') + result[key.strip()] = float(value) + return result + + # The two first lines are the header and contain the height and the + # duration of the cycle in fractional days. 
+ settings = to_dict(lines[:2]) + del lines[:2] + + # The rest of the lines are the ephemeris + ephemeris = np.loadtxt(lines, + delimiter=' ', + dtype={ + 'names': + ('time', 'longitude', 'latitude', 'height'), + 'formats': ('f8', 'f8', 'f8', 'f8') + }) + + return ( + settings['height'], + ephemeris['longitude'], + ephemeris['latitude'], + ephemeris['time'].astype('timedelta64[s]'), + np.timedelta64(int(settings['cycle_duration'] * 86400.0 * 1e9), 'ns'), + ) + + +# %% +# Set the path to the test file +swot_calval_ephemeris_path = pyinterp.tests.swot_calval_ephemeris_path() +ephemeris = load_test_ephemeris(swot_calval_ephemeris_path) + +# %% +# Compute the orbit properties from the provided ephemeris +orbit = pyinterp.calculate_orbit(*ephemeris) + +# %% +# The orbit object provides a method to calculate the number of passes per cycle +print(orbit.passes_per_cycle()) + +# %% +# To get the cycle duration, and the orbit duration we can use the +# `cycle_duration` method and the `orbit_duration` method respectively. +print(orbit.cycle_duration().astype('m8[ms]').item()) +print(orbit.orbit_duration().astype('m8[ms]').item()) + +# %% +# We can also retrieve the pass duration for a given pass number: +print(orbit.pass_duration(2).astype('m8[ms]').item()) + +# %% +# A utility function is provided to compute an absolute pass number from a +# relative pass number and to decode the absolute pass number back into a +# relative pass number. +# This function is useful for storing the pass number in a database or indexing +# the passes in a file, among other applications. +absolute_pass_number = orbit.encode_absolute_pass_number(cycle_number=11, + pass_number=2) +print(absolute_pass_number) +cycle_number, pass_number = orbit.decode_absolute_pass_number( + absolute_pass_number) +print(cycle_number, pass_number) + +# %% +# The online documentation provides more information about the available methods +# for the this object: :py:class:`Orbit `. 
+# +# The next step is to interpolate the orbit ephemerides over time to get the +# satellite positions for a given relative pass number. +# +# .. note:: +# +# Is it possible to iterate over the relative pass numbers over time periods +# using the :py:meth:`iterate ` method. +# +# .. code:: python +# +# for cycle_number, pass_number, first_location_date in orbit.iterate( +# start_date, end_date): +# ... +# +nadir_pass_corrdinates = pyinterp.calculate_pass(2, orbit) +assert nadir_pass_corrdinates is not None +pd.DataFrame({ + 'time': nadir_pass_corrdinates.time, + 'lon_nadir': nadir_pass_corrdinates.lon_nadir, + 'lat_nadir': nadir_pass_corrdinates.lat_nadir, +}) + +# %% +print(nadir_pass_corrdinates.equator_coordinates) + +# %% +# The variable `nadir_pass_corrdinates` contains the satellite positions for the +# given pass: +# - The nadir longitude in degrees +# - The nadir latitude in degrees +# - The time for each position in a numpy.datetime64 array +# - The along track distance in meters +# - And the coordinates of the satellite at the equator. +# +# .. note:: +# +# The variable `nadir_pass_corrdinates` could be `None` if the pass number +# is outside the bounding box defined during the instantiation of the +# :py:class:`Orbit ` object. +# +# See the online documentation for more information about the available methods +# for the :py:class:`Pass ` object. +# +# Finally, we can calculate the satellite positions for a given pass number over +# a swath. +assert nadir_pass_corrdinates is not None +swath = pyinterp.calculate_swath(nadir_pass_corrdinates) + +# %% +# The `swath` object contains the properties of the pass, similar to the +# previous example. Additionally, it includes the coordinates of the satellite +# over the swath for each location on the nadir track. 
+pd.DataFrame({ + 'time': swath.time, + 'lon_nadir': swath.lon_nadir, + 'lat_nadir': swath.lat_nadir, +}) + +# %% +# The DataFrame `df` shows the longitude and latitude coordinates of the +# satellite for the first two lines of the swath. The index `x_ac` represents +# the across-track distance in meters. +df = pd.DataFrame( + { + 'lon_0': swath.lon[0, :], + 'lon_1': swath.lon[1, :], + 'lat_0': swath.lat[0, :], + 'lat_1': swath.lat[1, :], + }, + index=swath.x_ac[0, :], +) +df.index.name = 'x_ac' +df + +# %% +# We can plot the satellite positions over the swath using the following code: +fig, (ax1, ax2) = plt.subplots(1, + 2, + figsize=(20, 10), + subplot_kw={'projection': ccrs.PlateCarree()}) + +# Zoomed plot +ax1.set_extent([58, 70, 12, 32], crs=ccrs.PlateCarree()) +ax1.plot(swath.lon[::4, ::4].ravel(), + swath.lat[::4, ::4].ravel(), + 'b.', + markersize=1) +ax1.plot(swath.lon_nadir, swath.lat_nadir, 'r.', markersize=0.5) +ax1.set_title('Satellite positions - Zoomed') +ax1.coastlines() +ax1.gridlines(draw_labels=True) + +# Full swath plot +ax2.plot(swath.lon.ravel(), swath.lat.ravel(), 'b.', markersize=1) +ax2.plot(swath.lon_nadir, swath.lat_nadir, 'r.', markersize=0.5) +ax2.set_title('Satellite positions - Full Swath') +ax2.coastlines() +ax2.gridlines(draw_labels=True) +ax1.set_aspect('auto') +ax2.set_aspect('auto') + +plt.show() + +# %% diff --git a/examples/ex_unstructured.py b/examples/ex_unstructured.py index 9235993b..0333a251 100644 --- a/examples/ex_unstructured.py +++ b/examples/ex_unstructured.py @@ -10,6 +10,14 @@ object. By default, this object considers the WGS-84 geodetic coordinate system. But you can define another one using the class :py:class:`Spheroid `. + +.. note:: + + By default, the class converts coordinates from the WGS-84 geodetic system + to a Cartesian coordinate system. However, if you set the parameter ``ecef`` + to ``True``, this transformation is disabled. 
In this case, both input and + output coordinates are expected to be in the Cartesian coordinate system, + and the RTree will handle only Cartesian coordinates without any conversion. """ # %% @@ -31,9 +39,10 @@ SIZE = 2000 X0, X1 = 80, 170 Y0, Y1 = -45, 30 -lons = numpy.random.uniform(low=X0, high=X1, size=(SIZE, )) -lats = numpy.random.uniform(low=Y0, high=Y1, size=(SIZE, )) -data = numpy.random.uniform(low=-1.0, high=1.0, size=(SIZE, )) +generator = numpy.random.Generator(numpy.random.PCG64(0)) +lons = generator.uniform(low=X0, high=X1, size=(SIZE, )) +lats = generator.uniform(low=Y0, high=Y1, size=(SIZE, )) +data = generator.uniform(low=-1.0, high=1.0, size=(SIZE, )) # %% # Populates the search tree diff --git a/examples/pangeo_time_series.py b/examples/pangeo_time_series.py index 8400d30d..4629043c 100644 --- a/examples/pangeo_time_series.py +++ b/examples/pangeo_time_series.py @@ -101,15 +101,19 @@ def cnes_jd_to_datetime(seconds): def load_positions(): """Loading and formatting the dataset.""" - df = pandas.read_csv(pyinterp.tests.positions_path(), - header=None, - sep=r';', - usecols=[0, 1, 2, 3], - names=['id', 'time', 'lon', 'lat'], - dtype=dict(id=numpy.uint32, - time=numpy.float64, - lon=numpy.float64, - lat=numpy.float64)) + df = pandas.read_csv( + pyinterp.tests.positions_path(), + header=None, + sep=r';', + usecols=[0, 1, 2, 3], + names=['id', 'time', 'lon', 'lat'], + dtype={ + 'id': numpy.uint32, + 'time': numpy.float64, + 'lon': numpy.float64, + 'lat': numpy.float64, + }, + ) df.mask(df == 1.8446744073709552e+19, numpy.nan, inplace=True) df['time'] = df['time'].apply(cnes_jd_to_datetime) df.set_index('time', inplace=True) @@ -128,13 +132,12 @@ def periods(df, time_series, frequency='W'): period_start = df.groupby( df.index.to_period(frequency))['sla'].count().index - for start, end in zip(period_start, period_start[1:]): - start = start.to_timestamp() - if start < time_series.series[0]: - start = time_series.series[0] - end = end.to_timestamp() - 
yield start, end - yield end, df.index[-1] + time_series.dt + for start, end in zip(period_start, period_start[1:], strict=False): + start_timestamp = start.to_timestamp() + start_timestamp = max(start_timestamp, time_series.series[0]) + end_timestamp = end.to_timestamp() + yield start_timestamp, end_timestamp + yield end_timestamp, df.index[-1] + time_series.dt # %% @@ -145,9 +148,11 @@ def interpolate(df, time_series, start, end): mask = (df.index >= start) & (df.index < end) selected = df.loc[mask, ['lon', 'lat']] df.loc[mask, ['sla']] = interpolator.trivariate( - dict(longitude=selected['lon'].values, - latitude=selected['lat'].values, - time=selected.index.values), + { + 'longitude': selected['lon'].values, + 'latitude': selected['lat'].values, + 'time': selected.index.values + }, interpolator='inverse_distance_weighting', num_threads=0) diff --git a/examples/pangeo_unstructured_grid.py b/examples/pangeo_unstructured_grid.py index fb4cb20f..ade23d71 100644 --- a/examples/pangeo_unstructured_grid.py +++ b/examples/pangeo_unstructured_grid.py @@ -30,8 +30,8 @@ # in-memory structure, both in terms of construction time and queries. When this # is not possible, you can insert new information into the tree as you go along # using the :py:meth:`insert ` method. 
-cat_url = 'https://raw.githubusercontent.com/pangeo-data/pangeo-datastore' \ - '/master/intake-catalogs/ocean/llc4320.yaml' +cat_url = ('https://raw.githubusercontent.com/pangeo-data/pangeo-datastore' + '/master/intake-catalogs/ocean/llc4320.yaml') cat = intake.open_catalog(cat_url) # %% diff --git a/setup.py b/setup.py index d8eb1508..b82bdf40 100755 --- a/setup.py +++ b/setup.py @@ -6,13 +6,12 @@ this module using distutils/setuptools.""" from __future__ import annotations -from typing import Any +from typing import Any, ClassVar import datetime import os import pathlib import platform import re -import shlex import subprocess import sys import sysconfig @@ -42,7 +41,7 @@ def compare_setuptools_version(required: tuple[int, ...]) -> bool: def distutils_dirname(prefix=None, extname=None) -> pathlib.Path: """Returns the name of the build directory.""" - prefix = 'lib' or prefix + prefix = prefix or 'lib' extname = '' if extname is None else os.sep.join(extname.split('.')[:-1]) if compare_setuptools_version((62, 1)): return pathlib.Path( @@ -108,7 +107,7 @@ def revision() -> str: match = pattern.search(line) if match: return match.group(1) - raise AssertionError() + raise AssertionError stdout: Any = execute( 'git describe --tags --dirty --long --always').strip() @@ -130,7 +129,7 @@ def revision() -> str: stdout = execute(f"git log {sha1} -1 --format=\"%H %at\"") stdout = stdout.strip().split() - date = datetime.datetime.utcfromtimestamp(int(stdout[1])) + date = datetime.datetime.fromtimestamp(int(stdout[1])) # Conda configuration files are not present in the distribution, but only # in the GIT repository of the source code. 
@@ -202,36 +201,30 @@ class BuildExt(setuptools.command.build_ext.build_ext): """Build everything needed to install.""" user_options = setuptools.command.build_ext.build_ext.user_options user_options += [ - ('boost-root=', None, 'Preferred Boost installation prefix'), ('build-unittests', None, 'Build the unit tests of the C++ extension'), - ('conda-forge', None, 'Generation of the conda-forge package'), - ('code-coverage', None, 'Enable coverage reporting'), ('c-compiler=', None, 'Preferred C compiler'), + ('cmake-args=', None, 'Additional arguments for CMake'), + ('code-coverage', None, 'Enable coverage reporting'), ('cxx-compiler=', None, 'Preferred C++ compiler'), - ('eigen-root=', None, 'Preferred Eigen3 include directory'), ('generator=', None, 'Selected CMake generator'), - ('mkl-root=', None, 'Preferred MKL installation prefix'), ('mkl=', None, 'Using MKL as BLAS library'), ('reconfigure', None, 'Forces CMake to reconfigure this project') ] boolean_options = setuptools.command.build_ext.build_ext.boolean_options - boolean_options += ['mkl', 'conda-forge'] + boolean_options += ['mkl'] def initialize_options(self) -> None: """Set default values for all the options that this command supports.""" super().initialize_options() - self.boost_root = None self.build_unittests = None - self.conda_forge = None self.code_coverage = None self.c_compiler = None + self.cmake_args = None self.cxx_compiler = None - self.eigen_root = None self.generator = None self.mkl = None - self.mkl_root = None self.reconfigure = None def finalize_options(self) -> None: @@ -239,11 +232,6 @@ def finalize_options(self) -> None: super().finalize_options() if self.code_coverage is not None and platform.system() == 'Windows': raise RuntimeError('Code coverage is not supported on Windows') - if self.mkl_root is not None: - self.mkl = True - if not self.mkl and self.mkl_root: - raise RuntimeError( - 'argument --mkl_root not allowed with argument --mkl=no') def run(self) -> None: """Carry out the 
action.""" @@ -251,37 +239,6 @@ def run(self) -> None: self.build_cmake(ext) super().run() - def boost(self) -> list[str] | None: - """Get the default boost path in Anaconda's environment.""" - boost_root = pathlib.Path(sys.prefix) - if (boost_root / 'include' / 'boost').exists(): - return f'-DBoost_ROOT={boost_root}'.split() - boost_root = pathlib.Path(sys.prefix, 'Library', 'include') - if not boost_root.exists(): - if self.conda_forge: - raise RuntimeError( - 'Unable to find the Boost library in the conda ' - 'distribution used.') - return None - return f'-DBoost_INCLUDE_DIR={boost_root}'.split() - - def eigen(self) -> str | None: - """Get the default Eigen3 path in Anaconda's environment.""" - eigen_include_dir = pathlib.Path(sys.prefix, 'include', 'eigen3') - if eigen_include_dir.exists(): - return f'-DEIGEN3_INCLUDE_DIR={eigen_include_dir}' - eigen_include_dir = pathlib.Path(sys.prefix, 'Library', 'include', - 'eigen3') - if not eigen_include_dir.exists(): - eigen_include_dir = eigen_include_dir.parent - if not eigen_include_dir.exists(): - if self.conda_forge: - raise RuntimeError( - 'Unable to find the Eigen3 library in the conda ' - 'distribution used.') - return None - return f'-DEIGEN3_INCLUDE_DIR={eigen_include_dir}' - @staticmethod def set_conda_mklroot() -> None: """Set the default MKL path in Anaconda's environment.""" @@ -294,57 +251,59 @@ def set_conda_mklroot() -> None: os.environ['MKLROOT'] = sys.prefix @staticmethod - def is_conda() -> bool: - """Detect if the Python interpreter is part of a conda distribution.""" - result = pathlib.Path(sys.prefix, 'conda-meta').exists() - if not result: - try: - # pylint: disable=unused-import,import-outside-toplevel - import conda # noqa: F401 - - # pylint: enable=unused-import,import-outside-toplevel - except ImportError: - result = False - else: - result = True - return result + def conda_prefix() -> str | None: + """Returns the conda prefix.""" + if 'CONDA_PREFIX' in os.environ: + return 
os.environ['CONDA_PREFIX'] + return None def set_cmake_user_options(self) -> list[str]: """Sets the options defined by the user.""" - cmake_variable: Any - is_conda = self.is_conda() result = [] + conda_prefix = self.conda_prefix() + if self.c_compiler is not None: result.append('-DCMAKE_C_COMPILER=' + self.c_compiler) if self.cxx_compiler is not None: result.append('-DCMAKE_CXX_COMPILER=' + self.cxx_compiler) - if self.conda_forge: - result.append('-DCONDA_FORGE=ON') - - if self.boost_root is not None: - result.append('-DBOOSTROOT=' + self.boost_root) - elif is_conda: - cmake_variable = self.boost() - if cmake_variable: - result += cmake_variable - - if self.eigen_root is not None: - result.append('-DEIGEN3_INCLUDE_DIR=' + self.eigen_root) - elif is_conda: - cmake_variable = self.eigen() - if cmake_variable: - result.append(cmake_variable) - - if self.mkl_root is not None: - os.environ['MKLROOT'] = self.mkl_root - elif is_conda and self.mkl: + if conda_prefix is not None: + result.append('-DCMAKE_PREFIX_PATH=' + conda_prefix) + + elif conda_prefix and self.mkl: self.set_conda_mklroot() return result + def get_config(self) -> str: + """Returns the configuration to use.""" + cfg: str + if self.debug: + cfg = 'Debug' + elif self.code_coverage: + cfg = 'RelWithDebInfo' + else: + cfg = 'Release' + return cfg + + def cmake_arguments(self, cfg: str, extdir: str) -> list[str]: + """Returns the cmake arguments.""" + cmake_args: list[str] = [ + '-DCMAKE_BUILD_TYPE=' + cfg, + '-DCMAKE_LIBRARY_OUTPUT_DIRECTORY=' + extdir, + '-DPython3_EXECUTABLE=' + sys.executable, + *self.set_cmake_user_options() + ] + + if platform.python_implementation() == 'PyPy': + cmake_args.append('-DPython3_FIND_IMPLEMENTATIONS=PyPy') + elif 'Pyston' in sys.version: + cmake_args.append('-DPython3_INCLUDE_DIR=' + + sysconfig.get_path('include')) + return cmake_args + def build_cmake(self, ext) -> None: """Execute cmake to build the Python extension.""" # These dirs will be created in build_py, so if 
you don't have @@ -353,20 +312,8 @@ def build_cmake(self, ext) -> None: build_temp.mkdir(parents=True, exist_ok=True) extdir = str( pathlib.Path(self.get_ext_fullpath(ext.name)).parent.resolve()) - - cfg = 'Debug' if self.debug or self.code_coverage else 'Release' - - cmake_args = [ - '-DCMAKE_BUILD_TYPE=' + cfg, '-DCMAKE_LIBRARY_OUTPUT_DIRECTORY=' + - str(extdir), '-DPython3_EXECUTABLE=' + sys.executable - ] + self.set_cmake_user_options() - - if platform.python_implementation() == 'PyPy': - cmake_args.append('-DPython3_FIND_IMPLEMENTATIONS=PyPy') - elif 'Pyston' in sys.version: - cmake_args.append('-DPython3_INCLUDE_DIR=' + - sysconfig.get_path('include')) - + cfg = self.get_config() + cmake_args = self.cmake_arguments(cfg, extdir) build_args = ['--config', cfg] is_windows = platform.system() == 'Windows' @@ -377,9 +324,6 @@ def build_cmake(self, ext) -> None: cmake_args.append( '-G' + os.environ.get('CMAKE_GEN', 'Visual Studio 16 2019')) - if self.verbose: # type: ignore - build_args += ['--verbose'] - if not is_windows: build_args += ['--', f'-j{os.cpu_count()}'] if platform.system() == 'Darwin': @@ -395,6 +339,9 @@ def build_cmake(self, ext) -> None: ] build_args += ['--', '/m'] + if self.cmake_args: + cmake_args.extend(self.cmake_args.split()) + os.chdir(str(build_temp)) # Has CMake ever been executed? 
@@ -406,8 +353,8 @@ def build_cmake(self, ext) -> None: configure = True if configure: - self.spawn(['cmake', str(WORKING_DIRECTORY)] + cmake_args) - if not self.dry_run: # type: ignore + self.spawn(['cmake', str(WORKING_DIRECTORY), *cmake_args]) + if not self.dry_run: cmake_cmd = ['cmake', '--build', '.'] if self.build_unittests is None: cmake_cmd += ['--target', 'core'] @@ -417,77 +364,39 @@ def build_cmake(self, ext) -> None: # pylint: enable=too-many-instance-attributes -class Test(setuptools.Command): - """Test runner.""" - description = 'run pytest' - user_options = [('ext-coverage', None, - 'Generate C++ extension coverage reports'), - ('pytest-args=', None, 'Arguments to pass to pytest')] +class CxxTestRunner(setuptools.Command): + """Compile and launch the C++ tests.""" + description: ClassVar[str] = 'run the C++ tests' + user_options: ClassVar[list[tuple[str, str | None, str]]] = [] def initialize_options(self): """Set default values for all the options that this command supports.""" - self.ext_coverage = None - self.pytest_args = None + if platform.system() == 'Windows': + raise RuntimeError('Code coverage is not supported on Windows') def finalize_options(self): """Set final values for all the options that this command supports.""" - if self.pytest_args is None: - self.pytest_args = '' - self.pytest_args = ' --pyargs pyinterp ' + self.pytest_args def run(self): """Run tests.""" - # pylint: disable=import-outside-toplevel - import pytest - - # pylint: enable=import-outside-toplevel - sys.path.insert(0, str(distutils_dirname())) - - errno = pytest.main( - shlex.split( - self.pytest_args, # type: ignore - posix=platform.system() != 'Windows')) - if errno: - sys.exit(errno) - # Directory used during the generating the C++ extension. tempdir = distutils_dirname('temp') - # We work in the extension generation directory (CMake directory) - os.chdir(str(tempdir)) - - # If the C++ unit tests have been generated, they are executed. 
- if pathlib.Path(tempdir, 'src', 'pyinterp', 'core', 'tests', - 'test_axis').exists(): - self.spawn(['ctest', '--output-on-failure']) - - # Generation of the code coverage of the C++ extension? - if not self.ext_coverage: - return - - # Directory for writing the HTML coverage report. - htmllcov = str(pathlib.Path(tempdir.parent.parent, 'htmllcov')) + # Navigate to the directory containing the C++ tests and run them. + os.chdir(str(tempdir / 'src' / 'pyinterp' / 'core' / 'tests')) + self.spawn(['ctest', '--output-on-failure']) # File containing the coverage report. - coverage_info = str(pathlib.Path(tempdir, 'coverage.info')) + coverage_lcov = str( + pathlib.Path(tempdir.parent.parent, 'coverage_cpp.lcov')) # Collect coverage data from python/C++ unit tests self.spawn([ 'lcov', '--capture', '--directory', - str(tempdir), '--output-file', coverage_info + str(tempdir), '--output-file', coverage_lcov ]) - # The coverage of third-party libraries is removed. - self.spawn([ - 'lcov', '-r', coverage_info, '*/Xcode.app/*', '*/third_party/*', - '*/boost/*', '*/eigen3/*', '*/tests/*', '*/usr/*', '--output-file', - coverage_info - ]) - - # Finally, we generate the HTML coverage report. 
- self.spawn(['genhtml', coverage_info, '--output-directory', htmllcov]) - class SDist(setuptools.command.sdist.sdist): """Custom sdist command that copies the pytest configuration file into the @@ -526,7 +435,6 @@ def typehints(): def main(): """Main function.""" install_requires = ['dask', 'numpy', 'xarray >= 0.13'] - tests_require = install_requires + ['pytest'] setuptools.setup( author='CNES/CLS', author_email='fbriol@gmail.com', @@ -546,8 +454,8 @@ def main(): cmdclass={ 'build_ext': BuildExt, 'sdist': SDist, - 'test': Test - }, # type: ignore + 'gtest': CxxTestRunner, + }, data_files=typehints(), description='Interpolation of geo-referenced data for Python.', ext_modules=[CMakeExtension(name='pyinterp.core')], @@ -567,8 +475,7 @@ def main(): exclude=['pyinterp.core*'], ), platforms=['POSIX', 'MacOS', 'Windows'], - python_requires='>=3.8', - tests_require=tests_require, + python_requires='>=3.10', url='https://github.com/CNES/pangeo-pyinterp', version=revision(), zip_safe=False, diff --git a/src/pyinterp/backends/xarray.py b/src/pyinterp/backends/xarray.py index 9f1b4b6a..84539d86 100644 --- a/src/pyinterp/backends/xarray.py +++ b/src/pyinterp/backends/xarray.py @@ -10,14 +10,17 @@ """ from __future__ import annotations -from collections.abc import Hashable +from typing import TYPE_CHECKING import pickle -import numpy -import xarray as xr - from .. 
import cf, core, grid, interpolator +if TYPE_CHECKING: + from collections.abc import Hashable + + import numpy as np + import xarray as xr + __all__ = ['Grid2D', 'Grid3D', 'Grid4D', 'RegularGridInterpolator'] @@ -42,7 +45,7 @@ def _axis(self, units: cf.AxisUnit) -> str | None: """ for name, coord in self.data_array.coords.items(): if hasattr(coord, 'units') and coord.units in units: - return name # type: ignore + return name # type: ignore[return-value] return None def longitude(self) -> str | None: @@ -90,7 +93,8 @@ def _dims_from_data_array(data_array: xr.DataArray, f'{ndims}, found {size}.') if not geodetic: - return tuple(data_array.coords)[:2] # type: ignore + return tuple( # type: ignore[return-value] + data_array.coords)[:2] ident = AxisIdentifier(data_array) lon = ident.longitude() @@ -183,7 +187,7 @@ def __init__(self, data_array.transpose(*self._dims).values, increasing_axes='inplace' if increasing_axes else None) - def bivariate(self, coords: dict, *args, **kwargs) -> numpy.ndarray: + def bivariate(self, coords: dict, *args, **kwargs) -> np.ndarray: """Evaluate the interpolation defined for the given coordinates. Args: @@ -200,7 +204,7 @@ def bivariate(self, coords: dict, *args, **kwargs) -> numpy.ndarray: return interpolator.bivariate(self, *_coords(coords, self._dims), *args, **kwargs) - def bicubic(self, coords: dict, *args, **kwargs) -> numpy.ndarray: + def bicubic(self, coords: dict, *args, **kwargs) -> np.ndarray: """Evaluate the interpolation defined for the given coordinates. Args: @@ -270,7 +274,7 @@ def __init__(self, data_array.transpose(x, y, z).values, increasing_axes='inplace' if increasing_axes else None) - def trivariate(self, coords: dict, *args, **kwargs) -> numpy.ndarray: + def trivariate(self, coords: dict, *args, **kwargs) -> np.ndarray: """Evaluate the interpolation defined for the given coordinates. 
Args: @@ -290,7 +294,7 @@ def trivariate(self, coords: dict, *args, **kwargs) -> numpy.ndarray: self, *_coords(coords, self._dims, self._datetime64), *args, **kwargs) - def bicubic(self, coords: dict, *args, **kwargs) -> numpy.ndarray: + def bicubic(self, coords: dict, *args, **kwargs) -> np.ndarray: """Evaluate the interpolation defined for the given coordinates. Args: @@ -373,7 +377,7 @@ def __init__(self, data_array.transpose(x, y, z, u).values, increasing_axes='inplace' if increasing_axes else None) - def quadrivariate(self, coords: dict, *args, **kwargs) -> numpy.ndarray: + def quadrivariate(self, coords: dict, *args, **kwargs) -> np.ndarray: """Evaluate the interpolation defined for the given coordinates. Args: @@ -393,7 +397,7 @@ def quadrivariate(self, coords: dict, *args, **kwargs) -> numpy.ndarray: self, *_coords(coords, self._dims, self._datetime64), *args, **kwargs) - def bicubic(self, coords: dict, *args, **kwargs) -> numpy.ndarray: + def bicubic(self, coords: dict, *args, **kwargs) -> np.ndarray: """Evaluate the interpolation defined for the given coordinates. Args: @@ -504,7 +508,7 @@ def __call__(self, bounds_error: bool = False, bicubic_kwargs: dict | None = None, num_threads: int = 0, - **kwargs) -> numpy.ndarray: + **kwargs) -> np.ndarray: """Interpolation at coordinates. 
Args: diff --git a/src/pyinterp/binning.py b/src/pyinterp/binning.py index 6ebd3e13..cf976c9b 100644 --- a/src/pyinterp/binning.py +++ b/src/pyinterp/binning.py @@ -52,7 +52,8 @@ def __init__(self, x: core.Axis, y: core.Axis, wgs: geodetic.Spheroid | None = None, - dtype: numpy.dtype = numpy.dtype('float64')): + dtype: numpy.dtype | None = None): + dtype = dtype or numpy.dtype('float64') if dtype == numpy.dtype('float64'): self._instance: Binning2DTyped = core.Binning2DFloat64(x, y, wgs) elif dtype == numpy.dtype('float32'): @@ -94,7 +95,7 @@ def __add__(self, other: Binning2D) -> Binning2D: result = copy.copy(self) if type(result._instance) != type(other._instance): # noqa: E721 raise TypeError('Binning2D instance must be of the same type') - result._instance += other._instance # type: ignore + result._instance += other._instance # type: ignore[operator] return result def push(self, @@ -245,7 +246,8 @@ class Binning1D: def __init__(self, x: core.Axis, range: tuple[float, float] | None = None, - dtype: numpy.dtype = numpy.dtype('float64')): + dtype: numpy.dtype | None = None): + dtype = dtype or numpy.dtype('float64') if dtype == numpy.dtype('float64'): self._instance: (core.Binning1DFloat64 | core.Binning1DFloat32) = core.Binning1DFloat64( @@ -284,7 +286,7 @@ def __add__(self, other: Binning1D) -> Binning1D: result = copy.copy(self) if type(result._instance) != type(other._instance): # noqa: E721 raise TypeError('Binning1D instance must be of the same type') - result._instance += other._instance # type: ignore + result._instance += other._instance # type: ignore[operator] return result def push( diff --git a/src/pyinterp/core/__init__.pyi b/src/pyinterp/core/__init__.pyi index 0479a504..7f36994c 100644 --- a/src/pyinterp/core/__init__.pyi +++ b/src/pyinterp/core/__init__.pyi @@ -1,20 +1,52 @@ -from __future__ import annotations - from typing import ( Any, ClassVar, Iterator, List, - Literal, Optional, + Self, Tuple, + TypeAlias, overload, ) import numpy import 
numpy.typing -from . import dateutils, fill, geodetic, geohash +from . import fill, geodetic + +__all__ = [ + "Axis", + "AxisBoundary", + "AxisInt64", + "Bilinear2D", + "Bilinear3D", + "Binning1DFloat32", + "Binning1DFloat64", + "Binning2DFloat32", + "Binning2DFloat64", + "CovarianceFunction", + "DescriptiveStatisticsFloat32", + "DescriptiveStatisticsFloat64", + "fill", + "geodetic", + "GeoHash", + "Grid2DFloat32", + "Grid2DFloat64", + "Grid2DInt8", + "Grid2DUInt8", + "Grid3DFloat32", + "Grid3DFloat64", + "Grid3DInt8", + "Grid3DUInt8", + "Grid4DFloat32", + "Grid4DFloat64", + "Grid4DInt8", + "Grid4DUInt8", + "Histogram2DFloat32", + "Histogram2DFloat64", +] + from .array import ( Array1DBool, Array1DFloat32, @@ -82,7 +114,7 @@ class Axis: def __copy__(self) -> Axis: ... - def __eq__(self, other: Axis) -> bool: # type: ignore + def __eq__(self, other: Axis) -> bool: # type: ignore[override] ... @overload @@ -346,7 +378,10 @@ class Binning2DFloat32: def __getstate__(self) -> tuple: ... - def __iadd__(self, other: Binning2DFloat32) -> Binning2DFloat32: + def __iadd__( + self, + other: Binning2DFloat32, + ) -> Self: ... def __setstate__(self, state: tuple) -> None: @@ -416,7 +451,7 @@ class Binning2DFloat64: def __getstate__(self) -> tuple: ... - def __iadd__(self, other: Binning2DFloat64) -> Binning2DFloat64: + def __iadd__(self, other: Binning2DFloat64) -> Self: ... def __setstate__(self, state: tuple) -> None: @@ -539,9 +574,7 @@ class DescriptiveStatisticsFloat32: def __getstate__(self) -> tuple: ... - def __iadd__( - self, other: DescriptiveStatisticsFloat32 - ) -> DescriptiveStatisticsFloat32: + def __iadd__(self, other: DescriptiveStatisticsFloat32) -> Self: ... def __setstate__(self, state: tuple) -> None: @@ -594,9 +627,7 @@ class DescriptiveStatisticsFloat64: def __getstate__(self) -> tuple: ... - def __iadd__( - self, other: DescriptiveStatisticsFloat64 - ) -> DescriptiveStatisticsFloat64: + def __iadd__(self, other: DescriptiveStatisticsFloat64) -> Self: ... 
def __setstate__(self, state: tuple) -> None: @@ -1036,7 +1067,7 @@ class Histogram2DFloat32: def __getstate__(self) -> tuple: ... - def __iadd__(self, other: Histogram2DFloat32) -> Histogram2DFloat32: + def __iadd__(self, other: Histogram2DFloat32) -> Self: ... def __setstate__(self, state: tuple) -> None: @@ -1099,7 +1130,7 @@ class Histogram2DFloat64: def __getstate__(self) -> tuple: ... - def __iadd__(self, other: Histogram2DFloat64) -> Histogram2DFloat64: + def __iadd__(self, other: Histogram2DFloat64) -> Self: ... def __setstate__(self, state: tuple) -> None: @@ -1223,7 +1254,7 @@ class Period: ... -Array1DPeriod = numpy.ndarray[ +Array1DPeriod: TypeAlias = numpy.ndarray[ tuple[int], numpy.dtype[Period] # type: ignore[type-var] ] @@ -1559,9 +1590,7 @@ class StreamingHistogramFloat32: def __getstate__(self) -> tuple: ... - def __iadd__( - self, - other: StreamingHistogramFloat32) -> StreamingHistogramFloat32: + def __iadd__(self, other: StreamingHistogramFloat32) -> Self: ... def __setstate__(self, state: tuple) -> None: @@ -1623,9 +1652,7 @@ class StreamingHistogramFloat64: def __getstate__(self) -> tuple: ... - def __iadd__( - self, - other: StreamingHistogramFloat64) -> StreamingHistogramFloat64: + def __iadd__(self, other: StreamingHistogramFloat64) -> Self: ... 
def __setstate__(self, state: tuple) -> None: diff --git a/src/pyinterp/core/array.pyi b/src/pyinterp/core/array.pyi index 3b1bd80b..a4790ee9 100644 --- a/src/pyinterp/core/array.pyi +++ b/src/pyinterp/core/array.pyi @@ -1,33 +1,33 @@ -from typing import Any, Literal +from typing import Any, TypeAlias import numpy -_1D = tuple[int] -_2D = tuple[int, int] -_3D = tuple[int, int, int] -_4D = tuple[int, int, int, int] +OneDim: TypeAlias = tuple[int] +TwoDims: TypeAlias = tuple[int, int] +ThreeDims: TypeAlias = tuple[int, int, int] +FourDims: TypeAlias = tuple[int, int, int, int] -Array1DBool = numpy.ndarray[_1D, numpy.dtype[numpy.bool_]] -Array1DFloat32 = numpy.ndarray[_1D, numpy.dtype[numpy.float32]] -Array1DFloat64 = numpy.ndarray[_1D, numpy.dtype[numpy.float64]] -Array1DInt64 = numpy.ndarray[_1D, numpy.dtype[numpy.int64]] -Array1DStr = numpy.ndarray[_1D, numpy.dtype[numpy.str_]] -Array1DUInt64 = numpy.ndarray[_1D, numpy.dtype[numpy.uint64]] -Array2DFloat32 = numpy.ndarray[_2D, numpy.dtype[numpy.float32]] -Array2DFloat64 = numpy.ndarray[_2D, numpy.dtype[numpy.float64]] -Array2DInt64 = numpy.ndarray[_2D, numpy.dtype[numpy.int64]] -Array2DInt8 = numpy.ndarray[_2D, numpy.dtype[numpy.int8]] -Array2DUInt64 = numpy.ndarray[_2D, numpy.dtype[numpy.uint64]] -Array2DUInt8 = numpy.ndarray[_2D, numpy.dtype[numpy.uint8]] -Array3DFloat32 = numpy.ndarray[_3D, numpy.dtype[numpy.float32]] -Array3DFloat64 = numpy.ndarray[_3D, numpy.dtype[numpy.float64]] -Array3DInt8 = numpy.ndarray[_3D, numpy.dtype[numpy.int8]] -Array3DUInt8 = numpy.ndarray[_3D, numpy.dtype[numpy.uint8]] -Array4DFloat32 = numpy.ndarray[_4D, numpy.dtype[numpy.float32]] -Array4DFloat64 = numpy.ndarray[_4D, numpy.dtype[numpy.float64]] -Array4DInt8 = numpy.ndarray[_4D, numpy.dtype[numpy.int8]] -Array4DUInt8 = numpy.ndarray[_4D, numpy.dtype[numpy.uint8]] -ArrayFloat32 = numpy.ndarray[Any, numpy.dtype[numpy.float32]] -ArrayFloat64 = numpy.ndarray[Any, numpy.dtype[numpy.float64]] -ArrayInt64 = numpy.ndarray[Any, 
numpy.dtype[numpy.int64]] -ArrayUInt64 = numpy.ndarray[Any, numpy.dtype[numpy.uint64]] +Array1DBool: TypeAlias = numpy.ndarray[OneDim, numpy.dtype[numpy.bool_]] +Array1DFloat32: TypeAlias = numpy.ndarray[OneDim, numpy.dtype[numpy.float32]] +Array1DFloat64: TypeAlias = numpy.ndarray[OneDim, numpy.dtype[numpy.float64]] +Array1DInt64: TypeAlias = numpy.ndarray[OneDim, numpy.dtype[numpy.int64]] +Array1DStr: TypeAlias = numpy.ndarray[OneDim, numpy.dtype[numpy.str_]] +Array1DUInt64: TypeAlias = numpy.ndarray[OneDim, numpy.dtype[numpy.uint64]] +Array2DFloat32: TypeAlias = numpy.ndarray[TwoDims, numpy.dtype[numpy.float32]] +Array2DFloat64: TypeAlias = numpy.ndarray[TwoDims, numpy.dtype[numpy.float64]] +Array2DInt64: TypeAlias = numpy.ndarray[TwoDims, numpy.dtype[numpy.int64]] +Array2DInt8: TypeAlias = numpy.ndarray[TwoDims, numpy.dtype[numpy.int8]] +Array2DUInt64: TypeAlias = numpy.ndarray[TwoDims, numpy.dtype[numpy.uint64]] +Array2DUInt8: TypeAlias = numpy.ndarray[TwoDims, numpy.dtype[numpy.uint8]] +Array3DFloat32: TypeAlias = numpy.ndarray[ThreeDims, numpy.dtype[numpy.float32]] +Array3DFloat64: TypeAlias = numpy.ndarray[ThreeDims, numpy.dtype[numpy.float64]] +Array3DInt8: TypeAlias = numpy.ndarray[ThreeDims, numpy.dtype[numpy.int8]] +Array3DUInt8: TypeAlias = numpy.ndarray[ThreeDims, numpy.dtype[numpy.uint8]] +Array4DFloat32: TypeAlias = numpy.ndarray[FourDims, numpy.dtype[numpy.float32]] +Array4DFloat64: TypeAlias = numpy.ndarray[FourDims, numpy.dtype[numpy.float64]] +Array4DInt8: TypeAlias = numpy.ndarray[FourDims, numpy.dtype[numpy.int8]] +Array4DUInt8: TypeAlias = numpy.ndarray[FourDims, numpy.dtype[numpy.uint8]] +ArrayFloat32: TypeAlias = numpy.ndarray[Any, numpy.dtype[numpy.float32]] +ArrayFloat64: TypeAlias = numpy.ndarray[Any, numpy.dtype[numpy.float64]] +ArrayInt64: TypeAlias = numpy.ndarray[Any, numpy.dtype[numpy.int64]] +ArrayUInt64: TypeAlias = numpy.ndarray[Any, numpy.dtype[numpy.uint64]] diff --git a/src/pyinterp/core/fill.pyi b/src/pyinterp/core/fill.pyi 
index 2073563e..ac8cf46c 100644 --- a/src/pyinterp/core/fill.pyi +++ b/src/pyinterp/core/fill.pyi @@ -1,9 +1,5 @@ -from __future__ import annotations - from typing import ClassVar, Tuple, overload -import numpy - from . import ( Grid2DFloat32, Grid2DFloat64, diff --git a/src/pyinterp/core/geodetic.pyi b/src/pyinterp/core/geodetic.pyi index 3c5cd194..9075f95c 100644 --- a/src/pyinterp/core/geodetic.pyi +++ b/src/pyinterp/core/geodetic.pyi @@ -1,5 +1,3 @@ -from __future__ import annotations - from typing import ( Any, ClassVar, @@ -7,12 +5,11 @@ from typing import ( Iterator, List, Optional, + Self, Tuple, overload, ) -import numpy - from .. import core from .array import Array1DBool, Array1DFloat64, Array2DFloat64 @@ -371,7 +368,7 @@ class MultiPolygon: def __getstate__(self) -> tuple: ... - def __iadd__(self, other: MultiPolygon) -> MultiPolygon: + def __iadd__(self, other: MultiPolygon) -> Self: ... def __iter__(self) -> Iterator: diff --git a/src/pyinterp/core/geohash/__init__.pyi b/src/pyinterp/core/geohash/__init__.pyi index 595e17eb..f7a3c9f0 100644 --- a/src/pyinterp/core/geohash/__init__.pyi +++ b/src/pyinterp/core/geohash/__init__.pyi @@ -1,10 +1,7 @@ -from __future__ import annotations - from typing import Optional, Tuple, overload import numpy -from . import int64 from .. import geodetic from ..array import Array1DFloat64, Array1DStr diff --git a/src/pyinterp/core/geohash/int64.pyi b/src/pyinterp/core/geohash/int64.pyi index 1e0ab695..c9514a1a 100644 --- a/src/pyinterp/core/geohash/int64.pyi +++ b/src/pyinterp/core/geohash/int64.pyi @@ -1,8 +1,3 @@ -from __future__ import annotations - -from typing import Any, Literal, overload - -from .. 
import geodetic from ..array import Array1DFloat64, Array1DUInt64 def decode(hash: Array1DUInt64, diff --git a/src/pyinterp/core/module/main.cpp b/src/pyinterp/core/module/main.cpp index fd2eb4cd..dcc2741f 100644 --- a/src/pyinterp/core/module/main.cpp +++ b/src/pyinterp/core/module/main.cpp @@ -39,7 +39,7 @@ GeoHash encoded as integer 64 bits init_geohash_string(m); } -PYBIND11_MODULE(core, m) { // NOLINT +PYBIND11_MODULE(core, m, py::mod_gil_not_used()) { // NOLINT m.doc() = R"__doc__( Core module ----------- diff --git a/src/pyinterp/core/module/period.cpp b/src/pyinterp/core/module/period.cpp index 99b9b5f5..779b9708 100644 --- a/src/pyinterp/core/module/period.cpp +++ b/src/pyinterp/core/module/period.cpp @@ -19,7 +19,14 @@ void init_period(py::module &m) { "last", [](const pyinterp::Period &self) -> int64_t { return self.last; }) .def("end", &pyinterp::Period::end) - .def("__len__", &pyinterp::Period::length) + .def("__len__", + [](const pyinterp::Period &self) -> int64_t { + auto result = self.length(); + if (result < 0) { + throw std::invalid_argument("invalid period"); + } + return result; + }) .def("__str__", [](const pyinterp::Period &self) -> std::string { std::stringstream ss; diff --git a/src/pyinterp/core/tests/axis.cpp b/src/pyinterp/core/tests/axis.cpp index 37eaa97d..eba878fe 100644 --- a/src/pyinterp/core/tests/axis.cpp +++ b/src/pyinterp/core/tests/axis.cpp @@ -25,7 +25,7 @@ class AxisTest : public testing::Test { epsilon, is_circle)); } - void reset_axis(Eigen::Ref> values, T epsilon, + void reset_axis(const pyinterp::Vector &values, T epsilon, bool is_circle) { axis = std::move( std::make_unique>(values, epsilon, is_circle)); @@ -34,195 +34,215 @@ class AxisTest : public testing::Test { std::unique_ptr> axis{}; }; -TYPED_TEST_SUITE(AxisTest, Implementations); +class AxisTestSuite { + public: + template + static std::string GetName(int) { + if (std::is_same_v) { + return "int32"; + } + if (std::is_same_v) { + return "int64"; + } + if 
(std::is_same_v) { + return "float"; + } + if (std::is_same_v) { + return "double"; + } + throw std::runtime_error("unsupported type"); + } +}; + +TYPED_TEST_SUITE(AxisTest, Implementations, AxisTestSuite); TYPED_TEST(AxisTest, default_constructor) { // undefined axis - auto &axis = *(this->axis); - EXPECT_TRUE(detail::math::Fill::is(axis.front())); - EXPECT_TRUE(detail::math::Fill::is(axis.back())); - EXPECT_TRUE(detail::math::Fill::is(axis.min_value())); - EXPECT_TRUE(detail::math::Fill::is(axis.max_value())); - EXPECT_THROW((void)axis.increment(), std::logic_error); - EXPECT_FALSE(axis.is_circle()); - EXPECT_TRUE(axis.is_ascending()); - EXPECT_TRUE(detail::math::Fill::is(axis.front())); - EXPECT_TRUE(detail::math::Fill::is(axis.back())); - EXPECT_THROW((void)axis.increment(), std::logic_error); - EXPECT_EQ(axis.is_regular(), false); - EXPECT_EQ(axis.size(), 0); - EXPECT_THROW((void)axis.coordinate_value(0), std::out_of_range); - EXPECT_THROW((void)axis.slice(0, 1), std::out_of_range); - EXPECT_EQ(axis.find_index(360, true), -1); - EXPECT_EQ(axis.find_index(360, false), -1); - auto indexes = axis.find_indexes(360); + auto *axis = this->axis.get(); + EXPECT_TRUE(detail::math::Fill::is(axis->front())); + EXPECT_TRUE(detail::math::Fill::is(axis->back())); + EXPECT_TRUE(detail::math::Fill::is(axis->min_value())); + EXPECT_TRUE(detail::math::Fill::is(axis->max_value())); + EXPECT_THROW((void)axis->increment(), std::logic_error); + EXPECT_FALSE(axis->is_circle()); + EXPECT_TRUE(axis->is_ascending()); + EXPECT_TRUE(detail::math::Fill::is(axis->front())); + EXPECT_TRUE(detail::math::Fill::is(axis->back())); + EXPECT_THROW((void)axis->increment(), std::logic_error); + EXPECT_EQ(axis->is_regular(), false); + EXPECT_EQ(axis->size(), 0); + EXPECT_THROW((void)axis->coordinate_value(0), std::out_of_range); + EXPECT_THROW((void)axis->slice(0, 1), std::out_of_range); + EXPECT_EQ(axis->find_index(360, true), -1); + EXPECT_EQ(axis->find_index(360, false), -1); + auto indexes = 
axis->find_indexes(360); EXPECT_FALSE(indexes.has_value()); } TYPED_TEST(AxisTest, singleton) { // axis with one value this->reset_axis(0, 1, 1, static_cast(1e-6), false); - auto &axis = *(this->axis); - EXPECT_EQ(axis.find_index(0, false), 0); - EXPECT_EQ(axis.find_index(1, false), -1); - EXPECT_EQ(axis.find_index(1, true), 0); - auto indexes = axis.find_indexes(0); + auto *axis = this->axis.get(); + EXPECT_EQ(axis->find_index(0, false), 0); + EXPECT_EQ(axis->find_index(1, false), -1); + EXPECT_EQ(axis->find_index(1, true), 0); + auto indexes = axis->find_indexes(0); EXPECT_FALSE(indexes.has_value()); - EXPECT_TRUE(axis.is_ascending()); - EXPECT_EQ(axis.front(), 0); - EXPECT_EQ(axis.back(), 0); - EXPECT_EQ(axis.min_value(), 0); - EXPECT_EQ(axis.max_value(), 0); - EXPECT_EQ(axis.increment(), 1); - EXPECT_FALSE(axis.is_circle()); - EXPECT_EQ(axis.is_regular(), true); - EXPECT_EQ(axis.size(), 1); - EXPECT_EQ(axis.coordinate_value(0), 0); - auto slice = axis.slice(0, 1); + EXPECT_TRUE(axis->is_ascending()); + EXPECT_EQ(axis->front(), 0); + EXPECT_EQ(axis->back(), 0); + EXPECT_EQ(axis->min_value(), 0); + EXPECT_EQ(axis->max_value(), 0); + EXPECT_EQ(axis->increment(), 1); + EXPECT_FALSE(axis->is_circle()); + EXPECT_EQ(axis->is_regular(), true); + EXPECT_EQ(axis->size(), 1); + EXPECT_EQ(axis->coordinate_value(0), 0); + auto slice = axis->slice(0, 1); EXPECT_EQ(slice.size(), 1); EXPECT_EQ(slice[0], 0); - EXPECT_THROW((void)axis.coordinate_value(1), std::exception); - EXPECT_THROW((void)axis.slice(0, 2), std::exception); + EXPECT_THROW((void)axis->coordinate_value(1), std::exception); + EXPECT_THROW((void)axis->slice(0, 2), std::exception); } TYPED_TEST(AxisTest, binary) { // axis with two values this->reset_axis(0, 1, 2, static_cast(1e-6), false); - auto &axis = *(this->axis); - auto indexes = axis.find_indexes(0); + auto *axis = this->axis.get(); + auto indexes = axis->find_indexes(0); ASSERT_TRUE(indexes); EXPECT_EQ(std::get<0>(*indexes), 0); 
EXPECT_EQ(std::get<1>(*indexes), 1); - indexes = axis.find_indexes(1); + indexes = axis->find_indexes(1); ASSERT_TRUE(indexes); EXPECT_EQ(std::get<0>(*indexes), 0); EXPECT_EQ(std::get<1>(*indexes), 1); if (std::is_floating_point_v) { - EXPECT_FALSE(axis.find_indexes(static_cast(-0.1))); - EXPECT_FALSE(axis.find_indexes(static_cast(+1.1))); - indexes = axis.find_indexes(static_cast(0.4)); + EXPECT_FALSE(axis->find_indexes(static_cast(-0.1))); + EXPECT_FALSE(axis->find_indexes(static_cast(+1.1))); + indexes = axis->find_indexes(static_cast(0.4)); ASSERT_TRUE(indexes); EXPECT_EQ(std::get<0>(*indexes), 0); EXPECT_EQ(std::get<1>(*indexes), 1); - indexes = axis.find_indexes(static_cast(0.6)); + indexes = axis->find_indexes(static_cast(0.6)); ASSERT_TRUE(indexes); EXPECT_EQ(std::get<0>(*indexes), 0); EXPECT_EQ(std::get<1>(*indexes), 1); } else { - EXPECT_FALSE(axis.find_indexes(-1)); - EXPECT_FALSE(axis.find_indexes(+2)); - indexes = axis.find_indexes(0); + EXPECT_FALSE(axis->find_indexes(-1)); + EXPECT_FALSE(axis->find_indexes(+2)); + indexes = axis->find_indexes(0); ASSERT_TRUE(indexes); EXPECT_EQ(std::get<0>(*indexes), 0); EXPECT_EQ(std::get<1>(*indexes), 1); } - EXPECT_EQ(axis.front(), 0); - EXPECT_EQ(axis.back(), 1); - EXPECT_EQ(axis.min_value(), 0); - EXPECT_EQ(axis.max_value(), 1); - EXPECT_EQ(axis.increment(), 1); - EXPECT_FALSE(axis.is_circle()); - EXPECT_TRUE(axis.is_ascending()); - EXPECT_EQ(axis.is_regular(), true); - EXPECT_EQ(axis.size(), 2); - auto value = axis.coordinate_value(0); + EXPECT_EQ(axis->front(), 0); + EXPECT_EQ(axis->back(), 1); + EXPECT_EQ(axis->min_value(), 0); + EXPECT_EQ(axis->max_value(), 1); + EXPECT_EQ(axis->increment(), 1); + EXPECT_FALSE(axis->is_circle()); + EXPECT_TRUE(axis->is_ascending()); + EXPECT_EQ(axis->is_regular(), true); + EXPECT_EQ(axis->size(), 2); + auto value = axis->coordinate_value(0); EXPECT_EQ(value, 0); - value = axis.coordinate_value(1); + value = axis->coordinate_value(1); EXPECT_EQ(value, 1); - EXPECT_THROW(value 
= axis.coordinate_value(2), std::exception); - auto slice = axis.slice(0, 2); + EXPECT_THROW(value = axis->coordinate_value(2), std::exception); + auto slice = axis->slice(0, 2); EXPECT_EQ(slice.size(), 2); EXPECT_EQ(slice[0], 0); EXPECT_EQ(slice[1], 1); - EXPECT_THROW((void)axis.slice(0, 3), std::exception); + EXPECT_THROW((void)axis->slice(0, 3), std::exception); } TYPED_TEST(AxisTest, wrap_longitude) { // axis representing a circle this->reset_axis(0, 359, 360, static_cast(1e-6), true); - auto &a1 = *(this->axis); + auto *a1 = this->axis.get(); int64_t i1; - EXPECT_EQ(a1.front(), 0); - EXPECT_EQ(a1.increment(), 1); - EXPECT_TRUE(a1.is_circle()); - EXPECT_TRUE(a1.is_regular()); - EXPECT_TRUE(a1.is_ascending()); - EXPECT_EQ(a1.front(), 0); - EXPECT_EQ(a1.back(), 359); - EXPECT_EQ(a1.min_value(), 0); - EXPECT_EQ(a1.max_value(), 359); - EXPECT_EQ(a1.size(), 360); - EXPECT_EQ(a1.coordinate_value(0), 0); - EXPECT_EQ(a1.coordinate_value(180), 180); - EXPECT_THROW((void)a1.coordinate_value(520), std::exception); - i1 = a1.find_index(0, false); + EXPECT_EQ(a1->front(), 0); + EXPECT_EQ(a1->increment(), 1); + EXPECT_TRUE(a1->is_circle()); + EXPECT_TRUE(a1->is_regular()); + EXPECT_TRUE(a1->is_ascending()); + EXPECT_EQ(a1->front(), 0); + EXPECT_EQ(a1->back(), 359); + EXPECT_EQ(a1->min_value(), 0); + EXPECT_EQ(a1->max_value(), 359); + EXPECT_EQ(a1->size(), 360); + EXPECT_EQ(a1->coordinate_value(0), 0); + EXPECT_EQ(a1->coordinate_value(180), 180); + EXPECT_THROW((void)a1->coordinate_value(520), std::exception); + i1 = a1->find_index(0, false); EXPECT_EQ(i1, 0); - i1 = a1.find_index(360, true); + i1 = a1->find_index(360, true); EXPECT_EQ(i1, 0); - i1 = a1.find_index(360, false); + i1 = a1->find_index(360, false); EXPECT_EQ(i1, 0); - auto indexes = a1.find_indexes(360); + auto indexes = a1->find_indexes(360); ASSERT_TRUE(indexes); EXPECT_EQ(std::get<0>(*indexes), 0); EXPECT_EQ(std::get<1>(*indexes), 1); - indexes = a1.find_indexes(370); + indexes = a1->find_indexes(370); 
ASSERT_TRUE(indexes); EXPECT_EQ(std::get<0>(*indexes), 10); EXPECT_EQ(std::get<1>(*indexes), 11); if (std::is_floating_point_v) { - indexes = a1.find_indexes(static_cast(-9.5)); + indexes = a1->find_indexes(static_cast(-9.5)); ASSERT_TRUE(indexes); EXPECT_EQ(std::get<0>(*indexes), 350); EXPECT_EQ(std::get<1>(*indexes), 351); } else { - indexes = a1.find_indexes(-10); + indexes = a1->find_indexes(-10); ASSERT_TRUE(indexes); EXPECT_EQ(std::get<0>(*indexes), 350); EXPECT_EQ(std::get<1>(*indexes), 351); } - a1.flip(); - EXPECT_EQ(a1.front(), 359); - EXPECT_EQ(a1.increment(), -1); - EXPECT_TRUE(a1.is_circle()); - EXPECT_TRUE(a1.is_regular()); - EXPECT_FALSE(a1.is_ascending()); - EXPECT_EQ(a1.front(), 359); - EXPECT_EQ(a1.back(), 0); - EXPECT_EQ(a1.min_value(), 0); - EXPECT_EQ(a1.max_value(), 359); - EXPECT_EQ(a1.size(), 360); - EXPECT_EQ(a1.coordinate_value(0), 359); - EXPECT_EQ(a1.coordinate_value(180), 179); - EXPECT_THROW((void)a1.coordinate_value(520), std::exception); - auto slice = a1.slice(0, 2); + a1->flip(); + EXPECT_EQ(a1->front(), 359); + EXPECT_EQ(a1->increment(), -1); + EXPECT_TRUE(a1->is_circle()); + EXPECT_TRUE(a1->is_regular()); + EXPECT_FALSE(a1->is_ascending()); + EXPECT_EQ(a1->front(), 359); + EXPECT_EQ(a1->back(), 0); + EXPECT_EQ(a1->min_value(), 0); + EXPECT_EQ(a1->max_value(), 359); + EXPECT_EQ(a1->size(), 360); + EXPECT_EQ(a1->coordinate_value(0), 359); + EXPECT_EQ(a1->coordinate_value(180), 179); + EXPECT_THROW((void)a1->coordinate_value(520), std::exception); + auto slice = a1->slice(0, 2); EXPECT_EQ(slice.size(), 2); EXPECT_EQ(slice[0], 359); EXPECT_EQ(slice[1], 358); - EXPECT_THROW((void)a1.slice(0, 520), std::exception); - i1 = a1.find_index(0, false); + EXPECT_THROW((void)a1->slice(0, 520), std::exception); + i1 = a1->find_index(0, false); EXPECT_EQ(i1, 359); - i1 = a1.find_index(359, true); + i1 = a1->find_index(359, true); EXPECT_EQ(i1, 0); - i1 = a1.find_index(359, false); + i1 = a1->find_index(359, false); EXPECT_EQ(i1, 0); if 
(std::is_floating_point_v) { - indexes = a1.find_indexes(static_cast(359.5)); + indexes = a1->find_indexes(static_cast(359.5)); ASSERT_TRUE(indexes); EXPECT_EQ(std::get<0>(*indexes), 359); EXPECT_EQ(std::get<1>(*indexes), 0); } - indexes = a1.find_indexes(370); + indexes = a1->find_indexes(370); ASSERT_TRUE(indexes); EXPECT_EQ(std::get<0>(*indexes), 349); EXPECT_EQ(std::get<1>(*indexes), 350); if (std::is_floating_point_v) { - indexes = a1.find_indexes(static_cast(-9.5)); + indexes = a1->find_indexes(static_cast(-9.5)); ASSERT_TRUE(indexes); EXPECT_EQ(std::get<0>(*indexes), 9); EXPECT_EQ(std::get<1>(*indexes), 8); } else { - indexes = a1.find_indexes(-9); + indexes = a1->find_indexes(-9); ASSERT_TRUE(indexes); EXPECT_EQ(std::get<0>(*indexes), 8); EXPECT_EQ(std::get<1>(*indexes), 9); @@ -249,7 +269,7 @@ TYPED_TEST(AxisTest, wrap_longitude) { EXPECT_EQ(slice.size(), 2); EXPECT_EQ(slice[0], -180); EXPECT_EQ(slice[1], -179); - EXPECT_NE(a1, a2); + EXPECT_NE(*a1, a2); a2 = detail::Axis(180, -179, 360, static_cast(1e-6), true); @@ -273,7 +293,7 @@ TYPED_TEST(AxisTest, wrap_longitude) { EXPECT_EQ(a2.back(), -179); EXPECT_EQ(a2.coordinate_value(0), 180); EXPECT_EQ(a2.coordinate_value(180), 0); - EXPECT_NE(a1, a2); + EXPECT_NE(*a1, a2); a2.flip(); EXPECT_EQ(a2.front(), -179); @@ -507,77 +527,78 @@ TEST(axis, irregular) { TYPED_TEST(AxisTest, search_indexes) { // search for indexes around a value on an axis this->reset_axis(0, 359, 360, static_cast(1e-6), true); - auto &axis = *(this->axis); + auto *axis = this->axis.get(); if (std::is_floating_point_v) { - auto indexes = axis.find_indexes(static_cast(359.4)); + auto indexes = axis->find_indexes(static_cast(359.4)); ASSERT_TRUE(indexes.has_value()); EXPECT_EQ(std::get<0>(*indexes), 359); EXPECT_EQ(std::get<1>(*indexes), 0); - indexes = axis.find_indexes(static_cast(359.6)); + indexes = axis->find_indexes(static_cast(359.6)); ASSERT_TRUE(indexes.has_value()); EXPECT_EQ(std::get<0>(*indexes), 359); 
EXPECT_EQ(std::get<1>(*indexes), 0); - indexes = axis.find_indexes(static_cast(-0.1)); + indexes = axis->find_indexes(static_cast(-0.1)); ASSERT_TRUE(indexes.has_value()); EXPECT_EQ(std::get<0>(*indexes), 359); EXPECT_EQ(std::get<1>(*indexes), 0); - indexes = axis.find_indexes(static_cast(359.9)); + indexes = axis->find_indexes(static_cast(359.9)); ASSERT_TRUE(indexes.has_value()); EXPECT_EQ(std::get<0>(*indexes), 359); EXPECT_EQ(std::get<1>(*indexes), 0); - indexes = axis.find_indexes(static_cast(0.01)); + indexes = axis->find_indexes(static_cast(0.01)); ASSERT_TRUE(indexes.has_value()); EXPECT_EQ(std::get<0>(*indexes), 0); EXPECT_EQ(std::get<1>(*indexes), 1); - indexes = axis.find_indexes(static_cast(358.9)); + indexes = axis->find_indexes(static_cast(358.9)); ASSERT_TRUE(indexes.has_value()); EXPECT_EQ(std::get<0>(*indexes), 358); EXPECT_EQ(std::get<1>(*indexes), 359); } else { - auto indexes = axis.find_indexes(359); + auto indexes = axis->find_indexes(359); ASSERT_TRUE(indexes.has_value()); EXPECT_EQ(std::get<0>(*indexes), 358); EXPECT_EQ(std::get<1>(*indexes), 359); - indexes = axis.find_indexes(-1); + indexes = axis->find_indexes(-1); ASSERT_TRUE(indexes.has_value()); EXPECT_EQ(std::get<0>(*indexes), 358); EXPECT_EQ(std::get<1>(*indexes), 359); - indexes = axis.find_indexes(360); + indexes = axis->find_indexes(360); ASSERT_TRUE(indexes.has_value()); EXPECT_EQ(std::get<0>(*indexes), 0); EXPECT_EQ(std::get<1>(*indexes), 1); } - axis = detail::Axis(10, 20, 1, static_cast(1e-6), true); - EXPECT_FALSE(axis.find_indexes(static_cast(20.01)).has_value()); - EXPECT_FALSE(axis.find_indexes(static_cast(9.9)).has_value()); + this->reset_axis(10, 20, 1, static_cast(1e-6), true); + axis = this->axis.get(); + EXPECT_FALSE(axis->find_indexes(static_cast(20.01)).has_value()); + EXPECT_FALSE(axis->find_indexes(static_cast(9.9)).has_value()); } TYPED_TEST(AxisTest, search_window) { // search for indexes that frame a value around a window std::vector indexes; 
this->reset_axis(-180, 179, 360, static_cast(1e-6), true); - auto &axis = *(this->axis); + auto *axis = this->axis.get(); - indexes = axis.find_indexes(0, 1, pyinterp::axis::kUndef); + indexes = axis->find_indexes(0, 1, pyinterp::axis::kUndef); ASSERT_EQ(indexes.size(), 2); EXPECT_EQ(indexes[0], 180); EXPECT_EQ(indexes[1], 181); - EXPECT_THROW(indexes = axis.find_indexes(0, 0, pyinterp::axis::kUndef), + EXPECT_THROW(indexes = axis->find_indexes(0, 0, pyinterp::axis::kUndef), std::invalid_argument); - indexes = axis.find_indexes(0, 5, pyinterp::axis::kUndef); + indexes = axis->find_indexes(0, 5, pyinterp::axis::kUndef); ASSERT_EQ(indexes.size(), 10); EXPECT_EQ(indexes[0], 176); EXPECT_EQ(indexes[1], 177); @@ -590,7 +611,7 @@ TYPED_TEST(AxisTest, search_window) { EXPECT_EQ(indexes[8], 184); EXPECT_EQ(indexes[9], 185); - indexes = axis.find_indexes(-180, 5, pyinterp::axis::kUndef); + indexes = axis->find_indexes(-180, 5, pyinterp::axis::kUndef); ASSERT_EQ(indexes.size(), 10); EXPECT_EQ(indexes[0], 356); EXPECT_EQ(indexes[1], 357); @@ -603,7 +624,7 @@ TYPED_TEST(AxisTest, search_window) { EXPECT_EQ(indexes[8], 4); EXPECT_EQ(indexes[9], 5); - indexes = axis.find_indexes(179, 5, pyinterp::axis::kUndef); + indexes = axis->find_indexes(179, 5, pyinterp::axis::kUndef); ASSERT_EQ(indexes.size(), 10); EXPECT_EQ(indexes[0], 354); EXPECT_EQ(indexes[1], 355); @@ -617,8 +638,8 @@ TYPED_TEST(AxisTest, search_window) { EXPECT_EQ(indexes[9], 3); if (std::is_floating_point_v) { - indexes = axis.find_indexes(static_cast(179.4), 5, - pyinterp::axis::kUndef); + indexes = axis->find_indexes(static_cast(179.4), 5, + pyinterp::axis::kUndef); ASSERT_EQ(indexes.size(), 10); EXPECT_EQ(indexes[0], 355); EXPECT_EQ(indexes[1], 356); @@ -631,8 +652,8 @@ TYPED_TEST(AxisTest, search_window) { EXPECT_EQ(indexes[8], 3); EXPECT_EQ(indexes[9], 4); - indexes = axis.find_indexes(static_cast(179.6), 5, - pyinterp::axis::kUndef); + indexes = axis->find_indexes(static_cast(179.6), 5, + 
pyinterp::axis::kUndef); ASSERT_EQ(indexes.size(), 10); EXPECT_EQ(indexes[0], 355); EXPECT_EQ(indexes[1], 356); @@ -647,8 +668,8 @@ TYPED_TEST(AxisTest, search_window) { } this->reset_axis(0, 9, 10, static_cast(1e-6), false); - axis = *(this->axis); - indexes = axis.find_indexes(5, 4, pyinterp::axis::kUndef); + axis = this->axis.get(); + indexes = axis->find_indexes(5, 4, pyinterp::axis::kUndef); ASSERT_EQ(indexes.size(), 8); EXPECT_EQ(indexes[0], 2); EXPECT_EQ(indexes[1], 3); @@ -659,12 +680,12 @@ TYPED_TEST(AxisTest, search_window) { EXPECT_EQ(indexes[6], 8); EXPECT_EQ(indexes[7], 9); - indexes = axis.find_indexes(-1, 4, pyinterp::axis::kUndef); + indexes = axis->find_indexes(-1, 4, pyinterp::axis::kUndef); EXPECT_EQ(indexes.empty(), true); - indexes = axis.find_indexes(10, 4, pyinterp::axis::kUndef); + indexes = axis->find_indexes(10, 4, pyinterp::axis::kUndef); EXPECT_EQ(indexes.empty(), true); - indexes = axis.find_indexes(1, 4, pyinterp::axis::kSym); + indexes = axis->find_indexes(1, 4, pyinterp::axis::kSym); ASSERT_EQ(indexes.size(), 8); EXPECT_EQ(indexes[0], 2); EXPECT_EQ(indexes[1], 1); @@ -675,7 +696,7 @@ TYPED_TEST(AxisTest, search_window) { EXPECT_EQ(indexes[6], 4); EXPECT_EQ(indexes[7], 5); - indexes = axis.find_indexes(9, 4, pyinterp::axis::kSym); + indexes = axis->find_indexes(9, 4, pyinterp::axis::kSym); ASSERT_EQ(indexes.size(), 8); EXPECT_EQ(indexes[0], 5); EXPECT_EQ(indexes[1], 6); @@ -686,7 +707,7 @@ TYPED_TEST(AxisTest, search_window) { EXPECT_EQ(indexes[6], 7); EXPECT_EQ(indexes[7], 6); - indexes = axis.find_indexes(1, 4, pyinterp::axis::kWrap); + indexes = axis->find_indexes(1, 4, pyinterp::axis::kWrap); ASSERT_EQ(indexes.size(), 8); EXPECT_EQ(indexes[0], 8); EXPECT_EQ(indexes[1], 9); @@ -697,7 +718,7 @@ TYPED_TEST(AxisTest, search_window) { EXPECT_EQ(indexes[6], 4); EXPECT_EQ(indexes[7], 5); - indexes = axis.find_indexes(9, 4, pyinterp::axis::kWrap); + indexes = axis->find_indexes(9, 4, pyinterp::axis::kWrap); ASSERT_EQ(indexes.size(), 8); 
EXPECT_EQ(indexes[0], 5); EXPECT_EQ(indexes[1], 6); @@ -708,7 +729,7 @@ TYPED_TEST(AxisTest, search_window) { EXPECT_EQ(indexes[6], 1); EXPECT_EQ(indexes[7], 2); - indexes = axis.find_indexes(1, 4, pyinterp::axis::kExpand); + indexes = axis->find_indexes(1, 4, pyinterp::axis::kExpand); ASSERT_EQ(indexes.size(), 8); EXPECT_EQ(indexes[0], 0); EXPECT_EQ(indexes[1], 0); @@ -719,7 +740,7 @@ TYPED_TEST(AxisTest, search_window) { EXPECT_EQ(indexes[6], 4); EXPECT_EQ(indexes[7], 5); - indexes = axis.find_indexes(9, 4, pyinterp::axis::kExpand); + indexes = axis->find_indexes(9, 4, pyinterp::axis::kExpand); ASSERT_EQ(indexes.size(), 8); EXPECT_EQ(indexes[0], 5); EXPECT_EQ(indexes[1], 6); @@ -730,9 +751,9 @@ TYPED_TEST(AxisTest, search_window) { EXPECT_EQ(indexes[6], 9); EXPECT_EQ(indexes[7], 9); - indexes = axis.find_indexes(1, 4, pyinterp::axis::kUndef); + indexes = axis->find_indexes(1, 4, pyinterp::axis::kUndef); ASSERT_TRUE(indexes.empty()); - indexes = axis.find_indexes(9, 4, pyinterp::axis::kUndef); + indexes = axis->find_indexes(9, 4, pyinterp::axis::kUndef); ASSERT_TRUE(indexes.empty()); } @@ -747,18 +768,18 @@ TEST(axis, timestamp) { TYPED_TEST(AxisTest, find_nearest_index) { this->reset_axis(0, 355, 72, static_cast(1e-6), true); - auto &axis = *(this->axis); + auto *axis = this->axis.get(); - EXPECT_EQ(axis.find_nearest_index(356, false), 71); - EXPECT_EQ(axis.find_nearest_index(358, false), 0); - EXPECT_EQ(axis.find_nearest_index(-2, false), 0); - EXPECT_EQ(axis.find_nearest_index(-4, false), 71); + EXPECT_EQ(axis->find_nearest_index(356, false), 71); + EXPECT_EQ(axis->find_nearest_index(358, false), 0); + EXPECT_EQ(axis->find_nearest_index(-2, false), 0); + EXPECT_EQ(axis->find_nearest_index(-4, false), 71); this->reset_axis(-180, 175, 72, static_cast(1e-6), true); - axis = *(this->axis); + axis = this->axis.get(); - EXPECT_EQ(axis.find_nearest_index(176, false), 71); - EXPECT_EQ(axis.find_nearest_index(178, false), 0); - EXPECT_EQ(axis.find_nearest_index(-182, 
false), 0); - EXPECT_EQ(axis.find_nearest_index(-184, false), 71); + EXPECT_EQ(axis->find_nearest_index(176, false), 71); + EXPECT_EQ(axis->find_nearest_index(178, false), 0); + EXPECT_EQ(axis->find_nearest_index(-182, false), 0); + EXPECT_EQ(axis->find_nearest_index(-184, false), 71); } diff --git a/src/pyinterp/core/tests/axis_container.cpp b/src/pyinterp/core/tests/axis_container.cpp index 85bcc195..51e17a32 100644 --- a/src/pyinterp/core/tests/axis_container.cpp +++ b/src/pyinterp/core/tests/axis_container.cpp @@ -16,7 +16,28 @@ class UndefinedTest : public testing::Test { public: using Axis = container::Undefined; }; -TYPED_TEST_SUITE(UndefinedTest, Implementations); + +class TestSuite { + public: + template + static std::string GetName(int) { + if (std::is_same_v) { + return "int32"; + } + if (std::is_same_v) { + return "int64"; + } + if (std::is_same_v) { + return "float"; + } + if (std::is_same_v) { + return "double"; + } + throw std::runtime_error("unsupported type"); + } +}; + +TYPED_TEST_SUITE(UndefinedTest, Implementations, TestSuite); TYPED_TEST(UndefinedTest, undefined) { // undefined axis @@ -42,7 +63,7 @@ class IrregularTest : public testing::Test { public: using Axis = container::Irregular; }; -TYPED_TEST_SUITE(IrregularTest, Implementations); +TYPED_TEST_SUITE(IrregularTest, Implementations, TestSuite); TYPED_TEST(IrregularTest, irregular) { // irregular axis @@ -98,7 +119,7 @@ class RegularTest : public testing::Test { public: using Axis = container::Regular; }; -TYPED_TEST_SUITE(RegularTest, Implementations); +TYPED_TEST_SUITE(RegularTest, Implementations, TestSuite); TYPED_TEST(RegularTest, irregular) { // regular axis diff --git a/src/pyinterp/core/tests/interpolation_polynomial.cpp b/src/pyinterp/core/tests/interpolation_polynomial.cpp index 745e8828..64c7a002 100644 --- a/src/pyinterp/core/tests/interpolation_polynomial.cpp +++ b/src/pyinterp/core/tests/interpolation_polynomial.cpp @@ -40,6 +40,6 @@ TEST(Polynomial, derivative) { auto 
interpolator = pyinterp::detail::interpolation::Polynomial(); auto dy = interpolator.derivative(xa, ya, xp); for (auto ix = 0; ix < xp.size(); ix++) { - EXPECT_DOUBLE_EQ(dy(ix), dyp(ix)); + EXPECT_NEAR(dy(ix), dyp(ix), 1e-4); } } diff --git a/src/pyinterp/core/tests/math_kriging.cpp b/src/pyinterp/core/tests/math_kriging.cpp index 28cbcacf..3eab48bd 100644 --- a/src/pyinterp/core/tests/math_kriging.cpp +++ b/src/pyinterp/core/tests/math_kriging.cpp @@ -31,5 +31,5 @@ TEST(math_loess, loess) { auto interpolator = math::Kriging(1, 0.25, math::kMatern_52); auto x = interpolator.universal_kriging(coordinates, values, query); - // EXPECT_NEAR(x, 0.994599, 1e-6); + EXPECT_NEAR(x, 0.0388, 1e-3); } diff --git a/src/pyinterp/fill.py b/src/pyinterp/fill.py index d097a0b4..2c250dde 100644 --- a/src/pyinterp/fill.py +++ b/src/pyinterp/fill.py @@ -4,13 +4,15 @@ """ from __future__ import annotations -from typing import Any +from typing import TYPE_CHECKING, Any import concurrent.futures import numpy from . import core, grid, interface -from .typing import NDArray + +if TYPE_CHECKING: + from .typing import NDArray def loess(mesh: grid.Grid2D | grid.Grid3D, diff --git a/src/pyinterp/geodetic/__init__.py b/src/pyinterp/geodetic/__init__.py index aa2fb166..f972c8ed 100644 --- a/src/pyinterp/geodetic/__init__.py +++ b/src/pyinterp/geodetic/__init__.py @@ -4,7 +4,10 @@ """ from __future__ import annotations -import numpy +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + import numpy from .. 
import interface from ..core import geodetic @@ -119,7 +122,7 @@ class Polygon(geodetic.Polygon): def __init__(self, outer: list[Point], inners: list[list[Point]] | None = None) -> None: - super().__init__(outer, inners) # type: ignore + super().__init__(outer, inners) class MultiPolygon(geodetic.MultiPolygon): @@ -153,7 +156,8 @@ def normalize_longitudes(lon: numpy.ndarray, if lon.flags.writeable: geodetic.normalize_longitudes(lon, min_lon) return lon - return geodetic.normalize_longitudes(lon, min_lon) # type: ignore + return geodetic.normalize_longitudes( # type: ignore[return-value] + lon, min_lon) class RTree(geodetic.RTree): diff --git a/src/pyinterp/geohash/converter.py b/src/pyinterp/geohash/converter.py index 405934c1..01d2eeec 100644 --- a/src/pyinterp/geohash/converter.py +++ b/src/pyinterp/geohash/converter.py @@ -42,9 +42,13 @@ def to_xarray(hashes: numpy.ndarray, data: numpy.ndarray) -> xarray.DataArray: return xarray.DataArray( grid, dims=('lat', 'lon'), - coords=dict(lon=xarray.DataArray(x_axis, - dims=('lon', ), - attrs=dict(units='degrees_north')), - lat=xarray.DataArray(y_axis, - dims=('lat', ), - attrs=dict(units='degrees_east')))) + coords={ + 'lon': + xarray.DataArray(x_axis, + dims=('lon', ), + attrs={'units': 'degrees_north'}), + 'lat': + xarray.DataArray(y_axis, + dims=('lat', ), + attrs={'units': 'degrees_east'}) + }) diff --git a/src/pyinterp/grid.py b/src/pyinterp/grid.py index 8c0fcc08..1119313a 100644 --- a/src/pyinterp/grid.py +++ b/src/pyinterp/grid.py @@ -62,7 +62,7 @@ class Grid2D: def __init__(self, *args, increasing_axes: str | None = None): prefix = '' - for idx, item in enumerate(args): + for item in args: if isinstance(item, core.TemporalAxis): prefix = 'Temporal' break diff --git a/src/pyinterp/histogram2d.py b/src/pyinterp/histogram2d.py index 6472c957..f03c9571 100644 --- a/src/pyinterp/histogram2d.py +++ b/src/pyinterp/histogram2d.py @@ -67,7 +67,8 @@ def __init__(self, x: core.Axis, y: core.Axis, bin_counts: int | None = 
None, - dtype: numpy.dtype | None = numpy.dtype('float64')): + dtype: numpy.dtype | None = None): + dtype = dtype or numpy.dtype('float64') if dtype == numpy.dtype('float64'): self._instance: Histogram2DTyped = core.Histogram2DFloat64( x, y, bin_counts) @@ -105,7 +106,7 @@ def __add__(self, other: Histogram2D) -> Histogram2D: if self.dtype != other.dtype: raise ValueError('dtype mismatch') result = copy.copy(self) - result._instance += other._instance # type: ignore + result._instance += other._instance # type: ignore[operator] return result def push(self, x: numpy.ndarray, y: numpy.ndarray, diff --git a/src/pyinterp/interface.py b/src/pyinterp/interface.py index 4861a55e..9ace82bf 100644 --- a/src/pyinterp/interface.py +++ b/src/pyinterp/interface.py @@ -36,27 +36,30 @@ def _core_class_suffix(x: numpy.ndarray, handle_integer: bool = False) -> str: str: the class suffix """ dtype = x.dtype.type + result: str if dtype == numpy.float64: - return 'Float64' - if dtype == numpy.float32: - return 'Float32' - if dtype == numpy.int64: - return 'Float64' - if dtype == numpy.uint64: - return 'Float64' - if dtype == numpy.int32: - return 'Float32' - if dtype == numpy.uint32: - return 'Float32' - if dtype == numpy.int16: - return 'Float32' - if dtype == numpy.uint16: - return 'Float32' - if dtype == numpy.int8: - return 'Float32' if not handle_integer else 'Int8' - if dtype == numpy.uint8: - return 'Float32' if not handle_integer else 'UInt8' - raise ValueError('Unhandled dtype: ' + str(dtype)) + result = 'Float64' + elif dtype == numpy.float32: + result = 'Float32' + elif dtype == numpy.int64: + result = 'Float64' + elif dtype == numpy.uint64: + result = 'Float64' + elif dtype == numpy.int32: + result = 'Float32' + elif dtype == numpy.uint32: + result = 'Float32' + elif dtype == numpy.int16: + result = 'Float32' + elif dtype == numpy.uint16: + result = 'Float32' + elif dtype == numpy.int8: + result = 'Float32' if not handle_integer else 'Int8' + elif dtype == numpy.uint8: + 
result = 'Float32' if not handle_integer else 'UInt8' + else: + raise ValueError('Unhandled dtype: ' + str(dtype)) + return result def _core_function(function: str, instance: object) -> str: @@ -147,11 +150,11 @@ def _core_window_function(wf: str | None, defaults = {'gaussian': None, 'lanczos': 1, 'parzen': 0} arg = defaults[wf] - if wf == 'lanczos' and arg < 1: # type: ignore + if wf == 'lanczos' and arg < 1: # type: ignore[operator] raise ValueError(f'The argument of the function {wf!r} must be ' 'greater than 1') - if wf == 'parzen' and arg < 0: # type: ignore + if wf == 'parzen' and arg < 0: # type: ignore[operator] raise ValueError(f'The argument of the function {wf!r} must be ' 'greater than 0') diff --git a/src/pyinterp/orbit.py b/src/pyinterp/orbit.py index d76c85bc..617f96b4 100644 --- a/src/pyinterp/orbit.py +++ b/src/pyinterp/orbit.py @@ -8,13 +8,16 @@ """ from __future__ import annotations -from collections.abc import Iterator +from typing import TYPE_CHECKING import dataclasses import numpy +if TYPE_CHECKING: + from collections.abc import Iterator + from .typing import NDArray, NDArrayDateTime, NDArrayTimeDelta + from . 
import core, geodetic -from .typing import NDArray, NDArrayDateTime, NDArrayTimeDelta def interpolate( @@ -44,8 +47,8 @@ def interpolate( wgs = wgs or geodetic.Coordinates() mz = wgs.spheroid.semi_major_axis / wgs.spheroid.semi_minor_axis() x, y, z = wgs.lla_to_ecef( - lon, # type: ignore - lat, # type: ignore + lon, + lat, numpy.full_like(lon, height), ) @@ -57,25 +60,25 @@ def interpolate( x = core.interpolate1d( x_axis, x, - xi, # type: ignore + xi, half_window_size=half_window_size, ) y = core.interpolate1d( x_axis, y, - xi, # type: ignore + xi, half_window_size=half_window_size, ) z = core.interpolate1d( x_axis, z, - xi, # type: ignore + xi, half_window_size=half_window_size, ) r = core.interpolate1d( x_axis, r, - xi, # type: ignore + xi, half_window_size=half_window_size, ) @@ -109,7 +112,7 @@ def _rearrange_orbit( Returns: The orbit rearranged starting from pass 1. """ - dy = numpy.roll(lat, 1) - lat # type: ignore + dy = numpy.roll(lat, 1) - lat indexes = numpy.where((dy < 0) & (numpy.roll(dy, 1) >= 0))[0] # If the orbit is already starting from pass 1, nothing to do @@ -140,9 +143,9 @@ def _calculate_pass_time(lat: NDArray, Returns: Start date of half-orbits. """ - dy = numpy.roll(lat, 1) - lat # type: ignore + dy = numpy.roll(lat, 1) - lat indexes = numpy.where(((dy < 0) & (numpy.roll(dy, 1) >= 0)) - | ((dy > 0) # type: ignore + | ((dy > 0) & (numpy.roll(dy, 1) <= 0)))[0] # The duration of the first pass is zero. 
indexes[0] = 0 @@ -195,8 +198,8 @@ def orbit_duration(self) -> numpy.timedelta64: def curvilinear_distance(self) -> numpy.ndarray: """Get the curvilinear distance.""" return geodetic.LineString( - self.longitude, # type: ignore - self.latitude, # type: ignore + self.longitude, + self.latitude, ).curvilinear_distance(strategy='thomas', wgs=self.wgs) def pass_duration(self, number: int) -> numpy.timedelta64: @@ -346,7 +349,7 @@ def mask(self, requirement_bounds: tuple[float, float]) -> NDArray: valid[(numpy.abs(self.x_ac) >= requirement_bounds[0]) & (numpy.abs(self.x_ac) <= requirement_bounds[1])] = 1 along_track = numpy.full(self.lon_nadir.shape, 1, dtype=numpy.float64) - return along_track[:, numpy.newaxis] * valid # type: ignore + return along_track[:, numpy.newaxis] * valid def insert_central_pixel(self) -> Swath: """Return a swath with a central pixel dividing the swath in two by the @@ -388,8 +391,8 @@ def _equator_properties(lon_nadir: NDArray, lat_nadir: NDArray, # Calculate the position of the satellite at the equator intersection = geodetic.LineString( - lon1, # type: ignore - lat1, # type: ignore + lon1, + lat1, ).intersection( geodetic.LineString(numpy.array([lon1[0] - 0.5, lon1[1] + 0.5]), numpy.array([0, 0], dtype='float64'))) @@ -402,8 +405,8 @@ def _equator_properties(lon_nadir: NDArray, lat_nadir: NDArray, lon1 = numpy.insert(lon1, 1, point.lon) lat1 = numpy.insert(lat1, 1, 0) x_al = geodetic.LineString( - lon1, # type: ignore - lat1, # type: ignore + lon1, + lat1, ).curvilinear_distance(strategy='thomas') # Pop the along track distance at the equator @@ -412,8 +415,8 @@ def _equator_properties(lon_nadir: NDArray, lat_nadir: NDArray, return EquatorCoordinates( point.lon, - numpy.interp(x_eq, x_al, time[i0:i1 + 1].astype('i8')).astype( - time.dtype), # type: ignore + numpy.interp( # type: ignore[arg-type] + x_eq, x_al, time[i0:i1 + 1].astype('i8')).astype(time.dtype), ) @@ -446,7 +449,7 @@ def calculate_orbit( # If the first point of the given orbit 
starts at the equator, we need to # skew this first pass. if -40 <= lat_nadir[0] <= 40: - dy = numpy.roll(lat_nadir, 1) - lat_nadir # type: ignore + dy = numpy.roll(lat_nadir, 1) - lat_nadir indexes = numpy.where(((dy < 0) & (numpy.roll(dy, 1) >= 0)) | ((dy > 0) & (numpy.roll(dy, 1) <= 0)))[0] @@ -462,14 +465,13 @@ def calculate_orbit( time[-1], numpy.timedelta64(500, 'ms'), dtype=time.dtype) - lon_nadir, lat_nadir = interpolate( - lon_nadir, # type: ignore - lat_nadir, - time.astype('i8'), - time_hr.astype('i8'), - height=height, - wgs=wgs, - half_window_size=50) + lon_nadir, lat_nadir = interpolate(lon_nadir, + lat_nadir, + time.astype('i8'), + time_hr.astype('i8'), + height=height, + wgs=wgs, + half_window_size=50) time = time_hr if cycle_duration is not None: @@ -482,15 +484,15 @@ def calculate_orbit( # Rearrange orbit starting from pass 1 lon_nadir, lat_nadir, time = _rearrange_orbit( time[-1] + time[1] - time[0], - lon_nadir, # type: ignore + lon_nadir, lat_nadir, - time, # type: ignore + time, ) # Calculates the along track distance (km) distance = geodetic.LineString( - lon_nadir, # type: ignore[arg-type] - lat_nadir, # type: ignore[arg-type] + lon_nadir, + lat_nadir, ).curvilinear_distance(strategy='thomas', wgs=spheroid) * 1e-3 # Interpolate the final orbit according the given along track resolution @@ -498,19 +500,16 @@ def calculate_orbit( distance[-2], along_track_resolution or 2, dtype=distance.dtype) - lon_nadir, lat_nadir = interpolate( - lon_nadir[:-1], - lat_nadir[:-1], - distance[:-1], # type: ignore - x_al, # type: ignore - height=height, - wgs=wgs, - half_window_size=10) - - time = numpy.interp( - x_al, # type: ignore - distance[:-1], # type: ignore - time[:-1].astype('i8')).astype(time.dtype) + lon_nadir, lat_nadir = interpolate(lon_nadir[:-1], + lat_nadir[:-1], + distance[:-1], + x_al, + height=height, + wgs=wgs, + half_window_size=10) + + time = numpy.interp(x_al, distance[:-1], + time[:-1].astype('i8')).astype(time.dtype) return Orbit( 
height, @@ -518,9 +517,9 @@ def calculate_orbit( lon_nadir, numpy.sort(_calculate_pass_time(lat_nadir, time)), time, - x_al, # type: ignore - wgs.spheroid, # type: ignore - ) # type: ignore + x_al, + wgs.spheroid, # type: ignore[arg-type] + ) def calculate_pass( @@ -613,8 +612,8 @@ def calculate_swath( x_ac = numpy.full((len(half_orbit), x_ac.size), x_ac) lon, lat = core.geodetic.calculate_swath( - half_orbit.lon_nadir, # type: ignore - half_orbit.lat_nadir, # type: ignore + half_orbit.lon_nadir, + half_orbit.lat_nadir, across_track_resolution * 1e3, half_gap * 1e3, half_swath, @@ -627,7 +626,7 @@ def calculate_swath( half_orbit.time, half_orbit.x_al, half_orbit.equator_coordinates, - lon, # type: ignore - lat, # type: ignore + lon, + lat, x_ac, ) diff --git a/src/pyinterp/period.py b/src/pyinterp/period.py index 26a112e9..e13397f2 100644 --- a/src/pyinterp/period.py +++ b/src/pyinterp/period.py @@ -4,13 +4,15 @@ """ from __future__ import annotations -from typing import Any +from typing import TYPE_CHECKING, Any, ClassVar import re import numpy +if TYPE_CHECKING: + from .typing import NDArray, NDArrayDateTime, NDArrayTimeDelta + from . import core -from .typing import NDArray, NDArrayDateTime, NDArrayTimeDelta # Parse the unit of numpy.timedelta64. PATTERN = re.compile(r'(?:datetime|timedelta)64\[(\w+)\]').search @@ -290,7 +292,8 @@ class PeriodList: Args: periods: A list of periods. 
""" - DTYPE = [('begin', numpy.int64), ('last', numpy.int64)] + DTYPE: ClassVar[list[tuple[str, type]]] = [('begin', numpy.int64), + ('last', numpy.int64)] def __init__(self, periods: NDArrayDateTime | core.PeriodList, diff --git a/src/pyinterp/statistics/descriptive_descriptive.py b/src/pyinterp/statistics/descriptive_descriptive.py index 9a56adfa..9c47d6d2 100644 --- a/src/pyinterp/statistics/descriptive_descriptive.py +++ b/src/pyinterp/statistics/descriptive_descriptive.py @@ -8,13 +8,22 @@ """ from __future__ import annotations -from typing import Any -from collections.abc import Iterable +from typing import TYPE_CHECKING, Any +import sys + +# Self is unavailable in Python 3.10 +if sys.version_info[:2] > (3, 10): + from typing import Self +else: + Self = 'DescriptiveStatistics' import copy import dask.array.core import numpy +if TYPE_CHECKING: + from collections.abc import Iterable + from .. import core @@ -34,14 +43,13 @@ def _process_block(attr, x, w, axis): drop_axis = list(range(values.ndim))[1:] - return dask.array.core.map_blocks( - _process_block, - attr, - values, - weights, - axis, - drop_axis=drop_axis, - dtype='object').sum().compute() # type: ignore + return dask.array.core.map_blocks(_process_block, + attr, + values, + weights, + axis, + drop_axis=drop_axis, + dtype='object').sum().compute() class DescriptiveStatistics: @@ -71,7 +79,7 @@ class DescriptiveStatistics: Numerically stable, scalable formulas for parallel and online computation of higher-order multivariate central moments with arbitrary weights. - Comput Stat 31, 1305–1325, + Comput Stat 31, 1305-1325, 2016, https://doi.org/10.1007/s00180-015-0637-z """ @@ -112,7 +120,7 @@ def copy(self) -> DescriptiveStatistics: result._instance = self._instance.__copy__() return result - def __iadd__(self, other: Any) -> DescriptiveStatistics: + def __iadd__(self, other: Any) -> Self: """Adds a new descriptive statistics container to the current one. 
Returns: @@ -122,7 +130,7 @@ def __iadd__(self, other: Any) -> DescriptiveStatistics: if type(self._instance) != type(other._instance): # noqa: E721 raise TypeError( 'Descriptive statistics must have the same type') - self._instance += other._instance # type: ignore + self._instance += other._instance # type: ignore[operator] return self raise TypeError('unsupported operand type(s) for +=' f": '{type(self)}' and '{type(other)}'") diff --git a/src/pyinterp/statistics/streaming_histogram.py b/src/pyinterp/statistics/streaming_histogram.py index 087e16d7..ef5f4be8 100644 --- a/src/pyinterp/statistics/streaming_histogram.py +++ b/src/pyinterp/statistics/streaming_histogram.py @@ -8,12 +8,20 @@ """ from __future__ import annotations -from typing import Any -from collections.abc import Iterable +from typing import TYPE_CHECKING, Any +import sys + +if sys.version_info[:2] > (3, 10): + from typing import Self +else: + Self = 'StreamingHistogram' import dask.array.core import numpy +if TYPE_CHECKING: + from collections.abc import Iterable + from .. import core @@ -37,15 +45,14 @@ def _process_block(attr, x, w, axis, bin_count): drop_axis = list(range(values.ndim))[1:] - return dask.array.core.map_blocks( - _process_block, - attr, - values, - weights, - axis, - bin_count, - drop_axis=drop_axis, - dtype='object').sum().compute() # type: ignore + return dask.array.core.map_blocks(_process_block, + attr, + values, + weights, + axis, + bin_count, + drop_axis=drop_axis, + dtype='object').sum().compute() class StreamingHistogram: @@ -123,7 +130,7 @@ def __init__(self, axis=axis, bin_count=bin_count) - def __iadd__(self, other: Any) -> StreamingHistogram: + def __iadd__(self, other: Any) -> Self: """Adds a new histogram to the current one.
Args: @@ -135,7 +142,7 @@ def __iadd__(self, other: Any) -> StreamingHistogram: if isinstance(other, StreamingHistogram): if type(self._instance) != type(other._instance): # noqa: E721 raise TypeError('StreamingHistogram types must match') - self._instance += other._instance # type: ignore + self._instance += other._instance # type: ignore[operator] else: raise TypeError('unsupported operand type(s) for +=' f": '{type(self)}' and '{type(other)}'") diff --git a/src/pyinterp/tests/__init__.py b/src/pyinterp/tests/__init__.py index 041248dc..7353a7c0 100644 --- a/src/pyinterp/tests/__init__.py +++ b/src/pyinterp/tests/__init__.py @@ -2,7 +2,6 @@ # # All rights reserved. Use of this source code is governed by a # BSD-style license that can be found in the LICENSE file. -from typing import Optional import json import pathlib @@ -67,9 +66,10 @@ def _decode_datetime64(array: numpy.ndarray) -> numpy.ndarray: with path.open('r') as stream: data = json.load(stream) for item in ('ud', 'vd'): - data['data_vars'][item]['data'] = list( - map(lambda x: x if x is not None else float('nan'), - data['data_vars'][item]['data'])) + data['data_vars'][item]['data'] = [ + x if x is not None else float('nan') + for x in data['data_vars'][item]['data'] + ] ds = xarray.Dataset.from_dict(data) ds['time'] = xarray.DataArray(_decode_datetime64(ds['time'].values), dims=['time'], diff --git a/src/pyinterp/tests/core/test_axis.py b/src/pyinterp/tests/core/test_axis.py index 369fb44c..44e9ee13 100644 --- a/src/pyinterp/tests/core/test_axis.py +++ b/src/pyinterp/tests/core/test_axis.py @@ -93,6 +93,8 @@ def test_axis_accessor(): a = core.Axis(MERCATOR_LATITUDES, is_circle=True) assert not a.is_circle + assert isinstance(str(a), str) + with pytest.raises(RuntimeError): a.increment() diff --git a/src/pyinterp/tests/core/test_bivariate.py b/src/pyinterp/tests/core/test_bivariate.py index 9f4970c6..a8419467 100644 --- a/src/pyinterp/tests/core/test_bivariate.py +++ 
b/src/pyinterp/tests/core/test_bivariate.py @@ -58,11 +58,11 @@ def test_grid2d_pickle(): np.ma.fix_invalid(grid.array) == np.ma.fix_invalid(other.array)) -def run_bivariate(interpolator, filename, visualize, dump): +def run_bivariate(step, interpolator, filename, visualize, dump): """Testing an interpolation method.""" grid = load_data() - lon = np.arange(-180, 180, 1 / 3.0) + 1 / 3.0 - lat = np.arange(-90, 90, 1 / 3.0) + 1 / 3.0 + lon = np.arange(-180, 180, step) + 1 / 3 + lat = np.arange(-90, 90, step) + 1 / 3 x, y = np.meshgrid(lon, lat, indexing='ij') z0 = core.bivariate_float64(grid, @@ -103,12 +103,14 @@ def test_bivariate_interpolator(pytestconfig): """Testing of different interpolation methods.""" visualize = pytestconfig.getoption('visualize') dump = pytestconfig.getoption('dump') - a = run_bivariate(core.Nearest2D(), 'mss_bivariate_nearest', visualize, - dump) - b = run_bivariate(core.Bilinear2D(), 'mss_bivariate_bilinear', visualize, - dump) - c = run_bivariate(core.InverseDistanceWeighting2D(), 'mss_bivariate_idw', + measure_coverage = pytestconfig.getoption('measure_coverage') + step = 10 if measure_coverage else 1 / 3 + a = run_bivariate(step, core.Nearest2D(), 'mss_bivariate_nearest', visualize, dump) + b = run_bivariate(step, core.Bilinear2D(), 'mss_bivariate_bilinear', + visualize, dump) + c = run_bivariate(step, core.InverseDistanceWeighting2D(), + 'mss_bivariate_idw', visualize, dump) assert (a - b).std() != 0 assert (a - c).std() != 0 assert (b - c).std() != 0 @@ -129,9 +131,11 @@ def test_spline_interpolator(pytestconfig): """Testing of different spline interpolation methods.""" visualize = pytestconfig.getoption('visualize') dump = pytestconfig.getoption('dump') + measure_coverage = pytestconfig.getoption('measure_coverage') + step = 10 if measure_coverage else 1 / 3 grid = load_data() - lon = np.arange(-180, 180, 1 / 3.0) + 1 / 3.0 - lat = np.arange(-90, 90, 1 / 3.0) + 1 / 3.0 + lon = np.arange(-180, 180, step) + 1 / 3 + lat = np.arange(-90, 
90, step) + 1 / 3 x, y = np.meshgrid(lon, lat, indexing='ij') z0 = core.spline_float64(grid, x.ravel(), @@ -166,11 +170,13 @@ def test_spline_interpolator(pytestconfig): num_threads=0) -def test_spline_degraded(): +def test_spline_degraded(pytestconfig): """Testing of different spline interpolation methods.""" + measure_coverage = pytestconfig.getoption('measure_coverage') + step = 10 if measure_coverage else 1 / 3 grid = load_data(is_circle=False) - lon = np.arange(-190, -170, 1 / 3.0) - lat = np.arange(-40, 40, 1 / 3.0) + 1 / 3.0 + lon = np.arange(-190, -170, step) + lat = np.arange(-40, 40, step) + 1 / 3 x, y = np.meshgrid(lon, lat, indexing='ij') with pytest.raises(ValueError): diff --git a/src/pyinterp/tests/core/test_geodetic_rtree.py b/src/pyinterp/tests/core/test_geodetic_rtree.py index 8cd8f4ff..c495983a 100644 --- a/src/pyinterp/tests/core/test_geodetic_rtree.py +++ b/src/pyinterp/tests/core/test_geodetic_rtree.py @@ -52,9 +52,11 @@ def load_data(packing=True): def test_geodetic_rtree_idw(pytestconfig): """Interpolation test.""" + measure_coverage = pytestconfig.getoption('measure_coverage') + step = 10 if measure_coverage else 1 mesh = load_data() - lon = np.arange(-180, 180, 1) - lat = np.arange(-90, 90, 1) + lon = np.arange(-180, 180, step) + lat = np.arange(-90, 90, step) x, y = np.meshgrid(lon, lat, indexing='ij') z, _ = mesh.inverse_distance_weighting(x.ravel(), y.ravel(), @@ -71,9 +73,11 @@ def test_geodetic_rtree_idw(pytestconfig): def test_geodetic_rtree_rbf(pytestconfig): """Interpolation test.""" + measure_coverage = pytestconfig.getoption('measure_coverage') + step = 10 if measure_coverage else 1 mesh = load_data() - lon = np.arange(-180, 180, 1) - lat = np.arange(-90, 90, 1) + lon = np.arange(-180, 180, step) + lat = np.arange(-90, 90, step) x, y = np.meshgrid(lon, lat, indexing='ij') z, _ = mesh.radial_basis_function(x.ravel(), y.ravel(), @@ -93,9 +97,11 @@ def test_geodetic_rtree_rbf(pytestconfig): def 
test_geodetic_rtree_window_function(pytestconfig): """Interpolation test.""" + measure_coverage = pytestconfig.getoption('measure_coverage') + step = 10 if measure_coverage else 1 mesh = load_data() - lon = np.arange(-180, 180, 1) - lat = np.arange(-90, 90, 1) + lon = np.arange(-180, 180, step) + lat = np.arange(-90, 90, step) x, y = np.meshgrid(lon, lat, indexing='ij') z0, _ = mesh.window_function(x.ravel(), y.ravel(), diff --git a/src/pyinterp/tests/core/test_geohash.py b/src/pyinterp/tests/core/test_geohash.py index 291cd0d9..d373a0b2 100644 --- a/src/pyinterp/tests/core/test_geohash.py +++ b/src/pyinterp/tests/core/test_geohash.py @@ -96,3 +96,21 @@ def test_class(): point = instance.center() assert lat == pytest.approx(point.lat, abs=1e-6) assert lon == pytest.approx(point.lon, abs=1e-6) + assert instance.area() != 0 + + with pytest.raises(ValueError): + GeoHash(0, 0, 32) + + with pytest.raises(ValueError): + GeoHash.from_string('0123456789012345678901234567890123456789') + + with pytest.raises(ValueError): + GeoHash.from_string('%%%%%') + + +def test_error_with_precision(): + error = GeoHash.error_with_precision(1) + assert error == (45.0, 45.0) + + with pytest.raises(ValueError): + GeoHash.error_with_precision(32) diff --git a/src/pyinterp/tests/core/test_rtree.py b/src/pyinterp/tests/core/test_rtree.py index b75fb9eb..7ec2c67f 100644 --- a/src/pyinterp/tests/core/test_rtree.py +++ b/src/pyinterp/tests/core/test_rtree.py @@ -5,6 +5,8 @@ import os import pickle +import pytest + try: import matplotlib.colors import matplotlib.pyplot @@ -57,9 +59,11 @@ def load_data(packing=True): def test_rtree_idw(pytestconfig): """Interpolation test.""" + measure_coverage = pytestconfig.getoption('measure_coverage') + step = 20 if measure_coverage else 1 mesh = load_data() - lon = np.arange(-180, 180, 1 / 3.0, dtype='float32') + 1 / 3.0 - lat = np.arange(-90, 90, 1 / 3.0, dtype='float32') + 1 / 3.0 + lon = np.arange(-180, 180, step, dtype='float32') + 1 / 3 + lat = 
np.arange(-90, 90, step, dtype='float32') + 1 / 3 x, y = np.meshgrid(lon, lat, indexing='ij') z0, _ = mesh.inverse_distance_weighting(np.vstack( (x.ravel(), y.ravel())).T, @@ -83,9 +87,11 @@ def test_rtree_idw(pytestconfig): def test_rtree_rbf(pytestconfig): """Interpolation test.""" + measure_coverage = pytestconfig.getoption('measure_coverage') + step = 20 if measure_coverage else 1 mesh = load_data() - lon = np.arange(-180, 180, 1 / 3.0, dtype='float32') + 1 / 3.0 - lat = np.arange(-90, 90, 1 / 3.0, dtype='float32') + 1 / 3.0 + lon = np.arange(-180, 180, step, dtype='float32') + 1 / 3 + lat = np.arange(-90, 90, step, dtype='float32') + 1 / 3 x, y = np.meshgrid(lon, lat, indexing='ij') z0, _ = mesh.radial_basis_function( np.vstack((x.ravel(), y.ravel())).T, @@ -115,9 +121,11 @@ def test_rtree_rbf(pytestconfig): def test_rtree_window_function(pytestconfig): """Interpolation test.""" + measure_coverage = pytestconfig.getoption('measure_coverage') + step = 20 if measure_coverage else 1 mesh = load_data() - lon = np.arange(-180, 180, 1 / 3.0, dtype='float32') + 1 / 3.0 - lat = np.arange(-90, 90, 1 / 3.0, dtype='float32') + 1 / 3.0 + lon = np.arange(-180, 180, step, dtype='float32') + 1 / 3 + lat = np.arange(-90, 90, step, dtype='float32') + 1 / 3 x, y = np.meshgrid(lon, lat, indexing='ij') z0, _ = mesh.window_function(np.vstack((x.ravel(), y.ravel())).T, within=False, @@ -141,9 +149,11 @@ def test_rtree_window_function(pytestconfig): def test_rtree_kriging(pytestconfig): """Interpolation test.""" + measure_coverage = pytestconfig.getoption('measure_coverage') + step = 20 if measure_coverage else 1 mesh = load_data() - lon = np.arange(-180, 180, 1 / 3.0, dtype='float32') + 1 / 3.0 - lat = np.arange(-90, 90, 1 / 3.0, dtype='float32') + 1 / 3.0 + lon = np.arange(-180, 180, step, dtype='float32') + 1 / 3 + lat = np.arange(-90, 90, step, dtype='float32') + 1 / 3 x, y = np.meshgrid(lon, lat, indexing='ij') z0, _ = mesh.universal_kriging( np.vstack((x.ravel(), 
y.ravel())).T, @@ -206,3 +216,34 @@ def test_rtree_pickle(): interpolator = load_data() other = pickle.loads(pickle.dumps(interpolator)) assert isinstance(other, core.RTree3DFloat32) + + +def test_rtree_ecef(): + x = np.array([70, 55, 35, 55, 65, 85], dtype=np.float32) + y = np.array([33, 30, 35, 45, 63, 50], dtype=np.float32) + z = np.array([0, 1, 2, 3, 4, 5], dtype=np.float32) + + with pytest.raises(ValueError): + core.RTree3DFloat32(core.geodetic.Spheroid(), ecef=True) + + instance = core.RTree3DFloat32(ecef=True) + instance.packing(np.vstack((x, y, z * 0)).T, z) + distances, values = instance.query( + np.vstack((np.array([70]), np.array([33]), np.array([0]))).T) + assert distances[0, 0] == 0 + assert distances[0, 1] == np.sqrt((70 - 55)**2 + (33 - 30)**2) + assert distances[0, 2] == np.sqrt((70 - 55)**2 + (33 - 45)**2) + assert distances[0, 3] == np.sqrt((70 - 85)**2 + (33 - 50)**2) + assert values[0, 0] == 0 + assert values[0, 1] == 1 + assert values[0, 2] == 3 + assert values[0, 3] == 5 + points, values = instance.value( + np.vstack((np.array([70]), np.array([33]), np.array([0]))).T) + assert np.all(points == np.array([[ + [70, 33, 0], + [55, 30, 0], + [55, 45, 0], + [85, 50, 0], + ]])) + assert np.all(values == np.array([[0, 1, 3, 5]])) diff --git a/src/pyinterp/tests/core/test_trivariate.py b/src/pyinterp/tests/core/test_trivariate.py index 15ebe87f..6d9480b9 100644 --- a/src/pyinterp/tests/core/test_trivariate.py +++ b/src/pyinterp/tests/core/test_trivariate.py @@ -69,11 +69,11 @@ def test_grid3d_pickle(): np.ma.fix_invalid(grid.array) == np.ma.fix_invalid(other.array)) -def run_interpolator(interpolator, filename, visualize, dump): +def run_interpolator(step, interpolator, filename, visualize, dump): """Testing an interpolation method.""" grid = load_data() - lon = np.arange(-180, 180, 1 / 3.0) + 1 / 3.0 - lat = np.arange(-90, 90, 1 / 3.0) + 1 / 3.0 + lon = np.arange(-180, 180, step) + 1 / 3 + lat = np.arange(-90, 90, step) + 1 / 3 time = 898500 + 3 x, y, 
t = np.meshgrid(lon, lat, time, indexing='ij') z0 = core.trivariate_float64(grid, @@ -100,9 +100,11 @@ def run_interpolator(interpolator, filename, visualize, dump): def test_trivariate_spline(pytestconfig): """Testing of the spline interpolation.""" + measure_coverage = pytestconfig.getoption('measure_coverage') + step = 10 if measure_coverage else 1 / 3 grid = load_data() - lon = np.arange(-180, 180, 1 / 3.0) + 1 / 3.0 - lat = np.arange(-80, 80, 1 / 3.0) + 1 / 3.0 + lon = np.arange(-180, 180, step) + 1 / 3 + lat = np.arange(-80, 80, step) + 1 / 3 time = 898524 + 3 x, y, t = np.meshgrid(lon, lat, time, indexing='ij') z0 = core.spline_float64(grid, @@ -130,12 +132,14 @@ def test_trivariate_spline(pytestconfig): 'tcw_spline.png') -def test_grid3d_bounds_error(): +def test_grid3d_bounds_error(pytestconfig): """Test of the detection on interpolation outside bounds.""" + measure_coverage = pytestconfig.getoption('measure_coverage') + step = 10 if measure_coverage else 1 / 3 grid = load_data() interpolator = core.Bilinear3D() - lon = np.arange(-180, 180, 1 / 3.0) + 1 / 3.0 - lat = np.arange(-90, 90 + 1, 1 / 3.0) + 1 / 3.0 + lon = np.arange(-180, 180, step) + 1 / 3 + lat = np.arange(-90, 90 + 1, step) + 1 / 3 time = 898500 + 3 x, y, t = np.meshgrid(lon, lat, time, indexing='ij') core.trivariate_float64( @@ -159,10 +163,12 @@ def test_grid3d_bounds_error(): def test_grid3d_z_method(pytestconfig): """Test of the interpolation method used on Z-axis.""" dump = pytestconfig.getoption('dump') + measure_coverage = pytestconfig.getoption('measure_coverage') + step = 10 if measure_coverage else 1 / 3 grid = load_data(temporal_axis=True) interpolator = core.TemporalBilinear3D() - lon = np.arange(-180, 180, 1 / 3.0) + 1 / 3.0 - lat = np.arange(-90, 90 + 1, 1 / 3.0) + 1 / 3.0 + lon = np.arange(-180, 180, step) + 1 / 3 + lat = np.arange(-90, 90 + 1, step) + 1 / 3 time = np.array(['2002-07-02T15'], dtype='datetime64[h]').astype('int64') x, y, t = np.meshgrid(lon, lat, time, 
indexing='ij') z0 = core.trivariate_float64( @@ -210,23 +216,27 @@ def test_grid3d_interpolator(pytestconfig): """Testing of different interpolation methods.""" visualize = pytestconfig.getoption('visualize') dump = pytestconfig.getoption('dump') - a = run_interpolator(core.Nearest3D(), 'tcw_trivariate_nearest', visualize, - dump) - b = run_interpolator(core.Bilinear3D(), 'tcw_trivariate_bilinear', + measure_coverage = pytestconfig.getoption('measure_coverage') + step = 10 if measure_coverage else 1 / 3 + a = run_interpolator(step, core.Nearest3D(), 'tcw_trivariate_nearest', visualize, dump) - c = run_interpolator(core.InverseDistanceWeighting3D(), + b = run_interpolator(step, core.Bilinear3D(), 'tcw_trivariate_bilinear', + visualize, dump) + c = run_interpolator(step, core.InverseDistanceWeighting3D(), 'tcw_trivariate_idw', visualize, dump) assert (a - b).std() != 0 assert (a - c).std() != 0 assert (b - c).std() != 0 -def test_invalid_data(): +def test_invalid_data(pytestconfig): """Testing of the interpolation with invalid data.""" + measure_coverage = pytestconfig.getoption('measure_coverage') + step = 10 if measure_coverage else 1 / 3 grid = load_data(temporal_axis=True) interpolator = core.TemporalBilinear3D() - lon = np.arange(-180, 180, 1 / 3.0) + 1 / 3.0 - lat = np.arange(-90, 90 + 1, 1 / 3.0) + 1 / 3.0 + lon = np.arange(-180, 180, step) + 1 / 3 + lat = np.arange(-90, 90 + 1, step) + 1 / 3 time = np.array(['2002-07-02T15'], dtype='datetime64[h]').astype('int64') x, y, t = np.meshgrid(lon, lat, time, indexing='ij') z0 = core.trivariate_float64( diff --git a/src/pyinterp/tests/test_2d.py b/src/pyinterp/tests/test_2d.py index 2857517a..70dc6bf9 100644 --- a/src/pyinterp/tests/test_2d.py +++ b/src/pyinterp/tests/test_2d.py @@ -128,10 +128,12 @@ def test_biavariate(pytestconfig): def test_bicubic(pytestconfig): dump = pytestconfig.getoption('dump') + measure_coverage = pytestconfig.getoption('measure_coverage') + step = 10 if measure_coverage else 1 grid = 
xr_backend.Grid2D(load_grid2d().mss) - lon = np.arange(-180, 180, 1) + 1 / 3.0 - lat = np.arange(-90, 90, 1) + 1 / 3.0 + lon = np.arange(-180, 180, step) + 1 / 3 + lat = np.arange(-90, 90, step) + 1 / 3 x, y = np.meshgrid(lon, lat, indexing='ij') z = grid.bicubic(collections.OrderedDict(lon=x.ravel(), lat=y.ravel())) @@ -191,6 +193,8 @@ def test_bicubic(pytestconfig): def test_grid_2d_int8(pytestconfig): dump = pytestconfig.getoption('dump') + measure_coverage = pytestconfig.getoption('measure_coverage') + step = 10 if measure_coverage else 1 grid = load_grid2d().mss grid.values[~np.isnan(grid.values)] = 0 @@ -200,8 +204,8 @@ def test_grid_2d_int8(pytestconfig): interpolator = xr_backend.RegularGridInterpolator(grid) assert isinstance(interpolator.grid._instance, core.Grid2DInt8) - lon = np.arange(-180, 180, 1) + 1 / 3.0 - lat = np.arange(-90, 90, 1) + 1 / 3.0 + lon = np.arange(-180, 180, step) + 1 / 3 + lat = np.arange(-90, 90, step) + 1 / 3 x, y = np.meshgrid(lon, lat, indexing='ij') z = interpolator(collections.OrderedDict(lon=x.ravel(), lat=y.ravel()), diff --git a/src/pyinterp/tests/test_binning.py b/src/pyinterp/tests/test_binning.py index f7b6c5b2..a6b6b171 100644 --- a/src/pyinterp/tests/test_binning.py +++ b/src/pyinterp/tests/test_binning.py @@ -16,23 +16,29 @@ from .. import Axis, Binning1D, Binning2D, geodetic -def build_binning2d_instance(dtype): +def build_binning2d_instance(step, dtype): ds = load_grid2d() - x_axis = Axis(np.arange(-180, 180, 5), is_circle=True) - y_axis = Axis(np.arange(-90, 95, 5)) + x_axis = Axis(np.arange(-180, 180, step), is_circle=True) + y_axis = Axis(np.arange(-90, 95, step)) binning = Binning2D(x_axis, y_axis, geodetic.Spheroid(), dtype=dtype) assert x_axis == binning.x assert y_axis == binning.y assert isinstance(str(binning), str) lon, lat = np.meshgrid(ds.lon, ds.lat) - binning.push(lon, lat, ds.mss, simple=True) + mss = ds.mss + if step != 1: + # Reduce the dataset size to measure test coverage. 
+ lon = lon[::10, ::10] + lat = lat[::10, ::10] + mss = mss[::10, ::10] + binning.push(lon, lat, mss, simple=True) simple_mean = binning.variable('mean') assert isinstance(simple_mean, np.ndarray) binning.clear() - binning.push(lon, lat, ds.mss, simple=False) + binning.push(lon, lat, mss, simple=False) linear_mean = binning.variable('mean') assert isinstance(simple_mean, np.ndarray) assert np.any(linear_mean != simple_mean) @@ -43,15 +49,17 @@ def build_binning2d_instance(dtype): binning.variable('_') -def test_binning2d(): - build_binning2d_instance(np.float64) - build_binning2d_instance(np.float32), Binning2D +def test_binning2d(pytestconfig): + measure_coverage = pytestconfig.getoption('measure_coverage') + step = 10 if measure_coverage else 1 + build_binning2d_instance(step, np.float64) + build_binning2d_instance(step, np.float32) with pytest.raises(ValueError): - build_binning2d_instance(np.int8) + build_binning2d_instance(step, np.int8) -def test_binning2d_dask(): +def test_binning2d_dask(pytestconfig): x_axis = Axis(np.linspace(-180, 180, 1), is_circle=True) y_axis = Axis(np.linspace(-80, 80, 1)) binning = Binning2D(x_axis, y_axis) @@ -100,7 +108,7 @@ def build_binning1d_instance(dtype): def test_binning1d(): build_binning1d_instance(np.float64) - build_binning1d_instance(np.float32), Binning2D + build_binning1d_instance(np.float32) with pytest.raises(ValueError): build_binning1d_instance(np.int8) diff --git a/src/pyinterp/tests/test_geodetic.py b/src/pyinterp/tests/test_geodetic.py index b599326b..8e36b847 100644 --- a/src/pyinterp/tests/test_geodetic.py +++ b/src/pyinterp/tests/test_geodetic.py @@ -143,9 +143,11 @@ def test_multipolygon(): def test_rtree(pytestconfig): + measure_coverage = pytestconfig.getoption('measure_coverage') + step = 10 if measure_coverage else 1 mesh = load_data() - lon = numpy.arange(-180, 180, 1) + 1 / 3.0 - lat = numpy.arange(-90, 90, 1) + 1 / 3.0 + lon = numpy.arange(-180, 180, step) + 1 / 3.0 + lat = numpy.arange(-90, 90, 
step) + 1 / 3.0 x, y = numpy.meshgrid(lon, lat, indexing='ij') data, _ = mesh.query(x.ravel(), y.ravel()) data, _ = mesh.inverse_distance_weighting(x.ravel(), y.ravel()) diff --git a/src/pyinterp/tests/test_period.py b/src/pyinterp/tests/test_period.py index cf08c263..6e17a33f 100644 --- a/src/pyinterp/tests/test_period.py +++ b/src/pyinterp/tests/test_period.py @@ -1,6 +1,7 @@ import pickle import numpy +import pytest from ..period import Period, PeriodList @@ -43,6 +44,7 @@ def test_interface(): assert p1.end() == datetime64(10) assert p1.duration() == timedelta64(9) assert not p1.is_null() + assert len(p1) == 9 assert str(p1) == '[1970-01-02, 1970-01-11)' # With numpy 2.0.0rc1, the representation of a datetime64 is different @@ -62,6 +64,7 @@ def test_interface(): assert p1.end() == datetime64(11) assert p1.duration() == timedelta64(10) assert not p1.is_null() + assert len(p1) == 10 p2 = period2() assert p2.begin == datetime64(5) @@ -69,6 +72,7 @@ def test_interface(): assert p2.end() == datetime64(30) assert p2.duration() == timedelta64(25) assert not p2.is_null() + assert len(p2) == 25 def test_cmp(): @@ -115,6 +119,7 @@ def test_relation(): def test_zero_length_period(): """Tests the behavior of a zero-length period.""" zero_len = make_period(3, 3) + assert len(zero_len) == 0 assert make_period(1, 1) == make_period(1, 1) assert make_period(3, 3) == zero_len @@ -142,6 +147,8 @@ def test_zero_length_period(): def test_invalid_period(): """Tests the behavior of a null period.""" null_per = make_period(5, 1) + with pytest.raises(ValueError): + assert len(null_per) == 0 assert not null_per.is_before(datetime64(7)) assert not null_per.is_after(datetime64(7)) @@ -450,3 +457,18 @@ def test_eclipse(): numpy.timedelta64(1, 's')) assert handler.is_it_close(numpy.datetime64('2019-12-10T11:02:50'), numpy.timedelta64(4, 's')) + + assert handler.are_periods_sorted_and_disjointed() + periods[2, :], periods[3, :] = periods[3, :], periods[2, :].copy() + handler = 
PeriodList(periods.T) + assert not handler.are_periods_sorted_and_disjointed() + assert handler.sort().are_periods_sorted_and_disjointed() + + period = Period(numpy.datetime64('2019-12-13T01:21:28.255', 'ms'), + numpy.datetime64('2019-12-15T00:48:21.092', 'ms')) + + merged = handler.intersection(period) + assert len(merged) == 4 + within = merged.within(period) + assert len(within) == 4 + assert numpy.all(merged.periods == within.periods) diff --git a/src/pyinterp/tests/test_rtree.py b/src/pyinterp/tests/test_rtree.py index d9efffb8..9b72b667 100644 --- a/src/pyinterp/tests/test_rtree.py +++ b/src/pyinterp/tests/test_rtree.py @@ -63,9 +63,11 @@ def load_data(): def test_interpolate(pytestconfig): dump = pytestconfig.getoption('dump') + measure_coverage = pytestconfig.getoption('measure_coverage') + step = 10 if measure_coverage else 1 / 3 mesh = load_data() - lon = np.arange(-180, 180, 1 / 3.0) + 1 / 3.0 - lat = np.arange(-90, 90, 1 / 3.0) + 1 / 3.0 + lon = np.arange(-180, 180, step) + 1 / 3 + lat = np.arange(-90, 90, step) + 1 / 3 x, y = np.meshgrid(lon, lat, indexing='ij') coordinates = np.vstack((x.ravel(), y.ravel())).T points, values = mesh.value(coordinates)