From 407984ee3caf15cb79013137b7d53631870e8b66 Mon Sep 17 00:00:00 2001 From: Liang-Jun Zhu Date: Thu, 21 Nov 2024 10:30:23 +0800 Subject: [PATCH] Squashed 'seims/src/ccgl/' changes from c3cb5566..f55a618e f55a618e [ci skip] Merge commit '8d31c4cf074efa132e8f8b4ea083507b2fa634da' into dev 8d31c4cf Squashed 'cmake/' changes from 2c2cf3f..2c05601 c11f6de7 update github actions to use Node.js 20 [ci skip] cc7c6c8f update to the latest master [ci skip] 9c857bb6 Typo fixed of IMAGE_TAG, using github.ref == 'refs/heads/master' 20a046d3 Merge pull request #14 from crazyzlj/dev 002902e1 deploy images after the success of build and test 77608eba Update dockerfiles 57f33486 Backup previous used workflow for deploying images 9eab234e Remove links to dockerfiles cce3f0de Ignore for docker build process 308a8df2 Change mongo-c-driver download path 45439f66 update to the latest master branch dc331ec5 Merge branch 'master' of github.com:crazyzlj/CCGL e085ab46 Update gdal version; use default version using ankane/setup-mongodb 411ba9a3 Merge pull request #13 from crazyzlj/dev b3f9c929 remove --platform=$BUILDPLATFORM 39fbff55 Merge pull request #12 from crazyzlj/dev 6bce8436 Use FROM --platform=$BUILDPLATFORM xxx as builder; Add provenance: false for docker/build-push-action 460a0834 Merge pull request #11 from crazyzlj/dev 415c1280 Update how to use CCGL with MongoDB in docker f50d3f99 Copy ccgl to /usr/local directories 55635670 Remove dockerfiles that running gtest of ccgl c46ba95d Build amd64 and arm64 versions c18889e3 Update gdal image from ghcr ece54d09 test all image versions 3e2f3334 test mongodb on macos-latest e2b12743 test tags for acr and ghcr b7652b48 test ankane/setup-mongodb on macos a8000c93 test acr and ghcr b54e60de only test ghcr 506a1a96 test permissions write-all 91112103 test e7791ea7 test ci 14568e41 Test again 533a4483 Test push images to ACR and ghcr 0190e958 Test push images to ACR and ghcr 9f3a4509 Test push images to ACR and ghcr e33f3873 Merge pull 
request #10 from crazyzlj/dev 4c705a08 add deploy docker images to ghcr.io 43a91748 add deploy docker images to ghcr.io 82933b02 add deploy docker images to ghcr.io 13444638 Merge pull request #9 from crazyzlj/dev c78b3437 (bugfix):set installation related paths to cache 35998ae0 Merge pull request #8 from crazyzlj/dev e9478086 (bugfix):only support mongo-c-driver-1.5.0+ to use mongoc_collection_find_with_opts 8c3db8f1 Merge pull request #7 from crazyzlj/dev f163d02b Squashed 'cmake/' changes from 8a954e2..2c2cf3f 52707277 Merge commit 'f163d02bd16f45d53927393d63e7a506c0e21ec1' into dev ea3b1661 (bugfixed): default nodata should depends on data type of clsRasterData 21ce5201 (feat/experimental): Add support of Sanitizers 314dee89 (bugfix): delete tm correctly fe0630dc (bugfix): fixed memory leak on MSVC 738b8028 merge latest master e74c4cca Bug fixed on GDAL 1.x & 2.x c4997932 Add CODECOV_TOKEN fa7d7229 Add GDAL data types added from versions 3.5 & 3.7 c797e3e5 Test passed on Xcode git-subtree-dir: seims/src/ccgl git-subtree-split: f55a618e2e84c0416b0d9436185a023ba838c932 --- .dockerignore | 9 + .github/workflows/cmake_builds.yml | 110 +++++++++-- .../cmake_builds_with-gdal-mongodb.yml | 143 +++++++++----- .github/workflows/cmake_builds_with-gdal.yml | 111 +++++++++-- .../workflows/cmake_builds_with-mongodb.yml | 135 ++++++++----- .github/workflows/deploy_images.yml.bak | 128 +++++++++++++ .github/workflows/download_gdal.ps1 | 2 +- .github/workflows/download_mongo-c-driver.ps1 | 2 +- CMakeLists.txt | 25 ++- Dockerfile-alpine | 1 - Dockerfile-alpine-with-gdal | 1 - Dockerfile-alpine-with-gdal-mongodb | 1 - Dockerfile-alpine-with-mongodb | 1 - cmake/FindASan.cmake | 62 ++++++ cmake/FindLLVM.cmake | 24 +-- cmake/FindMSan.cmake | 60 ++++++ cmake/FindSanitizers.cmake | 91 +++++++++ cmake/FindTSan.cmake | 68 +++++++ cmake/FindUBSan.cmake | 49 +++++ cmake/asan-wrapper | 55 ++++++ cmake/sanitize-helpers.cmake | 178 ++++++++++++++++++ docker/Dockerfile.alpine-basic | 59 
++++++ docker/Dockerfile.alpine-with-gdal | 59 ++++++ docker/Dockerfile.alpine-with-gdal-mongodb | 59 ++++++ docker/Dockerfile.alpine-with-mongodb | 59 ++++++ docker/README.md | 45 ++--- docker/alpine-with-gdal-mongodb/Dockerfile | 61 ------ docker/alpine-with-gdal/Dockerfile | 61 ------ docker/alpine-with-mongodb/Dockerfile | 61 ------ docker/alpine/Dockerfile | 61 ------ .../test/alpine-with-gdal-mongodb/Dockerfile | 48 ----- docker/test/alpine-with-gdal/Dockerfile | 43 ----- docker/test/alpine-with-mongodb/Dockerfile | 48 ----- docker/test/alpine/Dockerfile | 44 ----- src/data_raster.cpp | 27 ++- src/data_raster.hpp | 154 ++++++++++++--- src/db_mongoc.cpp | 14 +- src/db_mongoc.h | 2 +- src/utils_array.h | 1 - src/utils_string.cpp | 7 +- src/utils_time.cpp | 30 ++- test/CMakeLists.txt | 4 + test/raster/test_raster1d_mask_exceed.cpp | 21 ++- test/raster/test_raster1d_mask_within.cpp | 9 +- test/raster/test_raster1d_nomask.cpp | 4 +- test/raster/test_raster1d_split_merge.cpp | 10 +- test/raster/test_raster2d_mask.cpp | 7 +- test/raster/test_raster2d_nomask.cpp | 7 +- test/raster/test_raster2d_split_merge.cpp | 20 +- test/raster/test_raster_constructor.cpp | 43 +++-- test/test_main.cpp | 6 + 51 files changed, 1628 insertions(+), 702 deletions(-) create mode 100644 .dockerignore create mode 100644 .github/workflows/deploy_images.yml.bak delete mode 120000 Dockerfile-alpine delete mode 120000 Dockerfile-alpine-with-gdal delete mode 120000 Dockerfile-alpine-with-gdal-mongodb delete mode 120000 Dockerfile-alpine-with-mongodb create mode 100644 cmake/FindASan.cmake create mode 100644 cmake/FindMSan.cmake create mode 100755 cmake/FindSanitizers.cmake create mode 100644 cmake/FindTSan.cmake create mode 100644 cmake/FindUBSan.cmake create mode 100755 cmake/asan-wrapper create mode 100755 cmake/sanitize-helpers.cmake create mode 100644 docker/Dockerfile.alpine-basic create mode 100644 docker/Dockerfile.alpine-with-gdal create mode 100644 
docker/Dockerfile.alpine-with-gdal-mongodb create mode 100644 docker/Dockerfile.alpine-with-mongodb delete mode 100644 docker/alpine-with-gdal-mongodb/Dockerfile delete mode 100644 docker/alpine-with-gdal/Dockerfile delete mode 100644 docker/alpine-with-mongodb/Dockerfile delete mode 100644 docker/alpine/Dockerfile delete mode 100644 docker/test/alpine-with-gdal-mongodb/Dockerfile delete mode 100644 docker/test/alpine-with-gdal/Dockerfile delete mode 100644 docker/test/alpine-with-mongodb/Dockerfile delete mode 100644 docker/test/alpine/Dockerfile diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 000000000..cbc47ddd0 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,9 @@ +# .dockerignore +.github +.idea +.vs +bin +build* +cmake-build* +dist +html diff --git a/.github/workflows/cmake_builds.yml b/.github/workflows/cmake_builds.yml index b02310d99..e7ee17baf 100644 --- a/.github/workflows/cmake_builds.yml +++ b/.github/workflows/cmake_builds.yml @@ -1,30 +1,45 @@ -# Build CCGL library on native C++11 and run test with CMake +# Build CCGL library on native C++11, run test with CMake, and deploy docker image -name: Build on native C++ +name: Build and deploy with native C++11 on: - push: - paths-ignore: - # - 'doc/**' - pull_request: - paths-ignore: - # - 'doc/**' - workflow_dispatch: + push: + branches: + - 'master' + - 'dev' + paths-ignore: + - 'doc/**' + pull_request: + paths-ignore: + - 'doc/**' + workflow_dispatch: env: # Customize the CMake build type here (Release, Debug, RelWithDebInfo, etc.) BUILD_TYPE: Debug -jobs: - build-linux-ubuntu: - # The CMake configure and build commands are platform agnostic and should work equally well on Windows or Mac. - # You can convert this to a matrix build if you need cross-platform coverage. 
- # See: https://docs.github.com/en/free-pro-team@latest/actions/learn-github-actions/managing-complex-workflows#using-a-build-matrix - runs-on: ubuntu-latest + # Container registry domain, and a name for the Docker image that this workflow builds. + # IMAGE_NAME: ${{ github.repository }} ==> crazyzlj/ccgl + IMAGE_NAME: ccgl + # github container registry + REGISTRY: ghcr.io + NAMESPACE: ${{ github.repository_owner }} + # aliyun ACR + REGION_ID_ACR: cn-hangzhou + REGISTRY_ACR: registry.cn-hangzhou.aliyuncs.com + LOGIN_SERVER: https://registry.cn-hangzhou.aliyuncs.com + NAMESPACE_ACR: ljzhu-geomodels + DOCKERFILE_NAME: Dockerfile.alpine-basic + # IMAGE_TAG: ${{ github.sha }} + IMAGE_TAG: ${{ github.ref == 'refs/heads/master' && 'alpine-basic' || 'dev-alpine-basic' }} +jobs: + # Check GitHub Actions runner images versions: https://github.com/actions/runner-images + build-linux: + runs-on: ubuntu-22.04 steps: - name: Checkout CCGL - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Configure CMake # Configure CMake in a 'build' subdirectory. `CMAKE_BUILD_TYPE` is only required if you are using a single-configuration generator such as make. @@ -41,11 +56,70 @@ jobs: # See https://cmake.org/cmake/help/latest/manual/ctest.1.html for more detail run: ctest -C ${{env.BUILD_TYPE}} --rerun-failed --output-on-failure + deploy-linux: + needs: build-linux + runs-on: ubuntu-22.04 + # Sets the permissions granted to the `GITHUB_TOKEN` for the actions in this job. 
+ # permissions: write-all + permissions: + contents: read + packages: write + attestations: write + id-token: write + + steps: + - uses: actions/checkout@v4 + # login alibaba Container Registry + - name: Login to ACR + uses: aliyun/acr-login@v1 + with: + login-server: ${{ env.LOGIN_SERVER }} + region-id: ${{ env.REGION_ID_ACR }} + username: ${{ secrets.ACR_USERNAME }} + password: ${{ secrets.ACR_PASSWORD }} + # login ghcr.io + - name: Log in to the Container registry + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + # This step uses [docker/metadata-action](https://github.com/docker/metadata-action#about) to extract tags and labels that will be applied to the specified image. The `id` "meta" allows the output of this step to be referenced in a subsequent step. The `images` value provides the base name for the tags and labels. + - name: Extract metadata (tags, labels) for Docker + id: meta + uses: docker/metadata-action@v5 + with: + images: | + ${{ env.REGISTRY_ACR }}/${{ env.NAMESPACE_ACR }}/${{ env.IMAGE_NAME }} + ${{ env.REGISTRY }}/${{ env.NAMESPACE }}/${{ env.IMAGE_NAME }} + tags: | + type=ref,event=branch + type=ref,event=pr + type=semver,pattern={{version}} + type=semver,pattern={{major}}.{{minor}} + # This step uses the `docker/build-push-action` action to build the image, based on your repository's `Dockerfile`. If the build succeeds, it pushes the image to GitHub Packages. + # It uses the `context` parameter to define the build's context as the set of files located in the specified path. For more information, see "[Usage](https://github.com/docker/build-push-action#usage)" in the README of the `docker/build-push-action` repository. + # It uses the `tags` and `labels` parameters to tag and label the image with the output from the "meta" step. + - uses: docker/setup-buildx-action@v3 + - uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/${{ env.DOCKERFILE_NAME }} + push: true + platforms: linux/amd64,linux/arm64 + provenance: false + tags: | + ${{ env.REGISTRY_ACR }}/${{ env.NAMESPACE_ACR }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }} + ${{ env.REGISTRY }}/${{ env.NAMESPACE }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }} + cache-from: type=gha + cache-to: type=gha,mode=max + build-windows: runs-on: windows-2019 steps: - name: Checkout CCGL - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Configure CMake shell: cmd @@ -69,7 +143,7 @@ jobs: uses: maxim-lobanov/setup-xcode@v1 - name: Checkout CCGL - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Configure CMake run: cmake -B ${{github.workspace}}/build -DCMAKE_BUILD_TYPE=${{env.BUILD_TYPE}} -DUNITTEST=1 diff --git a/.github/workflows/cmake_builds_with-gdal-mongodb.yml b/.github/workflows/cmake_builds_with-gdal-mongodb.yml index ded480ff3..53a319a78 100644 --- a/.github/workflows/cmake_builds_with-gdal-mongodb.yml +++ b/.github/workflows/cmake_builds_with-gdal-mongodb.yml @@ -1,28 +1,38 @@ -# Build CCGL library on native C++11 with GDAL and mongo-c-driver, and run unit test with CMake -# Doxygen documents and code coverage are also built on Linux(Ubuntu). +# Build CCGL library on native C++11 with GDAL and mongo-c-driver, generate Doxygen documents, generate code coverage, run test with CMake, and deploy docker image -name: Build with GDAL and MongoDB +name: Build and deploy with GDAL and MongoDB on: - push: - paths-ignore: - - 'doc/**' - pull_request: - paths-ignore: - - 'doc/**' - workflow_dispatch: + push: + branches: + - 'master' + - 'dev' + pull_request: + workflow_dispatch: env: # Customize the CMake build type here (Release, Debug, RelWithDebInfo, etc.) BUILD_TYPE: Debug -jobs: - build-linux-ubuntu: - # The CMake configure and build commands are platform agnostic and should work equally well on Windows or Mac. - # You can convert this to a matrix build if you need cross-platform coverage. 
- # See: https://docs.github.com/en/free-pro-team@latest/actions/learn-github-actions/managing-complex-workflows#using-a-build-matrix - runs-on: ubuntu-latest + # Container registry domain, and a name for the Docker image that this workflow builds. + # IMAGE_NAME: ${{ github.repository }} ==> crazyzlj/ccgl + IMAGE_NAME: ccgl + # github container registry + REGISTRY: ghcr.io + NAMESPACE: ${{ github.repository_owner }} + # aliyun ACR + REGION_ID_ACR: cn-hangzhou + REGISTRY_ACR: registry.cn-hangzhou.aliyuncs.com + LOGIN_SERVER: https://registry.cn-hangzhou.aliyuncs.com + NAMESPACE_ACR: ljzhu-geomodels + DOCKERFILE_NAME: Dockerfile.alpine-with-gdal-mongodb + # IMAGE_TAG: ${{ github.sha }} + IMAGE_TAG: ${{ github.ref == 'refs/heads/master' && 'alpine-with-gdal-mongodb' || 'dev-alpine-with-gdal-mongodb' }} +jobs: + build-linux: + # Check GitHub Actions runner images versions: https://github.com/actions/runner-images + runs-on: ubuntu-22.04 services: mongodb: image: mongo @@ -31,19 +41,7 @@ jobs: steps: - name: Checkout CCGL - uses: actions/checkout@v3 - - # 1. https://github.com/supercharge/mongodb-github-action - # - name: MongoDB in GitHub Actions - # uses: supercharge/mongodb-github-action@1.7.0 - # 2. 
launch manually (untested) - # - name: launch - # shell: bash - # run: | - # mkdir /tmp/d - # mongod --dbpath /tmp/d --fork --logpath /tmp/log - # sleep 5 - # echo 'db' | mongo 127.0.0.1 + uses: actions/checkout@v4 - name: Install GDAL and mongo-c-driver run: sudo apt-get update && sudo apt-get install -qq gdal-bin libgdal-dev libmongoc-1.0-0 libmongoc-dev lcov @@ -70,6 +68,7 @@ jobs: - name: Upload to CodeCov uses: codecov/codecov-action@v3 with: + token: ${{ secrets.CODECOV_TOKEN }} files: ${{github.workspace}}/build/ccov/unittest.info flags: unittest # optional name: codecov-umbrella # optional @@ -89,20 +88,78 @@ jobs: doxyfile-path: 'doc/Doxyfile.zh-cn.in' - name: Deploy Doc - uses: peaceiris/actions-gh-pages@v3 + uses: peaceiris/actions-gh-pages@v4 with: github_token: ${{ secrets.GITHUB_TOKEN }} commit_message: ${{ github.event.head_commit.message }} publish_branch: gh-pages force_orphan: true publish_dir: html + deploy-linux: + needs: build-linux + runs-on: ubuntu-22.04 + # Sets the permissions granted to the `GITHUB_TOKEN` for the actions in this job. + # permissions: write-all + permissions: + contents: read + packages: write + attestations: write + id-token: write + + steps: + - uses: actions/checkout@v4 + # login alibaba Container Registry + - name: Login to ACR + uses: aliyun/acr-login@v1 + with: + login-server: ${{ env.LOGIN_SERVER }} + region-id: ${{ env.REGION_ID_ACR }} + username: ${{ secrets.ACR_USERNAME }} + password: ${{ secrets.ACR_PASSWORD }} + # login ghcr.io + - name: Log in to the Container registry + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + # This step uses [docker/metadata-action](https://github.com/docker/metadata-action#about) to extract tags and labels that will be applied to the specified image. The `id` "meta" allows the output of this step to be referenced in a subsequent step. 
The `images` value provides the base name for the tags and labels. + - name: Extract metadata (tags, labels) for Docker + id: meta + uses: docker/metadata-action@v5 + with: + images: | + ${{ env.REGISTRY_ACR }}/${{ env.NAMESPACE_ACR }}/${{ env.IMAGE_NAME }} + ${{ env.REGISTRY }}/${{ env.NAMESPACE }}/${{ env.IMAGE_NAME }} + tags: | + type=ref,event=branch + type=ref,event=pr + type=semver,pattern={{version}} + type=semver,pattern={{major}}.{{minor}} + # This step uses the `docker/build-push-action` action to build the image, based on your repository's `Dockerfile`. If the build succeeds, it pushes the image to GitHub Packages. + # It uses the `context` parameter to define the build's context as the set of files located in the specified path. For more information, see "[Usage](https://github.com/docker/build-push-action#usage)" in the README of the `docker/build-push-action` repository. + # It uses the `tags` and `labels` parameters to tag and label the image with the output from the "meta" step. + - uses: docker/setup-buildx-action@v3 + - uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/${{ env.DOCKERFILE_NAME }} + push: true + platforms: linux/amd64,linux/arm64 + provenance: false + tags: | + ${{ env.REGISTRY_ACR }}/${{ env.NAMESPACE_ACR }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }} + ${{ env.REGISTRY }}/${{ env.NAMESPACE }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }} + cache-from: type=gha + cache-to: type=gha,mode=max build-windows: runs-on: windows-2019 steps: - name: Checkout CCGL - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Find mongod.exe run: where.exe mongod.exe @@ -162,22 +219,12 @@ jobs: - name: Setup xcode uses: maxim-lobanov/setup-xcode@v1 - # default mongod install path: /usr/local/bin/mongod - # default mongod.conf: /usr/local/etc/mongod.conf - # systemLog: - # destination: file - # path: /usr/local/var/log/mongodb/mongo.log - # logAppend: true - # storage: - # dbPath: /usr/local/var/mongodb - # net: - # bindIp: 127.0.0.1 - - name: Check mongod - run: | - which mongod - cat /usr/local/etc/mongod.conf - - name: Start mongod manually - run: mongod --config /usr/local/etc/mongod.conf --fork + # https://github.com/ankane/setup-mongodb + - name: Setup MongoDB + uses: ankane/setup-mongodb@v1 + + - name: Run MongoDB + run: mongosh --eval "db.version()" - name: Install mongo-c-driver run: brew install mongo-c-driver @@ -186,7 +233,7 @@ jobs: run: brew list gdal &>/dev/null || brew install gdal - name: Checkout CCGL - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Configure CMake run: cmake -B ${{github.workspace}}/build -DCMAKE_BUILD_TYPE=${{env.BUILD_TYPE}} -DUNITTEST=1 diff --git a/.github/workflows/cmake_builds_with-gdal.yml b/.github/workflows/cmake_builds_with-gdal.yml index b6ba52580..68a3ffdc5 100644 --- a/.github/workflows/cmake_builds_with-gdal.yml +++ b/.github/workflows/cmake_builds_with-gdal.yml @@ -1,30 +1,45 @@ -# Build CCGL library on native C++11 with GDAL and run test with CMake +# Build CCGL library on native C++11 with GDAL, run test with CMake, and deploy docker image 
-name: Build with GDAL +name: Build and deploy with GDAL on: - push: - paths-ignore: - #- 'doc/**' - pull_request: - paths-ignore: - #- 'doc/**' - workflow_dispatch: + push: + branches: + - 'master' + - 'dev' + paths-ignore: + - 'doc/**' + pull_request: + paths-ignore: + - 'doc/**' + workflow_dispatch: env: # Customize the CMake build type here (Release, Debug, RelWithDebInfo, etc.) BUILD_TYPE: Debug -jobs: - build-linux-ubuntu: - # The CMake configure and build commands are platform agnostic and should work equally well on Windows or Mac. - # You can convert this to a matrix build if you need cross-platform coverage. - # See: https://docs.github.com/en/free-pro-team@latest/actions/learn-github-actions/managing-complex-workflows#using-a-build-matrix - runs-on: ubuntu-latest + # Container registry domain, and a name for the Docker image that this workflow builds. + # IMAGE_NAME: ${{ github.repository }} ==> crazyzlj/ccgl + IMAGE_NAME: ccgl + # github container registry + REGISTRY: ghcr.io + NAMESPACE: ${{ github.repository_owner }} + # aliyun ACR + REGION_ID_ACR: cn-hangzhou + REGISTRY_ACR: registry.cn-hangzhou.aliyuncs.com + LOGIN_SERVER: https://registry.cn-hangzhou.aliyuncs.com + NAMESPACE_ACR: ljzhu-geomodels + DOCKERFILE_NAME: Dockerfile.alpine-with-gdal + # IMAGE_TAG: ${{ github.sha }} + IMAGE_TAG: ${{ github.ref == 'refs/heads/master' && 'alpine-with-gdal' || 'dev-alpine-with-gdal' }} +jobs: + # Check GitHub Actions runner images versions: https://github.com/actions/runner-images + build-linux: + runs-on: ubuntu-22.04 steps: - name: Checkout CCGL - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install GDAL run: sudo apt-get update && sudo apt-get install -qq gdal-bin libgdal-dev @@ -44,12 +59,70 @@ jobs: # See https://cmake.org/cmake/help/latest/manual/ctest.1.html for more detail run: ctest -C ${{env.BUILD_TYPE}} --rerun-failed --output-on-failure + deploy-linux: + needs: build-linux + runs-on: ubuntu-22.04 + # Sets the permissions granted to 
the `GITHUB_TOKEN` for the actions in this job. + # permissions: write-all + permissions: + contents: read + packages: write + attestations: write + id-token: write + + steps: + - uses: actions/checkout@v4 + # login alibaba Container Registry + - name: Login to ACR + uses: aliyun/acr-login@v1 + with: + login-server: ${{ env.LOGIN_SERVER }} + region-id: ${{ env.REGION_ID_ACR }} + username: ${{ secrets.ACR_USERNAME }} + password: ${{ secrets.ACR_PASSWORD }} + # login ghcr.io + - name: Log in to the Container registry + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + # This step uses [docker/metadata-action](https://github.com/docker/metadata-action#about) to extract tags and labels that will be applied to the specified image. The `id` "meta" allows the output of this step to be referenced in a subsequent step. The `images` value provides the base name for the tags and labels. + - name: Extract metadata (tags, labels) for Docker + id: meta + uses: docker/metadata-action@v5 + with: + images: | + ${{ env.REGISTRY_ACR }}/${{ env.NAMESPACE_ACR }}/${{ env.IMAGE_NAME }} + ${{ env.REGISTRY }}/${{ env.NAMESPACE }}/${{ env.IMAGE_NAME }} + tags: | + type=ref,event=branch + type=ref,event=pr + type=semver,pattern={{version}} + type=semver,pattern={{major}}.{{minor}} + # This step uses the `docker/build-push-action` action to build the image, based on your repository's `Dockerfile`. If the build succeeds, it pushes the image to GitHub Packages. + # It uses the `context` parameter to define the build's context as the set of files located in the specified path. For more information, see "[Usage](https://github.com/docker/build-push-action#usage)" in the README of the `docker/build-push-action` repository. + # It uses the `tags` and `labels` parameters to tag and label the image with the output from the "meta" step. 
+ - uses: docker/setup-buildx-action@v3 + - uses: docker/build-push-action@v5 + with: + context: . + file: ./docker/${{ env.DOCKERFILE_NAME }} + push: true + platforms: linux/amd64,linux/arm64 + provenance: false + tags: | + ${{ env.REGISTRY_ACR }}/${{ env.NAMESPACE_ACR }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }} + ${{ env.REGISTRY }}/${{ env.NAMESPACE }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }} + cache-from: type=gha + cache-to: type=gha,mode=max + build-windows: runs-on: windows-2019 - steps: - name: Checkout CCGL - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Download GDAL id: pwshdowngdal @@ -88,7 +161,7 @@ jobs: run: brew list gdal &>/dev/null || brew install gdal - name: Checkout CCGL - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Configure CMake run: cmake -B ${{github.workspace}}/build -DCMAKE_BUILD_TYPE=${{env.BUILD_TYPE}} -DUNITTEST=1 diff --git a/.github/workflows/cmake_builds_with-mongodb.yml b/.github/workflows/cmake_builds_with-mongodb.yml index 155aaa184..7d7ac03c0 100644 --- a/.github/workflows/cmake_builds_with-mongodb.yml +++ b/.github/workflows/cmake_builds_with-mongodb.yml @@ -1,11 +1,14 @@ -# Build CCGL library on native C++11 with mongo-c-driver and run test with CMake +# Build CCGL library on native C++11 with mongo-c-driver, run test with CMake, and deploy docker image -name: Build with MongoDB +name: Build and deploy with MongoDB on: push: - paths-ignore: - - 'doc/**' + branches: + - 'master' + - 'dev' + paths-ignore: + - 'doc/**' pull_request: paths-ignore: - 'doc/**' @@ -15,13 +18,25 @@ env: # Customize the CMake build type here (Release, Debug, RelWithDebInfo, etc.) BUILD_TYPE: Debug -jobs: - build-linux-ubuntu: - # The CMake configure and build commands are platform agnostic and should work equally well on Windows or Mac. - # You can convert this to a matrix build if you need cross-platform coverage. 
- # See: https://docs.github.com/en/free-pro-team@latest/actions/learn-github-actions/managing-complex-workflows#using-a-build-matrix - runs-on: ubuntu-latest + # Container registry domain, and a name for the Docker image that this workflow builds. + # IMAGE_NAME: ${{ github.repository }} ==> crazyzlj/ccgl + IMAGE_NAME: ccgl + # github container registry + REGISTRY: ghcr.io + NAMESPACE: ${{ github.repository_owner }} + # aliyun ACR + REGION_ID_ACR: cn-hangzhou + REGISTRY_ACR: registry.cn-hangzhou.aliyuncs.com + LOGIN_SERVER: https://registry.cn-hangzhou.aliyuncs.com + NAMESPACE_ACR: ljzhu-geomodels + DOCKERFILE_NAME: Dockerfile.alpine-with-mongodb + # IMAGE_TAG: ${{ github.sha }} + IMAGE_TAG: ${{ github.ref == 'refs/heads/master' && 'alpine-with-mongodb' || 'dev-alpine-with-mongodb' }} +jobs: + # Check GitHub Actions runner images versions: https://github.com/actions/runner-images + build-linux: + runs-on: ubuntu-22.04 services: mongodb: image: mongo @@ -30,19 +45,7 @@ jobs: steps: - name: Checkout CCGL - uses: actions/checkout@v3 - - # 1. https://github.com/supercharge/mongodb-github-action - # - name: MongoDB in GitHub Actions - # uses: supercharge/mongodb-github-action@1.7.0 - # 2. launch manually (untested) - # - name: launch - # shell: bash - # run: | - # mkdir /tmp/d - # mongod --dbpath /tmp/d --fork --logpath /tmp/log - # sleep 5 - # echo 'db' | mongo 127.0.0.1 + uses: actions/checkout@v4 - name: Install mongo-c-driver run: sudo apt-get update && sudo apt-get install -qq libmongoc-1.0-0 libmongoc-dev @@ -62,12 +65,70 @@ jobs: # See https://cmake.org/cmake/help/latest/manual/ctest.1.html for more detail run: ctest -C ${{env.BUILD_TYPE}} --rerun-failed --output-on-failure + deploy-linux: + needs: build-linux + runs-on: ubuntu-22.04 + # Sets the permissions granted to the `GITHUB_TOKEN` for the actions in this job. 
+ # permissions: write-all + permissions: + contents: read + packages: write + attestations: write + id-token: write + + steps: + - uses: actions/checkout@v4 + # login alibaba Container Registry + - name: Login to ACR + uses: aliyun/acr-login@v1 + with: + login-server: ${{ env.LOGIN_SERVER }} + region-id: ${{ env.REGION_ID_ACR }} + username: ${{ secrets.ACR_USERNAME }} + password: ${{ secrets.ACR_PASSWORD }} + # login ghcr.io + - name: Log in to the Container registry + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + # This step uses [docker/metadata-action](https://github.com/docker/metadata-action#about) to extract tags and labels that will be applied to the specified image. The `id` "meta" allows the output of this step to be referenced in a subsequent step. The `images` value provides the base name for the tags and labels. + - name: Extract metadata (tags, labels) for Docker + id: meta + uses: docker/metadata-action@v5 + with: + images: | + ${{ env.REGISTRY_ACR }}/${{ env.NAMESPACE_ACR }}/${{ env.IMAGE_NAME }} + ${{ env.REGISTRY }}/${{ env.NAMESPACE }}/${{ env.IMAGE_NAME }} + tags: | + type=ref,event=branch + type=ref,event=pr + type=semver,pattern={{version}} + type=semver,pattern={{major}}.{{minor}} + # This step uses the `docker/build-push-action` action to build the image, based on your repository's `Dockerfile`. If the build succeeds, it pushes the image to GitHub Packages. + # It uses the `context` parameter to define the build's context as the set of files located in the specified path. For more information, see "[Usage](https://github.com/docker/build-push-action#usage)" in the README of the `docker/build-push-action` repository. + # It uses the `tags` and `labels` parameters to tag and label the image with the output from the "meta" step. + - uses: docker/setup-buildx-action@v3 + - uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/${{ env.DOCKERFILE_NAME }} + push: true + platforms: linux/amd64,linux/arm64 + provenance: false + tags: | + ${{ env.REGISTRY_ACR }}/${{ env.NAMESPACE_ACR }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }} + ${{ env.REGISTRY }}/${{ env.NAMESPACE }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }} + cache-from: type=gha + cache-to: type=gha,mode=max + build-windows: runs-on: windows-2019 - steps: - name: Checkout CCGL - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Find mongod.exe run: where.exe mongod.exe @@ -119,28 +180,18 @@ jobs: - name: Setup xcode uses: maxim-lobanov/setup-xcode@v1 - # default mongod install path: /usr/local/bin/mongod - # default mongod.conf: /usr/local/etc/mongod.conf - # systemLog: - # destination: file - # path: /usr/local/var/log/mongodb/mongo.log - # logAppend: true - # storage: - # dbPath: /usr/local/var/mongodb - # net: - # bindIp: 127.0.0.1 - - name: Check mongod - run: | - which mongod - cat /usr/local/etc/mongod.conf - - name: Start mongod manually - run: mongod --config /usr/local/etc/mongod.conf --fork + # https://github.com/ankane/setup-mongodb + - name: Setup MongoDB + uses: ankane/setup-mongodb@v1 + + - name: Run MongoDB + run: mongosh --eval "db.version()" - name: Install mongo-c-driver run: brew install mongo-c-driver - name: Checkout CCGL - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Configure CMake run: cmake -B ${{github.workspace}}/build -DCMAKE_BUILD_TYPE=${{env.BUILD_TYPE}} -DUNITTEST=1 diff --git a/.github/workflows/deploy_images.yml.bak b/.github/workflows/deploy_images.yml.bak new file mode 100644 index 000000000..39bc74905 --- /dev/null +++ b/.github/workflows/deploy_images.yml.bak @@ -0,0 +1,128 @@ +name: Create and publish CCGL Docker images + +# Configures this workflow to run every time a change is pushed to the branches. +on: + push: + branches: + - 'master' + - 'dev' + +# Defines custom environment variables for the workflow. 
These are used for the Container registry domain, and a name for the Docker image that this workflow builds. +env: + # IMAGE_NAME: ${{ github.repository }} ==> crazyzlj/ccgl + IMAGE_NAME: ccgl + # github container registry + REGISTRY: ghcr.io + NAMESPACE: ${{ github.repository_owner }} + # aliyun ACR + REGION_ID_ACR: cn-hangzhou + REGISTRY_ACR: registry.cn-hangzhou.aliyuncs.com + NAMESPACE_ACR: ljzhu-geomodels + # IMAGE_TAG: ${{ github.sha }} + +# There is a single job in this workflow. It's configured to run on the latest available version of Ubuntu. +jobs: + build-and-push-image: + runs-on: ubuntu-latest + # Sets the permissions granted to the `GITHUB_TOKEN` for the actions in this job. + # permissions: write-all + permissions: + contents: read + packages: write + attestations: write + id-token: write + # + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + # login alibaba Container Registry + - name: Login to ACR + uses: aliyun/acr-login@v1 + with: + login-server: https://registry.cn-hangzhou.aliyuncs.com + region-id: cn-hangzhou + username: "${{ secrets.ACR_USERNAME }}" + password: "${{ secrets.ACR_PASSWORD }}" + + # login ghcr.io + - name: Log in to the Container registry + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + # This step uses [docker/metadata-action](https://github.com/docker/metadata-action#about) to extract tags and labels that will be applied to the specified image. The `id` "meta" allows the output of this step to be referenced in a subsequent step. The `images` value provides the base name for the tags and labels. 
+ - name: Extract metadata (tags, labels) for Docker + id: meta + uses: docker/metadata-action@v5 + with: + images: | + ${{ env.REGISTRY_ACR }}/${{ env.NAMESPACE_ACR }}/${{ env.IMAGE_NAME }} + ${{ env.REGISTRY }}/${{ env.NAMESPACE }}/${{ env.IMAGE_NAME }} + tags: | + type=ref,event=branch + type=ref,event=pr + type=semver,pattern={{version}} + type=semver,pattern={{major}}.{{minor}} + # This step uses the `docker/build-push-action` action to build the image, based on your repository's `Dockerfile`. If the build succeeds, it pushes the image to GitHub Packages. + # It uses the `context` parameter to define the build's context as the set of files located in the specified path. For more information, see "[Usage](https://github.com/docker/build-push-action#usage)" in the README of the `docker/build-push-action` repository. + # It uses the `tags` and `labels` parameters to tag and label the image with the output from the "meta" step. + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + - name: Build basic CCGL + uses: docker/build-push-action@v5 + with: + context: . + file: ./Dockerfile-alpine + push: true + platforms: linux/amd64,linux/arm64 + provenance: false + tags: | + ${{ env.REGISTRY_ACR }}/${{ env.NAMESPACE_ACR }}/${{ env.IMAGE_NAME }}:${{ github.ref_name }}-alpine + ${{ env.REGISTRY }}/${{ env.NAMESPACE }}/${{ env.IMAGE_NAME }}:${{ github.ref_name }}-alpine + cache-from: type=gha + cache-to: type=gha,mode=max + + - name: Build CCGL with gdal + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./Dockerfile-alpine-with-gdal + push: true + platforms: linux/amd64,linux/arm64 + provenance: false + tags: | + ${{ env.REGISTRY_ACR }}/${{ env.NAMESPACE_ACR }}/${{ env.IMAGE_NAME }}:${{ github.ref_name }}-alpine-with-gdal + ${{ env.REGISTRY }}/${{ env.NAMESPACE }}/${{ env.IMAGE_NAME }}:${{ github.ref_name }}-alpine-with-gdal + cache-from: type=gha + cache-to: type=gha,mode=max + + - name: Build CCGL with mongodb + uses: docker/build-push-action@v5 + with: + context: . + file: ./Dockerfile-alpine-with-mongodb + push: true + platforms: linux/amd64,linux/arm64 + provenance: false + tags: | + ${{ env.REGISTRY_ACR }}/${{ env.NAMESPACE_ACR }}/${{ env.IMAGE_NAME }}:${{ github.ref_name }}-alpine-with-mongodb + ${{ env.REGISTRY }}/${{ env.NAMESPACE }}/${{ env.IMAGE_NAME }}:${{ github.ref_name }}-alpine-with-mongodb + cache-from: type=gha + cache-to: type=gha,mode=max + + - name: Build CCGL with gdal and mongodb + uses: docker/build-push-action@v5 + with: + context: . + file: ./Dockerfile-alpine-with-gdal-mongodb + push: true + platforms: linux/amd64,linux/arm64 + provenance: false + tags: | + ${{ env.REGISTRY_ACR }}/${{ env.NAMESPACE_ACR }}/${{ env.IMAGE_NAME }}:${{ github.ref_name }}-alpine-with-gdal-mongodb + ${{ env.REGISTRY }}/${{ env.NAMESPACE }}/${{ env.IMAGE_NAME }}:${{ github.ref_name }}-alpine-with-gdal-mongodb + cache-from: type=gha + cache-to: type=gha,mode=max diff --git a/.github/workflows/download_gdal.ps1 b/.github/workflows/download_gdal.ps1 index d92015b1f..43ee61be0 100644 --- a/.github/workflows/download_gdal.ps1 +++ b/.github/workflows/download_gdal.ps1 @@ -1,4 +1,4 @@ -param ($gdalPath = "$env:SystemDrive\gdal", $VSversion = "1928", $GDALversion = "3.5.3", $MAPSversion = "8.0.0") +param ($gdalPath = "$env:SystemDrive\gdal", $VSversion = "1928", $GDALversion = "3.9.1", $MAPSversion = "8.2.0") $GDALversion=$GDALversion -replace '\.','-' $MAPSversion=$MAPSversion -replace '\.','-' $urllib = 
"https://download.gisinternals.com/sdk/downloads/release-$VSversion-x64-gdal-$GDALversion-mapserver-$MAPSversion-libs.zip" diff --git a/.github/workflows/download_mongo-c-driver.ps1 b/.github/workflows/download_mongo-c-driver.ps1 index 023167d86..cb44b27f4 100644 --- a/.github/workflows/download_mongo-c-driver.ps1 +++ b/.github/workflows/download_mongo-c-driver.ps1 @@ -1,5 +1,5 @@ param ($mongoCPath = "$env:SystemDrive\mongo-c-driver", $version = "1.16.2") -$url = "https://raw.githubusercontent.com/crazyzlj/Github_Actions_Precompiled_Packages/release/releases/mongo-c-driver-$version-vs2019x64.zip" +$url = "https://raw.githubusercontent.com/crazyzlj/Github_Actions_Precompiled_Packages/release/mongo-c-driver/mongo-c-driver-$version-vs2019x64.zip" $zipFile = "$mongoCPath\mongo-c-driver.zip" # Check if mongoCPath existed diff --git a/CMakeLists.txt b/CMakeLists.txt index 9f5237d10..e6edb7209 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -17,6 +17,13 @@ # -DLLVM_ROOT_DIR Specific the root directory of brew installed LLVM, e.g., /opt/homebrew/opt/llvm # -DBUILD_DOC=1 means build CCGL documentation based on doxygen # +# Sanitizers related flags (Experimental): +# +# -DSANITIZE_ADDRESS=On for MSVC and Clang +# -DSANITIZE_MEMORY=On for MSVC, GCC, and Clang +# -DSANITIZE_THREAD=On for MSVC, GCC, and Clang +# -DSANITIZE_UNDEFINED=On for MSVC, GCC, and Clang +# # Routine testing platforms and compilers include: # 1. Windows 10 with Visual Studio 2010/2015/2019, mongo-c-driver-1.16.2, GDAL-1.11.4/2.4.4/3.3.3 # 2. 
CentOS 6.2 (cluster) with GCC-4.8.4, mongo-c-driver-1.5.5, GDAL-1.9.0 @@ -25,7 +32,7 @@ # mongo-c-driver-1.16.2, GDAL-3.3.3 (brew installed and framework by kyngchaos.com) # # Created and maintained by Liangjun Zhu, zlj(a)lreis.ac.cn -# Latest updated: Apr 5, 2023 +# Latest updated: Aug 10, 2023 # Copyright (C) 2017-2023 Lreis, IGSNRR, CAS # ---------------------------------------------------------------------------- ### Disable in-source builds to prevent source tree corruption. @@ -88,9 +95,9 @@ ELSE() ENDIF () ENDIF () -SET(INSTALL_PREFIX ${INSTALL_DIR}) -SET(CMAKE_INSTALL_PREFIX ${INSTALL_DIR}) -SET(DOC_INSTALL_DIR "${INSTALL_DIR}/doc" CACHE PATH "Path to the documentation") +SET(INSTALL_PREFIX ${INSTALL_DIR} CACHE PATH "Set INSTALL_PREFIX cache" FORCE) +SET(CMAKE_INSTALL_PREFIX ${INSTALL_DIR} CACHE PATH "Set CMAKE_INSTALL_PREFIX cache" FORCE) +SET(DOC_INSTALL_DIR "${INSTALL_DIR}/doc" CACHE PATH "Set path cache to the documentation") ### Allow project folders in MSVC. IF(MSVC OR XCODE) @@ -256,6 +263,14 @@ IF ((CV_GCC OR CV_CLANG) AND CODE_COVERAGE) SET(UNITTEST 1) ENDIF () +### Check if use Sanitizers for GCC and Clang +SET(USE_SAN 0) +IF (SANITIZE_ADDRESS OR SANITIZE_MEMORY OR SANITIZE_THREAD OR SANITIZE_UNDEFINED) + SET(USE_SAN 1) + find_package(Sanitizers) +ENDIF () + + ### Build Googletest if UNITTEST is set as True or 1 IF (UNITTEST STREQUAL 1) ADD_DEFINITIONS(-DUNITTEST) @@ -300,7 +315,7 @@ IF (UNITTEST STREQUAL 1) enable_testing() ADD_SUBDIRECTORY(${TEST_DIR}) # For the convenient of debugging in MSVC and CLion - IF (CMAKE_GENERATOR MATCHES "Visual Studio" OR CMAKE_GENERATOR MATCHES "XCode") + IF (CMAKE_GENERATOR MATCHES "Visual Studio" OR CMAKE_GENERATOR MATCHES "Xcode") FILE(COPY ${CCGL_BASE_DIR}/data DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/test/Debug) FILE(COPY ${CCGL_BASE_DIR}/data DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/test/Release) ELSE () diff --git a/Dockerfile-alpine b/Dockerfile-alpine deleted file mode 120000 index 0d71275a8..000000000 --- 
a/Dockerfile-alpine +++ /dev/null @@ -1 +0,0 @@ -/docker/alpine/Dockerfile \ No newline at end of file diff --git a/Dockerfile-alpine-with-gdal b/Dockerfile-alpine-with-gdal deleted file mode 120000 index 10214847b..000000000 --- a/Dockerfile-alpine-with-gdal +++ /dev/null @@ -1 +0,0 @@ -/docker/alpine-with-gdal/Dockerfile \ No newline at end of file diff --git a/Dockerfile-alpine-with-gdal-mongodb b/Dockerfile-alpine-with-gdal-mongodb deleted file mode 120000 index b0ecfa4c1..000000000 --- a/Dockerfile-alpine-with-gdal-mongodb +++ /dev/null @@ -1 +0,0 @@ -/docker/alpine-with-gdal-mongodb/Dockerfile \ No newline at end of file diff --git a/Dockerfile-alpine-with-mongodb b/Dockerfile-alpine-with-mongodb deleted file mode 120000 index fb9cbfe61..000000000 --- a/Dockerfile-alpine-with-mongodb +++ /dev/null @@ -1 +0,0 @@ -/docker/alpine-with-mongodb/Dockerfile \ No newline at end of file diff --git a/cmake/FindASan.cmake b/cmake/FindASan.cmake new file mode 100644 index 000000000..4548e46a8 --- /dev/null +++ b/cmake/FindASan.cmake @@ -0,0 +1,62 @@ +# The MIT License (MIT) +# +# Copyright (c) +# 2013 Matthew Arsenault +# 2015-2016 RWTH Aachen University, Federal Republic of Germany +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +option(SANITIZE_ADDRESS "Enable AddressSanitizer for sanitized targets." Off) + +set(FLAG_CANDIDATES + # MSVC uses + "/fsanitize=address" + + # Clang 3.2+ use this version. The no-omit-frame-pointer option is optional. + "-g -fsanitize=address -fno-omit-frame-pointer" + "-g -fsanitize=address" + + # Older deprecated flag for ASan + "-g -faddress-sanitizer" +) + + +if (SANITIZE_ADDRESS AND (SANITIZE_THREAD OR SANITIZE_MEMORY)) + message(FATAL_ERROR "AddressSanitizer is not compatible with " + "ThreadSanitizer or MemorySanitizer.") +endif () + + +include(sanitize-helpers) + +if (SANITIZE_ADDRESS) + sanitizer_check_compiler_flags("${FLAG_CANDIDATES}" "AddressSanitizer" + "ASan") + + find_program(ASan_WRAPPER "asan-wrapper" PATHS ${CMAKE_MODULE_PATH}) + mark_as_advanced(ASan_WRAPPER) +endif () + +function (add_sanitize_address TARGET) + if (NOT SANITIZE_ADDRESS) + return() + endif () + + sanitizer_add_flags(${TARGET} "AddressSanitizer" "ASan") +endfunction () diff --git a/cmake/FindLLVM.cmake b/cmake/FindLLVM.cmake index 74cb0923a..61bb13c76 100644 --- a/cmake/FindLLVM.cmake +++ b/cmake/FindLLVM.cmake @@ -1,6 +1,7 @@ # https://github.com/ldc-developers/ldc/blob/master/cmake/Modules/FindLLVM.cmake -# commit d595f4fefa5537afbf396b29c6a8e6776ff71b9b Nov 10, 2022 +# commit 2cd14c59dd878091140b89c8048dbdd457031aee May 19, 2024 # +# Updated: 11/21/2024 - lj - Only if LLVM_FIND_VERSION is assigned, the found version will be checked. # # - Find LLVM headers and libraries. # This module locates LLVM and adapts the llvm-config output for use with @@ -36,13 +37,10 @@ # We also want an user-specified LLVM_ROOT_DIR to take precedence over the # system default locations such as /usr/local/bin. 
Executing find_program() # multiples times is the approach recommended in the docs. -set(llvm_config_names llvm-config-15.0 llvm-config150 llvm-config-15 - llvm-config-14.0 llvm-config140 llvm-config-14 - llvm-config-13.0 llvm-config130 llvm-config-13 - llvm-config-12.0 llvm-config120 llvm-config-12 - llvm-config-11.0 llvm-config110 llvm-config-11 - llvm-config-10.0 llvm-config100 llvm-config-10 - llvm-config-9.0 llvm-config90 llvm-config-9 +set(llvm_config_names llvm-config-18.1 llvm-config181 llvm-config-18 + llvm-config-17.0 llvm-config170 llvm-config-17 + llvm-config-16.0 llvm-config160 llvm-config-16 + llvm-config-15.0 llvm-config150 llvm-config-15 llvm-config) find_program(LLVM_CONFIG NAMES ${llvm_config_names} @@ -53,13 +51,11 @@ if(APPLE) # extra fallbacks for MacPorts & Homebrew find_program(LLVM_CONFIG NAMES ${llvm_config_names} - PATHS /opt/local/libexec/llvm-15/bin - /opt/local/libexec/llvm-14/bin /opt/local/libexec/llvm-13/bin /opt/local/libexec/llvm-12/bin - /opt/local/libexec/llvm-11/bin /opt/local/libexec/llvm-10/bin /opt/local/libexec/llvm-9.0/bin + PATHS /opt/local/libexec/llvm-18/bin /opt/local/libexec/llvm-17/bin + /opt/local/libexec/llvm-16/bin /opt/local/libexec/llvm-15/bin /opt/local/libexec/llvm/bin - /usr/local/opt/llvm@15/bin - /usr/local/opt/llvm@14/bin /usr/local/opt/llvm@13/bin /usr/local/opt/llvm@12/bin - /usr/local/opt/llvm@11/bin /usr/local/opt/llvm@10/bin /usr/local/opt/llvm@9/bin + /usr/local/opt/llvm@18/bin /usr/local/opt/llvm@17/bin + /usr/local/opt/llvm@16/bin /usr/local/opt/llvm@15/bin /usr/local/opt/llvm/bin NO_DEFAULT_PATH) endif() diff --git a/cmake/FindMSan.cmake b/cmake/FindMSan.cmake new file mode 100644 index 000000000..d744c34be --- /dev/null +++ b/cmake/FindMSan.cmake @@ -0,0 +1,60 @@ +# The MIT License (MIT) +# +# Copyright (c) +# 2013 Matthew Arsenault +# 2015-2016 RWTH Aachen University, Federal Republic of Germany +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software 
and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +option(SANITIZE_MEMORY "Enable MemorySanitizer for sanitized targets." Off) + +set(FLAG_CANDIDATES + # MSVC uses + "/fsanitize=memory" + # GNU/Clang + "-g -fsanitize=memory" +) + + +include(sanitize-helpers) + +if (SANITIZE_MEMORY) + if (NOT ${CMAKE_SYSTEM_NAME} STREQUAL "Linux") + message(WARNING "MemorySanitizer disabled for target ${TARGET} because " + "MemorySanitizer is supported for Linux systems only.") + set(SANITIZE_MEMORY Off CACHE BOOL + "Enable MemorySanitizer for sanitized targets." FORCE) + elseif (NOT ${CMAKE_SIZEOF_VOID_P} EQUAL 8) + message(WARNING "MemorySanitizer disabled for target ${TARGET} because " + "MemorySanitizer is supported for 64bit systems only.") + set(SANITIZE_MEMORY Off CACHE BOOL + "Enable MemorySanitizer for sanitized targets." 
FORCE) + else () + sanitizer_check_compiler_flags("${FLAG_CANDIDATES}" "MemorySanitizer" + "MSan") + endif () +endif () + +function (add_sanitize_memory TARGET) + if (NOT SANITIZE_MEMORY) + return() + endif () + + sanitizer_add_flags(${TARGET} "MemorySanitizer" "MSan") +endfunction () diff --git a/cmake/FindSanitizers.cmake b/cmake/FindSanitizers.cmake new file mode 100755 index 000000000..d9b438c0f --- /dev/null +++ b/cmake/FindSanitizers.cmake @@ -0,0 +1,91 @@ +# The MIT License (MIT) +# +# Copyright (c) +# 2013 Matthew Arsenault +# 2015-2016 RWTH Aachen University, Federal Republic of Germany +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +# If any of the used compiler is a GNU compiler, add a second option to static +# link against the sanitizers. +option(SANITIZE_LINK_STATIC "Try to link static against sanitizers." Off) + +# Highlight this module has been loaded. 
+set(Sanitizers_FOUND TRUE) + +set(FIND_QUIETLY_FLAG "") +if (DEFINED Sanitizers_FIND_QUIETLY) + set(FIND_QUIETLY_FLAG "QUIET") +endif () + +find_package(ASan ${FIND_QUIETLY_FLAG}) +find_package(TSan ${FIND_QUIETLY_FLAG}) +find_package(MSan ${FIND_QUIETLY_FLAG}) +find_package(UBSan ${FIND_QUIETLY_FLAG}) + +function(sanitizer_add_blacklist_file FILE) + if(NOT IS_ABSOLUTE ${FILE}) + set(FILE "${CMAKE_CURRENT_SOURCE_DIR}/${FILE}") + endif() + get_filename_component(FILE "${FILE}" REALPATH) + + sanitizer_check_compiler_flags("-fsanitize-blacklist=${FILE}" + "SanitizerBlacklist" "SanBlist") +endfunction() + +function(add_sanitizers) + # If no sanitizer is enabled, return immediately. + if (NOT (SANITIZE_ADDRESS OR SANITIZE_MEMORY OR SANITIZE_THREAD OR + SANITIZE_UNDEFINED)) + return() + endif () + + foreach (TARGET ${ARGV}) + # Check if this target will be compiled by exactly one compiler. Other- + # wise sanitizers can't be used and a warning should be printed once. + get_target_property(TARGET_TYPE ${TARGET} TYPE) + if (TARGET_TYPE STREQUAL "INTERFACE_LIBRARY") + message(WARNING "Can't use any sanitizers for target ${TARGET}, " + "because it is an interface library and cannot be " + "compiled directly.") + return() + endif () + sanitizer_target_compilers(${TARGET} TARGET_COMPILER) + list(LENGTH TARGET_COMPILER NUM_COMPILERS) + if (NUM_COMPILERS GREATER 1) + message(WARNING "Can't use any sanitizers for target ${TARGET}, " + "because it will be compiled by incompatible compilers. " + "Target will be compiled without sanitizers.") + return() + + elseif (NUM_COMPILERS EQUAL 0) + # If the target is compiled by no known compiler, give a warning. + message(WARNING "Sanitizers for target ${TARGET} may not be" + " usable, because it uses no or an unknown compiler. " + "This is a false warning for targets using only " + "object lib(s) as input.") + endif () + + # Add sanitizers for target. 
+ add_sanitize_address(${TARGET}) + add_sanitize_thread(${TARGET}) + add_sanitize_memory(${TARGET}) + add_sanitize_undefined(${TARGET}) + endforeach () +endfunction(add_sanitizers) diff --git a/cmake/FindTSan.cmake b/cmake/FindTSan.cmake new file mode 100644 index 000000000..efb2e9525 --- /dev/null +++ b/cmake/FindTSan.cmake @@ -0,0 +1,68 @@ +# The MIT License (MIT) +# +# Copyright (c) +# 2013 Matthew Arsenault +# 2015-2016 RWTH Aachen University, Federal Republic of Germany +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +option(SANITIZE_THREAD "Enable ThreadSanitizer for sanitized targets." Off) + +set(FLAG_CANDIDATES + # MSVC uses + "/fsanitize=thread" + # GNU/Clang + "-g -fsanitize=thread" +) + + +# ThreadSanitizer is not compatible with MemorySanitizer. 
+if (SANITIZE_THREAD AND SANITIZE_MEMORY) + message(FATAL_ERROR "ThreadSanitizer is not compatible with " + "MemorySanitizer.") +endif () + + +include(sanitize-helpers) + +if (SANITIZE_THREAD) + if (NOT ${CMAKE_SYSTEM_NAME} STREQUAL "Linux" AND + NOT ${CMAKE_SYSTEM_NAME} STREQUAL "Darwin") + message(WARNING "ThreadSanitizer disabled for target ${TARGET} because " + "ThreadSanitizer is supported for Linux systems and macOS only.") + set(SANITIZE_THREAD Off CACHE BOOL + "Enable ThreadSanitizer for sanitized targets." FORCE) + elseif (NOT ${CMAKE_SIZEOF_VOID_P} EQUAL 8) + message(WARNING "ThreadSanitizer disabled for target ${TARGET} because " + "ThreadSanitizer is supported for 64bit systems only.") + set(SANITIZE_THREAD Off CACHE BOOL + "Enable ThreadSanitizer for sanitized targets." FORCE) + else () + sanitizer_check_compiler_flags("${FLAG_CANDIDATES}" "ThreadSanitizer" + "TSan") + endif () +endif () + +function (add_sanitize_thread TARGET) + if (NOT SANITIZE_THREAD) + return() + endif () + + sanitizer_add_flags(${TARGET} "ThreadSanitizer" "TSan") +endfunction () diff --git a/cmake/FindUBSan.cmake b/cmake/FindUBSan.cmake new file mode 100644 index 000000000..4354c2e4d --- /dev/null +++ b/cmake/FindUBSan.cmake @@ -0,0 +1,49 @@ +# The MIT License (MIT) +# +# Copyright (c) +# 2013 Matthew Arsenault +# 2015-2016 RWTH Aachen University, Federal Republic of Germany +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +option(SANITIZE_UNDEFINED + "Enable UndefinedBehaviorSanitizer for sanitized targets." Off) + +set(FLAG_CANDIDATES + # MSVC uses + "/fsanitize=undefined" + # GNU/Clang + "-g -fsanitize=undefined" +) + + +include(sanitize-helpers) + +if (SANITIZE_UNDEFINED) + sanitizer_check_compiler_flags("${FLAG_CANDIDATES}" + "UndefinedBehaviorSanitizer" "UBSan") +endif () + +function (add_sanitize_undefined TARGET) + if (NOT SANITIZE_UNDEFINED) + return() + endif () + + sanitizer_add_flags(${TARGET} "UndefinedBehaviorSanitizer" "UBSan") +endfunction () diff --git a/cmake/asan-wrapper b/cmake/asan-wrapper new file mode 100755 index 000000000..5d5410337 --- /dev/null +++ b/cmake/asan-wrapper @@ -0,0 +1,55 @@ +#!/bin/sh + +# The MIT License (MIT) +# +# Copyright (c) +# 2013 Matthew Arsenault +# 2015-2016 RWTH Aachen University, Federal Republic of Germany +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +# This script is a wrapper for AddressSanitizer. In some special cases you need +# to preload AddressSanitizer to avoid error messages - e.g. if you're +# preloading another library to your application. At the moment this script will +# only do something, if we're running on a Linux platform. OSX might not be +# affected. + + +# Exit immediately, if platform is not Linux. +if [ "$(uname)" != "Linux" ] +then + exec $@ +fi + + +# Get the used libasan of the application ($1). If a libasan was found, it will +# be prepended to LD_PRELOAD. +libasan=$(ldd $1 | grep libasan | sed "s/^[[:space:]]//" | cut -d' ' -f1) +if [ -n "$libasan" ] +then + if [ -n "$LD_PRELOAD" ] + then + export LD_PRELOAD="$libasan:$LD_PRELOAD" + else + export LD_PRELOAD="$libasan" + fi +fi + +# Execute the application. 
+exec $@ diff --git a/cmake/sanitize-helpers.cmake b/cmake/sanitize-helpers.cmake new file mode 100755 index 000000000..efc325ce3 --- /dev/null +++ b/cmake/sanitize-helpers.cmake @@ -0,0 +1,178 @@ +# The MIT License (MIT) +# +# Copyright (c) +# 2013 Matthew Arsenault +# 2015-2016 RWTH Aachen University, Federal Republic of Germany +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +# Helper function to get the language of a source file. +function (sanitizer_lang_of_source FILE RETURN_VAR) + get_filename_component(LONGEST_EXT "${FILE}" EXT) + # If extension is empty return. 
This can happen for extensionless headers + if("${LONGEST_EXT}" STREQUAL "") + set(${RETURN_VAR} "" PARENT_SCOPE) + return() + endif() + # Get shortest extension as some files can have dot in their names + string(REGEX REPLACE "^.*(\\.[^.]+)$" "\\1" FILE_EXT ${LONGEST_EXT}) + string(TOLOWER "${FILE_EXT}" FILE_EXT) + string(SUBSTRING "${FILE_EXT}" 1 -1 FILE_EXT) + + get_property(ENABLED_LANGUAGES GLOBAL PROPERTY ENABLED_LANGUAGES) + foreach (LANG ${ENABLED_LANGUAGES}) + list(FIND CMAKE_${LANG}_SOURCE_FILE_EXTENSIONS "${FILE_EXT}" TEMP) + if (NOT ${TEMP} EQUAL -1) + set(${RETURN_VAR} "${LANG}" PARENT_SCOPE) + return() + endif () + endforeach() + + set(${RETURN_VAR} "" PARENT_SCOPE) +endfunction () + + +# Helper function to get compilers used by a target. +function (sanitizer_target_compilers TARGET RETURN_VAR) + # Check if all sources for target use the same compiler. If a target uses + # e.g. C and Fortran mixed and uses different compilers (e.g. clang and + # gfortran) this can trigger huge problems, because different compilers may + # use different implementations for sanitizers. + set(BUFFER "") + get_target_property(TSOURCES ${TARGET} SOURCES) + foreach (FILE ${TSOURCES}) + # If expression was found, FILE is a generator-expression for an object + # library. Object libraries will be ignored. + string(REGEX MATCH "TARGET_OBJECTS:([^ >]+)" _file ${FILE}) + if ("${_file}" STREQUAL "") + sanitizer_lang_of_source(${FILE} LANG) + if (LANG) + list(APPEND BUFFER ${CMAKE_${LANG}_COMPILER_ID}) + endif () + endif () + endforeach () + + list(REMOVE_DUPLICATES BUFFER) + set(${RETURN_VAR} "${BUFFER}" PARENT_SCOPE) +endfunction () + + +# Helper function to check compiler flags for language compiler. 
+function (sanitizer_check_compiler_flag FLAG LANG VARIABLE) + + if (${LANG} STREQUAL "C") + include(CheckCCompilerFlag) + check_c_compiler_flag("${FLAG}" ${VARIABLE}) + + elseif (${LANG} STREQUAL "CXX") + include(CheckCXXCompilerFlag) + check_cxx_compiler_flag("${FLAG}" ${VARIABLE}) + + elseif (${LANG} STREQUAL "Fortran") + # CheckFortranCompilerFlag was introduced in CMake 3.x. To be compatible + # with older Cmake versions, we will check if this module is present + # before we use it. Otherwise we will define Fortran coverage support as + # not available. + include(CheckFortranCompilerFlag OPTIONAL RESULT_VARIABLE INCLUDED) + if (INCLUDED) + check_fortran_compiler_flag("${FLAG}" ${VARIABLE}) + elseif (NOT CMAKE_REQUIRED_QUIET) + message(STATUS "Performing Test ${VARIABLE}") + message(STATUS "Performing Test ${VARIABLE}" + " - Failed (Check not supported)") + endif () + endif() + +endfunction () + + +# Helper function to test compiler flags. +function (sanitizer_check_compiler_flags FLAG_CANDIDATES NAME PREFIX) + set(CMAKE_REQUIRED_QUIET ${${PREFIX}_FIND_QUIETLY}) + + get_property(ENABLED_LANGUAGES GLOBAL PROPERTY ENABLED_LANGUAGES) + foreach (LANG ${ENABLED_LANGUAGES}) + # Sanitizer flags are not dependend on language, but the used compiler. + # So instead of searching flags foreach language, search flags foreach + # compiler used. + set(COMPILER ${CMAKE_${LANG}_COMPILER_ID}) + if (COMPILER AND NOT DEFINED ${PREFIX}_${COMPILER}_FLAGS) + foreach (FLAG ${FLAG_CANDIDATES}) + if(NOT CMAKE_REQUIRED_QUIET) + message(STATUS "Try ${COMPILER} ${NAME} flag = [${FLAG}]") + endif() + + set(CMAKE_REQUIRED_FLAGS "${FLAG}") + unset(${PREFIX}_FLAG_DETECTED CACHE) + sanitizer_check_compiler_flag("${FLAG}" ${LANG} + ${PREFIX}_FLAG_DETECTED) + + if (${PREFIX}_FLAG_DETECTED) + # If compiler is a GNU compiler, search for static flag, if + # SANITIZE_LINK_STATIC is enabled. 
+ if (SANITIZE_LINK_STATIC AND (${COMPILER} STREQUAL "GNU")) + string(TOLOWER ${PREFIX} PREFIX_lower) + sanitizer_check_compiler_flag( + "-static-lib${PREFIX_lower}" ${LANG} + ${PREFIX}_STATIC_FLAG_DETECTED) + + if (${PREFIX}_STATIC_FLAG_DETECTED) + set(FLAG "-static-lib${PREFIX_lower} ${FLAG}") + endif () + endif () + + set(${PREFIX}_${COMPILER}_FLAGS "${FLAG}" CACHE STRING + "${NAME} flags for ${COMPILER} compiler.") + mark_as_advanced(${PREFIX}_${COMPILER}_FLAGS) + break() + endif () + endforeach () + + if (NOT ${PREFIX}_FLAG_DETECTED) + set(${PREFIX}_${COMPILER}_FLAGS "" CACHE STRING + "${NAME} flags for ${COMPILER} compiler.") + mark_as_advanced(${PREFIX}_${COMPILER}_FLAGS) + + message(WARNING "${NAME} is not available for ${COMPILER} " + "compiler. Targets using this compiler will be " + "compiled without ${NAME}.") + endif () + endif () + endforeach () +endfunction () + + +# Helper to assign sanitizer flags for TARGET. +function (sanitizer_add_flags TARGET NAME PREFIX) + # Get list of compilers used by target and check, if sanitizer is available + # for this target. Other compiler checks like check for conflicting + # compilers will be done in add_sanitizers function. 
+ sanitizer_target_compilers(${TARGET} TARGET_COMPILER) + list(LENGTH TARGET_COMPILER NUM_COMPILERS) + if ("${${PREFIX}_${TARGET_COMPILER}_FLAGS}" STREQUAL "") + return() + endif() + + separate_arguments(flags_list UNIX_COMMAND "${${PREFIX}_${TARGET_COMPILER}_FLAGS} ${SanBlist_${TARGET_COMPILER}_FLAGS}") + target_compile_options(${TARGET} PUBLIC ${flags_list}) + + separate_arguments(flags_list UNIX_COMMAND "${${PREFIX}_${TARGET_COMPILER}_FLAGS}") + target_link_options(${TARGET} PUBLIC ${flags_list}) + +endfunction () diff --git a/docker/Dockerfile.alpine-basic b/docker/Dockerfile.alpine-basic new file mode 100644 index 000000000..cdfc435c8 --- /dev/null +++ b/docker/Dockerfile.alpine-basic @@ -0,0 +1,59 @@ +## +# docker pull ghcr.io/crazyzlj/ccgl:alpine-basic +# or +# docker pull registry.cn-hangzhou.aliyuncs.com/ljzhu-geomodels/ccgl:alpine-basic +# +# This script is written based on PROJ and GDAL. +# https://github.com/OSGeo/PROJ/blob/master/Dockerfile +# https://github.com/OSGeo/gdal/tree/master/docker +# +# Build by yourself (DO NOT MISSING the dot at the end of the line): +# > cd CCGL +# > docker build -t ccgl:alpine-basic -f docker/Dockerfile.alpine-basic . +# +# Copyright 2022-2024 Liang-Jun Zhu + +# Use alpine as the build container +ARG ALPINE_VERSION=3.20 +FROM alpine:${ALPINE_VERSION} AS builder + +LABEL maintainer="Liang-Jun Zhu " + +# Replace alpine repository source cdn to accelarate access speed and setup build env, when necessary + +# RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.tuna.tsinghua.edu.cn/g' /etc/apk/repositories && \ +# RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g' /etc/apk/repositories && \ +RUN apk update && apk upgrade && \ + apk add --no-cache cmake make g++ + +# Copy source directory +WORKDIR /CCGL +COPY . ./ + +# # Build for release +ARG INSTALL_DIR=/CCGL/dist +RUN cd /CCGL \ + && mkdir build \ + && cd build \ + && cmake .. 
-DCMAKE_BUILD_TYPE=RelWithDebInfo -DCMAKE_INSTALL_PREFIX=${INSTALL_DIR} \ + && make -j 2 \ + && make install \ + && cd .. + +# # Build final image +FROM alpine:${ALPINE_VERSION} AS runner + +# Replace alpine repository source cdn to accelerate access speed and setup build env, when necessary +# Add gnu standard C++ library and OpenMP library for running apps based on CCGL +# RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.tuna.tsinghua.edu.cn/g' /etc/apk/repositories && \ +# RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g' /etc/apk/repositories && \ +RUN apk update && apk upgrade && \ + apk add --no-cache libstdc++ libgomp && \ + rm -rf /var/cache/apk/* + +# Order layers starting with less frequently varying ones +ARG INSTALL_DIR=/CCGL/dist +COPY --from=builder ${INSTALL_DIR}/bin/ /usr/local/bin/ +COPY --from=builder ${INSTALL_DIR}/include/ /usr/local/include/ +COPY --from=builder ${INSTALL_DIR}/lib/ /usr/local/lib/ +COPY --from=builder ${INSTALL_DIR}/share/ccgl/ /usr/local/share/ccgl/ diff --git a/docker/Dockerfile.alpine-with-gdal b/docker/Dockerfile.alpine-with-gdal new file mode 100644 index 000000000..d363443e2 --- /dev/null +++ b/docker/Dockerfile.alpine-with-gdal @@ -0,0 +1,59 @@ +## +# docker pull ghcr.io/crazyzlj/ccgl:alpine-with-gdal +# or +# docker pull registry.cn-hangzhou.aliyuncs.com/ljzhu-geomodels/ccgl:alpine-with-gdal +# +# This script is written based on PROJ and GDAL. +# https://github.com/OSGeo/PROJ/blob/master/Dockerfile +# https://github.com/OSGeo/gdal/tree/master/docker +# +# Build by yourself (DO NOT MISS the dot at the end of the line): +# > cd CCGL +# > docker build -t ccgl:alpine-with-gdal -f docker/Dockerfile.alpine-with-gdal .
+# +# Copyright 2022-2024 Liang-Jun Zhu + +# Use GDAL image tagged alpine-small- as the build container +ARG GDAL_VERSION=3.9.2 +FROM ghcr.io/osgeo/gdal:alpine-small-${GDAL_VERSION} AS builder + +LABEL maintainer="Liang-Jun Zhu " + +# Replace alpine repository source cdn to accelerate access speed and setup build env, when necessary + +# RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.tuna.tsinghua.edu.cn/g' /etc/apk/repositories && \ +# RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g' /etc/apk/repositories && \ +RUN apk update && apk upgrade && \ + apk add --no-cache cmake make g++ + +# Copy source directory +WORKDIR /CCGL +COPY . ./ + +# # Build for release +ARG INSTALL_DIR=/CCGL/dist +RUN cd /CCGL \ + && mkdir build \ + && cd build \ + && cmake .. -DCMAKE_BUILD_TYPE=RelWithDebInfo -DCMAKE_INSTALL_PREFIX=${INSTALL_DIR} \ + && make -j 2 \ + && make install \ + && cd .. + +# # Build final image +FROM ghcr.io/osgeo/gdal:alpine-small-${GDAL_VERSION} AS runner + +# Replace alpine repository source cdn to accelerate access speed and setup build env, when necessary +# Add gnu standard C++ library and OpenMP library for running apps based on CCGL +# RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.tuna.tsinghua.edu.cn/g' /etc/apk/repositories && \ +# RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g' /etc/apk/repositories && \ +RUN apk update && apk upgrade && \ + apk add --no-cache libstdc++ libgomp && \ + rm -rf /var/cache/apk/* + +# Order layers starting with less frequently varying ones +ARG INSTALL_DIR=/CCGL/dist +COPY --from=builder ${INSTALL_DIR}/bin/ /usr/local/bin/ +COPY --from=builder ${INSTALL_DIR}/include/ /usr/local/include/ +COPY --from=builder ${INSTALL_DIR}/lib/ /usr/local/lib/ +COPY --from=builder ${INSTALL_DIR}/share/ccgl/ /usr/local/share/ccgl/ diff --git a/docker/Dockerfile.alpine-with-gdal-mongodb b/docker/Dockerfile.alpine-with-gdal-mongodb new file mode 100644 index 000000000..612c47fbd --- /dev/null +++
b/docker/Dockerfile.alpine-with-gdal-mongodb @@ -0,0 +1,59 @@ +## +# docker pull ghcr.io/crazyzlj/ccgl:alpine-with-gdal-mongodb +# or +# docker pull registry.cn-hangzhou.aliyuncs.com/ljzhu-geomodels/ccgl:alpine-with-gdal-mongodb +# +# This script is written based on PROJ and GDAL. +# https://github.com/OSGeo/PROJ/blob/master/Dockerfile +# https://github.com/OSGeo/gdal/tree/master/docker +# +# Build by yourself (DO NOT MISS the dot at the end of the line): +# > cd CCGL +# > docker build -t ccgl:alpine-with-gdal-mongodb -f docker/Dockerfile.alpine-with-gdal-mongodb . +# +# Copyright 2022-2024 Liang-Jun Zhu + +# Use GDAL image tagged alpine-small- as the build container +ARG GDAL_VERSION=3.9.2 +FROM ghcr.io/osgeo/gdal:alpine-small-${GDAL_VERSION} AS builder + +LABEL maintainer="Liang-Jun Zhu " + +# Replace alpine repository source cdn to accelerate access speed and setup build env, when necessary + +# RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.tuna.tsinghua.edu.cn/g' /etc/apk/repositories && \ +# RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g' /etc/apk/repositories && \ +RUN apk update && apk upgrade && \ + apk add --no-cache cmake make g++ mongo-c-driver-dev + +# Copy source directory +WORKDIR /CCGL +COPY . ./ + +# # Build for release +ARG INSTALL_DIR=/CCGL/dist +RUN cd /CCGL \ + && mkdir build \ + && cd build \ + && cmake .. -DCMAKE_BUILD_TYPE=RelWithDebInfo -DCMAKE_INSTALL_PREFIX=${INSTALL_DIR} \ + && make -j 2 \ + && make install \ + && cd ..
+ +# # Build final image +FROM ghcr.io/osgeo/gdal:alpine-small-${GDAL_VERSION} AS runner + +# Replace alpine repository source cdn to accelerate access speed and setup build env, when necessary +# Add gnu standard C++ library and OpenMP library for running apps based on CCGL +# RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.tuna.tsinghua.edu.cn/g' /etc/apk/repositories && \ +# RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g' /etc/apk/repositories && \ +RUN apk update && apk upgrade && \ + apk add --no-cache libstdc++ libgomp mongo-c-driver && \ + rm -rf /var/cache/apk/* + +# Order layers starting with less frequently varying ones +ARG INSTALL_DIR=/CCGL/dist +COPY --from=builder ${INSTALL_DIR}/bin/ /usr/local/bin/ +COPY --from=builder ${INSTALL_DIR}/include/ /usr/local/include/ +COPY --from=builder ${INSTALL_DIR}/lib/ /usr/local/lib/ +COPY --from=builder ${INSTALL_DIR}/share/ccgl/ /usr/local/share/ccgl/ diff --git a/docker/Dockerfile.alpine-with-mongodb b/docker/Dockerfile.alpine-with-mongodb new file mode 100644 index 000000000..44c86437f --- /dev/null +++ b/docker/Dockerfile.alpine-with-mongodb @@ -0,0 +1,59 @@ +## +# docker pull ghcr.io/crazyzlj/ccgl:alpine-with-mongodb +# or +# docker pull registry.cn-hangzhou.aliyuncs.com/ljzhu-geomodels/ccgl:alpine-with-mongodb +# +# This script is written based on PROJ and GDAL. +# https://github.com/OSGeo/PROJ/blob/master/Dockerfile +# https://github.com/OSGeo/gdal/tree/master/docker +# +# Build by yourself (DO NOT MISS the dot at the end of the line): +# > cd CCGL +# > docker build -t ccgl:alpine-with-mongodb -f docker/Dockerfile.alpine-with-mongodb .
+# +# Copyright 2022-2024 Liang-Jun Zhu + +# Use alpine as the build container +ARG ALPINE_VERSION=3.20 +FROM alpine:${ALPINE_VERSION} AS builder + +LABEL maintainer="Liang-Jun Zhu " + +# Replace alpine repository source cdn to accelerate access speed and setup build env, when necessary + +# RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.tuna.tsinghua.edu.cn/g' /etc/apk/repositories && \ +# RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g' /etc/apk/repositories && \ +RUN apk update && apk upgrade && \ + apk add --no-cache cmake make g++ mongo-c-driver-dev + +# Copy source directory +WORKDIR /CCGL +COPY . ./ + +# # Build for release +ARG INSTALL_DIR=/CCGL/dist +RUN cd /CCGL \ + && mkdir build \ + && cd build \ + && cmake .. -DCMAKE_BUILD_TYPE=RelWithDebInfo -DCMAKE_INSTALL_PREFIX=${INSTALL_DIR} \ + && make -j 2 \ + && make install \ + && cd .. + +# # Build final image +FROM alpine:${ALPINE_VERSION} AS runner + +# Replace alpine repository source cdn to accelerate access speed and setup build env, when necessary +# Add gnu standard C++ library and OpenMP library for running apps based on CCGL +# RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.tuna.tsinghua.edu.cn/g' /etc/apk/repositories && \ +# RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g' /etc/apk/repositories && \ +RUN apk update && apk upgrade && \ + apk add --no-cache libstdc++ libgomp mongo-c-driver && \ + rm -rf /var/cache/apk/* + +# Order layers starting with less frequently varying ones +ARG INSTALL_DIR=/CCGL/dist +COPY --from=builder ${INSTALL_DIR}/bin/ /usr/local/bin/ +COPY --from=builder ${INSTALL_DIR}/include/ /usr/local/include/ +COPY --from=builder ${INSTALL_DIR}/lib/ /usr/local/lib/ +COPY --from=builder ${INSTALL_DIR}/share/ccgl/ /usr/local/share/ccgl/ diff --git a/docker/README.md b/docker/README.md index 1f6e1806c..26448ef65 100644 --- a/docker/README.md +++ b/docker/README.md @@ -15,38 +15,21 @@ docker build --progress=plain -t crazyzlj/ccgl:-test -f docker/test//D ### CCGL with the
support of MongoDB -See the [official tutorial](https://www.mongodb.com/compatibility/docker). - -+ Select a proper tag of the official mongo image in [supported-tags-and-respective-dockerfile-links](https://github.com/docker-library/docs/blob/master/mongo/README.md#supported-tags-and-respective-dockerfile-links), e.g., `4.4.14-focal`. -+ Start a mongo server instance: `docker run --name some-mongo -d -p :27017 mongo:tag`, -where `some-mongo` is the name you want to assign to your container and -`tag` is the tag specifying the MongoDB version you want, -`client_port` is the port you want to exposed to your localhost, e.g., `27017` or any other legal number. -For example, - + `docker network create docker-mongo-network` - + `docker run -d -p 27018:27017 --name mongodb-docker mongo:4.4.14-focal` - + `docker ps` will show something like: - ``` - CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES - 6cef37edee6a mongo:4.4.14-focal "docker-entrypoint.s…" 5 seconds ago Up 4 seconds 0.0.0.0:27018->27017/tcp mongodb-docker - ``` +See the [official tutorial: Docker and MongoDB](https://www.mongodb.com/resources/products/compatibilities/docker). + ++ Select a proper tag of the official [mongodb-community-server image](https://hub.docker.com/r/mongodb/mongodb-community-server), e.g., `mongodb/mongodb-community-server:4.4.28-ubuntu2004`. ++ Create a docker network for the MongoDB server and other docker applications that want to use this MongoDB: `docker network create docker-mongo-network` ++ Start a mongodb server under the above docker network: `docker run -d --rm --name mongodb-docker --network docker-mongo-network -p :27017 -v :/data/db mongodb/mongodb-community-server:4.4.28-ubuntu2004 mongod --bind_ip localhost,mongodb-docker` + + `` means the local directory (path in English and without spaces) to store MongoDB data + + `client_port` is the port you want to expose to your localhost, e.g., `27017` or any other legal number.
+ Run `docker logs mongodb-docker` to check the logs which provide a wealth of useful information. - + Now, you can connect to the MongoDB running in the container from *the host* host via `localhost:27018`. - -+ Mount data directory of host using `-v :/data/db`, e.g., -`docker run -d -v /Users/ljzhu/Documents/data/docker_mongodata:/data/db -p 27017:27017 --name mongodb-docker mongo:4.4.14-focal` - -+ There are couple of ways to do [Connect to MongoDB from another Docker container](https://github.com/docker-library/docs/blob/master/mongo/README.md#connect-to-mongodb-from-another-docker-container). -Also refers to one userful answer in [stackflow](https://stackoverflow.com/a/43962099). - + (Currently tested) Use mongodb container ip address: `docker inspect -f '{{.NetworkSettings.IPAddress}}' mongo-docker`. - The command will output the IP of the mongodb container which can be used in another application's container. - For example, `./unittestd -host 172.17.0.3 -port 27017` - - The IP of the mongodb container can be passed as an argument when build another docker image, - also see [here](https://stackoverflow.com/a/34254700): - - `docker build --progress=plain -t crazyzlj/ccgl:alpine-with-mongodb-test --build-arg host=172.17.0.2 --build-arg port=27017 -f docker/test/alpine-with-mongodb/Dockerfile .` - + ... + + Now, you can connect to the MongoDB running in the container from *the host* via `localhost:` (e.g., using Robo 3T, VSCode with MongoDB plugin). ++ Check the IP address of the MongoDB container within the above defined docker network: `docker network inspect docker-mongo-network`, e.g., `172.18.0.2` ++ Run the `ccgl` container within the same docker network and pass the IP address as arguments: `docker run --rm -v $PWD/data:/data --net docker-mongo-network registry.cn-hangzhou.aliyuncs.com/ljzhu-geomodels/ccgl:dev-alpine-with-gdal-mongodb mask_rasterio -in /data/raster/dem_1.tif -out gfs dem_1 -mongo 172.18.0.2 27017 test spatial`.
+ +> References: +> + [Connect to MongoDB from another Docker container](https://github.com/docker-library/docs/blob/master/mongo/README.md#connect-to-mongodb-from-another-docker-container). +> + An useful answer in [stackflow](https://stackoverflow.com/a/43962099). ## Release docker images diff --git a/docker/alpine-with-gdal-mongodb/Dockerfile b/docker/alpine-with-gdal-mongodb/Dockerfile deleted file mode 100644 index 37a8de4bc..000000000 --- a/docker/alpine-with-gdal-mongodb/Dockerfile +++ /dev/null @@ -1,61 +0,0 @@ -## -# crazyzlj/ccgl:alpine-with-gdal-mongodb -# -# This script is written based on PROJ and GDAL. -# https://github.com/OSGeo/PROJ/blob/master/Dockerfile -# https://github.com/OSGeo/gdal/tree/master/docker -# -# Usage: -# > cd CCGL -# > docker build -t -f docker/apline-with-gdal-mongodb/Dockerfile . -# -# Copyright 2022 Liang-Jun Zhu - -# Use GDAL image with the tag of alpine-small-latest as the build and test container -ARG GDAL_VERSION=latest -FROM osgeo/gdal:alpine-small-${GDAL_VERSION} as builder - -LABEL maintainer="Liang-Jun Zhu " - -# Replace alpine repository source cdn -# RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.tuna.tsinghua.edu.cn/g' /etc/apk/repositories -RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g' /etc/apk/repositories && apk update && apk upgrade - -# Setup build env -RUN apk add --no-cache cmake make g++ mongo-c-driver-dev - -# Copy source directory -WORKDIR /CCGL -COPY CMakeLists.txt . -COPY cmake cmake -COPY src src -COPY apps apps -COPY test test -COPY data data - -# # Build for release -ARG INSTALL_DIR=/CCGL/dist -RUN cd /CCGL \ - && mkdir build \ - && cd build \ - && cmake .. -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=${INSTALL_DIR} \ - && make -j 2 \ - && make install \ - && cd .. 
- -# # Build final image -FROM osgeo/gdal:alpine-small-${GDAL_VERSION} as runner - -# Replace alpine repository source cdn -# RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.tuna.tsinghua.edu.cn/g' /etc/apk/repositories -RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g' /etc/apk/repositories && apk update && apk upgrade - -# Add OpenMP library for running apps based on CCGL -RUN apk add --no-cache libgomp mongo-c-driver - -# Order layers starting with less frequently varying ones -ARG INSTALL_DIR=/CCGL/dist -COPY --from=builder ${INSTALL_DIR}/bin/ /usr/bin/ -COPY --from=builder ${INSTALL_DIR}/include/ /usr/include/ -COPY --from=builder ${INSTALL_DIR}/lib/ /usr/lib/ -COPY --from=builder ${INSTALL_DIR}/share/ccgl/ /usr/share/ccgl/ diff --git a/docker/alpine-with-gdal/Dockerfile b/docker/alpine-with-gdal/Dockerfile deleted file mode 100644 index dd9c8d20c..000000000 --- a/docker/alpine-with-gdal/Dockerfile +++ /dev/null @@ -1,61 +0,0 @@ -## -# crazyzlj/ccgl:alpine-with-gdal -# -# This script is written based on PROJ and GDAL. -# https://github.com/OSGeo/PROJ/blob/master/Dockerfile -# https://github.com/OSGeo/gdal/tree/master/docker -# -# Usage: -# > cd CCGL -# > docker build -t -f docker/apline-with-gdal/Dockerfile . -# -# Copyright 2022 Liang-Jun Zhu - -# Use GDAL image with the tag of alpine-small-latest as the build and test container -ARG GDAL_VERSION=latest -FROM osgeo/gdal:alpine-small-${GDAL_VERSION} as builder - -LABEL maintainer="Liang-Jun Zhu " - -# Replace alpine repository source cdn -# RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.tuna.tsinghua.edu.cn/g' /etc/apk/repositories -RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g' /etc/apk/repositories && apk update && apk upgrade - -# Setup build env -RUN apk add --no-cache cmake make g++ - -# Copy source directory -WORKDIR /CCGL -COPY CMakeLists.txt . 
-COPY cmake cmake -COPY src src -COPY apps apps -COPY test test -COPY data data - -# # Build for release -ARG INSTALL_DIR=/CCGL/dist -RUN cd /CCGL \ - && mkdir build \ - && cd build \ - && cmake .. -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=${INSTALL_DIR} \ - && make -j 2 \ - && make install \ - && cd .. - -# # Build final image -FROM osgeo/gdal:alpine-small-${GDAL_VERSION} as runner - -# Replace alpine repository source cdn -# RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.tuna.tsinghua.edu.cn/g' /etc/apk/repositories -RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g' /etc/apk/repositories && apk update && apk upgrade - -# Add OpenMP library for running apps based on CCGL -RUN apk add --no-cache libgomp - -# Order layers starting with less frequently varying ones -ARG INSTALL_DIR=/CCGL/dist -COPY --from=builder ${INSTALL_DIR}/bin/ /usr/bin/ -COPY --from=builder ${INSTALL_DIR}/include/ /usr/include/ -COPY --from=builder ${INSTALL_DIR}/lib/ /usr/lib/ -COPY --from=builder ${INSTALL_DIR}/share/ccgl/ /usr/share/ccgl/ diff --git a/docker/alpine-with-mongodb/Dockerfile b/docker/alpine-with-mongodb/Dockerfile deleted file mode 100644 index 28bac6af6..000000000 --- a/docker/alpine-with-mongodb/Dockerfile +++ /dev/null @@ -1,61 +0,0 @@ -## -# crazyzlj/ccgl:alpine-with-mongodb -# -# This script is written based on PROJ and GDAL. -# https://github.com/OSGeo/PROJ/blob/master/Dockerfile -# https://github.com/OSGeo/gdal/tree/master/docker -# -# Usage: -# > cd CCGL -# > docker build -t -f docker/apline-with-mongodb/Dockerfile . 
-# -# Copyright 2022 Liang-Jun Zhu - -# Use alpine as the build and test container -ARG ALPINE_VERSION=3.15 -FROM alpine:${ALPINE_VERSION} as builder - -LABEL maintainer="Liang-Jun Zhu " - -# Replace alpine repository source cdn -# RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.tuna.tsinghua.edu.cn/g' /etc/apk/repositories -RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g' /etc/apk/repositories && apk update && apk upgrade - -# Setup build env -RUN apk add --no-cache cmake make g++ mongo-c-driver-dev - -# Copy source directory -WORKDIR /CCGL -COPY CMakeLists.txt . -COPY cmake cmake -COPY src src -COPY apps apps -COPY test test -COPY data data - -# # Build for release -ARG INSTALL_DIR=/CCGL/dist -RUN cd /CCGL \ - && mkdir build \ - && cd build \ - && cmake .. -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=${INSTALL_DIR} \ - && make -j 2 \ - && make install \ - && cd .. - -# # Build final image -FROM alpine:${ALPINE_VERSION} as runner - -# Replace alpine repository source cdn -# RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.tuna.tsinghua.edu.cn/g' /etc/apk/repositories -RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g' /etc/apk/repositories && apk update && apk upgrade - -# Add gnu standard C++ library and OpenMP library for running apps based on CCGL -RUN apk add --no-cache libstdc++ libgomp mongo-c-driver - -# Order layers starting with less frequently varying ones -ARG INSTALL_DIR=/CCGL/dist -COPY --from=builder ${INSTALL_DIR}/bin/ /usr/bin/ -COPY --from=builder ${INSTALL_DIR}/include/ /usr/include/ -COPY --from=builder ${INSTALL_DIR}/lib/ /usr/lib/ -COPY --from=builder ${INSTALL_DIR}/share/ccgl/ /usr/share/ccgl/ diff --git a/docker/alpine/Dockerfile b/docker/alpine/Dockerfile deleted file mode 100644 index 116229287..000000000 --- a/docker/alpine/Dockerfile +++ /dev/null @@ -1,61 +0,0 @@ -## -# crazyzlj/ccgl:alpine -# -# This script is written based on PROJ and GDAL. 
-# https://github.com/OSGeo/PROJ/blob/master/Dockerfile -# https://github.com/OSGeo/gdal/tree/master/docker -# -# Usage: -# > cd CCGL -# > docker build -t -f docker/apline/Dockerfile . -# -# Copyright 2022 Liang-Jun Zhu - -# Use alpine as the build container -ARG ALPINE_VERSION=3.15 -FROM alpine:${ALPINE_VERSION} as builder - -LABEL maintainer="Liang-Jun Zhu " - -# Replace alpine repository source cdn to accelarate access speed -# RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.tuna.tsinghua.edu.cn/g' /etc/apk/repositories -RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g' /etc/apk/repositories && apk update && apk upgrade - -# Setup build env -RUN apk add --no-cache cmake make g++ - -# Copy source directory -WORKDIR /CCGL -COPY CMakeLists.txt . -COPY cmake cmake -COPY src src -COPY apps apps -COPY test test -COPY data data - -# # Build for release -ARG INSTALL_DIR=/CCGL/dist -RUN cd /CCGL \ - && mkdir build \ - && cd build \ - && cmake .. -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=${INSTALL_DIR} \ - && make -j 2 \ - && make install \ - && cd .. 
- -# # Build final image -FROM alpine:${ALPINE_VERSION} as runner - -# Replace alpine repository source cdn -# RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.tuna.tsinghua.edu.cn/g' /etc/apk/repositories -RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g' /etc/apk/repositories && apk update && apk upgrade - -# Add gnu standard C++ library and OpenMP library for running apps based on CCGL -RUN apk add --no-cache libstdc++ libgomp - -# Order layers starting with less frequently varying ones -ARG INSTALL_DIR=/CCGL/dist -COPY --from=builder ${INSTALL_DIR}/bin/ /usr/bin/ -COPY --from=builder ${INSTALL_DIR}/include/ /usr/include/ -COPY --from=builder ${INSTALL_DIR}/lib/ /usr/lib/ -COPY --from=builder ${INSTALL_DIR}/share/ccgl/ /usr/share/ccgl/ diff --git a/docker/test/alpine-with-gdal-mongodb/Dockerfile b/docker/test/alpine-with-gdal-mongodb/Dockerfile deleted file mode 100644 index a3edfef02..000000000 --- a/docker/test/alpine-with-gdal-mongodb/Dockerfile +++ /dev/null @@ -1,48 +0,0 @@ -## -# crazyzlj/ccgl:alpine-with-gdal-mongodb-test for test locally -# -# This script is written based on PROJ and GDAL. -# https://github.com/OSGeo/PROJ/blob/master/Dockerfile -# https://github.com/OSGeo/gdal/tree/master/docker -# -# Usage: -# > cd CCGL -# > docker build -t -f docker/test/apline-with-gdal-mongodb/Dockerfile . -# -# Copyright 2022 Liang-Jun Zhu - -# Use GDAL image with the tag of alpine-small-latest as the build and test container -ARG GDAL_VERSION=latest -FROM osgeo/gdal:alpine-small-${GDAL_VERSION} as builder - -LABEL maintainer="Liang-Jun Zhu " - -# Specify MongoDB server -ARG host=172.17.0.3 -ARG port=27017 -RUN echo ">>>>>>>>>>>> Using MongoDB on another docker: ${host}:${port}" - -# Replace alpine repository source cdn -RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.tuna.tsinghua.edu.cn/g' /etc/apk/repositories - -# Setup build env -RUN apk add --no-cache cmake make g++ mongo-c-driver-dev - -# Copy source directory -WORKDIR /CCGL -COPY CMakeLists.txt . 
-COPY cmake cmake -COPY src src -COPY apps apps -COPY test test -COPY data data - -# Build with GoogleTest and run unittest -RUN cd /CCGL \ - && mkdir -p build_with_test \ - && cd build_with_test \ - && cmake -DCMAKE_BUILD_TYPE=Debug -DUNITTEST=1 -DHOST=${host} -DPORT=${port} -DCMAKE_INSTALL_PREFIX=/usr/local .. \ - && make -j 2 \ - && ctest -C Debug --rerun-failed --output-on-failure \ - && make install \ - && cd .. diff --git a/docker/test/alpine-with-gdal/Dockerfile b/docker/test/alpine-with-gdal/Dockerfile deleted file mode 100644 index 6109fd1f7..000000000 --- a/docker/test/alpine-with-gdal/Dockerfile +++ /dev/null @@ -1,43 +0,0 @@ -## -# crazyzlj/ccgl:alpine-with-gdal-test for test locally -# -# This script is written based on PROJ and GDAL. -# https://github.com/OSGeo/PROJ/blob/master/Dockerfile -# https://github.com/OSGeo/gdal/tree/master/docker -# -# Usage: -# > cd CCGL -# > docker build -t -f docker/test/apline-with-gdal/Dockerfile . -# -# Copyright 2022 Liang-Jun Zhu - -# Use GDAL image with the tag of alpine-small-latest as the build and test container -ARG GDAL_VERSION=latest -FROM osgeo/gdal:alpine-small-${GDAL_VERSION} as builder - -LABEL maintainer="Liang-Jun Zhu " - -# Replace alpine repository source cdn -RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.tuna.tsinghua.edu.cn/g' /etc/apk/repositories - -# Setup build env -RUN apk add --no-cache cmake make g++ - -# Copy source directory -WORKDIR /CCGL -COPY CMakeLists.txt . -COPY cmake cmake -COPY src src -COPY apps apps -COPY test test -COPY data data - -# Build with GoogleTest and run unittest -RUN cd /CCGL \ - && mkdir -p build_with_test \ - && cd build_with_test \ - && cmake -DCMAKE_BUILD_TYPE=Debug -DUNITTEST=1 -DCMAKE_INSTALL_PREFIX=/usr/local .. \ - && make -j 2 \ - && ctest -C Debug --rerun-failed --output-on-failure \ - && make install \ - && cd .. 
diff --git a/docker/test/alpine-with-mongodb/Dockerfile b/docker/test/alpine-with-mongodb/Dockerfile deleted file mode 100644 index 63ff9f3d4..000000000 --- a/docker/test/alpine-with-mongodb/Dockerfile +++ /dev/null @@ -1,48 +0,0 @@ -## -# crazyzlj/ccgl:alpine-with-mongodb-test for test locally -# -# This script is written based on PROJ and GDAL. -# https://github.com/OSGeo/PROJ/blob/master/Dockerfile -# https://github.com/OSGeo/gdal/tree/master/docker -# -# Usage: -# > cd CCGL -# > docker build -t --build-arg host= -f docker/test/apline-with-mongodb/Dockerfile . -# -# Copyright 2022 Liang-Jun Zhu - -# Use alpine as the build and test container -ARG ALPINE_VERSION=3.15 -FROM alpine:${ALPINE_VERSION} as builder - -LABEL maintainer="Liang-Jun Zhu " - -# Specify MongoDB server -ARG host=172.17.0.3 -ARG port=27017 -RUN echo ">>>>>>>>>>>> Using MongoDB on another docker: ${host}:${port}" - -# Replace alpine repository source cdn -RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.tuna.tsinghua.edu.cn/g' /etc/apk/repositories - -# Setup build env -RUN apk add --no-cache cmake make g++ mongo-c-driver-dev - -# Copy source directory -WORKDIR /CCGL -COPY CMakeLists.txt . -COPY cmake cmake -COPY src src -COPY apps apps -COPY test test -COPY data data - -# Build with GoogleTest and run unittest -RUN cd /CCGL \ - && mkdir -p build_with_test \ - && cd build_with_test \ - && cmake -DCMAKE_BUILD_TYPE=Debug -DUNITTEST=1 -DHOST=${host} -DPORT=${port} -DCMAKE_INSTALL_PREFIX=/usr/local .. \ - && make -j 2 \ - && ctest -C Debug --rerun-failed --output-on-failure \ - && make install \ - && cd .. diff --git a/docker/test/alpine/Dockerfile b/docker/test/alpine/Dockerfile deleted file mode 100644 index 7e6060fdc..000000000 --- a/docker/test/alpine/Dockerfile +++ /dev/null @@ -1,44 +0,0 @@ -## -# crazyzlj/ccgl:alpine-test for test locally -# -# This script is written based on PROJ and GDAL. 
-# https://github.com/OSGeo/PROJ/blob/master/Dockerfile -# https://github.com/OSGeo/gdal/tree/master/docker -# -# Usage: -# > cd CCGL -# > docker build -t -f docker/test/apline/Dockerfile . -# -# Copyright 2022 Liang-Jun Zhu - -# Use alpine as the build and test container -ARG ALPINE_VERSION=3.15 -FROM alpine:${ALPINE_VERSION} as builder - -LABEL maintainer="Liang-Jun Zhu " - -# Replace alpine repository source cdn -RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.tuna.tsinghua.edu.cn/g' /etc/apk/repositories - -# Setup build env -RUN apk add --no-cache cmake make g++ - -# Copy source directory -WORKDIR /CCGL -COPY CMakeLists.txt . -COPY cmake cmake -COPY src src -COPY apps apps -COPY test test -COPY data data - -# Build with GoogleTest and run unittest -RUN cd /CCGL \ - && mkdir -p build_with_test \ - && cd build_with_test \ - && cmake -DCMAKE_BUILD_TYPE=Debug -DUNITTEST=1 -DCMAKE_INSTALL_PREFIX=/usr/local .. \ - && make -j 2 \ - && ctest -C Debug --rerun-failed --output-on-failure \ - && make install \ - && cd .. - diff --git a/src/data_raster.cpp b/src/data_raster.cpp index 7842cea17..d9b6bb073 100644 --- a/src/data_raster.cpp +++ b/src/data_raster.cpp @@ -4,6 +4,7 @@ * * \remarks * - 1. Apr. 2022 - lj - Separated from clsRasterData class for widely use. + * - 2. Aug. 
2023 - lj - Add GDAL data types added from versions 3.5 and 3.7 * * \author Liangjun Zhu, zlj(at)lreis.ac.cn */ @@ -20,6 +21,8 @@ string RasterDataTypeToString(const int type) { case RDT_Int16: return("INT16"); // 16-bit signed integer case RDT_UInt32: return("UINT32"); // 32-bit unsigned integer case RDT_Int32: return("INT32"); // 32-bit signed integer + case RDT_UInt64: return("UINT64"); // 64-bit unsigned integer + case RDT_Int64: return("INT64"); // 64-bit signed integer case RDT_Float: return("FLOAT"); // 32-bit floating point case RDT_Double: return("DOUBLE"); // 64-bit floating point default: return("Unknown"); // All others @@ -29,11 +32,14 @@ string RasterDataTypeToString(const int type) { RasterDataType StringToRasterDataType(const string& stype) { if (StringMatch(stype, "UCHAR") || StringMatch(stype, "UINT8") || StringMatch(stype, "GDT_Byte")) { return RDT_UInt8; } - if (StringMatch(stype, "CHAR") || StringMatch(stype, "INT8")) { return RDT_Int8; } + if (StringMatch(stype, "CHAR") || StringMatch(stype, "INT8") + || StringMatch(stype, "GDT_Int8")) { return RDT_Int8; } if (StringMatch(stype, "UINT16") || StringMatch(stype, "GDT_UInt16")) { return RDT_UInt16; } if (StringMatch(stype, "INT16") || StringMatch(stype, "GDT_Int16")) { return RDT_Int16; } if (StringMatch(stype, "UINT32") || StringMatch(stype, "GDT_UInt32")) { return RDT_UInt32; } if (StringMatch(stype, "INT32") || StringMatch(stype, "GDT_Int32")) { return RDT_Int32; } + if (StringMatch(stype, "UINT64") || StringMatch(stype, "GDT_UInt64")) { return RDT_UInt64; } + if (StringMatch(stype, "INT64") || StringMatch(stype, "GDT_Int64")) { return RDT_Int64; } if (StringMatch(stype, "FLOAT") || StringMatch(stype, "GDT_Float32")) { return RDT_Float; } if (StringMatch(stype, "DOUBLE") || StringMatch(stype, "GDT_Float64")) { return RDT_Double; } return RDT_Unknown; @@ -46,6 +52,8 @@ RasterDataType TypeToRasterDataType(const std::type_info& t) { if (t == typeid(vint16_t)) { return RDT_Int16; } if (t == 
typeid(vuint32_t)) { return RDT_UInt32; } if (t == typeid(vint32_t)) { return RDT_Int32; } + if (t == typeid(vuint64_t)) { return RDT_UInt64; } + if (t == typeid(vint64_t)) { return RDT_Int64; } if (t == typeid(float)) { return RDT_Float; } if (t == typeid(double)) { return RDT_Double; } return RDT_Unknown; @@ -60,6 +68,8 @@ double DefaultNoDataByType(const RasterDataType type) { case RDT_Int16: return INT16_MIN; // 16-bit signed integer case RDT_UInt32: return UINT32_MAX; // 32-bit unsigned integer case RDT_Int32: return INT32_MIN; // 32-bit signed integer + case RDT_UInt64: return UINT64_MAX; // 64-bit unsigned integer + case RDT_Int64: return INT64_MIN; // 64-bit signed integer case RDT_Float: return MISSINGFLOAT; // 32-bit floating point case RDT_Double: return MISSINGFLOAT; // 64-bit floating point default: return NODATA_VALUE; // All others @@ -70,12 +80,21 @@ double DefaultNoDataByType(const RasterDataType type) { GDALDataType CvtToGDALDataType(const RasterDataType type) { switch (type) { case RDT_Unknown: return GDT_Unknown; // Unknown - case RDT_Int8: return GDT_Byte; // 8-bit signed integer is not initially supported by GDAL! case RDT_UInt8: return GDT_Byte; // 8-bit unsigned integer + case RDT_Int8: +#if GDAL_VERSION_MAJOR > 3 || (GDAL_VERSION_MAJOR == 3 && GDAL_VERSION_MINOR >= 7) + return GDT_Int8; // 8-bit signed integer is natively supported since GDAL 3.7 +#else + return GDT_Byte; // fall back to GDT_Byte: 8-bit signed integer is not supported by GDAL<3.7
+#endif case RDT_UInt16: return GDT_UInt16; // 16-bit unsigned integer case RDT_Int16: return GDT_Int16; // 16-bit signed integer case RDT_UInt32: return GDT_UInt32; // 32-bit unsigned integer case RDT_Int32: return GDT_Int32; // 32-bit signed integer +#if GDAL_VERSION_MAJOR > 3 || (GDAL_VERSION_MAJOR == 3 && GDAL_VERSION_MINOR >= 5) + case RDT_UInt64: return GDT_UInt64; // 64-bit unsigned integer + case RDT_Int64: return GDT_Int64; // 64-bit signed integer +#endif case RDT_Float: return GDT_Float32; // 32-bit floating point case RDT_Double: return GDT_Float64; // 64-bit floating point default: return GDT_Unknown; // All others @@ -283,7 +302,9 @@ bool SubsetPositions::ReadFromMongoDB(MongoGridFs* gfs, const string& fname, con n_lyrs = db_nlyrs; if (n_lyrs == 1) { if (n_cells == db_ncells) { - return SetData(db_ncells, dbdata); + bool set_success = SetData(db_ncells, dbdata); + Release1DArray(dbdata); + return set_success; } if (nullptr == data_) { Initialize1DArray(n_cells, data_, NODATA_VALUE); diff --git a/src/data_raster.hpp b/src/data_raster.hpp index 8ab6d3313..ba1d59610 100644 --- a/src/data_raster.hpp +++ b/src/data_raster.hpp @@ -23,9 +23,10 @@ * -11. Apr. 2022 lj Comprehensive functional testing, bug fixing, and robustness improving. * Add subset feature to support data decomposition and combination. * -12. Jul. 2023 lj Add valid position index (1D array, pos_idx_) and will remove pos_data_ in next version. + * -13. Aug.
2023 lj Add GDAL data types added from versions 3.5 and 3.7 * * \author Liangjun Zhu, zlj(at)lreis.ac.cn - * \version 2.7 + * \version 2.8 */ #ifndef CCGL_DATA_RASTER_H #define CCGL_DATA_RASTER_H @@ -118,15 +119,17 @@ typedef std::pair XY_COOR; /// Coordinate pair * \brief Raster data types follows GDALDataType */ typedef enum { - RDT_Unknown, - RDT_UInt8, - RDT_Int8, - RDT_UInt16, - RDT_Int16, - RDT_UInt32, - RDT_Int32, - RDT_Float, - RDT_Double + RDT_Unknown, ///< GDT_Unknown + RDT_UInt8, ///< GDT_Byte + RDT_Int8, ///< GDT_Int8, GDAL>=3.7 + RDT_UInt16, ///< GDT_UInt16 + RDT_Int16, ///< GDT_Int16 + RDT_UInt32, ///< GDT_UInt32 + RDT_Int32, ///< GDT_Int32 + RDT_UInt64, ///< GDT_UInt64, GDAL>=3.5 + RDT_Int64, ///< GDT_Int64, GDAL>=3.5 + RDT_Float, ///< GDT_Float32 + RDT_Double ///< GDT_Float64 } RasterDataType; /** Common functions independent to clsRasterData **/ @@ -371,12 +374,14 @@ bool ReadRasterFileByGdal(const string& filename, STRDBL_MAP& header, T*& values double minmax[2]; T* tmprasterdata = nullptr; bool read_as_signedbyte = false; - signed char* char_data = nullptr; // DO NOT use char* unsigned char* uchar_data = nullptr; + signed char* char_data = nullptr; // DO NOT use char* vuint16_t* uint16_data = nullptr; // 16-bit unsigned integer vint16_t* int16_data = nullptr; // 16-bit signed integer vuint32_t* uint32_data = nullptr; // 32-bit unsigned integer vint32_t* int32_data = nullptr; // 32-bit signed integer + vuint64_t* uint64_data = nullptr; // 64-bit unsigned integer + vint64_t* int64_data = nullptr; // 64-bit signed integer float* float_data = nullptr; double* double_data = nullptr; CPLErr result; @@ -391,9 +396,11 @@ bool ReadRasterFileByGdal(const string& filename, STRDBL_MAP& header, T*& values // 2) maximum <= 127 and minimum >= 0 and no_data_value_ < 0 ==> signed char // Otherwise, unsigned char. // + // Update (08/09/2023): GDAL>=3.7 added the support of GDT_Int8. Keep this code for compatibility! 
+ // po_band->ComputeRasterMinMax(approx_minmax, minmax); if ((minmax[1] <= 127 && minmax[0] < 0) - || (minmax[1] <= 127 && minmax[0] >= 0 && (!get_value_flag || get_value_flag && nodata < 0))) { + || (minmax[1] <= 127 && minmax[0] >= 0 && (!get_value_flag || (get_value_flag && nodata < 0)))) { read_as_signedbyte = true; } uchar_data = static_cast(CPLMalloc(sizeof(unsigned char) * n_cols * n_rows)); @@ -417,6 +424,21 @@ bool ReadRasterFileByGdal(const string& filename, STRDBL_MAP& header, T*& values } CPLFree(uchar_data); break; +#if GDAL_VERSION_MAJOR >= 3 && GDAL_VERSION_MINOR >= 7 + case GDT_Int8: + char_data = static_cast(CPLMalloc(sizeof(signed char) * n_cols * n_rows)); + result = po_band->RasterIO(GF_Read, 0, 0, n_cols, n_rows, char_data, + n_cols, n_rows, GDT_Int8, 0, 0); + if (result != CE_None) { + StatusMessage("RaterIO trouble: " + string(CPLGetLastErrorMsg())); + GDALClose(po_dataset); + return false; + } + Initialize1DArray(n_rows * n_cols, tmprasterdata, char_data); + CPLFree(char_data); + in_type = RDT_Int8; + break; +#endif case GDT_UInt16: uint16_data = static_cast(CPLMalloc(sizeof(vuint16_t) * n_cols * n_rows)); result = po_band->RasterIO(GF_Read, 0, 0, n_cols, n_rows, uint16_data, @@ -469,6 +491,34 @@ bool ReadRasterFileByGdal(const string& filename, STRDBL_MAP& header, T*& values CPLFree(int32_data); in_type = RDT_Int32; break; +#if GDAL_VERSION_MAJOR >= 3 && GDAL_VERSION_MINOR >= 5 + case GDT_UInt64: + uint64_data = static_cast(CPLMalloc(sizeof(vuint64_t) * n_cols * n_rows)); + result = po_band->RasterIO(GF_Read, 0, 0, n_cols, n_rows, uint64_data, + n_cols, n_rows, GDT_UInt64, 0, 0); + if (result != CE_None) { + StatusMessage("RaterIO trouble: " + string(CPLGetLastErrorMsg())); + GDALClose(po_dataset); + return false; + } + Initialize1DArray(n_rows * n_cols, tmprasterdata, uint64_data); + CPLFree(uint64_data); + in_type = RDT_UInt64; + break; + case GDT_Int64: + int64_data = static_cast(CPLMalloc(sizeof(vint64_t) * n_cols * n_rows)); + result 
= po_band->RasterIO(GF_Read, 0, 0, n_cols, n_rows, int64_data, + n_cols, n_rows, GDT_Int64, 0, 0); + if (result != CE_None) { + StatusMessage("RaterIO trouble: " + string(CPLGetLastErrorMsg())); + GDALClose(po_dataset); + return false; + } + Initialize1DArray(n_rows * n_cols, tmprasterdata, int64_data); + CPLFree(int64_data); + in_type = RDT_Int64; + break; +#endif case GDT_Float32: float_data = static_cast(CPLMalloc(sizeof(float) * n_cols * n_rows)); result = po_band->RasterIO(GF_Read, 0, 0, n_cols, n_rows, float_data, @@ -574,7 +624,9 @@ bool WriteSingleGeotiff(const string& filename, const STRDBL_MAP& header, } else if (outtype == RDT_Int8) { // [-128, 127] // https://gdal.org/drivers/raster/gtiff.html +#if GDAL_VERSION_MAJOR < 3 || (GDAL_VERSION_MAJOR == 3 && GDAL_VERSION_MINOR < 7) papsz_options = CSLSetNameValue(papsz_options, "PIXELTYPE", "SIGNEDBYTE"); +#endif new_values = static_cast(CPLMalloc(sizeof(signed char) * n_cols * n_rows)); signed char* values_char = static_cast(new_values); if (old_nodata < INT8_MIN || old_nodata > INT8_MAX) { @@ -669,6 +721,46 @@ bool WriteSingleGeotiff(const string& filename, const STRDBL_MAP& header, } if (illegal_count > 0) convert_permit = false; } +#if GDAL_VERSION_MAJOR >= 3 && GDAL_VERSION_MINOR >=5 + else if (outtype == RDT_UInt64) { // [0, 18446744073709551615] + new_values = static_cast(CPLMalloc(sizeof(vuint64_t) * n_cols * n_rows)); + vuint64_t* values_uint64 = static_cast(new_values); + if (old_nodata < 0 || old_nodata > UINT64_MAX) { + new_nodata = UINT64_MAX; + change_nodata = true; + } + int illegal_count = 0; +#pragma omp parallel for reduction(+:illegal_count) + for (int i = 0; i < n_cols * n_rows; i++) { + if (FloatEqual(values[i], old_nodata) && change_nodata) { + values_uint64[i] = UINT64_MAX; + continue; + } + if (values[i] < 0 || values[i] > UINT64_MAX) illegal_count += 1; + values_uint64[i] = static_cast(values[i]); + } + if (illegal_count > 0) convert_permit = false; + } + else if (outtype == RDT_Int64) 
{ // [-18446744073709551615, 18446744073709551615] + new_values = static_cast(CPLMalloc(sizeof(vint64_t) * n_cols * n_rows)); + vint64_t* values_int64 = static_cast(new_values); + if (old_nodata < INT64_MIN || old_nodata > INT64_MAX) { + new_nodata = INT64_MIN; + change_nodata = true; + } + int illegal_count = 0; +#pragma omp parallel for reduction(+:illegal_count) + for (int i = 0; i < n_cols * n_rows; i++) { + if (FloatEqual(values[i], old_nodata) && change_nodata) { + values_int64[i] = INT64_MIN; + continue; + } + if (values[i] < INT64_MIN || values[i] > INT64_MAX) illegal_count += 1; + values_int64[i] = static_cast(values[i]); + } + if (illegal_count > 0) convert_permit = false; + } +#endif else if (outtype == RDT_Float) { new_values = static_cast(CPLMalloc(sizeof(float) * n_cols * n_rows)); float* values_float = static_cast(new_values); @@ -827,48 +919,59 @@ bool ReadGridFsFile(MongoGridFs* gfs, const string& filename, if (rstype == RDT_Double && size_dtype == sizeof(double)) { double* data_dbl = reinterpret_cast(buf); Initialize1DArray(value_count, data, data_dbl); - Release1DArray(data_dbl); + //Release1DArray(data_dbl); } else if (rstype == RDT_Float && size_dtype == sizeof(float)) { float* data_flt = reinterpret_cast(buf); Initialize1DArray(value_count, data, data_flt); - Release1DArray(data_flt); + //Release1DArray(data_flt); } else if (rstype == RDT_Int32 && size_dtype == sizeof(vint32_t)) { vint32_t* data_int32 = reinterpret_cast(buf); Initialize1DArray(value_count, data, data_int32); - Release1DArray(data_int32); + //Release1DArray(data_int32); } else if (rstype == RDT_UInt32 && size_dtype == sizeof(vuint32_t)) { vuint32_t* data_uint32 = reinterpret_cast(buf); Initialize1DArray(value_count, data, data_uint32); - Release1DArray(data_uint32); + //Release1DArray(data_uint32); + } + else if (rstype == RDT_Int64 && size_dtype == sizeof(vint64_t)) { + vint64_t* data_int64 = reinterpret_cast(buf); + Initialize1DArray(value_count, data, data_int64); + 
//Release1DArray(data_int64); + } + else if (rstype == RDT_UInt64 && size_dtype == sizeof(vuint64_t)) { + vuint64_t* data_uint64 = reinterpret_cast(buf); + Initialize1DArray(value_count, data, data_uint64); + //Release1DArray(data_uint64); } else if (rstype == RDT_Int16 && size_dtype == sizeof(vint16_t)) { vint16_t* data_int16 = reinterpret_cast(buf); Initialize1DArray(value_count, data, data_int16); - Release1DArray(data_int16); + //Release1DArray(data_int16); } else if (rstype == RDT_UInt16 && size_dtype == sizeof(vuint16_t)) { vuint16_t* data_uint16 = reinterpret_cast(buf); Initialize1DArray(value_count, data, data_uint16); - Release1DArray(data_uint16); + //Release1DArray(data_uint16); } else if (rstype == RDT_Int8 && size_dtype == sizeof(vint8_t)) { vint8_t* data_int8 = reinterpret_cast(buf); Initialize1DArray(value_count, data, data_int8); - Release1DArray(data_int8); + //Release1DArray(data_int8); } else if (rstype == RDT_UInt8 && size_dtype == sizeof(vuint8_t)) { vuint8_t* data_uint8 = reinterpret_cast(buf); Initialize1DArray(value_count, data, data_uint8); - Release1DArray(data_uint8); + //Release1DArray(data_uint8); } else { StatusMessage("Unconsistent of data type and size!"); delete[] buf; return false; } + delete[] buf; return true; } @@ -1928,7 +2031,8 @@ void clsRasterData::InitializeRasterClass(bool is_2d /* = false */) { n_cells_ = -1; rs_type_ = RDT_Unknown; rs_type_out_ = RDT_Unknown; - no_data_value_ = static_cast(NODATA_VALUE); // Be careful of unsigned data type! + no_data_value_ = DefaultNoDataByType(TypeToRasterDataType(typeid(T))); + //no_data_value_ = static_cast(NODATA_VALUE); // Be careful of unsigned data type! 
default_value_ = NODATA_VALUE; raster_ = nullptr; pos_data_ = nullptr; @@ -2380,7 +2484,7 @@ double clsRasterData::GetStatistics(string sindex, const int lyr /* = sindex = GetUpper(sindex); if (!ValidateRasterData() || !ValidateLayer(lyr)) { StatusMessage("No available raster statistics!"); - return CVT_DBL(no_data_value_); + return CVT_DBL(default_value_); } if (is_2draster && nullptr != raster_2d_) { // for 2D raster data @@ -2393,7 +2497,7 @@ double clsRasterData::GetStatistics(string sindex, const int lyr /* = return stats_2d_.at(sindex)[lyr - 1]; } StatusMessage("WARNING: " + ValueToString(sindex) + " is not supported currently."); - return CVT_DBL(no_data_value_); + return CVT_DBL(default_value_); } // Else, for 1D raster data auto it = stats_.find(sindex); @@ -2404,7 +2508,7 @@ double clsRasterData::GetStatistics(string sindex, const int lyr /* = return stats_.at(sindex); } StatusMessage("WARNING: " + ValueToString(sindex) + " is not supported currently."); - return CVT_DBL(no_data_value_); + return CVT_DBL(default_value_); } template @@ -4044,7 +4148,7 @@ int clsRasterData::MaskAndCalculateValidPosition() { Initialize1DArray(n_cells_, raster_, no_data_value_); } // Loop the masked raster values - int synthesis_idx = -1; + size_t synthesis_idx = 0; for (size_t k = 0; k < pos_rows.size(); ++k) { synthesis_idx = k; int tmpr = pos_rows.at(k); diff --git a/src/db_mongoc.cpp b/src/db_mongoc.cpp index 282def2ef..b4d71b389 100644 --- a/src/db_mongoc.cpp +++ b/src/db_mongoc.cpp @@ -272,19 +272,9 @@ MongoCollection::~MongoCollection() { mongoc_collection_destroy(collection_); } -mongoc_cursor_t* MongoCollection::ExecuteQuery(const bson_t* b) { - // NOTE: mongoc_collection_find should be deprecated from v1.5.0, however, mongoc_collection_find_with_opts - // do not work in my Windows 10 both by MSVC and MINGW64. - // Upd 12/13/2017 The new method also failed in our linux cluster (redhat 6.2 and Intel C++ 12.1). 
- // Upd 12/29/2021 I decide to use new method from a quite later version such as v1.8.0. - // Maybe a precise version can be determined after a thorough test. - // Upd 06/24/2022 The new API still not working in Windows. +mongoc_cursor_t* MongoCollection::ExecuteQuery(const bson_t* b, const bson_t* opts /* = nullptr */) { mongoc_cursor_t* cursor = nullptr; -// #if MONGOC_CHECK_VERSION(1, 8, 0) -// cursor = mongoc_collection_find_with_opts(collection_, b, NULL, NULL); -// #else // Deprecated from 1.5.0 - cursor = mongoc_collection_find(collection_, MONGOC_QUERY_NONE, 0, 0, 0, b, NULL, NULL); -// #endif + cursor = mongoc_collection_find_with_opts(collection_, b, opts, NULL); return cursor; } diff --git a/src/db_mongoc.h b/src/db_mongoc.h index bb5974eb3..1f8134ba5 100644 --- a/src/db_mongoc.h +++ b/src/db_mongoc.h @@ -126,7 +126,7 @@ class MongoCollection { ~MongoCollection(); /*! Execute query */ - mongoc_cursor_t* ExecuteQuery(const bson_t* b); + mongoc_cursor_t* ExecuteQuery(const bson_t* b, const bson_t* opts = nullptr); /*! Query the records number */ vint QueryRecordsCount(); diff --git a/src/utils_array.h b/src/utils_array.h index 4b93b830b..f0027af01 100644 --- a/src/utils_array.h +++ b/src/utils_array.h @@ -103,7 +103,6 @@ void Release1DArray(T*& data); /*! 
* \brief Release DT_Array2D data - * \param[in] row row * \param[in] data */ template diff --git a/src/utils_string.cpp b/src/utils_string.cpp index 5170a6a72..d612483bb 100644 --- a/src/utils_string.cpp +++ b/src/utils_string.cpp @@ -137,7 +137,8 @@ void _ui64tow_s(vuint64_t value, wchar_t* buffer, size_t size, vint radix) { } void _gcvt_s(char* buffer, size_t size, double value, vint numberOfDigits) { - sprintf(buffer, "%f", value); + //sprintf(buffer, "%f", value); + snprintf(buffer, size, "%f", value); char* point = strchr(buffer, '.'); if (!point) return; char* zero = buffer + strlen(buffer); @@ -248,14 +249,14 @@ wstring atow(const string& astr) { vint IsInt(const string& num_str, bool& success) { char* endptr = nullptr; - int result = strtol(num_str.c_str(), &endptr, 10); + int result = CVT_INT(strtol(num_str.c_str(), &endptr, 10)); success = endptr == num_str.c_str() + num_str.length() && itoa(result) == num_str; return result; } vint IsInt(const wstring& num_str, bool& success) { wchar_t* endptr = nullptr; - int result = wcstol(num_str.c_str(), &endptr, 10); + int result = CVT_INT(wcstol(num_str.c_str(), &endptr, 10)); success = endptr == num_str.c_str() + num_str.length() && itow(result) == num_str; return result; } diff --git a/src/utils_time.cpp b/src/utils_time.cpp index f56efcafc..bbc66920e 100644 --- a/src/utils_time.cpp +++ b/src/utils_time.cpp @@ -324,10 +324,10 @@ DateTime ConvertTMToDateTime(tm* time_info, vint milliseconds) { dt.minute = time_info->tm_min; dt.second = time_info->tm_sec; - dt.milliseconds = milliseconds; + dt.milliseconds = CVT_INT(milliseconds); dt.filetime = CVT_VUINT64(timer * 1000 + milliseconds); dt.total_milliseconds = CVT_VUINT64(timer * 1000 + milliseconds); - delete time_info; + //delete &time_info; return dt; } @@ -351,7 +351,9 @@ DateTime DateTime::LocalTime() { #else localtime_r(&timer, time_info); #endif - return ConvertTMToDateTime(time_info, GetCurrentMilliseconds()); + DateTime dt = ConvertTMToDateTime(time_info, 
GetCurrentMilliseconds()); + delete time_info; + return dt; #endif /* CPP_MSVC */ } @@ -368,7 +370,9 @@ DateTime DateTime::UTCTime() { #else gmtime_r(&timer, time_info); #endif - return ConvertTMToDateTime(time_info, GetCurrentMilliseconds()); + DateTime dt = ConvertTMToDateTime(time_info, GetCurrentMilliseconds()); + delete time_info; + return dt; #endif /* CPP_MSVC */ } @@ -390,7 +394,7 @@ DateTime DateTime::FromDateTime(const int iyear, const int imonth, const int ida FileTimeToSystemTime(&file_time, &sys_time); return SystemTimeToDateTime(sys_time); #elif (defined CPP_GCC) || (defined CPP_ICC) - tm time_info; + struct tm time_info; memset(&time_info, 0, sizeof(time_info)); time_info.tm_year = iyear - 1900; time_info.tm_mon = imonth - 1; @@ -400,7 +404,9 @@ DateTime DateTime::FromDateTime(const int iyear, const int imonth, const int ida time_info.tm_sec = isecond; time_info.tm_isdst = -1; - return ConvertTMToDateTime(&time_info, imillisecond); + DateTime dt = ConvertTMToDateTime(&time_info, imillisecond); + //delete &time_info; + return dt; #endif } @@ -423,7 +429,9 @@ DateTime DateTime::FromFileTime(const vuint64_t ifiletime) { #else localtime_r(&timer, time_info); #endif - return ConvertTMToDateTime(time_info, ifiletime % 1000); + DateTime dt = ConvertTMToDateTime(time_info, ifiletime % 1000); + delete time_info; + return dt; #endif } @@ -450,7 +458,9 @@ DateTime DateTime::ToLocalTime() { #else localtime_r(&timer, time_info); #endif - return ConvertTMToDateTime(time_info, milliseconds); + DateTime dt = ConvertTMToDateTime(time_info, milliseconds); + //delete &time_info; + return dt; #endif } @@ -469,7 +479,9 @@ DateTime DateTime::ToUTCTime() { gmtime_r(&timer, time_info); #endif - return ConvertTMToDateTime(time_info, milliseconds); + DateTime dt = ConvertTMToDateTime(time_info, milliseconds); + //delete &time_info; + return dt; #endif } diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt index 407a8daa5..e7801b343 100644 --- a/test/CMakeLists.txt +++ 
b/test/CMakeLists.txt @@ -81,3 +81,7 @@ endif () # endif () # SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS}" PARENT_SCOPE) # SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}" PARENT_SCOPE) + +if (USE_SAN) + add_sanitizers(${APPNAME}) +endif () diff --git a/test/raster/test_raster1d_mask_exceed.cpp b/test/raster/test_raster1d_mask_exceed.cpp index e559b13d9..e2582dc7e 100644 --- a/test/raster/test_raster1d_mask_exceed.cpp +++ b/test/raster/test_raster1d_mask_exceed.cpp @@ -68,6 +68,9 @@ struct InputRasterFiles { mask_name = maskf.c_str(); mask2_name = maskf2.c_str(); } + + ~InputRasterFiles() { ; } + const char* raster_name; const char* mask_name; const char* mask2_name; @@ -337,6 +340,7 @@ TEST_P(clsRasterDataTestMaskExceed, MatchExactNoPosNoMaskExt) { delete mongors; delete mongors_valid; #endif + delete rs_; } // matched_exactly = True, calc_pos = False, use_mask_ext = True @@ -524,6 +528,8 @@ TEST_P(clsRasterDataTestMaskExceed, MatchExactNoPosUseMaskExt) { delete mongors; delete mongors_valid; #endif + + delete rs_; } // matched_exactly = True, calc_pos = True, use_mask_ext = False @@ -709,6 +715,8 @@ TEST_P(clsRasterDataTestMaskExceed, MatchExactCalPosNoMaskExt) { delete mongors; delete mongors_valid; #endif + + delete rs_; } // matched_exactly = True, calc_pos = True, use_mask_ext = True @@ -896,6 +904,8 @@ TEST_P(clsRasterDataTestMaskExceed, MatchExactCalPosUseMaskExt) { delete mongors; delete mongors_valid; #endif + + delete rs_; } // matched_exactly = False, calc_pos = False, use_mask_ext = False @@ -1082,6 +1092,8 @@ TEST_P(clsRasterDataTestMaskExceed, NotMatchExactNoPosNoMaskExt) { delete mongors; delete mongors_valid; #endif + + delete rs_; } // matched_exactly = False, calc_pos = False, use_mask_ext = True @@ -1277,6 +1289,8 @@ TEST_P(clsRasterDataTestMaskExceed, NotMatchExactNoPosUseMaskExt) { delete mongors; delete mongors_valid; #endif + + delete rs_; } // matched_exactly = False, calc_pos = True, use_mask_ext = False @@ -1463,6 +1477,8 @@ 
TEST_P(clsRasterDataTestMaskExceed, NotMatchExactCalPosNoMaskExt) { delete mongors; delete mongors_valid; #endif + + delete rs_; } // matched_exactly = False, calc_pos = True, use_mask_ext = True @@ -1649,16 +1665,17 @@ TEST_P(clsRasterDataTestMaskExceed, NotMatchExactCalPosUseMaskExt) { delete mongors; delete mongors_valid; #endif + delete rs_; } #ifdef USE_GDAL INSTANTIATE_TEST_CASE_P(SingleLayer, clsRasterDataTestMaskExceed, Values(new InputRasterFiles(AscFile, MaskAscFile, MaskAscFile2), - new InputRasterFiles(TifFile, MaskTifFile, MaskTifFile2))); + new InputRasterFiles(TifFile, MaskTifFile, MaskTifFile2)),); #else INSTANTIATE_TEST_CASE_P(SingleLayer, clsRasterDataTestMaskExceed, - Values(new InputRasterFiles(AscFile, MaskAscFile, MaskAscFile2))); + Values(new InputRasterFiles(AscFile, MaskAscFile, MaskAscFile2)),); #endif /* USE_GDAL */ } /* namespace */ diff --git a/test/raster/test_raster1d_mask_within.cpp b/test/raster/test_raster1d_mask_within.cpp index 115cd1117..1ed0b1e20 100644 --- a/test/raster/test_raster1d_mask_within.cpp +++ b/test/raster/test_raster1d_mask_within.cpp @@ -68,6 +68,9 @@ struct InputRasterFiles { mask_name = maskf.c_str(); mask2_name = maskf2.c_str(); } + + ~InputRasterFiles() { ; } + const char* raster_name; const char* mask_name; const char* mask2_name; @@ -984,6 +987,8 @@ TEST_P(clsRasterDataTestMaskWithin, MatchExactCalPosUseMaskExt) { delete mongors; delete mongors_valid; #endif + + delete rs_; } // matched_exactly = False, calc_pos = False, use_mask_ext = False @@ -1801,10 +1806,10 @@ TEST_P(clsRasterDataTestMaskWithin, NotMatchExactCalcPosUseMaskExt) { #ifdef USE_GDAL INSTANTIATE_TEST_CASE_P(SingleLayer, clsRasterDataTestMaskWithin, Values(new InputRasterFiles(AscFile, MaskAscFileS, MaskAscFileS2), - new InputRasterFiles(TifFile, MaskTifFileS, MaskTifFileS2))); + new InputRasterFiles(TifFile, MaskTifFileS, MaskTifFileS2)),); #else INSTANTIATE_TEST_CASE_P(SingleLayer, clsRasterDataTestMaskWithin, - Values(new 
InputRasterFiles(AscFile, MaskAscFileS, MaskAscFileS2))); + Values(new InputRasterFiles(AscFile, MaskAscFileS, MaskAscFileS2)),); #endif /* USE_GDAL */ } /* namespace */ diff --git a/test/raster/test_raster1d_nomask.cpp b/test/raster/test_raster1d_nomask.cpp index d834ffb17..ce9ccdec8 100644 --- a/test/raster/test_raster1d_nomask.cpp +++ b/test/raster/test_raster1d_nomask.cpp @@ -552,10 +552,10 @@ TEST_P(clsRasterDataTestNoMask, RasterIOWithCalcPos) { // instantiate them several times. #ifdef USE_GDAL INSTANTIATE_TEST_CASE_P(SingleLayer, clsRasterDataTestNoMask, - Values(AscFileChars, TifFileChars)); + Values(AscFileChars, TifFileChars),); #else INSTANTIATE_TEST_CASE_P(SingleLayer, clsRasterDataTestNoMask, - Values(AscFileChars)); + Values(AscFileChars),); #endif /* USE_GDAL */ } /* namespace */ diff --git a/test/raster/test_raster1d_split_merge.cpp b/test/raster/test_raster1d_split_merge.cpp index e35fd7ed7..91b7995fe 100644 --- a/test/raster/test_raster1d_split_merge.cpp +++ b/test/raster/test_raster1d_split_merge.cpp @@ -58,6 +58,9 @@ struct InputRasterFiles { mask_name = maskf.c_str(); raster_name = rsf.c_str(); } + + ~InputRasterFiles() { ; } + const char* mask_name; const char* raster_name; }; @@ -569,6 +572,9 @@ TEST_P(clsRasterDataSplitMerge, SplitRaster) { Release1DArray(it->second); } subarray.clear(); + + Release1DArray(datacom); + Release1DArray(datacomvalid); delete rs; } @@ -576,10 +582,10 @@ TEST_P(clsRasterDataSplitMerge, SplitRaster) { #ifdef USE_GDAL INSTANTIATE_TEST_CASE_P(SingleLayer, clsRasterDataSplitMerge, Values(new InputRasterFiles(mask_asc_file, rs1_asc), - new InputRasterFiles(mask_tif_file, rs1_tif))); + new InputRasterFiles(mask_tif_file, rs1_tif)),); #else INSTANTIATE_TEST_CASE_P(SingleLayer, clsRasterDataSplitMerge, - Values(new InputRasterFiles(mask_asc_file, rs1_asc))); + Values(new InputRasterFiles(mask_asc_file, rs1_asc)),); #endif /* USE_GDAL */ } /* namespace */ diff --git a/test/raster/test_raster2d_mask.cpp 
b/test/raster/test_raster2d_mask.cpp index 42e30efb3..57d203fe4 100644 --- a/test/raster/test_raster2d_mask.cpp +++ b/test/raster/test_raster2d_mask.cpp @@ -74,6 +74,9 @@ struct InputRasterFiles { raster_name3 = rs3.c_str(); mask_name = maskf.c_str(); } + + ~InputRasterFiles() { ; } + const char* raster_name1; const char* raster_name2; const char* raster_name3; @@ -646,10 +649,10 @@ TEST_P(clsRasterDataTestMask2D, CalcPos) { #ifdef USE_GDAL INSTANTIATE_TEST_CASE_P(MultipleLayers, clsRasterDataTestMask2D, Values(new InputRasterFiles(rs1_asc, rs2_asc, rs3_asc, mask_asc_file), - new InputRasterFiles(rs1_tif, rs2_tif, rs3_tif, mask_tif_file))); + new InputRasterFiles(rs1_tif, rs2_tif, rs3_tif, mask_tif_file)),); #else INSTANTIATE_TEST_CASE_P(MultipleLayers, clsRasterDataTestMask2D, - Values(new InputRasterFiles(rs1_asc, rs2_asc, rs3_asc, mask_asc_file))); + Values(new InputRasterFiles(rs1_asc, rs2_asc, rs3_asc, mask_asc_file)),); #endif /* USE_GDAL */ } /* namespace */ diff --git a/test/raster/test_raster2d_nomask.cpp b/test/raster/test_raster2d_nomask.cpp index 52d7c0959..67f4d826f 100644 --- a/test/raster/test_raster2d_nomask.cpp +++ b/test/raster/test_raster2d_nomask.cpp @@ -56,6 +56,9 @@ struct InputRasterFiles { raster_name2 = rs2.c_str(); raster_name3 = rs3.c_str(); } + + ~InputRasterFiles() { ; } + const char* raster_name1; const char* raster_name2; const char* raster_name3; @@ -791,10 +794,10 @@ TEST_P(clsRasterDataTest2DNoMask, RasterIOWithCalcPos) { #ifdef USE_GDAL INSTANTIATE_TEST_CASE_P(MultipleLayers, clsRasterDataTest2DNoMask, Values(new InputRasterFiles(rs1_asc, rs2_asc, rs3_asc), - new InputRasterFiles(rs1_tif, rs2_tif, rs3_tif))); + new InputRasterFiles(rs1_tif, rs2_tif, rs3_tif)),); #else INSTANTIATE_TEST_CASE_P(MultipleLayers, clsRasterDataTest2DNoMask, - Values(new InputRasterFiles(rs1_asc, rs2_asc, rs3_asc))); + Values(new InputRasterFiles(rs1_asc, rs2_asc, rs3_asc)),); #endif /* USE_GDAL */ } /* namespace */ diff --git 
a/test/raster/test_raster2d_split_merge.cpp b/test/raster/test_raster2d_split_merge.cpp index 3ea203940..97af1ee40 100644 --- a/test/raster/test_raster2d_split_merge.cpp +++ b/test/raster/test_raster2d_split_merge.cpp @@ -67,6 +67,9 @@ struct InputRasterFiles { raster_name3 = rs3.c_str(); mask_name = maskf.c_str(); } + + ~InputRasterFiles() { ; } + const char* raster_name1; const char* raster_name2; const char* raster_name3; @@ -246,7 +249,7 @@ TEST_P(clsRasterData2DSplitMerge, MaskLyrIO) { data3[2][1] = 36.f; EXPECT_TRUE(newsub3->Set2DData(newsub3->n_cells, newlyrs, data3)); newdata[3] = data3; - + float** datafull = nullptr; Initialize2DArray(maskrs_->GetValidNumber(), 2, datafull, -9999); datafull[0][0] = 2008.f; @@ -345,7 +348,7 @@ TEST_P(clsRasterData2DSplitMerge, MaskLyrIO) { } } delete newrs; - + #ifdef USE_MONGODB /** Output subset data to MongoDB **/ string mask_subset_name = maskrs_->GetCoreName() + "_2d"; @@ -364,7 +367,7 @@ TEST_P(clsRasterData2DSplitMerge, MaskLyrIO) { STRING_MAP opts_valid; UpdateStrHeader(opts_full, HEADER_INC_NODATA, "TRUE"); UpdateStrHeader(opts_valid, HEADER_INC_NODATA, "FALSE"); - + for (auto it = subsetsfull.begin(); it != subsetsfull.end(); ++it) { string gfsfull = itoa(it->first) + "_" + mask_subset_name; EXPECT_TRUE(it->second->ReadFromMongoDB(GlobalEnv->gfs_, gfsfull, opts_full)); @@ -435,6 +438,8 @@ TEST_P(clsRasterData2DSplitMerge, MaskLyrIO) { Release2DArray(it->second); } newdata.clear(); + + Release2DArray(datafull); } // Raster IO based on mask layer which has several subset // 1. 
output raster data according to mask's subset @@ -590,7 +595,7 @@ TEST_P(clsRasterData2DSplitMerge, SplitRaster) { datacombvalid[13][2] = 67.67f; datacombvalid[14][2] = -9999.f; datacombvalid[15][2] = -9999.f; - + for (auto it = rs_subset.begin(); it != rs_subset.end(); ++it) { vector outfiles(lyrs); for (int ilyr = 0; ilyr < lyrs; ilyr++) { @@ -651,6 +656,9 @@ TEST_P(clsRasterData2DSplitMerge, SplitRaster) { Release2DArray(it->second); } subarray.clear(); + + Release2DArray(datacombvalid); + delete rs; } @@ -658,10 +666,10 @@ TEST_P(clsRasterData2DSplitMerge, SplitRaster) { #ifdef USE_GDAL INSTANTIATE_TEST_CASE_P(MultiLayers, clsRasterData2DSplitMerge, Values(new InputRasterFiles(rs1_asc, rs2_asc, rs3_asc, mask_asc_file), - new InputRasterFiles(rs1_tif, rs2_tif, rs3_tif, mask_tif_file))); + new InputRasterFiles(rs1_tif, rs2_tif, rs3_tif, mask_tif_file)),); #else INSTANTIATE_TEST_CASE_P(MultiLayers, clsRasterData2DSplitMerge, - Values(new InputRasterFiles(rs1_asc, rs2_asc, rs3_asc, mask_asc_file))); + Values(new InputRasterFiles(rs1_asc, rs2_asc, rs3_asc, mask_asc_file)),); #endif /* USE_GDAL */ } /* namespace */ diff --git a/test/raster/test_raster_constructor.cpp b/test/raster/test_raster_constructor.cpp index 0cebde583..1b742955c 100644 --- a/test/raster/test_raster_constructor.cpp +++ b/test/raster/test_raster_constructor.cpp @@ -60,7 +60,8 @@ TEST(clsRasterDataTestBlankCtor, ValidateAccess) { EXPECT_EQ(-1, rs->GetDataLength()); // m_nCells EXPECT_EQ(-1, rs->GetCellNumber()); // m_nCells - EXPECT_EQ(-9999., rs->GetNoDataValue()); // m_noDataValue + float nodata_value = -1 * FLT_MAX; // the default nodata value depends on the data type T + EXPECT_EQ(nodata_value, rs->GetNoDataValue()); // m_noDataValue EXPECT_EQ(-9999., rs->GetDefaultValue()); // m_defaultValue EXPECT_EQ("", rs->GetFilePath()); // m_filePathName @@ -121,14 +122,14 @@ TEST(clsRasterDataTestBlankCtor, ValidateAccess) { EXPECT_EQ(nullptr, rs_2ddata); /** Get raster cell value by various way **/ - 
EXPECT_FLOAT_EQ(-9999.f, rs->GetValueByIndex(-1)); - EXPECT_FLOAT_EQ(-9999.f, rs->GetValueByIndex(0)); - EXPECT_FLOAT_EQ(-9999.f, rs->GetValueByIndex(540, 1)); - EXPECT_FLOAT_EQ(-9999.f, rs->GetValueByIndex(541, 1)); - EXPECT_FLOAT_EQ(-9999.f, rs->GetValueByIndex(29)); - EXPECT_FLOAT_EQ(-9999.f, rs->GetValueByIndex(29, 0)); - EXPECT_FLOAT_EQ(-9999.f, rs->GetValueByIndex(-1, 2)); - EXPECT_FLOAT_EQ(-9999.f, rs->GetValueByIndex(541, 2)); + EXPECT_FLOAT_EQ(nodata_value, rs->GetValueByIndex(-1)); + EXPECT_FLOAT_EQ(nodata_value, rs->GetValueByIndex(0)); + EXPECT_FLOAT_EQ(nodata_value, rs->GetValueByIndex(540, 1)); + EXPECT_FLOAT_EQ(nodata_value, rs->GetValueByIndex(541, 1)); + EXPECT_FLOAT_EQ(nodata_value, rs->GetValueByIndex(29)); + EXPECT_FLOAT_EQ(nodata_value, rs->GetValueByIndex(29, 0)); + EXPECT_FLOAT_EQ(nodata_value, rs->GetValueByIndex(-1, 2)); + EXPECT_FLOAT_EQ(nodata_value, rs->GetValueByIndex(541, 2)); int tmp_lyr = rs->GetLayers(); float* tmp_values = nullptr; @@ -138,14 +139,14 @@ TEST(clsRasterDataTestBlankCtor, ValidateAccess) { rs->GetValueByIndex(0, tmp_values); EXPECT_EQ(nullptr, tmp_values); - EXPECT_FLOAT_EQ(-9999.f, rs->GetValue(-1, 0)); - EXPECT_FLOAT_EQ(-9999.f, rs->GetValue(20, 0)); - EXPECT_FLOAT_EQ(-9999.f, rs->GetValue(0, -1)); - EXPECT_FLOAT_EQ(-9999.f, rs->GetValue(0, 30)); - EXPECT_FLOAT_EQ(-9999.f, rs->GetValue(2, 4, -1)); - EXPECT_FLOAT_EQ(-9999.f, rs->GetValue(2, 4, 2)); - EXPECT_FLOAT_EQ(-9999.f, rs->GetValue(2, 4)); - EXPECT_FLOAT_EQ(-9999.f, rs->GetValue(2, 4, 1)); + EXPECT_FLOAT_EQ(nodata_value, rs->GetValue(-1, 0)); + EXPECT_FLOAT_EQ(nodata_value, rs->GetValue(20, 0)); + EXPECT_FLOAT_EQ(nodata_value, rs->GetValue(0, -1)); + EXPECT_FLOAT_EQ(nodata_value, rs->GetValue(0, 30)); + EXPECT_FLOAT_EQ(nodata_value, rs->GetValue(2, 4, -1)); + EXPECT_FLOAT_EQ(nodata_value, rs->GetValue(2, 4, 2)); + EXPECT_FLOAT_EQ(nodata_value, rs->GetValue(2, 4)); + EXPECT_FLOAT_EQ(nodata_value, rs->GetValue(2, 4, 1)); rs->GetValue(-1, 0, tmp_values); 
EXPECT_EQ(nullptr, tmp_values); @@ -170,9 +171,9 @@ TEST(clsRasterDataTestBlankCtor, ValidateAccess) { // Set raster data value rs->SetValue(2, 4, 18.06f); - EXPECT_FLOAT_EQ(-9999.f, rs->GetValue(2, 4)); + EXPECT_FLOAT_EQ(nodata_value, rs->GetValue(2, 4)); rs->SetValue(0, 0, 1.f); - EXPECT_FLOAT_EQ(-9999.f, rs->GetValue(0, 0)); + EXPECT_FLOAT_EQ(nodata_value, rs->GetValue(0, 0)); /** Output to new file **/ string newfullname = GetAppPath() + SEP + "no_output.tif"; @@ -187,7 +188,7 @@ TEST(clsRasterDataASCConstructor, SupportedCases) { if (HasFailure()) { return; } EXPECT_EQ(4, not_std_rs->GetCellNumber()); EXPECT_EQ(2, not_std_rs->GetValidNumber()); - + delete not_std_rs; } TEST(clsRasterDataFailedConstructor, FailedCases) { @@ -479,6 +480,8 @@ TEST(clsRasterDataInt32, IOWithoutDefNodata) { EXPECT_EQ(INT32_MIN, rs->GetNoDataValue()); EXPECT_TRUE(rs->GetDataType() == RDT_Int32); EXPECT_TRUE(rs->GetOutDataType() == RDT_Int32); + + delete rs; } TEST(clsRasterDataFloat, FullIO) { diff --git a/test/test_main.cpp b/test/test_main.cpp index f82d98ac8..25685d561 100644 --- a/test/test_main.cpp +++ b/test/test_main.cpp @@ -92,5 +92,11 @@ int main(int argc, char** argv) { // Current Google test constantly leak 2 blocks at exit _CrtMemDumpAllObjectsSince(&memoryState); #endif /* Run Visual Leak Detector during Debug */ + +#ifdef USE_MONGODB + //delete GlobalEnv; + delete client_; + delete gfs_; +#endif return retval; }