From e3acb4e897ff4f417d673ef4427d6f23bdb12ba1 Mon Sep 17 00:00:00 2001 From: DMITRIY PEDCHENKO Date: Fri, 29 Mar 2024 15:19:40 +0300 Subject: [PATCH 01/11] Bump version --- pyproject.toml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index f2b126e3..a5d231b8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "data-syncmaster" -version = "0.1.1" +version = "0.1.2" license = "Apache-2.0" description = "Syncmaster REST API + Worker" authors = ["DataOps.ETL "] @@ -197,6 +197,7 @@ exclude_lines = [ "def downgrade\\(\\)", ] + [tool.poetry.group.docs.dependencies] autodoc-pydantic = {version = "^2.0.1", python = ">=3.8"} numpydoc = {version = "^1.6.0", python = ">=3.8"} From d1e3d9a7233117c55e5e5dfd6c5cb316c97bc3b6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=D0=9C=D0=B0=D1=80=D1=82=D1=8B=D0=BD=D0=BE=D0=B2=20=D0=9C?= =?UTF-8?q?=D0=B0=D0=BA=D1=81=D0=B8=D0=BC=20=D0=A1=D0=B5=D1=80=D0=B3=D0=B5?= =?UTF-8?q?=D0=B5=D0=B2=D0=B8=D1=87?= Date: Fri, 29 Mar 2024 15:25:46 +0300 Subject: [PATCH 02/11] Fix README badges --- README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index 6b258c7e..d36b538e 100644 --- a/README.rst +++ b/README.rst @@ -17,9 +17,9 @@ SyncMaster :target: https://badge.fury.io/py/data-syncmaster .. |Docker image| image:: https://img.shields.io/docker/v/mtsrus/syncmaster-backend?sort=semver&label=docker :target: https://hub.docker.com/r/mtsrus/syncmaster-backend -.. |Documentation| image:: https://readthedocs.org/projects/data-syncmaster/badge/?version=stable +.. |Documentation| image:: https://readthedocs.org/projects/syncmaster/badge/?version=stable :target: https://syncmaster.readthedocs.io -.. |Build Status| image:: https://github.com/MobileTeleSystems/syncmaster/workflows/Tests/badge.svg +.. 
|Build Status| image:: https://github.com/MobileTeleSystems/syncmaster/workflows/Run%20All%20Tests/badge.svg :target: https://github.com/MobileTeleSystems/syncmaster/actions .. |Coverage| image:: https://codecov.io/gh/MobileTeleSystems/syncmaster/graph/badge.svg?token=ky7UyUxolB :target: https://codecov.io/gh/MobileTeleSystems/syncmaster From c7a0921d30aa36a2c7afc3a2f54e6375bfa84bcd Mon Sep 17 00:00:00 2001 From: DMITRIY PEDCHENKO Date: Fri, 29 Mar 2024 15:33:12 +0300 Subject: [PATCH 03/11] Fix conf file version --- docs/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/conf.py b/docs/conf.py index 2f3edcd9..5a215b31 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -36,7 +36,7 @@ # The short X.Y version. # this value is updated automatically by `poetry version ...` and poetry-bumpversion plugin -ver = Version.parse("0.1.0") +ver = Version.parse("0.1.1") version = ver.base_version # The full version, including alpha/beta/rc tags. release = ver.public From 8fefa7cef3eb0cda82fc8933246d86218a52ce97 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 1 Apr 2024 21:06:03 +0000 Subject: [PATCH 04/11] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/macisamuele/language-formatters-pre-commit-hooks: v2.12.0 → v2.13.0](https://github.com/macisamuele/language-formatters-pre-commit-hooks/compare/v2.12.0...v2.13.0) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a81b1431..6494ba39 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -111,7 +111,7 @@ repos: - id: docker-compose-check - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks - rev: v2.12.0 + rev: v2.13.0 hooks: - id: pretty-format-yaml args: [--autofix, --indent, '2', --offset, '2'] 
From 8be231da2f89fc6b6a68386a172f9e2b647fe783 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Apr 2024 05:15:47 +0000 Subject: [PATCH 05/11] Bump the python-packages group with 2 updates Bumps the python-packages group with 2 updates: [fastapi](https://github.com/tiangolo/fastapi) and [faker](https://github.com/joke2k/faker). Updates `fastapi` from 0.110.0 to 0.110.1 - [Release notes](https://github.com/tiangolo/fastapi/releases) - [Commits](https://github.com/tiangolo/fastapi/compare/0.110.0...0.110.1) Updates `faker` from 24.4.0 to 24.7.1 - [Release notes](https://github.com/joke2k/faker/releases) - [Changelog](https://github.com/joke2k/faker/blob/master/CHANGELOG.md) - [Commits](https://github.com/joke2k/faker/compare/v24.4.0...v24.7.1) --- updated-dependencies: - dependency-name: fastapi dependency-type: direct:production update-type: version-update:semver-patch dependency-group: python-packages - dependency-name: faker dependency-type: direct:development update-type: version-update:semver-minor dependency-group: python-packages ... Signed-off-by: dependabot[bot] --- poetry.lock | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/poetry.lock b/poetry.lock index 5c225197..7b75863f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -861,13 +861,13 @@ files = [ [[package]] name = "faker" -version = "24.4.0" +version = "24.7.1" description = "Faker is a Python package that generates fake data for you." 
optional = false python-versions = ">=3.8" files = [ - {file = "Faker-24.4.0-py3-none-any.whl", hash = "sha256:998c29ee7d64429bd59204abffa9ba11f784fb26c7b9df4def78d1a70feb36a7"}, - {file = "Faker-24.4.0.tar.gz", hash = "sha256:a5ddccbe97ab691fad6bd8036c31f5697cfaa550e62e000078d1935fa8a7ec2e"}, + {file = "Faker-24.7.1-py3-none-any.whl", hash = "sha256:73f2bd886e8ce751e660c7d37a6c0a128aab5e1551359335bb79cfea0f4fabfc"}, + {file = "Faker-24.7.1.tar.gz", hash = "sha256:39d34c63f0d62ed574161e23fe32008917b923d18098ce94c2650fe16463b7d5"}, ] [package.dependencies] @@ -875,18 +875,18 @@ python-dateutil = ">=2.4" [[package]] name = "fastapi" -version = "0.110.0" +version = "0.110.1" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = true python-versions = ">=3.8" files = [ - {file = "fastapi-0.110.0-py3-none-any.whl", hash = "sha256:87a1f6fb632a218222c5984be540055346a8f5d8a68e8f6fb647b1dc9934de4b"}, - {file = "fastapi-0.110.0.tar.gz", hash = "sha256:266775f0dcc95af9d3ef39bad55cff525329a931d5fd51930aadd4f428bf7ff3"}, + {file = "fastapi-0.110.1-py3-none-any.whl", hash = "sha256:5df913203c482f820d31f48e635e022f8cbfe7350e4830ef05a3163925b1addc"}, + {file = "fastapi-0.110.1.tar.gz", hash = "sha256:6feac43ec359dfe4f45b2c18ec8c94edb8dc2dfc461d417d9e626590c071baad"}, ] [package.dependencies] pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" -starlette = ">=0.36.3,<0.37.0" +starlette = ">=0.37.2,<0.38.0" typing-extensions = ">=4.8.0" [package.extras] @@ -2867,13 +2867,13 @@ url = ["furl (>=0.4.1)"] [[package]] name = "starlette" -version = "0.36.3" +version = "0.37.2" description = "The little ASGI library that shines." 
optional = true python-versions = ">=3.8" files = [ - {file = "starlette-0.36.3-py3-none-any.whl", hash = "sha256:13d429aa93a61dc40bf503e8c801db1f1bca3dc706b10ef2434a36123568f044"}, - {file = "starlette-0.36.3.tar.gz", hash = "sha256:90a671733cfb35771d8cc605e0b679d23b992f8dcfad48cc60b38cb29aeb7080"}, + {file = "starlette-0.37.2-py3-none-any.whl", hash = "sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee"}, + {file = "starlette-0.37.2.tar.gz", hash = "sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823"}, ] [package.dependencies] From b7f4d029b6891b7890c044fe82ec0fcf302ea42a Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 8 Apr 2024 21:24:29 +0000 Subject: [PATCH 06/11] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/pre-commit-hooks: v4.5.0 → v4.6.0](https://github.com/pre-commit/pre-commit-hooks/compare/v4.5.0...v4.6.0) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6494ba39..78792990 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,7 +3,7 @@ default_language_version: repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.5.0 + rev: v4.6.0 hooks: - id: check-added-large-files - id: check-ast From 759a89da7ff4b736a90dea4fbe51b68de129eba2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=D0=9C=D0=B0=D1=80=D1=82=D1=8B=D0=BD=D0=BE=D0=B2=20=D0=9C?= =?UTF-8?q?=D0=B0=D0=BA=D1=81=D0=B8=D0=BC=20=D0=A1=D0=B5=D1=80=D0=B3=D0=B5?= =?UTF-8?q?=D0=B5=D0=B2=D0=B8=D1=87?= Date: Thu, 11 Apr 2024 15:45:24 +0300 Subject: [PATCH 07/11] [DOP-15023] Improve docker images --- .env.docker | 1 + .env.local | 3 +- .github/workflows/backend_docker_image.yml | 2 +- .github/workflows/hdfs-tests.yml | 16 +- .github/workflows/hive-tests.yml 
| 16 +- .github/workflows/oracle-tests.yml | 16 +- .github/workflows/s3-tests.yml | 16 +- .github/workflows/unit-test.yml | 11 +- .github/workflows/worker_docker_image.yml | 2 +- .gitignore | 2 + .pre-commit-config.yaml | 14 +- Makefile | 4 +- docker-compose.test.yml | 109 ++++++----- docker-compose.yml | 35 ++-- ...{backend.dockerfile => Dockerfile.backend} | 24 ++- .../{worker.dockerfile => Dockerfile.worker} | 21 +- docker/entrypoint_backend.sh | 7 + docker/entrypoint_worker.sh | 9 + docker/wait-for-it.sh | 182 ------------------ docs/backend/install.rst | 24 ++- docs/changelog/next_release/26.feature.rst | 1 - docs/changelog/next_release/28.doc.rst | 1 - docs/changelog/next_release/35.feature.1.rst | 1 + docs/changelog/next_release/35.feature.2.rst | 1 + docs/changelog/next_release/35.feature.3.rst | 1 + .../next_release/35.improvement.1.rst | 1 + .../next_release/35.improvement.2.rst | 1 + pytest_runner.sh | 23 --- syncmaster/backend/__init__.py | 54 ++++++ syncmaster/backend/__main__.py | 35 ++++ syncmaster/backend/export_openapi_schema.py | 4 +- syncmaster/backend/logger.py | 2 - syncmaster/backend/main.py | 63 ------ syncmaster/backend/pre_start.py | 94 --------- syncmaster/db/migrations/__init__.py | 2 - syncmaster/db/migrations/__main__.py | 38 ++++ syncmaster/db/{ => migrations}/alembic.ini | 4 +- syncmaster/db/migrations/env.py | 4 +- tests/conftest.py | 4 +- 39 files changed, 324 insertions(+), 524 deletions(-) rename docker/{backend.dockerfile => Dockerfile.backend} (59%) rename docker/{worker.dockerfile => Dockerfile.worker} (54%) create mode 100755 docker/entrypoint_backend.sh create mode 100755 docker/entrypoint_worker.sh delete mode 100755 docker/wait-for-it.sh delete mode 100644 docs/changelog/next_release/26.feature.rst delete mode 100644 docs/changelog/next_release/28.doc.rst create mode 100644 docs/changelog/next_release/35.feature.1.rst create mode 100644 docs/changelog/next_release/35.feature.2.rst create mode 100644 
docs/changelog/next_release/35.feature.3.rst create mode 100644 docs/changelog/next_release/35.improvement.1.rst create mode 100644 docs/changelog/next_release/35.improvement.2.rst delete mode 100755 pytest_runner.sh create mode 100755 syncmaster/backend/__main__.py delete mode 100644 syncmaster/backend/logger.py delete mode 100644 syncmaster/backend/main.py delete mode 100644 syncmaster/backend/pre_start.py delete mode 100644 syncmaster/db/migrations/__init__.py create mode 100755 syncmaster/db/migrations/__main__.py rename syncmaster/db/{ => migrations}/alembic.ini (90%) diff --git a/.env.docker b/.env.docker index 1d99c33c..cc08d70b 100644 --- a/.env.docker +++ b/.env.docker @@ -51,3 +51,4 @@ ENV=LOCAL SPARK_CONF_DIR=/app/tests/spark/hive/conf/ HADOOP_CONF_DIR=/app/tests/spark/hadoop/ HIVE_CONF_DIR=/app/tests/spark/hive/conf/ +CREATE_SPARK_SESSION_FUNCTION=tests.spark.get_worker_spark_session.get_worker_spark_session diff --git a/.env.local b/.env.local index 0aabbc83..bc58de7e 100644 --- a/.env.local +++ b/.env.local @@ -50,4 +50,5 @@ export ENV=LOCAL export SPARK_CONF_DIR=./app/tests/spark/hive/conf/ export HADOOP_CONF_DIR=./app/tests/spark/hadoop/ -export HIVE_CONF_DIR=./app/tests/spark/hive/conf/ \ No newline at end of file +export HIVE_CONF_DIR=./app/tests/spark/hive/conf/ +export CREATE_SPARK_SESSION_FUNCTION=tests.spark.get_worker_spark_session.get_worker_spark_session diff --git a/.github/workflows/backend_docker_image.yml b/.github/workflows/backend_docker_image.yml index 95c41a65..8149b214 100644 --- a/.github/workflows/backend_docker_image.yml +++ b/.github/workflows/backend_docker_image.yml @@ -48,7 +48,7 @@ jobs: with: tags: ${{ env.TAG }} context: . 
- file: docker/backend.dockerfile + file: docker/Dockerfile.backend pull: true push: true cache-to: type=gha,mode=max diff --git a/.github/workflows/hdfs-tests.yml b/.github/workflows/hdfs-tests.yml index aee5d6bc..921dc739 100644 --- a/.github/workflows/hdfs-tests.yml +++ b/.github/workflows/hdfs-tests.yml @@ -47,9 +47,9 @@ jobs: uses: docker/build-push-action@v5 with: context: . - tags: syncmaster_worker:${{ github.sha }} + tags: mtsrus/syncmaster-worker:${{ github.sha }} target: test - file: docker/worker.dockerfile + file: docker/Dockerfile.worker load: true cache-to: type=gha,mode=max cache-from: type=gha @@ -60,22 +60,16 @@ jobs: docker compose -f docker-compose.test.yml up -d db worker rabbitmq test-hive test-postgres --wait --wait-timeout 200 env: WORKER_IMAGE_TAG: ${{ github.sha }} - COMPOSE_PROJECT_NAME: ${{ github.run_id }}-syncmaster - - name: Run HDFS Tests. - # Tests for the backend are run on the worker. - # Backend and worker on the same container. + # This is important, as coverage is exported after receiving SIGTERM + - name: Run HDFS Tests run: | - docker compose -f ./docker-compose.test.yml exec -T worker coverage run -m pytest -vvv -s -m hdfs - env: - COMPOSE_PROJECT_NAME: ${{ github.run_id }}-syncmaster + docker compose -f ./docker-compose.test.yml exec -T worker coverage run -m pytest -vvv -s -m "worker and hdfs" - name: Shutdown if: always() run: | docker compose -f docker-compose.test.yml down -v --remove-orphans - env: - COMPOSE_PROJECT_NAME: ${{ github.run_id }}-syncmaster - name: Upload coverage results uses: actions/upload-artifact@v4 diff --git a/.github/workflows/hive-tests.yml b/.github/workflows/hive-tests.yml index a0533acf..346ab9ae 100644 --- a/.github/workflows/hive-tests.yml +++ b/.github/workflows/hive-tests.yml @@ -47,9 +47,9 @@ jobs: uses: docker/build-push-action@v5 with: context: . 
- tags: syncmaster_worker:${{ github.sha }} + tags: mtsrus/syncmaster-worker:${{ github.sha }} target: test - file: docker/worker.dockerfile + file: docker/Dockerfile.worker load: true cache-to: type=gha,mode=max cache-from: type=gha @@ -60,22 +60,16 @@ jobs: docker compose -f docker-compose.test.yml up -d db worker rabbitmq test-hive test-postgres --wait --wait-timeout 200 env: WORKER_IMAGE_TAG: ${{ github.sha }} - COMPOSE_PROJECT_NAME: ${{ github.run_id }}-syncmaster - - name: Run Hive Tests. - # Tests for the backend are run on the worker. - # Backend and worker on the same container. + # This is important, as coverage is exported after receiving SIGTERM + - name: Run Hive Tests run: | - docker compose -f ./docker-compose.test.yml exec -T worker coverage run -m pytest -vvv -s -m hive - env: - COMPOSE_PROJECT_NAME: ${{ github.run_id }}-syncmaster + docker compose -f ./docker-compose.test.yml exec -T worker coverage run -m pytest -vvv -s -m "worker and hive" - name: Shutdown if: always() run: | docker compose -f docker-compose.test.yml down -v --remove-orphans - env: - COMPOSE_PROJECT_NAME: ${{ github.run_id }}-syncmaster - name: Upload coverage results uses: actions/upload-artifact@v4 diff --git a/.github/workflows/oracle-tests.yml b/.github/workflows/oracle-tests.yml index 55f5efc1..1b2a348b 100644 --- a/.github/workflows/oracle-tests.yml +++ b/.github/workflows/oracle-tests.yml @@ -47,9 +47,9 @@ jobs: uses: docker/build-push-action@v5 with: context: . - tags: syncmaster_worker:${{ github.sha }} + tags: mtsrus/syncmaster-worker:${{ github.sha }} target: test - file: docker/worker.dockerfile + file: docker/Dockerfile.worker load: true cache-to: type=gha,mode=max cache-from: type=gha @@ -60,22 +60,16 @@ jobs: docker compose -f docker-compose.test.yml up -d db worker rabbitmq test-oracle test-postgres --wait --wait-timeout 200 env: WORKER_IMAGE_TAG: ${{ github.sha }} - COMPOSE_PROJECT_NAME: ${{ github.run_id }}-syncmaster - - name: Run Oracle Tests. 
- # Tests for the backend are run on the worker. - # Backend and worker on the same container. + # This is important, as coverage is exported after receiving SIGTERM + - name: Run Oracle Tests run: | - docker compose -f ./docker-compose.test.yml exec -T worker coverage run -m pytest -vvv -s -m oracle - env: - COMPOSE_PROJECT_NAME: ${{ github.run_id }}-syncmaster + docker compose -f ./docker-compose.test.yml exec -T worker coverage run -m pytest -vvv -s -m "worker and oracle" - name: Shutdown if: always() run: | docker compose -f docker-compose.test.yml down -v --remove-orphans - env: - COMPOSE_PROJECT_NAME: ${{ github.run_id }}-syncmaster - name: Upload coverage results uses: actions/upload-artifact@v4 diff --git a/.github/workflows/s3-tests.yml b/.github/workflows/s3-tests.yml index 44b914ce..46a0fd42 100644 --- a/.github/workflows/s3-tests.yml +++ b/.github/workflows/s3-tests.yml @@ -47,8 +47,8 @@ jobs: uses: docker/build-push-action@v5 with: context: . - tags: syncmaster_worker:${{ github.sha }} - file: docker/worker.dockerfile + tags: mtsrus/syncmaster-worker:${{ github.sha }} + file: docker/Dockerfile.worker target: test load: true cache-to: type=gha,mode=max @@ -60,22 +60,16 @@ jobs: docker compose -f docker-compose.test.yml up -d db worker rabbitmq test-s3 test-postgres --wait --wait-timeout 200 env: WORKER_IMAGE_TAG: ${{ github.sha }} - COMPOSE_PROJECT_NAME: ${{ github.run_id }}-syncmaster - - name: Run S3 Tests. - # Tests for the backend are run on the worker. - # Backend and worker on the same container. 
+ - name: Run S3 Tests run: | - docker compose -f ./docker-compose.test.yml exec -T worker coverage run -m pytest -vvv -s -m s3 - env: - COMPOSE_PROJECT_NAME: ${{ github.run_id }}-syncmaster + docker compose -f ./docker-compose.test.yml exec -T worker coverage run -m pytest -vvv -s -m "worker and s3" + # This is important, as coverage is exported after receiving SIGTERM - name: Shutdown if: always() run: | docker compose -f docker-compose.test.yml down -v --remove-orphans - env: - COMPOSE_PROJECT_NAME: ${{ github.run_id }}-syncmaster - name: Upload coverage results uses: actions/upload-artifact@v4 diff --git a/.github/workflows/unit-test.yml b/.github/workflows/unit-test.yml index fafc4c38..47f2b46e 100644 --- a/.github/workflows/unit-test.yml +++ b/.github/workflows/unit-test.yml @@ -52,22 +52,17 @@ jobs: run: | docker compose -f docker-compose.test.yml down -v --remove-orphans docker compose -f docker-compose.test.yml up -d db --wait --wait-timeout 200 - env: - COMPOSE_PROJECT_NAME: ${{ github.run_id }}-syncmaster - - name: Run Unit Tests. + - name: Run Unit Tests run: | source .env.local - ./pytest_runner.sh -m backend - env: - COMPOSE_PROJECT_NAME: ${{ github.run_id }}-syncmaster + poetry run python -m syncmaster.db.migrations upgrade head + poetry run coverage run -m pytest -vvv -s -m backend - name: Shutdown if: always() run: | docker compose -f docker-compose.test.yml down -v --remove-orphans - env: - COMPOSE_PROJECT_NAME: ${{ github.run_id }}-syncmaster - name: Upload coverage results uses: actions/upload-artifact@v4 diff --git a/.github/workflows/worker_docker_image.yml b/.github/workflows/worker_docker_image.yml index f8168502..ce0708fd 100644 --- a/.github/workflows/worker_docker_image.yml +++ b/.github/workflows/worker_docker_image.yml @@ -49,7 +49,7 @@ jobs: tags: ${{ env.TAG }} context: . 
target: prod - file: docker/worker.dockerfile + file: docker/Dockerfile.worker pull: true push: true cache-to: type=gha,mode=max diff --git a/.gitignore b/.gitignore index 0cfca9ce..9da31cd9 100644 --- a/.gitignore +++ b/.gitignore @@ -158,3 +158,5 @@ cython_debug/ **/metastore_db/ **/spark-warehouse/ cached_jars/ + +.DS_Store diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 78792990..321fdb1b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -40,13 +40,13 @@ repos: - id: chmod args: ['644'] exclude_types: [shell] - # exclude: ^(.*__main__\.py)$ - #- id: chmod - # args: ['755'] - # types: [shell] - #- id: chmod - # args: ['755'] - # files: ^(.*__main__\.py)$ + exclude: ^(.*__main__\.py)$ + - id: chmod + args: ['755'] + types: [shell] + - id: chmod + args: ['755'] + files: ^(.*__main__\.py)$ - id: insert-license files: .*\.py$ exclude: ^(tests/.*\.py)$ diff --git a/Makefile b/Makefile index 9e7d625f..e9bb2140 100644 --- a/Makefile +++ b/Makefile @@ -50,7 +50,7 @@ revision: ##@Database Create new revision of migrations RABBITMQ_USER=${RABBITMQ_USER} \ RABBITMQ_PASSWORD=${RABBITMQ_PASSWORD} \ PYTHONPATH=${APP_PATH} \ - ${POETRY} run alembic -c ${APP_PATH}/alembic.ini revision --autogenerate + ${POETRY} run python -m syncmaster.db.migrations --autogenerate migrate: ##@Database Upgdade database to last migration @POSTGRES_HOST=${POSTGRES_HOST} \ @@ -63,7 +63,7 @@ migrate: ##@Database Upgdade database to last migration RABBITMQ_USER=${RABBITMQ_USER} \ RABBITMQ_PASSWORD=${RABBITMQ_PASSWORD} \ PYTHONPATH=${APP_PATH} \ - ${POETRY} run alembic -c ${APP_PATH}/alembic.ini upgrade head + ${POETRY} run python -m syncmaster.db.migrations upgrade head test: ##@Test Run tests @POSTGRES_HOST=${POSTGRES_HOST} \ diff --git a/docker-compose.test.yml b/docker-compose.test.yml index 5dbe784e..13d43ff1 100644 --- a/docker-compose.test.yml +++ b/docker-compose.test.yml @@ -8,61 +8,77 @@ services: - 5432:5432 volumes: - 
postgres_data:/var/lib/postgresql/data - networks: - - network + healthcheck: + test: pg_isready + start_period: 5s + interval: 30s + timeout: 5s + retries: 3 worker: - image: syncmaster_worker:${WORKER_IMAGE_TAG:-latest} + image: mtsrus/syncmaster-worker:${WORKER_IMAGE_TAG:-test} restart: unless-stopped build: - dockerfile: docker/worker.dockerfile + dockerfile: docker/Dockerfile.worker context: . + target: test network: host - networks: - - network - depends_on: - - db - - rabbitmq - - test-oracle - - test-postgres - - test-hive - - test-s3 + command: --loglevel=info -Q test_queue + env_file: .env.docker volumes: - ./cached_jars:/root/.ivy2 - ./reports:/app/reports - env_file: .env.docker + - ./tests:/app/tests + - ./pyproject.toml:/app/pyproject.toml + depends_on: + db: + condition: service_healthy + rabbitmq: + condition: service_healthy + test-oracle: + condition: service_started # Oracle image does not have healthcheck + test-postgres: + condition: service_healthy + test-hive: + condition: service_healthy + test-s3: + condition: service_healthy backend: - image: syncmaster_back + image: mtsrus/syncmaster-backend::${BACKEND_IMAGE_TAG:-test} restart: unless-stopped build: - dockerfile: docker/backend.dockerfile + dockerfile: docker/Dockerfile.backend context: . 
+ target: test network: host + env_file: .env.docker ports: - 8000:8000 - networks: - - network - depends_on: - - db - - rabbitmq - - test-oracle - - test-postgres - - test-hive - - test-s3 volumes: - ./cached_jars:/root/.ivy2 - ./reports:/app/reports - env_file: .env.docker - command: [coverage, run, /app/syncmaster/backend/main.py] + - ./tests:/app/tests + - ./pyproject.toml:/app/pyproject.toml + depends_on: + db: + condition: service_healthy + rabbitmq: + condition: service_healthy rabbitmq: image: rabbitmq restart: unless-stopped ports: - 5672:5672 - networks: - - network + volumes: + - rabbitmq_data:/var/lib/rabbitmq + healthcheck: + test: rabbitmq-diagnostics -q ping + start_period: 5s + interval: 30s + timeout: 5s + retries: 3 test-s3: image: bitnami/minio:latest @@ -72,8 +88,12 @@ services: ports: - 9010:9000 - 9011:9001 - networks: - - network + healthcheck: + test: curl -f http://localhost:9000/minio/health/live + start_period: 5s + interval: 30s + timeout: 5s + retries: 3 test-postgres: image: postgres:15 @@ -81,8 +101,12 @@ services: ports: - 5433:5432 env_file: .env.docker - networks: - - network + healthcheck: + test: pg_isready + start_period: 5s + interval: 30s + timeout: 5s + retries: 3 test-oracle: image: gvenzl/oracle-xe:slim-faststart @@ -95,8 +119,6 @@ services: ORACLE_DATABASE: test_oracle APP_USER: test_user APP_USER_PASSWORD: test_password - networks: - - network metastore-hive: image: postgres:15 @@ -107,15 +129,20 @@ services: POSTGRES_PASSWORD: test_hive ports: - 5440:5432 - networks: - - network + healthcheck: + test: pg_isready + start_period: 5s + interval: 30s + timeout: 5s + retries: 3 test-hive: image: mtsrus/hadoop:hadoop2.7.3-hive2.3.9 restart: unless-stopped hostname: test-hive depends_on: - - metastore-hive + metastore-hive: + condition: service_healthy ports: - 9820:9820 # HDFS IPC - 9870:9870 # HDFS WebHDFS @@ -132,11 +159,7 @@ services: HIVE_METASTORE_DB_DRIVER: org.postgresql.Driver HIVE_METASTORE_DB_USER: test_hive 
HIVE_METASTORE_DB_PASSWORD: test_hive - networks: - - network - -networks: - network: volumes: postgres_data: + rabbitmq_data: diff --git a/docker-compose.yml b/docker-compose.yml index 2a716814..b606bdc5 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -8,34 +8,36 @@ services: - 5432:5432 volumes: - postgres_data:/var/lib/postgresql/data - networks: - - network + healthcheck: + test: pg_isready + start_period: 5s + interval: 30s + timeout: 5s + retries: 3 worker: image: syncmaster_worker restart: unless-stopped build: - dockerfile: docker/worker.dockerfile + dockerfile: docker/Dockerfile.worker context: . network: host - networks: - - network env_file: .env.docker backend: image: syncmaster_back restart: unless-stopped build: - dockerfile: docker/backend.dockerfile + dockerfile: docker/Dockerfile.backend context: . network: host ports: - 8000:8000 - networks: - - network depends_on: - - db - - rabbitmq + db: + condition: service_healthy + rabbitmq: + condition: service_healthy env_file: .env.docker rabbitmq: @@ -43,10 +45,15 @@ services: restart: unless-stopped ports: - 5672:5672 - networks: - - network -networks: - network: + volumes: + - rabbitmq_data:/var/lib/rabbitmq + healthcheck: + test: rabbitmq-diagnostics -q ping + start_period: 5s + interval: 30s + timeout: 5s + retries: 3 volumes: postgres_data: + rabbitmq_data: diff --git a/docker/backend.dockerfile b/docker/Dockerfile.backend similarity index 59% rename from docker/backend.dockerfile rename to docker/Dockerfile.backend index 1fdd5a13..057180f2 100644 --- a/docker/backend.dockerfile +++ b/docker/Dockerfile.backend @@ -1,19 +1,12 @@ -FROM python:3.11-slim +ARG BASE_IMAGE=python:3.11-slim +FROM $BASE_IMAGE AS prod RUN apt-get update && apt-get install -y \ - libkrb5-dev \ - libsasl2-dev \ - libsasl2-modules-gssapi-mit \ - libsasl2-modules-ldap \ - libsasl2-modules \ - openjdk-17-jdk \ libssl-dev \ - libldap2-dev \ autoconf \ gcc \ g++ \ make \ - libnghttp2-dev \ libffi-dev \ && rm -rf 
/var/lib/apt/lists/* @@ -25,11 +18,16 @@ WORKDIR /app COPY ./pyproject.toml ./poetry.lock* /app/ RUN pip install --upgrade pip setuptools wheel packaging - RUN poetry install --no-root --extras "backend" -COPY ./syncmaster /app/ - +COPY ./syncmaster/ /app/syncmaster/ ENV PYTHONPATH=/app -CMD [ "python", "/app/syncmaster/backend/main.py" ] \ No newline at end of file +COPY ./docker/entrypoint_backend.sh /app/entrypoint.sh +ENTRYPOINT ["/app/entrypoint.sh"] + + +FROM prod as test + +RUN poetry install --no-root --extras "backend" --with test +RUN sed -i 's/python -m/coverage run -m/g' /app/entrypoint.sh diff --git a/docker/worker.dockerfile b/docker/Dockerfile.worker similarity index 54% rename from docker/worker.dockerfile rename to docker/Dockerfile.worker index 1c336190..8df5f2e0 100644 --- a/docker/worker.dockerfile +++ b/docker/Dockerfile.worker @@ -1,4 +1,5 @@ -FROM python:3.11-slim as prod +ARG BASE_IMAGE=python:3.11-slim +FROM $BASE_IMAGE AS prod RUN apt-get update && apt-get install -y \ libkrb5-dev \ @@ -15,7 +16,6 @@ RUN apt-get update && apt-get install -y \ make \ libnghttp2-dev \ libffi-dev \ - telnetd \ && rm -rf /var/lib/apt/lists/* RUN pip install --no-cache-dir --timeout 3 --retries 3 poetry && poetry config virtualenvs.create false @@ -25,19 +25,18 @@ WORKDIR /app COPY ./pyproject.toml ./poetry.lock* /app/ RUN pip install --upgrade pip setuptools wheel packaging - RUN poetry install --no-root --extras "worker" -COPY . 
/app/ +COPY ./syncmaster/ /app/syncmaster/ +ENV PYTHONPATH=/app + +COPY ./docker/entrypoint_worker.sh /app/entrypoint.sh +ENTRYPOINT ["/app/entrypoint.sh"] +CMD ["--loglevel=info"] -# https://docs.celeryq.dev/en/stable/userguide/workers.html#max-tasks-per-child-setting -# Required to start each Celery task in separated process, avoiding issues with global Spark session object -CMD ["celery", "-A" ,"syncmaster.worker.config.celery", "worker", "--loglevel=info", "--max-tasks-per-child=1"] FROM prod as test +# CI runs tests in the worker container, so we need backend dependencies too RUN poetry install --no-root --extras "worker backend" --with test - -ENV CREATE_SPARK_SESSION_FUNCTION="tests.spark.get_worker_spark_session.get_worker_spark_session" -# Queue for tests -CMD ["coverage", "run", "-m", "celery", "-A" ,"syncmaster.worker.config.celery", "worker", "--loglevel=info", "--max-tasks-per-child=1", "-Q", "test_queue"] +RUN sed -i 's/python -m/coverage run -m/g' /app/entrypoint.sh diff --git a/docker/entrypoint_backend.sh b/docker/entrypoint_backend.sh new file mode 100755 index 00000000..91ed0cfb --- /dev/null +++ b/docker/entrypoint_backend.sh @@ -0,0 +1,7 @@ +#!/usr/bin/env bash +set -e + +python -m syncmaster.db.migrations upgrade head + +# exec is required to forward all signals to the main process +exec python -m syncmaster.backend --host 0.0.0.0 --port 8000 "$@" & diff --git a/docker/entrypoint_worker.sh b/docker/entrypoint_worker.sh new file mode 100755 index 00000000..864e8acf --- /dev/null +++ b/docker/entrypoint_worker.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash +set -e + +# https://docs.celeryq.dev/en/stable/userguide/workers.html#max-tasks-per-child-setting +# Required to start each Celery task in separated process, avoiding issues with global Spark session object + +# exec is required to forward all signals to the main process +exec python -m celery -A syncmaster.worker.config.celery worker --max-tasks-per-child=1 "$@" + diff --git a/docker/wait-for-it.sh 
b/docker/wait-for-it.sh deleted file mode 100755 index 7410fa3a..00000000 --- a/docker/wait-for-it.sh +++ /dev/null @@ -1,182 +0,0 @@ -#!/usr/bin/env bash -# Use this script to test if a given TCP host/port are available - -WAITFORIT_cmdname=${0##*/} - -echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi } - -usage() -{ - cat << USAGE >&2 -Usage: - $WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args] - -h HOST | --host=HOST Host or IP under test - -p PORT | --port=PORT TCP port under test - Alternatively, you specify the host and port as host:port - -s | --strict Only execute subcommand if the test succeeds - -q | --quiet Don't output any status messages - -t TIMEOUT | --timeout=TIMEOUT - Timeout in seconds, zero for no timeout - -- COMMAND ARGS Execute command with args after the test finishes -USAGE - exit 1 -} - -wait_for() -{ - if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then - echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT" - else - echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout" - fi - WAITFORIT_start_ts=$(date +%s) - while : - do - if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then - nc -z $WAITFORIT_HOST $WAITFORIT_PORT - WAITFORIT_result=$? - else - (echo > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1 - WAITFORIT_result=$? 
- fi - if [[ $WAITFORIT_result -eq 0 ]]; then - WAITFORIT_end_ts=$(date +%s) - echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds" - break - fi - sleep 1 - done - return $WAITFORIT_result -} - -wait_for_wrapper() -{ - # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692 - if [[ $WAITFORIT_QUIET -eq 1 ]]; then - timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT & - else - timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT & - fi - WAITFORIT_PID=$! - trap "kill -INT -$WAITFORIT_PID" INT - wait $WAITFORIT_PID - WAITFORIT_RESULT=$? - if [[ $WAITFORIT_RESULT -ne 0 ]]; then - echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT" - fi - return $WAITFORIT_RESULT -} - -# process arguments -while [[ $# -gt 0 ]] -do - case "$1" in - *:* ) - WAITFORIT_hostport=(${1//:/ }) - WAITFORIT_HOST=${WAITFORIT_hostport[0]} - WAITFORIT_PORT=${WAITFORIT_hostport[1]} - shift 1 - ;; - --child) - WAITFORIT_CHILD=1 - shift 1 - ;; - -q | --quiet) - WAITFORIT_QUIET=1 - shift 1 - ;; - -s | --strict) - WAITFORIT_STRICT=1 - shift 1 - ;; - -h) - WAITFORIT_HOST="$2" - if [[ $WAITFORIT_HOST == "" ]]; then break; fi - shift 2 - ;; - --host=*) - WAITFORIT_HOST="${1#*=}" - shift 1 - ;; - -p) - WAITFORIT_PORT="$2" - if [[ $WAITFORIT_PORT == "" ]]; then break; fi - shift 2 - ;; - --port=*) - WAITFORIT_PORT="${1#*=}" - shift 1 - ;; - -t) - WAITFORIT_TIMEOUT="$2" - if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi - shift 2 - ;; - --timeout=*) - WAITFORIT_TIMEOUT="${1#*=}" - shift 1 - ;; - --) - shift - WAITFORIT_CLI=("$@") - break - ;; - --help) - usage - ;; - *) - echoerr "Unknown argument: $1" - usage - ;; - esac -done - -if [[ "$WAITFORIT_HOST" == 
"" || "$WAITFORIT_PORT" == "" ]]; then - echoerr "Error: you need to provide a host and port to test." - usage -fi - -WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15} -WAITFORIT_STRICT=${WAITFORIT_STRICT:-0} -WAITFORIT_CHILD=${WAITFORIT_CHILD:-0} -WAITFORIT_QUIET=${WAITFORIT_QUIET:-0} - -# Check to see if timeout is from busybox? -WAITFORIT_TIMEOUT_PATH=$(type -p timeout) -WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH) - -WAITFORIT_BUSYTIMEFLAG="" -if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then - WAITFORIT_ISBUSY=1 - # Check if busybox timeout uses -t flag - # (recent Alpine versions don't support -t anymore) - if timeout &>/dev/stdout | grep -q -e '-t '; then - WAITFORIT_BUSYTIMEFLAG="-t" - fi -else - WAITFORIT_ISBUSY=0 -fi - -if [[ $WAITFORIT_CHILD -gt 0 ]]; then - wait_for - WAITFORIT_RESULT=$? - exit $WAITFORIT_RESULT -else - if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then - wait_for_wrapper - WAITFORIT_RESULT=$? - else - wait_for - WAITFORIT_RESULT=$? - fi -fi - -if [[ $WAITFORIT_CLI != "" ]]; then - if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then - echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess" - exit $WAITFORIT_RESULT - fi - exec "${WAITFORIT_CLI[@]}" -else - exit $WAITFORIT_RESULT -fi diff --git a/docs/backend/install.rst b/docs/backend/install.rst index c8483c80..20dea3fe 100644 --- a/docs/backend/install.rst +++ b/docs/backend/install.rst @@ -69,10 +69,26 @@ Start Postgres instance somewhere, and set up environment variables: You can use virtually any database supported by `SQLAlchemy `_, but the only one we really tested is Postgres. -Set environment variables to connect to RabbitMQ -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Run migrations +~~~~~~~~~~~~~~ -Set variables to connect to RabbitMQ +To apply migrations (database structure changes) you need to execute following command: + +.. 
code-block:: console + + $ python -m syncmaster.db.migrations upgrade head + +This is a thin wrapper around `alembic `_ cli, +options and commands are just the same. + +.. note:: + + This command should be executed after each upgrade to new SyncMaster version. + +Run RabbitMQ +~~~~~~~~~~~~ + +Start RabbitMQ instance somewhere, and set up environment variables: .. code-block:: bash @@ -107,6 +123,6 @@ To start backend server you need to execute following command: .. code-block:: console - $ python -m syncmaster.backend.main + $ python -m syncmaster.backend --host 0.0.0.0 --port 8000 After server is started and ready, open http://localhost:8000/docs. diff --git a/docs/changelog/next_release/26.feature.rst b/docs/changelog/next_release/26.feature.rst deleted file mode 100644 index d2695b0c..00000000 --- a/docs/changelog/next_release/26.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Automation of work with Github. \ No newline at end of file diff --git a/docs/changelog/next_release/28.doc.rst b/docs/changelog/next_release/28.doc.rst deleted file mode 100644 index eeca6a7b..00000000 --- a/docs/changelog/next_release/28.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Improve documentation. \ No newline at end of file diff --git a/docs/changelog/next_release/35.feature.1.rst b/docs/changelog/next_release/35.feature.1.rst new file mode 100644 index 00000000..56d342e3 --- /dev/null +++ b/docs/changelog/next_release/35.feature.1.rst @@ -0,0 +1 @@ +Rename ``syncmaster.backend.main`` module to ``syncmaster.backend``. It now also accepts the same args as ``uvicorn``. diff --git a/docs/changelog/next_release/35.feature.2.rst b/docs/changelog/next_release/35.feature.2.rst new file mode 100644 index 00000000..887852b9 --- /dev/null +++ b/docs/changelog/next_release/35.feature.2.rst @@ -0,0 +1 @@ +Add ``syncmaster.db.migrations`` module to run ``alembic`` with proper config.
diff --git a/docs/changelog/next_release/35.feature.3.rst b/docs/changelog/next_release/35.feature.3.rst new file mode 100644 index 00000000..07472ea7 --- /dev/null +++ b/docs/changelog/next_release/35.feature.3.rst @@ -0,0 +1 @@ +Change backend and worker images to use entrypoint. diff --git a/docs/changelog/next_release/35.improvement.1.rst b/docs/changelog/next_release/35.improvement.1.rst new file mode 100644 index 00000000..770d7c5b --- /dev/null +++ b/docs/changelog/next_release/35.improvement.1.rst @@ -0,0 +1 @@ +Run database migrations in the entrypoint of backend image, before starting the backend server. diff --git a/docs/changelog/next_release/35.improvement.2.rst b/docs/changelog/next_release/35.improvement.2.rst new file mode 100644 index 00000000..f794a60b --- /dev/null +++ b/docs/changelog/next_release/35.improvement.2.rst @@ -0,0 +1 @@ +Add healthchecks to example ``docker-compose.yml``. diff --git a/pytest_runner.sh b/pytest_runner.sh deleted file mode 100755 index f1079715..00000000 --- a/pytest_runner.sh +++ /dev/null @@ -1,23 +0,0 @@ -#!/usr/bin/env bash - -_term() { - echo "Caught SIGTERM signal!" - exit 255 -} - -trap _term SIGTERM SIGINT - -if [[ "x$CI" == "xtrue" ]]; then - root_path=$(dirname $(realpath $0)) - python_version=$(python -c 'import sys; print("{0}.{1}".format(*sys.version_info))') - poetry run coverage run -m pytest --junitxml=$root_path/reports/junit/python${python_version}.xml "$@" -else - poetry run pytest "$@" -fi - -ret=$? -if [[ "x$ret" == "x5" ]]; then - echo "No tests collected. Exiting with 0 (instead of 5)." 
- exit 0 -fi -exit "$ret" \ No newline at end of file diff --git a/syncmaster/backend/__init__.py b/syncmaster/backend/__init__.py index 104aecaf..a3081aa3 100644 --- a/syncmaster/backend/__init__.py +++ b/syncmaster/backend/__init__.py @@ -1,2 +1,56 @@ # SPDX-FileCopyrightText: 2023-2024 MTS (Mobile Telesystems) # SPDX-License-Identifier: Apache-2.0 +from fastapi import FastAPI, HTTPException +from starlette.middleware.cors import CORSMiddleware + +from syncmaster.backend.api.deps import ( + AuthMarker, + DatabaseEngineMarker, + DatabaseSessionMarker, + SettingsMarker, + UnitOfWorkMarker, +) +from syncmaster.backend.api.router import api_router +from syncmaster.backend.handler import ( + http_exception_handler, + syncmsater_exception_handler, +) +from syncmaster.backend.services import get_auth_scheme +from syncmaster.config import Settings +from syncmaster.db.factory import create_engine, create_session_factory, get_uow +from syncmaster.exceptions import SyncmasterError + + +def application_factory(settings: Settings) -> FastAPI: + application = FastAPI( + title=settings.PROJECT_NAME, + debug=settings.DEBUG, + ) + application.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], + ) + + application.include_router(api_router) + application.exception_handler(HTTPException)(http_exception_handler) + application.exception_handler(SyncmasterError)(syncmsater_exception_handler) + + engine = create_engine(connection_uri=settings.build_db_connection_uri()) + session_factory = create_session_factory(engine=engine) + + auth_scheme = get_auth_scheme(settings) + + application.dependency_overrides.update( + { + SettingsMarker: lambda: settings, + DatabaseEngineMarker: lambda: engine, + DatabaseSessionMarker: lambda: session_factory, + UnitOfWorkMarker: get_uow(session_factory, settings), + AuthMarker: auth_scheme, + } + ) + + return application diff --git a/syncmaster/backend/__main__.py 
b/syncmaster/backend/__main__.py new file mode 100755 index 00000000..079357f4 --- /dev/null +++ b/syncmaster/backend/__main__.py @@ -0,0 +1,35 @@ +#!/bin/env python3 +# SPDX-FileCopyrightText: 2023-2024 MTS (Mobile Telesystems) +# SPDX-License-Identifier: Apache-2.0 + +from __future__ import annotations + +import os +import sys +from pathlib import Path + +import uvicorn + +here = Path(__file__).resolve() + + +def main(prog_name: str | None = None, args: list[str] | None = None): + """Run uvicorn and pass the command line arguments to it.""" + if args is None: + args = sys.argv.copy() + prog_name = args.pop(0) + + if not prog_name: + prog_name = os.fspath(here) + + args = args.copy() + # prepend config path before command line arguments + args.insert(0, "--factory") + args.insert(1, "syncmaster.backend:get_application") + + # call uvicorn + uvicorn.main.main(args=args, prog_name=prog_name) + + +if __name__ == "__main__": + main() diff --git a/syncmaster/backend/export_openapi_schema.py b/syncmaster/backend/export_openapi_schema.py index 59c5ae8c..0552deb1 100644 --- a/syncmaster/backend/export_openapi_schema.py +++ b/syncmaster/backend/export_openapi_schema.py @@ -7,7 +7,7 @@ from fastapi import FastAPI -from syncmaster.backend.main import get_application +from syncmaster.backend import application_factory from syncmaster.config import Settings @@ -17,7 +17,7 @@ def get_openapi_schema(app: FastAPI) -> dict: if __name__ == "__main__": settings = Settings() - app = get_application(settings) + app = application_factory(settings) schema = get_openapi_schema(app) file_path = sys.argv[1] if not file_path: diff --git a/syncmaster/backend/logger.py b/syncmaster/backend/logger.py deleted file mode 100644 index 104aecaf..00000000 --- a/syncmaster/backend/logger.py +++ /dev/null @@ -1,2 +0,0 @@ -# SPDX-FileCopyrightText: 2023-2024 MTS (Mobile Telesystems) -# SPDX-License-Identifier: Apache-2.0 diff --git a/syncmaster/backend/main.py b/syncmaster/backend/main.py deleted file 
mode 100644 index 642e8537..00000000 --- a/syncmaster/backend/main.py +++ /dev/null @@ -1,63 +0,0 @@ -# SPDX-FileCopyrightText: 2023-2024 MTS (Mobile Telesystems) -# SPDX-License-Identifier: Apache-2.0 -import uvicorn -from fastapi import FastAPI, HTTPException -from starlette.middleware.cors import CORSMiddleware - -from syncmaster.backend.api.deps import ( - AuthMarker, - DatabaseEngineMarker, - DatabaseSessionMarker, - SettingsMarker, - UnitOfWorkMarker, -) -from syncmaster.backend.api.router import api_router -from syncmaster.backend.handler import ( - http_exception_handler, - syncmsater_exception_handler, -) -from syncmaster.backend.services import get_auth_scheme -from syncmaster.config import Settings -from syncmaster.db.factory import create_engine, create_session_factory, get_uow -from syncmaster.exceptions import SyncmasterError - - -def get_application(settings: Settings) -> FastAPI: - application = FastAPI( - title=settings.PROJECT_NAME, - debug=settings.DEBUG, - ) - application.add_middleware( - CORSMiddleware, - allow_origins=["*"], - allow_credentials=True, - allow_methods=["*"], - allow_headers=["*"], - ) - - application.include_router(api_router) - application.exception_handler(HTTPException)(http_exception_handler) - application.exception_handler(SyncmasterError)(syncmsater_exception_handler) - - engine = create_engine(connection_uri=settings.build_db_connection_uri()) - session_factory = create_session_factory(engine=engine) - - auth_scheme = get_auth_scheme(settings) - - application.dependency_overrides.update( - { - SettingsMarker: lambda: settings, - DatabaseEngineMarker: lambda: engine, - DatabaseSessionMarker: lambda: session_factory, - UnitOfWorkMarker: get_uow(session_factory, settings), - AuthMarker: auth_scheme, - } - ) - - return application - - -if __name__ == "__main__": - settings = Settings() - app = get_application(settings=settings) - uvicorn.run(app, host="0.0.0.0", port=8000) diff --git a/syncmaster/backend/pre_start.py 
b/syncmaster/backend/pre_start.py deleted file mode 100644 index b547d8a3..00000000 --- a/syncmaster/backend/pre_start.py +++ /dev/null @@ -1,94 +0,0 @@ -# SPDX-FileCopyrightText: 2023-2024 MTS (Mobile Telesystems) -# SPDX-License-Identifier: Apache-2.0 -import logging -from time import sleep - -from syncmaster.config import Settings, TestSettings -from tests.test_integration.test_run_transfer.conftest import get_spark_session - -TIMEOUT = 5 -COUNT = 12 - -logger = logging.getLogger(__name__) - - -def check_test_postgres(settings: Settings, test_settings: TestSettings) -> None: - from onetl.connection import Postgres - - spark_session = get_spark_session(settings) - - count = COUNT - connection = Postgres( - host=test_settings.TEST_POSTGRES_HOST, - port=test_settings.TEST_POSTGRES_PORT, - user=test_settings.TEST_POSTGRES_USER, - password=test_settings.TEST_POSTGRES_PASSWORD, - database=test_settings.TEST_POSTGRES_DB, - spark=spark_session, - ) - exception = None - while count > 0: - try: - connection.check() - return - except Exception: - count -= 1 - logger.info("Got exception on postgres. Will try after %d sec", TIMEOUT) - sleep(TIMEOUT) - if exception: - raise exception - - -def check_test_oracle(settings: Settings, test_settings: TestSettings) -> None: - from onetl.connection import Oracle - - spark_session = get_spark_session(settings) - - count = COUNT - connection = Oracle( - host=test_settings.TEST_ORACLE_HOST, - port=test_settings.TEST_ORACLE_PORT, - user=test_settings.TEST_ORACLE_USER, - password=test_settings.TEST_ORACLE_PASSWORD, - service_name=test_settings.TEST_ORACLE_SERVICE_NAME, - sid=test_settings.TEST_ORACLE_SID, - spark=spark_session, - ) - exception = None - while count > 0: - try: - connection.check() - return - except Exception: - count -= 1 - logger.info("Got exception on oracle. 
Will try after %d sec", TIMEOUT) - sleep(TIMEOUT) - if exception: - raise exception - - -def check_test_hive(settings: Settings, test_settings: TestSettings) -> None: - from onetl.connection import Hive - - spark_session = get_spark_session(settings) - - count = COUNT - connection = Hive( - cluster=test_settings.TEST_HIVE_CLUSTER, - spark=spark_session, - ) - exception = None - while count > 0: - try: - connection.check() - connection.execute("SHOW DATABASES") - return - except Exception: - count -= 1 - logger.info("Got exception on hive. will try after %d sec", TIMEOUT) - sleep(TIMEOUT) - if exception: - raise exception - - -settings = Settings() diff --git a/syncmaster/db/migrations/__init__.py b/syncmaster/db/migrations/__init__.py deleted file mode 100644 index 104aecaf..00000000 --- a/syncmaster/db/migrations/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# SPDX-FileCopyrightText: 2023-2024 MTS (Mobile Telesystems) -# SPDX-License-Identifier: Apache-2.0 diff --git a/syncmaster/db/migrations/__main__.py b/syncmaster/db/migrations/__main__.py new file mode 100755 index 00000000..56f51292 --- /dev/null +++ b/syncmaster/db/migrations/__main__.py @@ -0,0 +1,38 @@ +#!/bin/env python3 + +# SPDX-FileCopyrightText: 2023-2024 MTS (Mobile Telesystems) +# SPDX-License-Identifier: Apache-2.0 + +from __future__ import annotations + +import os +import sys +from pathlib import Path + +from alembic.config import CommandLine + +here = Path(__file__).resolve() +config_path = here.parent / "alembic.ini" + + +def main(prog_name: str | None = None, args: list[str] | None = None): + """Run alembic and pass the command line arguments to it.""" + if args is None: + args = sys.argv.copy() + prog_name = args.pop(0) + + if not prog_name: + prog_name = os.fspath(here) + + # prepend config path before command line arguments + args = args.copy() + args.insert(0, "-c") + args.insert(1, os.fspath(config_path)) + + # call alembic + cmd = CommandLine(prog=prog_name) + cmd.main(argv=args) + + +if 
__name__ == "__main__": + main() diff --git a/syncmaster/db/alembic.ini b/syncmaster/db/migrations/alembic.ini similarity index 90% rename from syncmaster/db/alembic.ini rename to syncmaster/db/migrations/alembic.ini index 06eeb6a2..2768810c 100644 --- a/syncmaster/db/alembic.ini +++ b/syncmaster/db/migrations/alembic.ini @@ -1,7 +1,7 @@ [alembic] -script_location = %(here)s/migrations +script_location = %(here)s file_template = %%(year)d-%%(month).2d-%%(day).2d_%%(rev)s_%%(slug)s -prepend_sys_path = . +prepend_sys_path = %(here)s/../../../ version_path_separator = os [post_write_hooks] diff --git a/syncmaster/db/migrations/env.py b/syncmaster/db/migrations/env.py index b470a4d4..ebff2aa9 100644 --- a/syncmaster/db/migrations/env.py +++ b/syncmaster/db/migrations/env.py @@ -18,7 +18,9 @@ if config.config_file_name is not None: fileConfig(config.config_file_name) -config.set_main_option("sqlalchemy.url", Settings().build_db_connection_uri()) +if not config.get_main_option("sqlalchemy.url"): + # read application settings only if sqlalchemy.url is not being passed via cli arguments + config.set_main_option("sqlalchemy.url", Settings().build_db_connection_uri()) target_metadata = ( Base.metadata, diff --git a/tests/conftest.py b/tests/conftest.py index df39ad71..31293944 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -17,8 +17,8 @@ create_async_engine, ) +from syncmaster.backend import application_factory from syncmaster.backend.api.v1.auth.utils import sign_jwt -from syncmaster.backend.main import get_application from syncmaster.config import Settings, TestSettings from syncmaster.db.models import Base, Connection, Queue from syncmaster.db.repositories.utils import decrypt_auth_data @@ -110,7 +110,7 @@ async def client( async_engine: AsyncEngine, ) -> AsyncGenerator: logger.info("START CLIENT FIXTURE", datetime.now().isoformat()) - app = get_application(settings=settings) + app = application_factory(settings=settings) async with AsyncClient(app=app, 
base_url="http://testserver") as client: yield client logger.info("END CLIENT FIXTURE", datetime.now().isoformat()) From 11df203a9be01a1048fc2113b8f675be1b787ed8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Apr 2024 06:11:12 +0000 Subject: [PATCH 08/11] Bump the python-packages group with 4 updates Bumps the python-packages group with 4 updates: [pydantic](https://github.com/pydantic/pydantic), [faker](https://github.com/joke2k/faker), [black](https://github.com/psf/black) and [sphinx-issues](https://github.com/sloria/sphinx-issues). Updates `pydantic` from 2.6.4 to 2.7.0 - [Release notes](https://github.com/pydantic/pydantic/releases) - [Changelog](https://github.com/pydantic/pydantic/blob/main/HISTORY.md) - [Commits](https://github.com/pydantic/pydantic/compare/v2.6.4...v2.7.0) Updates `faker` from 24.7.1 to 24.9.0 - [Release notes](https://github.com/joke2k/faker/releases) - [Changelog](https://github.com/joke2k/faker/blob/master/CHANGELOG.md) - [Commits](https://github.com/joke2k/faker/compare/v24.7.1...v24.9.0) Updates `black` from 24.3.0 to 24.4.0 - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/compare/24.3.0...24.4.0) Updates `sphinx-issues` from 4.0.0 to 4.1.0 - [Commits](https://github.com/sloria/sphinx-issues/compare/4.0.0...4.1.0) --- updated-dependencies: - dependency-name: pydantic dependency-type: direct:production update-type: version-update:semver-minor dependency-group: python-packages - dependency-name: faker dependency-type: direct:development update-type: version-update:semver-minor dependency-group: python-packages - dependency-name: black dependency-type: direct:development update-type: version-update:semver-minor dependency-group: python-packages - dependency-name: sphinx-issues dependency-type: direct:development update-type: version-update:semver-minor 
dependency-group: python-packages ... Signed-off-by: dependabot[bot] --- poetry.lock | 228 ++++++++++++++++++++++++++-------------------------- 1 file changed, 114 insertions(+), 114 deletions(-) diff --git a/poetry.lock b/poetry.lock index 7b75863f..c4ecabd6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -305,33 +305,33 @@ files = [ [[package]] name = "black" -version = "24.3.0" +version = "24.4.0" description = "The uncompromising code formatter." optional = false python-versions = ">=3.8" files = [ - {file = "black-24.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7d5e026f8da0322b5662fa7a8e752b3fa2dac1c1cbc213c3d7ff9bdd0ab12395"}, - {file = "black-24.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9f50ea1132e2189d8dff0115ab75b65590a3e97de1e143795adb4ce317934995"}, - {file = "black-24.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2af80566f43c85f5797365077fb64a393861a3730bd110971ab7a0c94e873e7"}, - {file = "black-24.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:4be5bb28e090456adfc1255e03967fb67ca846a03be7aadf6249096100ee32d0"}, - {file = "black-24.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4f1373a7808a8f135b774039f61d59e4be7eb56b2513d3d2f02a8b9365b8a8a9"}, - {file = "black-24.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aadf7a02d947936ee418777e0247ea114f78aff0d0959461057cae8a04f20597"}, - {file = "black-24.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c02e4ea2ae09d16314d30912a58ada9a5c4fdfedf9512d23326128ac08ac3d"}, - {file = "black-24.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:bf21b7b230718a5f08bd32d5e4f1db7fc8788345c8aea1d155fc17852b3410f5"}, - {file = "black-24.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:2818cf72dfd5d289e48f37ccfa08b460bf469e67fb7c4abb07edc2e9f16fb63f"}, - {file = "black-24.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4acf672def7eb1725f41f38bf6bf425c8237248bb0804faa3965c036f7672d11"}, - {file = 
"black-24.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7ed6668cbbfcd231fa0dc1b137d3e40c04c7f786e626b405c62bcd5db5857e4"}, - {file = "black-24.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:56f52cfbd3dabe2798d76dbdd299faa046a901041faf2cf33288bc4e6dae57b5"}, - {file = "black-24.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:79dcf34b33e38ed1b17434693763301d7ccbd1c5860674a8f871bd15139e7837"}, - {file = "black-24.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e19cb1c6365fd6dc38a6eae2dcb691d7d83935c10215aef8e6c38edee3f77abd"}, - {file = "black-24.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b76c275e4c1c5ce6e9870911384bff5ca31ab63d19c76811cb1fb162678213"}, - {file = "black-24.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:b5991d523eee14756f3c8d5df5231550ae8993e2286b8014e2fdea7156ed0959"}, - {file = "black-24.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c45f8dff244b3c431b36e3224b6be4a127c6aca780853574c00faf99258041eb"}, - {file = "black-24.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6905238a754ceb7788a73f02b45637d820b2f5478b20fec82ea865e4f5d4d9f7"}, - {file = "black-24.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7de8d330763c66663661a1ffd432274a2f92f07feeddd89ffd085b5744f85e7"}, - {file = "black-24.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:7bb041dca0d784697af4646d3b62ba4a6b028276ae878e53f6b4f74ddd6db99f"}, - {file = "black-24.3.0-py3-none-any.whl", hash = "sha256:41622020d7120e01d377f74249e677039d20e6344ff5851de8a10f11f513bf93"}, - {file = "black-24.3.0.tar.gz", hash = "sha256:a0c9c4a0771afc6919578cec71ce82a3e31e054904e7197deacbc9382671c41f"}, + {file = "black-24.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6ad001a9ddd9b8dfd1b434d566be39b1cd502802c8d38bbb1ba612afda2ef436"}, + {file = "black-24.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3a3a092b8b756c643fe45f4624dbd5a389f770a4ac294cf4d0fce6af86addaf"}, + {file = 
"black-24.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dae79397f367ac8d7adb6c779813328f6d690943f64b32983e896bcccd18cbad"}, + {file = "black-24.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:71d998b73c957444fb7c52096c3843875f4b6b47a54972598741fe9a7f737fcb"}, + {file = "black-24.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8e5537f456a22cf5cfcb2707803431d2feeb82ab3748ade280d6ccd0b40ed2e8"}, + {file = "black-24.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:64e60a7edd71fd542a10a9643bf369bfd2644de95ec71e86790b063aa02ff745"}, + {file = "black-24.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cd5b4f76056cecce3e69b0d4c228326d2595f506797f40b9233424e2524c070"}, + {file = "black-24.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:64578cf99b6b46a6301bc28bdb89f9d6f9b592b1c5837818a177c98525dbe397"}, + {file = "black-24.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f95cece33329dc4aa3b0e1a771c41075812e46cf3d6e3f1dfe3d91ff09826ed2"}, + {file = "black-24.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4396ca365a4310beef84d446ca5016f671b10f07abdba3e4e4304218d2c71d33"}, + {file = "black-24.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44d99dfdf37a2a00a6f7a8dcbd19edf361d056ee51093b2445de7ca09adac965"}, + {file = "black-24.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:21f9407063ec71c5580b8ad975653c66508d6a9f57bd008bb8691d273705adcd"}, + {file = "black-24.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:652e55bb722ca026299eb74e53880ee2315b181dfdd44dca98e43448620ddec1"}, + {file = "black-24.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7f2966b9b2b3b7104fca9d75b2ee856fe3fdd7ed9e47c753a4bb1a675f2caab8"}, + {file = "black-24.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bb9ca06e556a09f7f7177bc7cb604e5ed2d2df1e9119e4f7d2f1f7071c32e5d"}, + {file = "black-24.4.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:d4e71cdebdc8efeb6deaf5f2deb28325f8614d48426bed118ecc2dcaefb9ebf3"}, + {file = "black-24.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6644f97a7ef6f401a150cca551a1ff97e03c25d8519ee0bbc9b0058772882665"}, + {file = "black-24.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:75a2d0b4f5eb81f7eebc31f788f9830a6ce10a68c91fbe0fade34fff7a2836e6"}, + {file = "black-24.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb949f56a63c5e134dfdca12091e98ffb5fd446293ebae123d10fc1abad00b9e"}, + {file = "black-24.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:7852b05d02b5b9a8c893ab95863ef8986e4dda29af80bbbda94d7aee1abf8702"}, + {file = "black-24.4.0-py3-none-any.whl", hash = "sha256:74eb9b5420e26b42c00a3ff470dc0cd144b80a766128b1771d07643165e08d0e"}, + {file = "black-24.4.0.tar.gz", hash = "sha256:f07b69fda20578367eaebbd670ff8fc653ab181e1ff95d84497f9fa20e7d0641"}, ] [package.dependencies] @@ -861,13 +861,13 @@ files = [ [[package]] name = "faker" -version = "24.7.1" +version = "24.9.0" description = "Faker is a Python package that generates fake data for you." 
optional = false python-versions = ">=3.8" files = [ - {file = "Faker-24.7.1-py3-none-any.whl", hash = "sha256:73f2bd886e8ce751e660c7d37a6c0a128aab5e1551359335bb79cfea0f4fabfc"}, - {file = "Faker-24.7.1.tar.gz", hash = "sha256:39d34c63f0d62ed574161e23fe32008917b923d18098ce94c2650fe16463b7d5"}, + {file = "Faker-24.9.0-py3-none-any.whl", hash = "sha256:97c7874665e8eb7b517f97bf3b59f03bf3f07513fe2c159e98b6b9ea6b9f2b3d"}, + {file = "Faker-24.9.0.tar.gz", hash = "sha256:73b1e7967b0ceeac42fc99a8c973bb49e4499cc4044d20d17ab661d5cb7eda1d"}, ] [package.dependencies] @@ -2001,18 +2001,18 @@ files = [ [[package]] name = "pydantic" -version = "2.6.4" +version = "2.7.0" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.6.4-py3-none-any.whl", hash = "sha256:cc46fce86607580867bdc3361ad462bab9c222ef042d3da86f2fb333e1d916c5"}, - {file = "pydantic-2.6.4.tar.gz", hash = "sha256:b1704e0847db01817624a6b86766967f552dd9dbf3afba4004409f908dcc84e6"}, + {file = "pydantic-2.7.0-py3-none-any.whl", hash = "sha256:9dee74a271705f14f9a1567671d144a851c675b072736f0a7b2608fd9e495352"}, + {file = "pydantic-2.7.0.tar.gz", hash = "sha256:b5ecdd42262ca2462e2624793551e80911a1e989f462910bb81aef974b4bb383"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.16.3" +pydantic-core = "2.18.1" typing-extensions = ">=4.6.1" [package.extras] @@ -2020,90 +2020,90 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.16.3" -description = "" +version = "2.18.1" +description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.16.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:75b81e678d1c1ede0785c7f46690621e4c6e63ccd9192af1f0bd9d504bbb6bf4"}, - {file = "pydantic_core-2.16.3-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:9c865a7ee6f93783bd5d781af5a4c43dadc37053a5b42f7d18dc019f8c9d2bd1"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:162e498303d2b1c036b957a1278fa0899d02b2842f1ff901b6395104c5554a45"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f583bd01bbfbff4eaee0868e6fc607efdfcc2b03c1c766b06a707abbc856187"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b926dd38db1519ed3043a4de50214e0d600d404099c3392f098a7f9d75029ff8"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:716b542728d4c742353448765aa7cdaa519a7b82f9564130e2b3f6766018c9ec"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4ad7f7ee1a13d9cb49d8198cd7d7e3aa93e425f371a68235f784e99741561f"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd87f48924f360e5d1c5f770d6155ce0e7d83f7b4e10c2f9ec001c73cf475c99"}, - {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0df446663464884297c793874573549229f9eca73b59360878f382a0fc085979"}, - {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4df8a199d9f6afc5ae9a65f8f95ee52cae389a8c6b20163762bde0426275b7db"}, - {file = "pydantic_core-2.16.3-cp310-none-win32.whl", hash = "sha256:456855f57b413f077dff513a5a28ed838dbbb15082ba00f80750377eed23d132"}, - {file = "pydantic_core-2.16.3-cp310-none-win_amd64.whl", hash = "sha256:732da3243e1b8d3eab8c6ae23ae6a58548849d2e4a4e03a1924c8ddf71a387cb"}, - {file = "pydantic_core-2.16.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:519ae0312616026bf4cedc0fe459e982734f3ca82ee8c7246c19b650b60a5ee4"}, - {file = "pydantic_core-2.16.3-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:b3992a322a5617ded0a9f23fd06dbc1e4bd7cf39bc4ccf344b10f80af58beacd"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d62da299c6ecb04df729e4b5c52dc0d53f4f8430b4492b93aa8de1f541c4aac"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2acca2be4bb2f2147ada8cac612f8a98fc09f41c89f87add7256ad27332c2fda"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b662180108c55dfbf1280d865b2d116633d436cfc0bba82323554873967b340"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7c6ed0dc9d8e65f24f5824291550139fe6f37fac03788d4580da0d33bc00c97"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1bb0827f56654b4437955555dc3aeeebeddc47c2d7ed575477f082622c49e"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e56f8186d6210ac7ece503193ec84104da7ceb98f68ce18c07282fcc2452e76f"}, - {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:936e5db01dd49476fa8f4383c259b8b1303d5dd5fb34c97de194560698cc2c5e"}, - {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33809aebac276089b78db106ee692bdc9044710e26f24a9a2eaa35a0f9fa70ba"}, - {file = "pydantic_core-2.16.3-cp311-none-win32.whl", hash = "sha256:ded1c35f15c9dea16ead9bffcde9bb5c7c031bff076355dc58dcb1cb436c4721"}, - {file = "pydantic_core-2.16.3-cp311-none-win_amd64.whl", hash = "sha256:d89ca19cdd0dd5f31606a9329e309d4fcbb3df860960acec32630297d61820df"}, - {file = "pydantic_core-2.16.3-cp311-none-win_arm64.whl", hash = "sha256:6162f8d2dc27ba21027f261e4fa26f8bcb3cf9784b7f9499466a311ac284b5b9"}, - {file = "pydantic_core-2.16.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = 
"sha256:0f56ae86b60ea987ae8bcd6654a887238fd53d1384f9b222ac457070b7ac4cff"}, - {file = "pydantic_core-2.16.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9bd22a2a639e26171068f8ebb5400ce2c1bc7d17959f60a3b753ae13c632975"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4204e773b4b408062960e65468d5346bdfe139247ee5f1ca2a378983e11388a2"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f651dd19363c632f4abe3480a7c87a9773be27cfe1341aef06e8759599454120"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aaf09e615a0bf98d406657e0008e4a8701b11481840be7d31755dc9f97c44053"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e47755d8152c1ab5b55928ab422a76e2e7b22b5ed8e90a7d584268dd49e9c6b"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:500960cb3a0543a724a81ba859da816e8cf01b0e6aaeedf2c3775d12ee49cade"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf6204fe865da605285c34cf1172879d0314ff267b1c35ff59de7154f35fdc2e"}, - {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d33dd21f572545649f90c38c227cc8631268ba25c460b5569abebdd0ec5974ca"}, - {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49d5d58abd4b83fb8ce763be7794d09b2f50f10aa65c0f0c1696c677edeb7cbf"}, - {file = "pydantic_core-2.16.3-cp312-none-win32.whl", hash = "sha256:f53aace168a2a10582e570b7736cc5bef12cae9cf21775e3eafac597e8551fbe"}, - {file = "pydantic_core-2.16.3-cp312-none-win_amd64.whl", hash = "sha256:0d32576b1de5a30d9a97f300cc6a3f4694c428d956adbc7e6e2f9cad279e45ed"}, - {file = "pydantic_core-2.16.3-cp312-none-win_arm64.whl", hash = 
"sha256:ec08be75bb268473677edb83ba71e7e74b43c008e4a7b1907c6d57e940bf34b6"}, - {file = "pydantic_core-2.16.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b1f6f5938d63c6139860f044e2538baeee6f0b251a1816e7adb6cbce106a1f01"}, - {file = "pydantic_core-2.16.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2a1ef6a36fdbf71538142ed604ad19b82f67b05749512e47f247a6ddd06afdc7"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:704d35ecc7e9c31d48926150afada60401c55efa3b46cd1ded5a01bdffaf1d48"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d937653a696465677ed583124b94a4b2d79f5e30b2c46115a68e482c6a591c8a"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9803edf8e29bd825f43481f19c37f50d2b01899448273b3a7758441b512acf8"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72282ad4892a9fb2da25defeac8c2e84352c108705c972db82ab121d15f14e6d"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f752826b5b8361193df55afcdf8ca6a57d0232653494ba473630a83ba50d8c9"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4384a8f68ddb31a0b0c3deae88765f5868a1b9148939c3f4121233314ad5532c"}, - {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4b2bf78342c40b3dc830880106f54328928ff03e357935ad26c7128bbd66ce8"}, - {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:13dcc4802961b5f843a9385fc821a0b0135e8c07fc3d9949fd49627c1a5e6ae5"}, - {file = "pydantic_core-2.16.3-cp38-none-win32.whl", hash = "sha256:e3e70c94a0c3841e6aa831edab1619ad5c511199be94d0c11ba75fe06efe107a"}, - {file = "pydantic_core-2.16.3-cp38-none-win_amd64.whl", hash = "sha256:ecdf6bf5f578615f2e985a5e1f6572e23aa632c4bd1dc67f8f406d445ac115ed"}, - 
{file = "pydantic_core-2.16.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:bda1ee3e08252b8d41fa5537413ffdddd58fa73107171a126d3b9ff001b9b820"}, - {file = "pydantic_core-2.16.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:21b888c973e4f26b7a96491c0965a8a312e13be108022ee510248fe379a5fa23"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be0ec334369316fa73448cc8c982c01e5d2a81c95969d58b8f6e272884df0074"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b5b6079cc452a7c53dd378c6f881ac528246b3ac9aae0f8eef98498a75657805"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee8d5f878dccb6d499ba4d30d757111847b6849ae07acdd1205fffa1fc1253c"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7233d65d9d651242a68801159763d09e9ec96e8a158dbf118dc090cd77a104c9"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6119dc90483a5cb50a1306adb8d52c66e447da88ea44f323e0ae1a5fcb14256"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:578114bc803a4c1ff9946d977c221e4376620a46cf78da267d946397dc9514a8"}, - {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d8f99b147ff3fcf6b3cc60cb0c39ea443884d5559a30b1481e92495f2310ff2b"}, - {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4ac6b4ce1e7283d715c4b729d8f9dab9627586dafce81d9eaa009dd7f25dd972"}, - {file = "pydantic_core-2.16.3-cp39-none-win32.whl", hash = "sha256:e7774b570e61cb998490c5235740d475413a1f6de823169b4cf94e2fe9e9f6b2"}, - {file = "pydantic_core-2.16.3-cp39-none-win_amd64.whl", hash = "sha256:9091632a25b8b87b9a605ec0e61f241c456e9248bfdcf7abdf344fdb169c81cf"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", 
hash = "sha256:36fa178aacbc277bc6b62a2c3da95226520da4f4e9e206fdf076484363895d2c"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:dcca5d2bf65c6fb591fff92da03f94cd4f315972f97c21975398bd4bd046854a"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a72fb9963cba4cd5793854fd12f4cfee731e86df140f59ff52a49b3552db241"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b60cc1a081f80a2105a59385b92d82278b15d80ebb3adb200542ae165cd7d183"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cbcc558401de90a746d02ef330c528f2e668c83350f045833543cd57ecead1ad"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fee427241c2d9fb7192b658190f9f5fd6dfe41e02f3c1489d2ec1e6a5ab1e04a"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4cb85f693044e0f71f394ff76c98ddc1bc0953e48c061725e540396d5c8a2e1"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b29eeb887aa931c2fcef5aa515d9d176d25006794610c264ddc114c053bf96fe"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a425479ee40ff021f8216c9d07a6a3b54b31c8267c6e17aa88b70d7ebd0e5e5b"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5c5cbc703168d1b7a838668998308018a2718c2130595e8e190220238addc96f"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b6add4c0b39a513d323d3b93bc173dac663c27b99860dd5bf491b240d26137"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f76ee558751746d6a38f89d60b6228fa174e5172d143886af0f85aa306fd89"}, - {file = 
"pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:00ee1c97b5364b84cb0bd82e9bbf645d5e2871fb8c58059d158412fee2d33d8a"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:287073c66748f624be4cef893ef9174e3eb88fe0b8a78dc22e88eca4bc357ca6"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed25e1835c00a332cb10c683cd39da96a719ab1dfc08427d476bce41b92531fc"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:86b3d0033580bd6bbe07590152007275bd7af95f98eaa5bd36f3da219dcd93da"}, - {file = "pydantic_core-2.16.3.tar.gz", hash = "sha256:1cac689f80a3abab2d3c0048b29eea5751114054f032a941a32de4c852c59cad"}, + {file = "pydantic_core-2.18.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ee9cf33e7fe14243f5ca6977658eb7d1042caaa66847daacbd2117adb258b226"}, + {file = "pydantic_core-2.18.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6b7bbb97d82659ac8b37450c60ff2e9f97e4eb0f8a8a3645a5568b9334b08b50"}, + {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df4249b579e75094f7e9bb4bd28231acf55e308bf686b952f43100a5a0be394c"}, + {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d0491006a6ad20507aec2be72e7831a42efc93193d2402018007ff827dc62926"}, + {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ae80f72bb7a3e397ab37b53a2b49c62cc5496412e71bc4f1277620a7ce3f52b"}, + {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:58aca931bef83217fca7a390e0486ae327c4af9c3e941adb75f8772f8eeb03a1"}, + {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1be91ad664fc9245404a789d60cba1e91c26b1454ba136d2a1bf0c2ac0c0505a"}, + {file = 
"pydantic_core-2.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:667880321e916a8920ef49f5d50e7983792cf59f3b6079f3c9dac2b88a311d17"}, + {file = "pydantic_core-2.18.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f7054fdc556f5421f01e39cbb767d5ec5c1139ea98c3e5b350e02e62201740c7"}, + {file = "pydantic_core-2.18.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:030e4f9516f9947f38179249778709a460a3adb516bf39b5eb9066fcfe43d0e6"}, + {file = "pydantic_core-2.18.1-cp310-none-win32.whl", hash = "sha256:2e91711e36e229978d92642bfc3546333a9127ecebb3f2761372e096395fc649"}, + {file = "pydantic_core-2.18.1-cp310-none-win_amd64.whl", hash = "sha256:9a29726f91c6cb390b3c2338f0df5cd3e216ad7a938762d11c994bb37552edb0"}, + {file = "pydantic_core-2.18.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:9ece8a49696669d483d206b4474c367852c44815fca23ac4e48b72b339807f80"}, + {file = "pydantic_core-2.18.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a5d83efc109ceddb99abd2c1316298ced2adb4570410defe766851a804fcd5b"}, + {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7973c381283783cd1043a8c8f61ea5ce7a3a58b0369f0ee0ee975eaf2f2a1b"}, + {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:54c7375c62190a7845091f521add19b0f026bcf6ae674bdb89f296972272e86d"}, + {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd63cec4e26e790b70544ae5cc48d11b515b09e05fdd5eff12e3195f54b8a586"}, + {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:561cf62c8a3498406495cfc49eee086ed2bb186d08bcc65812b75fda42c38294"}, + {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68717c38a68e37af87c4da20e08f3e27d7e4212e99e96c3d875fbf3f4812abfc"}, + {file = 
"pydantic_core-2.18.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d5728e93d28a3c63ee513d9ffbac9c5989de8c76e049dbcb5bfe4b923a9739d"}, + {file = "pydantic_core-2.18.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f0f17814c505f07806e22b28856c59ac80cee7dd0fbb152aed273e116378f519"}, + {file = "pydantic_core-2.18.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d816f44a51ba5175394bc6c7879ca0bd2be560b2c9e9f3411ef3a4cbe644c2e9"}, + {file = "pydantic_core-2.18.1-cp311-none-win32.whl", hash = "sha256:09f03dfc0ef8c22622eaa8608caa4a1e189cfb83ce847045eca34f690895eccb"}, + {file = "pydantic_core-2.18.1-cp311-none-win_amd64.whl", hash = "sha256:27f1009dc292f3b7ca77feb3571c537276b9aad5dd4efb471ac88a8bd09024e9"}, + {file = "pydantic_core-2.18.1-cp311-none-win_arm64.whl", hash = "sha256:48dd883db92e92519201f2b01cafa881e5f7125666141a49ffba8b9facc072b0"}, + {file = "pydantic_core-2.18.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b6b0e4912030c6f28bcb72b9ebe4989d6dc2eebcd2a9cdc35fefc38052dd4fe8"}, + {file = "pydantic_core-2.18.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f3202a429fe825b699c57892d4371c74cc3456d8d71b7f35d6028c96dfecad31"}, + {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3982b0a32d0a88b3907e4b0dc36809fda477f0757c59a505d4e9b455f384b8b"}, + {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25595ac311f20e5324d1941909b0d12933f1fd2171075fcff763e90f43e92a0d"}, + {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:14fe73881cf8e4cbdaded8ca0aa671635b597e42447fec7060d0868b52d074e6"}, + {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca976884ce34070799e4dfc6fbd68cb1d181db1eefe4a3a94798ddfb34b8867f"}, + {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:684d840d2c9ec5de9cb397fcb3f36d5ebb6fa0d94734f9886032dd796c1ead06"}, + {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:54764c083bbe0264f0f746cefcded6cb08fbbaaf1ad1d78fb8a4c30cff999a90"}, + {file = "pydantic_core-2.18.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:201713f2f462e5c015b343e86e68bd8a530a4f76609b33d8f0ec65d2b921712a"}, + {file = "pydantic_core-2.18.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fd1a9edb9dd9d79fbeac1ea1f9a8dd527a6113b18d2e9bcc0d541d308dae639b"}, + {file = "pydantic_core-2.18.1-cp312-none-win32.whl", hash = "sha256:d5e6b7155b8197b329dc787356cfd2684c9d6a6b1a197f6bbf45f5555a98d411"}, + {file = "pydantic_core-2.18.1-cp312-none-win_amd64.whl", hash = "sha256:9376d83d686ec62e8b19c0ac3bf8d28d8a5981d0df290196fb6ef24d8a26f0d6"}, + {file = "pydantic_core-2.18.1-cp312-none-win_arm64.whl", hash = "sha256:c562b49c96906b4029b5685075fe1ebd3b5cc2601dfa0b9e16c2c09d6cbce048"}, + {file = "pydantic_core-2.18.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:3e352f0191d99fe617371096845070dee295444979efb8f27ad941227de6ad09"}, + {file = "pydantic_core-2.18.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0295d52b012cbe0d3059b1dba99159c3be55e632aae1999ab74ae2bd86a33d7"}, + {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56823a92075780582d1ffd4489a2e61d56fd3ebb4b40b713d63f96dd92d28144"}, + {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dd3f79e17b56741b5177bcc36307750d50ea0698df6aa82f69c7db32d968c1c2"}, + {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38a5024de321d672a132b1834a66eeb7931959c59964b777e8f32dbe9523f6b1"}, + {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2ce426ee691319d4767748c8e0895cfc56593d725594e415f274059bcf3cb76"}, + {file = 
"pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2adaeea59849ec0939af5c5d476935f2bab4b7f0335b0110f0f069a41024278e"}, + {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9b6431559676a1079eac0f52d6d0721fb8e3c5ba43c37bc537c8c83724031feb"}, + {file = "pydantic_core-2.18.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:85233abb44bc18d16e72dc05bf13848a36f363f83757541f1a97db2f8d58cfd9"}, + {file = "pydantic_core-2.18.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:641a018af4fe48be57a2b3d7a1f0f5dbca07c1d00951d3d7463f0ac9dac66622"}, + {file = "pydantic_core-2.18.1-cp38-none-win32.whl", hash = "sha256:63d7523cd95d2fde0d28dc42968ac731b5bb1e516cc56b93a50ab293f4daeaad"}, + {file = "pydantic_core-2.18.1-cp38-none-win_amd64.whl", hash = "sha256:907a4d7720abfcb1c81619863efd47c8a85d26a257a2dbebdb87c3b847df0278"}, + {file = "pydantic_core-2.18.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:aad17e462f42ddbef5984d70c40bfc4146c322a2da79715932cd8976317054de"}, + {file = "pydantic_core-2.18.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:94b9769ba435b598b547c762184bcfc4783d0d4c7771b04a3b45775c3589ca44"}, + {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80e0e57cc704a52fb1b48f16d5b2c8818da087dbee6f98d9bf19546930dc64b5"}, + {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:76b86e24039c35280ceee6dce7e62945eb93a5175d43689ba98360ab31eebc4a"}, + {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12a05db5013ec0ca4a32cc6433f53faa2a014ec364031408540ba858c2172bb0"}, + {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:250ae39445cb5475e483a36b1061af1bc233de3e9ad0f4f76a71b66231b07f88"}, + {file = 
"pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a32204489259786a923e02990249c65b0f17235073149d0033efcebe80095570"}, + {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6395a4435fa26519fd96fdccb77e9d00ddae9dd6c742309bd0b5610609ad7fb2"}, + {file = "pydantic_core-2.18.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2533ad2883f001efa72f3d0e733fb846710c3af6dcdd544fe5bf14fa5fe2d7db"}, + {file = "pydantic_core-2.18.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b560b72ed4816aee52783c66854d96157fd8175631f01ef58e894cc57c84f0f6"}, + {file = "pydantic_core-2.18.1-cp39-none-win32.whl", hash = "sha256:582cf2cead97c9e382a7f4d3b744cf0ef1a6e815e44d3aa81af3ad98762f5a9b"}, + {file = "pydantic_core-2.18.1-cp39-none-win_amd64.whl", hash = "sha256:ca71d501629d1fa50ea7fa3b08ba884fe10cefc559f5c6c8dfe9036c16e8ae89"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e178e5b66a06ec5bf51668ec0d4ac8cfb2bdcb553b2c207d58148340efd00143"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:72722ce529a76a4637a60be18bd789d8fb871e84472490ed7ddff62d5fed620d"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fe0c1ce5b129455e43f941f7a46f61f3d3861e571f2905d55cdbb8b5c6f5e2c"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4284c621f06a72ce2cb55f74ea3150113d926a6eb78ab38340c08f770eb9b4d"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a0c3e718f4e064efde68092d9d974e39572c14e56726ecfaeebbe6544521f47"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2027493cc44c23b598cfaf200936110433d9caa84e2c6cf487a83999638a96ac"}, + {file = 
"pydantic_core-2.18.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:76909849d1a6bffa5a07742294f3fa1d357dc917cb1fe7b470afbc3a7579d539"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ee7ccc7fb7e921d767f853b47814c3048c7de536663e82fbc37f5eb0d532224b"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ee2794111c188548a4547eccc73a6a8527fe2af6cf25e1a4ebda2fd01cdd2e60"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a139fe9f298dc097349fb4f28c8b81cc7a202dbfba66af0e14be5cfca4ef7ce5"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d074b07a10c391fc5bbdcb37b2f16f20fcd9e51e10d01652ab298c0d07908ee2"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c69567ddbac186e8c0aadc1f324a60a564cfe25e43ef2ce81bcc4b8c3abffbae"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:baf1c7b78cddb5af00971ad5294a4583188bda1495b13760d9f03c9483bb6203"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2684a94fdfd1b146ff10689c6e4e815f6a01141781c493b97342cdc5b06f4d5d"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:73c1bc8a86a5c9e8721a088df234265317692d0b5cd9e86e975ce3bc3db62a59"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e60defc3c15defb70bb38dd605ff7e0fae5f6c9c7cbfe0ad7868582cb7e844a6"}, + {file = "pydantic_core-2.18.1.tar.gz", hash = "sha256:de9d3e8717560eb05e28739d1b35e4eac2e458553a52a301e51352a7ffc86a35"}, ] [package.dependencies] @@ -2624,13 +2624,13 @@ test = ["beautifulsoup4", "pytest", "pytest-cov"] [[package]] name = "sphinx-issues" -version = "4.0.0" +version = "4.1.0" description = "A Sphinx extension for linking to your project's issue tracker" 
optional = false python-versions = ">=3.8" files = [ - {file = "sphinx_issues-4.0.0-py3-none-any.whl", hash = "sha256:ce1ee87682eb8e661d2d500672756916f113ac0a37da3678aa474b7d8ce3d039"}, - {file = "sphinx_issues-4.0.0.tar.gz", hash = "sha256:5b8fcc229ffaeaef53fef0dca88dc89bc14b4aa81f221d9682a64ecced02e25d"}, + {file = "sphinx_issues-4.1.0-py3-none-any.whl", hash = "sha256:d779dddff441175c9fddb7a4018eca38f9f6cdb1e0a2fe31d93b4f89587c7ba1"}, + {file = "sphinx_issues-4.1.0.tar.gz", hash = "sha256:a67f7ef31d164b420b2f21b6c9b020baeb4a014afd4045f5be314c984e8ee520"}, ] [package.dependencies] From 7cd7abe68a00993cd99cb0b8758d64702f028e3e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=D0=9C=D0=B0=D1=80=D1=82=D1=8B=D0=BD=D0=BE=D0=B2=20=D0=9C?= =?UTF-8?q?=D0=B0=D0=BA=D1=81=D0=B8=D0=BC=20=D0=A1=D0=B5=D1=80=D0=B3=D0=B5?= =?UTF-8?q?=D0=B5=D0=B2=D0=B8=D1=87?= Date: Mon, 15 Apr 2024 10:44:14 +0300 Subject: [PATCH 09/11] Fix tests --- .../test_connections/test_copy_connection.py | 2 +- .../test_create_oracle_connection.py | 2 +- .../test_connections/test_create_connection.py | 8 ++++---- .../test_connections/test_update_connection.py | 4 ++-- .../test_groups/test_add_user_to_group.py | 2 +- .../test_groups/test_update_group_by_id.py | 2 +- tests/test_unit/test_queue/test_create_queue.py | 12 ++++++------ .../test_unit/test_transfers/test_copy_transfer.py | 2 +- .../test_transfers/test_create_transfer.py | 14 +++++++------- .../test_transfers/test_update_transfer.py | 2 +- tests/test_unit/test_users.py | 2 +- 11 files changed, 26 insertions(+), 26 deletions(-) diff --git a/tests/test_unit/test_connections/test_copy_connection.py b/tests/test_unit/test_connections/test_copy_connection.py index 8e3d572f..e8cc013d 100644 --- a/tests/test_unit/test_connections/test_copy_connection.py +++ b/tests/test_unit/test_connections/test_copy_connection.py @@ -473,7 +473,7 @@ async def test_check_name_validation_copy_connection_with_new_connection_name( "loc": ["body", "new_name"], "msg": "String should 
have at least 1 character", "type": "string_too_short", - "url": "https://errors.pydantic.dev/2.6/v/string_too_short", + "url": "https://errors.pydantic.dev/2.7/v/string_too_short", } ] } diff --git a/tests/test_unit/test_connections/test_create_all_connection/test_create_oracle_connection.py b/tests/test_unit/test_connections/test_create_all_connection/test_create_oracle_connection.py index 78e7a464..9f473096 100644 --- a/tests/test_unit/test_connections/test_create_all_connection/test_create_oracle_connection.py +++ b/tests/test_unit/test_connections/test_create_all_connection/test_create_oracle_connection.py @@ -199,7 +199,7 @@ async def test_developer_plus_create_oracle_connection_with_sid_and_service_name "loc": ["body", "connection_data", "oracle"], "msg": "Value error, You must specify either sid or service_name but not both", "type": "value_error", - "url": "https://errors.pydantic.dev/2.6/v/value_error", + "url": "https://errors.pydantic.dev/2.7/v/value_error", } ] } diff --git a/tests/test_unit/test_connections/test_create_connection.py b/tests/test_unit/test_connections/test_create_connection.py index 7595bf43..4ee964ee 100644 --- a/tests/test_unit/test_connections/test_create_connection.py +++ b/tests/test_unit/test_connections/test_create_connection.py @@ -165,7 +165,7 @@ async def test_check_fields_validation_on_create_connection( "loc": ["body", "name"], "msg": "String should have at least 1 character", "type": "string_too_short", - "url": "https://errors.pydantic.dev/2.6/v/string_too_short", + "url": "https://errors.pydantic.dev/2.7/v/string_too_short", } ] } @@ -201,7 +201,7 @@ async def test_check_fields_validation_on_create_connection( "loc": ["body", "name"], "msg": "Input should be a valid string", "type": "string_type", - "url": "https://errors.pydantic.dev/2.6/v/string_type", + "url": "https://errors.pydantic.dev/2.7/v/string_type", } ] } @@ -234,7 +234,7 @@ async def test_check_fields_validation_on_create_connection( "loc": ["body", 
"description"], "msg": "Input should be a valid string", "type": "string_type", - "url": "https://errors.pydantic.dev/2.6/v/string_type", + "url": "https://errors.pydantic.dev/2.7/v/string_type", } ] } @@ -280,7 +280,7 @@ async def test_check_fields_validation_on_create_connection( "msg": "Input tag 'POSTGRESQL' found using 'type' does not match " f"any of the expected tags: {ALLOWED_SOURCES}", "type": "union_tag_invalid", - "url": "https://errors.pydantic.dev/2.6/v/union_tag_invalid", + "url": "https://errors.pydantic.dev/2.7/v/union_tag_invalid", } ] } diff --git a/tests/test_unit/test_connections/test_update_connection.py b/tests/test_unit/test_connections/test_update_connection.py index f60799d6..00bcc2f7 100644 --- a/tests/test_unit/test_connections/test_update_connection.py +++ b/tests/test_unit/test_connections/test_update_connection.py @@ -87,7 +87,7 @@ async def test_check_name_field_validation_on_update_connection( "loc": ["body", "name"], "msg": "String should have at least 1 character", "type": "string_too_short", - "url": "https://errors.pydantic.dev/2.6/v/string_too_short", + "url": "https://errors.pydantic.dev/2.7/v/string_too_short", } ] } @@ -175,7 +175,7 @@ async def test_update_connection_data_fields( "loc": ["body", "connection_data"], "msg": "Unable to extract tag using discriminator 'type'", "type": "union_tag_not_found", - "url": "https://errors.pydantic.dev/2.6/v/union_tag_not_found", + "url": "https://errors.pydantic.dev/2.7/v/union_tag_not_found", } ] } diff --git a/tests/test_unit/test_groups/test_add_user_to_group.py b/tests/test_unit/test_groups/test_add_user_to_group.py index 16ff0ded..d8fe6c3f 100644 --- a/tests/test_unit/test_groups/test_add_user_to_group.py +++ b/tests/test_unit/test_groups/test_add_user_to_group.py @@ -158,7 +158,7 @@ async def test_owner_cannot_add_user_to_group_without_role( "loc": ["body"], "msg": "Field required", "type": "missing", - "url": "https://errors.pydantic.dev/2.6/v/missing", + "url": 
"https://errors.pydantic.dev/2.7/v/missing", } ] } diff --git a/tests/test_unit/test_groups/test_update_group_by_id.py b/tests/test_unit/test_groups/test_update_group_by_id.py index d114dd67..44156fe0 100644 --- a/tests/test_unit/test_groups/test_update_group_by_id.py +++ b/tests/test_unit/test_groups/test_update_group_by_id.py @@ -128,7 +128,7 @@ async def test_validation_on_update_group( "loc": ["body"], "msg": "Field required", "type": "missing", - "url": "https://errors.pydantic.dev/2.6/v/missing", + "url": "https://errors.pydantic.dev/2.7/v/missing", } ] } diff --git a/tests/test_unit/test_queue/test_create_queue.py b/tests/test_unit/test_queue/test_create_queue.py index d34452d8..dcbe3f14 100644 --- a/tests/test_unit/test_queue/test_create_queue.py +++ b/tests/test_unit/test_queue/test_create_queue.py @@ -229,7 +229,7 @@ async def test_superuser_cannot_create_queue_with_unknown_group_error( "loc": ["body", "name"], "msg": "String should match pattern '^[-_a-zA-Z0-9]+$'", "type": "string_pattern_mismatch", - "url": "https://errors.pydantic.dev/2.6/v/string_pattern_mismatch", + "url": "https://errors.pydantic.dev/2.7/v/string_pattern_mismatch", } ] }, @@ -244,7 +244,7 @@ async def test_superuser_cannot_create_queue_with_unknown_group_error( "loc": ["body", "name"], "msg": "String should match pattern '^[-_a-zA-Z0-9]+$'", "type": "string_pattern_mismatch", - "url": "https://errors.pydantic.dev/2.6/v/string_pattern_mismatch", + "url": "https://errors.pydantic.dev/2.7/v/string_pattern_mismatch", } ] }, @@ -259,7 +259,7 @@ async def test_superuser_cannot_create_queue_with_unknown_group_error( "loc": ["body", "name"], "msg": "String should match pattern '^[-_a-zA-Z0-9]+$'", "type": "string_pattern_mismatch", - "url": "https://errors.pydantic.dev/2.6/v/string_pattern_mismatch", + "url": "https://errors.pydantic.dev/2.7/v/string_pattern_mismatch", } ] }, @@ -274,7 +274,7 @@ async def test_superuser_cannot_create_queue_with_unknown_group_error( "msg": "String should 
have at most 128 characters", "input": 129 * "q", "ctx": {"max_length": 128}, - "url": "https://errors.pydantic.dev/2.6/v/string_too_long", + "url": "https://errors.pydantic.dev/2.7/v/string_too_long", } ] }, @@ -289,7 +289,7 @@ async def test_superuser_cannot_create_queue_with_unknown_group_error( "loc": ["body", "name"], "msg": "String should match pattern '^[-_a-zA-Z0-9]+$'", "type": "string_pattern_mismatch", - "url": "https://errors.pydantic.dev/2.6/v/string_pattern_mismatch", + "url": "https://errors.pydantic.dev/2.7/v/string_pattern_mismatch", } ] }, @@ -303,7 +303,7 @@ async def test_superuser_cannot_create_queue_with_unknown_group_error( "loc": ["body", "name"], "msg": "Input should be a valid string", "type": "string_type", - "url": "https://errors.pydantic.dev/2.6/v/string_type", + "url": "https://errors.pydantic.dev/2.7/v/string_type", } ] }, diff --git a/tests/test_unit/test_transfers/test_copy_transfer.py b/tests/test_unit/test_transfers/test_copy_transfer.py index 6f8d01a3..6f987e0f 100644 --- a/tests/test_unit/test_transfers/test_copy_transfer.py +++ b/tests/test_unit/test_transfers/test_copy_transfer.py @@ -392,7 +392,7 @@ async def test_check_validate_copy_transfer_parameter_new_name( "loc": ["body", "new_name"], "msg": "String should have at least 1 character", "type": "string_too_short", - "url": "https://errors.pydantic.dev/2.6/v/string_too_short", + "url": "https://errors.pydantic.dev/2.7/v/string_too_short", } ] } diff --git a/tests/test_unit/test_transfers/test_create_transfer.py b/tests/test_unit/test_transfers/test_create_transfer.py index 4ec6dea4..f3af13df 100644 --- a/tests/test_unit/test_transfers/test_create_transfer.py +++ b/tests/test_unit/test_transfers/test_create_transfer.py @@ -245,7 +245,7 @@ async def test_superuser_can_create_transfer( "loc": ["body", "name"], "msg": "String should have at least 1 character", "type": "string_too_short", - "url": "https://errors.pydantic.dev/2.6/v/string_too_short", + "url": 
"https://errors.pydantic.dev/2.7/v/string_too_short", }, ), ( @@ -255,7 +255,7 @@ async def test_superuser_can_create_transfer( "loc": ["body", "name"], "msg": "Input should be a valid string", "type": "string_type", - "url": "https://errors.pydantic.dev/2.6/v/string_type", + "url": "https://errors.pydantic.dev/2.7/v/string_type", }, ), ( @@ -265,7 +265,7 @@ async def test_superuser_can_create_transfer( "loc": ["body", "is_scheduled"], "msg": "Input should be a valid boolean, unable to interpret input", "type": "bool_parsing", - "url": "https://errors.pydantic.dev/2.6/v/bool_parsing", + "url": "https://errors.pydantic.dev/2.7/v/bool_parsing", }, ), ( @@ -288,7 +288,7 @@ async def test_superuser_can_create_transfer( "loc": ["body"], "msg": "Value error, If transfer must be scheduled than set schedule param", "type": "value_error", - "url": "https://errors.pydantic.dev/2.6/v/value_error", + "url": "https://errors.pydantic.dev/2.7/v/value_error", }, ), ( @@ -307,7 +307,7 @@ async def test_superuser_can_create_transfer( "does not match any of the expected tags: 'full', " "'incremental'", "type": "union_tag_invalid", - "url": "https://errors.pydantic.dev/2.6/v/union_tag_invalid", + "url": "https://errors.pydantic.dev/2.7/v/union_tag_invalid", }, ), ( @@ -329,7 +329,7 @@ async def test_superuser_can_create_transfer( "does not match any of the expected tags: 'postgres', " "'hdfs', 'hive', 'oracle', 's3'", "type": "union_tag_invalid", - "url": "https://errors.pydantic.dev/2.6/v/union_tag_invalid", + "url": "https://errors.pydantic.dev/2.7/v/union_tag_invalid", }, ), ), @@ -572,7 +572,7 @@ async def test_developer_plus_can_not_create_transfer_with_target_s3_json( "msg": "Input tag 'json' found using 'type' does not match any of the expected tags: 'csv', 'jsonline'", "input": {"type": "json", "lineSep": "\n", "encoding": "utf-8"}, "ctx": {"discriminator": "'type'", "tag": "json", "expected_tags": "'csv', 'jsonline'"}, - "url": 
"https://errors.pydantic.dev/2.6/v/union_tag_invalid", + "url": "https://errors.pydantic.dev/2.7/v/union_tag_invalid", } ] } diff --git a/tests/test_unit/test_transfers/test_update_transfer.py b/tests/test_unit/test_transfers/test_update_transfer.py index 45a37cef..61e6a317 100644 --- a/tests/test_unit/test_transfers/test_update_transfer.py +++ b/tests/test_unit/test_transfers/test_update_transfer.py @@ -140,7 +140,7 @@ async def test_check_name_field_validation_on_update_transfer( "loc": ["body", "name"], "msg": "String should have at least 1 character", "type": "string_too_short", - "url": "https://errors.pydantic.dev/2.6/v/string_too_short", + "url": "https://errors.pydantic.dev/2.7/v/string_too_short", } ] } diff --git a/tests/test_unit/test_users.py b/tests/test_unit/test_users.py index 004f719d..6d0cdc5f 100644 --- a/tests/test_unit/test_users.py +++ b/tests/test_unit/test_users.py @@ -152,7 +152,7 @@ async def test_update_user( "loc": ["body", "username"], "msg": "String should match pattern '^[_a-z0-9]+$'", "type": "string_pattern_mismatch", - "url": "https://errors.pydantic.dev/2.6/v/string_pattern_mismatch", + "url": "https://errors.pydantic.dev/2.7/v/string_pattern_mismatch", } ] } From 2646806c2421cd18059f8f9eb54ace5d08ead409 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=D0=9C=D0=B0=D1=80=D1=82=D1=8B=D0=BD=D0=BE=D0=B2=20=D0=9C?= =?UTF-8?q?=D0=B0=D0=BA=D1=81=D0=B8=D0=BC=20=D0=A1=D0=B5=D1=80=D0=B3=D0=B5?= =?UTF-8?q?=D0=B5=D0=B2=D0=B8=D1=87?= Date: Mon, 15 Apr 2024 11:38:34 +0300 Subject: [PATCH 10/11] Fix tests --- tests/test_unit/test_groups/test_add_user_to_group.py | 1 + tests/test_unit/test_groups/test_update_member_role.py | 1 + 2 files changed, 2 insertions(+) diff --git a/tests/test_unit/test_groups/test_add_user_to_group.py b/tests/test_unit/test_groups/test_add_user_to_group.py index d8fe6c3f..b7ff3d5f 100644 --- a/tests/test_unit/test_groups/test_add_user_to_group.py +++ b/tests/test_unit/test_groups/test_add_user_to_group.py @@ -130,6 +130,7 @@ 
async def test_owner_cannot_add_user_to_group_with_wrong_role( "loc": ["body", "role"], "msg": "Input should be 'Maintainer', 'Developer' or 'Guest'", "type": "enum", + "url": "https://errors.pydantic.dev/2.7/v/enum", } ] } diff --git a/tests/test_unit/test_groups/test_update_member_role.py b/tests/test_unit/test_groups/test_update_member_role.py index 4c6cd9cb..82ab297d 100644 --- a/tests/test_unit/test_groups/test_update_member_role.py +++ b/tests/test_unit/test_groups/test_update_member_role.py @@ -78,6 +78,7 @@ async def test_owner_of_group_can_not_update_user_role_with_wrong_role( "loc": ["body", "role"], "msg": "Input should be 'Maintainer', 'Developer' or 'Guest'", "type": "enum", + "url": "https://errors.pydantic.dev/2.7/v/enum", } ] } From 23f4f44adf267962e96be9f9db2ba171bc12b26b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=D0=9C=D0=B0=D1=80=D1=82=D1=8B=D0=BD=D0=BE=D0=B2=20=D0=9C?= =?UTF-8?q?=D0=B0=D0=BA=D1=81=D0=B8=D0=BC=20=D0=A1=D0=B5=D1=80=D0=B3=D0=B5?= =?UTF-8?q?=D0=B5=D0=B2=D0=B8=D1=87?= Date: Mon, 15 Apr 2024 12:02:13 +0300 Subject: [PATCH 11/11] [DOP-15023] Prepare for release --- docs/changelog/0.1.2.rst | 16 ++++++++++++++++ .../{NEXT_RELEASE.rst => NEXT_RELEASE .rst} | 0 docs/changelog/index.rst | 1 + docs/changelog/next_release/35.feature.1.rst | 1 - docs/changelog/next_release/35.feature.2.rst | 1 - docs/changelog/next_release/35.feature.3.rst | 1 - docs/changelog/next_release/35.improvement.1.rst | 1 - docs/changelog/next_release/35.improvement.2.rst | 1 - pyproject.toml | 1 + 9 files changed, 18 insertions(+), 5 deletions(-) create mode 100644 docs/changelog/0.1.2.rst rename docs/changelog/{NEXT_RELEASE.rst => NEXT_RELEASE .rst} (100%) delete mode 100644 docs/changelog/next_release/35.feature.1.rst delete mode 100644 docs/changelog/next_release/35.feature.2.rst delete mode 100644 docs/changelog/next_release/35.feature.3.rst delete mode 100644 docs/changelog/next_release/35.improvement.1.rst delete mode 100644 
docs/changelog/next_release/35.improvement.2.rst diff --git a/docs/changelog/0.1.2.rst b/docs/changelog/0.1.2.rst new file mode 100644 index 00000000..d551c010 --- /dev/null +++ b/docs/changelog/0.1.2.rst @@ -0,0 +1,16 @@ +0.1.2 (2024-04-15) +================== + +Features +-------- + +- Rename ``syncmaster.backend.main`` module to ``syncmaster.backend``. It now also accepts the same args as ``uvicorn``. (:issue:`35`) +- Add ``syncmaster.db.migrations`` module to run ``alembic`` with proper config. (:issue:`35`) +- Change backend and worker images to use entrypoint. (:issue:`35`) + + +Improvements +------------ + +- Run database migrations in the entrypoint of backend image, before starting the backend server. (:issue:`35`) +- Add healthchecks to example ``docker-compose.yml``. (:issue:`35`) diff --git a/docs/changelog/NEXT_RELEASE.rst b/docs/changelog/NEXT_RELEASE .rst similarity index 100% rename from docs/changelog/NEXT_RELEASE.rst rename to docs/changelog/NEXT_RELEASE .rst diff --git a/docs/changelog/index.rst b/docs/changelog/index.rst index 6bb6c593..d78d4b4b 100644 --- a/docs/changelog/index.rst +++ b/docs/changelog/index.rst @@ -3,4 +3,5 @@ :caption: Changelog DRAFT + 0.1.2 0.1.1 diff --git a/docs/changelog/next_release/35.feature.1.rst b/docs/changelog/next_release/35.feature.1.rst deleted file mode 100644 index 56d342e3..00000000 --- a/docs/changelog/next_release/35.feature.1.rst +++ /dev/null @@ -1 +0,0 @@ -Rename ``syncmaster.backend.main`` module to ``syncmaster.backend``. It now also accepts the same args as ``uvicorn``. diff --git a/docs/changelog/next_release/35.feature.2.rst b/docs/changelog/next_release/35.feature.2.rst deleted file mode 100644 index 887852b9..00000000 --- a/docs/changelog/next_release/35.feature.2.rst +++ /dev/null @@ -1 +0,0 @@ -Add ``syncmaster.db.migrations`` module to run ``alembic`` with proper config. 
diff --git a/docs/changelog/next_release/35.feature.3.rst b/docs/changelog/next_release/35.feature.3.rst deleted file mode 100644 index 07472ea7..00000000 --- a/docs/changelog/next_release/35.feature.3.rst +++ /dev/null @@ -1 +0,0 @@ -Change backend and worker images to use entrypoint. diff --git a/docs/changelog/next_release/35.improvement.1.rst b/docs/changelog/next_release/35.improvement.1.rst deleted file mode 100644 index 770d7c5b..00000000 --- a/docs/changelog/next_release/35.improvement.1.rst +++ /dev/null @@ -1 +0,0 @@ -Run database migrations in the entrypoint of backend image, before starting the backend server. diff --git a/docs/changelog/next_release/35.improvement.2.rst b/docs/changelog/next_release/35.improvement.2.rst deleted file mode 100644 index f794a60b..00000000 --- a/docs/changelog/next_release/35.improvement.2.rst +++ /dev/null @@ -1 +0,0 @@ -Add healthchecks to example ``docker-compose.yml``. diff --git a/pyproject.toml b/pyproject.toml index a5d231b8..d838cf8b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -198,6 +198,7 @@ exclude_lines = [ ] + [tool.poetry.group.docs.dependencies] autodoc-pydantic = {version = "^2.0.1", python = ">=3.8"} numpydoc = {version = "^1.6.0", python = ">=3.8"}