diff --git a/.github/actions/test-coverage/action.yml b/.github/actions/test-coverage/action.yml
index ef2ad18..4aebdda 100644
--- a/.github/actions/test-coverage/action.yml
+++ b/.github/actions/test-coverage/action.yml
@@ -47,3 +47,4 @@ runs:
       repo-token: ${{ inputs.repoToken }}
       repo-token-user-login: 'github-actions[bot]'
       allow-repeats: true
+      update-only: true
diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
index 4c55ed4..d7fd052 100644
--- a/.github/workflows/publish.yml
+++ b/.github/workflows/publish.yml
@@ -1,41 +1,82 @@
-# This workflow will upload a Python Package using Twine when a release is created
-# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries
-
-# This workflow uses actions that are not certified by GitHub.
-# They are provided by a third-party and are governed by
-# separate terms of service, privacy policy, and support
-# documentation.
-
-name: Upload Python Package
+name: Release

 on:
   release:
     types: [published]
+  push:
+    tags:
+      - '*.*.*'
+
+env:
+  # Change these for your project's URLs
+  PYPI_URL: https://pypi.org/p/django-tasks-scheduler
+  PYPI_TEST_URL: https://test.pypi.org/p/django-tasks-scheduler

 jobs:
-  publish:
+
+  build:
+    name: Build distribution 📦
     runs-on: ubuntu-latest
-    permissions:
-      id-token: write  # IMPORTANT: this permission is mandatory for trusted publishing
     steps:
       - uses: actions/checkout@v4
-
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
-          python-version: '3.12'
-          cache-dependency-path: poetry.lock
+          python-version: "3.12"
+      - name: Install pypa/build
+        run:
+          python3 -m pip install build --user
+      - name: Build a binary wheel and a source tarball
+        run: python3 -m build
+      - name: Store the distribution packages
+        uses: actions/upload-artifact@v4
+        with:
+          name: python-package-distributions
+          path: dist/

-      - name: Install dependencies
-        run: |
-          python -m pip install --upgrade pip
-          pip install build
+  publish-to-pypi:
+    name: >-
+      Publish Python 🐍 distribution 📦 to PyPI
+    if: startsWith(github.ref, 'refs/tags/')  # only publish to PyPI on tag pushes
+    needs:
+      - build
+    runs-on: ubuntu-latest
+    environment:
+      name: pypi
+      url: ${{ env.PYPI_URL }}
+    permissions:
+      id-token: write  # IMPORTANT: mandatory for trusted publishing
+    steps:
+      - name: Download all the dists
+        uses: actions/download-artifact@v4
+        with:
+          name: python-package-distributions
+          path: dist/
+      - name: Publish distribution 📦 to PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
+
+  publish-to-testpypi:
+    name: Publish Python 🐍 distribution 📦 to TestPyPI
+    needs:
+      - build
+    runs-on: ubuntu-latest

-      - name: Build package
-        run: python -m build
+    environment:
+      name: testpypi
+      url: ${{ env.PYPI_TEST_URL }}

-      - name: Publish package to pypi
-        uses: pypa/gh-action-pypi-publish@v1.8.14
+    permissions:
+      id-token: write  # IMPORTANT: mandatory for trusted publishing
+
+    steps:
+      - name: Download all the dists
+        uses: actions/download-artifact@v4
+        with:
+          name: python-package-distributions
+          path: dist/
+      - name: Publish distribution 📦 to TestPyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
         with:
-          print-hash: true
\ No newline at end of file
+          repository-url: https://test.pypi.org/legacy/
+          skip-existing: true
\ No newline at end of file
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 98ad381..cdf707e 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -1,7 +1,7 @@
 name: Django CI

 on:
-  pull_request_target:
+  pull_request:
     branches:
       - master
   push:
@@ -30,17 +30,17 @@ jobs:
   test:
     needs: [ 'flake8' ]
     runs-on: ubuntu-latest
-    name: "Run tests ${{ matrix.python-version }}/${{ matrix.django-version }}/${{ matrix.fake-redis }}"
+    name: "Run tests ${{ matrix.python-version }}/${{ matrix.django-version }}/${{ matrix.broker }}"
     strategy:
       max-parallel: 6
       matrix:
         python-version: [ '3.10', '3.11', '3.12' ]
         django-version: [ '5.0.7', '5.1.1' ]
-        fake-redis: ['True', 'False']
+        broker: [ 'redis', 'fakeredis', 'valkey' ]
        include:
          - python-version: '3.12'
            django-version: '5.1.1'
-            fake-redis: 'False'
+            broker: 'redis'
            coverage: yes

    services:
      redis:
        image: redis:7.2.4
        ports:
          - 6379:6379
        options: >-
          --health-cmd "redis-cli ping"
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5

+      valkey:
+        image: valkey/valkey:8.0
+        ports:
+          - 6380:6379
+        options: >-
+          --health-cmd "redis-cli ping"
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+
    outputs:
      version: ${{ steps.getVersion.outputs.VERSION }}

@@ -91,7 +101,12 @@ jobs:
       if: ${{ matrix.coverage != 'yes' }}
       run: |
         cd testproject
-        export FAKEREDIS=${{ matrix.fake-redis }}
+        export FAKEREDIS=${{ matrix.broker == 'fakeredis' }}
+        if [ ${{ matrix.broker == 'valkey' }} == true ]; then
+          export BROKER_PORT=6380
+        else
+          export BROKER_PORT=6379
+        fi
         poetry run python manage.py test scheduler

     # Steps for coverage check
@@ -102,6 +117,7 @@ jobs:
         pythonVer: ${{ matrix.python-version }}
         djangoVer: ${{ matrix.django-version }}
         repoToken: ${{ secrets.GITHUB_TOKEN }}
+
     - name: Create coverage badge
       if: ${{ matrix.coverage == 'yes' && github.event_name == 'push' }}
       uses: schneegans/dynamic-badges-action@v1.7.0
diff --git a/docs/changelog.md b/docs/changelog.md
index 413c188..8336aba 100644
--- a/docs/changelog.md
+++ b/docs/changelog.md
@@ -1,5 +1,18 @@
 # Changelog

+## v2.1.0 🌈
+
+### 🚀 Features
+
+- Support for a custom job class for every worker, using the `--job-class` option in the `rqworker` command.
+- Support for integrating with Sentry, using the `--sentry-dsn`, `--sentry-debug`, and `--sentry-ca-certs` options in
+  the `rqworker` command.
+- Support for using Valkey as a broker instead of Redis.
+
+### 🧰 Maintenance
+
+- Refactor settings module.
+
 ## v2.0.0 🌈

 ### Breaking Changes
diff --git a/docs/commands.md b/docs/commands.md
index cb55187..7020cf9 100644
--- a/docs/commands.md
+++ b/docs/commands.md
@@ -8,10 +8,47 @@ If no queues are specified, will run on default queue only.
 All queues must have the same redis settings on `SCHEDULER_QUEUES`.

 ```shell
-python manage.py rqworker queue1 queue2 queue3
-
+usage: manage.py rqworker [-h] [--pid PIDFILE] [--burst] [--name NAME] [--worker-ttl WORKER_TTL] [--max-jobs MAX_JOBS]
+                          [--fork-job-execution FORK_JOB_EXECUTION] [--job-class JOB_CLASS] [--sentry-dsn SENTRY_DSN] [--sentry-debug]
+                          [--sentry-ca-certs SENTRY_CA_CERTS] [--version] [-v {0,1,2,3}] [--settings SETTINGS] [--pythonpath PYTHONPATH]
+                          [--traceback] [--no-color] [--force-color] [--skip-checks]
+                          [queues ...]
+
+positional arguments:
+  queues                The queues to work on, separated by space, all queues should be using the same redis
+
+options:
+  -h, --help            show this help message and exit
+  --pid PIDFILE         file to write the worker`s pid into
+  --burst               Run worker in burst mode
+  --name NAME           Name of the worker
+  --worker-ttl WORKER_TTL
+                        Default worker timeout to be used
+  --max-jobs MAX_JOBS   Maximum number of jobs to execute before terminating worker
+  --fork-job-execution FORK_JOB_EXECUTION
+                        Fork job execution to another process
+  --job-class JOB_CLASS
+                        Jobs class to use
+  --sentry-dsn SENTRY_DSN
+                        Sentry DSN to use
+  --sentry-debug        Enable Sentry debug mode
+  --sentry-ca-certs SENTRY_CA_CERTS
+                        Path to CA certs file
+  --version             Show program's version number and exit.
+  -v {0,1,2,3}, --verbosity {0,1,2,3}
+                        Verbosity level; 0=minimal output, 1=normal output, 2=verbose output, 3=very verbose output
+  --settings SETTINGS   The Python path to a settings module, e.g. "myproject.settings.main". If this isn't provided, the
+                        DJANGO_SETTINGS_MODULE environment variable will be used.
+  --pythonpath PYTHONPATH
+                        A directory to add to the Python path, e.g. "/home/djangoprojects/myproject".
+  --traceback           Raise on CommandError exceptions.
+  --no-color            Don't colorize the command output.
+  --force-color         Force colorization of the command output.
+  --skip-checks         Skip system checks.
 ```

+
+
 ## export

 Export all scheduled tasks from django db to json/yaml format.
diff --git a/docs/configuration.md b/docs/configuration.md
index fb83b6b..fb05ec8 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -10,6 +10,7 @@ SCHEDULER_CONFIG = {
     'DEFAULT_RESULT_TTL': 500,
     'DEFAULT_TIMEOUT': 300,  # 5 minutes
     'SCHEDULER_INTERVAL': 10,  # 10 seconds
+    'BROKER': 'redis',
 }
 SCHEDULER_QUEUES = {
     'default': {
@@ -67,6 +68,12 @@ Enables checking stats using API token.

 Default: no tokens allowed.

+### SCHEDULER_CONFIG: `BROKER`
+
+Broker driver to use for the scheduler. Can be `redis`, `valkey`, or `fakeredis`.
+
+Default: `redis`.
+
 ### `SCHEDULER_QUEUES`

 You can configure the queues to work with.
diff --git a/docs/index.md b/docs/index.md
index 93a234c..24f3275 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -121,16 +121,24 @@ sequenceDiagram

 ## Reporting issues or Features requests

-Please report issues via [GitHub Issues](https://github.com/django-commons/django-tasks-scheduler/issues) .
+Please report issues via [GitHub Issues][6].

 ---

 ## Acknowledgements

-A lot of django-admin views and their tests were adopted from [django-rq](https://github.com/rq/django-rq).
+A lot of django-admin views and their tests were adopted from [django-rq][7].
 [1]:https://github.com/django-commons/django-tasks-scheduler/actions/workflows/test.yml/badge.svg
+
 [2]:https://github.com/django-commons/django-tasks-scheduler/actions/workflows/test.yml
+
 [3]:https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/cunla/b756396efb895f0e34558c980f1ca0c7/raw/django-tasks-scheduler-4.json
+
 [4]:https://img.shields.io/pypi/dm/django-tasks-scheduler
+
 [5]:https://pypi.org/project/django-tasks-scheduler/
+
+[6]:https://github.com/django-commons/django-tasks-scheduler/issues
+
+[7]:https://github.com/rq/django-rq
\ No newline at end of file
diff --git a/docs/installation.md b/docs/installation.md
index 14b4269..6a2db1d 100644
--- a/docs/installation.md
+++ b/docs/installation.md
@@ -66,6 +66,7 @@
     'DEFAULT_RESULT_TTL': 500,
     'DEFAULT_TIMEOUT': 300,  # 5 minutes
     'SCHEDULER_INTERVAL': 10,  # 10 seconds
+    'BROKER': 'redis',  #
 }
 ```
diff --git a/docs/usage.md b/docs/usage.md
index 4957961..dacd016 100644
--- a/docs/usage.md
+++ b/docs/usage.md
@@ -43,7 +43,7 @@ You can set in `settings.py` a default value for `DEFAULT_RESULT_TTL` and `DEFAULT_TIMEOUT`.

 ```python
 # settings.py
-RQ = {
+SCHEDULER_CONFIG = {
     'DEFAULT_RESULT_TTL': 360,
     'DEFAULT_TIMEOUT': 60,
 }
@@ -51,8 +51,7 @@ RQ = {

 ## Scheduling a job Through django-admin

-* Sign in to the Django Admin site (e.g., http://localhost:8000/admin/) and locate the
-  **Tasks Scheduler** section.
+* Sign in to the Django Admin site (e.g., http://localhost:8000/admin/) and locate the `Tasks Scheduler` section.
 * Click on the **Add** link for the type of job you want to add (`Scheduled Task` - run once, `Repeatable Task` - run
   multiple times, `Cron Task` - Run based on cron schedule).
 * Enter a unique name for the job in the **Name** field.
@@ -83,49 +82,28 @@ calculated in runtime.

 ![](media/add-args.jpg)

-### Scheduled Task - run once
+### Scheduled Task: run once

-No additional steps required.
+No additional steps are required.

-### Repeatable Task - Run a job multiple time based on interval
+### Repeatable Task: Run a job multiple times based on interval

-Additional fields required:
+These additional fields are required:

 * Enter an **Interval**, and choose the **Interval unit**. This will calculate the time before the function is called
   again.
 * In the **Repeat** field, enter the number of time the job is to be run. Leaving the field empty, means the job will
   be scheduled to run forever.

-### Cron Task - Run a job multiple time based on cron
+### Cron Task: Run a job multiple times based on cron

-Additional fields required:
+These additional fields are required:

 * In the **Repeat** field, enter the number of time the job is to be run. Leaving the field empty, means the job will be
   scheduled to run forever.
 * In the **cron string** field, enter a cron string describing how often the job should run.

-### Scheduled Task - run once
-
-No additional steps required.
-
-### Repeatable Task - Run a job multiple time based on interval
-
-Additional fields required:
-
-* Enter an **Interval**, and choose the **Interval unit**. This will calculate the time before the function is called
-  again.
-* In the **Repeat** field, enter the number of time the job is to be run. Leaving the field empty, means the job will
-  be scheduled to run forever.
-
-### Cron Task - Run a job multiple time based on cron
-
-Additional fields required:
-
-* In the **Repeat** field, enter the number of time the job is to be run. Leaving the field empty, means the job will be
-  scheduled to run forever.
-* In the **cron string** field, enter a cron string describing how often the job should run.
-
-## Enqueue jobs through command line
+## Enqueue jobs using the command line

 It is possible to queue a job to be executed from the command line
 using django management command:
@@ -134,18 +112,24 @@ using django management command:
 python manage.py run_job -q {queue} -t {timeout} -r {result_ttl} {callable} {args}
 ```

-## Running a worker
+## Running a worker to process queued jobs in the background

 Create a worker to execute queued jobs on specific queues using:

 ```shell
-python manage.py rqworker [queues ...]
+python manage.py rqworker [-h] [--pid PIDFILE] [--burst] [--name NAME] [--worker-ttl WORKER_TTL] [--max-jobs MAX_JOBS] [--fork-job-execution FORK_JOB_EXECUTION]
+                          [--job-class JOB_CLASS] [--version] [-v {0,1,2,3}] [--settings SETTINGS] [--pythonpath PYTHONPATH] [--traceback] [--no-color] [--force-color]
+                          [--skip-checks]
+                          [queues ...]
+
 ```

+More information about the different parameters can be found in the [commands documentation](commands.md).
+
 ### Running multiple workers as unix/linux services using systemd

 You can have multiple workers running as system services.
-In order to have multiple rqworkers, edit the `/etc/systemd/system/rqworker@.service`
+To have multiple rqworkers, edit the `/etc/systemd/system/rqworker@.service`
 file, make sure it ends with `@.service`, the following is example:

 ```ini
diff --git a/poetry.lock b/poetry.lock
index 9b5b3ef..94c636e 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -28,6 +28,52 @@ files = [
     {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"},
 ]

+[[package]]
+name = "black"
+version = "24.8.0"
+description = "The uncompromising code formatter."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "black-24.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6"},
+    {file = "black-24.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb"},
+    {file = "black-24.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42"},
+    {file = "black-24.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a"},
+    {file = "black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1"},
+    {file = "black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af"},
+    {file = "black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4"},
+    {file = "black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af"},
+    {file = "black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368"},
+    {file = "black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed"},
+    {file = "black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018"},
+    {file = "black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2"},
+    {file = "black-24.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd"},
+    {file = "black-24.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2"},
+    {file = "black-24.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e"},
+    {file = "black-24.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920"},
+    {file = "black-24.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c"},
+    {file = "black-24.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e"},
+    {file = "black-24.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47"},
+    {file = "black-24.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb"},
+    {file = "black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed"},
+    {file = "black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f"},
+]
+
+[package.dependencies]
+click = ">=8.0.0"
+mypy-extensions = ">=0.4.3"
+packaging = ">=22.0"
+pathspec = ">=0.9.0"
+platformdirs = ">=2"
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
+typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""}
+
+[package.extras]
+colorama = ["colorama (>=0.4.3)"]
+d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"]
+jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
+uvloop = ["uvloop (>=0.15.2)"]
+
 [[package]]
 name = "build"
 version = "1.2.2"
@@ -683,15 +729,18 @@ python-dateutil = ">=2.7"

 [[package]]
 name = "idna"
-version = "3.8"
+version = "3.9"
 description = "Internationalized Domain Names in Applications (IDNA)"
 optional = false
 python-versions = ">=3.6"
 files = [
-    {file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"},
-    {file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"},
+    {file = "idna-3.9-py3-none-any.whl", hash = "sha256:69297d5da0cc9281c77efffb4e730254dd45943f45bbfb461de5991713989b1e"},
+    {file = "idna-3.9.tar.gz", hash = "sha256:e5c5dafde284f26e9e0f28f6ea2d6400abd5ca099864a67f576f3981c6476124"},
 ]

+[package.extras]
+all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"]
+
 [[package]]
 name = "importlib-metadata"
 version = "8.5.0"
@@ -972,6 +1021,17 @@ files = [
     {file = "msgpack-1.1.0.tar.gz", hash = "sha256:dd432ccc2c72b914e4cb77afce64aab761c1137cc698be3984eee260bcb2896e"},
 ]

+[[package]]
+name = "mypy-extensions"
+version = "1.0.0"
+description = "Type system extensions for programs checked with the mypy type checker."
+optional = false
+python-versions = ">=3.5"
+files = [
+    {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
+    {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
+]
+
 [[package]]
 name = "packaging"
 version = "24.1"
@@ -983,6 +1043,17 @@ files = [
     {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"},
 ]

+[[package]]
+name = "pathspec"
+version = "0.12.1"
+description = "Utility library for gitignore style pattern matching of file paths."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
+    {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
+]
+
 [[package]]
 name = "pexpect"
 version = "4.9.0"
@@ -1013,13 +1084,13 @@ testing = ["pytest", "pytest-cov", "wheel"]

 [[package]]
 name = "platformdirs"
-version = "4.3.2"
+version = "4.3.3"
 description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "platformdirs-4.3.2-py3-none-any.whl", hash = "sha256:eb1c8582560b34ed4ba105009a4badf7f6f85768b30126f351328507b2beb617"},
-    {file = "platformdirs-4.3.2.tar.gz", hash = "sha256:9e5e27a08aa095dd127b9f2e764d74254f482fef22b0970773bfba79d091ab8c"},
+    {file = "platformdirs-4.3.3-py3-none-any.whl", hash = "sha256:50a5450e2e84f44539718293cbb1da0a0885c9d14adf21b77bae4e66fc99d9b5"},
+    {file = "platformdirs-4.3.3.tar.gz", hash = "sha256:d4e0b7d8ec176b341fb03cb11ca12d0276faa8c485f9cd218f613840463fc2c0"},
 ]

 [package.extras]
@@ -1517,13 +1588,13 @@ files = [

 [[package]]
 name = "trove-classifiers"
-version = "2024.7.2"
+version = "2024.9.12"
 description = "Canonical source for classifiers on PyPI (pypi.org)."
 optional = false
 python-versions = "*"
 files = [
-    {file = "trove_classifiers-2024.7.2-py3-none-any.whl", hash = "sha256:ccc57a33717644df4daca018e7ec3ef57a835c48e96a1e71fc07eb7edac67af6"},
-    {file = "trove_classifiers-2024.7.2.tar.gz", hash = "sha256:8328f2ac2ce3fd773cbb37c765a0ed7a83f89dc564c7d452f039b69249d0ac35"},
+    {file = "trove_classifiers-2024.9.12-py3-none-any.whl", hash = "sha256:f88a27a892891c87c5f8bbdf110710ae9e0a4725ea8e0fb45f1bcadf088a491f"},
+    {file = "trove_classifiers-2024.9.12.tar.gz", hash = "sha256:4b46b3e134a4d01999ac5bc6e528afcc10cc48f0f724f185f267e276005768f4"},
 ]

 [[package]]
@@ -1565,6 +1636,24 @@ h2 = ["h2 (>=4,<5)"]
 socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
 zstd = ["zstandard (>=0.18.0)"]

+[[package]]
+name = "valkey"
+version = "6.0.1"
+description = "Python client for Valkey forked from redis-py"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "valkey-6.0.1-py3-none-any.whl", hash = "sha256:6702bf323e88e50ef0be37aad697bcc6334edd40cc66f01259265dd410fa22dc"},
+    {file = "valkey-6.0.1.tar.gz", hash = "sha256:58f4628dc038ab5aa04eea6e75557309c9412a8c45e81ad42d53e42b9a36e7dc"},
+]
+
+[package.dependencies]
+async-timeout = {version = ">=4.0.3", markers = "python_version < \"3.11\""}
+
+[package.extras]
+libvalkey = ["libvalkey (>=4.0.0)"]
+ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==23.2.1)", "requests (>=2.31.0)"]
+
 [[package]]
 name = "virtualenv"
 version = "20.26.4"
@@ -1660,13 +1749,13 @@ test = ["pytest"]

 [[package]]
 name = "zipp"
-version = "3.20.1"
+version = "3.20.2"
 description = "Backport of pathlib-compatible object wrapper for zip files"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "zipp-3.20.1-py3-none-any.whl", hash = "sha256:9960cd8967c8f85a56f920d5d507274e74f9ff813a0ab8889a5b5be2daf44064"},
-    {file = "zipp-3.20.1.tar.gz", hash = "sha256:c22b14cc4763c5a5b04134207736c107db42e9d3ef2d9779d465f5f1bcba572b"},
+    {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"},
+    {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"},
 ]

 [package.extras]
@@ -1683,4 +1772,4 @@ yaml = ["pyyaml"]
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.10"
-content-hash = "4ba527f3e2d3e9d14569a1e9b147abb74153458be38c8765be0b91c0229cf294"
+content-hash = "f6f35ae29aa4944f12e0261def333a8634cf6e83d7d0c4fd662732691c816520"
diff --git a/pyproject.toml b/pyproject.toml
index 7f497b1..f301a66 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -7,7 +7,7 @@ name = "django-tasks-scheduler"
 packages = [
     { include = "scheduler" },
 ]
-version = "2.0.1"
+version = "2.1.0"
 description = "An async job scheduler for django using redis"
 readme = "README.md"
 keywords = ["redis", "django", "background-jobs", "job-queue", "task-queue", "redis-queue", "scheduled-jobs"]
@@ -47,9 +47,11 @@ croniter = ">=2.0"
 click = "^8.1"
 rq = "^1.16"
 pyyaml = { version = "^6.0", optional = true }
+valkey = "6.0.1"

 [tool.poetry.dev-dependencies]
-poetry = "^1.8.2"
+poetry = "^1.8.3"
+black = "^24.4.2"
 coverage = "^7.6"
 fakeredis = { version = "^2.21.5", extras = ['lua'] }
 Flake8-pyproject = "^1.2"
@@ -61,7 +63,7 @@ freezegun = "^1.5"
 yaml = ["pyyaml"]

 [tool.flake8]
-max-line-length = 119
+max-line-length = 120
 exclude = [
     'scheduler/migrations',
     'testproject',
@@ -72,3 +74,30 @@ exclude = [
 include = [
     'scheduler',
 ]
+
+[tool.black]
+color = true
+line-length = 120
+target-version = ['py310']
+include = [
+    "scheduler",
+    "testproject",
+]
+exclude = '''
+    (
+      /(
+        | \.git          # root of the project
+        | \.github
+        | \.mypy_cache
+        | \.tox
+        | \.venv
+        | _build
+        | buck-out
+        | build
+        | dist
+        | scheduler/migrations
+        | scheduler/templates
+      )/
+      | .*/__pycache__/.*
+)
+'''
\ No newline at end of file
diff --git a/scheduler/__init__.py b/scheduler/__init__.py
index 4675745..c6530c8 100644
--- a/scheduler/__init__.py
+++ b/scheduler/__init__.py
@@ -1,5 +1,5 @@
 import importlib.metadata

-__version__ = importlib.metadata.version('django-tasks-scheduler')
+__version__ = importlib.metadata.version("django-tasks-scheduler")

 from .decorators import job  # noqa: F401
diff --git a/scheduler/admin/__init__.py b/scheduler/admin/__init__.py
index 5b0fa97..237e1c8 100644
--- a/scheduler/admin/__init__.py
+++ b/scheduler/admin/__init__.py
@@ -1,2 +1,2 @@
 from .task_models import TaskAdmin  # noqa: F401
-from .redis_models import QueueAdmin, WorkerAdmin  # noqa: F401
+from .ephemeral_models import QueueAdmin, WorkerAdmin  # noqa: F401
diff --git a/scheduler/admin/redis_models.py b/scheduler/admin/ephemeral_models.py
similarity index 95%
rename from scheduler/admin/redis_models.py
rename to scheduler/admin/ephemeral_models.py
index 846130a..bcb3421 100644
--- a/scheduler/admin/redis_models.py
+++ b/scheduler/admin/ephemeral_models.py
@@ -23,7 +23,7 @@ def has_module_permission(self, request):
         does not restrict access to the add, change or delete views.
         Use `ModelAdmin.has_(add|change|delete)_permission` for that.
         """
-        return request.user.has_module_perms('django-tasks-scheduler')
+        return request.user.has_module_perms("django-tasks-scheduler")


 @admin.register(Queue)
diff --git a/scheduler/admin/task_models.py b/scheduler/admin/task_models.py
index ffc7d07..fc5fb50 100644
--- a/scheduler/admin/task_models.py
+++ b/scheduler/admin/task_models.py
@@ -1,4 +1,5 @@
 import redis
+import valkey
 from django.contrib import admin, messages
 from django.contrib.contenttypes.admin import GenericStackedInline
 from django.utils.translation import gettext_lazy as _
@@ -11,16 +12,26 @@ class HiddenMixin(object):
     class Media:
-        js = ['admin/js/jquery.init.js', ]
+        js = [
+            "admin/js/jquery.init.js",
+        ]


 class JobArgInline(HiddenMixin, GenericStackedInline):
     model = TaskArg
     extra = 0
     fieldsets = (
-        (None, {
-            'fields': (('arg_type', 'val',),),
-        }),
+        (
+            None,
+            {
+                "fields": (
+                    (
+                        "arg_type",
+                        "val",
+                    ),
+                ),
+            },
+        ),
     )


@@ -28,32 +39,72 @@ class JobKwargInline(HiddenMixin, GenericStackedInline):
     model = TaskKwarg
     extra = 0
     fieldsets = (
-        (None, {
-            'fields': (('key',), ('arg_type', 'val',),),
-        }),
+        (
+            None,
+            {
+                "fields": (
+                    ("key",),
+                    (
+                        "arg_type",
+                        "val",
+                    ),
+                ),
+            },
+        ),
     )


 _LIST_DISPLAY_EXTRA = dict(
-    CronTask=('cron_string', 'next_run', 'successful_runs', 'last_successful_run', 'failed_runs', 'last_failed_run',),
-    ScheduledTask=('scheduled_time',),
+    CronTask=(
+        "cron_string",
+        "next_run",
+        "successful_runs",
+        "last_successful_run",
+        "failed_runs",
+        "last_failed_run",
+    ),
+    ScheduledTask=("scheduled_time",),
     RepeatableTask=(
-        'scheduled_time', 'interval_display', 'successful_runs', 'last_successful_run', 'failed_runs',
-        'last_failed_run',),
+        "scheduled_time",
+        "interval_display",
+        "successful_runs",
+        "last_successful_run",
+        "failed_runs",
+        "last_failed_run",
+    ),
 )
 _FIELDSET_EXTRA = dict(
     CronTask=(
-        'cron_string', 'timeout', 'result_ttl',
-        ('successful_runs', 'last_successful_run',),
-        ('failed_runs', 'last_failed_run',),
+        "cron_string",
+        "timeout",
+        "result_ttl",
+        (
+            "successful_runs",
+            "last_successful_run",
+        ),
+        (
"failed_runs", + "last_failed_run", + ), ), - ScheduledTask=('scheduled_time', 'timeout', 'result_ttl'), + ScheduledTask=("scheduled_time", "timeout", "result_ttl"), RepeatableTask=( - 'scheduled_time', - ('interval', 'interval_unit',), - 'repeat', 'timeout', 'result_ttl', - ('successful_runs', 'last_successful_run',), - ('failed_runs', 'last_failed_run',), + "scheduled_time", + ( + "interval", + "interval_unit", + ), + "repeat", + "timeout", + "result_ttl", + ( + "successful_runs", + "last_successful_run", + ), + ( + "failed_runs", + "last_failed_run", + ), ), ) @@ -65,60 +116,94 @@ class TaskAdmin(admin.ModelAdmin): """ save_on_top = True - change_form_template = 'admin/scheduler/change_form.html' - actions = ['disable_selected', 'enable_selected', 'enqueue_job_now', ] - inlines = [JobArgInline, JobKwargInline, ] - list_filter = ('enabled',) - list_display = ('enabled', 'name', 'job_id', 'function_string', 'is_scheduled', 'queue',) - list_display_links = ('name',) - readonly_fields = ('job_id',) + change_form_template = "admin/scheduler/change_form.html" + actions = [ + "disable_selected", + "enable_selected", + "enqueue_job_now", + ] + inlines = [ + JobArgInline, + JobKwargInline, + ] + list_filter = ("enabled",) + list_display = ( + "enabled", + "name", + "job_id", + "function_string", + "is_scheduled", + "queue", + ) + list_display_links = ("name",) + readonly_fields = ("job_id",) fieldsets = ( - (None, { - 'fields': ('name', 'callable', 'enabled', 'at_front',), - }), - (_('RQ Settings'), { - 'fields': ('queue', 'job_id',), - }), + ( + None, + { + "fields": ( + "name", + "callable", + "enabled", + "at_front", + ), + }, + ), + ( + _("RQ Settings"), + { + "fields": ( + "queue", + "job_id", + ), + }, + ), ) def get_list_display(self, request): if self.model.__name__ not in _LIST_DISPLAY_EXTRA: - raise ValueError(f'Unrecognized model {self.model}') + raise ValueError(f"Unrecognized model {self.model}") return TaskAdmin.list_display + _LIST_DISPLAY_EXTRA[self.model.__name__] def get_fieldsets(self, request, obj=None): if self.model.__name__ not in _FIELDSET_EXTRA: - raise ValueError(f'Unrecognized model {self.model}') - return TaskAdmin.fieldsets + ((_('Scheduling'), { - 'fields': _FIELDSET_EXTRA[self.model.__name__], - }),) - - @admin.display(description='Next run') + raise ValueError(f"Unrecognized model {self.model}") + return TaskAdmin.fieldsets + ( + ( + _("Scheduling"), + { + "fields": _FIELDSET_EXTRA[self.model.__name__], + }, + ), + ) + + @admin.display(description="Next run") def next_run(self, o: CronTask): return tools.get_next_cron_time(o.cron_string) - def change_view(self, request, object_id, form_url='', extra_context=None): + def change_view(self, request, object_id, form_url="", extra_context=None): extra = extra_context or {} obj = self.get_object(request, object_id) try: execution_list = get_job_executions(obj.queue, obj) - except redis.ConnectionError as e: - logger.warn(f'Could not get job executions: {e}') + except (redis.ConnectionError, valkey.ConnectionError) as e: + logger.warn(f"Could not get job executions: {e}") execution_list = list() - paginator = self.get_paginator(request, execution_list, SCHEDULER_CONFIG['EXECUTIONS_IN_PAGE']) - page_number = request.GET.get('p', 1) + paginator = self.get_paginator(request, execution_list, SCHEDULER_CONFIG.EXECUTIONS_IN_PAGE) + page_number = request.GET.get("p", 1) page_obj = paginator.get_page(page_number) page_range = paginator.get_elided_page_range(page_obj.number) - extra.update({ - "pagination_required": 
-            "pagination_required": paginator.count > SCHEDULER_CONFIG['EXECUTIONS_IN_PAGE'],
-            'executions': page_obj,
-            'page_range': page_range,
-            'page_var': 'p',
-        })
+        extra.update(
+            {
+                "pagination_required": paginator.count > SCHEDULER_CONFIG.EXECUTIONS_IN_PAGE,
+                "executions": page_obj,
+                "page_range": page_range,
+                "page_var": "p",
+            }
+        )

-        return super(TaskAdmin, self).change_view(
-            request, object_id, form_url, extra_context=extra)
+        return super(TaskAdmin, self).change_view(request, object_id, form_url, extra_context=extra)

     def delete_queryset(self, request, queryset):
         for job in queryset:
@@ -129,7 +214,7 @@ def delete_model(self, request, obj):
         obj.unschedule()
         super(TaskAdmin, self).delete_model(request, obj)

-    @admin.action(description=_("Disable selected %(verbose_name_plural)s"), permissions=('change',))
+    @admin.action(description=_("Disable selected %(verbose_name_plural)s"), permissions=("change",))
     def disable_selected(self, request, queryset):
         rows_updated = 0
         for obj in queryset.filter(enabled=True).iterator():
@@ -142,7 +227,7 @@ def disable_selected(self, request, queryset):
         level = messages.WARNING if not rows_updated else messages.INFO
         self.message_user(request, f"{message_bit} successfully disabled and unscheduled.", level=level)

-    @admin.action(description=_("Enable selected %(verbose_name_plural)s"), permissions=('change',))
+    @admin.action(description=_("Enable selected %(verbose_name_plural)s"), permissions=("change",))
     def enable_selected(self, request, queryset):
         rows_updated = 0
         for obj in queryset.filter(enabled=False).iterator():
@@ -154,10 +239,13 @@ def enable_selected(self, request, queryset):
         level = messages.WARNING if not rows_updated else messages.INFO
         self.message_user(request, f"{message_bit} successfully enabled and scheduled.", level=level)

-    @admin.action(description="Enqueue now", permissions=('change',))
+    @admin.action(description="Enqueue now", permissions=("change",))
     def enqueue_job_now(self, request, queryset):
         task_names = []
         for task in queryset:
             task.enqueue_to_run()
             task_names.append(task.name)
-        self.message_user(request, f"The following jobs have been enqueued: {', '.join(task_names)}", )
+        self.message_user(
+            request,
+            f"The following jobs have been enqueued: {', '.join(task_names)}",
+        )
diff --git a/scheduler/apps.py b/scheduler/apps.py
index d789818..3032280 100644
--- a/scheduler/apps.py
+++ b/scheduler/apps.py
@@ -3,9 +3,9 @@


 class SchedulerConfig(AppConfig):
-    default_auto_field = 'django.db.models.AutoField'
-    name = 'scheduler'
-    verbose_name = _('Tasks Scheduler')
+    default_auto_field = "django.db.models.AutoField"
+    name = "scheduler"
+    verbose_name = _("Tasks Scheduler")

     def ready(self):
         pass
diff --git a/scheduler/connection_types.py b/scheduler/connection_types.py
new file mode 100644
index 0000000..70405d6
--- /dev/null
+++ b/scheduler/connection_types.py
@@ -0,0 +1,19 @@
+from typing import Union, Dict, Tuple, Type
+
+import redis
+import valkey
+
+from scheduler.settings import Broker
+
+ConnectionErrorType = Union[redis.ConnectionError, valkey.ConnectionError]
+ConnectionType = Union[redis.Redis, valkey.Valkey]
+PipelineType = Union[redis.client.Pipeline, valkey.client.Pipeline]
+RedisSentinel = redis.sentinel.Sentinel
+
+BrokerConnectionClass: Dict[Tuple[Broker, bool], Type] = {
+    # Map of (Broker, Strict flag) => Connection Class
+    (Broker.REDIS, False): redis.Redis,
+    (Broker.VALKEY, False): valkey.Valkey,
+    (Broker.REDIS, True): redis.StrictRedis,
+    (Broker.VALKEY, True): valkey.StrictValkey,
+}
diff --git a/scheduler/decorators.py b/scheduler/decorators.py
index f78b467..76c1a1c 100644
--- a/scheduler/decorators.py
+++ b/scheduler/decorators.py
@@ -13,11 +13,11 @@ def job(*args, **kwargs):
     """
     if len(args) == 0:
         func = None
-        queue = 'default'
+        queue = "default"
     else:
         if callable(args[0]):
             func = args[0]
-            queue = 'default'
+            queue = "default"
         else:
             func = None
             queue = args[0]
@@ -26,15 +26,13 @@ def job(*args, **kwargs):
     if isinstance(queue, str):
         try:
             queue = get_queue(queue)
-            if 'connection' not in kwargs:
-                kwargs['connection'] = queue.connection
+            if "connection" not in kwargs:
+                kwargs["connection"] = queue.connection
         except KeyError:
-            raise QueueNotFoundError(f'Queue {queue} does not exist')
+            raise QueueNotFoundError(f"Queue {queue} does not exist")

-    config = settings.SCHEDULER_CONFIG
-
-    kwargs.setdefault('result_ttl', config.get('DEFAULT_RESULT_TTL'))
-    kwargs.setdefault('timeout', config.get('DEFAULT_TIMEOUT'))
+    kwargs.setdefault("result_ttl", settings.SCHEDULER_CONFIG.DEFAULT_RESULT_TTL)
+    kwargs.setdefault("timeout", settings.SCHEDULER_CONFIG.DEFAULT_TIMEOUT)

     decorator = rq_job_decorator(queue, *args, **kwargs)
     if func:
diff --git a/scheduler/management/commands/delete_failed_executions.py b/scheduler/management/commands/delete_failed_executions.py
index cf3f59e..01224e0 100644
--- a/scheduler/management/commands/delete_failed_executions.py
+++ b/scheduler/management/commands/delete_failed_executions.py
@@ -6,26 +6,24 @@


 class Command(BaseCommand):
-    help = 'Delete failed jobs from Django queue.'
+    help = "Delete failed jobs from Django queue."

     def add_arguments(self, parser):
-        parser.add_argument(
-            '--queue', '-q', dest='queue', default='default',
-            help='Specify the queue [default]')
-        parser.add_argument('-f', '--func', help='optional job function name, e.g. "app.tasks.func"')
-        parser.add_argument('--dry-run', action='store_true', help='Do not actually delete failed jobs')
+        parser.add_argument("--queue", "-q", dest="queue", default="default", help="Specify the queue [default]")
+        parser.add_argument("-f", "--func", help='optional job function name, e.g. "app.tasks.func"')
"app.tasks.func"') + parser.add_argument("--dry-run", action="store_true", help="Do not actually delete failed jobs") def handle(self, *args, **options): - queue = get_queue(options.get('queue', 'default')) + queue = get_queue(options.get("queue", "default")) job_ids = queue.failed_job_registry.get_job_ids() jobs = JobExecution.fetch_many(job_ids, connection=queue.connection) - func_name = options.get('func', None) + func_name = options.get("func", None) if func_name is not None: jobs = [job for job in jobs if job.func_name == func_name] - dry_run = options.get('dry_run', False) - click.echo(f'Found {len(jobs)} failed jobs') + dry_run = options.get("dry_run", False) + click.echo(f"Found {len(jobs)} failed jobs") for job in jobs: - click.echo(f'Deleting {job.id}') + click.echo(f"Deleting {job.id}") if not dry_run: job.delete() - click.echo(f'Deleted {len(jobs)} failed jobs') + click.echo(f"Deleted {len(jobs)} failed jobs") diff --git a/scheduler/management/commands/export.py b/scheduler/management/commands/export.py index 594d853..6a83595 100644 --- a/scheduler/management/commands/export.py +++ b/scheduler/management/commands/export.py @@ -11,48 +11,53 @@ class Command(BaseCommand): """ Export all scheduled jobs """ + help = __doc__ def add_arguments(self, parser): parser.add_argument( - '-o', '--output', - action='store', - choices=['json', 'yaml'], - default='json', - dest='format', - help='format of output', + "-o", + "--output", + action="store", + choices=["json", "yaml"], + default="json", + dest="format", + help="format of output", ) parser.add_argument( - '-e', '--enabled', - action='store_true', - dest='enabled', - help='Export only enabled jobs', + "-e", + "--enabled", + action="store_true", + dest="enabled", + help="Export only enabled jobs", ) parser.add_argument( - '-f', '--filename', - action='store', - dest='filename', - help='File name to load (otherwise writes to standard output)', + "-f", + "--filename", + action="store", + dest="filename", + help="File name to load (otherwise writes to standard output)", ) def handle(self, *args, **options): - file = open(options.get('filename'), 'w') if options.get("filename") else sys.stdout + file = open(options.get("filename"), "w") if options.get("filename") else sys.stdout res = list() for model_name in MODEL_NAMES: - model = apps.get_model(app_label='scheduler', model_name=model_name) + model = apps.get_model(app_label="scheduler", model_name=model_name) jobs = model.objects.all() - if options.get('enabled'): + if options.get("enabled"): jobs = jobs.filter(enabled=True) for job in jobs: res.append(job.to_dict()) - if options.get("format") == 'json': + if options.get("format") == "json": import json + click.echo(json.dumps(res, indent=2), file=file) return - if options.get("format") == 'yaml': + if options.get("format") == "yaml": try: import yaml except ImportError: diff --git a/scheduler/management/commands/import.py b/scheduler/management/commands/import.py index c0ad01b..60934db 100644 --- a/scheduler/management/commands/import.py +++ b/scheduler/management/commands/import.py @@ -13,14 +13,14 @@ def job_model_str(model_str: str) -> str: - if model_str.find('Job') == len(model_str) - 3: - return model_str[:-3] + 'Task' + if model_str.find("Job") == len(model_str) - 3: + return model_str[:-3] + "Task" return model_str def create_job_from_dict(job_dict: Dict[str, Any], update): - model = apps.get_model(app_label='scheduler', model_name=job_model_str(job_dict['model'])) - existing_job = 
+    model = apps.get_model(app_label="scheduler", model_name=job_model_str(job_dict["model"]))
+    existing_job = model.objects.filter(name=job_dict["name"]).first()
     if existing_job:
         if update:
             click.echo(f'Found existing job "{existing_job}, removing it to be reinserted"')
@@ -29,76 +29,87 @@ def create_job_from_dict(job_dict: Dict[str, Any], update):
             click.echo(f'Found existing job "{existing_job}", skipping')
             return
     kwargs = dict(job_dict)
-    del kwargs['model']
-    del kwargs['callable_args']
-    del kwargs['callable_kwargs']
-    if kwargs.get('scheduled_time', None):
-        target = timezone.datetime.fromisoformat(kwargs['scheduled_time'])
+    del kwargs["model"]
+    del kwargs["callable_args"]
+    del kwargs["callable_kwargs"]
+    if kwargs.get("scheduled_time", None):
+        target = timezone.datetime.fromisoformat(kwargs["scheduled_time"])
         if not settings.USE_TZ and not timezone.is_naive(target):
             target = timezone.make_naive(target)
-        kwargs['scheduled_time'] = target
+        kwargs["scheduled_time"] = target
     model_fields = set(map(lambda field: field.attname, model._meta.get_fields()))
     keys_to_ignore = list(filter(lambda k: k not in model_fields, kwargs.keys()))
     for k in keys_to_ignore:
         del kwargs[k]
     scheduled_job = model.objects.create(**kwargs)
-    click.echo(f'Created job {scheduled_job}')
+    click.echo(f"Created job {scheduled_job}")
     content_type = ContentType.objects.get_for_model(scheduled_job)
-    for arg in job_dict['callable_args']:
+    for arg in job_dict["callable_args"]:
         TaskArg.objects.create(
-            content_type=content_type, object_id=scheduled_job.id, **arg, )
-    for arg in job_dict['callable_kwargs']:
+            content_type=content_type,
+            object_id=scheduled_job.id,
+            **arg,
+        )
+    for arg in job_dict["callable_kwargs"]:
         TaskKwarg.objects.create(
-            content_type=content_type, object_id=scheduled_job.id, **arg, )
+            content_type=content_type,
+            object_id=scheduled_job.id,
+            **arg,
+        )


 class Command(BaseCommand):
     """
     Import scheduled jobs
     """
+
     help = __doc__

     def add_arguments(self, parser):
         parser.add_argument(
-            '-f', '--format',
-            action='store',
-            choices=['json', 'yaml'],
-            default='json',
-            dest='format',
-            help='format of input',
+            "-f",
+            "--format",
+            action="store",
+            choices=["json", "yaml"],
+            default="json",
+            dest="format",
+            help="format of input",
         )
         parser.add_argument(
-            '--filename',
-            action='store',
-            dest='filename',
-            help='File name to load (otherwise loads from standard input)',
+            "--filename",
+            action="store",
+            dest="filename",
+            help="File name to load (otherwise loads from standard input)",
         )
         parser.add_argument(
-            '-r', '--reset',
-            action='store_true',
-            dest='reset',
-            help='Remove all currently scheduled jobs before importing',
+            "-r",
+            "--reset",
+            action="store_true",
+            dest="reset",
+            help="Remove all currently scheduled jobs before importing",
         )
         parser.add_argument(
-            '-u', '--update',
-            action='store_true',
-            dest='update',
-            help='Update existing records',
+            "-u",
+            "--update",
+            action="store_true",
+            dest="update",
+            help="Update existing records",
         )

     def handle(self, *args, **options):
-        file = open(options.get('filename')) if options.get("filename") else sys.stdin
+        file = open(options.get("filename")) if options.get("filename") else sys.stdin
         jobs = list()
-        if options.get("format") == 'json':
+        if options.get("format") == "json":
             import json
+
             try:
                 jobs = json.load(file)
             except json.decoder.JSONDecodeError:
-                click.echo('Error decoding json', err=True)
+                click.echo("Error decoding json", err=True)
                 exit(1)
-        elif options.get("format") == 'yaml':
+        elif options.get("format") == "yaml":
             try:
                 import yaml
             except ImportError:
@@ -108,10 +119,10 @@ def handle(self, *args, **options):
             yaml.Dumper.ignore_aliases = lambda *x: True
             jobs = yaml.load(file, yaml.SafeLoader)

-        if options.get('reset'):
+        if options.get("reset"):
             for model_name in MODEL_NAMES:
-                model = apps.get_model(app_label='scheduler', model_name=model_name)
+                model = apps.get_model(app_label="scheduler", model_name=model_name)
                 model.objects.all().delete()

         for job in jobs:
-            create_job_from_dict(job, update=options.get('update'))
+            create_job_from_dict(job, update=options.get("update"))
diff --git a/scheduler/management/commands/rqstats.py b/scheduler/management/commands/rqstats.py
index fa9c810..2b9ac98 100644
--- a/scheduler/management/commands/rqstats.py
+++ b/scheduler/management/commands/rqstats.py
@@ -9,13 +9,14 @@
 ANSI_LIGHT_WHITE = "\033[1;37m"
 ANSI_RESET = "\033[0m"

-KEYS = ('jobs', 'started_jobs', 'deferred_jobs', 'finished_jobs', 'canceled_jobs', 'workers')
+KEYS = ("jobs", "started_jobs", "deferred_jobs", "finished_jobs", "canceled_jobs", "workers")


 class Command(BaseCommand):
     """
     Print statistics
     """
+
     help = __doc__

     def __init__(self, *args, **kwargs):
@@ -25,21 +26,31 @@ def add_arguments(self, parser):
         parser.add_argument(
-            '-j', '--json', action='store_true', dest='json',
-            help='Output statistics as JSON', )
+            "-j",
+            "--json",
+            action="store_true",
+            dest="json",
+            help="Output statistics as JSON",
+        )
         parser.add_argument(
-            '-y', '--yaml', action='store_true', dest='yaml',
-            help='Output statistics as YAML',
+            "-y",
+            "--yaml",
+            action="store_true",
+            dest="yaml",
+            help="Output statistics as YAML",
         )
         parser.add_argument(
-            '-i', '--interval', dest='interval', type=float,
-            help='Poll statistics every N seconds',
+            "-i",
+            "--interval",
+            dest="interval",
+            type=float,
+            help="Poll statistics every N seconds",
         )

     def _print_separator(self):
-        click.echo('-' * self.table_width)
+        click.echo("-" * self.table_width)

     def _print_stats_dashboard(self, statistics, prev_stats=None):
         if self.interval:
@@ -48,24 +59,22 @@ def _print_stats_dashboard(self, statistics, prev_stats=None):
         click.echo("Django-Scheduler CLI Dashboard")
         click.echo()
         self._print_separator()
-        click.echo(f'| {"Name":<16} | Queued | Active | Deferred |'
-                   f' Finished |'
-                   f' Canceled |'
-                   f' Workers |')
+        click.echo(
+            f'| {"Name":<16} | Queued | Active | Deferred |' f" Finished |" f" Canceled |" f" Workers |"
+        )
         self._print_separator()
         for ind, queue in enumerate(statistics["queues"]):
             vals = list((queue[k] for k in KEYS))
             # Deal with colors
-            if prev_stats and len(prev_stats['queues']) > ind:
+            if prev_stats and len(prev_stats["queues"]) > ind:
                 prev = prev_stats["queues"][ind]
                 prev_vals = (prev[k] for k in KEYS)
-                colors = [ANSI_LIGHT_GREEN
-                          if vals[i] != prev_vals[i] else ANSI_LIGHT_WHITE
-                          for i in range(len(prev_vals))
-                          ]
+                colors = [
+                    ANSI_LIGHT_GREEN if vals[i] != prev_vals[i] else ANSI_LIGHT_WHITE for i in range(len(prev_vals))
+                ]
             else:
                 colors = [ANSI_LIGHT_WHITE for _ in range(len(vals))]
-            to_print = ' | '.join([f'{colors[i]}{vals[i]:9}{ANSI_RESET}' for i in range(len(vals))])
+            to_print = " | ".join([f"{colors[i]}{vals[i]:9}{ANSI_RESET}" for i in range(len(vals))])
             click.echo(f'| {queue["name"]:<16} | {to_print} |', color=True)
             self._print_separator()
@@ -78,17 +87,22 @@ def handle(self, *args, **options):
         if options.get("json"):
             import json

-            click.secho(json.dumps(get_statistics(), indent=2), )
+
+            click.secho(
+                json.dumps(get_statistics(), indent=2),
+            )
             return

         if options.get("yaml"):
             try:
                 import yaml
             except ImportError:
-                click.secho("Aborting. yaml not supported", err=True, fg='red')
+                click.secho("Aborting. yaml not supported", err=True, fg="red")
                 return

-            click.secho(yaml.dump(get_statistics(), default_flow_style=False), )
+            click.secho(
+                yaml.dump(get_statistics(), default_flow_style=False),
+            )
             return

         self.interval = options.get("interval")
diff --git a/scheduler/management/commands/rqworker.py b/scheduler/management/commands/rqworker.py
index 89a1aa6..a4dbd9e 100644
--- a/scheduler/management/commands/rqworker.py
+++ b/scheduler/management/commands/rqworker.py
@@ -3,12 +3,13 @@
 import sys

 import click
+import redis
+import valkey
 from django.core.management.base import BaseCommand
 from django.db import connections
-from django.template.defaultfilters import default
-from redis.exceptions import ConnectionError
 from rq.logutils import setup_loghandlers

+from scheduler.rq_classes import register_sentry
 from scheduler.tools import create_worker

 VERBOSITY_TO_LOG_LEVEL = {
@@ -33,39 +34,65 @@ class Command(BaseCommand):
     python manage.py rqworker high medium low
     """

-    args = ''
+    args = ""

     def add_arguments(self, parser):
-        parser.add_argument('--pid', action='store', dest='pidfile',
-                            default=None, help='file to write the worker`s pid into')
-        parser.add_argument('--burst', action='store_true', dest='burst',
-                            default=False, help='Run worker in burst mode')
-        parser.add_argument('--name', action='store', dest='name',
-                            default=None, help='Name of the worker')
-        parser.add_argument('--worker-ttl', action='store', type=int, dest='worker_ttl', default=420,
-                            help='Default worker timeout to be used')
-        parser.add_argument('--max-jobs', action='store', default=None, dest='max_jobs', type=int,
-                            help='Maximum number of jobs to execute before terminating worker')
-        parser.add_argument('--fork-job-execution', action='store', default=True, dest='fork_job_execution', type=bool,
-                            help='Fork job execution to another process')
-        parser.add_argument('--job-class', action='store', dest='job_class',
-                            help='Jobs class to use')
         parser.add_argument(
-            'queues', nargs='*', type=str,
-            help='The queues to work on, separated by space, all queues should be using the same redis')
+            "--pid", action="store", dest="pidfile", default=None, help="file to write the worker`s pid into"
+        )
+        parser.add_argument(
+            "--burst", action="store_true", dest="burst", default=False, help="Run worker in burst mode"
+        )
+        parser.add_argument("--name", action="store", dest="name", default=None, help="Name of the worker")
+        parser.add_argument(
+            "--worker-ttl",
+            action="store",
+            type=int,
+            dest="worker_ttl",
+            default=420,
+            help="Default worker timeout to be used",
+        )
+        parser.add_argument(
+            "--max-jobs",
+            action="store",
+            default=None,
+            dest="max_jobs",
+            type=int,
+            help="Maximum number of jobs to execute before terminating worker",
+        )
+        parser.add_argument(
+            "--fork-job-execution",
+            action="store",
+            default=True,
+            dest="fork_job_execution",
+            type=bool,
+            help="Fork job execution to another process",
+        )
+        parser.add_argument("--job-class", action="store", dest="job_class", help="Jobs class to use")
+        parser.add_argument(
+            "queues",
+            nargs="*",
+            type=str,
+            help="The queues to work on, separated by space, all queues should be using the same redis",
+        )
+        parser.add_argument("--sentry-dsn", action="store", dest="sentry_dsn", help="Sentry DSN to use")
+        parser.add_argument("--sentry-debug", action="store_true", dest="sentry_debug", help="Enable Sentry debug mode")
action="store_true", dest="sentry_debug", help="Enable Sentry debug mode") + parser.add_argument("--sentry-ca-certs", action="store", dest="sentry_ca_certs", help="Path to CA certs file") def handle(self, **options): - queues = options.get('queues', []) + queues = options.get("queues", []) if not queues: - queues = ['default', ] - click.echo(f'Starting worker for queues {queues}') - pidfile = options.get('pidfile') + queues = [ + "default", + ] + click.echo(f"Starting worker for queues {queues}") + pidfile = options.get("pidfile") if pidfile: with open(os.path.expanduser(pidfile), "w") as fp: fp.write(str(os.getpid())) # Verbosity is defined by default in BaseCommand for all commands - verbosity = options.get('verbosity', 1) + verbosity = options.get("verbosity", 1) log_level = VERBOSITY_TO_LOG_LEVEL.get(verbosity, logging.INFO) setup_loghandlers(log_level) @@ -73,17 +100,25 @@ def handle(self, **options): # Instantiate a worker w = create_worker( *queues, - name=options['name'], - job_class=options.get('job_class'), - default_worker_ttl=options['worker_ttl'], - fork_job_execution=options['fork_job_execution'], ) + name=options["name"], + job_class=options.get("job_class"), + default_worker_ttl=options["worker_ttl"], + fork_job_execution=options["fork_job_execution"], + ) # Close any opened DB connection before any fork reset_db_connections() - w.work(burst=options.get('burst', False), - logging_level=log_level, - max_jobs=options['max_jobs'], ) - except ConnectionError as e: + # Check whether sentry is enabled + if options.get("sentry_dsn") is not None: + sentry_opts = dict(ca_certs=options.get("sentry_ca_certs"), debug=options.get("sentry_debug")) + register_sentry(options.get("sentry_dsn"), **sentry_opts) + + w.work( + burst=options.get("burst", False), + logging_level=log_level, + max_jobs=options["max_jobs"], + ) + except (redis.ConnectionError, valkey.ConnectionError) as e: click.echo(str(e), err=True) sys.exit(1) diff --git a/scheduler/management/commands/run_job.py b/scheduler/management/commands/run_job.py index 467e084..48c7458 100644 --- a/scheduler/management/commands/run_job.py +++ b/scheduler/management/commands/run_job.py @@ -9,29 +9,29 @@ class Command(BaseCommand): Queues the function given with the first argument with the parameters given with the rest of the argument list. 
""" + help = __doc__ - args = '' + args = "" def add_arguments(self, parser): + parser.add_argument("--queue", "-q", dest="queue", default="default", help="Specify the queue [default]") + parser.add_argument("--timeout", "-t", type=int, dest="timeout", help="A timeout in seconds") parser.add_argument( - '--queue', '-q', dest='queue', default='default', - help='Specify the queue [default]') - parser.add_argument( - '--timeout', '-t', type=int, dest='timeout', - help='A timeout in seconds') + "--result-ttl", "-r", type=int, dest="result_ttl", help="Time to store job results in seconds" + ) parser.add_argument( - '--result-ttl', '-r', type=int, dest='result_ttl', - help='Time to store job results in seconds') - parser.add_argument('callable', help='Method to call', ) - parser.add_argument('args', nargs='*', help='Args for callable') + "callable", + help="Method to call", + ) + parser.add_argument("args", nargs="*", help="Args for callable") def handle(self, **options): - verbosity = int(options.get('verbosity', 1)) - timeout = options.get('timeout') - result_ttl = options.get('result_ttl') - queue = get_queue(options.get('queue')) - func = options.get('callable') - args = options.get('args') + verbosity = int(options.get("verbosity", 1)) + timeout = options.get("timeout") + result_ttl = options.get("result_ttl") + queue = get_queue(options.get("queue")) + func = options.get("callable") + args = options.get("args") job = queue.enqueue_call(func, args=args, timeout=timeout, result_ttl=result_ttl) if verbosity: - click.echo(f'Job {job.id} created') + click.echo(f"Job {job.id} created") diff --git a/scheduler/models/args.py b/scheduler/models/args.py index 532b0c6..f7cd57b 100644 --- a/scheduler/models/args.py +++ b/scheduler/models/args.py @@ -10,51 +10,56 @@ from scheduler import tools ARG_TYPE_TYPES_DICT = { - 'str': str, - 'int': int, - 'bool': bool, - 'datetime': datetime, - 'callable': Callable, + "str": str, + "int": int, + "bool": bool, + "datetime": datetime, + "callable": Callable, } class BaseTaskArg(models.Model): class ArgType(models.TextChoices): - STR = 'str', _('string') - INT = 'int', _('int') - BOOL = 'bool', _('boolean') - DATETIME = 'datetime', _('datetime') - CALLABLE = 'callable', _('callable') + STR = "str", _("string") + INT = "int", _("int") + BOOL = "bool", _("boolean") + DATETIME = "datetime", _("datetime") + CALLABLE = "callable", _("callable") arg_type = models.CharField( - _('Argument Type'), max_length=12, choices=ArgType.choices, default=ArgType.STR, + _("Argument Type"), + max_length=12, + choices=ArgType.choices, + default=ArgType.STR, ) - val = models.CharField(_('Argument Value'), blank=True, max_length=255) + val = models.CharField(_("Argument Value"), blank=True, max_length=255) content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE) object_id = models.PositiveIntegerField() content_object = GenericForeignKey() def clean(self): if self.arg_type not in ARG_TYPE_TYPES_DICT: - raise ValidationError({ - 'arg_type': ValidationError( - _(f'Could not parse {self.arg_type}, options are: {ARG_TYPE_TYPES_DICT.keys()}'), code='invalid') - }) + raise ValidationError( + { + "arg_type": ValidationError( + _(f"Could not parse {self.arg_type}, options are: {ARG_TYPE_TYPES_DICT.keys()}"), code="invalid" + ) + } + ) try: - if self.arg_type == 'callable': + if self.arg_type == "callable": tools.callable_func(self.val) - elif self.arg_type == 'datetime': + elif self.arg_type == "datetime": datetime.fromisoformat(self.val) - elif self.arg_type == 'bool': - if 
self.val.lower() not in {'true', 'false'}:
+            elif self.arg_type == "bool":
+                if self.val.lower() not in {"true", "false"}:
                     raise ValidationError
-            elif self.arg_type == 'int':
+            elif self.arg_type == "int":
                 int(self.val)
         except Exception:
-            raise ValidationError({
-                'arg_type': ValidationError(
-                    _(f'Could not parse {self.val} as {self.arg_type}'), code='invalid')
-            })
+            raise ValidationError(
+                {"arg_type": ValidationError(_(f"Could not parse {self.val} as {self.arg_type}"), code="invalid")}
+            )

     def save(self, **kwargs):
         super(BaseTaskArg, self).save(**kwargs)
@@ -65,24 +70,24 @@ def delete(self, **kwargs):
         self.content_object.save()

     def value(self):
-        if self.arg_type == 'callable':
+        if self.arg_type == "callable":
             res = tools.callable_func(self.val)()
-        elif self.arg_type == 'datetime':
+        elif self.arg_type == "datetime":
             res = datetime.fromisoformat(self.val)
-        elif self.arg_type == 'bool':
-            res = self.val.lower() == 'true'
+        elif self.arg_type == "bool":
+            res = self.val.lower() == "true"
         else:
             res = ARG_TYPE_TYPES_DICT[self.arg_type](self.val)
         return res

     class Meta:
         abstract = True
-        ordering = ['id']
+        ordering = ["id"]


 class TaskArg(BaseTaskArg):
     def __str__(self):
-        return f'TaskArg[arg_type={self.arg_type},value={self.value()}]'
+        return f"TaskArg[arg_type={self.arg_type},value={self.value()}]"


 class TaskKwarg(BaseTaskArg):
@@ -90,7 +95,7 @@ class TaskKwarg(BaseTaskArg):

     def __str__(self):
         key, value = self.value()
-        return f'TaskKwarg[key={key},arg_type={self.arg_type},value={self.val}]'
+        return f"TaskKwarg[key={key},arg_type={self.arg_type},value={self.val}]"

     def value(self):
         return self.key, super(TaskKwarg, self).value()
diff --git a/scheduler/models/queue.py b/scheduler/models/queue.py
index 5c03689..84b6c06 100644
--- a/scheduler/models/queue.py
+++ b/scheduler/models/queue.py
@@ -8,5 +8,5 @@ class Queue(models.Model):
     class Meta:
         managed = False  # not in Django's database
         default_permissions = ()
-        permissions = [['view', 'Access admin page']]
+        permissions = [["view", "Access admin page"]]
         verbose_name_plural = " Queues"
diff --git a/scheduler/models/scheduled_task.py b/scheduler/models/scheduled_task.py
index 1f0d4d1..ece0d2e 100644
--- a/scheduler/models/scheduled_task.py
+++ b/scheduler/models/scheduled_task.py
@@ -25,20 +25,22 @@ from scheduler.settings import QUEUES
 from scheduler.settings import logger

-SCHEDULER_INTERVAL = settings.SCHEDULER_CONFIG['SCHEDULER_INTERVAL']
+SCHEDULER_INTERVAL = settings.SCHEDULER_CONFIG.SCHEDULER_INTERVAL


 def failure_callback(job, connection, result, *args, **kwargs):
-    model_name = job.meta.get('task_type', None)
+    model_name = job.meta.get("task_type", None)
     if model_name is None:
         return
-    model = apps.get_model(app_label='scheduler', model_name=model_name)
+    model = apps.get_model(app_label="scheduler", model_name=model_name)
     task = model.objects.filter(job_id=job.id).first()
     if task is None:
-        logger.warn(f'Could not find {model_name} task for job {job.id}')
+        logger.warning(f"Could not find {model_name} task for job {job.id}")
         return
-    mail_admins(f'Task {task.id}/{task.name} has failed',
-                'See django-admin for logs', )
+    mail_admins(
+        f"Task {task.id}/{task.name} has failed",
+        "See django-admin for logs",
+    )
     task.job_id = None
     if isinstance(task, (CronTask, RepeatableTask)):
         task.failed_runs += 1
@@ -47,10 +49,10 @@ def failure_callback(job, connection, result, *args, **kwargs):

 def success_callback(job, connection, result, *args, **kwargs):
-    model_name = job.meta.get('task_type', None)
+    model_name = 
job.meta.get("task_type", None) if model_name is None: return - model = apps.get_model(app_label='scheduler', model_name=model_name) + model = apps.get_model(app_label="scheduler", model_name=model_name) task = model.objects.filter(job_id=job.id).first() if task is None: return @@ -68,45 +70,66 @@ def get_queue_choices(): class BaseTask(models.Model): created = models.DateTimeField(auto_now_add=True) modified = models.DateTimeField(auto_now=True) - TASK_TYPE = 'BaseTask' + TASK_TYPE = "BaseTask" name = models.CharField( - _('name'), max_length=128, unique=True, - help_text='Name of the job.', ) - callable = models.CharField(_('callable'), max_length=2048) - callable_args = GenericRelation(TaskArg, related_query_name='args') - callable_kwargs = GenericRelation(TaskKwarg, related_query_name='kwargs') + _("name"), + max_length=128, + unique=True, + help_text="Name of the job.", + ) + callable = models.CharField(_("callable"), max_length=2048) + callable_args = GenericRelation(TaskArg, related_query_name="args") + callable_kwargs = GenericRelation(TaskKwarg, related_query_name="kwargs") enabled = models.BooleanField( - _('enabled'), default=True, - help_text=_('Should job be scheduled? This field is useful to keep ' - 'past jobs that should no longer be scheduled'), + _("enabled"), + default=True, + help_text=_( + "Should job be scheduled? This field is useful to keep " "past jobs that should no longer be scheduled" + ), ) queue = models.CharField( - _('queue'), max_length=255, choices=get_queue_choices, - help_text=_('Queue name'), ) + _("queue"), + max_length=255, + choices=get_queue_choices, + help_text=_("Queue name"), + ) job_id = models.CharField( - _('job id'), max_length=128, editable=False, blank=True, null=True, - help_text=_('Current job_id on queue')) + _("job id"), max_length=128, editable=False, blank=True, null=True, help_text=_("Current job_id on queue") + ) at_front = models.BooleanField( - _('At front'), default=False, blank=True, null=True, - help_text=_('When queuing the job, add it in the front of the queue'), ) + _("At front"), + default=False, + blank=True, + null=True, + help_text=_("When queuing the job, add it in the front of the queue"), + ) timeout = models.IntegerField( - _('timeout'), blank=True, null=True, - help_text=_("Timeout specifies the maximum runtime, in seconds, for the job " - "before it'll be considered 'lost'. Blank uses the default " - "timeout."), ) + _("timeout"), + blank=True, + null=True, + help_text=_( + "Timeout specifies the maximum runtime, in seconds, for the job " + "before it'll be considered 'lost'. Blank uses the default " + "timeout." + ), + ) result_ttl = models.IntegerField( - _('result ttl'), blank=True, null=True, + _("result ttl"), + blank=True, + null=True, help_text=mark_safe( """The TTL value (in seconds) of the job result.
-1: Result never expires, you should delete jobs manually.
0: Result gets deleted immediately.
- >0: Result expires after n seconds."""), ) + >0: Result expires after n seconds.""" + ), + ) def callable_func(self): """Translate callable string to callable""" return tools.callable_func(self.callable) - @admin.display(boolean=True, description=_('is scheduled?')) + @admin.display(boolean=True, description=_("is scheduled?")) def is_scheduled(self) -> bool: """Check whether a next job for this task is queued/scheduled to be executed""" if self.job_id is None: # no job_id => is not scheduled @@ -115,9 +138,7 @@ def is_scheduled(self) -> bool: scheduled_jobs = self.rqueue.scheduled_job_registry.get_job_ids() enqueued_jobs = self.rqueue.get_job_ids() active_jobs = self.rqueue.started_job_registry.get_job_ids() - res = ((self.job_id in scheduled_jobs) - or (self.job_id in enqueued_jobs) - or (self.job_id in active_jobs)) + res = (self.job_id in scheduled_jobs) or (self.job_id in enqueued_jobs) or (self.job_id in active_jobs) # If the job_id is not scheduled/queued/started, # update the job_id to None. (The job_id belongs to a previous run which is completed) if not res: @@ -125,12 +146,12 @@ def is_scheduled(self) -> bool: super(BaseTask, self).save() return res - @admin.display(description='Callable') + @admin.display(description="Callable") def function_string(self) -> str: args = self.parse_args() args_list = [repr(arg) for arg in args] kwargs = self.parse_kwargs() - kwargs_list = [k + '=' + repr(v) for (k, v) in kwargs.items()] + kwargs_list = [k + "=" + repr(v) for (k, v) in kwargs.items()] return self.callable + f"({', '.join(args_list + kwargs_list)})" def parse_args(self): @@ -145,8 +166,8 @@ def parse_kwargs(self): def _next_job_id(self): addition = uuid.uuid4().hex[-10:] - name = self.name.replace('/', '.') - return f'{self.queue}:{name}:{addition}' + name = self.name.replace("/", ".") + return f"{self.queue}:{name}:{addition}" def _enqueue_args(self) -> Dict: """Args for DjangoQueue.enqueue. @@ -167,16 +188,16 @@ def _enqueue_args(self) -> Dict: job_id=self._next_job_id(), ) if self.at_front: - res['at_front'] = self.at_front + res["at_front"] = self.at_front if self.timeout: - res['job_timeout'] = self.timeout + res["job_timeout"] = self.timeout if self.result_ttl is not None: - res['result_ttl'] = self.result_ttl + res["result_ttl"] = self.result_ttl return res @property def rqueue(self) -> DjangoQueue: - """Returns redis-queue for job""" + """Returns django-queue for job""" return get_queue(self.queue) def ready_for_schedule(self) -> bool: @@ -188,10 +209,10 @@ def ready_for_schedule(self) -> bool: :returns: True if the task is ready to be scheduled. 
""" if self.is_scheduled(): - logger.debug(f'Task {self.name} already scheduled') + logger.debug(f"Task {self.name} already scheduled") return False if not self.enabled: - logger.debug(f'Task {str(self)} disabled, enable task before scheduling') + logger.debug(f"Task {str(self)} disabled, enable task before scheduling") return False return True @@ -207,7 +228,8 @@ def schedule(self) -> bool: schedule_time, tools.run_task, args=(self.TASK_TYPE, self.id), - **kwargs, ) + **kwargs, + ) self.job_id = job.id super(BaseTask, self).save() return True @@ -248,41 +270,55 @@ def to_dict(self) -> Dict: name=self.name, callable=self.callable, callable_args=[ - dict(arg_type=arg.arg_type, val=arg.val, ) - for arg in self.callable_args.all()], + dict( + arg_type=arg.arg_type, + val=arg.val, + ) + for arg in self.callable_args.all() + ], callable_kwargs=[ - dict(arg_type=arg.arg_type, key=arg.key, val=arg.val, ) - for arg in self.callable_kwargs.all()], + dict( + arg_type=arg.arg_type, + key=arg.key, + val=arg.val, + ) + for arg in self.callable_kwargs.all() + ], enabled=self.enabled, queue=self.queue, - repeat=getattr(self, 'repeat', None), + repeat=getattr(self, "repeat", None), at_front=self.at_front, timeout=self.timeout, result_ttl=self.result_ttl, - cron_string=getattr(self, 'cron_string', None), + cron_string=getattr(self, "cron_string", None), scheduled_time=self._schedule_time().isoformat(), - interval=getattr(self, 'interval', None), - interval_unit=getattr(self, 'interval_unit', None), - successful_runs=getattr(self, 'successful_runs', None), - failed_runs=getattr(self, 'failed_runs', None), - last_successful_run=getattr(self, 'last_successful_run', None), - last_failed_run=getattr(self, 'last_failed_run', None), + interval=getattr(self, "interval", None), + interval_unit=getattr(self, "interval_unit", None), + successful_runs=getattr(self, "successful_runs", None), + failed_runs=getattr(self, "failed_runs", None), + last_successful_run=getattr(self, "last_successful_run", None), + last_failed_run=getattr(self, "last_failed_run", None), ) return res def get_absolute_url(self): model = self._meta.model.__name__.lower() - return reverse(f'admin:scheduler_{model}_change', args=[self.id, ]) + return reverse( + f"admin:scheduler_{model}_change", + args=[ + self.id, + ], + ) def __str__(self): func = self.function_string() - return f'{self.TASK_TYPE}[{self.name}={func}]' + return f"{self.TASK_TYPE}[{self.name}={func}]" def save(self, **kwargs): - schedule_job = kwargs.pop('schedule_job', True) - update_fields = kwargs.get('update_fields', None) + schedule_job = kwargs.pop("schedule_job", True) + update_fields = kwargs.get("update_fields", None) if update_fields: - kwargs['update_fields'] = set(update_fields).union({'modified'}) + kwargs["update_fields"] = set(update_fields).union({"modified"}) super(BaseTask, self).save(**kwargs) if schedule_job: self.schedule() @@ -296,19 +332,20 @@ def clean_callable(self): try: tools.callable_func(self.callable) except Exception: - raise ValidationError({ - 'callable': ValidationError( - _('Invalid callable, must be importable'), code='invalid') - }) + raise ValidationError( + {"callable": ValidationError(_("Invalid callable, must be importable"), code="invalid")} + ) def clean_queue(self): queue_keys = settings.QUEUES.keys() if self.queue not in queue_keys: - raise ValidationError({ - 'queue': ValidationError( - _('Invalid queue, must be one of: {}'.format( - ', '.join(queue_keys))), code='invalid') - }) + raise ValidationError( + { + "queue": ValidationError( + 
_("Invalid queue, must be one of: {}".format(", ".join(queue_keys))), code="invalid" + ) + } + ) def clean(self): self.clean_queue() @@ -319,7 +356,7 @@ class Meta: class ScheduledTimeMixin(models.Model): - scheduled_time = models.DateTimeField(_('scheduled time')) + scheduled_time = models.DateTimeField(_("scheduled time")) class Meta: abstract = True @@ -327,52 +364,65 @@ class Meta: class RepeatableMixin(models.Model): failed_runs = models.PositiveIntegerField( - _('failed runs'), default=0, - help_text=_('Number of times the task has failed'), ) + _("failed runs"), + default=0, + help_text=_("Number of times the task has failed"), + ) successful_runs = models.PositiveIntegerField( - _('successful runs'), default=0, - help_text=_('Number of times the task has succeeded'), ) + _("successful runs"), + default=0, + help_text=_("Number of times the task has succeeded"), + ) last_successful_run = models.DateTimeField( - _('last successful run'), blank=True, null=True, - help_text=_('Last time the task has succeeded'), ) + _("last successful run"), + blank=True, + null=True, + help_text=_("Last time the task has succeeded"), + ) last_failed_run = models.DateTimeField( - _('last failed run'), blank=True, null=True, - help_text=_('Last time the task has failed'), ) + _("last failed run"), + blank=True, + null=True, + help_text=_("Last time the task has failed"), + ) class Meta: abstract = True class ScheduledTask(ScheduledTimeMixin, BaseTask): - TASK_TYPE = 'ScheduledTask' + TASK_TYPE = "ScheduledTask" def ready_for_schedule(self) -> bool: - return (super(ScheduledTask, self).ready_for_schedule() - and (self.scheduled_time is None - or self.scheduled_time >= timezone.now())) + return super(ScheduledTask, self).ready_for_schedule() and ( + self.scheduled_time is None or self.scheduled_time >= timezone.now() + ) class Meta: - verbose_name = _('Scheduled Task') - verbose_name_plural = _('Scheduled Tasks') - ordering = ('name',) + verbose_name = _("Scheduled Task") + verbose_name_plural = _("Scheduled Tasks") + ordering = ("name",) class RepeatableTask(RepeatableMixin, ScheduledTimeMixin, BaseTask): class TimeUnits(models.TextChoices): - SECONDS = 'seconds', _('seconds') - MINUTES = 'minutes', _('minutes') - HOURS = 'hours', _('hours') - DAYS = 'days', _('days') - WEEKS = 'weeks', _('weeks') + SECONDS = "seconds", _("seconds") + MINUTES = "minutes", _("minutes") + HOURS = "hours", _("hours") + DAYS = "days", _("days") + WEEKS = "weeks", _("weeks") - interval = models.PositiveIntegerField(_('interval')) + interval = models.PositiveIntegerField(_("interval")) interval_unit = models.CharField( - _('interval unit'), max_length=12, choices=TimeUnits.choices, default=TimeUnits.HOURS + _("interval unit"), max_length=12, choices=TimeUnits.choices, default=TimeUnits.HOURS ) repeat = models.PositiveIntegerField( - _('repeat'), blank=True, null=True, - help_text=_('Number of times to run the job. Leaving this blank means it will run forever.'), ) - TASK_TYPE = 'RepeatableTask' + _("repeat"), + blank=True, + null=True, + help_text=_("Number of times to run the job. Leaving this blank means it will run forever."), + ) + TASK_TYPE = "RepeatableTask" def clean(self): super(RepeatableTask, self).clean() @@ -382,15 +432,16 @@ def clean(self): def clean_interval_unit(self): if SCHEDULER_INTERVAL > self.interval_seconds(): raise ValidationError( - _("Job interval is set lower than %(queue)r queue's interval. 
" - "minimum interval is %(interval)"), - code='invalid', - params={'queue': self.queue, 'interval': SCHEDULER_INTERVAL}) + _("Job interval is set lower than %(queue)r queue's interval. " "minimum interval is %(interval)"), + code="invalid", + params={"queue": self.queue, "interval": SCHEDULER_INTERVAL}, + ) if self.interval_seconds() % SCHEDULER_INTERVAL: raise ValidationError( _("Job interval is not a multiple of rq_scheduler's interval frequency: %(interval)ss"), - code='invalid', - params={'interval': SCHEDULER_INTERVAL}) + code="invalid", + params={"interval": SCHEDULER_INTERVAL}, + ) def clean_result_ttl(self) -> None: """ @@ -399,22 +450,27 @@ def clean_result_ttl(self) -> None: """ if self.result_ttl and self.result_ttl != -1 and self.result_ttl < self.interval_seconds() and self.repeat: raise ValidationError( - _("Job result_ttl must be either indefinite (-1) or " - "longer than the interval, %(interval)s seconds, to ensure rescheduling."), - code='invalid', - params={'interval': self.interval_seconds()}, ) + _( + "Job result_ttl must be either indefinite (-1) or " + "longer than the interval, %(interval)s seconds, to ensure rescheduling." + ), + code="invalid", + params={"interval": self.interval_seconds()}, + ) def interval_display(self): - return '{} {}'.format(self.interval, self.get_interval_unit_display()) + return "{} {}".format(self.interval, self.get_interval_unit_display()) def interval_seconds(self): - kwargs = {self.interval_unit: self.interval, } + kwargs = { + self.interval_unit: self.interval, + } return timedelta(**kwargs).total_seconds() def _enqueue_args(self): res = super(RepeatableTask, self)._enqueue_args() - res['meta']['interval'] = self.interval_seconds() - res['meta']['repeat'] = self.repeat + res["meta"]["interval"] = self.interval_seconds() + res["meta"]["repeat"] = self.repeat return res def _schedule_time(self): @@ -435,19 +491,21 @@ def ready_for_schedule(self): return True class Meta: - verbose_name = _('Repeatable Task') - verbose_name_plural = _('Repeatable Tasks') - ordering = ('name',) + verbose_name = _("Repeatable Task") + verbose_name_plural = _("Repeatable Tasks") + ordering = ("name",) class CronTask(RepeatableMixin, BaseTask): - TASK_TYPE = 'CronTask' + TASK_TYPE = "CronTask" cron_string = models.CharField( - _('cron string'), max_length=64, + _("cron string"), + max_length=64, help_text=mark_safe( - '''Define the schedule in a crontab like syntax. - Times are in UTC. Use crontab.guru to create a cron string.''') + """Define the schedule in a crontab like syntax. + Times are in UTC. 
Use crontab.guru to create a cron string.""" + ), ) def clean(self): @@ -458,13 +516,13 @@ def clean_cron_string(self): try: croniter.croniter(self.cron_string) except ValueError as e: - raise ValidationError({'cron_string': ValidationError(_(str(e)), code='invalid')}) + raise ValidationError({"cron_string": ValidationError(_(str(e)), code="invalid")}) def _schedule_time(self): self.scheduled_time = tools.get_next_cron_time(self.cron_string) return super()._schedule_time() class Meta: - verbose_name = _('Cron Task') - verbose_name_plural = _('Cron Tasks') - ordering = ('name',) + verbose_name = _("Cron Task") + verbose_name_plural = _("Cron Tasks") + ordering = ("name",) diff --git a/scheduler/models/worker.py b/scheduler/models/worker.py index f34181e..f8ee7af 100644 --- a/scheduler/models/worker.py +++ b/scheduler/models/worker.py @@ -8,5 +8,5 @@ class Worker(models.Model): class Meta: managed = False # not in Django's database default_permissions = () - permissions = [['view', 'Access admin page']] + permissions = [["view", "Access admin page"]] verbose_name_plural = " Workers" diff --git a/scheduler/queues.py b/scheduler/queues.py index c8b88b7..8edd8fe 100644 --- a/scheduler/queues.py +++ b/scheduler/queues.py @@ -1,25 +1,26 @@ from typing import List, Dict import redis -from redis.sentinel import Sentinel +import valkey +from .connection_types import RedisSentinel, BrokerConnectionClass from .rq_classes import JobExecution, DjangoQueue, DjangoWorker -from .settings import get_config -from .settings import logger +from .settings import SCHEDULER_CONFIG +from .settings import logger, Broker _CONNECTION_PARAMS = { - 'URL', - 'DB', - 'USE_REDIS_CACHE', - 'UNIX_SOCKET_PATH', - 'HOST', - 'PORT', - 'PASSWORD', - 'SENTINELS', - 'MASTER_NAME', - 'SOCKET_TIMEOUT', - 'SSL', - 'CONNECTION_KWARGS', + "URL", + "DB", + "USE_REDIS_CACHE", + "UNIX_SOCKET_PATH", + "HOST", + "PORT", + "PASSWORD", + "SENTINELS", + "MASTER_NAME", + "SOCKET_TIMEOUT", + "SSL", + "CONNECTION_KWARGS", } @@ -31,110 +32,101 @@ def _get_redis_connection(config, use_strict_redis=False): """ Returns a redis connection from a connection config """ - if get_config('FAKEREDIS'): + if SCHEDULER_CONFIG.BROKER == Broker.FAKEREDIS: import fakeredis + redis_cls = fakeredis.FakeRedis if use_strict_redis else fakeredis.FakeStrictRedis else: - redis_cls = redis.StrictRedis if use_strict_redis else redis.Redis - logger.debug(f'Getting connection for {config}') - if 'URL' in config: - if config.get('SSL') or config.get('URL').startswith('rediss://'): + redis_cls = BrokerConnectionClass[(SCHEDULER_CONFIG.BROKER, use_strict_redis)] + logger.debug(f"Getting connection for {config}") + if "URL" in config: + if config.get("SSL") or config.get("URL").startswith("rediss://"): return redis_cls.from_url( - config['URL'], - db=config.get('DB'), - ssl_cert_reqs=config.get('SSL_CERT_REQS', 'required'), + config["URL"], + db=config.get("DB"), + ssl_cert_reqs=config.get("SSL_CERT_REQS", "required"), ) else: return redis_cls.from_url( - config['URL'], - db=config.get('DB'), + config["URL"], + db=config.get("DB"), ) - if 'UNIX_SOCKET_PATH' in config: - return redis_cls(unix_socket_path=config['UNIX_SOCKET_PATH'], db=config['DB']) + if "UNIX_SOCKET_PATH" in config: + return redis_cls(unix_socket_path=config["UNIX_SOCKET_PATH"], db=config["DB"]) - if 'SENTINELS' in config: + if "SENTINELS" in config: connection_kwargs = { - 'db': config.get('DB'), - 'password': config.get('PASSWORD'), - 'username': config.get('USERNAME'), - 'socket_timeout': 
config.get('SOCKET_TIMEOUT'),
+            "db": config.get("DB"),
+            "password": config.get("PASSWORD"),
+            "username": config.get("USERNAME"),
+            "socket_timeout": config.get("SOCKET_TIMEOUT"),
         }
-        connection_kwargs.update(config.get('CONNECTION_KWARGS', {}))
-        sentinel_kwargs = config.get('SENTINEL_KWARGS', {})
-        sentinel = Sentinel(config['SENTINELS'], sentinel_kwargs=sentinel_kwargs, **connection_kwargs)
+        connection_kwargs.update(config.get("CONNECTION_KWARGS", {}))
+        sentinel_kwargs = config.get("SENTINEL_KWARGS", {})
+        sentinel = RedisSentinel(config["SENTINELS"], sentinel_kwargs=sentinel_kwargs, **connection_kwargs)
         return sentinel.master_for(
-            service_name=config['MASTER_NAME'],
+            service_name=config["MASTER_NAME"],
             redis_class=redis_cls,
         )

     return redis_cls(
-        host=config['HOST'],
-        port=config['PORT'],
-        db=config.get('DB', 0),
-        username=config.get('USERNAME', None),
-        password=config.get('PASSWORD'),
-        ssl=config.get('SSL', False),
-        ssl_cert_reqs=config.get('SSL_CERT_REQS', 'required'),
-        **config.get('REDIS_CLIENT_KWARGS', {})
+        host=config["HOST"],
+        port=config["PORT"],
+        db=config.get("DB", 0),
+        username=config.get("USERNAME", None),
+        password=config.get("PASSWORD"),
+        ssl=config.get("SSL", False),
+        ssl_cert_reqs=config.get("SSL_CERT_REQS", "required"),
+        **config.get("REDIS_CLIENT_KWARGS", {}),
    )


 def get_connection(queue_settings, use_strict_redis=False):
-    """Returns a Redis connection to use based on parameters in SCHEDULER_QUEUES
-    """
+    """Returns a Redis connection to use based on parameters in SCHEDULER_QUEUES"""
     return _get_redis_connection(queue_settings, use_strict_redis)


 def get_queue(
-        name='default',
-        default_timeout=None, is_async=None,
-        autocommit=None,
-        connection=None,
-        **kwargs
+    name="default", default_timeout=None, is_async=None, autocommit=None, connection=None, **kwargs
 ) -> DjangoQueue:
-    """Returns an DjangoQueue using parameters defined in `SCHEDULER_QUEUES`
-    """
+    """Returns a DjangoQueue using parameters defined in `SCHEDULER_QUEUES`"""
     from .settings import QUEUES
+
     if name not in QUEUES:
-        raise QueueNotFoundError(f'Queue {name} not found, queues={QUEUES.keys()}')
+        raise QueueNotFoundError(f"Queue {name} not found, queues={QUEUES.keys()}")
     queue_settings = QUEUES[name]
     if is_async is None:
-        is_async = queue_settings.get('ASYNC', True)
+        is_async = queue_settings.get("ASYNC", True)
     if default_timeout is None:
-        default_timeout = queue_settings.get('DEFAULT_TIMEOUT')
+        default_timeout = queue_settings.get("DEFAULT_TIMEOUT")
     if connection is None:
         connection = get_connection(queue_settings)
     return DjangoQueue(
-        name,
-        default_timeout=default_timeout,
-        connection=connection,
-        is_async=is_async,
-        autocommit=autocommit,
-        **kwargs
+        name, default_timeout=default_timeout, connection=connection, is_async=is_async, autocommit=autocommit, **kwargs
     )


 def get_all_workers():
     from .settings import QUEUES
+
     workers = set()
     for queue_name in QUEUES:
         connection = get_connection(QUEUES[queue_name])
         try:
             curr_workers = set(DjangoWorker.all(connection=connection))
             workers.update(curr_workers)
-        except redis.ConnectionError as e:
-            logger.error(f'Could not connect for queue {queue_name}: {e}')
+        except (redis.ConnectionError, valkey.ConnectionError) as e:
+            logger.error(f"Could not connect for queue {queue_name}: {e}")
     return workers


 def _queues_share_connection_params(q1_params: Dict, q2_params: Dict):
-    """Check that both queues share the same connection parameters
-    """
+    """Check that both queues share the same connection parameters"""
     return all(
-        ((p 
not in q1_params and p not in q2_params) - or (q1_params.get(p, None) == q2_params.get(p, None))) - for p in _CONNECTION_PARAMS) + ((p not in q1_params and p not in q2_params) or (q1_params.get(p, None) == q2_params.get(p, None))) + for p in _CONNECTION_PARAMS + ) def get_queues(*queue_names, **kwargs) -> List[DjangoQueue]: @@ -143,7 +135,7 @@ def get_queues(*queue_names, **kwargs) -> List[DjangoQueue]: """ from .settings import QUEUES - kwargs['job_class'] = JobExecution + kwargs["job_class"] = JobExecution queue_params = QUEUES[queue_names[0]] queues = [get_queue(queue_names[0], **kwargs)] # perform consistency checks while building return list @@ -151,7 +143,8 @@ def get_queues(*queue_names, **kwargs) -> List[DjangoQueue]: if not _queues_share_connection_params(queue_params, QUEUES[name]): raise ValueError( f'Queues must have the same redis connection. "{name}" and' - f' "{queue_names[0]}" have different connections') + f' "{queue_names[0]}" have different connections' + ) queue = get_queue(name, **kwargs) queues.append(queue) diff --git a/scheduler/rq_classes.py b/scheduler/rq_classes.py index f875dc5..99bc1ec 100644 --- a/scheduler/rq_classes.py +++ b/scheduler/rq_classes.py @@ -2,8 +2,6 @@ import django from django.apps import apps -from redis import Redis -from redis.client import Pipeline from rq import Worker from rq.command import send_stop_job_command from rq.decorators import job @@ -12,21 +10,33 @@ from rq.job import get_current_job # noqa from rq.queue import Queue, logger from rq.registry import ( - DeferredJobRegistry, FailedJobRegistry, FinishedJobRegistry, - ScheduledJobRegistry, StartedJobRegistry, CanceledJobRegistry, BaseRegistry, + DeferredJobRegistry, + FailedJobRegistry, + FinishedJobRegistry, + ScheduledJobRegistry, + StartedJobRegistry, + CanceledJobRegistry, + BaseRegistry, ) from rq.scheduler import RQScheduler from rq.worker import WorkerStatus from scheduler import settings +from scheduler.connection_types import PipelineType, ConnectionType -MODEL_NAMES = ['ScheduledTask', 'RepeatableTask', 'CronTask'] +MODEL_NAMES = ["ScheduledTask", "RepeatableTask", "CronTask"] rq_job_decorator = job ExecutionStatus = JobStatus InvalidJobOperation = InvalidJobOperation +def register_sentry(sentry_dsn, **opts): + from rq.contrib.sentry import register_sentry as rq_register_sentry + + rq_register_sentry(sentry_dsn, **opts) + + def as_text(v: Union[bytes, str]) -> Optional[str]: """Converts a bytes value to a string using `utf-8`. 
@@ -37,11 +47,11 @@ def as_text(v: Union[bytes, str]) -> Optional[str]: if v is None: return None elif isinstance(v, bytes): - return v.decode('utf-8') + return v.decode("utf-8") elif isinstance(v, str): return v else: - raise ValueError('Unknown type %r' % type(v)) + raise ValueError("Unknown type %r" % type(v)) def compact(lst: List[Any]) -> List[Any]: @@ -58,13 +68,15 @@ def __eq__(self, other): @property def is_scheduled_task(self): - return self.meta.get('scheduled_task_id', None) is not None + return self.meta.get("scheduled_task_id", None) is not None def is_execution_of(self, scheduled_job): - return (self.meta.get('task_type', None) == scheduled_job.TASK_TYPE - and self.meta.get('scheduled_task_id', None) == scheduled_job.id) + return ( + self.meta.get("task_type", None) == scheduled_job.TASK_TYPE + and self.meta.get("scheduled_task_id", None) == scheduled_job.id + ) - def stop_execution(self, connection: Redis): + def stop_execution(self, connection: ConnectionType): send_stop_job_command(connection, self.id) @@ -81,22 +93,20 @@ def __init__(self, *args, **kwargs): super(DjangoWorker, self).__init__(*args, **kwargs) def __eq__(self, other): - return (isinstance(other, Worker) - and self.key == other.key - and self.name == other.name) + return isinstance(other, Worker) and self.key == other.key and self.name == other.name def __hash__(self): - return hash((self.name, self.key, ','.join(self.queue_names()))) + return hash((self.name, self.key, ",".join(self.queue_names()))) def __str__(self): return f"{self.name}/{','.join(self.queue_names())}" def _start_scheduler( - self, - burst: bool = False, - logging_level: str = "INFO", - date_format: str = '%H:%M:%S', - log_format: str = '%(asctime)s %(message)s', + self, + burst: bool = False, + logging_level: str = "INFO", + date_format: str = "%H:%M:%S", + log_format: str = "%(asctime)s %(message)s", ) -> None: """Starts the scheduler process. This is specifically designed to be run by the worker when running the `work()` method. 
@@ -126,9 +136,9 @@ def _start_scheduler( self.scheduler.release_locks() else: proc = self.scheduler.start() - self._set_property('scheduler_pid', proc.pid) + self._set_property("scheduler_pid", proc.pid) - def execute_job(self, job: 'Job', queue: 'Queue'): + def execute_job(self, job: "Job", queue: "Queue"): if self.fork_job_execution: super(DjangoWorker, self).execute_job(job, queue) else: @@ -137,17 +147,17 @@ def execute_job(self, job: 'Job', queue: 'Queue'): self.set_state(WorkerStatus.IDLE) def work(self, **kwargs) -> bool: - kwargs.setdefault('with_scheduler', True) + kwargs.setdefault("with_scheduler", True) return super(DjangoWorker, self).work(**kwargs) - def _set_property(self, prop_name: str, val, pipeline: Optional[Pipeline] = None): + def _set_property(self, prop_name: str, val, pipeline: Optional[PipelineType] = None): connection = pipeline if pipeline is not None else self.connection if val is None: connection.hdel(self.key, prop_name) else: connection.hset(self.key, prop_name, val) - def _get_property(self, prop_name: str, pipeline: Optional[Pipeline] = None): + def _get_property(self, prop_name: str, pipeline: Optional[PipelineType] = None): connection = pipeline if pipeline is not None else self.connection return as_text(connection.hget(self.key, prop_name)) @@ -161,12 +171,12 @@ def scheduler_pid(self) -> Optional[int]: class DjangoQueue(Queue): REGISTRIES = dict( - finished='finished_job_registry', - failed='failed_job_registry', - scheduled='scheduled_job_registry', - started='started_job_registry', - deferred='deferred_job_registry', - canceled='canceled_job_registry', + finished="finished_job_registry", + failed="failed_job_registry", + scheduled="scheduled_job_registry", + started="started_job_registry", + deferred="deferred_job_registry", + canceled="canceled_job_registry", ) """ A subclass of RQ's QUEUE that allows jobs to be stored temporarily to be @@ -174,12 +184,12 @@ class DjangoQueue(Queue): """ def __init__(self, *args, **kwargs): - kwargs['job_class'] = JobExecution + kwargs["job_class"] = JobExecution super(DjangoQueue, self).__init__(*args, **kwargs) - def get_registry(self, name: str) -> Union[None, BaseRegistry, 'DjangoQueue']: + def get_registry(self, name: str) -> Union[None, BaseRegistry, "DjangoQueue"]: name = name.lower() - if name == 'queued': + if name == "queued": return self elif name in DjangoQueue.REGISTRIES: return getattr(self, DjangoQueue.REGISTRIES[name]) @@ -191,23 +201,43 @@ def finished_job_registry(self): @property def started_job_registry(self): - return StartedJobRegistry(self.name, self.connection, job_class=JobExecution, ) + return StartedJobRegistry( + self.name, + self.connection, + job_class=JobExecution, + ) @property def deferred_job_registry(self): - return DeferredJobRegistry(self.name, self.connection, job_class=JobExecution, ) + return DeferredJobRegistry( + self.name, + self.connection, + job_class=JobExecution, + ) @property def failed_job_registry(self): - return FailedJobRegistry(self.name, self.connection, job_class=JobExecution, ) + return FailedJobRegistry( + self.name, + self.connection, + job_class=JobExecution, + ) @property def scheduled_job_registry(self): - return ScheduledJobRegistry(self.name, self.connection, job_class=JobExecution, ) + return ScheduledJobRegistry( + self.name, + self.connection, + job_class=JobExecution, + ) @property def canceled_job_registry(self): - return CanceledJobRegistry(self.name, self.connection, job_class=JobExecution, ) + return CanceledJobRegistry( + self.name, + 
self.connection, + job_class=JobExecution, + ) def get_all_job_ids(self) -> List[str]: res = list() @@ -238,13 +268,13 @@ def last_job_id(self): class DjangoScheduler(RQScheduler): def __init__(self, *args, **kwargs): - kwargs.setdefault('interval', settings.SCHEDULER_CONFIG['SCHEDULER_INTERVAL']) + kwargs.setdefault("interval", settings.SCHEDULER_CONFIG.SCHEDULER_INTERVAL) super(DjangoScheduler, self).__init__(*args, **kwargs) @staticmethod def reschedule_all_jobs(): for model_name in MODEL_NAMES: - model = apps.get_model(app_label='scheduler', model_name=model_name) + model = apps.get_model(app_label="scheduler", model_name=model_name) enabled_jobs = model.objects.filter(enabled=True) unscheduled_jobs = filter(lambda j: j.ready_for_schedule(), enabled_jobs) for item in unscheduled_jobs: diff --git a/scheduler/settings.py b/scheduler/settings.py index 57c254a..cf7e3dd 100644 --- a/scheduler/settings.py +++ b/scheduler/settings.py @@ -1,4 +1,7 @@ import logging +from dataclasses import dataclass +from enum import Enum +from typing import Callable from django.conf import settings from django.core.exceptions import ImproperlyConfigured @@ -6,38 +9,57 @@ logger = logging.getLogger(__package__) QUEUES = dict() -SCHEDULER_CONFIG = dict() + + +class Broker(Enum): + REDIS = "redis" + FAKEREDIS = "fakeredis" + VALKEY = "valkey" + + +@dataclass +class SchedulerConfig: + EXECUTIONS_IN_PAGE: int + DEFAULT_RESULT_TTL: int + DEFAULT_TIMEOUT: int + SCHEDULER_INTERVAL: int + BROKER: Broker + TOKEN_VALIDATION_METHOD: Callable[[str], bool] def _token_validation(token: str) -> bool: return False +SCHEDULER_CONFIG: SchedulerConfig = SchedulerConfig( + EXECUTIONS_IN_PAGE=20, + DEFAULT_RESULT_TTL=600, + DEFAULT_TIMEOUT=300, + SCHEDULER_INTERVAL=10, + BROKER=Broker.REDIS, + TOKEN_VALIDATION_METHOD=_token_validation, +) + + def conf_settings(): global QUEUES global SCHEDULER_CONFIG - QUEUES = getattr(settings, 'SCHEDULER_QUEUES', None) + QUEUES = getattr(settings, "SCHEDULER_QUEUES", None) if QUEUES is None: - logger.warning('Configuration using RQ_QUEUES is deprecated. Use SCHEDULER_QUEUES instead') - QUEUES = getattr(settings, 'RQ_QUEUES', None) + logger.warning("Configuration using RQ_QUEUES is deprecated. 
Use SCHEDULER_QUEUES instead") + QUEUES = getattr(settings, "RQ_QUEUES", None) if QUEUES is None: raise ImproperlyConfigured("You have to define SCHEDULER_QUEUES in settings.py") - SCHEDULER_CONFIG = { - 'EXECUTIONS_IN_PAGE': 20, - 'DEFAULT_RESULT_TTL': 600, # 10 minutes - 'DEFAULT_TIMEOUT': 300, # 5 minutes - 'SCHEDULER_INTERVAL': 10, # 10 seconds - 'FAKEREDIS': False, # For testing purposes - 'TOKEN_VALIDATION_METHOD': _token_validation, # Access stats from another application using API tokens - } - user_settings = getattr(settings, 'SCHEDULER_CONFIG', {}) - SCHEDULER_CONFIG.update(user_settings) + user_settings = getattr(settings, "SCHEDULER_CONFIG", {}) + if "FAKEREDIS" in user_settings: + user_settings["BROKER"] = Broker.FAKEREDIS if user_settings["FAKEREDIS"] else Broker.REDIS + user_settings.pop("FAKEREDIS") + for k in user_settings: + if k not in SCHEDULER_CONFIG.__annotations__: + raise ImproperlyConfigured(f"Unknown setting {k} in SCHEDULER_CONFIG") + setattr(SCHEDULER_CONFIG, k, user_settings[k]) conf_settings() - - -def get_config(key: str, default=None): - return SCHEDULER_CONFIG.get(key, None) diff --git a/scheduler/templatetags/scheduler_tags.py b/scheduler/templatetags/scheduler_tags.py index 47c98c1..9b0cafe 100644 --- a/scheduler/templatetags/scheduler_tags.py +++ b/scheduler/templatetags/scheduler_tags.py @@ -13,7 +13,7 @@ def show_func_name(rq_job: JobExecution) -> str: try: res = rq_job.func_name - if res == 'scheduler.tools.run_task': + if res == "scheduler.tools.run_task": task = get_scheduled_task(*rq_job.args) res = task.function_string() return mark_safe(res) @@ -54,7 +54,7 @@ def job_runtime(job: JobExecution): ended_at = job.ended_at if ended_at: runtime = job.ended_at - job.started_at - return f'{int(runtime.microseconds / 1000)}ms' + return f"{int(runtime.microseconds / 1000)}ms" elif job.started_at: return "Still running" else: diff --git a/scheduler/tests/jobs.py b/scheduler/tests/jobs.py index 145e15e..a6b0871 100644 --- a/scheduler/tests/jobs.py +++ b/scheduler/tests/jobs.py @@ -14,15 +14,15 @@ def arg_callable(): def test_args_kwargs(*args, **kwargs): func = "test_args_kwargs({})" args_list = [repr(arg) for arg in args] - kwargs_list = [f'{k}={v}' for (k, v) in kwargs.items()] - return func.format(', '.join(args_list + kwargs_list)) + kwargs_list = [f"{k}={v}" for (k, v) in kwargs.items()] + return func.format(", ".join(args_list + kwargs_list)) def long_job(): sleep(10) -test_non_callable = 'I am a teapot' +test_non_callable = "I am a teapot" def failing_job(): diff --git a/scheduler/tests/test_cron_task.py b/scheduler/tests/test_cron_task.py index abcbedc..a64f9b7 100644 --- a/scheduler/tests/test_cron_task.py +++ b/scheduler/tests/test_cron_task.py @@ -13,21 +13,23 @@ class TestCronTask(BaseTestCases.TestBaseTask): def test_clean(self): task = task_factory(CronTask) - task.cron_string = '* * * * *' + task.cron_string = "* * * * *" task.queue = list(settings.QUEUES)[0] - task.callable = 'scheduler.tests.jobs.test_job' + task.callable = "scheduler.tests.jobs.test_job" self.assertIsNone(task.clean()) def test_clean_cron_string_invalid(self): task = task_factory(CronTask) - task.cron_string = 'not-a-cron-string' + task.cron_string = "not-a-cron-string" task.queue = list(settings.QUEUES)[0] - task.callable = 'scheduler.tests.jobs.test_job' + task.callable = "scheduler.tests.jobs.test_job" with self.assertRaises(ValidationError): task.clean_cron_string() def test_check_rescheduled_after_execution(self): - task = task_factory(CronTask, ) + task = 
task_factory( + CronTask, + ) queue = task.rqueue first_run_id = task.job_id entry = queue.fetch_job(first_run_id) @@ -61,14 +63,17 @@ def test_cron_task_enqueuing_jobs(self): queue = get_queue() prev_queued = len(queue.scheduled_job_registry) prev_finished = len(queue.finished_job_registry) - task = task_factory(CronTask, callable_name='scheduler.tests.jobs.enqueue_jobs') + task = task_factory(CronTask, callable_name="scheduler.tests.jobs.enqueue_jobs") self.assertEqual(prev_queued + 1, len(queue.scheduled_job_registry)) first_run_id = task.job_id entry = queue.fetch_job(first_run_id) queue.run_sync(entry) self.assertEqual(20, len(queue)) self.assertEqual(prev_finished + 1, len(queue.finished_job_registry)) - worker = create_worker('default', fork_job_execution=False, ) + worker = create_worker( + "default", + fork_job_execution=False, + ) worker.work(burst=True) self.assertEqual(prev_finished + 21, len(queue.finished_job_registry)) worker.refresh() diff --git a/scheduler/tests/test_internals.py b/scheduler/tests/test_internals.py index 1df9acc..a8cb491 100644 --- a/scheduler/tests/test_internals.py +++ b/scheduler/tests/test_internals.py @@ -14,4 +14,4 @@ def test_get_scheduled_job(self): with self.assertRaises(ValueError): get_scheduled_task(task.TASK_TYPE, task.id + 1) with self.assertRaises(ValueError): - get_scheduled_task('UNKNOWN_JOBTYPE', task.id) + get_scheduled_task("UNKNOWN_JOBTYPE", task.id) diff --git a/scheduler/tests/test_job_arg_models.py b/scheduler/tests/test_job_arg_models.py index 1100054..453778a 100644 --- a/scheduler/tests/test_job_arg_models.py +++ b/scheduler/tests/test_job_arg_models.py @@ -11,37 +11,53 @@ class TestAllTaskArg(TestCase): TaskArgClass = TaskArg def test_bad_arg_type(self): - arg = taskarg_factory(self.TaskArgClass, arg_type='bad_arg_type', val='something') + arg = taskarg_factory(self.TaskArgClass, arg_type="bad_arg_type", val="something") with self.assertRaises(ValidationError): arg.clean() def test_clean_one_value_invalid_str_int(self): - arg = taskarg_factory(self.TaskArgClass, arg_type='int', val='not blank', ) + arg = taskarg_factory( + self.TaskArgClass, + arg_type="int", + val="not blank", + ) with self.assertRaises(ValidationError): arg.clean() def test_clean_callable_invalid(self): - arg = taskarg_factory(self.TaskArgClass, arg_type='callable', val='bad_callable', ) + arg = taskarg_factory( + self.TaskArgClass, + arg_type="callable", + val="bad_callable", + ) with self.assertRaises(ValidationError): arg.clean() def test_clean_datetime_invalid(self): - arg = taskarg_factory(self.TaskArgClass, arg_type='datetime', val='bad datetime', ) + arg = taskarg_factory( + self.TaskArgClass, + arg_type="datetime", + val="bad datetime", + ) with self.assertRaises(ValidationError): arg.clean() def test_clean_bool_invalid(self): - arg = taskarg_factory(self.TaskArgClass, arg_type='bool', val='bad bool', ) + arg = taskarg_factory( + self.TaskArgClass, + arg_type="bool", + val="bad bool", + ) with self.assertRaises(ValidationError): arg.clean() def test_clean_int_invalid(self): - arg = taskarg_factory(self.TaskArgClass, arg_type='int', val='str') + arg = taskarg_factory(self.TaskArgClass, arg_type="int", val="str") with self.assertRaises(ValidationError): arg.clean() def test_str_clean(self): - arg = taskarg_factory(self.TaskArgClass, val='something') + arg = taskarg_factory(self.TaskArgClass, val="something") self.assertIsNone(arg.clean()) @@ -50,52 +66,53 @@ class TestTaskArg(TestCase): def test_str(self): arg = taskarg_factory(self.TaskArgClass) - 
self.assertEqual( - f'TaskArg[arg_type={arg.arg_type},value={arg.value()}]', str(arg)) + self.assertEqual(f"TaskArg[arg_type={arg.arg_type},value={arg.value()}]", str(arg)) def test_value(self): - arg = taskarg_factory(self.TaskArgClass, arg_type='str', val='something') - self.assertEqual(arg.value(), 'something') + arg = taskarg_factory(self.TaskArgClass, arg_type="str", val="something") + self.assertEqual(arg.value(), "something") def test__str__str_val(self): - arg = taskarg_factory(self.TaskArgClass, arg_type='str', val='something') - self.assertEqual('something', str(arg.value())) + arg = taskarg_factory(self.TaskArgClass, arg_type="str", val="something") + self.assertEqual("something", str(arg.value())) def test__str__int_val(self): - arg = taskarg_factory(self.TaskArgClass, arg_type='int', val='1') - self.assertEqual('1', str(arg.value())) + arg = taskarg_factory(self.TaskArgClass, arg_type="int", val="1") + self.assertEqual("1", str(arg.value())) def test__str__datetime_val(self): _time = timezone.now() - arg = taskarg_factory(self.TaskArgClass, arg_type='datetime', val=str(_time)) + arg = taskarg_factory(self.TaskArgClass, arg_type="datetime", val=str(_time)) self.assertEqual(str(_time), str(arg.value())) def test__str__bool_val(self): - arg = taskarg_factory(self.TaskArgClass, arg_type='bool', val='True') - self.assertEqual('True', str(arg.value())) + arg = taskarg_factory(self.TaskArgClass, arg_type="bool", val="True") + self.assertEqual("True", str(arg.value())) def test__repr__str_val(self): - arg = taskarg_factory(self.TaskArgClass, arg_type='str', val='something') + arg = taskarg_factory(self.TaskArgClass, arg_type="str", val="something") self.assertEqual("'something'", repr(arg.value())) def test__repr__int_val(self): - arg = taskarg_factory(self.TaskArgClass, arg_type='int', val='1') - self.assertEqual('1', repr(arg.value())) + arg = taskarg_factory(self.TaskArgClass, arg_type="int", val="1") + self.assertEqual("1", repr(arg.value())) def test__repr__datetime_val(self): _time = timezone.now() - arg = taskarg_factory(self.TaskArgClass, arg_type='datetime', val=str(_time)) + arg = taskarg_factory(self.TaskArgClass, arg_type="datetime", val=str(_time)) self.assertEqual(repr(_time), repr(arg.value())) def test__repr__bool_val(self): - arg = taskarg_factory(self.TaskArgClass, arg_type='bool', val='False') - self.assertEqual('False', repr(arg.value())) + arg = taskarg_factory(self.TaskArgClass, arg_type="bool", val="False") + self.assertEqual("False", repr(arg.value())) def test_callable_arg_type__clean(self): method = arg_callable arg = taskarg_factory( - self.TaskArgClass, arg_type='callable', - val=f'{method.__module__}.{method.__name__}', ) + self.TaskArgClass, + arg_type="callable", + val=f"{method.__module__}.{method.__name__}", + ) self.assertIsNone(arg.clean()) self.assertEqual(1, arg.value()) self.assertEqual(2, arg.value()) @@ -106,43 +123,42 @@ class TestTaskKwarg(TestAllTaskArg): def test_str(self): arg = taskarg_factory(self.TaskArgClass) - self.assertEqual( - f'TaskKwarg[key={arg.key},arg_type={arg.arg_type},value={arg.val}]', str(arg)) + self.assertEqual(f"TaskKwarg[key={arg.key},arg_type={arg.arg_type},value={arg.val}]", str(arg)) def test_value(self): - kwarg = taskarg_factory(self.TaskArgClass, key='key', arg_type='str', val='value') - self.assertEqual(kwarg.value(), ('key', 'value')) + kwarg = taskarg_factory(self.TaskArgClass, key="key", arg_type="str", val="value") + self.assertEqual(kwarg.value(), ("key", "value")) def test__str__str_val(self): - kwarg = 
taskarg_factory(self.TaskArgClass, key='key', arg_type='str', val='something') - self.assertEqual('TaskKwarg[key=key,arg_type=str,value=something]', str(kwarg)) + kwarg = taskarg_factory(self.TaskArgClass, key="key", arg_type="str", val="something") + self.assertEqual("TaskKwarg[key=key,arg_type=str,value=something]", str(kwarg)) def test__str__int_val(self): - kwarg = taskarg_factory(self.TaskArgClass, key='key', arg_type='int', val=1) - self.assertEqual('TaskKwarg[key=key,arg_type=int,value=1]', str(kwarg)) + kwarg = taskarg_factory(self.TaskArgClass, key="key", arg_type="int", val=1) + self.assertEqual("TaskKwarg[key=key,arg_type=int,value=1]", str(kwarg)) def test__str__datetime_val(self): _time = timezone.now() - kwarg = taskarg_factory(self.TaskArgClass, key='key', arg_type='datetime', val=str(_time)) - self.assertEqual(f'TaskKwarg[key=key,arg_type=datetime,value={_time}]', str(kwarg)) + kwarg = taskarg_factory(self.TaskArgClass, key="key", arg_type="datetime", val=str(_time)) + self.assertEqual(f"TaskKwarg[key=key,arg_type=datetime,value={_time}]", str(kwarg)) def test__str__bool_val(self): - kwarg = taskarg_factory(self.TaskArgClass, key='key', arg_type='bool', val='True') - self.assertEqual('TaskKwarg[key=key,arg_type=bool,value=True]', str(kwarg)) + kwarg = taskarg_factory(self.TaskArgClass, key="key", arg_type="bool", val="True") + self.assertEqual("TaskKwarg[key=key,arg_type=bool,value=True]", str(kwarg)) def test__repr__str_val(self): - kwarg = taskarg_factory(self.TaskArgClass, key='key', arg_type='str', val='something') + kwarg = taskarg_factory(self.TaskArgClass, key="key", arg_type="str", val="something") self.assertEqual("('key', 'something')", repr(kwarg.value())) def test__repr__int_val(self): - kwarg = taskarg_factory(self.TaskArgClass, key='key', arg_type='int', val='1') + kwarg = taskarg_factory(self.TaskArgClass, key="key", arg_type="int", val="1") self.assertEqual("('key', 1)", repr(kwarg.value())) def test__repr__datetime_val(self): _time = timezone.now() - kwarg = taskarg_factory(self.TaskArgClass, key='key', arg_type='datetime', val=str(_time)) + kwarg = taskarg_factory(self.TaskArgClass, key="key", arg_type="datetime", val=str(_time)) self.assertEqual("('key', {})".format(repr(_time)), repr(kwarg.value())) def test__repr__bool_val(self): - kwarg = taskarg_factory(self.TaskArgClass, key='key', arg_type='bool', val='True') + kwarg = taskarg_factory(self.TaskArgClass, key="key", arg_type="bool", val="True") self.assertEqual("('key', True)", repr(kwarg.value())) diff --git a/scheduler/tests/test_job_decorator.py b/scheduler/tests/test_job_decorator.py index 0910664..7b78554 100644 --- a/scheduler/tests/test_job_decorator.py +++ b/scheduler/tests/test_job_decorator.py @@ -13,7 +13,7 @@ def test_job(): return 1 + 1 -@job('django_tasks_scheduler_test') +@job("django_tasks_scheduler_test") def test_job_diff_queue(): time.sleep(1) return 1 + 1 @@ -32,40 +32,34 @@ def test_job_result_ttl(): class JobDecoratorTest(TestCase): def setUp(self) -> None: - get_queue('default').connection.flushall() + get_queue("default").connection.flushall() def test_job_decorator_no_params(self): test_job.delay() config = settings.SCHEDULER_CONFIG - self._assert_job_with_func_and_props( - 'default', test_job, config.get('DEFAULT_RESULT_TTL'), config.get('DEFAULT_TIMEOUT')) + self._assert_job_with_func_and_props("default", test_job, config.DEFAULT_RESULT_TTL, config.DEFAULT_TIMEOUT) def test_job_decorator_timeout(self): test_job_timeout.delay() config = settings.SCHEDULER_CONFIG - 
self._assert_job_with_func_and_props(
-            'default', test_job_timeout, config.get('DEFAULT_RESULT_TTL'), 1)
+        self._assert_job_with_func_and_props("default", test_job_timeout, config.DEFAULT_RESULT_TTL, 1)

     def test_job_decorator_result_ttl(self):
         test_job_result_ttl.delay()
         config = settings.SCHEDULER_CONFIG
-        self._assert_job_with_func_and_props(
-            'default', test_job_result_ttl, 1, config.get('DEFAULT_TIMEOUT'))
+        self._assert_job_with_func_and_props("default", test_job_result_ttl, 1, config.DEFAULT_TIMEOUT)

     def test_job_decorator_different_queue(self):
         test_job_diff_queue.delay()
         config = settings.SCHEDULER_CONFIG
         self._assert_job_with_func_and_props(
-            'django_tasks_scheduler_test',
+            "django_tasks_scheduler_test",
             test_job_diff_queue,
-            config.get('DEFAULT_RESULT_TTL'),
-            config.get('DEFAULT_TIMEOUT'))
-
-    def _assert_job_with_func_and_props(
-            self, queue_name,
-            expected_func,
-            expected_result_ttl,
-            expected_timeout):
+            config.DEFAULT_RESULT_TTL,
+            config.DEFAULT_TIMEOUT,
+        )
+
+    def _assert_job_with_func_and_props(self, queue_name, expected_func, expected_result_ttl, expected_timeout):
         queue = get_queue(queue_name)
         jobs = queue.get_jobs()
         self.assertEqual(1, len(jobs))
@@ -77,7 +71,8 @@ def _assert_job_with_func_and_props(

     def test_job_decorator_bad_queue(self):
         with self.assertRaises(QueueNotFoundError):
-            @job('bad-queue')
+
+            @job("bad-queue")
             def test_job_bad_queue():
                 time.sleep(1)
                 return 1 + 1
diff --git a/scheduler/tests/test_mgmt_cmds.py b/scheduler/tests/test_mgmt_cmds.py
index 4b99193..6257935 100644
--- a/scheduler/tests/test_mgmt_cmds.py
+++ b/scheduler/tests/test_mgmt_cmds.py
@@ -19,7 +19,7 @@ class RqworkerTestCase(TestCase):

     def test_rqworker__no_queues_params(self):
-        queue = get_queue('default')
+        queue = get_queue("default")

         # enqueue some jobs that will fail
         jobs = []
@@ -30,14 +30,14 @@ def test_rqworker__no_queues_params(self):
             job_ids.append(job.id)

         # Create a worker to execute these jobs
-        call_command('rqworker', fork_job_execution=False, burst=True)
+        call_command("rqworker", fork_job_execution=False, burst=True)

         # check if all jobs are really failed
         for job in jobs:
             self.assertTrue(job.is_failed)

     def test_rqworker__job_class_param__green(self):
-        queue = get_queue('default')
+        queue = get_queue("default")

         # enqueue some jobs that will fail
         jobs = []
@@ -48,14 +48,16 @@ def test_rqworker__job_class_param__green(self):
             job_ids.append(job.id)

         # Create a worker to execute these jobs
-        call_command('rqworker', '--job-class', 'scheduler.rq_classes.JobExecution', fork_job_execution=False, burst=True)
+        call_command(
+            "rqworker", "--job-class", "scheduler.rq_classes.JobExecution", fork_job_execution=False, burst=True
+        )

         # check if all jobs are really failed
         for job in jobs:
             self.assertTrue(job.is_failed)

     def test_rqworker__bad_job_class__fail(self):
-        queue = get_queue('default')
+        queue = get_queue("default")

         # enqueue some jobs that will fail
         jobs = []
@@ -67,10 +69,10 @@ def test_rqworker__bad_job_class__fail(self):

         # Create a worker to execute these jobs
         with self.assertRaises(ImportError):
-            call_command('rqworker', '--job-class', 'rq.badclass', fork_job_execution=False, burst=True)
+            call_command("rqworker", "--job-class", "rq.badclass", fork_job_execution=False, burst=True)

     def test_rqworker__run_jobs(self):
-        queue = get_queue('default')
+        queue = get_queue("default")

         # enqueue some jobs that will fail
         jobs = []
@@ -81,15 +83,15 @@ def test_rqworker__run_jobs(self):
             job_ids.append(job.id)

         # Create a worker to execute these jobs
-        call_command('rqworker', 'default', fork_job_execution=False, burst=True)
+        call_command("rqworker", "default", fork_job_execution=False, burst=True)

         # check if all jobs are really failed
         for job in jobs:
             self.assertTrue(job.is_failed)

     def test_rqworker__worker_with_two_queues(self):
-        queue = get_queue('default')
-        queue2 = get_queue('django_tasks_scheduler_test')
+        queue = get_queue("default")
+        queue2 = get_queue("django_tasks_scheduler_test")

         # enqueue some jobs that will fail
         jobs = []
@@ -103,21 +105,21 @@ def test_rqworker__worker_with_two_queues(self):
             job_ids.append(job.id)

         # Create a worker to execute these jobs
-        call_command('rqworker', 'default', 'django_tasks_scheduler_test', fork_job_execution=False, burst=True)
+        call_command("rqworker", "default", "django_tasks_scheduler_test", fork_job_execution=False, burst=True)

         # check if all jobs are really failed
         for job in jobs:
             self.assertTrue(job.is_failed)

     def test_rqworker__worker_with_one_queue__does_not_perform_other_queue_job(self):
-        queue = get_queue('default')
-        queue2 = get_queue('django_tasks_scheduler_test')
+        queue = get_queue("default")
+        queue2 = get_queue("django_tasks_scheduler_test")

         job = queue.enqueue(failing_job)
         other_job = queue2.enqueue(failing_job)
         # Create a worker to execute these jobs
-        call_command('rqworker', 'default', fork_job_execution=False, burst=True)
+        call_command("rqworker", "default", fork_job_execution=False, burst=True)
         # assert
         self.assertTrue(job.is_failed)
         self.assertTrue(other_job.is_queued)
@@ -125,34 +127,34 @@ def test_rqworker__worker_with_one_queue__does_not_perform_other_queue_job(self)

 class RqstatsTest(TestCase):
     def test_rqstats__does_not_fail(self):
-        call_command('rqstats', '-j')
-        call_command('rqstats', '-y')
-        call_command('rqstats')
+        call_command("rqstats", "-j")
+        call_command("rqstats", "-y")
+        call_command("rqstats")


 class DeleteFailedExecutionsTest(BaseTestCase):
     def test_delete_failed_executions__delete_jobs(self):
-        queue = get_queue('default')
-        call_command('delete_failed_executions', queue='default')
+        queue = get_queue("default")
+        call_command("delete_failed_executions", queue="default")
         queue.enqueue(failing_job)
-        worker = create_worker('default')
+        worker = create_worker("default")
         worker.work(burst=True)
         self.assertEqual(1, len(queue.failed_job_registry))
-        call_command('delete_failed_executions', queue='default')
+        call_command("delete_failed_executions", queue="default")
         self.assertEqual(0, len(queue.failed_job_registry))


 class RunJobTest(TestCase):
     def test_run_job__should_schedule_job(self):
-        queue = get_queue('default')
+        queue = get_queue("default")
         queue.empty()
-        func_name = f'{test_job.__module__}.{test_job.__name__}'
+        func_name = f"{test_job.__module__}.{test_job.__name__}"
         # act
-        call_command('run_job', func_name, queue='default')
+        call_command("run_job", func_name, queue="default")
         # assert
         job_list = queue.get_jobs()
         self.assertEqual(1, len(job_list))
-        self.assertEqual(func_name + '()', job_list[0].get_call_string())
+        self.assertEqual(func_name + "()", job_list[0].get_call_string())


 class ExportTest(TestCase):
@@ -168,7 +170,7 @@ def test_export__should_export_job(self):
             jobs.append(task_factory(RepeatableTask, enabled=True))

         # act
-        call_command('export', filename=self.tmpfile.name)
+        call_command("export", filename=self.tmpfile.name)
         # assert
         result = json.load(self.tmpfile)
         self.assertEqual(len(jobs), len(result))
@@ -181,7 +183,7 @@ def test_export__should_export_enabled_jobs_only(self):
             jobs.append(task_factory(RepeatableTask, enabled=False))

         # act
-        call_command('export', filename=self.tmpfile.name, enabled=True)
+        call_command("export", filename=self.tmpfile.name, enabled=True)
         # assert
         result = json.load(self.tmpfile)
         self.assertEqual(len(jobs) - 1, len(result))
@@ -193,9 +195,9 @@ def test_export__should_export_job_yaml_without_yaml_lib(self):
             jobs.append(task_factory(RepeatableTask, enabled=True))

         # act
-        with mock.patch.dict('sys.modules', {'yaml': None}):
+        with mock.patch.dict("sys.modules", {"yaml": None}):
             with self.assertRaises(SystemExit) as cm:
-                call_command('export', filename=self.tmpfile.name, format='yaml')
+                call_command("export", filename=self.tmpfile.name, format="yaml")
             self.assertEqual(cm.exception.code, 1)

     def test_export__should_export_job_yaml_green(self):
@@ -204,7 +206,7 @@ def test_export__should_export_job_yaml_green(self):
             jobs.append(task_factory(RepeatableTask, enabled=True))

         # act
-        call_command('export', filename=self.tmpfile.name, format='yaml')
+        call_command("export", filename=self.tmpfile.name, format="yaml")
         # assert
         result = yaml.load(self.tmpfile, yaml.SafeLoader)
         self.assertEqual(len(jobs), len(result))
@@ -214,7 +216,7 @@ def test_export__should_export_job_yaml_green(self):

 class ImportTest(TestCase):
     def setUp(self) -> None:
-        self.tmpfile = tempfile.NamedTemporaryFile(mode='w')
+        self.tmpfile = tempfile.NamedTemporaryFile(mode="w")

     def tearDown(self) -> None:
         os.remove(self.tmpfile.name)
@@ -227,11 +229,11 @@ def test_import__should_schedule_job(self):
         self.tmpfile.write(res)
         self.tmpfile.flush()
         # act
-        call_command('import', filename=self.tmpfile.name)
+        call_command("import", filename=self.tmpfile.name)
         # assert
         self.assertEqual(1, ScheduledTask.objects.count())
         db_job = ScheduledTask.objects.first()
-        attrs = ['name', 'queue', 'callable', 'enabled', 'timeout']
+        attrs = ["name", "queue", "callable", "enabled", "timeout"]
         for attr in attrs:
             self.assertEqual(getattr(jobs[0], attr), getattr(db_job, attr))
@@ -243,11 +245,11 @@ def test_import__should_schedule_job_yaml(self):
         self.tmpfile.write(res)
         self.tmpfile.flush()
         # act
-        call_command('import', filename=self.tmpfile.name, format='yaml')
+        call_command("import", filename=self.tmpfile.name, format="yaml")
         # assert
         self.assertEqual(1, ScheduledTask.objects.count())
         db_job = ScheduledTask.objects.first()
-        attrs = ['name', 'queue', 'callable', 'enabled', 'timeout']
+        attrs = ["name", "queue", "callable", "enabled", "timeout"]
         for attr in attrs:
             self.assertEqual(getattr(jobs[0], attr), getattr(db_job, attr))
@@ -259,9 +261,9 @@ def test_import__should_schedule_job_yaml_without_yaml_lib(self):
         self.tmpfile.write(res)
         self.tmpfile.flush()
         # act
-        with mock.patch.dict('sys.modules', {'yaml': None}):
+        with mock.patch.dict("sys.modules", {"yaml": None}):
             with self.assertRaises(SystemExit) as cm:
-                call_command('import', filename=self.tmpfile.name, format='yaml')
+                call_command("import", filename=self.tmpfile.name, format="yaml")
             self.assertEqual(cm.exception.code, 1)

     def test_import__should_schedule_job_reset(self):
@@ -274,16 +276,20 @@ def test_import__should_schedule_job_reset(self):
         self.tmpfile.write(res)
         self.tmpfile.flush()
         # act
-        call_command('import', filename=self.tmpfile.name, reset=True, )
+        call_command(
+            "import",
+            filename=self.tmpfile.name,
+            reset=True,
+        )
         # assert
         self.assertEqual(1, ScheduledTask.objects.count())
         db_job = ScheduledTask.objects.first()
-        attrs = ['name', 'queue', 'callable', 'enabled', 'timeout']
+        attrs = ["name", "queue", "callable", "enabled", "timeout"]
         for attr in attrs:
             self.assertEqual(getattr(jobs[0], attr), getattr(db_job, attr))
         self.assertEqual(1, RepeatableTask.objects.count())
         db_job = RepeatableTask.objects.first()
-        attrs = ['name', 'queue', 'callable', 'enabled', 'timeout']
+        attrs = ["name", "queue", "callable", "enabled", "timeout"]
         for attr in attrs:
             self.assertEqual(getattr(jobs[1], attr), getattr(db_job, attr))
@@ -295,12 +301,16 @@ def test_import__should_schedule_job_update_existing(self):
         self.tmpfile.write(res)
         self.tmpfile.flush()
         # act
-        call_command('import', filename=self.tmpfile.name, update=True, )
+        call_command(
+            "import",
+            filename=self.tmpfile.name,
+            update=True,
+        )
         # assert
         self.assertEqual(2, ScheduledTask.objects.count())
         db_job = ScheduledTask.objects.get(name=jobs[0].name)
         self.assertNotEqual(jobs[0].id, db_job.id)
-        attrs = ['name', 'queue', 'callable', 'enabled', 'timeout']
+        attrs = ["name", "queue", "callable", "enabled", "timeout"]
         for attr in attrs:
             self.assertEqual(getattr(jobs[0], attr), getattr(db_job, attr))
@@ -312,10 +322,13 @@ def test_import__should_schedule_job_without_update_existing(self):
         self.tmpfile.write(res)
         self.tmpfile.flush()
         # act
-        call_command('import', filename=self.tmpfile.name, )
+        call_command(
+            "import",
+            filename=self.tmpfile.name,
+        )
         # assert
         self.assertEqual(2, ScheduledTask.objects.count())
         db_job = ScheduledTask.objects.get(name=jobs[0].name)
-        attrs = ['id', 'name', 'queue', 'callable', 'enabled', 'timeout']
+        attrs = ["id", "name", "queue", "callable", "enabled", "timeout"]
         for attr in attrs:
             self.assertEqual(getattr(jobs[0], attr), getattr(db_job, attr))
diff --git a/scheduler/tests/test_models.py b/scheduler/tests/test_models.py
index a21d624..2a1873d 100644
--- a/scheduler/tests/test_models.py
+++ b/scheduler/tests/test_models.py
@@ -13,8 +13,12 @@ from scheduler.tools import run_task, create_worker
 from .
import jobs from .testtools import ( - task_factory, taskarg_factory, _get_job_from_scheduled_registry, - SchedulerBaseCase, _get_executions) + task_factory, + taskarg_factory, + _get_job_from_scheduled_registry, + SchedulerBaseCase, + _get_executions, +) from ..queues import get_queue @@ -29,7 +33,7 @@ def assert_has_execution_with_status(task, status): for job in job_list: if job[1] == status: return - raise AssertionError(f'{task} does not have an execution with status {status}: {job_list}') + raise AssertionError(f"{task} does not have an execution with status {status}: {job_list}") class BaseTestCases: @@ -38,24 +42,24 @@ class TestBaseTask(SchedulerBaseCase): def test_callable_func(self): task = task_factory(self.TaskModelClass) - task.callable = 'scheduler.tests.jobs.test_job' + task.callable = "scheduler.tests.jobs.test_job" func = task.callable_func() self.assertEqual(jobs.test_job, func) def test_callable_func_not_callable(self): task = task_factory(self.TaskModelClass) - task.callable = 'scheduler.tests.jobs.test_non_callable' + task.callable = "scheduler.tests.jobs.test_non_callable" with self.assertRaises(TypeError): task.callable_func() def test_clean_callable(self): task = task_factory(self.TaskModelClass) - task.callable = 'scheduler.tests.jobs.test_job' + task.callable = "scheduler.tests.jobs.test_job" self.assertIsNone(task.clean_callable()) def test_clean_callable_invalid(self): task = task_factory(self.TaskModelClass) - task.callable = 'scheduler.tests.jobs.test_non_callable' + task.callable = "scheduler.tests.jobs.test_non_callable" with self.assertRaises(ValidationError): task.clean_callable() @@ -67,8 +71,8 @@ def test_clean_queue(self): def test_clean_queue_invalid(self): task = task_factory(self.TaskModelClass) - task.queue = 'xxxxxx' - task.callable = 'scheduler.tests.jobs.test_job' + task.queue = "xxxxxx" + task.callable = "scheduler.tests.jobs.test_job" with self.assertRaises(ValidationError): task.clean() @@ -76,25 +80,27 @@ def test_clean_queue_invalid(self): def test_clean_base(self): task = task_factory(self.TaskModelClass) task.queue = list(settings.QUEUES)[0] - task.callable = 'scheduler.tests.jobs.test_job' + task.callable = "scheduler.tests.jobs.test_job" self.assertIsNone(task.clean()) def test_clean_invalid_callable(self): task = task_factory(self.TaskModelClass) task.queue = list(settings.QUEUES)[0] - task.callable = 'scheduler.tests.jobs.test_non_callable' + task.callable = "scheduler.tests.jobs.test_non_callable" with self.assertRaises(ValidationError): task.clean() def test_clean_invalid_queue(self): task = task_factory(self.TaskModelClass) - task.queue = 'xxxxxx' - task.callable = 'scheduler.tests.jobs.test_job' + task.queue = "xxxxxx" + task.callable = "scheduler.tests.jobs.test_job" with self.assertRaises(ValidationError): task.clean() def test_is_schedulable_already_scheduled(self): - task = task_factory(self.TaskModelClass, ) + task = task_factory( + self.TaskModelClass, + ) task.schedule() self.assertTrue(task.is_scheduled()) @@ -104,7 +110,9 @@ def test_is_schedulable_disabled(self): self.assertFalse(task.enabled) def test_schedule(self): - task = task_factory(self.TaskModelClass, ) + task = task_factory( + self.TaskModelClass, + ) self.assertTrue(task.is_scheduled()) self.assertIsNotNone(task.job_id) @@ -114,7 +122,9 @@ def test_unschedulable(self): self.assertIsNone(task.job_id) def test_unschedule(self): - task = task_factory(self.TaskModelClass, ) + task = task_factory( + self.TaskModelClass, + ) self.assertTrue(task.unschedule()) 
self.assertIsNone(task.job_id) @@ -124,7 +134,9 @@ def test_unschedule_not_scheduled(self): self.assertIsNone(task.job_id) def test_save_enabled(self): - task = task_factory(self.TaskModelClass, ) + task = task_factory( + self.TaskModelClass, + ) self.assertIsNotNone(task.job_id) def test_save_disabled(self): @@ -133,7 +145,9 @@ def test_save_disabled(self): self.assertIsNone(task.job_id) def test_save_and_schedule(self): - task = task_factory(self.TaskModelClass, ) + task = task_factory( + self.TaskModelClass, + ) self.assertIsNotNone(task.job_id) self.assertTrue(task.is_scheduled()) @@ -145,7 +159,9 @@ def test_schedule2(self): self.assertFalse(task.schedule()) def test_delete_and_unschedule(self): - task = task_factory(self.TaskModelClass, ) + task = task_factory( + self.TaskModelClass, + ) self.assertIsNotNone(task.job_id) self.assertTrue(task.is_scheduled()) task.delete() @@ -159,7 +175,7 @@ def test_job_create(self): def test_str(self): name = "test" task = task_factory(self.TaskModelClass, name=name) - self.assertEqual(f'{self.TaskModelClass.__name__}[{name}={task.callable}()]', str(task)) + self.assertEqual(f"{self.TaskModelClass.__name__}[{name}={task.callable}()]", str(task)) def test_callable_passthrough(self): task = task_factory(self.TaskModelClass) @@ -181,82 +197,97 @@ def test_at_front_passthrough(self): self.assertIn(task.job_id, jobs_to_schedule) def test_callable_result(self): - task = task_factory(self.TaskModelClass, ) + task = task_factory( + self.TaskModelClass, + ) entry = _get_job_from_scheduled_registry(task) self.assertEqual(entry.perform(), 2) def test_callable_empty_args_and_kwargs(self): - task = task_factory(self.TaskModelClass, callable='scheduler.tests.jobs.test_args_kwargs') + task = task_factory(self.TaskModelClass, callable="scheduler.tests.jobs.test_args_kwargs") entry = _get_job_from_scheduled_registry(task) - self.assertEqual(entry.perform(), 'test_args_kwargs()') + self.assertEqual(entry.perform(), "test_args_kwargs()") def test_delete_args(self): - task = task_factory(self.TaskModelClass, ) - arg = taskarg_factory(TaskArg, val='one', content_object=task) + task = task_factory( + self.TaskModelClass, + ) + arg = taskarg_factory(TaskArg, val="one", content_object=task) self.assertEqual(1, task.callable_args.count()) arg.delete() self.assertEqual(0, task.callable_args.count()) def test_delete_kwargs(self): - task = task_factory(self.TaskModelClass, ) - kwarg = taskarg_factory(TaskKwarg, key='key1', arg_type='str', val='one', content_object=task) + task = task_factory( + self.TaskModelClass, + ) + kwarg = taskarg_factory(TaskKwarg, key="key1", arg_type="str", val="one", content_object=task) self.assertEqual(1, task.callable_kwargs.count()) kwarg.delete() self.assertEqual(0, task.callable_kwargs.count()) def test_parse_args(self): - task = task_factory(self.TaskModelClass, ) + task = task_factory( + self.TaskModelClass, + ) date = timezone.now() - taskarg_factory(TaskArg, val='one', content_object=task) - taskarg_factory(TaskArg, arg_type='int', val=2, content_object=task) - taskarg_factory(TaskArg, arg_type='bool', val=True, content_object=task) - taskarg_factory(TaskArg, arg_type='bool', val=False, content_object=task) - taskarg_factory(TaskArg, arg_type='datetime', val=date, content_object=task) - self.assertEqual(task.parse_args(), ['one', 2, True, False, date]) + taskarg_factory(TaskArg, val="one", content_object=task) + taskarg_factory(TaskArg, arg_type="int", val=2, content_object=task) + taskarg_factory(TaskArg, arg_type="bool", val=True, 
content_object=task) + taskarg_factory(TaskArg, arg_type="bool", val=False, content_object=task) + taskarg_factory(TaskArg, arg_type="datetime", val=date, content_object=task) + self.assertEqual(task.parse_args(), ["one", 2, True, False, date]) def test_parse_kwargs(self): - job = task_factory(self.TaskModelClass, ) + job = task_factory( + self.TaskModelClass, + ) date = timezone.now() - taskarg_factory(TaskKwarg, key='key1', arg_type='str', val='one', content_object=job) - taskarg_factory(TaskKwarg, key='key2', arg_type='int', val=2, content_object=job) - taskarg_factory(TaskKwarg, key='key3', arg_type='bool', val=True, content_object=job) - taskarg_factory(TaskKwarg, key='key4', arg_type='datetime', val=date, content_object=job) + taskarg_factory(TaskKwarg, key="key1", arg_type="str", val="one", content_object=job) + taskarg_factory(TaskKwarg, key="key2", arg_type="int", val=2, content_object=job) + taskarg_factory(TaskKwarg, key="key3", arg_type="bool", val=True, content_object=job) + taskarg_factory(TaskKwarg, key="key4", arg_type="datetime", val=date, content_object=job) kwargs = job.parse_kwargs() - self.assertEqual(kwargs, dict(key1='one', key2=2, key3=True, key4=date)) + self.assertEqual(kwargs, dict(key1="one", key2=2, key3=True, key4=date)) def test_callable_args_and_kwargs(self): - task = task_factory(self.TaskModelClass, callable='scheduler.tests.jobs.test_args_kwargs') + task = task_factory(self.TaskModelClass, callable="scheduler.tests.jobs.test_args_kwargs") date = timezone.now() - taskarg_factory(TaskArg, arg_type='str', val='one', content_object=task) - taskarg_factory(TaskKwarg, key='key1', arg_type='int', val=2, content_object=task) - taskarg_factory(TaskKwarg, key='key2', arg_type='datetime', val=date, content_object=task) - taskarg_factory(TaskKwarg, key='key3', arg_type='bool', val=False, content_object=task) + taskarg_factory(TaskArg, arg_type="str", val="one", content_object=task) + taskarg_factory(TaskKwarg, key="key1", arg_type="int", val=2, content_object=task) + taskarg_factory(TaskKwarg, key="key2", arg_type="datetime", val=date, content_object=task) + taskarg_factory(TaskKwarg, key="key3", arg_type="bool", val=False, content_object=task) task.save() entry = _get_job_from_scheduled_registry(task) - self.assertEqual(entry.perform(), - "test_args_kwargs('one', key1=2, key2={}, key3=False)".format(date)) + self.assertEqual(entry.perform(), "test_args_kwargs('one', key1=2, key2={}, key3=False)".format(date)) def test_function_string(self): - task = task_factory(self.TaskModelClass, ) + task = task_factory( + self.TaskModelClass, + ) date = timezone.now() - taskarg_factory(TaskArg, arg_type='str', val='one', content_object=task) - taskarg_factory(TaskArg, arg_type='int', val='1', content_object=task) - taskarg_factory(TaskArg, arg_type='datetime', val=date, content_object=task) - taskarg_factory(TaskArg, arg_type='bool', val=True, content_object=task) - taskarg_factory(TaskKwarg, key='key1', arg_type='str', val='one', content_object=task) - taskarg_factory(TaskKwarg, key='key2', arg_type='int', val=2, content_object=task) - taskarg_factory(TaskKwarg, key='key3', arg_type='datetime', val=date, content_object=task) - taskarg_factory(TaskKwarg, key='key4', arg_type='bool', val=False, content_object=task) - self.assertEqual(task.function_string(), - f"scheduler.tests.jobs.test_job('one', 1, {repr(date)}, True, " - f"key1='one', key2=2, key3={repr(date)}, key4=False)") + taskarg_factory(TaskArg, arg_type="str", val="one", content_object=task) + taskarg_factory(TaskArg, 
arg_type="int", val="1", content_object=task) + taskarg_factory(TaskArg, arg_type="datetime", val=date, content_object=task) + taskarg_factory(TaskArg, arg_type="bool", val=True, content_object=task) + taskarg_factory(TaskKwarg, key="key1", arg_type="str", val="one", content_object=task) + taskarg_factory(TaskKwarg, key="key2", arg_type="int", val=2, content_object=task) + taskarg_factory(TaskKwarg, key="key3", arg_type="datetime", val=date, content_object=task) + taskarg_factory(TaskKwarg, key="key4", arg_type="bool", val=False, content_object=task) + self.assertEqual( + task.function_string(), + f"scheduler.tests.jobs.test_job('one', 1, {repr(date)}, True, " + f"key1='one', key2=2, key3={repr(date)}, key4=False)", + ) def test_admin_list_view(self): # arrange - self.client.login(username='admin', password='admin') - job = task_factory(self.TaskModelClass, ) + self.client.login(username="admin", password="admin") + job = task_factory( + self.TaskModelClass, + ) model = job._meta.model.__name__.lower() - url = reverse(f'admin:scheduler_{model}_changelist') + url = reverse(f"admin:scheduler_{model}_changelist") # act res = self.client.get(url) # assert @@ -264,29 +295,43 @@ def test_admin_list_view(self): def test_admin_list_view_delete_model(self): # arrange - self.client.login(username='admin', password='admin') - task = task_factory(self.TaskModelClass, ) + self.client.login(username="admin", password="admin") + task = task_factory( + self.TaskModelClass, + ) model = task._meta.model.__name__.lower() - url = reverse(f'admin:scheduler_{model}_changelist') + url = reverse(f"admin:scheduler_{model}_changelist") # act - res = self.client.post(url, data={ - 'action': 'delete_model', - '_selected_action': [task.pk, ], - }) + res = self.client.post( + url, + data={ + "action": "delete_model", + "_selected_action": [ + task.pk, + ], + }, + ) # assert self.assertEqual(302, res.status_code) def test_admin_run_job_now_enqueues_job_at(self): # arrange - self.client.login(username='admin', password='admin') - task = task_factory(self.TaskModelClass, ) + self.client.login(username="admin", password="admin") + task = task_factory( + self.TaskModelClass, + ) model = task._meta.model.__name__.lower() - url = reverse(f'admin:scheduler_{model}_changelist') + url = reverse(f"admin:scheduler_{model}_changelist") # act - res = self.client.post(url, data={ - 'action': 'enqueue_job_now', - '_selected_action': [task.pk, ], - }) + res = self.client.post( + url, + data={ + "action": "enqueue_job_now", + "_selected_action": [ + task.pk, + ], + }, + ) # assert self.assertEqual(302, res.status_code) task.refresh_from_db() @@ -295,10 +340,17 @@ def test_admin_run_job_now_enqueues_job_at(self): def test_admin_change_view(self): # arrange - self.client.login(username='admin', password='admin') - task = task_factory(self.TaskModelClass, ) + self.client.login(username="admin", password="admin") + task = task_factory( + self.TaskModelClass, + ) model = task._meta.model.__name__.lower() - url = reverse(f'admin:scheduler_{model}_change', args=[task.pk, ]) + url = reverse( + f"admin:scheduler_{model}_change", + args=[ + task.pk, + ], + ) # act res = self.client.get(url) # assert @@ -306,11 +358,16 @@ def test_admin_change_view(self): def test_admin_change_view__bad_redis_connection(self): # arrange - self.client.login(username='admin', password='admin') - task = task_factory(self.TaskModelClass, queue='test2', instance_only=True) + self.client.login(username="admin", password="admin") + task = 
task_factory(self.TaskModelClass, queue="test2", instance_only=True) task.save(schedule_job=False) model = task._meta.model.__name__.lower() - url = reverse(f'admin:scheduler_{model}_change', args=[task.pk, ]) + url = reverse( + f"admin:scheduler_{model}_change", + args=[ + task.pk, + ], + ) # act res = self.client.get(url) # assert @@ -318,16 +375,20 @@ def test_admin_change_view__bad_redis_connection(self): def test_admin_enqueue_job_now(self): # arrange - self.client.login(username='admin', password='admin') - task = task_factory(self.TaskModelClass, ) + self.client.login(username="admin", password="admin") + task = task_factory( + self.TaskModelClass, + ) self.assertIsNotNone(task.job_id) self.assertTrue(task.is_scheduled()) data = { - 'action': 'enqueue_job_now', - '_selected_action': [task.id, ], + "action": "enqueue_job_now", + "_selected_action": [ + task.id, + ], } model = task._meta.model.__name__.lower() - url = reverse(f'admin:scheduler_{model}_changelist') + url = reverse(f"admin:scheduler_{model}_changelist") # act res = self.client.post(url, data=data, follow=True) @@ -337,31 +398,36 @@ def test_admin_enqueue_job_now(self): task_model, scheduled_task_id = entry.args self.assertEqual(task_model, task.TASK_TYPE) self.assertEqual(scheduled_task_id, task.id) - self.assertEqual('scheduled', entry.get_status()) - assert_has_execution_with_status(task, 'queued') + self.assertEqual("scheduled", entry.get_status()) + assert_has_execution_with_status(task, "queued") # act 2 - worker = create_worker('default', fork_job_execution=False, ) + worker = create_worker( + "default", + fork_job_execution=False, + ) worker.work(burst=True) # assert 2 entry = _get_job_from_scheduled_registry(task) self.assertEqual(task_model, task.TASK_TYPE) self.assertEqual(scheduled_task_id, task.id) - assert_has_execution_with_status(task, 'finished') + assert_has_execution_with_status(task, "finished") def test_admin_enable_job(self): # arrange - self.client.login(username='admin', password='admin') + self.client.login(username="admin", password="admin") task = task_factory(self.TaskModelClass, enabled=False) self.assertIsNone(task.job_id) self.assertFalse(task.is_scheduled()) data = { - 'action': 'enable_selected', - '_selected_action': [task.id, ], + "action": "enable_selected", + "_selected_action": [ + task.id, + ], } model = task._meta.model.__name__.lower() - url = reverse(f'admin:scheduler_{model}_changelist') + url = reverse(f"admin:scheduler_{model}_changelist") # act res = self.client.post(url, data=data, follow=True) # assert @@ -369,19 +435,21 @@ def test_admin_enable_job(self): task.refresh_from_db() self.assertTrue(task.enabled) self.assertTrue(task.is_scheduled()) - assert_response_has_msg(res, '1 job was successfully enabled and scheduled.') + assert_response_has_msg(res, "1 job was successfully enabled and scheduled.") def test_admin_disable_job(self): # arrange - self.client.login(username='admin', password='admin') + self.client.login(username="admin", password="admin") task = task_factory(self.TaskModelClass, enabled=True) task.save() data = { - 'action': 'disable_selected', - '_selected_action': [task.id, ], + "action": "disable_selected", + "_selected_action": [ + task.id, + ], } model = task._meta.model.__name__.lower() - url = reverse(f'admin:scheduler_{model}_changelist') + url = reverse(f"admin:scheduler_{model}_changelist") self.assertTrue(task.is_scheduled()) # act res = self.client.post(url, data=data, follow=True) @@ -390,20 +458,27 @@ def test_admin_disable_job(self): 
task.refresh_from_db() self.assertFalse(task.is_scheduled()) self.assertFalse(task.enabled) - assert_response_has_msg(res, '1 job was successfully disabled and unscheduled.') + assert_response_has_msg(res, "1 job was successfully disabled and unscheduled.") def test_admin_single_delete(self): # arrange - self.client.login(username='admin', password='admin') + self.client.login(username="admin", password="admin") prev_count = self.TaskModelClass.objects.count() - task = task_factory(self.TaskModelClass, ) + task = task_factory( + self.TaskModelClass, + ) self.assertIsNotNone(task.job_id) self.assertTrue(task.is_scheduled()) prev = len(_get_executions(task)) model = task._meta.model.__name__.lower() - url = reverse(f'admin:scheduler_{model}_delete', args=[task.pk, ]) + url = reverse( + f"admin:scheduler_{model}_delete", + args=[ + task.pk, + ], + ) data = { - 'post': 'yes', + "post": "yes", } # act res = self.client.post(url, data=data, follow=True) @@ -414,7 +489,7 @@ def test_admin_single_delete(self): def test_admin_delete_selected(self): # arrange - self.client.login(username='admin', password='admin') + self.client.login(username="admin", password="admin") task = task_factory(self.TaskModelClass, enabled=True) task.save() queue = get_queue(task.queue) @@ -422,17 +497,19 @@ def test_admin_delete_selected(self): job_id = task.job_id self.assertIn(job_id, scheduled_jobs) data = { - 'action': 'delete_selected', - '_selected_action': [task.id, ], - 'post': 'yes', + "action": "delete_selected", + "_selected_action": [ + task.id, + ], + "post": "yes", } model = task._meta.model.__name__.lower() - url = reverse(f'admin:scheduler_{model}_changelist') + url = reverse(f"admin:scheduler_{model}_changelist") # act res = self.client.post(url, data=data, follow=True) # assert self.assertEqual(200, res.status_code) - assert_response_has_msg(res, f'Successfully deleted 1 {self.TaskModelClass._meta.verbose_name}.') + assert_response_has_msg(res, f"Successfully deleted 1 {self.TaskModelClass._meta.verbose_name}.") self.assertIsNone(self.TaskModelClass.objects.filter(id=task.id).first()) scheduled_jobs = queue.scheduled_job_registry.get_job_ids() self.assertNotIn(job_id, scheduled_jobs) @@ -452,7 +529,7 @@ def test_schedule_time_no_tz(self): @override_settings(USE_TZ=True) def test_schedule_time_with_tz(self): task = task_factory(self.TaskModelClass) - est = zoneinfo.ZoneInfo('US/Eastern') + est = zoneinfo.ZoneInfo("US/Eastern") task.scheduled_time = datetime(2016, 12, 25, 8, 0, 0, tzinfo=est) self.assertEqual("2016-12-25T13:00:00+00:00", task._schedule_time().isoformat()) @@ -468,7 +545,7 @@ class TestScheduledJob(BaseTestCases.TestSchedulableJob): def test_clean(self): job = task_factory(self.TaskModelClass) job.queue = list(settings.QUEUES)[0] - job.callable = 'scheduler.tests.jobs.test_job' + job.callable = "scheduler.tests.jobs.test_job" self.assertIsNone(job.clean()) def test_unschedulable_old_job(self): diff --git a/scheduler/tests/test_redis_models.py b/scheduler/tests/test_redis_models.py index f3b7393..fc830e1 100644 --- a/scheduler/tests/test_redis_models.py +++ b/scheduler/tests/test_redis_models.py @@ -6,9 +6,9 @@ class TestWorkerAdmin(SchedulerBaseCase): def test_admin_list_view(self): # arrange - self.client.login(username='admin', password='admin') - model = 'worker' - url = reverse(f'admin:scheduler_{model}_changelist') + self.client.login(username="admin", password="admin") + model = "worker" + url = reverse(f"admin:scheduler_{model}_changelist") # act res = self.client.get(url) @@ -19,9 
+19,9 @@ def test_admin_list_view(self): class TestQueueAdmin(SchedulerBaseCase): def test_admin_list_view(self): # arrange - self.client.login(username='admin', password='admin') - model = 'queue' - url = reverse(f'admin:scheduler_{model}_changelist') + self.client.login(username="admin", password="admin") + model = "queue" + url = reverse(f"admin:scheduler_{model}_changelist") # act res = self.client.get(url) diff --git a/scheduler/tests/test_repeatable_task.py b/scheduler/tests/test_repeatable_task.py index 11840a7..c55c0c0 100644 --- a/scheduler/tests/test_repeatable_task.py +++ b/scheduler/tests/test_repeatable_task.py @@ -7,8 +7,7 @@ from scheduler import settings from scheduler.models import RepeatableTask from scheduler.tests.test_models import BaseTestCases -from .testtools import ( - task_factory, _get_job_from_scheduled_registry) +from .testtools import task_factory, _get_job_from_scheduled_registry class TestRepeatableTask(BaseTestCases.TestSchedulableJob): @@ -26,7 +25,7 @@ def test_schedulable_old_job_repeat_none(self): def test_clean(self): job = task_factory(self.TaskModelClass) job.queue = list(settings.QUEUES)[0] - job.callable = 'scheduler.tests.jobs.test_job' + job.callable = "scheduler.tests.jobs.test_job" job.interval = 1 job.result_ttl = -1 self.assertIsNone(job.clean()) @@ -34,42 +33,44 @@ def test_clean(self): def test_clean_seconds(self): job = task_factory(self.TaskModelClass) job.queue = list(settings.QUEUES)[0] - job.callable = 'scheduler.tests.jobs.test_job' + job.callable = "scheduler.tests.jobs.test_job" job.interval = 60 job.result_ttl = -1 - job.interval_unit = 'seconds' + job.interval_unit = "seconds" self.assertIsNone(job.clean()) - @override_settings(SCHEDULER_CONFIG={ - 'SCHEDULER_INTERVAL': 10, - }) + @override_settings( + SCHEDULER_CONFIG={ + "SCHEDULER_INTERVAL": 10, + } + ) def test_clean_too_frequent(self): job = task_factory(self.TaskModelClass) job.queue = list(settings.QUEUES)[0] - job.callable = 'scheduler.tests.jobs.test_job' + job.callable = "scheduler.tests.jobs.test_job" job.interval = 2 # Smaller than 10 job.result_ttl = -1 - job.interval_unit = 'seconds' + job.interval_unit = "seconds" with self.assertRaises(ValidationError): job.clean_interval_unit() def test_clean_not_multiple(self): job = task_factory(self.TaskModelClass) job.queue = list(settings.QUEUES)[0] - job.callable = 'scheduler.tests.jobs.test_job' + job.callable = "scheduler.tests.jobs.test_job" job.interval = 121 - job.interval_unit = 'seconds' + job.interval_unit = "seconds" with self.assertRaises(ValidationError): job.clean_interval_unit() def test_clean_short_result_ttl(self): job = task_factory(self.TaskModelClass) job.queue = list(settings.QUEUES)[0] - job.callable = 'scheduler.tests.jobs.test_job' + job.callable = "scheduler.tests.jobs.test_job" job.interval = 1 job.repeat = 1 job.result_ttl = 3599 - job.interval_unit = 'hours' + job.interval_unit = "hours" job.repeat = 42 with self.assertRaises(ValidationError): job.clean_result_ttl() @@ -77,53 +78,55 @@ def test_clean_short_result_ttl(self): def test_clean_indefinite_result_ttl(self): job = task_factory(self.TaskModelClass) job.queue = list(settings.QUEUES)[0] - job.callable = 'scheduler.tests.jobs.test_job' + job.callable = "scheduler.tests.jobs.test_job" job.interval = 1 job.result_ttl = -1 - job.interval_unit = 'hours' + job.interval_unit = "hours" job.clean_result_ttl() def test_clean_undefined_result_ttl(self): job = task_factory(self.TaskModelClass) job.queue = list(settings.QUEUES)[0] - job.callable = 
'scheduler.tests.jobs.test_job' + job.callable = "scheduler.tests.jobs.test_job" job.interval = 1 - job.interval_unit = 'hours' + job.interval_unit = "hours" job.clean_result_ttl() def test_interval_seconds_weeks(self): - job = task_factory(self.TaskModelClass, interval=2, interval_unit='weeks') + job = task_factory(self.TaskModelClass, interval=2, interval_unit="weeks") self.assertEqual(1209600.0, job.interval_seconds()) def test_interval_seconds_days(self): - job = task_factory(self.TaskModelClass, interval=2, interval_unit='days') + job = task_factory(self.TaskModelClass, interval=2, interval_unit="days") self.assertEqual(172800.0, job.interval_seconds()) def test_interval_seconds_hours(self): - job = task_factory(self.TaskModelClass, interval=2, interval_unit='hours') + job = task_factory(self.TaskModelClass, interval=2, interval_unit="hours") self.assertEqual(7200.0, job.interval_seconds()) def test_interval_seconds_minutes(self): - job = task_factory(self.TaskModelClass, interval=15, interval_unit='minutes') + job = task_factory(self.TaskModelClass, interval=15, interval_unit="minutes") self.assertEqual(900.0, job.interval_seconds()) def test_interval_seconds_seconds(self): - job = RepeatableTask(interval=15, interval_unit='seconds') + job = RepeatableTask(interval=15, interval_unit="seconds") self.assertEqual(15.0, job.interval_seconds()) def test_interval_display(self): - job = task_factory(self.TaskModelClass, interval=15, interval_unit='minutes') - self.assertEqual(job.interval_display(), '15 minutes') + job = task_factory(self.TaskModelClass, interval=15, interval_unit="minutes") + self.assertEqual(job.interval_display(), "15 minutes") def test_result_interval(self): - job = task_factory(self.TaskModelClass, ) + job = task_factory( + self.TaskModelClass, + ) entry = _get_job_from_scheduled_registry(job) - self.assertEqual(entry.meta['interval'], 3600) + self.assertEqual(entry.meta["interval"], 3600) def test_repeat(self): job = task_factory(self.TaskModelClass, repeat=10) entry = _get_job_from_scheduled_registry(job) - self.assertEqual(entry.meta['repeat'], 10) + self.assertEqual(entry.meta["repeat"], 10) def test_repeat_old_job_exhausted(self): base_time = timezone.now() @@ -147,7 +150,7 @@ def test_repeat_none_interval_2_min(self): base_time = timezone.now() job = task_factory(self.TaskModelClass, scheduled_time=base_time - timedelta(minutes=29), repeat=None) job.interval = 120 - job.interval_unit = 'seconds' + job.interval_unit = "seconds" job.schedule() self.assertTrue(job.scheduled_time > base_time) self.assertTrue(job.is_scheduled()) @@ -168,9 +171,11 @@ def test_check_rescheduled_after_execution(self): def test_check_rescheduled_after_execution_failed_job(self): task = task_factory( - self.TaskModelClass, callable_name='scheduler.tests.jobs.failing_job', + self.TaskModelClass, + callable_name="scheduler.tests.jobs.failing_job", scheduled_time=timezone.now() + timedelta(seconds=1), - repeat=10, ) + repeat=10, + ) queue = task.rqueue first_run_id = task.job_id entry = queue.fetch_job(first_run_id) diff --git a/scheduler/tests/test_settings.py b/scheduler/tests/test_settings.py index 4253f89..d8e6cf3 100644 --- a/scheduler/tests/test_settings.py +++ b/scheduler/tests/test_settings.py @@ -5,95 +5,95 @@ from scheduler.settings import conf_settings settings.SCHEDULER_QUEUES = { - 'default': {'HOST': 'localhost', 'PORT': 6379, 'DB': 0, 'DEFAULT_TIMEOUT': 500}, - 'test': { - 'HOST': 'localhost', - 'PORT': 1, - 'DB': 1, - }, - 'sentinel': { - 'SENTINELS': [('localhost', 26736), 
('localhost', 26737)], - 'MASTER_NAME': 'testmaster', - 'DB': 1, - 'USERNAME': 'redis-user', - 'PASSWORD': 'secret', - 'SOCKET_TIMEOUT': 10, - 'SENTINEL_KWARGS': {}, - }, - 'test1': { - 'HOST': 'localhost', - 'PORT': 1, - 'DB': 1, - 'DEFAULT_TIMEOUT': 400, - }, - 'test2': { - 'HOST': 'localhost', - 'PORT': 1, - 'DB': 1, - }, - 'test3': { - 'HOST': 'localhost', - 'PORT': 6379, - 'DB': 1, - }, - 'async': { - 'HOST': 'localhost', - 'PORT': 6379, - 'DB': 1, - 'ASYNC': False, - }, - 'url': { - 'URL': 'redis://username:password@host:1234/', - 'DB': 4, - }, - 'url_with_db': { - 'URL': 'redis://username:password@host:1234/5', - }, - 'url_default_db': { - 'URL': 'redis://username:password@host:1234', - }, - 'django_tasks_scheduler_test': { - 'HOST': 'localhost', - 'PORT': 6379, - 'DB': 0, - }, - 'scheduler_scheduler_active_test': { - 'HOST': 'localhost', - 'PORT': 6379, - 'DB': 0, - 'ASYNC': False, - }, - 'scheduler_scheduler_inactive_test': { - 'HOST': 'localhost', - 'PORT': 6379, - 'DB': 0, - 'ASYNC': False, - }, - 'worker_scheduler_active_test': { - 'HOST': 'localhost', - 'PORT': 6379, - 'DB': 0, - 'ASYNC': False, - }, - 'worker_scheduler_inactive_test': { - 'HOST': 'localhost', - 'PORT': 6379, - 'DB': 0, - 'ASYNC': False, - }, - 'django_tasks_scheduler_test2': { - 'HOST': 'localhost', - 'PORT': 6379, - 'DB': 0, - }, - 'test_scheduler': { - 'HOST': 'localhost', - 'PORT': 6379, - 'DB': 0, - 'DEFAULT_TIMEOUT': 400, + "default": {"HOST": "localhost", "PORT": 6379, "DB": 0, "DEFAULT_TIMEOUT": 500}, + "test": { + "HOST": "localhost", + "PORT": 1, + "DB": 1, + }, + "sentinel": { + "SENTINELS": [("localhost", 26736), ("localhost", 26737)], + "MASTER_NAME": "testmaster", + "DB": 1, + "USERNAME": "redis-user", + "PASSWORD": "secret", + "SOCKET_TIMEOUT": 10, + "SENTINEL_KWARGS": {}, + }, + "test1": { + "HOST": "localhost", + "PORT": 1, + "DB": 1, + "DEFAULT_TIMEOUT": 400, + }, + "test2": { + "HOST": "localhost", + "PORT": 1, + "DB": 1, + }, + "test3": { + "HOST": "localhost", + "PORT": 6379, + "DB": 1, + }, + "async": { + "HOST": "localhost", + "PORT": 6379, + "DB": 1, + "ASYNC": False, + }, + "url": { + "URL": "redis://username:password@host:1234/", + "DB": 4, + }, + "url_with_db": { + "URL": "redis://username:password@host:1234/5", + }, + "url_default_db": { + "URL": "redis://username:password@host:1234", + }, + "django_tasks_scheduler_test": { + "HOST": "localhost", + "PORT": 6379, + "DB": 0, + }, + "scheduler_scheduler_active_test": { + "HOST": "localhost", + "PORT": 6379, + "DB": 0, + "ASYNC": False, + }, + "scheduler_scheduler_inactive_test": { + "HOST": "localhost", + "PORT": 6379, + "DB": 0, + "ASYNC": False, + }, + "worker_scheduler_active_test": { + "HOST": "localhost", + "PORT": 6379, + "DB": 0, + "ASYNC": False, + }, + "worker_scheduler_inactive_test": { + "HOST": "localhost", + "PORT": 6379, + "DB": 0, + "ASYNC": False, + }, + "django_tasks_scheduler_test2": { + "HOST": "localhost", + "PORT": 6379, + "DB": 0, + }, + "test_scheduler": { + "HOST": "localhost", + "PORT": 6379, + "DB": 0, + "DEFAULT_TIMEOUT": 400, }, } settings.SCHEDULER_CONFIG = dict( - FAKEREDIS=(os.getenv('FAKEREDIS', 'False') == 'True'), + FAKEREDIS=(os.getenv("FAKEREDIS", "False") == "True"), ) conf_settings() diff --git a/scheduler/tests/test_views.py b/scheduler/tests/test_views.py index a9395bd..afae1d0 100644 --- a/scheduler/tests/test_views.py +++ b/scheduler/tests/test_views.py @@ -18,75 +18,75 @@ class BaseTestCase(TestCase): def setUp(self): - self.user = User.objects.create_superuser('user', password='pass') + 
self.user = User.objects.create_superuser("user", password="pass") self.client = Client() - self.client.login(username=self.user.username, password='pass') - get_queue('django_tasks_scheduler_test').connection.flushall() + self.client.login(username=self.user.username, password="pass") + get_queue("django_tasks_scheduler_test").connection.flushall() class SingleJobActionViewsTest(BaseTestCase): def test_single_job_action_unknown_job(self): - res = self.client.get(reverse('queue_job_action', args=['unknown', 'cancel']), follow=True) + res = self.client.get(reverse("queue_job_action", args=["unknown", "cancel"]), follow=True) self.assertEqual(400, res.status_code) def test_single_job_action_unknown_action(self): - queue = get_queue('default') + queue = get_queue("default") job = queue.enqueue(failing_job) - worker = create_worker('default') + worker = create_worker("default") worker.work(burst=True) job.refresh() self.assertTrue(job.is_failed) - res = self.client.get(reverse('queue_job_action', args=[job.id, 'unknown']), follow=True) + res = self.client.get(reverse("queue_job_action", args=[job.id, "unknown"]), follow=True) self.assertEqual(404, res.status_code) def test_single_job_action_requeue_job(self): - queue = get_queue('default') + queue = get_queue("default") job = queue.enqueue(failing_job) - worker = create_worker('default') + worker = create_worker("default") worker.work(burst=True) job.refresh() self.assertTrue(job.is_failed) - res = self.client.get(reverse('queue_job_action', args=[job.id, 'requeue']), follow=True) + res = self.client.get(reverse("queue_job_action", args=[job.id, "requeue"]), follow=True) self.assertEqual(200, res.status_code) - self.client.post(reverse('queue_job_action', args=[job.id, 'requeue']), {'requeue': 'Requeue'}, follow=True) + self.client.post(reverse("queue_job_action", args=[job.id, "requeue"]), {"requeue": "Requeue"}, follow=True) self.assertIn(job, queue.jobs) job.delete() def test_single_job_action_delete_job(self): - queue = get_queue('django_tasks_scheduler_test') + queue = get_queue("django_tasks_scheduler_test") job = queue.enqueue(test_job) - res = self.client.get(reverse('queue_job_action', args=[job.id, 'delete']), follow=True) + res = self.client.get(reverse("queue_job_action", args=[job.id, "delete"]), follow=True) self.assertEqual(200, res.status_code) - self.client.post(reverse('queue_job_action', args=[job.id, 'delete']), {'post': 'yes'}, follow=True) + self.client.post(reverse("queue_job_action", args=[job.id, "delete"]), {"post": "yes"}, follow=True) self.assertFalse(JobExecution.exists(job.id, connection=queue.connection)) self.assertNotIn(job.id, queue.get_job_ids()) def test_single_job_action_cancel_job(self): - queue = get_queue('django_tasks_scheduler_test') + queue = get_queue("django_tasks_scheduler_test") job = queue.enqueue(long_job) - res = self.client.get(reverse('queue_job_action', args=[job.id, 'cancel']), follow=True) + res = self.client.get(reverse("queue_job_action", args=[job.id, "cancel"]), follow=True) self.assertEqual(200, res.status_code) - res = self.client.post(reverse('queue_job_action', args=[job.id, 'cancel']), {'post': 'yes'}, follow=True) + res = self.client.post(reverse("queue_job_action", args=[job.id, "cancel"]), {"post": "yes"}, follow=True) self.assertEqual(200, res.status_code) tmp = JobExecution.fetch(job.id, connection=queue.connection) self.assertTrue(tmp.is_canceled) self.assertNotIn(job.id, queue.get_job_ids()) def test_single_job_action_cancel_job_that_is_already_cancelled(self): - queue = 
get_queue('django_tasks_scheduler_test') + queue = get_queue("django_tasks_scheduler_test") job = queue.enqueue(long_job) - res = self.client.post(reverse('queue_job_action', args=[job.id, 'cancel']), {'post': 'yes'}, follow=True) + res = self.client.post(reverse("queue_job_action", args=[job.id, "cancel"]), {"post": "yes"}, follow=True) self.assertEqual(200, res.status_code) tmp = JobExecution.fetch(job.id, connection=queue.connection) self.assertTrue(tmp.is_canceled) self.assertNotIn(job.id, queue.get_job_ids()) - res = self.client.post(reverse('queue_job_action', args=[job.id, 'cancel']), {'post': 'yes'}, follow=True) + res = self.client.post(reverse("queue_job_action", args=[job.id, "cancel"]), {"post": "yes"}, follow=True) self.assertEqual(200, res.status_code) - assert_message_in_response(res, f'Could not perform action: Cannot cancel already canceled job: {job.id}') + assert_message_in_response(res, f"Could not perform action: Cannot cancel already canceled job: {job.id}") def test_single_job_action_enqueue_job(self): - queue = get_queue('django_tasks_scheduler_test') + queue = get_queue("django_tasks_scheduler_test") job_list = [] # enqueue some jobs that depends on other previous_job = None @@ -101,9 +101,9 @@ def test_single_job_action_enqueue_job(self): self.assertIsNone(job_list[-1].enqueued_at) # Try to force enqueue last job should do nothing - res = self.client.get(reverse('queue_job_action', args=[job_list[-1].id, 'enqueue']), follow=True) + res = self.client.get(reverse("queue_job_action", args=[job_list[-1].id, "enqueue"]), follow=True) self.assertEqual(200, res.status_code) - res = self.client.post(reverse('queue_job_action', args=[job_list[-1].id, 'enqueue']), follow=True) + res = self.client.post(reverse("queue_job_action", args=[job_list[-1].id, "enqueue"]), follow=True) # Check that job is still deferred because it has dependencies (rq 1.14 change) self.assertEqual(200, res.status_code) @@ -114,7 +114,7 @@ def test_single_job_action_enqueue_job(self): class JobListActionViewsTest(BaseTestCase): def test_job_list_action_delete_jobs__with_bad_next_url(self): - queue = get_queue('django_tasks_scheduler_test') + queue = get_queue("django_tasks_scheduler_test") # enqueue some jobs job_ids = [] @@ -124,11 +124,20 @@ def test_job_list_action_delete_jobs__with_bad_next_url(self): # remove those jobs using view res = self.client.post( - reverse('queue_actions', args=[queue.name, ]), { - 'action': 'delete', 'job_ids': job_ids, - 'next_url': 'bad_url', - }, follow=True) - assert_message_in_response(res, 'Bad followup URL') + reverse( + "queue_actions", + args=[ + queue.name, + ], + ), + { + "action": "delete", + "job_ids": job_ids, + "next_url": "bad_url", + }, + follow=True, + ) + assert_message_in_response(res, "Bad followup URL") # check if jobs are removed self.assertEqual(200, res.status_code) for job_id in job_ids: @@ -136,7 +145,7 @@ def test_job_list_action_delete_jobs__with_bad_next_url(self): self.assertNotIn(job_id, queue.job_ids) def test_job_list_action_delete_jobs(self): - queue = get_queue('django_tasks_scheduler_test') + queue = get_queue("django_tasks_scheduler_test") # enqueue some jobs job_ids = [] @@ -146,7 +155,15 @@ def test_job_list_action_delete_jobs(self): # remove those jobs using view res = self.client.post( - reverse('queue_actions', args=[queue.name, ]), {'action': 'delete', 'job_ids': job_ids}, follow=True) + reverse( + "queue_actions", + args=[ + queue.name, + ], + ), + {"action": "delete", "job_ids": job_ids}, + follow=True, + ) # check if jobs 
are removed self.assertEqual(200, res.status_code) @@ -155,8 +172,8 @@ def test_job_list_action_delete_jobs(self): self.assertNotIn(job_id, queue.job_ids) def test_job_list_action_requeue_jobs(self): - queue = get_queue('django_tasks_scheduler_test') - queue_name = 'django_tasks_scheduler_test' + queue = get_queue("django_tasks_scheduler_test") + queue_name = "django_tasks_scheduler_test" # enqueue some jobs that will fail jobs = [] @@ -167,7 +184,7 @@ def test_job_list_action_requeue_jobs(self): job_ids.append(job.id) # do those jobs = fail them - worker = create_worker('django_tasks_scheduler_test') + worker = create_worker("django_tasks_scheduler_test") worker.work(burst=True) # check if all jobs are really failed @@ -175,19 +192,19 @@ def test_job_list_action_requeue_jobs(self): self.assertTrue(job.is_failed) # re-nqueue failed jobs from failed queue - self.client.post(reverse('queue_actions', args=[queue_name]), {'action': 'requeue', 'job_ids': job_ids}) + self.client.post(reverse("queue_actions", args=[queue_name]), {"action": "requeue", "job_ids": job_ids}) # check if we requeue all failed jobs for job in jobs: self.assertFalse(job.is_failed) def test_job_list_action_stop_jobs(self): - queue_name = 'django_tasks_scheduler_test' + queue_name = "django_tasks_scheduler_test" queue = get_queue(queue_name) # Enqueue some jobs job_ids = [] - worker = create_worker('django_tasks_scheduler_test') + worker = create_worker("django_tasks_scheduler_test") for _ in range(3): job = queue.enqueue(test_job) job_ids.append(job.id) @@ -201,7 +218,7 @@ def test_job_list_action_stop_jobs(self): # Stop those jobs using the view started_job_registry = queue.started_job_registry self.assertEqual(len(started_job_registry), len(job_ids)) - self.client.post(reverse('queue_actions', args=[queue_name]), {'action': 'stop', 'job_ids': job_ids}) + self.client.post(reverse("queue_actions", args=[queue_name]), {"action": "stop", "job_ids": job_ids}) self.assertEqual(len(started_job_registry), 0) canceled_job_registry = queue.canceled_job_registry @@ -213,119 +230,129 @@ def test_job_list_action_stop_jobs(self): class QueueRegistryJobsViewTest(BaseTestCase): def test_queue_jobs_unknown_registry(self): - queue_name = 'default' - res = self.client.get(reverse('queue_registry_jobs', args=[queue_name, 'unknown']), follow=True) + queue_name = "default" + res = self.client.get(reverse("queue_registry_jobs", args=[queue_name, "unknown"]), follow=True) self.assertEqual(404, res.status_code) def test_queue_jobs_unknown_queue(self): - res = self.client.get(reverse('queue_registry_jobs', args=['UNKNOWN', 'queued'])) + res = self.client.get(reverse("queue_registry_jobs", args=["UNKNOWN", "queued"])) self.assertEqual(404, res.status_code) def test_queued_jobs(self): """Jobs in queue are displayed properly""" - queue = get_queue('default') + queue = get_queue("default") job = queue.enqueue(test_job) - queue_name = 'default' - res = self.client.get(reverse('queue_registry_jobs', args=[queue_name, 'queued'])) - self.assertEqual(res.context['jobs'], [job]) + queue_name = "default" + res = self.client.get(reverse("queue_registry_jobs", args=[queue_name, "queued"])) + self.assertEqual(res.context["jobs"], [job]) def test_finished_jobs(self): """Ensure that finished jobs page works properly.""" - queue = get_queue('django_tasks_scheduler_test') - queue_name = 'django_tasks_scheduler_test' + queue = get_queue("django_tasks_scheduler_test") + queue_name = "django_tasks_scheduler_test" job = queue.enqueue(test_job) registry = 
queue.finished_job_registry registry.add(job, 2) - res = self.client.get(reverse('queue_registry_jobs', args=[queue_name, 'finished'])) - self.assertEqual(res.context['jobs'], [job]) + res = self.client.get(reverse("queue_registry_jobs", args=[queue_name, "finished"])) + self.assertEqual(res.context["jobs"], [job]) def test_failed_jobs(self): """Ensure that failed jobs page works properly.""" - queue = get_queue('django_tasks_scheduler_test') - queue_name = 'django_tasks_scheduler_test' + queue = get_queue("django_tasks_scheduler_test") + queue_name = "django_tasks_scheduler_test" # Test that page doesn't fail when FailedJobRegistry is empty - res = self.client.get(reverse('queue_registry_jobs', args=[queue_name, 'failed'])) + res = self.client.get(reverse("queue_registry_jobs", args=[queue_name, "failed"])) self.assertEqual(res.status_code, 200) job = queue.enqueue(test_job) registry = queue.failed_job_registry registry.add(job, 2) - res = self.client.get(reverse('queue_registry_jobs', args=[queue_name, 'failed'])) - self.assertEqual(res.context['jobs'], [job]) + res = self.client.get(reverse("queue_registry_jobs", args=[queue_name, "failed"])) + self.assertEqual(res.context["jobs"], [job]) def test_scheduled_jobs(self): """Ensure that scheduled jobs page works properly.""" - queue = get_queue('django_tasks_scheduler_test') - queue_name = 'django_tasks_scheduler_test' + queue = get_queue("django_tasks_scheduler_test") + queue_name = "django_tasks_scheduler_test" # Test that page doesn't fail when ScheduledJobRegistry is empty - res = self.client.get(reverse('queue_registry_jobs', args=[queue_name, 'scheduled'])) + res = self.client.get(reverse("queue_registry_jobs", args=[queue_name, "scheduled"])) self.assertEqual(res.status_code, 200) job = queue.enqueue_at(datetime.now(), test_job) - res = self.client.get(reverse('queue_registry_jobs', args=[queue_name, 'scheduled'])) - self.assertEqual(res.context['jobs'], [job]) + res = self.client.get(reverse("queue_registry_jobs", args=[queue_name, "scheduled"])) + self.assertEqual(res.context["jobs"], [job]) def test_scheduled_jobs_registry_removal(self): """Ensure that non-existing job is being deleted from registry by view""" - queue = get_queue('django_tasks_scheduler_test') - queue_name = 'django_tasks_scheduler_test' + queue = get_queue("django_tasks_scheduler_test") + queue_name = "django_tasks_scheduler_test" registry = queue.scheduled_job_registry job = queue.enqueue_at(datetime.now(), test_job) self.assertEqual(len(registry), 1) queue.connection.delete(job.key) - res = self.client.get(reverse('queue_registry_jobs', args=[queue_name, 'scheduled'])) - self.assertEqual(res.context['jobs'], []) + res = self.client.get(reverse("queue_registry_jobs", args=[queue_name, "scheduled"])) + self.assertEqual(res.context["jobs"], []) self.assertEqual(len(registry), 0) def test_started_jobs(self): """Ensure that active jobs page works properly.""" - queue = get_queue('django_tasks_scheduler_test') - queue_name = 'django_tasks_scheduler_test' + queue = get_queue("django_tasks_scheduler_test") + queue_name = "django_tasks_scheduler_test" job = queue.enqueue(test_job) registry = queue.started_job_registry registry.add(job, 2) - res = self.client.get(reverse('queue_registry_jobs', args=[queue_name, 'started'])) - self.assertEqual(res.context['jobs'], [job]) + res = self.client.get(reverse("queue_registry_jobs", args=[queue_name, "started"])) + self.assertEqual(res.context["jobs"], [job]) def test_deferred_jobs(self): """Ensure that active jobs page works 
properly.""" - queue = get_queue('django_tasks_scheduler_test') - queue_name = 'django_tasks_scheduler_test' + queue = get_queue("django_tasks_scheduler_test") + queue_name = "django_tasks_scheduler_test" job = queue.enqueue(test_job) registry = queue.deferred_job_registry registry.add(job, 2) - res = self.client.get(reverse('queue_registry_jobs', args=[queue_name, 'deferred'])) - self.assertEqual(res.context['jobs'], [job]) + res = self.client.get(reverse("queue_registry_jobs", args=[queue_name, "deferred"])) + self.assertEqual(res.context["jobs"], [job]) class ViewTest(BaseTestCase): def test_job_details(self): """Job data is displayed properly""" - queue = get_queue('default') + queue = get_queue("default") job = queue.enqueue(test_job) - url = reverse('job_details', args=[job.id, ]) + url = reverse( + "job_details", + args=[ + job.id, + ], + ) res = self.client.get(url) - self.assertIn('job', res.context) - self.assertEqual(res.context['job'], job) + self.assertIn("job", res.context) + self.assertEqual(res.context["job"], job) # This page shouldn't fail when job.data is corrupt - queue.connection.hset(job.key, 'data', 'non-pickleable data') + queue.connection.hset(job.key, "data", "non-pickleable data") res = self.client.get(url) self.assertEqual(res.status_code, 200) - self.assertIn('DeserializationError', res.content.decode()) + self.assertIn("DeserializationError", res.content.decode()) # Bad job-id should return 404 - url = reverse('job_details', args=['bad_job_id', ]) + url = reverse( + "job_details", + args=[ + "bad_job_id", + ], + ) res = self.client.get(url) self.assertEqual(400, res.status_code) @@ -334,19 +361,24 @@ def test_scheduled_job_details(self): scheduled_job = task_factory(ScheduledTask, enabled=True) job = _get_job_from_scheduled_registry(scheduled_job) - url = reverse('job_details', args=[job.id, ]) + url = reverse( + "job_details", + args=[ + job.id, + ], + ) res = self.client.get(url, follow=True) - self.assertIn('job', res.context) - self.assertEqual(res.context['job'], job) + self.assertIn("job", res.context) + self.assertEqual(res.context["job"], job) def test_job_details_on_deleted_dependency(self): """Page doesn't crash even if job.dependency has been deleted""" - queue = get_queue('default') + queue = get_queue("default") job = queue.enqueue(test_job) second_job = queue.enqueue(test_job, depends_on=job) job.delete() - url = reverse('job_details', args=[second_job.id]) + url = reverse("job_details", args=[second_job.id]) res = self.client.get(url) self.assertEqual(res.status_code, 200) self.assertIn(second_job._dependency_id, res.content.decode()) @@ -355,136 +387,146 @@ def test_requeue_all(self): """ Ensure that re-queuing all failed job work properly """ - queue = get_queue('default') - queue_name = 'default' + queue = get_queue("default") + queue_name = "default" queue.enqueue(failing_job) queue.enqueue(failing_job) - worker = create_worker('default') + worker = create_worker("default") worker.work(burst=True) - res = self.client.get(reverse('queue_requeue_all', args=[queue_name, 'failed'])) - self.assertEqual(res.context['total_jobs'], 2) + res = self.client.get(reverse("queue_requeue_all", args=[queue_name, "failed"])) + self.assertEqual(res.context["total_jobs"], 2) # After requeue_all is called, jobs are enqueued - res = self.client.post(reverse('queue_requeue_all', args=[queue_name, 'failed'])) + res = self.client.post(reverse("queue_requeue_all", args=[queue_name, "failed"])) self.assertEqual(len(queue), 2) def 
test_requeue_all_if_deleted_job(self): """ Ensure that re-queuing all failed job work properly """ - queue = get_queue('default') - queue_name = 'default' + queue = get_queue("default") + queue_name = "default" job = queue.enqueue(failing_job) queue.enqueue(failing_job) - worker = create_worker('default') + worker = create_worker("default") worker.work(burst=True) - res = self.client.get(reverse('queue_requeue_all', args=[queue_name, 'failed'])) - self.assertEqual(res.context['total_jobs'], 2) + res = self.client.get(reverse("queue_requeue_all", args=[queue_name, "failed"])) + self.assertEqual(res.context["total_jobs"], 2) job.delete() # After requeue_all is called, jobs are enqueued - res = self.client.post(reverse('queue_requeue_all', args=[queue_name, 'failed'])) + res = self.client.post(reverse("queue_requeue_all", args=[queue_name, "failed"])) self.assertEqual(len(queue), 1) def test_clear_queue_unknown_registry(self): - queue_name = 'django_tasks_scheduler_test' - res = self.client.post(reverse('queue_clear', args=[queue_name, 'unknown']), {'post': 'yes'}) + queue_name = "django_tasks_scheduler_test" + res = self.client.post(reverse("queue_clear", args=[queue_name, "unknown"]), {"post": "yes"}) self.assertEqual(404, res.status_code) def test_clear_queue_enqueued(self): - queue = get_queue('django_tasks_scheduler_test') + queue = get_queue("django_tasks_scheduler_test") job = queue.enqueue(test_job) - self.client.post(reverse('queue_clear', args=[queue.name, 'queued']), {'post': 'yes'}) + self.client.post(reverse("queue_clear", args=[queue.name, "queued"]), {"post": "yes"}) self.assertFalse(JobExecution.exists(job.id, connection=queue.connection)) self.assertNotIn(job.id, queue.job_ids) def test_clear_queue_scheduled(self): - queue = get_queue('django_tasks_scheduler_test') + queue = get_queue("django_tasks_scheduler_test") job = queue.enqueue_at(datetime.now(), test_job) - res = self.client.get(reverse('queue_clear', args=[queue.name, 'scheduled']), follow=True) + res = self.client.get(reverse("queue_clear", args=[queue.name, "scheduled"]), follow=True) self.assertEqual(200, res.status_code) - self.assertEqual(res.context['jobs'], [job, ]) - - res = self.client.post(reverse('queue_clear', args=[queue.name, 'scheduled']), {'post': 'yes'}, follow=True) - assert_message_in_response(res, f'You have successfully cleared the scheduled jobs in queue {queue.name}') + self.assertEqual( + res.context["jobs"], + [ + job, + ], + ) + + res = self.client.post(reverse("queue_clear", args=[queue.name, "scheduled"]), {"post": "yes"}, follow=True) + assert_message_in_response(res, f"You have successfully cleared the scheduled jobs in queue {queue.name}") self.assertEqual(200, res.status_code) self.assertFalse(JobExecution.exists(job.id, connection=queue.connection)) self.assertNotIn(job.id, queue.job_ids) def test_workers_home(self): - res = self.client.get(reverse('workers_home')) - prev_workers = res.context['workers'] - worker1 = create_worker('django_tasks_scheduler_test') + res = self.client.get(reverse("workers_home")) + prev_workers = res.context["workers"] + worker1 = create_worker("django_tasks_scheduler_test") worker1.register_birth() - worker2 = create_worker('test3') + worker2 = create_worker("test3") worker2.register_birth() - res = self.client.get(reverse('workers_home')) - self.assertEqual(res.context['workers'], prev_workers + [worker1, worker2]) + res = self.client.get(reverse("workers_home")) + self.assertEqual(res.context["workers"], prev_workers + [worker1, worker2]) def 
     def test_queue_workers(self):
         """Worker index page should show workers for a specific queue"""
-        queue_name = 'django_tasks_scheduler_test'
+        queue_name = "django_tasks_scheduler_test"
 
-        worker1 = create_worker('django_tasks_scheduler_test')
+        worker1 = create_worker("django_tasks_scheduler_test")
         worker1.register_birth()
-        worker2 = create_worker('test3')
+        worker2 = create_worker("test3")
         worker2.register_birth()
 
-        res = self.client.get(reverse('queue_workers', args=[queue_name]))
-        self.assertEqual(res.context['workers'], [worker1])
+        res = self.client.get(reverse("queue_workers", args=[queue_name]))
+        self.assertEqual(res.context["workers"], [worker1])
 
     def test_worker_details(self):
         """Worker details page should show the worker's information"""
-        worker = create_worker('django_tasks_scheduler_test', name=uuid.uuid4().hex)
+        worker = create_worker("django_tasks_scheduler_test", name=uuid.uuid4().hex)
         worker.register_birth()
 
-        url = reverse('worker_details', args=[worker.name, ])
+        url = reverse(
+            "worker_details",
+            args=[
+                worker.name,
+            ],
+        )
         res = self.client.get(url)
-        self.assertEqual(res.context['worker'], worker)
+        self.assertEqual(res.context["worker"], worker)
 
     def test_worker_details__non_existing_worker(self):
         """Worker details page should return 404 for a non-existing worker"""
-        worker = create_worker('django_tasks_scheduler_test', name='WORKER')
+        worker = create_worker("django_tasks_scheduler_test", name="WORKER")
         worker.register_birth()
 
-        res = self.client.get(reverse('worker_details', args=['bad-worker-name']))
+        res = self.client.get(reverse("worker_details", args=["bad-worker-name"]))
         self.assertEqual(404, res.status_code)
 
     def test_statistics_json_view(self):
         # Override testing SCHEDULER_QUEUES
         queues = {
-            'default': {
-                'DB': 0,
-                'HOST': 'localhost',
-                'PORT': 6379,
+            "default": {
+                "DB": 0,
+                "HOST": "localhost",
+                "PORT": 6379,
             }
         }
-        with patch('scheduler.settings.QUEUES', new_callable=PropertyMock(return_value=queues)):
-            res = self.client.get(reverse('queues_home'))
+        with patch("scheduler.settings.QUEUES", new_callable=PropertyMock(return_value=queues)):
+            res = self.client.get(reverse("queues_home"))
             self.assertEqual(res.status_code, 200)
 
-            res = self.client.get(reverse('queues_home_json'))
+            res = self.client.get(reverse("queues_home_json"))
             self.assertEqual(res.status_code, 200)
 
             # Not staff => redirected to login for the home page, 404 for the stats JSON
             self.user.is_staff = False
             self.user.save()
 
-            res = self.client.get(reverse('queues_home'))
+            res = self.client.get(reverse("queues_home"))
             self.assertEqual(res.status_code, 302)
 
             # 404 code for stats
-            res = self.client.get(reverse('queues_home_json'))
+            res = self.client.get(reverse("queues_home_json"))
             self.assertEqual(res.status_code, 404)
 
     @staticmethod
     def token_validation(token: str) -> bool:
-        return token == 'valid'
+        return token == "valid"
 
     # @patch('scheduler.views.SCHEDULER_CONFIG')
     # def test_statistics_json_view_token(self, configuration):
diff --git a/scheduler/tests/test_worker.py b/scheduler/tests/test_worker.py
index 9337929..4b40bfb 100644
--- a/scheduler/tests/test_worker.py
+++ b/scheduler/tests/test_worker.py
@@ -11,42 +11,42 @@
 class TestWorker(SchedulerBaseCase):
     def test_create_worker__two_workers_same_queue(self):
-        worker1 = create_worker('default', 'django_tasks_scheduler_test')
+        worker1 = create_worker("default", "django_tasks_scheduler_test")
         worker1.register_birth()
-        worker2 = create_worker('default')
+        worker2 = create_worker("default")
         worker2.register_birth()
         hostname = os.uname()[1]
-        self.assertEqual(f'{hostname}-worker.1', worker1.name)
-        self.assertEqual(f'{hostname}-worker.2', worker2.name)
+        self.assertEqual(f"{hostname}-worker.1", worker1.name)
+        self.assertEqual(f"{hostname}-worker.2", worker2.name)
 
     def test_create_worker__worker_with_queues_different_connection(self):
         with self.assertRaises(ValueError):
-            create_worker('default', 'test1')
+            create_worker("default", "test1")
 
     def test_create_worker__with_name(self):
         name = uuid.uuid4().hex
-        worker1 = create_worker('default', name=name)
+        worker1 = create_worker("default", name=name)
         self.assertEqual(name, worker1.name)
 
     def test_create_worker__with_name_containing_slash(self):
-        name = uuid.uuid4().hex[-4:] + '/' + uuid.uuid4().hex[-4:]
-        worker1 = create_worker('default', name=name)
-        self.assertEqual(name.replace('/', '.'), worker1.name)
+        name = uuid.uuid4().hex[-4:] + "/" + uuid.uuid4().hex[-4:]
+        worker1 = create_worker("default", name=name)
+        self.assertEqual(name.replace("/", "."), worker1.name)
 
     def test_create_worker__scheduler_interval(self):
-        prev = settings.SCHEDULER_CONFIG['SCHEDULER_INTERVAL']
-        settings.SCHEDULER_CONFIG['SCHEDULER_INTERVAL'] = 1
-        worker = create_worker('default')
+        prev = settings.SCHEDULER_CONFIG.SCHEDULER_INTERVAL
+        settings.SCHEDULER_CONFIG.SCHEDULER_INTERVAL = 1
+        worker = create_worker("default")
         worker.work(burst=True)
         self.assertEqual(worker.scheduler.interval, 1)
-        settings.SCHEDULER_CONFIG['SCHEDULER_INTERVAL'] = prev
+        settings.SCHEDULER_CONFIG.SCHEDULER_INTERVAL = prev
 
     def test_get_worker_with_custom_job_class(self):
         # Test with string representation of job_class
-        worker = create_worker('default', job_class='scheduler.rq_classes.JobExecution')
+        worker = create_worker("default", job_class="scheduler.rq_classes.JobExecution")
         self.assertTrue(issubclass(worker.job_class, Job))
         self.assertTrue(issubclass(worker.job_class, JobExecution))
 
     def test_get_worker_without_custom_job_class(self):
-        worker = create_worker('default')
+        worker = create_worker("default")
         self.assertTrue(issubclass(worker.job_class, JobExecution))
diff --git a/scheduler/tests/testtools.py b/scheduler/tests/testtools.py
index 9a03c81..561327f 100644
--- a/scheduler/tests/testtools.py
+++ b/scheduler/tests/testtools.py
@@ -26,27 +26,38 @@ def sequence_gen():
 seq = sequence_gen()
 
 
-def task_factory(cls, callable_name: str = 'scheduler.tests.jobs.test_job', instance_only=False, **kwargs):
+def task_factory(cls, callable_name: str = "scheduler.tests.jobs.test_job", instance_only=False, **kwargs):
     values = dict(
-        name='Scheduled Job %d' % next(seq),
+        name="Scheduled Job %d" % next(seq),
         job_id=None,
         queue=list(settings.QUEUES.keys())[0],
         callable=callable_name,
         enabled=True,
-        timeout=None)
+        timeout=None,
+    )
     if cls == ScheduledTask:
-        values.update(dict(
-            result_ttl=None,
-            scheduled_time=timezone.now() + timedelta(days=1), ))
+        values.update(
+            dict(
+                result_ttl=None,
+                scheduled_time=timezone.now() + timedelta(days=1),
+            )
+        )
     elif cls == RepeatableTask:
-        values.update(dict(
-            result_ttl=None,
-            interval=1,
-            interval_unit='hours',
-            repeat=None,
-            scheduled_time=timezone.now() + timedelta(days=1), ))
+        values.update(
+            dict(
+                result_ttl=None,
+                interval=1,
+                interval_unit="hours",
+                repeat=None,
+                scheduled_time=timezone.now() + timedelta(days=1),
+            )
+        )
     elif cls == CronTask:
-        values.update(dict(cron_string="0 0 * * *", ))
+        values.update(
+            dict(
+                cron_string="0 0 * * *",
+            )
+        )
     values.update(kwargs)
     if instance_only:
         instance = cls(**values)
@@ -56,18 +67,18 @@ def task_factory(cls, callable_name: str = 'scheduler.tests.jobs.test_job', inst
 
 
 def taskarg_factory(cls, **kwargs):
-    content_object = kwargs.pop('content_object', None)
+    content_object = kwargs.pop("content_object", None)
     if content_object is None:
         content_object = task_factory(ScheduledTask)
     values = dict(
-        arg_type='str',
-        val='',
+        arg_type="str",
+        val="",
         object_id=content_object.id,
         content_type=ContentType.objects.get_for_model(content_object),
         content_object=content_object,
     )
     if cls == TaskKwarg:
-        values['key'] = 'key%d' % next(seq),
+        values["key"] = ("key%d" % next(seq),)
     values.update(kwargs)
     instance = cls.objects.create(**values)
     return instance
@@ -81,9 +92,9 @@ def _get_job_from_scheduled_registry(django_task: BaseTask):
 def _get_executions(django_job: BaseTask):
     job_ids = django_job.rqueue.get_all_job_ids()
-    return list(filter(
-        lambda j: j.is_execution_of(django_job),
-        map(lambda jid: django_job.rqueue.fetch_job(jid), job_ids)))
+    return list(
+        filter(lambda j: j.is_execution_of(django_job), map(lambda jid: django_job.rqueue.fetch_job(jid), job_ids))
+    )
 
 
 class SchedulerBaseCase(TestCase):
@@ -91,23 +102,23 @@ class SchedulerBaseCase(TestCase):
     def setUpTestData(cls) -> None:
         super().setUpTestData()
         try:
-            User.objects.create_superuser('admin', 'admin@a.com', 'admin')
+            User.objects.create_superuser("admin", "admin@a.com", "admin")
         except Exception:
             pass
         cls.client = Client()
 
     def setUp(self) -> None:
         super(SchedulerBaseCase, self).setUp()
-        queue = get_queue('default')
+        queue = get_queue("default")
         queue.empty()
 
     def tearDown(self) -> None:
         super(SchedulerBaseCase, self).tearDown()
-        queue = get_queue('default')
+        queue = get_queue("default")
         queue.empty()
 
     @classmethod
     def setUpClass(cls):
         super(SchedulerBaseCase, cls).setUpClass()
-        queue = get_queue('default')
+        queue = get_queue("default")
         queue.connection.flushall()
diff --git a/scheduler/tools.py b/scheduler/tools.py
index f7ce1a8..476fff0 100644
--- a/scheduler/tools.py
+++ b/scheduler/tools.py
@@ -8,12 +8,12 @@
 from scheduler.queues import get_queues, logger, get_queue
 from scheduler.rq_classes import DjangoWorker, MODEL_NAMES
-from scheduler.settings import get_config
+from scheduler.settings import SCHEDULER_CONFIG, Broker
 
 
 def callable_func(callable_str: str):
-    path = callable_str.split('.')
-    module = importlib.import_module('.'.join(path[:-1]))
+    path = callable_str.split(".")
+    module = importlib.import_module(".".join(path[:-1]))
     func = getattr(module, path[-1])
     if callable(func) is False:
         raise TypeError("'{}' is not callable".format(callable_str))
@@ -30,19 +30,18 @@ def get_next_cron_time(cron_string) -> timezone.datetime:
 def get_scheduled_task(task_model: str, task_id: int):
     if task_model not in MODEL_NAMES:
-        raise ValueError(f'Job Model {task_model} does not exist, choices are {MODEL_NAMES}')
-    model = apps.get_model(app_label='scheduler', model_name=task_model)
+        raise ValueError(f"Job Model {task_model} does not exist, choices are {MODEL_NAMES}")
+    model = apps.get_model(app_label="scheduler", model_name=task_model)
     task = model.objects.filter(id=task_id).first()
     if task is None:
-        raise ValueError(f'Job {task_model}:{task_id} does not exit')
+        raise ValueError(f"Job {task_model}:{task_id} does not exist")
     return task
 
 
 def run_task(task_model: str, task_id: int):
-    """Run a scheduled job
-    """
+    """Run a scheduled job"""
     scheduled_task = get_scheduled_task(task_model, task_id)
-    logger.debug(f'Running task {str(scheduled_task)}')
+    logger.debug(f"Running task {str(scheduled_task)}")
     args = scheduled_task.parse_args()
     kwargs = scheduled_task.parse_kwargs()
     res = scheduled_task.callable_func()(*args, **kwargs)
@@ -52,10 +51,10 @@ def run_task(task_model: str, task_id: int):
 def _calc_worker_name(existing_worker_names):
     hostname = os.uname()[1]
     c = 1
-    worker_name = f'{hostname}-worker.{c}'
+    worker_name = f"{hostname}-worker.{c}"
     while worker_name in existing_worker_names:
         c += 1
-        worker_name = f'{hostname}-worker.{c}'
+        worker_name = f"{hostname}-worker.{c}"
     return worker_name
 
 
@@ -67,17 +66,17 @@ def create_worker(*queue_names, **kwargs):
     queues = get_queues(*queue_names)
     existing_workers = DjangoWorker.all(connection=queues[0].connection)
    existing_worker_names = set(map(lambda w: w.name, existing_workers))
-    kwargs['fork_job_execution'] = not get_config('FAKEREDIS')
-    if kwargs.get('name', None) is None:
-        kwargs['name'] = _calc_worker_name(existing_worker_names)
+    kwargs["fork_job_execution"] = SCHEDULER_CONFIG.BROKER != Broker.FAKEREDIS
+    if kwargs.get("name", None) is None:
+        kwargs["name"] = _calc_worker_name(existing_worker_names)
 
-    kwargs['name'] = kwargs['name'].replace('/', '.')
+    kwargs["name"] = kwargs["name"].replace("/", ".")
 
     # Handle job_class if provided
-    if 'job_class' not in kwargs or kwargs["job_class"] is None:
-        kwargs['job_class'] = 'scheduler.rq_classes.JobExecution'
+    if "job_class" not in kwargs or kwargs["job_class"] is None:
+        kwargs["job_class"] = "scheduler.rq_classes.JobExecution"
     try:
-        kwargs['job_class'] = import_string(kwargs['job_class'])
+        kwargs["job_class"] = import_string(kwargs["job_class"])
     except ImportError:
         raise ImportError(f"Could not import job class {kwargs['job_class']}")
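The job-class handling in `create_worker` above accepts a dotted import path (or nothing) and resolves it with Django's `import_string`, failing fast with an `ImportError`. A brief usage sketch, assuming a configured Django project with this changeset installed; the queue name `default` comes from the test settings:

```python
# Sketch of the job_class handling in create_worker (requires Django settings).
from scheduler.tools import create_worker

# Explicit dotted path, resolved via django.utils.module_loading.import_string:
worker = create_worker("default", job_class="scheduler.rq_classes.JobExecution")

# Omitting job_class (or passing None) falls back to JobExecution:
worker = create_worker("default")

# An unimportable path fails fast:
# create_worker("default", job_class="no.such.Class")  # raises ImportError
```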
diff --git a/scheduler/urls.py b/scheduler/urls.py
index 6803a34..4dfa9b3 100644
--- a/scheduler/urls.py
+++ b/scheduler/urls.py
@@ -3,19 +3,19 @@ from . import views
 
 urlpatterns = [
-    path('queues/', views.stats, name='queues_home'),
-    path('queues/stats.json', views.stats_json, name='queues_home_json'),
-    path('queues/<str:queue_name>/workers/', views.queue_workers, name='queue_workers'),
-    path('queues/<str:queue_name>/<str:registry_name>/jobs', views.jobs_view, name='queue_registry_jobs'),
-    path('queues/<str:queue_name>/<str:registry_name>/empty/', views.clear_queue_registry, name='queue_clear'),
-    path('queues/<str:queue_name>/<str:registry_name>/requeue-all/', views.requeue_all, name='queue_requeue_all'),
-    path('queues/<str:queue_name>/confirm-action/', views.confirm_action, name='queue_confirm_action'),
-    path('queues/<str:queue_name>/actions/', views.actions, name='queue_actions'),
+    path("queues/", views.stats, name="queues_home"),
+    path("queues/stats.json", views.stats_json, name="queues_home_json"),
+    path("queues/<str:queue_name>/workers/", views.queue_workers, name="queue_workers"),
+    path("queues/<str:queue_name>/<str:registry_name>/jobs", views.jobs_view, name="queue_registry_jobs"),
+    path("queues/<str:queue_name>/<str:registry_name>/empty/", views.clear_queue_registry, name="queue_clear"),
+    path("queues/<str:queue_name>/<str:registry_name>/requeue-all/", views.requeue_all, name="queue_requeue_all"),
+    path("queues/<str:queue_name>/confirm-action/", views.confirm_action, name="queue_confirm_action"),
+    path("queues/<str:queue_name>/actions/", views.actions, name="queue_actions"),
 ]
 
 urlpatterns += [
-    path('workers/', views.workers, name='workers_home'),
-    path('workers/<str:name>/', views.worker_details, name='worker_details'),
-    path('jobs/<str:job_id>/', views.job_detail, name='job_details'),
-    path('jobs/<str:job_id>/<str:action>/', views.job_action, name='queue_job_action'),
+    path("workers/", views.workers, name="workers_home"),
+    path("workers/<str:name>/", views.worker_details, name="worker_details"),
+    path("jobs/<str:job_id>/", views.job_detail, name="job_details"),
+    path("jobs/<str:job_id>/<str:action>/", views.job_action, name="queue_job_action"),
 ]
diff --git a/scheduler/views.py b/scheduler/views.py
index 5b9e9a5..47e3dd3 100644
--- a/scheduler/views.py
+++ b/scheduler/views.py
@@ -42,14 +42,13 @@ def get_worker_executions(worker):
 @staff_member_required
 def stats(request):
     context_data = {**admin.site.each_context(request), **get_statistics(run_maintenance_tasks=True)}
-    return render(request, 'admin/scheduler/stats.html', context_data)
+    return render(request, "admin/scheduler/stats.html", context_data)
 
 
 def stats_json(request):
-    auth_token = request.headers.get('Authorization')
-    token_validation_func = SCHEDULER_CONFIG.get('TOKEN_VALIDATION_METHOD')
-    if (request.user.is_staff or
-            (token_validation_func and auth_token and token_validation_func(auth_token))):
+    auth_token = request.headers.get("Authorization")
+    token_validation_func = SCHEDULER_CONFIG.TOKEN_VALIDATION_METHOD
+    if request.user.is_staff or (token_validation_func and auth_token and token_validation_func(auth_token)):
         return JsonResponse(get_statistics())
 
     return HttpResponseNotFound()
@@ -57,6 +56,7 @@ def stats_json(request):
 
 def get_statistics(run_maintenance_tasks=False):
     from scheduler.settings import QUEUES
+
     queues = []
     if run_maintenance_tasks:
         workers = get_all_workers()
@@ -77,15 +77,15 @@ def get_statistics(run_maintenance_tasks=False):
 
             # Ideally rq should support Queue.oldest_job
             last_job_id = queue.last_job_id()
-            last_job = queue.fetch_job(last_job_id.decode('utf-8')) if last_job_id else None
+            last_job = queue.fetch_job(last_job_id.decode("utf-8")) if last_job_id else None
             if last_job and last_job.enqueued_at:
-                oldest_job_timestamp = last_job.enqueued_at.strftime('%Y-%m-%d, %H:%M:%S')
+                oldest_job_timestamp = last_job.enqueued_at.strftime("%Y-%m-%d, %H:%M:%S")
             else:
                 oldest_job_timestamp = "-"
 
             # parser_class and connection_pool are not needed and not JSON serializable
-            connection_kwargs.pop('parser_class', None)
-            connection_kwargs.pop('connection_pool', None)
+            connection_kwargs.pop("parser_class", None)
+            connection_kwargs.pop("connection_pool", None)
 
             queue_data = dict(
                 name=queue.name,
@@ -103,14 +103,14 @@ def get_statistics(run_maintenance_tasks=False):
             )
             queues.append(queue_data)
         except redis.ConnectionError as e:
-            logger.error(f'Could not connect for queue {queue_name}: {e}')
+            logger.error(f"Could not connect for queue {queue_name}: {e}")
             continue
 
-    return {'queues': queues}
+    return {"queues": queues}
 
 
 def _get_registry_job_list(queue, registry, page):
-    items_per_page = SCHEDULER_CONFIG['EXECUTIONS_IN_PAGE']
+    items_per_page = SCHEDULER_CONFIG.EXECUTIONS_IN_PAGE
     num_jobs = len(registry)
     job_list = []
@@ -139,21 +139,21 @@ def jobs_view(request, queue_name: str, registry_name: str):
     if registry is None:
         return HttpResponseNotFound()
     title = registry_name.capitalize()
-    page = int(request.GET.get('page', 1))
+    page = int(request.GET.get("page", 1))
     job_list, num_jobs, page_range = _get_registry_job_list(queue, registry, page)
     context_data = {
         **admin.site.each_context(request),
-        'queue': queue,
-        'registry_name': registry_name,
-        'registry': registry,
-        'jobs': job_list,
-        'num_jobs': num_jobs,
-        'page': page,
-        'page_range': page_range,
-        'job_status': title,
+        "queue": queue,
+        "registry_name": registry_name,
+        "registry": registry,
+        "jobs": job_list,
+        "num_jobs": num_jobs,
+        "page": page,
+        "page_range": page_range,
+        "job_status": title,
     }
-    return render(request, 'admin/scheduler/jobs.html', context_data)
+    return render(request, "admin/scheduler/jobs.html", context_data)
 
 
 @never_cache
@@ -167,10 +167,10 @@ def queue_workers(request, queue_name):
 
     context_data = {
         **admin.site.each_context(request),
-        'queue': queue,
-        'workers': worker_list,
+        "queue": queue,
+        "workers": worker_list,
     }
-    return render(request, 'admin/scheduler/queue_workers.html', context_data)
+    return render(request, "admin/scheduler/queue_workers.html", context_data)
 
 
 @never_cache
@@ -181,9 +181,9 @@ def workers(request):
 
     context_data = {
         **admin.site.each_context(request),
-        'workers': worker_list,
+        "workers": worker_list,
     }
-    return render(request, 'admin/scheduler/workers.html', context_data)
+    return render(request, "admin/scheduler/workers.html", context_data)
 
 
 @never_cache
@@ -199,22 +199,22 @@ def worker_details(request, name):
     worker.total_working_time = worker.total_working_time / 1000
 
     execution_list = get_worker_executions(worker)
-    paginator = Paginator(execution_list, SCHEDULER_CONFIG['EXECUTIONS_IN_PAGE'])
-    page_number = request.GET.get('p', 1)
+    paginator = Paginator(execution_list, SCHEDULER_CONFIG.EXECUTIONS_IN_PAGE)
+    page_number = request.GET.get("p", 1)
     page_obj = paginator.get_page(page_number)
     page_range = paginator.get_elided_page_range(page_obj.number)
     context_data = {
         **admin.site.each_context(request),
-        'queue': queue,
-        'worker': worker,
-        'queue_names': ', '.join(worker.queue_names()),
-        'job': worker.get_current_job(),
-        'total_working_time': worker.total_working_time * 1000,
-        'executions': page_obj,
-        'page_range': page_range,
-        'page_var': 'p',
+        "queue": queue,
+        "worker": worker,
+        "queue_names": ", ".join(worker.queue_names()),
+        "job": worker.get_current_job(),
+        "total_working_time": worker.total_working_time * 1000,
+        "executions": page_obj,
+        "page_range": page_range,
+        "page_var": "p",
     }
-    return render(request, 'admin/scheduler/worker_details.html', context_data)
+    return render(request, "admin/scheduler/worker_details.html", context_data)
 
 
 def _find_job(job_id: str) -> Tuple[Optional[DjangoQueue], Optional[JobExecution]]:
@@ -236,7 +236,7 @@ def _find_job(job_id: str) -> Tuple[Optional[DjangoQueue], Optional[JobExecution
 def job_detail(request, job_id: str):
     queue, job = _find_job(job_id)
     if job is None:
-        return HttpResponseBadRequest(f'Job {escape(job_id)} does not exist, maybe its TTL has passed')
+        return HttpResponseBadRequest(f"Job {escape(job_id)} does not exist, maybe its TTL has passed")
     try:
         job.func_name
         data_is_valid = True
@@ -250,13 +250,13 @@ def job_detail(request, job_id: str):
 
     context_data = {
         **admin.site.each_context(request),
-        'job': job,
-        'dependency_id': job._dependency_id,
-        'queue': queue,
-        'data_is_valid': data_is_valid,
-        'exc_info': exc_info,
+        "job": job,
+        "dependency_id": job._dependency_id,
+        "queue": queue,
+        "data_is_valid": data_is_valid,
+        "exc_info": exc_info,
     }
-    return render(request, 'admin/scheduler/job_detail.html', context_data)
+    return render(request, "admin/scheduler/job_detail.html", context_data)
 
 
 @never_cache
@@ -267,8 +267,8 @@ def clear_queue_registry(request, queue_name, registry_name):
     if registry is None:
         return HttpResponseNotFound()
 
-    next_url = request.META.get('HTTP_REFERER') or reverse('queue_registry_jobs', args=[queue_name, registry_name])
-    if request.method == 'POST':
+    next_url = request.META.get("HTTP_REFERER") or reverse("queue_registry_jobs", args=[queue_name, registry_name])
+    if request.method == "POST":
         try:
             if registry is queue:
                 queue.empty()
@@ -276,23 +276,32 @@ def clear_queue_registry(request, queue_name, registry_name):
                 job_ids = registry.get_job_ids()
                 for job_id in job_ids:
                     registry.remove(job_id, delete_job=True)
-            messages.info(request, f'You have successfully cleared the {registry_name} jobs in queue {queue.name}')
+            messages.info(request, f"You have successfully cleared the {registry_name} jobs in queue {queue.name}")
         except ResponseError as e:
-            messages.error(request, f'error: {e}', )
+            messages.error(
+                request,
+                f"error: {e}",
+            )
             raise e
-        return redirect('queue_registry_jobs', queue_name, registry_name)
+        return redirect("queue_registry_jobs", queue_name, registry_name)
     job_ids = registry.get_job_ids()
     job_list = JobExecution.fetch_many(job_ids, connection=queue.connection)
     context_data = {
         **admin.site.each_context(request),
-        'queue': queue,
-        'total_jobs': len(registry),
-        'action': 'empty',
-        'jobs': job_list,
-        'next_url': next_url,
-        'action_url': reverse('queue_clear', args=[queue_name, registry_name, ])
+        "queue": queue,
+        "total_jobs": len(registry),
+        "action": "empty",
+        "jobs": job_list,
+        "next_url": next_url,
+        "action_url": reverse(
+            "queue_clear",
+            args=[
+                queue_name,
+                registry_name,
+            ],
+        ),
     }
-    return render(request, 'admin/scheduler/confirm_action.html', context_data)
+    return render(request, "admin/scheduler/confirm_action.html", context_data)
 
 
 @never_cache
@@ -302,9 +311,9 @@ def requeue_all(request, queue_name, registry_name):
     registry = queue.get_registry(registry_name)
     if registry is None:
         return HttpResponseNotFound()
-    next_url = request.META.get('HTTP_REFERER') or reverse('queue_registry_jobs', args=[queue_name, registry_name])
+    next_url = request.META.get("HTTP_REFERER") or reverse("queue_registry_jobs", args=[queue_name, registry_name])
     job_ids = registry.get_job_ids()
-    if request.method == 'POST':
+    if request.method == "POST":
         count = 0
         # Confirmation received
         jobs = JobExecution.fetch_many(job_ids, connection=queue.connection)
@@ -317,47 +326,52 @@ def requeue_all(request, queue_name, registry_name):
             except Exception:
                 pass
 
-        messages.info(request, f'You have successfully re-queued {count} jobs!')
-        return redirect('queue_registry_jobs', queue_name, registry_name)
+        messages.info(request, f"You have successfully re-queued {count} jobs!")
+        return redirect("queue_registry_jobs", queue_name, registry_name)
     context_data = {
         **admin.site.each_context(request),
-        'queue': queue,
-        'total_jobs': len(queue.failed_job_registry),
-        'action': 'requeue',
-        'jobs': [queue.fetch_job(job_id) for job_id in job_ids],
-        'next_url': next_url,
-        'action_url': reverse('queue_requeue_all', args=[queue_name, registry_name])
+        "queue": queue,
+        "total_jobs": len(queue.failed_job_registry),
+        "action": "requeue",
+        "jobs": [queue.fetch_job(job_id) for job_id in job_ids],
+        "next_url": next_url,
+        "action_url": reverse("queue_requeue_all", args=[queue_name, registry_name]),
     }
-    return render(request, 'admin/scheduler/confirm_action.html', context_data)
+    return render(request, "admin/scheduler/confirm_action.html", context_data)
 
 
 @never_cache
 @staff_member_required
 def confirm_action(request, queue_name):
     queue = get_queue(queue_name)
-    next_url = request.META.get('HTTP_REFERER') or reverse('queue_registry_jobs', args=[queue_name, 'queued'])
+    next_url = request.META.get("HTTP_REFERER") or reverse("queue_registry_jobs", args=[queue_name, "queued"])
     try:
         resolve(next_url)
     except Exception:
-        messages.warning(request, 'Bad followup URL')
-        next_url = reverse('queue_registry_jobs', args=[queue_name, 'queued'])
+        messages.warning(request, "Bad followup URL")
+        next_url = reverse("queue_registry_jobs", args=[queue_name, "queued"])
 
-    if request.method == 'POST' and request.POST.get('action', False):
+    if request.method == "POST" and request.POST.get("action", False):
         # confirm action
-        if request.POST.get('_selected_action', False):
-            job_id_list = request.POST.getlist('_selected_action')
+        if request.POST.get("_selected_action", False):
+            job_id_list = request.POST.getlist("_selected_action")
             context_data = {
                 **admin.site.each_context(request),
-                'action': request.POST['action'],
-                'jobs': [queue.fetch_job(job_id) for job_id in job_id_list],
-                'total_jobs': len(job_id_list),
-                'queue': queue,
-                'next_url': next_url,
-                'action_url': reverse('queue_actions', args=[queue_name, ]),
+                "action": request.POST["action"],
+                "jobs": [queue.fetch_job(job_id) for job_id in job_id_list],
+                "total_jobs": len(job_id_list),
+                "queue": queue,
+                "next_url": next_url,
+                "action_url": reverse(
+                    "queue_actions",
+                    args=[
+                        queue_name,
+                    ],
+                ),
             }
-            return render(request, 'admin/scheduler/confirm_action.html', context_data)
+            return render(request, "admin/scheduler/confirm_action.html", context_data)
 
     return redirect(next_url)
 
 
@@ -366,19 +380,19 @@ def confirm_action(request, queue_name):
 @staff_member_required
 def actions(request, queue_name):
     queue = get_queue(queue_name)
-    next_url = request.POST.get('next_url') or reverse('queue_registry_jobs', args=[queue_name, 'queued'])
+    next_url = request.POST.get("next_url") or reverse("queue_registry_jobs", args=[queue_name, "queued"])
     try:
         resolve(next_url)
     except Exception:
-        messages.warning(request, 'Bad followup URL')
-        next_url = reverse('queue_registry_jobs', args=[queue_name, 'queued'])
+        messages.warning(request, "Bad followup URL")
+        next_url = reverse("queue_registry_jobs", args=[queue_name, "queued"])
 
-    action = request.POST.get('action', False)
-    job_ids = request.POST.get('job_ids', False)
-    if request.method != 'POST' or not action or not job_ids:
+    action = request.POST.get("action", False)
+    job_ids = request.POST.get("job_ids", False)
request.POST.get("job_ids", False) + if request.method != "POST" or not action or not job_ids: return redirect(next_url) - job_ids = request.POST.getlist('job_ids') - if action == 'delete': + job_ids = request.POST.getlist("job_ids") + if action == "delete": jobs = JobExecution.fetch_many(job_ids, connection=queue.connection) for job in jobs: if job is None: @@ -386,15 +400,15 @@ def actions(request, queue_name): # Remove job id from queue and delete the actual job queue.remove_job_id(job.id) job.delete() - messages.info(request, f'You have successfully deleted {len(job_ids)} jobs!') - elif action == 'requeue': + messages.info(request, f"You have successfully deleted {len(job_ids)} jobs!") + elif action == "requeue": jobs = JobExecution.fetch_many(job_ids, connection=queue.connection) for job in jobs: if job is None: continue job.requeue() - messages.info(request, f'You have successfully re-queued {len(job_ids)} jobs!') - elif action == 'stop': + messages.info(request, f"You have successfully re-queued {len(job_ids)} jobs!") + elif action == "stop": cancelled_jobs = 0 jobs = JobExecution.fetch_many(job_ids, connection=queue.connection) for job in jobs: @@ -405,13 +419,13 @@ def actions(request, queue_name): job.cancel() cancelled_jobs += 1 except Exception as e: - logger.warning(f'Could not stop job: {e}') + logger.warning(f"Could not stop job: {e}") pass - messages.info(request, f'You have successfully stopped {cancelled_jobs} jobs!') + messages.info(request, f"You have successfully stopped {cancelled_jobs} jobs!") return redirect(next_url) -SUPPORTED_JOB_ACTIONS = {'requeue', 'delete', 'enqueue', 'cancel'} +SUPPORTED_JOB_ACTIONS = {"requeue", "delete", "enqueue", "cancel"} @never_cache @@ -419,40 +433,40 @@ def actions(request, queue_name): def job_action(request, job_id: str, action: str): queue, job = _find_job(job_id) if job is None: - return HttpResponseBadRequest(f'Job {escape(job_id)} does not exist, maybe its TTL has passed') + return HttpResponseBadRequest(f"Job {escape(job_id)} does not exist, maybe its TTL has passed") if action not in SUPPORTED_JOB_ACTIONS: return HttpResponseNotFound() - if request.method != 'POST': + if request.method != "POST": context_data = { **admin.site.each_context(request), - 'job': job, - 'queue': queue, - 'action': action, + "job": job, + "queue": queue, + "action": action, } - return render(request, 'admin/scheduler/single_job_action.html', context_data) + return render(request, "admin/scheduler/single_job_action.html", context_data) try: - if action == 'requeue': + if action == "requeue": job.requeue() - messages.info(request, f'You have successfully re-queued {job.id}') - return redirect('job_details', job_id) - elif action == 'delete': + messages.info(request, f"You have successfully re-queued {job.id}") + return redirect("job_details", job_id) + elif action == "delete": # Remove job id from queue and delete the actual job queue.remove_job_id(job.id) job.delete() - messages.info(request, 'You have successfully deleted %s' % job.id) - return redirect('queue_registry_jobs', queue.name, 'queued') - elif action == 'enqueue': + messages.info(request, "You have successfully deleted %s" % job.id) + return redirect("queue_registry_jobs", queue.name, "queued") + elif action == "enqueue": job.delete(remove_from_queue=False) queue._enqueue_job(job) - messages.info(request, 'You have successfully enqueued %s' % job.id) - return redirect('job_details', job_id) - elif action == 'cancel': + messages.info(request, "You have successfully enqueued %s" % job.id) + 
return redirect("job_details", job_id) + elif action == "cancel": job.cancel() - messages.info(request, 'You have successfully enqueued %s' % job.id) - return redirect('job_details', job_id) + messages.info(request, "You have successfully enqueued %s" % job.id) + return redirect("job_details", job_id) except InvalidJobOperation as e: - logger.warning(f'Could not perform action: {e}') - messages.warning(request, f'Could not perform action: {e}') - return redirect('job_details', job_id) + logger.warning(f"Could not perform action: {e}") + messages.warning(request, f"Could not perform action: {e}") + return redirect("job_details", job_id) diff --git a/testproject/testproject/settings.py b/testproject/testproject/settings.py index 0216248..1ab283c 100644 --- a/testproject/testproject/settings.py +++ b/testproject/testproject/settings.py @@ -9,7 +9,7 @@ # See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! -SECRET_KEY = 'h0_r$4#4@hgdxy_r0*n8+$(wf0&ie9&4-=(d394n!bo=9rt+85' +SECRET_KEY = "h0_r$4#4@hgdxy_r0*n8+$(wf0&ie9&4-=(d394n!bo=9rt+85" # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True @@ -19,63 +19,61 @@ # Application definition INSTALLED_APPS = [ - 'django.contrib.admin', - 'django.contrib.auth', - 'django.contrib.contenttypes', - 'django.contrib.sessions', - 'django.contrib.messages', - 'django.contrib.staticfiles', - 'scheduler', + "django.contrib.admin", + "django.contrib.auth", + "django.contrib.contenttypes", + "django.contrib.sessions", + "django.contrib.messages", + "django.contrib.staticfiles", + "scheduler", ] MIDDLEWARE = [ - 'django.middleware.security.SecurityMiddleware', - 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.middleware.common.CommonMiddleware', - 'django.middleware.csrf.CsrfViewMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', - 'django.middleware.clickjacking.XFrameOptionsMiddleware', + "django.middleware.security.SecurityMiddleware", + "django.contrib.sessions.middleware.SessionMiddleware", + "django.middleware.common.CommonMiddleware", + "django.middleware.csrf.CsrfViewMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + "django.middleware.clickjacking.XFrameOptionsMiddleware", ] -ROOT_URLCONF = 'testproject.urls' -if django.VERSION > (4, 0): - CACHES = { - 'default': { - 'BACKEND': 'django.core.cache.backends.redis.RedisCache', - 'LOCATION': [ - 'redis://127.0.0.1:6379', # leader - ], - 'OPTIONS': { - 'connection_class': FakeConnection - } - } +ROOT_URLCONF = "testproject.urls" + +CACHES = { + "default": { + "BACKEND": "django.core.cache.backends.redis.RedisCache", + "LOCATION": [ + "redis://127.0.0.1:6379", # leader + ], + "OPTIONS": {"connection_class": FakeConnection}, } +} TEMPLATES = [ { - 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'DIRS': [], - 'APP_DIRS': True, - 'OPTIONS': { - 'context_processors': [ - 'django.template.context_processors.debug', - 'django.template.context_processors.request', - 'django.contrib.auth.context_processors.auth', - 'django.contrib.messages.context_processors.messages', + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [], + "APP_DIRS": True, + "OPTIONS": { + "context_processors": [ + "django.template.context_processors.debug", + "django.template.context_processors.request", + 
"django.contrib.auth.context_processors.auth", + "django.contrib.messages.context_processors.messages", ], }, }, ] -WSGI_APPLICATION = 'testproject.wsgi.application' +WSGI_APPLICATION = "testproject.wsgi.application" # Database # https://docs.djangoproject.com/en/1.9/ref/settings/#databases DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.sqlite3', - 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), + "default": { + "ENGINE": "django.db.backends.sqlite3", + "NAME": os.path.join(BASE_DIR, "db.sqlite3"), } } @@ -84,25 +82,25 @@ AUTH_PASSWORD_VALIDATORS = [ { - 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', + "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator", }, { - 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', + "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator", }, { - 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', + "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator", }, { - 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', + "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator", }, ] # Internationalization # https://docs.djangoproject.com/en/1.9/topics/i18n/ -LANGUAGE_CODE = 'en-us' +LANGUAGE_CODE = "en-us" -TIME_ZONE = 'UTC' +TIME_ZONE = "UTC" USE_I18N = True @@ -113,53 +111,50 @@ # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.9/howto/static-files/ -STATIC_URL = '/static/' +BROKER_PORT = os.getenv("BROKER_PORT", "6379") +STATIC_URL = "/static/" SCHEDULER_QUEUES = { - 'default': { - 'URL': 'redis://localhost:6379/0', + "default": { + "URL": f"redis://localhost:${BROKER_PORT}/0", }, - 'low': { - 'URL': 'redis://localhost:6379/0', + "low": { + "URL": f"redis://localhost:${BROKER_PORT}/0", }, - 'high': { - 'URL': 'redis://localhost:6379/1', + "high": { + "URL": f"redis://localhost:${BROKER_PORT}/1", }, - 'medium': { - 'URL': 'redis://localhost:6379/1', + "medium": { + "URL": f"redis://localhost:${BROKER_PORT}/1", }, - 'another': { - 'URL': 'redis://localhost:6379/1', + "another": { + "URL": f"redis://localhost:${BROKER_PORT}/1", }, } -DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField' +DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" LOGGING = { - 'version': 1, - 'disable_existing_loggers': False, - 'formatters': { - 'verbose': { - 'format': '{levelname} {asctime} {module} {process:d} {thread:d} {message}', - 'style': '{', + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "verbose": { + "format": "{levelname} {asctime} {module} {process:d} {thread:d} {message}", + "style": "{", }, - 'simple': { - 'format': '%(asctime)s %(levelname)s %(name)s.%(funcName)s:%(lineno)s- %(message)s', + "simple": { + "format": "%(asctime)s %(levelname)s %(name)s.%(funcName)s:%(lineno)s- %(message)s", }, }, - 'handlers': { - 'console': { - 'level': 'INFO', - 'class': 'logging.StreamHandler', - 'formatter': 'simple' - }, + "handlers": { + "console": {"level": "INFO", "class": "logging.StreamHandler", "formatter": "simple"}, }, - 'root': { - 'handlers': ['console'], - 'level': 'INFO', + "root": { + "handlers": ["console"], + "level": "INFO", }, - 'loggers': { - 'scheduler': { - 'handlers': ['console'], - 'level': 'INFO', + "loggers": { + "scheduler": { + "handlers": ["console"], + "level": "INFO", }, }, } diff --git a/testproject/testproject/urls.py b/testproject/testproject/urls.py index 301116d..42b0d4f 100644 --- 
diff --git a/testproject/testproject/urls.py b/testproject/testproject/urls.py
index 301116d..42b0d4f 100644
--- a/testproject/testproject/urls.py
+++ b/testproject/testproject/urls.py
@@ -13,13 +13,17 @@
     1. Import the include() function: from django.conf.urls import url, include
     2. Add a URL to urlpatterns:  url(r'^blog/', include('blog.urls'))
 """
+
 from django.contrib import admin
 from django.urls import path, include
 
 from . import views
 
 urlpatterns = [
-    path('admin/', admin.site.urls),
-    path('scheduler/', include('scheduler.urls')),
-    path('test-view/', views.my_view, ),
+    path("admin/", admin.site.urls),
+    path("scheduler/", include("scheduler.urls")),
+    path(
+        "test-view/",
+        views.my_view,
+    ),
 ]
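To close the loop on the URL patterns reconstructed in `scheduler/urls.py` above, a small sanity-check sketch tying the route names to the `reverse()` calls used throughout the tests (assumes `scheduler.urls` is mounted under `/scheduler/` as in `testproject/urls.py`):

```python
from django.urls import reverse

assert reverse("queue_registry_jobs", args=["default", "failed"]) == "/scheduler/queues/default/failed/jobs"
assert reverse("worker_details", args=["my-worker"]) == "/scheduler/workers/my-worker/"
assert reverse("job_details", args=["some-job-id"]) == "/scheduler/jobs/some-job-id/"
```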