diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 00000000..5a026359 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,141 @@ +name: Tests + +on: + push: + branches: + - master + pull_request: + branches: + - master + +jobs: + # Unit tests - runs on every push/PR + # Excludes Google Cloud tests + unit-tests: + name: Unit Tests (Python ${{ matrix.python-version }}) + runs-on: ubuntu-latest + timeout-minutes: 30 + strategy: + fail-fast: false + matrix: + python-version: + - "3.12" + - "3.13" + - "3.14" + + steps: + - name: Checkout repository + uses: actions/checkout@v6 + + - name: Set up uv + uses: astral-sh/setup-uv@v7 + with: + python-version: ${{ matrix.python-version }} + enable-cache: true + + - name: Install system dependencies + run: | + sudo apt-get update -y + sudo DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ + tzdata default-jre + + - name: Install Singularity 4 + env: + SINGULARITY_VERSION: 4.3.7 + run: | + UBUNTU_CODENAME=$(lsb_release -cs) + wget -q "https://github.com/sylabs/singularity/releases/download/v${SINGULARITY_VERSION}/singularity-ce_${SINGULARITY_VERSION}-${UBUNTU_CODENAME}_amd64.deb" + sudo apt-get install -y "./singularity-ce_${SINGULARITY_VERSION}-${UBUNTU_CODENAME}_amd64.deb" + singularity --version + + - name: Install dependencies + run: uv sync --all-groups --all-extras + + - name: Run unit tests (excluding Google Cloud tests) + run: uv run pytest -m "not google_cloud and not slow" -vv -s --junit-xml=report.xml + + - name: Test Report + uses: dorny/test-reporter@v2 + if: success() || failure() # run this step even if previous step failed + with: + name: Unit Test Results (Python ${{ matrix.python-version }}) + path: report.xml # Path to test results + reporter: java-junit # Format of test results + + # Google Cloud integration tests + # Requires maintainer approval via environment + gcp-integration-tests: + name: Google Cloud Integration Tests (Python 3.12) + runs-on: ubuntu-latest + timeout-minutes: 90 + needs: + - unit-tests + # This environment requires maintainer approval + # Configured at: Settings > Environments > google-cloud > Required reviewers + environment: Google Cloud + + # Required for Workload Identity Federation + permissions: + contents: read + id-token: write + + steps: + - name: Checkout repository + uses: actions/checkout@v6 + + # See the action's README for more details on how/why Workload Identity Federation is configured and used + # The principalSet:// principal has direct access to the --gcs-root bucket + # https://docs.cloud.google.com/iam/docs/workload-identity-federation#access_management + # Access to the service account is configured via the MoTrPAC/motrpac-iac repo + - name: Authenticate to Google Cloud + uses: google-github-actions/auth@v3 + with: + project_id: ${{ secrets.GCP_PROJECT_ID }} + service_account: ${{ secrets.GCP_ACTIONS_SERVICE_ACCOUNT }} + workload_identity_provider: ${{ secrets.GCP_WORKLOAD_IDENTITY_PROVIDER }} + + - name: Set up Cloud SDK + uses: google-github-actions/setup-gcloud@v3 + + - name: Set up uv + uses: astral-sh/setup-uv@v7 + with: + python-version: "3.12" + enable-cache: true + + - name: Install system dependencies + run: | + sudo apt-get update -y + sudo DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ + tzdata default-jre + + - name: Install Singularity 4 + env: + SINGULARITY_VERSION: 4.3.7 + run: | + UBUNTU_CODENAME=$(lsb_release -cs) + wget -q
"https://github.com/sylabs/singularity/releases/download/v${SINGULARITY_VERSION}/singularity-ce_${SINGULARITY_VERSION}-${UBUNTU_CODENAME}_amd64.deb" + sudo apt-get install -y "./singularity-ce_${SINGULARITY_VERSION}-${UBUNTU_CODENAME}_amd64.deb" + singularity --version + + - name: Install dependencies + run: uv sync --all-groups --all-extras + + - name: Run Google Cloud integration tests + timeout-minutes: 60 + env: + GOOGLE_CLOUD_PROJECT: ${{ secrets.GCP_PROJECT_ID }} + run: | + uv run pytest -m "google_cloud" \ + --ci-prefix ${{ github.run_id }} \ + --gcs-root gs://motrpac-test-caper \ + --debug-caper \ + -vv -s --junit-xml=report.xml + + - name: Test Report + uses: dorny/test-reporter@v2 + if: success() || failure() # run this step even if previous step failed + with: + name: Google Cloud Integration Test Results + path: report.xml # Path to test results + reporter: java-junit # Format of test results diff --git a/.gitignore b/.gitignore index 1efe2d6e..51ad5be8 100644 --- a/.gitignore +++ b/.gitignore @@ -112,3 +112,7 @@ src/test_caper_uri/ cromwell.out dev/ tests/hpc/ + +*.code-workspace +.vscode/ +.idea/ diff --git a/.isort.cfg b/.isort.cfg deleted file mode 100644 index c061ec84..00000000 --- a/.isort.cfg +++ /dev/null @@ -1,10 +0,0 @@ -[settings] -multi_line_output = 3 -include_trailing_comma = True -force_grid_wrap = 0 -use_parentheses = True -line_length = 88 -known_third_party = WDL,autouri,distutils,humanfriendly,matplotlib,numpy,pandas,pyhocon,pytest,requests,setuptools,sklearn - -[mypy-bin] -ignore_errors = True diff --git a/DETAILS.md b/DETAILS.md index af82d6b9..4f71d284 100644 --- a/DETAILS.md +++ b/DETAILS.md @@ -99,7 +99,7 @@ hpc abort | JOB_ID | Abort a Caper leader job. This will cascade kill all child > **IMPORTANT**: `--deepcopy` has been deprecated and it's activated by default. You can disable it with `--no-deepcopy`. -Deepcopy allows Caper to **RECURSIVELY** copy files defined in your input JSON into your target backend's temporary storage. For example, Cromwell cannot read directly from URLs in an [input JSON file](https://github.com/ENCODE-DCC/atac-seq-pipeline/blob/master/examples/caper/ENCSR356KRQ_subsampled.json), but Caper makes copies of these URLs on your backend's temporary directory (e.g. `--local-loc-dir` for `local`, `--gcp-loc-dir` for `gcp`) and pass them to Cromwell. +Deepcopy allows Caper to **RECURSIVELY** copy files defined in your input JSON into your target backend's temporary storage. For example, Cromwell cannot read directly from URLs in an input JSON file, but Caper makes copies of these URLs on your backend's temporary directory (e.g. `--local-loc-dir` for `local`, `--gcp-loc-dir` for `gcp`) and pass them to Cromwell. ## How to manage configuration file per project @@ -187,8 +187,6 @@ We highly recommend to use a default configuration file described in the section **Conf. file**|**Cmd. line**|**Description** :-----|:-----|:----- gcp-prj|--gcp-prj|Google Cloud project - use-google-cloud-life-sciences|--use-google-cloud-life-sciences|Use Google Cloud Life Sciences API instead of (deprecated) Genomics API - gcp-zones|--gcp-zones|Comma-delimited Google Cloud Platform zones to provision worker instances (e.g. us-central1-c,us-west1-b) gcp-out-dir, out-gcs-bucket|--gcp-out-dir, --out-gcs-bucket|Output `gs://` directory for GC backend gcp-loc-dir, tmp-gcs-bucket|--gcp-loc-dir, --tmp-gcs-bucket|Tmp. directory for localization on GC backend gcp-call-caching-dup-strat|--gcp-call-caching-dup-strat|Call-caching duplication strategy. 
Choose between `copy` and `reference`. `copy` will make a copy for a new workflow, `reference` will refer to the call-cached output of a previous workflow in `metadata.json`. Defaults to `reference` @@ -466,12 +464,12 @@ If Caper's built-in backends don't work as expected on your clusters (e.g. due t Find this `backend.conf` first by dry-running `caper run [WDL] --dry-run ...`. For example, for a `slurm` backend: ``` $ caper run main.wdl --dry-run -2020-07-07 11:18:13,196|caper.caper_runner|INFO| Adding encode-dcc-1016 to env var GOOGLE_CLOUD_PROJECT -2020-07-07 11:18:13,197|caper.caper_base|INFO| Creating a timestamped temporary directory. /mnt/data/scratch/leepc12/test_caper_tmp/main/20200707_111813_197082 -2020-07-07 11:18:13,197|caper.caper_runner|INFO| Localizing files on work_dir. /mnt/data/scratch/leepc12/test_caper_tmp/main/20200707_111813_197082 +2020-07-07 11:18:13,196|caper.caper_runner|INFO| Adding my-gcp-project to env var GOOGLE_CLOUD_PROJECT +2020-07-07 11:18:13,197|caper.caper_base|INFO| Creating a timestamped temporary directory. /scratch/user/caper_tmp/main/20200707_111813_197082 +2020-07-07 11:18:13,197|caper.caper_runner|INFO| Localizing files on work_dir. /scratch/user/caper_tmp/main/20200707_111813_197082 2020-07-07 11:18:13,829|caper.cromwell|INFO| Validating WDL/inputs/imports with Womtool... 2020-07-07 11:18:16,034|caper.cromwell|INFO| Womtool validation passed. -2020-07-07 11:18:16,035|caper.caper_runner|INFO| launching run: wdl=/mnt/data2/scratch/leepc12/test_wdl1_sub/main.wdl, inputs=None, backend_conf=/mnt/data/scratch/leepc12/test_caper_tmp/main/20200707_111813_197082/backend.conf +2020-07-07 11:18:16,035|caper.caper_runner|INFO| launching run: wdl=/scratch/user/workflows/main.wdl, inputs=None, backend_conf=/scratch/user/caper_tmp/main/20200707_111813_197082/backend.conf ``` Find `backend_conf`, make a copy of it and edit it. @@ -554,7 +552,7 @@ until [ $ITER -ge 3 ]; do sleep 30 done """ - root = "/mnt/data/scratch/leepc12/caper_out" + root = "/scratch/user/caper_out" exit-code-timeout-seconds = 360 check-alive = """for ITER in 1 2 3; do CHK_ALIVE=$(squeue --noheader -j ${job_id} --format=%i | grep ${job_id}) diff --git a/README.md b/README.md index 6bc017fe..e3417036 100644 --- a/README.md +++ b/README.md @@ -1,9 +1,11 @@ -[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![CircleCI](https://circleci.com/gh/ENCODE-DCC/caper.svg?style=svg)](https://circleci.com/gh/ENCODE-DCC/caper) +[![Python Version from PEP 621 TOML](https://img.shields.io/python/required-version-toml?tomlFilePath=https%3A%2F%2Fraw.githubusercontent.com%2FMoTrPAC%2Fcaper%2Fmain%2Fpyproject.toml)](./pyproject.toml) ## Introduction -Caper (Cromwell Assisted Pipeline ExecutoR) is a wrapper Python package for [Cromwell](https://github.com/broadinstitute/cromwell/). Caper wraps Cromwell to run pipelines on multiple platforms like GCP (Google Cloud Platform), AWS (Amazon Web Service) and HPCs like SLURM, SGE, PBS/Torque and LSF. It provides easier way of running Cromwell server/run modes by automatically composing necessary input files for Cromwell. Caper can run each task on a specified environment (Docker, Singularity or Conda). Also, Caper automatically localizes all files (keeping their directory structure) defined in your input JSON and command line according to the specified backend.
For example, if your chosen backend is GCP and files in your input JSON are on S3 buckets (or even URLs) then Caper automatically transfers `s3://` and `http(s)://` files to a specified `gs://` bucket directory. Supported URIs are `s3://`, `gs://`, `http(s)://` and local absolute paths. You can use such URIs either in CLI and input JSON. Private URIs are also accessible if you authenticate using cloud platform CLIs like `gcloud auth`, `aws configure` and using `~/.netrc` for URLs. +Caper (Cromwell Assisted Pipeline ExecutoR) is a wrapper Python package for [Cromwell](https://github.com/broadinstitute/cromwell/). This project is maintained by [MoTrPAC](https://motrpac.org/), forked from the original [ENCODE-DCC caper](https://github.com/ENCODE-DCC/caper) to add support for [Google Cloud Batch API](https://cloud.google.com/batch) and remove deprecated Google Cloud Life Sciences and Google Cloud Genomics APIs. + +Caper wraps Cromwell to run pipelines on multiple platforms like GCP (Google Cloud Platform), AWS (Amazon Web Services) and HPCs like SLURM, SGE, PBS/Torque and LSF. It provides an easier way of running Cromwell server/run modes by automatically composing necessary input files for Cromwell. Caper can run each task on a specified environment (Docker, Singularity or Conda). Also, Caper automatically localizes all files (keeping their directory structure) defined in your input JSON and command line according to the specified backend. For example, if your chosen backend is GCP and files in your input JSON are on S3 buckets (or even URLs) then Caper automatically transfers `s3://` and `http(s)://` files to a specified `gs://` bucket directory. Supported URIs are `s3://`, `gs://`, `http(s)://` and local absolute paths. You can use such URIs both in CLI and in input JSON. Private URIs are also accessible if you authenticate using cloud platform CLIs like `gcloud auth`, `aws configure` and using `~/.netrc` for URLs. ## Installation for Google Cloud Platform and AWS @@ -18,19 +20,22 @@ See [this](scripts/aws_caper_server/README.md) for details. ## Installation for local computers and HPCs -1) Make sure that you have Java (>= 11) and Python>=3.6 installed on your system and `pip` to install Caper. +1) Make sure that you have Java (>= 17) and Python >= 3.12 installed on your system. + +2) Install Caper from the [MoTrPAC GitHub repository](https://github.com/MoTrPAC/caper) using [uv](https://docs.astral.sh/uv/) (recommended) or pip: ```bash - $ pip install caper - ``` + # Using uvx (recommended) - runs caper without permanent installation + $ uvx --from git+https://github.com/MoTrPAC/caper caper -2) If you see an error message like `caper: command not found` after installing then add the following line to the bottom of `~/.bashrc` and re-login. + # Or install with uv + $ uv pip install git+https://github.com/MoTrPAC/caper - ```bash - export PATH=$PATH:~/.local/bin + # Or install with pip + $ pip install git+https://github.com/MoTrPAC/caper ``` -3) Choose a backend from the following table and initialize Caper. This will create a default Caper configuration file `~/.caper/default.conf`, which have only required parameters for each backend. `caper init` will also install Cromwell/Womtool JARs on `~/.caper/`. Downloading those files can take up to 10 minutes. Once they are installed, Caper can completely work offline with local data files. +3) Choose a backend from the following table and initialize Caper.
This will create a default Caper configuration file `~/.caper/default.conf`, which has only required parameters for each backend. `caper init` will also install Cromwell/Womtool JARs in `~/.caper/`. Downloading those files can take up to 10 minutes. Once they are installed, Caper can work completely offline with local data files. **Backend**|**Description** :--------|:----- @@ -51,7 +56,7 @@ See [this](scripts/aws_caper_server/README.md) for details. ## Docker, Singularity and Conda -For local backends (`local`, `slurm`, `sge`, `pbs` and `lsf`), you can use `--docker`, `--singularity` or `--conda` to run WDL tasks in a pipeline within one of these environment. For example, `caper run ... --singularity docker://ubuntu:latest` will run each task within a Singularity image built from a docker image `ubuntu:latest`. These parameters can also be used as flags. If used as a flag, Caper will try to find a default docker/singularity/conda in WDL. e.g. All ENCODE pipelines have default docker/singularity images defined within WDL's meta section (under key `caper_docker` or `default_docker`). +For local backends (`local`, `slurm`, `sge`, `pbs` and `lsf`), you can use `--docker`, `--singularity` or `--conda` to run WDL tasks in a pipeline within one of these environments. For example, `caper run ... --singularity docker://ubuntu:latest` will run each task within a Singularity image built from a docker image `ubuntu:latest`. These parameters can also be used as flags. If used as a flag, Caper will try to find a default docker/singularity/conda in WDL. Pipelines can define default docker/singularity images within WDL's meta section (under key `caper_docker` or `default_docker`). > **IMPORTANT**: Docker/singularity/conda defined in Caper's configuration file or in CLI (`--docker`, `--singularity` and `--conda`) will be overridden by those defined in WDL task's `runtime`. We provide these parameters to define default/base environment for a pipeline, not to override WDL task's `runtime`. diff --git a/bin/caper b/bin/caper deleted file mode 100755 index 82335282..00000000 --- a/bin/caper +++ /dev/null @@ -1,13 +0,0 @@ -#!/usr/bin/env python3 -try: - from caper.cli import main -except ImportError: - import os - import sys - - script_path = os.path.dirname(os.path.realpath(__file__)) - sys.path.append(os.path.join(script_path, "../")) - from caper.cli import main - -if __name__ == "__main__": - main() diff --git a/caper/__init__.py b/caper/__init__.py index 433f69c9..d39e04f5 100644 --- a/caper/__init__.py +++ b/caper/__init__.py @@ -1,5 +1,7 @@ +"""Caper - Cromwell Assisted Pipeline ExecutoR.""" + from .caper_client import CaperClient, CaperClientSubmit from .caper_runner import CaperRunner __all__ = ['CaperClient', 'CaperClientSubmit', 'CaperRunner'] -__version__ = '2.3.2' +__version__ = '3.0.0' diff --git a/caper/__main__.py b/caper/__main__.py index a38f7ac1..782a39c7 100644 --- a/caper/__main__.py +++ b/caper/__main__.py @@ -1,3 +1,5 @@ +"""Entry point for running caper as a module.""" + from .
import cli if __name__ == '__main__': diff --git a/caper/arg_tool.py b/caper/arg_tool.py index 8774f36b..9b7ba113 100644 --- a/caper/arg_tool.py +++ b/caper/arg_tool.py @@ -1,14 +1,23 @@ +"""Utility functions for combining configuration files with argparse arguments.""" + +from __future__ import annotations + import os from argparse import ArgumentParser from configparser import ConfigParser, MissingSectionHeaderError - -from distutils.util import strtobool +from typing import cast def read_from_conf( - conf_file, conf_section='defaults', conf_key_map=None, no_strip_quote=False -): - """Read key/value from conf_section of conf_file. + conf_file: str, + conf_section: str = 'defaults', + conf_key_map: dict[str, str] | None = None, + *, + no_strip_quote: bool = False, +) -> dict[str, str | None]: + """ + Read key/value from conf_section of conf_file. + Hyphens (-) in keys will be replaced with underscores (_). All keys and values are considered as strings. @@ -34,43 +43,50 @@ """ conf_file = os.path.expanduser(conf_file) if not os.path.exists(conf_file): - raise FileNotFoundError('conf_file does not exist. f={f}'.format(f=conf_file)) + msg = f'conf_file does not exist. f={conf_file}' + raise FileNotFoundError(msg) config = ConfigParser() with open(conf_file) as fp: - s = fp.read() + config_content = fp.read() try: - config.read_string(s) + config.read_string(config_content) except MissingSectionHeaderError: - section = '[{sect}]\n'.format(sect=conf_section) - config.read_string(section + s) + # Add the section if it's missing + config.read_string(f'[{conf_section}]\n' + config_content) - d_ = dict(config.items(conf_section)) - result = {} - for k, v in d_.items(): - if not no_strip_quote: - v = v.strip('"\'') - if v: - k_ = k.replace('-', '_') - if conf_key_map and k_ in conf_key_map: - k_ = conf_key_map[k_] - result[k_] = v + section_items = dict(config.items(conf_section)) + processed: dict[str, str | None] = {} + for raw_key, raw_value in section_items.items(): + value = raw_value if no_strip_quote else raw_value.strip('"\'') - return result + if value: # Ignore empty values + key = raw_key.replace('-', '_') + if conf_key_map and key in conf_key_map: + key = conf_key_map[key] + processed[key] = value + + return processed + + +def strtobool(value: str) -> bool: + """Replacement for distutils.util.strtobool (removed in Python 3.12); unrecognized values return False.""" + return value.strip().lower() in ('y', 'yes', 'on', '1', 'true', 't') def update_parsers_defaults_with_conf( - parsers, - conf_file, - conf_section='defaults', - conf_key_map=None, - no_strip_quote=False, - val_type=None, - val_default=None, -): - """Updates argparse.ArgumentParser's defaults with key/value pairs - defined in conf_file. Also, returns a dict of key/values defined in - conf_file with correct type for each value. + parsers: ArgumentParser | list[ArgumentParser], + conf_file: str, + conf_section: str = 'defaults', + conf_key_map: dict[str, str] | None = None, + no_strip_quote: bool = False, + val_type: dict[str, type] | None = None, + val_default: dict[str, str | bool | int | float | None] | None = None, +) -> dict[str, str | bool | int | float | None]: + """ + Updates argparse.ArgumentParser's defaults with key/value pairs defined in conf_file. + + Also, returns a dict of key/values defined in conf_file with correct type for each value. Type of each value in conf_file can be guessed from: - default value of ArgumentParser's argument.
@@ -114,20 +130,24 @@ if isinstance(parsers, ArgumentParser): parsers = [parsers] - defaults = read_from_conf( - conf_file=conf_file, - conf_section=conf_section, - conf_key_map=conf_key_map, - no_strip_quote=no_strip_quote, + defaults = cast( + 'dict[str, str | bool | int | float | None]', + read_from_conf( + conf_file=conf_file, + conf_section=conf_section, + conf_key_map=conf_key_map, + no_strip_quote=no_strip_quote, + ), ) if val_default: - for k, v in val_default.items(): + for k in val_default: if k not in defaults: defaults[k] = None # used "is not None" for guessed_default to catch boolean false for k, v in defaults.items(): + guessed_default = None if val_default and k in val_default: guessed_default = val_default[k] else: @@ -143,10 +163,9 @@ guessed_type = None if v is None and guessed_default is not None: - v = guessed_default - defaults[k] = v + defaults[k] = guessed_default - if guessed_type: + if guessed_type and v is not None: if guessed_type is bool and isinstance(v, str): defaults[k] = bool(strtobool(v)) else: diff --git a/caper/backward_compatibility.py b/caper/backward_compatibility.py index 753c40fb..76e93faf 100644 --- a/caper/backward_compatibility.py +++ b/caper/backward_compatibility.py @@ -1,5 +1,4 @@ -"""Variables and functions for backward_compatibililty -""" +"""Variables and functions for backward compatibility.""" CAPER_1_0_0_PARAM_KEY_NAME_CHANGE = { 'out_dir': 'local_out_dir', diff --git a/caper/caper_args.py b/caper/caper_args.py index b394f511..015cd7b1 100644 --- a/caper/caper_args.py +++ b/caper/caper_args.py @@ -1,9 +1,16 @@ +"""Caper command line interface arguments.""" + +from __future__ import annotations + import argparse import os +from argparse import ArgumentParser from enum import Enum from autouri import URIBase +from caper.cromwell_backend import BackendProvider + from .arg_tool import update_parsers_defaults_with_conf from .backward_compatibility import PARAM_KEY_NAME_CHANGE from .caper_workflow_opts import CaperWorkflowOpts @@ -34,132 +41,84 @@ class ResourceAnalysisReductionMethod(Enum): - sum = sum - max = max - min = min - none = None - - -def get_defaults(conf_file=None): - """Wrapper for `get_parser_and_defaults()`. - Use this function to get default values updated with `conf_file`. - - Args: - conf_file: - `DEFAULT_CAPER_CONF` will be used if it is None. - - Returns updated defaults only. - """ - _, conf_dict = get_parser_and_defaults(conf_file=conf_file) - return conf_dict + """Enum defining the reduction method for resource analysis.""" + SUM = sum + MAX = max + MIN = min + NONE = None -def get_parser_and_defaults(conf_file=None): - """Creates a main parser and make a subparser for each subcommand. - There are many parent parsers defined here. - Each subparser will take a certain combination of these parent parsers - to share the same parameter arguments between subcommands. - e.g. subcommand run and server share the same --cromwell argument, which - is defined in a parent parser "parent_runner". - - Finally each subparser's default is updated with values defined in conf_file. - Args: - conf_file: - If defined, this will be used instead of partially parsing command line - arguments to find conf_file (-c). - `DEFAULT_CAPER_CONF` will be used if it is None. - Returns: - parser: - ArgumentParser object with all arguments defined for each sub- - command (subparser). - conf_dict: - Dict of key/value pairs parsed from conf_file.
- Such value is converted into a correct type guessed from - defaults of arguments defined in ArgumentParser object. - """ - parser = argparse.ArgumentParser( - description='Caper (Cromwell-assisted Pipeline ExecutioneR)' - ) - parser.add_argument('-v', '--version', action='store_true', help='Show version') - - subparser = parser.add_subparsers(dest='action') - - parent_init = argparse.ArgumentParser(add_help=False) - parent_init.add_argument('platform', help='Platform to initialize Caper for.') - - # all - parent_all = argparse.ArgumentParser(add_help=False) - parent_all.add_argument( - '-c', '--conf', help='Specify config file', default=DEFAULT_CAPER_CONF - ) - parent_all.add_argument( - '-D', '--debug', action='store_true', help='Prints all logs >= DEBUG level' - ) - parent_all.add_argument( +def _add_common_args(parser: ArgumentParser) -> None: + parser.add_argument('-c', '--conf', help='Specify config file', default=DEFAULT_CAPER_CONF) + parser.add_argument('-D', '--debug', action='store_true', help='Prints all logs >= DEBUG level') + parser.add_argument( '--gcp-service-account-key-json', help='Secret key JSON file for Google Cloud Platform service account. ' - 'This service account should have enough permission to ' - 'Storage for client functions and ' - 'Storage/Compute Engine/Genomics API/Life Sciences API ' - 'for server/runner functions.', + 'This service account should have enough permission to Storage for client ' + 'functions and Storage/Compute Engine/Batch API for server/runner functions. ' + 'We recommend using application default credentials for authentication.', ) - group_loc = parent_all.add_argument_group( - title='cache directories for localization' - ) + +def _add_localization_args(parser: ArgumentParser) -> None: + group_loc = parser.add_argument_group(title='cache directories for localization') group_loc.add_argument( '--local-loc-dir', '--tmp-dir', - help='Temporary directory to store Cromwell\'s intermediate backend files. ' - 'These files include backend.conf, workflow_opts.json, imports.zip. and ' - 'localized input JSON files due to deepcopying (recursive localization). ' - 'Cromwell\'s MySQL/PostgreSQL DB password can be exposed on backend.conf ' - 'on this directory. Therefore, DO NOT USE /tmp HERE. This directory is ' - 'also used for storing cached files for local/slurm/sge/pbs/lsf backends.', + help=( + "Temporary directory to store Cromwell's intermediate backend files. " + 'These files include backend.conf, workflow_opts.json, imports.zip. and ' + 'localized input JSON files due to deepcopying (recursive localization). ' + "Cromwell's MySQL/PostgreSQL DB password can be exposed on backend.conf " + 'on this directory. Therefore, DO NOT USE /tmp HERE. This directory is ' + 'also used for storing cached files for local/slurm/sge/pbs/lsf backends.' + ), ) group_loc.add_argument( '--gcp-loc-dir', '--tmp-gcs-bucket', - help='Temporary directory to store cached files for gcp backend. ' - 'e.g. gs://my-bucket/caper-cache-dir. ', + help=( + 'Temporary directory to store cached files for gcp backend. ' + 'e.g. gs://my-bucket/caper-cache-dir.' + ), ) group_loc.add_argument( '--aws-loc-dir', '--tmp-s3-bucket', - help='Temporary directory to store cached files for aws backend. ' - 'e.g. s3://my-bucket/caper-cache-dir. ', + help=( + 'Temporary directory to store cached files for aws backend. ' + 'e.g. s3://my-bucket/caper-cache-dir.' 
+ ), ) - # run, server, submit - parent_backend = argparse.ArgumentParser(add_help=False) - parent_backend.add_argument('-b', '--backend', help='Backend to run a workflow') - parent_backend.add_argument( + +def _add_backend_args(parser: ArgumentParser) -> None: + parser.add_argument('-b', '--backend', help='Backend to run a workflow') + parser.add_argument( '--dry-run', action='store_true', - help='Caper localizes remote files and validates WDL ' - 'but does not run/submit a pipeline.', + help=('Caper localizes remote files and validates WDL but does not run/submit a pipeline.'), ) - # run, server - parent_runner = argparse.ArgumentParser(add_help=False) - parent_runner = parent_runner.add_argument_group(title='Cromwell logging arguments') - parent_runner.add_argument( - '--cromwell-stdout', - default=DEFAULT_CROMWELL_STDOUT, - help='Local file to write STDOUT of Cromwell Java process to. ' - 'This is for Cromwell (not for Caper\'s logging system). ' - 'If this file already exists then Caper will make a new file suffixed with ' - 'incremented index. e.g. cromwell.out.1 ', +def _add_gcp_zones_args(parser: ArgumentParser) -> None: + group_gc_all = parser.add_argument_group(title='GCP backend arguments for server/runner/client') + group_gc_all.add_argument( + '--gcp-zones', + help=( + 'Comma-separated GCP zones used for running jobs in Batch. ' + '(e.g. us-west1-b,us-central1-b).' + ), ) - group_db = parent_runner.add_argument_group( + + +def _add_db_args(parser: ArgumentParser) -> None: + group_db = parser.add_argument_group( title='General DB settings (for both file DB and MySQL DB)' ) group_db.add_argument( - '--db', - default=CromwellBackendDatabase.DB_FILE, - help='Cromwell metadata database type', + '--db', default=CromwellBackendDatabase.DB_FILE, help='Cromwell metadata database type' ) group_db.add_argument( '--db-timeout', @@ -168,16 +127,14 @@ help='Milliseconds to wait for DB connection.', ) - group_file_db = parent_runner.add_argument_group( + group_file_db = parser.add_argument_group( title='HyperSQL file DB arguments (unstable, not recommended)' ) group_file_db.add_argument( - '--file-db', - '-d', - help='Default DB file for Cromwell\'s built-in HyperSQL database.', + '--file-db', '-d', help="Default DB file for Cromwell's built-in HyperSQL database."
) - group_mysql = parent_runner.add_argument_group(title='MySQL DB arguments') + group_mysql = parser.add_argument_group(title='MySQL DB arguments') group_mysql.add_argument( '--mysql-db-ip', default=CromwellBackendDatabase.DEFAULT_MYSQL_DB_IP, @@ -205,7 +162,7 @@ def get_parser_and_defaults(conf_file=None): help='MySQL DB name for Cromwell', ) - group_postgresql = parent_runner.add_argument_group(title='PostgreSQL DB arguments') + group_postgresql = parser.add_argument_group(title='PostgreSQL DB arguments') group_postgresql.add_argument( '--postgresql-db-ip', default=CromwellBackendDatabase.DEFAULT_POSTGRESQL_DB_IP, @@ -233,7 +190,9 @@ def get_parser_and_defaults(conf_file=None): help='PostgreSQL DB name for Cromwell', ) - group_cromwell = parent_runner.add_argument_group(title='Cromwell settings') + +def _add_cromwell_args(parser: ArgumentParser) -> None: + group_cromwell = parser.add_argument_group(title='Cromwell settings') group_cromwell.add_argument( '--cromwell', default=Cromwell.DEFAULT_CROMWELL, @@ -243,537 +202,776 @@ def get_parser_and_defaults(conf_file=None): '--max-concurrent-tasks', type=int, default=CromwellBackendBase.DEFAULT_CONCURRENT_JOB_LIMIT, - help='Number of concurrent tasks. ' - '"config.concurrent-job-limit" in Cromwell backend configuration ' - 'for each backend', + help=( + 'Number of concurrent tasks. "config.concurrent-job-limit" in Cromwell ' + 'backend configuration for each backend' + ), ) group_cromwell.add_argument( '--max-concurrent-workflows', type=int, default=CromwellBackendCommon.DEFAULT_MAX_CONCURRENT_WORKFLOWS, - help='Number of concurrent workflows. ' - '"system.max-concurrent-workflows" in backend configuration', + help=( + 'Number of concurrent workflows. "system.max-concurrent-workflows" in backend ' + 'configuration' + ), ) group_cromwell.add_argument( '--memory-retry-error-keys', default=','.join(CromwellBackendCommon.DEFAULT_MEMORY_RETRY_ERROR_KEYS), - help='(CURRENTLY NOT WORKING) ' - 'If an error caught by these comma-separated keys occurs, ' - 'then increase memory by --memory-retry-multiplier ' - 'for retrials controlled by --max-retries. ' - 'See https://cromwell.readthedocs.io/en/develop/cromwell_features/RetryWithMoreMemory/ ' - 'for details. ', + help=( + '(CURRENTLY NOT WORKING) If an error caught by these comma-separated keys ' + 'occurs, then increase memory by --memory-retry-multiplier for retrials ' + 'controlled by --max-retries. ' + 'See https://cromwell.readthedocs.io/en/develop/cromwell_features/RetryWithMoreMemory/ ' + 'for details.' + ), ) group_cromwell.add_argument( '--disable-call-caching', action='store_true', - help='Disable Cromwell\'s call caching, which re-uses outputs from ' - 'previous workflows', + help="Disable Cromwell's call caching, which re-uses outputs from previous workflows", ) group_cromwell.add_argument( - '--backend-file', - help='Custom Cromwell backend configuration file to override all', + '--backend-file', help='Custom Cromwell backend configuration file to override all' ) group_cromwell.add_argument( '--soft-glob-output', action='store_true', - help='Use soft-linking when globbing outputs for a filesystem that ' - 'does not allow hard-linking. e.g. beeGFS. ' - 'This flag does not work with backends based on a Docker container. ' - 'i.e. gcp and aws. Also, ' - 'it does not work with local backends (local/slurm/sge/pbs/lsf) ' - 'with --. However, it works fine with --singularity.', + help=( + 'Use soft-linking when globbing outputs for a filesystem that does not ' + 'allow hard-linking. e.g. 
beeGFS. This flag does not work with backends ' + 'based on a Docker container. i.e. gcp and aws. Also, it does not work ' + 'with local backends (local/slurm/sge/pbs/lsf) with --. However, it works ' + 'fine with --singularity.' + ), ) group_cromwell.add_argument( '--local-hash-strat', default=CromwellBackendLocal.DEFAULT_LOCAL_HASH_STRAT, - choices=[ - LOCAL_HASH_STRAT_FILE, - LOCAL_HASH_STRAT_PATH, - LOCAL_HASH_STRAT_PATH_MTIME, - ], - help='File hashing strategy for call caching. ' - 'For local backends (local/slurm/sge/pbs/lsf) only. ' - 'file: use md5sum hash (slow), path: use path only, ' - 'path+modtime (default): use path + mtime.', + choices=[LOCAL_HASH_STRAT_FILE, LOCAL_HASH_STRAT_PATH, LOCAL_HASH_STRAT_PATH_MTIME], + help=( + 'File hashing strategy for call caching. For local backends ' + '(local/slurm/sge/pbs/lsf) only. file: use md5sum hash (slow), ' + 'path: use path only, path+modtime (default): use path + mtime.' + ), + ) + group_cromwell.add_argument( + '--cromwell-stdout', + default=DEFAULT_CROMWELL_STDOUT, + help=( + 'Local file to write STDOUT of Cromwell Java process to. This is for ' + "Cromwell (not for Caper's logging system). If this file already exists " + 'then Caper will make a new file suffixed with incremented index. ' + 'e.g. cromwell.out.1' + ), ) - group_local = parent_runner.add_argument_group(title='local backend arguments') + +def _add_local_backend_args(parser: ArgumentParser) -> None: + group_local = parser.add_argument_group(title='local backend arguments') group_local.add_argument( '--local-out-dir', '--out-dir', default=DEFAULT_OUT_DIR, - help='Output directory path for local backend. ' - 'Cloud backends (gcp, aws) use different output directories. ' - 'For gcp, define --gcp-out-dir. ' - 'For aws, define --aws-out-dir.', + help=( + 'Output directory path for local backend. Cloud backends (gcp, aws) use ' + 'different output directories. For gcp, define --gcp-out-dir. For aws, ' + 'define --aws-out-dir.' + ), ) group_local.add_argument( '--slurm-resource-param', - help='SLURM resource parameters to be passed to sbatch. ' - 'You can customize this to fit your cluster\'s configuration. ' - 'You can use WDL syntax in ${} notation with Cromwell\'s built-in resource ' - 'variables. See documentation for details. ', + help=( + 'SLURM resource parameters to be passed to sbatch. You can customize this ' + "to fit your cluster's configuration. You can use WDL syntax in ${} " + "notation with Cromwell's built-in resource variables. See documentation " + 'for details.' + ), default=CromwellBackendSlurm.DEFAULT_SLURM_RESOURCE_PARAM, ) - group_gc_all = parent_backend.add_argument_group( - title='GCP backend arguments for server/runner/client' - ) - group_gc = parent_runner.add_argument_group( - title='GCP backend arguments for server/runner' - ) - group_gc.add_argument('--gcp-prj', help='GC project') - group_gc_all.add_argument( - '--use-google-cloud-life-sciences', - action='store_true', - help='Use Google Cloud Life Sciences API (v2beta) instead of ' - 'deprecated Genomics API (v2alpha1).' - 'Life Sciences API requires only one region specified with' - 'gcp-region. gcp-zones will be ignored since it is for Genomics API.' 
- 'See https://cloud.google.com/life-sciences/docs/concepts/locations ' - 'for supported regions.', - ) + +def _add_gcp_runner_args(parser: ArgumentParser) -> None: + group_gc = parser.add_argument_group(title='GCP backend arguments for server/runner') + group_gc.add_argument('--gcp-prj', help='Google Cloud project') group_gc.add_argument( '--gcp-region', default=CromwellBackendGcp.DEFAULT_REGION, - help='GCP region for Google Cloud Life Sciences API. ' - 'This is used only when --use-google-cloud-life-sciences is defined.', + help='GCP region for Google Cloud Batch API. ', ) - group_gc_all.add_argument( - '--gcp-zones', - help='Comma-separated GCP zones used for Genomics API. ' - '(e.g. us-west1-b,us-central1-b). ' - 'If you use --use-google-cloud-life-sciences then ' - 'define --gcp-region instead.', + group_gc.add_argument( + '--gcp-compute-service-account', + help=( + 'Service account email to use for Google Cloud Batch compute instances. ' + 'This is *not* the service account used to launch the job, but the ' + 'service account used to actually run the job on the Batch VM instances. ' + 'Ensure that this service account has the `roles/batch.agentReporter` ' + 'role, so that VM instances can report their status to Batch.' + ), ) group_gc.add_argument( '--gcp-call-caching-dup-strat', default=CromwellBackendGcp.DEFAULT_CALL_CACHING_DUP_STRAT, choices=[CALL_CACHING_DUP_STRAT_REFERENCE, CALL_CACHING_DUP_STRAT_COPY], - help='Duplication strategy for call-cached outputs for GCP backend: ' - 'copy: make a copy, reference: refer to old output in metadata.json.', + help=( + 'Duplication strategy for call-cached outputs for GCP backend: ' + 'copy: make a copy, ' + 'reference: refer to old output in metadata.json.' + ), ) group_gc.add_argument( '--gcp-out-dir', '--out-gcs-bucket', - help='Output directory path for GCP backend. ' 'e.g. gs://my-bucket/my-output.', + help='Output directory path for GCP backend. e.g. gs://my-bucket/my-output.', + ) + group_gc.add_argument( + '--gcp-network', + help=( + 'VPC network name for GCP Batch backend. Required for VPCs in custom subnet mode. ' + 'Can be short name (e.g. "my-vpc") or full path.' + ), + ) + group_gc.add_argument( + '--gcp-subnetwork', + help=( + 'VPC subnetwork name for GCP Batch backend. Required for VPCs in custom subnet mode. ' + 'Can be short name (e.g. "my-subnet") or regional path.' + ), + ) + group_gc.add_argument( + '--gcp-dockerhub-mirror', + action=argparse.BooleanOptionalAction, + default=True, + help=( + 'Enable Docker Hub mirroring through Google Artifact Registry (default: enabled). ' + 'Images will be pulled from mirror.gcr.io instead of Docker Hub directly. ' + 'Use --no-gcp-dockerhub-mirror to disable.' + ), + ) + group_gc.add_argument( + '--gcp-dockerhub-mirror-address', + default='mirror.gcr.io', + help='Address of the Docker Hub mirror (default: mirror.gcr.io).', ) - group_aws = parent_runner.add_argument_group(title='AWS backend arguments') + +def _add_aws_runner_args(parser: ArgumentParser) -> None: + group_aws = parser.add_argument_group(title='AWS backend arguments') group_aws.add_argument('--aws-batch-arn', help='ARN for AWS Batch') group_aws.add_argument('--aws-region', help='AWS region (e.g. us-west-1)') group_aws.add_argument( '--aws-out-dir', '--out-s3-bucket', - help='Output path on S3 bucket for AWS backend. ' - 'e.g. s3://my-bucket/my-output.', + help='Output path on S3 bucket for AWS backend. e.g. 
s3://my-bucket/my-output.', ) group_aws.add_argument( '--aws-call-caching-dup-strat', default=CromwellBackendAws.DEFAULT_CALL_CACHING_DUP_STRAT, choices=[CALL_CACHING_DUP_STRAT_REFERENCE, CALL_CACHING_DUP_STRAT_COPY], - help='Duplication strategy for call-cached outputs for AWS backend: ' - 'copy: make a copy, reference: refer to old output in metadata.json.', + help=( + 'Duplication strategy for call-cached outputs for AWS backend: ' + 'copy: make a copy, ' + 'reference: refer to old output in metadata.json.' + ), ) - # run, submit - parent_submit = argparse.ArgumentParser(add_help=False) - parent_submit.add_argument( +def _add_submit_io_args(parser: ArgumentParser) -> None: + parser.add_argument( 'wdl', - help='Path, URL or URI for WDL script ' - 'Example: /scratch/my.wdl, gs://some/where/our.wdl, ' - 'http://hello.com/world/your.wdl', - ) - parent_submit.add_argument('-i', '--inputs', help='Workflow inputs JSON file') - parent_submit.add_argument('-o', '--options', help='Workflow options JSON file') - parent_submit.add_argument('-l', '--labels', help='Workflow labels JSON file') - parent_submit.add_argument( - '-p', '--imports', help='Zip file of imported subworkflows' - ) - parent_submit.add_argument( + help=( + 'Path, URL or URI for WDL script. Example: /scratch/my.wdl, ' + 'gs://some/where/our.wdl, http://hello.com/world/your.wdl' + ), + ) + parser.add_argument('-i', '--inputs', help='Workflow inputs JSON file') + parser.add_argument('-o', '--options', help='Workflow options JSON file') + parser.add_argument('-l', '--labels', help='Workflow labels JSON file') + parser.add_argument('-p', '--imports', help='Zip file of imported sub-workflows') + parser.add_argument( '-s', '--str-label', - help='Caper\'s special label for a workflow ' - 'This label will be added to a workflow labels JSON file ' - 'as a value for a key "caper-label". ' - 'DO NOT USE IRREGULAR CHARACTERS. USE LETTERS, NUMBERS, ' - 'DASHES AND UNDERSCORES ONLY (^[A-Za-z0-9\\-\\_]+$) ' - 'since this label is used to compose a path for ' - 'workflow\'s temporary/cache directory (.caper_tmp/label/timestamp/)', - ) - parent_submit.add_argument( + help=( + "Caper's special label for a workflow. This label will be added to a workflow " + 'labels JSON file as a value for a key "caper-label". DO NOT USE IRREGULAR ' + 'CHARACTERS. USE LETTERS, NUMBERS, DASHES AND UNDERSCORES ONLY ' + "(^[A-Za-z0-9\\-\\_]+$) since this label is used to compose a path for a workflow's " + 'temporary/cache directory (.caper_tmp/label/timestamp/)' + ), + ) + parser.add_argument( '--hold', action='store_true', help='Put a hold on a workflow when submitted to a Cromwell server.', ) - parent_submit.add_argument( + parser.add_argument( '--use-gsutil-for-s3', action='store_true', - help='Use gsutil CLI for direct trasnfer between S3 and GCS buckets. ' - 'Otherwise, such file transfer will stream through local machine. ' - 'Make sure that gsutil is installed on your system and it has access to ' - 'credentials for AWS (e.g. ~/.boto or ~/.aws/credentials).', + help=( + 'Use gsutil CLI for direct transfer between S3 and GCS buckets. Otherwise, such ' + 'file transfer will stream through local machine. Make sure that gsutil is ' + 'installed on your system and it has access to credentials for AWS (e.g. ~/.boto ' + 'or ~/.aws/credentials).' + ), + ) + parser.add_argument( '--no-deepcopy', action='store_true', - help='(IMPORTANT) --deepcopy has been deprecated. ' - 'Deepcopying is now activated by default.
' - 'This flag disables deepcopy for ' - 'JSON (.json), TSV (.tsv) and CSV (.csv) ' - 'files specified in an input JSON file (--inputs). ' - 'Find all path/URI string values in an input JSON file ' - 'and make copies of files on a local/remote storage ' - 'for a target backend. Make sure that you have installed ' - 'gsutil for GCS and aws for S3.', - ) - parent_submit.add_argument( - '--ignore-womtool', - action='store_true', - help='Ignore warnings from womtool.jar.', - ) - parent_submit.add_argument( + help=( + '(IMPORTANT) --deepcopy has been deprecated. Deepcopying is now activated by ' + 'default. This flag disables deepcopy for JSON (.json), TSV (.tsv) and CSV (.csv) ' + 'files specified in an input JSON file (--inputs). Find all path/URI string values ' + 'in an input JSON file and make copies of files on a local/remote storage for a ' + 'target backend. Make sure that you have installed gsutil for GCS and aws for S3.' + ), + ) + parser.add_argument( + '--ignore-womtool', action='store_true', help='Ignore warnings from womtool.jar.' + ) + parser.add_argument( '--womtool', default=Cromwell.DEFAULT_WOMTOOL, - help='Path or URL for Cromwell\'s womtool JAR file', + help="Path or URL for Cromwell's womtool JAR file", ) - parent_submit.add_argument( + parser.add_argument( '--java-heap-womtool', default=Cromwell.DEFAULT_JAVA_HEAP_WOMTOOL, help='Java heap size for Womtool (java -Xmx)', ) - parent_submit.add_argument( + parser.add_argument( '--max-retries', type=int, default=CaperWorkflowOpts.DEFAULT_MAX_RETRIES, - help='Number of retries for failing tasks. ' - 'equivalent to "maxRetries" in workflow options JSON file.', + help=( + 'Number of retries for failing tasks. equivalent to "maxRetries" in workflow ' + 'options JSON file.' + ), ) - parent_submit.add_argument( + parser.add_argument( '--memory-retry-multiplier', default=CaperWorkflowOpts.DEFAULT_MEMORY_RETRY_MULTIPLIER, - help='(CURRENTLY NOT WORKING) ' - 'If an error caught by --memory-retry-error-keys occurs, ' - 'then increase memory by this ' - 'for retrials controlled by --max-retries. ' - 'See https://cromwell.readthedocs.io/en/develop/cromwell_features/RetryWithMoreMemory/ ' - 'for details.', - ) - parent_submit.add_argument( + help=( + '(CURRENTLY NOT WORKING) If an error caught by --memory-retry-error-keys occurs, ' + 'then increase memory by this for retrials controlled by --max-retries. ' + 'See https://cromwell.readthedocs.io/en/develop/cromwell_features/RetryWithMoreMemory/ ' + 'for details.' + ), + ) + parser.add_argument( '--gcp-monitoring-script', default=CaperWorkflowOpts.DEFAULT_GCP_MONITORING_SCRIPT, - help='Monitoring script for gcp backend only. ' - 'Caper defaults to use its own monitoring script which works fine ' - 'with subcommand gcp_profile. ' - 'To make your script work with gcp_profile, ' - 'make this script generate a TSV with a header in the first row. ' - 'The first column of such TSV will be ignored since it is usually timestamp. ' - 'Check monitoring_script in ' - 'https://cromwell.readthedocs.io/en/stable/wf_options/Google/' - '#google-pipelines-api-workflow-options ' - 'for details.', - ) - group_dep = parent_submit.add_argument_group( + help=( + 'Monitoring script for gcp backend only. Caper defaults to use its own monitoring ' + 'script which works fine with subcommand gcp_profile. To make your script work ' + 'with gcp_profile, make this script generate a TSV with a header in the first row. ' + 'The first column of such TSV will be ignored since it is usually timestamp. 
Check ' + 'monitoring_script in ' + 'https://cromwell.readthedocs.io/en/stable/wf_options/Google/ ' + 'for details.' + ), + ) + + +def _add_dependency_resolver_args(parser: ArgumentParser) -> None: + group_dep = parser.add_argument_group( title='dependency resolver for all backends', - description='Cloud-based backends (gc and aws) will only use Docker ' - 'so that "--docker URI_FOR_DOCKER_IMG" must be specified ' - 'in the command line argument or as a comment "#CAPER ' - 'docker URI_FOR_DOCKER_IMG" or value for "workflow.meta.default_docker"' - 'in a WDL file', + description=( + 'Cloud-based backends (gc and aws) will only use Docker so that ' + '"--docker URI_FOR_DOCKER_IMG" must be specified in the command line argument or ' + 'as a comment "#CAPER docker URI_FOR_DOCKER_IMG" or value for ' + '"workflow.meta.default_docker" in a WDL file' + ), ) group_dep.add_argument( '--docker', nargs='?', const='', default=None, - help='URI for Docker image (e.g. ubuntu:latest). ' - 'This can also be used as a flag to use Docker image URI ' - 'defined in your WDL file as a comment ("#CAPER docker") or ' - 'as "workflow.meta.default_docker" in WDL.', + help=( + 'URI for Docker image (e.g. ubuntu:latest). This can also be used as a flag to use ' + 'Docker image URI defined in your WDL file as a comment ("#CAPER docker") or as ' + '"workflow.meta.default_docker" in WDL.' + ), ) - group_dep_local = parent_submit.add_argument_group( + + group_dep_local = parser.add_argument_group( title='dependency resolver for local backend', - description='Singularity is for local backend only. Other backends ' - '(gcp and aws) will use Docker only. ' - 'Local backend defaults to not use any container-based methods. ' - 'Use "--singularity" or "--docker" to use containers', + description=( + 'Singularity is for local backend only. Other backends (gcp and aws) will use ' + 'Docker only. Local backend defaults to not use any container-based methods. ' + 'Use "--singularity" or "--docker" to use containers' + ), ) group_dep_local.add_argument( '--singularity', nargs='?', const='', default=None, - help='URI or path for Singularity image ' - '(e.g. ~/.singularity/ubuntu-latest.simg, ' - 'docker://ubuntu:latest, shub://vsoch/hello-world). ' - 'This can also be used as a flag to use Singularity image URI ' - 'defined in your WDL file as a comment ("#CAPER singularity") or ' - 'as "workflow.meta.default_singularity" in WDL.', + help=( + 'URI or path for Singularity image (e.g. ~/.singularity/ubuntu-latest.simg, ' + 'docker://ubuntu:latest, shub://vsoch/hello-world). This can also be used as a ' + 'flag to use Singularity image URI defined in your WDL file as a comment ' + '("#CAPER singularity") or as "workflow.meta.default_singularity" in WDL.' + ), ) group_dep_local.add_argument( '--conda', nargs='?', const='', default=None, - help='Default Conda environment\'s name. ' - 'If defined each task in WDL will be called with conda run -n ENV_NAME.' - 'This can also be used as a flag to use Conda environment ' - 'defined in your WDL file under "workflow.meta.default_conda".', + help=( + "Default Conda environment's name. If defined each task in WDL will be called with " + 'conda run -n ENV_NAME. This can also be used as a flag to use Conda environment ' + 'defined in your WDL file under "workflow.meta.default_conda".' 
+ ), ) - group_hpc_submit = parent_submit.add_argument_group( - title='Parameters for "caper hpc submit" command only', + + +def _add_hpc_submit_args(parser: ArgumentParser) -> None: + group_hpc_submit = parser.add_argument_group( + title='Parameters for "caper hpc submit" command only' ) group_hpc_submit.add_argument( '--leader-job-name', - help='Leader job name for a submitted workflow.' - 'This name will be appended to the prefix "CAPER_LEADER_" and then ' - 'submitted to HPC. Such prefix is used to identify Caper leader jobs.', + help=( + 'Leader job name for a submitted workflow. This name will be appended to the ' + 'prefix "CAPER_LEADER_" and then submitted to HPC. Such prefix is used to identify ' + 'Caper leader jobs.' + ), ) group_hpc_submit.add_argument( '--slurm-leader-job-resource-param', - help='Resource parameters to submit a Caper leader job to SLURM. ' - 'Make sure to quote if you use it in the command line arguments.', + help=( + 'Resource parameters to submit a Caper leader job to SLURM. ' + 'Make sure to quote if you use it in the command line arguments.' + ), default=' '.join(SlurmWrapper.DEFAULT_LEADER_JOB_RESOURCE_PARAM), ) group_hpc_submit.add_argument( '--sge-leader-job-resource-param', - help='Resource parameters to submit a Caper leader job to SGE' - 'Make sure to quote if you use it in the command line arguments.', + help=( + 'Resource parameters to submit a Caper leader job to SGE. ' + 'Make sure to quote if you use it in the command line arguments.' + ), default=' '.join(SgeWrapper.DEFAULT_LEADER_JOB_RESOURCE_PARAM), ) group_hpc_submit.add_argument( '--pbs-leader-job-resource-param', - help='Resource parameters to submit a Caper leader job to PBS' - 'Make sure to quote if you use it in the command line arguments.', + help=( + 'Resource parameters to submit a Caper leader job to PBS. ' + 'Make sure to quote if you use it in the command line arguments.' + ), default=' '.join(PbsWrapper.DEFAULT_LEADER_JOB_RESOURCE_PARAM), ) group_hpc_submit.add_argument( '--lsf-leader-job-resource-param', - help='Resource parameters to submit a Caper leader job to LSF' - 'Make sure to quote if you use it in the command line arguments.', + help=( + 'Resource parameters to submit a Caper leader job to LSF. ' + 'Make sure to quote if you use it in the command line arguments.' + ), default=' '.join(LsfWrapper.DEFAULT_LEADER_JOB_RESOURCE_PARAM), ) - group_slurm = parent_submit.add_argument_group('SLURM arguments') + +def _add_scheduler_args(parser: ArgumentParser) -> None: + group_slurm = parser.add_argument_group('SLURM arguments') group_slurm.add_argument('--slurm-partition', help='SLURM partition') group_slurm.add_argument('--slurm-account', help='SLURM account') group_slurm.add_argument( '--slurm-extra-param', help='SLURM extra parameters to be passed to sbatch. ' ) - group_sge = parent_submit.add_argument_group('SGE arguments') - group_sge.add_argument( - '--sge-pe', help='SGE parallel environment. Check with "qconf -spl"' - ) + group_sge = parser.add_argument_group('SGE arguments') + group_sge.add_argument('--sge-pe', help='SGE parallel environment. Check with "qconf -spl"') group_sge.add_argument('--sge-queue', help='SGE queue. Check with "qconf -sql"') - group_sge.add_argument( - '--sge-extra-param', help='SGE extra parameters. Must be double-quoted' - ) + group_sge.add_argument('--sge-extra-param', help='SGE extra parameters. 
Must be double-quoted') - group_pbs = parent_submit.add_argument_group('PBS arguments') + group_pbs = parser.add_argument_group('PBS arguments') group_pbs.add_argument('--pbs-queue', help='PBS queue') - group_pbs.add_argument( - '--pbs-extra-param', help='PBS extra parameters. Must be double-quoted' - ) + group_pbs.add_argument('--pbs-extra-param', help='PBS extra parameters. Must be double-quoted') - group_lsf = parent_submit.add_argument_group('LSF arguments') + group_lsf = parser.add_argument_group('LSF arguments') group_lsf.add_argument('--lsf-queue', help='LSF queue') - group_lsf.add_argument( - '--lsf-extra-param', help='LSF extra parameters. Must be double-quoted' - ) + group_lsf.add_argument('--lsf-extra-param', help='LSF extra parameters. Must be double-quoted') + - # server - parent_server = argparse.ArgumentParser(add_help=False) - parent_server.add_argument( +def _add_server_args(parser: ArgumentParser) -> None: + parser.add_argument( '--java-heap-server', default=Cromwell.DEFAULT_JAVA_HEAP_CROMWELL_SERVER, help='Cromwell Java heap size for "server" mode (java -Xmx)', ) - parent_server.add_argument( + parser.add_argument( '--disable-auto-write-metadata', action='store_true', - help='Disable automatic retrieval/update/writing of metadata.json upon workflow/task status change.', + help=( + 'Disable automatic retrieval/update/writing of metadata.json upon workflow/task ' + 'status change.' + ), ) - # run - parent_run = argparse.ArgumentParser(add_help=False) - parent_run.add_argument( + +def _add_run_args(parser: ArgumentParser) -> None: + parser.add_argument( '-m', '--metadata-output', help='An optional directory path to output metadata JSON file', ) - parent_run.add_argument( + parser.add_argument( '--java-heap-run', default=Cromwell.DEFAULT_JAVA_HEAP_CROMWELL_RUN, help='Cromwell Java heap size for "run" mode (java -Xmx)', ) - # list, metadata, abort - parent_search_wf = argparse.ArgumentParser(add_help=False) - parent_search_wf.add_argument( + +def _add_search_args(parser: ArgumentParser) -> None: + parser.add_argument( 'wf_id_or_label', nargs='*', - help='List of workflow IDs to find matching workflows to ' - 'commit a specified action (list, metadata and abort). ' - 'Wildcards (* and ?) are allowed.', + help=( + 'List of workflow IDs to find matching workflows to commit a specified ' + 'action (list, metadata and abort). Wildcards (* and ?) are allowed.' + ), ) - # server, all client subcommands - parent_server_client = argparse.ArgumentParser(add_help=False) - parent_server_client.add_argument( - '--port', - type=int, - default=Cromwell.DEFAULT_SERVER_PORT, - help='Port for Caper server', + +def _add_server_client_args(parser: ArgumentParser) -> None: + parser.add_argument( + '--port', type=int, default=Cromwell.DEFAULT_SERVER_PORT, help='Port for Caper server' ) - parent_server_client.add_argument( - '--no-server-heartbeat', - action='store_true', - help='Disable server heartbeat file.', + parser.add_argument( + '--no-server-heartbeat', action='store_true', help='Disable server heartbeat file.' ) - parent_server_client.add_argument( + parser.add_argument( '--server-heartbeat-file', default=ServerHeartbeat.DEFAULT_SERVER_HEARTBEAT_FILE, help='Heartbeat file for Caper clients to get IP and port of a server', ) - parent_server_client.add_argument( + parser.add_argument( '--server-heartbeat-timeout', type=int, default=ServerHeartbeat.DEFAULT_HEARTBEAT_TIMEOUT_MS, - help='Timeout for a heartbeat file in Milliseconds. 
' - 'A heartbeat file older than ' - 'this interval will be ignored.', + help=( + 'Timeout for a heartbeat file in Milliseconds. ' + 'A heartbeat file older than this interval will be ignored.' + ), ) - parent_client = argparse.ArgumentParser(add_help=False) - parent_client.add_argument( + +def _add_client_args(parser: ArgumentParser) -> None: + parser.add_argument( '--hostname', '--ip', default=CromwellRestAPI.DEFAULT_HOSTNAME, help='Hostname (or IP address) of Caper server.', ) - # list - parent_list = argparse.ArgumentParser(add_help=False) - parent_list.add_argument( + +def _add_list_args(parser: ArgumentParser) -> None: + parser.add_argument( '-f', '--format', default=DEFAULT_LIST_FORMAT, - help='Comma-separated list of items to be shown for "list" ' - 'subcommand. Any key name in workflow JSON from Cromwell ' - 'server\'s response is allowed. ' - 'Available keys are "id" (workflow ID), "status", "str_label", ' - '"name" (WDL/CWL name), "submission" (date/time), "start", ' - '"end" and "user". ' - '"str_label" is a special key for Caper. See help context ' - 'of "--str-label" for details', - ) - parent_list.add_argument( + help=( + 'Comma-separated list of items to be shown for "list" subcommand. Any key name in ' + "workflow JSON from Cromwell server's response is allowed. " + 'Available keys are "id" (workflow ID), "status", "str_label", "name" ' + '(WDL/CWL name), "submission" (date/time), "start", "end" and "user". ' + '"str_label" is a special key for Caper. See help context of "--str-label" ' + 'for details' + ), + ) + parser.add_argument( '--hide-result-before', - help='Hide workflows submitted before this date/time. ' - 'Use the same (or shorter) date/time format shown in ' - '"caper list". ' - 'e.g. 2019-06-13, 2019-06-13T10:07', + help=( + 'Hide workflows submitted before this date/time. Use the same (or shorter) ' + 'date/time format shown in "caper list". e.g. 2019-06-13, 2019-06-13T10:07' + ), ) - parent_list.add_argument( + parser.add_argument( '--show-subworkflow', action='store_true', - help='Show subworkflows in "caper list". ' - 'WARNING: If there are too many subworkflows, ' - 'this can result in crash of Caper/Cromwell server ', + help=( + 'Show sub-workflows in "caper list". WARNING: If there are too many ' + 'sub-workflows, this can result in crash of Caper/Cromwell server' + ), ) - # troubleshoot/debug - parent_troubleshoot = argparse.ArgumentParser(add_help=False) - parent_troubleshoot.add_argument( + +def _add_troubleshoot_args(parser: ArgumentParser) -> None: + parser.add_argument( '--show-completed-task', action='store_true', help='Show information about completed tasks.', ) - parent_troubleshoot.add_argument( - '--show-stdout', action='store_true', help='Show STDOUT for failed tasks.' - ) + parser.add_argument('--show-stdout', action='store_true', help='Show STDOUT for failed tasks.') - # gcp_monitor - parent_gcp_monitor = argparse.ArgumentParser(add_help=False) - parent_gcp_monitor.add_argument( - '--json-format', - action='store_true', - help='Prints out outputs in a JSON format.', + +def _add_gcp_monitor_args(parser: ArgumentParser) -> None: + parser.add_argument( + '--json-format', action='store_true', help='Prints out outputs in a JSON format.' 
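# ---- editor's aside (illustrative sketch, not part of this diff) ----
# A minimal, self-contained sketch of the refactoring pattern used above:
# small `_add_*_args` helpers populate argument groups on a bare parser, and
# `add_help=False` parent parsers are composed into subcommands. All names
# here are hypothetical, not Caper's.
import argparse
from argparse import ArgumentParser

def _add_common_args_demo(parser: ArgumentParser) -> None:
    group = parser.add_argument_group(title='Common arguments')
    group.add_argument('--dry-run', action='store_true', help='Validate without running.')

def _create_parent_common_demo() -> ArgumentParser:
    # Parents must use add_help=False so -h/--help is not defined twice.
    parser = argparse.ArgumentParser(add_help=False)
    _add_common_args_demo(parser)
    return parser

main = argparse.ArgumentParser(prog='demo')
sub = main.add_subparsers(dest='action')
sub.add_parser('run', parents=[_create_parent_common_demo()])
sub.add_parser('submit', parents=[_create_parent_common_demo()])
args = main.parse_args(['run', '--dry-run'])
assert args.action == 'run' and args.dry_run
# ---- end aside ----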
) - # gcp_res_analysis - parent_gcp_res_analysis = argparse.ArgumentParser(add_help=False) - parent_gcp_res_analysis.add_argument( + +def _add_gcp_res_analysis_args(parser: ArgumentParser) -> None: + parser.add_argument( '--in-file-vars-def-json', - help='JSON file to define task name and input file variabless ' - 'to be included in resource analysis. ' - 'Key: task name, wild-cards (*, ?) are allowed. ' - 'Value: list of input file var names. ' - 'e.g. "atac.align*": ["fastqs_R1", "fastqs_R2"]. ' - 'Once this file is defined, tasks not included in it will be ignored.', - ) - parent_gcp_res_analysis.add_argument( + help=( + 'JSON file to define task name and input file variables to be included in ' + 'resource analysis. Key: task name, wild-cards (*, ?) are allowed. ' + 'Value: list of input file var names. e.g. "atac.align*": ["fastqs_R1", ' + '"fastqs_R2"]. Once this file is defined, tasks not included in it will ' + 'be ignored.' + ), + ) + parser.add_argument( '--reduce-in-file-vars', choices=[method.name for method in list(ResourceAnalysisReductionMethod)], - default=ResourceAnalysisReductionMethod.sum.name, - help='Reduce X matrix (resource data) into a vector. ' - 'e.g. summing up all input file sizes. ' - 'Reducing X will convert a multiple linear regression into a single linear regression. ' - 'This is useful since single linear regression requires much less data ' - '(at least 2 for each task). ' - 'Choose NONE to keep all input file variables ' - 'without reduction in the analysis. ' - '2D Scatter plot (--plot-pdf) will not available for analysis without reduction. ' - 'If NONE then make sure that number of datasets (per task) ' - '> number of input file variables in a task.', - ) - parent_gcp_res_analysis.add_argument( + default=ResourceAnalysisReductionMethod.SUM.name, # type: ignore[missing-attribute] + help=( + 'Reduce X matrix (resource data) into a vector. e.g. summing up all input file ' + 'sizes. Reducing X will convert a multiple linear regression into a single linear ' + 'regression. This is useful since single linear regression requires much less data ' + '(at least 2 for each task). Choose NONE to keep all input file variables without ' + 'reduction in the analysis. 2D scatter plot (--plot-pdf) will not be available for ' + 'analysis without reduction. If NONE then make sure that number of datasets ' + '(per task) > number of input file variables in a task.' + ), + ) + parser.add_argument( '--target-resources', nargs='+', default=list(ResourceAnalysis.DEFAULT_TARGET_RESOURCES), - help='Keys for resources in a JSON gcp_monitor outputs, ' - 'which forms y vector for a linear problem. ' - 'Analysis will be done separately for each key (resource metric). ' - 'See help for gcp_monitor to find available resources. ' - 'e.g. stats.max.disk, stats.mean.cpu_pct.', - ) - parent_gcp_res_analysis.add_argument( + help=( + 'Keys for resources in JSON gcp_monitor outputs, which form the y vector for a ' + 'linear problem. Analysis will be done separately for each key (resource metric). ' + 'See help for gcp_monitor to find available resources. ' + 'e.g. stats.max.disk, stats.mean.cpu_pct.' + ), + ) + parser.add_argument( '--plot-pdf', - help='Local path for a 2D scatter plot PDF file. ' - 'Scatter plot will not be available if --reduce-in-file-vars is none.', + help=( + 'Local path for a 2D scatter plot PDF file. Scatter plot will not be ' + 'available if --reduce-in-file-vars is none.'
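# ---- editor's aside (illustrative sketch, not part of this diff) ----
# The `choices=[method.name for method in ...]` / `default=....SUM.name`
# pattern above, sketched with a stand-in enum. Caper's real
# ResourceAnalysisReductionMethod members are assumed to be uppercase (SUM,
# etc.) per the rename in this diff; the stand-in names are hypothetical.
import argparse
from enum import Enum

class ReductionMethodDemo(Enum):
    SUM = 'sum'
    MAX = 'max'
    NONE = 'none'

parser = argparse.ArgumentParser()
parser.add_argument(
    '--reduce',
    choices=[method.name for method in ReductionMethodDemo],
    default=ReductionMethodDemo.SUM.name,
)
args = parser.parse_args([])
# Convert the parsed string back into an enum member when needed.
method = ReductionMethodDemo[args.reduce]
assert method is ReductionMethodDemo.SUM
# ---- end aside ----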
+ ), ) - # cleanup - parent_cleanup = argparse.ArgumentParser(add_help=False) - parent_cleanup.add_argument( + +def _add_cleanup_args(parser: ArgumentParser) -> None: + parser.add_argument( '--delete', action='store_true', help='DELETE OUTPUTS. caper cleanup runs in a dry-run mode by default. ', ) - parent_cleanup.add_argument( + parser.add_argument( '--num-threads', default=URIBase.DEFAULT_NUM_THREADS, type=int, - help='Number of threads for cleaning up workflow\'s outputs. ' - 'This is used for cloud backends only.', + help=( + "Number of threads for cleaning up workflow's outputs. This is used for " + 'cloud backends only.' + ), ) - # hpc abort - parent_hpc_abort = argparse.ArgumentParser(add_help=False) - parent_hpc_abort.add_argument( + +def _add_hpc_abort_args(parser: ArgumentParser) -> None: + parser.add_argument( 'job_ids', nargs='+', help='Job ID or list of job IDs to abort matching Caper leader jobs.', ) + +# Parent parser factories + + +def _create_parent_init() -> ArgumentParser: + parser = argparse.ArgumentParser(add_help=False) + parser.add_argument( + 'platform', + help='Backend provider to initialize Caper for.', + choices=[*list(BackendProvider), None], + default=None, + ) + return parser + + +def _create_parent_all() -> ArgumentParser: + parser = argparse.ArgumentParser(add_help=False) + _add_common_args(parser) + _add_localization_args(parser) + return parser + + +def _create_parent_backend() -> ArgumentParser: + parser = argparse.ArgumentParser(add_help=False) + _add_backend_args(parser) + _add_gcp_zones_args(parser) + return parser + + +def _create_parent_runner() -> ArgumentParser: + parser = argparse.ArgumentParser(add_help=False) + _add_db_args(parser) + _add_cromwell_args(parser) + _add_local_backend_args(parser) + _add_gcp_runner_args(parser) + _add_aws_runner_args(parser) + return parser + + +def _create_parent_submit() -> ArgumentParser: + parser = argparse.ArgumentParser(add_help=False) + _add_submit_io_args(parser) + _add_dependency_resolver_args(parser) + _add_hpc_submit_args(parser) + _add_scheduler_args(parser) + return parser + + +def _create_parent_server() -> ArgumentParser: + parser = argparse.ArgumentParser(add_help=False) + _add_server_args(parser) + return parser + + +def _create_parent_run() -> ArgumentParser: + parser = argparse.ArgumentParser(add_help=False) + _add_run_args(parser) + return parser + + +def _create_parent_search_wf() -> ArgumentParser: + parser = argparse.ArgumentParser(add_help=False) + _add_search_args(parser) + return parser + + +def _create_parent_server_client() -> ArgumentParser: + parser = argparse.ArgumentParser(add_help=False) + _add_server_client_args(parser) + return parser + + +def _create_parent_client() -> ArgumentParser: + parser = argparse.ArgumentParser(add_help=False) + _add_client_args(parser) + return parser + + +def _create_parent_list() -> ArgumentParser: + parser = argparse.ArgumentParser(add_help=False) + _add_list_args(parser) + return parser + + +def _create_parent_troubleshoot() -> ArgumentParser: + parser = argparse.ArgumentParser(add_help=False) + _add_troubleshoot_args(parser) + return parser + + +def _create_parent_gcp_monitor() -> ArgumentParser: + parser = argparse.ArgumentParser(add_help=False) + _add_gcp_monitor_args(parser) + return parser + + +def _create_parent_gcp_res_analysis() -> ArgumentParser: + parser = argparse.ArgumentParser(add_help=False) + _add_gcp_res_analysis_args(parser) + return parser + + +def _create_parent_cleanup() -> ArgumentParser: + parser = 
argparse.ArgumentParser(add_help=False) + _add_cleanup_args(parser) + return parser + + +def _create_parent_hpc_abort() -> ArgumentParser: + parser = argparse.ArgumentParser(add_help=False) + _add_hpc_abort_args(parser) + return parser + + +def get_defaults( + conf_file: str | None = None, +) -> dict[str, str | bool | int | float | None] | None: + """ + Wrapper for `get_parser_and_defaults()`. + + Use this function to get default values updated with `conf_file`. + + Args: + conf_file: + `DEFAULT_CAPER_CONF` will be used if it is None. + + Returns updated defaults only. + """ + _, conf_dict = get_parser_and_defaults(conf_file=conf_file) + return conf_dict + + +def get_parser_and_defaults( + conf_file: str | None = None, +) -> tuple[ArgumentParser, dict[str, bool | float | int | str | None] | None]: + """ + Creates a main parser and makes a subparser for each subcommand. + + There are many parent parsers defined here. + Each subparser will take a certain combination of these parent parsers + to share the same parameter arguments between subcommands. + e.g. subcommands run and server share the same --cromwell argument, which + is defined in a parent parser "parent_runner". + + Finally, each sub-parser's defaults are updated with values defined in conf_file. + + Args: + conf_file: + If defined, this will be used instead of partially parsing command line + arguments to find conf_file (-c). + `DEFAULT_CAPER_CONF` will be used if it is None. + + Returns: + parser: + ArgumentParser object with all arguments defined for each sub-command (subparser). + conf_dict: + Dict of key/value pairs parsed from conf_file. + Each value is converted into a correct type guessed from + defaults of arguments defined in the ArgumentParser object. + """ + parser = argparse.ArgumentParser(description='Caper (Cromwell-assisted Pipeline ExecutioneR)') + parser.add_argument('-v', '--version', action='store_true', help='Show version') + + subparser = parser.add_subparsers(dest='action') + + parent_init = _create_parent_init() + parent_all = _create_parent_all() + parent_backend = _create_parent_backend() + parent_runner = _create_parent_runner() + parent_submit = _create_parent_submit() + parent_server = _create_parent_server() + parent_run = _create_parent_run() + parent_search_wf = _create_parent_search_wf() + parent_server_client = _create_parent_server_client() + parent_client = _create_parent_client() + parent_list = _create_parent_list() + parent_troubleshoot = _create_parent_troubleshoot() + parent_gcp_monitor = _create_parent_gcp_monitor() + parent_gcp_res_analysis = _create_parent_gcp_res_analysis() + parent_cleanup = _create_parent_cleanup() + parent_hpc_abort = _create_parent_hpc_abort() + # all subcommands p_init = subparser.add_parser( 'init', - help='Initialize Caper\'s configuration file. THIS WILL OVERWRITE ON ' - 'A SPECIFIED(-c)/DEFAULT CONF FILE. e.g. ~/.caper/default.conf.', + help=( + "Initialize Caper's configuration file. THIS WILL OVERWRITE A " + 'SPECIFIED(-c)/DEFAULT CONF FILE. e.g. ~/.caper/default.conf.'
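# ---- editor's aside (illustrative sketch, not part of this diff) ----
# Hypothetical usage of the two helpers above; the conf file path is a
# placeholder and the return shapes follow the signatures in this diff:
#
#     parser, conf_dict = get_parser_and_defaults(conf_file='~/.caper/default.conf')
#     args = parser.parse_args(['list'])   # defaults already reflect conf_file
#     conf_only = get_defaults()           # falls back to DEFAULT_CAPER_CONF
# ---- end aside ----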
+ ), parents=[parent_all, parent_init], ) p_run = subparser.add_parser( @@ -831,8 +1029,7 @@ def get_parser_and_defaults(conf_file=None): ) p_troubleshoot = subparser.add_parser( 'troubleshoot', - help='Troubleshoot workflow problems from metadata JSON file or ' - 'workflow IDs', + help='Troubleshoot workflow problems from metadata JSON file or workflow IDs', parents=[ parent_all, parent_server_client, @@ -853,18 +1050,13 @@ def get_parser_and_defaults(conf_file=None): ], ) - p_hpc = subparser.add_parser( - 'hpc', - help='Subcommand for HPCs', - parents=[parent_all], - ) + p_hpc = subparser.add_parser('hpc', help='Subcommand for HPCs', parents=[parent_all]) subparser_hpc = p_hpc.add_subparsers(dest='hpc_action') subparser_hpc.add_parser( 'submit', help='Submit a single workflow to HPC.', parents=[parent_all, parent_submit, parent_run, parent_runner, parent_backend], ) - subparser_hpc.add_parser( 'list', help='List all workflows submitted to HPC.', @@ -878,10 +1070,11 @@ def get_parser_and_defaults(conf_file=None): p_gcp_monitor = subparser.add_parser( 'gcp_monitor', - help='Tabulate task\'s resource data collected on ' - 'instances run on Google Cloud Compute. ' - 'Use this for any workflows run with Caper>=1.2.0 on gcp backend. ' - 'This is for gcp backend only.', + help=( + "Tabulate task's resource data collected on instances run on Google Cloud " + 'Compute. Use this for any workflows run with Caper>=1.2.0 on gcp backend. ' + 'This is for gcp backend only.' + ), parents=[ parent_all, parent_server_client, @@ -892,21 +1085,19 @@ def get_parser_and_defaults(conf_file=None): ) p_gcp_res_analysis = subparser.add_parser( 'gcp_res_analysis', - help='Linear resource analysis on monitoring data collected on ' - 'instances run on Google Cloud Compute. This is for gcp backend only. ' - 'Use this for any workflows run with Caper>=1.2.0 on gcp backend. ' - 'Calculates coefficients/intercept for task\'s required resources ' - 'based on input file size of a task. ' - 'For each task it solves a linear regression problem of y=Xc + i1 + e where ' - 'X is a matrix (m by n) of input file sizes and ' - 'c is a coefficient vector (n by 1) and ' - 'i is intercept and 1 is ones vector. ' - 'y is a vector (m by 1) of resource taken and ' - 'e is residual to be minimized. ' - 'm is number of dataset and n is number of input file variables. ' - 'Each resource metric will be solved separately. ' - 'Refer to --target-resources for details about available resource metrics. ' - 'Output will be a tuple of coefficient vector and intercept. ', + help=( + 'Linear resource analysis on monitoring data collected on instances run on Google ' + 'Cloud Compute. This is for gcp backend only. Use this for any workflows run with ' + "Caper>=1.2.0 on gcp backend. Calculates coefficients/intercept for task's " + 'required resources based on input file size of a task. For each task it solves a ' + 'linear regression problem of y=Xc + i1 + e where X is a matrix (m by n) of input ' + 'file sizes and c is a coefficient vector (n by 1) and i is intercept and 1 is ' + 'ones vector. y is a vector (m by 1) of resources taken and e is residual to be ' + 'minimized. m is number of datasets and n is number of input file variables. Each ' + 'resource metric will be solved separately. Refer to --target-resources for ' + 'details about available resource metrics. Output will be a tuple of coefficient ' + 'vector and intercept.'
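# ---- editor's aside (illustrative sketch, not part of this diff) ----
# The model in the help text above, y = Xc + i*1 + e, solved with plain numpy
# least squares. This is a generic sketch of the math only, not Caper's actual
# ResourceAnalysis implementation; the numbers are made up.
import numpy as np

X = np.array([[1.0, 2.0], [2.0, 1.0], [3.0, 4.0], [4.0, 3.0]])  # m x n input file sizes
y = np.array([10.0, 9.0, 22.0, 21.0])                           # m resource measurements

# Append a ones column so the intercept i is estimated together with c.
X1 = np.hstack([X, np.ones((X.shape[0], 1))])
solution, *_ = np.linalg.lstsq(X1, y, rcond=None)
coeffs, intercept = solution[:-1], solution[-1]

# "Reducing" X (e.g. summing all input file sizes per dataset) turns this into
# a single-variable regression, which needs far fewer data points per task.
x_reduced = X.sum(axis=1, keepdims=True)
# ---- end aside ----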
+ ), parents=[ parent_all, parent_server_client, diff --git a/caper/caper_backend_conf.py b/caper/caper_backend_conf.py index 5dc6bfd6..0254bc4c 100644 --- a/caper/caper_backend_conf.py +++ b/caper/caper_backend_conf.py @@ -1,13 +1,16 @@ +"""Module for creating the Cromwell backend configuration file.""" +from __future__ import annotations + import logging import os from copy import deepcopy +from typing import TYPE_CHECKING from autouri import AutoURI +from caper.cromwell_backend import BackendProvider, CachingDuplicationStrategyArgs + from .cromwell_backend import ( - BACKEND_AWS, - BACKEND_GCP, - BACKEND_SGE, CromwellBackendAws, CromwellBackendBase, CromwellBackendCommon, @@ -22,63 +25,73 @@ from .dict_tool import merge_dict from .hocon_string import HOCONString +if TYPE_CHECKING: + from collections.abc import Sequence + logger = logging.getLogger(__name__) class CaperBackendConf: + """Class for creating the Cromwell backend configuration file.""" + BACKEND_CONF_INCLUDE = 'include required(classpath("application"))' BASENAME_BACKEND_CONF = 'backend.conf' def __init__( self, - default_backend, - local_out_dir, - disable_call_caching=False, - max_concurrent_workflows=CromwellBackendCommon.DEFAULT_MAX_CONCURRENT_WORKFLOWS, - memory_retry_error_keys=CromwellBackendCommon.DEFAULT_MEMORY_RETRY_ERROR_KEYS, - max_concurrent_tasks=CromwellBackendBase.DEFAULT_CONCURRENT_JOB_LIMIT, - soft_glob_output=False, - local_hash_strat=CromwellBackendLocal.DEFAULT_LOCAL_HASH_STRAT, - db=CromwellBackendDatabase.DEFAULT_DB, - db_timeout=CromwellBackendDatabase.DEFAULT_DB_TIMEOUT_MS, - mysql_db_ip=CromwellBackendDatabase.DEFAULT_MYSQL_DB_IP, - mysql_db_port=CromwellBackendDatabase.DEFAULT_MYSQL_DB_PORT, - mysql_db_user=CromwellBackendDatabase.DEFAULT_MYSQL_DB_USER, - mysql_db_password=CromwellBackendDatabase.DEFAULT_MYSQL_DB_PASSWORD, - mysql_db_name=CromwellBackendDatabase.DEFAULT_MYSQL_DB_NAME, - postgresql_db_ip=CromwellBackendDatabase.DEFAULT_POSTGRESQL_DB_IP, - postgresql_db_port=CromwellBackendDatabase.DEFAULT_POSTGRESQL_DB_PORT, - postgresql_db_user=CromwellBackendDatabase.DEFAULT_POSTGRESQL_DB_USER, - postgresql_db_password=CromwellBackendDatabase.DEFAULT_POSTGRESQL_DB_PASSWORD, - postgresql_db_name=CromwellBackendDatabase.DEFAULT_POSTGRESQL_DB_NAME, - file_db=None, - gcp_prj=None, - gcp_out_dir=None, - gcp_call_caching_dup_strat=CromwellBackendGcp.DEFAULT_CALL_CACHING_DUP_STRAT, - gcp_service_account_key_json=None, - use_google_cloud_life_sciences=False, - gcp_region=CromwellBackendGcp.DEFAULT_REGION, - aws_batch_arn=None, - aws_region=None, - aws_out_dir=None, - aws_call_caching_dup_strat=CromwellBackendAws.DEFAULT_CALL_CACHING_DUP_STRAT, - gcp_zones=None, - slurm_partition=None, - slurm_account=None, - slurm_extra_param=None, - slurm_resource_param=CromwellBackendSlurm.DEFAULT_SLURM_RESOURCE_PARAM, - sge_pe=None, - sge_queue=None, - sge_extra_param=None, - sge_resource_param=CromwellBackendSge.DEFAULT_SGE_RESOURCE_PARAM, - pbs_queue=None, - pbs_extra_param=None, - pbs_resource_param=CromwellBackendPbs.DEFAULT_PBS_RESOURCE_PARAM, - lsf_queue=None, - lsf_extra_param=None, - lsf_resource_param=CromwellBackendLsf.DEFAULT_LSF_RESOURCE_PARAM, - ): - """Initializes the backend conf's stanzas. 
+ default_backend: BackendProvider, + local_out_dir: str, + disable_call_caching: bool = False, + max_concurrent_workflows: int = CromwellBackendCommon.DEFAULT_MAX_CONCURRENT_WORKFLOWS, + memory_retry_error_keys: Sequence[str] + | None = CromwellBackendCommon.DEFAULT_MEMORY_RETRY_ERROR_KEYS, + max_concurrent_tasks: int = CromwellBackendBase.DEFAULT_CONCURRENT_JOB_LIMIT, + soft_glob_output: bool = False, + local_hash_strat: str = CromwellBackendLocal.DEFAULT_LOCAL_HASH_STRAT, + db: str = CromwellBackendDatabase.DEFAULT_DB, + db_timeout: int = CromwellBackendDatabase.DEFAULT_DB_TIMEOUT_MS, + mysql_db_ip: str = CromwellBackendDatabase.DEFAULT_MYSQL_DB_IP, + mysql_db_port: int = CromwellBackendDatabase.DEFAULT_MYSQL_DB_PORT, + mysql_db_user: str = CromwellBackendDatabase.DEFAULT_MYSQL_DB_USER, + mysql_db_password: str = CromwellBackendDatabase.DEFAULT_MYSQL_DB_PASSWORD, + mysql_db_name: str = CromwellBackendDatabase.DEFAULT_MYSQL_DB_NAME, + postgresql_db_ip: str = CromwellBackendDatabase.DEFAULT_POSTGRESQL_DB_IP, + postgresql_db_port: int = CromwellBackendDatabase.DEFAULT_POSTGRESQL_DB_PORT, + postgresql_db_user: str = CromwellBackendDatabase.DEFAULT_POSTGRESQL_DB_USER, + postgresql_db_password: str = CromwellBackendDatabase.DEFAULT_POSTGRESQL_DB_PASSWORD, + postgresql_db_name: str = CromwellBackendDatabase.DEFAULT_POSTGRESQL_DB_NAME, + file_db: str | None = None, + gcp_prj: str | None = None, + gcp_out_dir: str | None = None, + gcp_call_caching_dup_strat: CachingDuplicationStrategyArgs = CromwellBackendGcp.DEFAULT_CALL_CACHING_DUP_STRAT, + gcp_service_account_key_json: str | None = None, + gcp_compute_service_account: str | None = None, + gcp_network: str | None = None, + gcp_subnetwork: str | None = None, + gcp_dockerhub_mirror: bool = True, + gcp_dockerhub_mirror_address: str = 'mirror.gcr.io', + gcp_region: str = CromwellBackendGcp.DEFAULT_REGION, + aws_batch_arn: str | None = None, + aws_region: str | None = None, + aws_out_dir: str | None = None, + aws_call_caching_dup_strat: CachingDuplicationStrategyArgs = CromwellBackendAws.DEFAULT_CALL_CACHING_DUP_STRAT, + slurm_partition: str | None = None, + slurm_account: str | None = None, + slurm_extra_param: str | None = None, + slurm_resource_param: str = CromwellBackendSlurm.DEFAULT_SLURM_RESOURCE_PARAM, + sge_pe: str | None = None, + sge_queue: str | None = None, + sge_extra_param: str | None = None, + sge_resource_param: str = CromwellBackendSge.DEFAULT_SGE_RESOURCE_PARAM, + pbs_queue: str | None = None, + pbs_extra_param: str | None = None, + pbs_resource_param: str = CromwellBackendPbs.DEFAULT_PBS_RESOURCE_PARAM, + lsf_queue: str | None = None, + lsf_extra_param: str | None = None, + lsf_resource_param: str = CromwellBackendLsf.DEFAULT_LSF_RESOURCE_PARAM, + ) -> None: + """ + Initializes the backend conf's stanzas. Args: default_backend: @@ -100,7 +113,7 @@ def __init__( max_concurrent_tasks: Limit for concurrent number of tasks for each workflow. soft_glob_output: - Local backends only (Local, sge, pbs, slurm, lsf). + Local backends only (local, sge, pbs, slurm, lsf). Glob with ln -s instead of hard-linking (ln alone). Useful for file-system like beeGFS, which does not allow hard-linking. local_hash_strat: @@ -142,19 +155,22 @@ def __init__( Call-caching duplication strategy for GCP backend. gcp_service_account_key_json: GCP service account key JSON. - If defined, service_account scheme will be used instead of application_default - in Cromwell. - use_google_cloud_life_sciences: - Use Google Cloud Life Sciences API. 
- This requires only one zone specified in gcp_zones. - If not specified default zone will be used. - See Cromwell document: - https://cromwell.readthedocs.io/en/stable/backends/Google/. - Also check supported zones: - https://cloud.google.com/life-sciences/docs/concepts/locations + gcp_compute_service_account: + The email of the GCP service account to use for the + Batch compute instances. If not provided, the default Compute + Engine service account will be used. Ensure that this service + account has the `roles/batch.agentReporter` role, so that + VM instances can report their status to Batch. + gcp_network: + VPC network name for GCP Batch backend. Required for VPCs in custom subnet mode. + gcp_subnetwork: + VPC subnetwork name for GCP Batch backend. Required for VPCs in custom subnet mode. + gcp_dockerhub_mirror: + Enable Docker Hub mirroring through Google Artifact Registry. + gcp_dockerhub_mirror_address: + Address of the Docker Hub mirror. gcp_region: - Region for Google Cloud Life Sciences API. - Ignored if not use_google_cloud_life_sciences. + Region for Google Cloud Batch API. aws_batch_arn: ARN for AWS Batch. aws_region: @@ -163,31 +179,37 @@ Output bucket path for aws backend. Must start with s3://. aws_call_caching_dup_strat: Call-caching duplication strategy for AWS backend. - gcp_zones: - List of zones for Google Cloud Genomics API. - For this and all arguments below this, - see details in CaperWorkflowOpts.__init__. - These parameters can be defined either in a backend conf file or - in a workflow options JSON file. - One major difference is that the former will also be used as defaults. slurm_partition: + SLURM partition if required to sbatch jobs. slurm_account: + SLURM account if required to sbatch jobs. slurm_extra_param: + SLURM extra parameter to be appended to sbatch command line. slurm_resource_param: For slurm backend only. Resource parameters to be passed to sbatch. You can use WDL syntax and Cromwell's built-in variables in ${} notation. e.g. cpu, time, memory_mb sge_pe: + SGE parallel environment (required to run with multiple cpus). sge_queue: + SGE queue. sge_extra_param: + SGE extra parameter to be appended to qsub command line. sge_resource_param: + SGE resource parameters to be passed to qsub. pbs_queue: + PBS queue. pbs_extra_param: + PBS extra parameter to be appended to qsub command line. pbs_resource_param: + PBS resource parameters to be passed to qsub. lsf_queue: + LSF queue. lsf_extra_param: + LSF extra parameter to be appended to bsub command line. lsf_resource_param: + LSF resource parameters to be passed to bsub. """ self._template = {} @@ -288,15 +310,13 @@ def __init__( # cloud backends if gcp_prj and gcp_out_dir: if gcp_service_account_key_json: - gcp_service_account_key_json = os.path.expanduser( - gcp_service_account_key_json - ) + gcp_service_account_key_json = os.path.expanduser(gcp_service_account_key_json) if not os.path.exists(gcp_service_account_key_json): - raise FileNotFoundError( - 'gcp_service_account_key_json does not exist. f={f}'.format( - f=gcp_service_account_key_json - ) + msg = ( + f'gcp_service_account_key_json does not exist. 
' + f'f={gcp_service_account_key_json}' ) + raise FileNotFoundError(msg) merge_dict( self._template, @@ -306,9 +326,12 @@ def __init__( gcp_out_dir=gcp_out_dir, call_caching_dup_strat=gcp_call_caching_dup_strat, gcp_service_account_key_json=gcp_service_account_key_json, - use_google_cloud_life_sciences=use_google_cloud_life_sciences, + gcp_compute_service_account=gcp_compute_service_account, + gcp_network=gcp_network, + gcp_subnetwork=gcp_subnetwork, + gcp_dockerhub_mirror=gcp_dockerhub_mirror, + gcp_dockerhub_mirror_address=gcp_dockerhub_mirror_address, gcp_region=gcp_region, - gcp_zones=gcp_zones, ), ) @@ -335,14 +358,17 @@ def __init__( def create_file( self, - directory, - backend=None, - custom_backend_conf=None, - basename=BASENAME_BACKEND_CONF, - ): - """Create a HOCON string and create a backend.conf file. + directory: str, + backend: BackendProvider | None = None, + custom_backend_conf: str | None = None, + basename: str = BASENAME_BACKEND_CONF, + ) -> str: + """ + Create a HOCON string and create a backend.conf file. Args: + directory: + Directory to create a backend.conf file. backend: Backend to run a workflow on. Default backend will be use if not defined. @@ -354,41 +380,31 @@ def create_file( """ template = deepcopy(self._template) - if backend == BACKEND_SGE: + if backend == BackendProvider.SGE: if self._sge_pe is None: - raise ValueError( - 'sge-pe (Sun GridEngine parallel environment) ' - 'is required for backend sge.' + msg = ( + 'sge-pe (Sun GridEngine parallel environment) is required for backend sge.' ) - elif backend == BACKEND_GCP: + raise ValueError(msg) + elif backend == BackendProvider.GCP: if self._gcp_prj is None: - raise ValueError( - 'gcp-prj (Google Cloud Platform project) ' - 'is required for backend gcp.' - ) + msg = 'gcp-prj (Google Cloud Platform project) is required for backend gcp.' + raise ValueError(msg) if self._gcp_out_dir is None: - raise ValueError( - 'gcp-out-dir (gs:// output bucket path) ' - 'is required for backend gcp.' - ) - elif backend == BACKEND_AWS: + msg = 'gcp-out-dir (gs:// output bucket path) is required for backend gcp.' + raise ValueError(msg) + elif backend == BackendProvider.AWS: if self._aws_batch_arn is None: - raise ValueError( - 'aws-batch-arn (ARN for AWS Batch) ' 'is required for backend aws.' - ) + msg = 'aws-batch-arn (ARN for AWS Batch) is required for backend aws.' + raise ValueError(msg) if self._aws_region is None: - raise ValueError( - 'aws-region (AWS region) ' 'is required for backend aws.' - ) + msg = 'aws-region (AWS region) is required for backend aws.' + raise ValueError(msg) if self._aws_out_dir is None: - raise ValueError( - 'aws-out-dir (s3:// output bucket path) ' - 'is required for backend aws.' - ) + msg = 'aws-out-dir (s3:// output bucket path) is required for backend aws.' 
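# ---- editor's aside (illustrative sketch, not part of this diff) ----
# The recurring `msg = ...; raise ValueError(msg)` rewrite in this diff matches
# common lint guidance (e.g. Ruff's EM101/EM102) that exception constructors
# should not receive long inline literals or f-strings; assigning to a variable
# first keeps tracebacks and source readable. A before/after sketch:

def check_backend_before(gcp_prj: str | None) -> None:
    if gcp_prj is None:
        raise ValueError(
            'gcp-prj (Google Cloud Platform project) ' 'is required for backend gcp.'
        )

def check_backend_after(gcp_prj: str | None) -> None:
    if gcp_prj is None:
        msg = 'gcp-prj (Google Cloud Platform project) is required for backend gcp.'
        raise ValueError(msg)
# ---- end aside ----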
+ raise ValueError(msg) - hocon_s = HOCONString.from_dict( - template, include=CaperBackendConf.BACKEND_CONF_INCLUDE - ) + hocon_s = HOCONString.from_dict(template, include=CaperBackendConf.BACKEND_CONF_INCLUDE) if custom_backend_conf is not None: s = AutoURI(custom_backend_conf).read() diff --git a/caper/caper_base.py b/caper/caper_base.py index a163533c..a0a7d35d 100644 --- a/caper/caper_base.py +++ b/caper/caper_base.py @@ -1,30 +1,35 @@ +"""Base class for Caper with localization directory management.""" + import logging import os from datetime import datetime from autouri import GCSURI, S3URI, AbsPath, AutoURI -from .cromwell_backend import BACKEND_AWS, BACKEND_GCP +from .cromwell_backend import BackendProvider logger = logging.getLogger(__name__) class CaperBase: + """Base class managing work/cache/temp directories for localization.""" + ENV_GOOGLE_APPLICATION_CREDENTIALS = 'GOOGLE_APPLICATION_CREDENTIALS' DEFAULT_LOC_DIR_NAME = '.caper_tmp' def __init__( self, - local_loc_dir=None, - gcp_loc_dir=None, - aws_loc_dir=None, - gcp_service_account_key_json=None, - ): - """Manages work/cache/temp directories for localization on the following - storages: - - Local*: Local path -> local_loc_dir** + local_loc_dir: str | None = None, + gcp_loc_dir: str | None = None, + aws_loc_dir: str | None = None, + gcp_service_account_key_json: str | None = None, + ) -> None: + """Manage work/cache/temp directories for localization. + + Storages: + - local*: Local path -> local_loc_dir** + - gcp: GCS bucket path -> gcp_loc_dir - - aws: S3 bucket path -> aws_loc_dir + - aws: S3 bucket path -> aws_loc_dir. * Note that it starts with capital L, which is a default backend of Cromwell's default configuration file (application.conf). @@ -52,19 +57,14 @@ def __init__( local_loc_dir = os.path.join(os.getcwd(), CaperBase.DEFAULT_LOC_DIR_NAME) if not AbsPath(local_loc_dir).is_valid: - raise ValueError( - 'local_loc_dir should be a valid local abspath. {f}'.format( - f=local_loc_dir - ) - ) + msg = f'local_loc_dir should be a valid local abspath. {local_loc_dir}' + raise ValueError(msg) if gcp_loc_dir and not GCSURI(gcp_loc_dir).is_valid: - raise ValueError( - 'gcp_loc_dir should be a valid GCS path. {f}'.format(f=gcp_loc_dir) - ) + msg = f'gcp_loc_dir should be a valid GCS path. {gcp_loc_dir}' + raise ValueError(msg) if aws_loc_dir and not S3URI(aws_loc_dir).is_valid: - raise ValueError( - 'aws_loc_dir should be a valid S3 path. {f}'.format(f=aws_loc_dir) - ) + msg = f'aws_loc_dir should be a valid S3 path. {aws_loc_dir}' + raise ValueError(msg) self._local_loc_dir = local_loc_dir self._gcp_loc_dir = gcp_loc_dir @@ -74,10 +74,11 @@ def __init__( def _set_env_gcp_app_credentials( self, - gcp_service_account_key_json=None, - env_name=ENV_GOOGLE_APPLICATION_CREDENTIALS, - ): - """Initalizes environment for authentication (VM instance/storage). + gcp_service_account_key_json: str | None = None, + env_name: str = ENV_GOOGLE_APPLICATION_CREDENTIALS, + ) -> None: + """ + Initializes environment for authentication (VM instance/storage). Args: gcp_service_account_key_json: @@ -86,31 +87,36 @@ def __init__( VM instance. Environment variable GOOGLE_APPLICATION_CREDENTIALS will be updated with this. + env_name: + Environment variable name to set for GCP application credentials. 
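# ---- editor's aside (illustrative sketch, not part of this diff) ----
# This diff also swaps str.format() inside logging calls for printf-style
# arguments. With lazy %s arguments, formatting only happens when the record
# is actually emitted, and log aggregators can group on the constant template.
# Minimal contrast (the path is a hypothetical example):
import logging

logger_demo = logging.getLogger('demo')
work_dir = '/tmp/.caper_tmp/20240101_000000_000000'

logger_demo.info('Creating a timestamped temporary directory. {d}'.format(d=work_dir))  # eager
logger_demo.info('Creating a timestamped temporary directory. %s', work_dir)            # lazy
# ---- end aside ----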
""" if gcp_service_account_key_json: - gcp_service_account_key_json = os.path.expanduser( - gcp_service_account_key_json - ) + gcp_service_account_key_json = os.path.expanduser(gcp_service_account_key_json) if env_name in os.environ: auth_file = os.environ[env_name] if not os.path.samefile(auth_file, gcp_service_account_key_json): logger.warning( - 'Env var {env} does not match with ' + 'Env var %s does not match with ' 'gcp_service_account_key_json. ' - 'Using application default credentials? '.format(env=env_name) + 'Using application default credentials? ', + env_name, ) logger.debug( - 'Adding GCP service account key JSON {key} to ' - 'env var {env}'.format(key=gcp_service_account_key_json, env=env_name) + 'Adding GCP service account key JSON %s to env var %s', + gcp_service_account_key_json, + env_name, ) os.environ[env_name] = gcp_service_account_key_json - def localize_on_backend(self, f, backend, recursive=False, make_md5_file=False): + def localize_on_backend( + self, f: str, backend: str, recursive: bool = False, make_md5_file: bool = False + ) -> str: """Localize a file according to the chosen backend. + Each backend has its corresponding storage. - gcp -> GCS bucket path (starting with gs://) - aws -> S3 bucket path (starting with s3://) - - All others (based on local backend) -> local storage + - All others (based on local backend) -> local storage. If contents of input JSON changes due to recursive localization (deepcopy) then a new temporary file suffixed with backend type will be written on loc_prefix. @@ -137,21 +143,20 @@ def localize_on_backend(self, f, backend, recursive=False, make_md5_file=False): Returns: localized URI. """ - if backend == BACKEND_GCP: + if backend == BackendProvider.GCP: loc_prefix = self._gcp_loc_dir - elif backend == BACKEND_AWS: + elif backend == BackendProvider.AWS: loc_prefix = self._aws_loc_dir else: loc_prefix = self._local_loc_dir - return AutoURI(f).localize_on( - loc_prefix, recursive=recursive, make_md5_file=make_md5_file - ) + return AutoURI(f).localize_on(loc_prefix, recursive=recursive, make_md5_file=make_md5_file) def localize_on_backend_if_modified( - self, f, backend, recursive=False, make_md5_file=False - ): - """Wrapper for localize_on_backend. + self, f: str, backend: str, recursive: bool = False, make_md5_file: bool = False + ) -> str: + """ + Wrapper for localize_on_backend. If localized file is not modified due to recursive localization, then it means that localization for such file was redundant. @@ -167,8 +172,9 @@ def localize_on_backend_if_modified( return f return f_loc - def create_timestamped_work_dir(self, prefix=''): - """Creates/returns a local temporary directory on self._local_work_dir. + def create_timestamped_work_dir(self, prefix: str = '') -> str: + """ + Creates/returns a local temporary directory on self._local_work_dir. Args: prefix: @@ -178,17 +184,14 @@ def create_timestamped_work_dir(self, prefix=''): timestamp = datetime.now().strftime('%Y%m%d_%H%M%S_%f') work_dir = os.path.join(self._local_loc_dir, prefix, timestamp) os.makedirs(work_dir, exist_ok=True) - logger.info( - 'Creating a timestamped temporary directory. {d}'.format(d=work_dir) - ) + logger.info('Creating a timestamped temporary directory. 
%s', work_dir) return work_dir - def get_loc_dir(self, backend): + def get_loc_dir(self, backend: str) -> str | None: """Get localization directory for a backend.""" - if backend == BACKEND_GCP: + if backend == BackendProvider.GCP: return self._gcp_loc_dir - elif backend == BACKEND_AWS: + if backend == BackendProvider.AWS: return self._aws_loc_dir - else: - return self._local_loc_dir + return self._local_loc_dir diff --git a/caper/caper_client.py b/caper/caper_client.py index eabc253c..9a7be9a0 100644 --- a/caper/caper_client.py +++ b/caper/caper_client.py @@ -1,4 +1,8 @@ +"""Client for interacting with Cromwell server.""" + import logging +from collections.abc import Sequence +from typing import Any from autouri import AutoURI @@ -8,24 +12,36 @@ from .caper_workflow_opts import CaperWorkflowOpts from .cromwell import Cromwell from .cromwell_rest_api import CromwellRestAPI, has_wildcard, is_valid_uuid +from .server_heartbeat import ServerHeartbeat logger = logging.getLogger(__name__) class CaperClient(CaperBase): + """Client for interacting with a Cromwell server.""" + def __init__( self, - local_loc_dir=None, - gcp_loc_dir=None, - aws_loc_dir=None, - gcp_service_account_key_json=None, - server_hostname=CromwellRestAPI.DEFAULT_HOSTNAME, - server_port=CromwellRestAPI.DEFAULT_PORT, - server_heartbeat=None, - ): - """Initializes for Caper's client functions. + local_loc_dir: str | None = None, + gcp_loc_dir: str | None = None, + aws_loc_dir: str | None = None, + gcp_service_account_key_json: str | None = None, + server_hostname: str = CromwellRestAPI.DEFAULT_HOSTNAME, + server_port: int = CromwellRestAPI.DEFAULT_PORT, + server_heartbeat: ServerHeartbeat | None = None, + ) -> None: + """ + Initializes for Caper's client functions. Args: + local_loc_dir: + Local cache directory for localization. + gcp_loc_dir: + GCP cache directory (gs://) for localization. + aws_loc_dir: + AWS cache directory (s3://) for localization. + gcp_service_account_key_json: + GCP service account key JSON file for authentication. server_hostname: Server hostname. Used only if heartbeat file is not available or timed out. @@ -49,15 +65,17 @@ def __init__( server_hostname, server_port = res if not server_hostname or not server_port: - raise ValueError( + msg = ( 'Server hostname/port must be defined ' 'if server heartbeat is not available or timed out.' ) + raise ValueError(msg) self._cromwell_rest_api = CromwellRestAPI(server_hostname, server_port) - def abort(self, wf_ids_or_labels): - """Abort running/pending workflows on a Cromwell server. + def abort(self, wf_ids_or_labels: Sequence[str]) -> Sequence[dict[str, Any]] | None: + """ + Abort running/pending workflows on a Cromwell server. Args: wf_ids_or_labels: @@ -67,11 +85,12 @@ def abort(self, wf_ids_or_labels): workflow_ids, labels = self._split_workflow_ids_and_labels(wf_ids_or_labels) r = self._cromwell_rest_api.abort(workflow_ids, labels) - logger.info('abort: {r}'.format(r=r)) + logger.info('abort: %s', r) return r - def unhold(self, wf_ids_or_labels): - """Release hold of workflows on a Cromwell server. + def unhold(self, wf_ids_or_labels: Sequence[str]) -> Sequence[dict[str, Any]] | None: + """ + Release hold of workflows on a Cromwell server. 
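# ---- editor's aside (illustrative sketch, not part of this diff) ----
# Hypothetical client usage built from the signatures above; the server
# address, port, workflow ID, and label are placeholders:
#
#     client = CaperClient(server_hostname='localhost', server_port=8000)
#     client.abort(['f1234567-...'])    # by workflow ID
#     client.unhold(['my-str-label'])   # or by Caper string label
# ---- end aside ----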
Args: wf_ids_or_labels: @@ -81,11 +100,14 @@ def unhold(self, wf_ids_or_labels): workflow_ids, labels = self._split_workflow_ids_and_labels(wf_ids_or_labels) r = self._cromwell_rest_api.release_hold(workflow_ids, labels) - logger.info('unhold: {r}'.format(r=r)) + logger.info('unhold: %s', r) return r - def list(self, wf_ids_or_labels=None, exclude_subworkflow=True): - """Retrieves list of running/pending workflows from a Cromwell server + def list( + self, wf_ids_or_labels: Sequence[str] | None = None, exclude_subworkflow: bool = True + ) -> list[dict[str, Any]] | None: + """ + Retrieves list of running/pending workflows from a Cromwell server. Args: wf_ids_or_labels: @@ -107,8 +129,11 @@ def list(self, wf_ids_or_labels=None, exclude_subworkflow=True): workflow_ids, labels, exclude_subworkflow=exclude_subworkflow ) - def metadata(self, wf_ids_or_labels, embed_subworkflow=False): - """Retrieves metadata for workflows from a Cromwell server. + def metadata( + self, wf_ids_or_labels: Sequence[str], embed_subworkflow: bool = False + ) -> Sequence[dict[str, Any]]: + """ + Retrieves metadata for workflows from a Cromwell server. Args: wf_ids_or_labels: @@ -118,6 +143,7 @@ def metadata(self, wf_ids_or_labels, embed_subworkflow=False): Recursively embed subworkflow's metadata JSON object in parent workflow's metadata JSON. This is to mimic behavior of Cromwell's run mode paramteter -m. + Returns: List of metadata JSONs of matched worflows. """ @@ -127,7 +153,9 @@ def metadata(self, wf_ids_or_labels, embed_subworkflow=False): workflow_ids, labels, embed_subworkflow=embed_subworkflow ) - def _split_workflow_ids_and_labels(self, workflow_ids_or_labels): + def _split_workflow_ids_and_labels( + self, workflow_ids_or_labels: Sequence[str] | None + ) -> tuple[Sequence[str], Sequence[tuple[str, str]]]: workflow_ids = [] labels = [] @@ -144,38 +172,53 @@ def _split_workflow_ids_and_labels(self, workflow_ids_or_labels): class CaperClientSubmit(CaperClient): + """Client for submitting workflows to a Cromwell server.""" + def __init__( self, - local_loc_dir=None, - gcp_loc_dir=None, - aws_loc_dir=None, - gcp_service_account_key_json=None, - server_hostname=CromwellRestAPI.DEFAULT_HOSTNAME, - server_port=CromwellRestAPI.DEFAULT_PORT, - server_heartbeat=None, - womtool=Cromwell.DEFAULT_WOMTOOL, - use_google_cloud_life_sciences=False, - gcp_zones=None, - slurm_partition=None, - slurm_account=None, - slurm_extra_param=None, - sge_pe=None, - sge_queue=None, - sge_extra_param=None, - pbs_queue=None, - pbs_extra_param=None, - lsf_queue=None, - lsf_extra_param=None, - ): - """Submit subcommand needs much more parameters than other client subcommands. 
+ local_loc_dir: str | None = None, + gcp_loc_dir: str | None = None, + aws_loc_dir: str | None = None, + gcp_service_account_key_json: str | None = None, + gcp_compute_service_account: str | None = None, + server_hostname: str = CromwellRestAPI.DEFAULT_HOSTNAME, + server_port: int = CromwellRestAPI.DEFAULT_PORT, + server_heartbeat: ServerHeartbeat | None = None, + womtool: str = Cromwell.DEFAULT_WOMTOOL, + gcp_zones: list[str] | None = None, + slurm_partition: str | None = None, + slurm_account: str | None = None, + slurm_extra_param: str | None = None, + sge_pe: str | None = None, + sge_queue: str | None = None, + sge_extra_param: str | None = None, + pbs_queue: str | None = None, + pbs_extra_param: str | None = None, + lsf_queue: str | None = None, + lsf_extra_param: str | None = None, + ) -> None: + """ + Submit subcommand needs much more parameters than other client subcommands. Args: + local_loc_dir: + Local cache directory for localization. + gcp_loc_dir: + GCP cache directory (gs://) for localization. + aws_loc_dir: + AWS cache directory (s3://) for localization. + gcp_service_account_key_json: + GCP service account key JSON file for authentication. + gcp_compute_service_account: + Service account email to use for GCP compute instances. + server_hostname: + Server hostname. + server_port: + Server port. + server_heartbeat: + ServerHeartbeat object for reading hostname/port. womtool: Womtool JAR file. - use_google_cloud_life_sciences: - Use Google Cloud Life Sciences API. - gcp_zones will be ignored since it's already configured with in - server side backend.conf. gcp_zones: GCP zones. Used for gcp backend only. slurm_partition: @@ -212,8 +255,8 @@ def __init__( self._cromwell = Cromwell(womtool=womtool) self._caper_workflow_opts = CaperWorkflowOpts( - use_google_cloud_life_sciences=use_google_cloud_life_sciences, gcp_zones=gcp_zones, + gcp_compute_service_account=gcp_compute_service_account, slurm_partition=slurm_partition, slurm_account=slurm_account, slurm_extra_param=slurm_extra_param, @@ -230,28 +273,29 @@ def __init__( def submit( self, - wdl, - backend=None, - inputs=None, - options=None, - labels=None, - imports=None, - str_label=None, - user=None, - docker=None, - singularity=None, - conda=None, - max_retries=CaperWorkflowOpts.DEFAULT_MAX_RETRIES, - memory_retry_multiplier=CaperWorkflowOpts.DEFAULT_MEMORY_RETRY_MULTIPLIER, - gcp_monitoring_script=CaperWorkflowOpts.DEFAULT_GCP_MONITORING_SCRIPT, - ignore_womtool=False, - no_deepcopy=False, - hold=False, - java_heap_womtool=Cromwell.DEFAULT_JAVA_HEAP_WOMTOOL, - dry_run=False, - work_dir=None, - ): - """Submit a workflow to Cromwell server. + wdl: str, + backend: str | None = None, + inputs: str | None = None, + options: str | None = None, + labels: str | None = None, + imports: str | None = None, + str_label: str | None = None, + user: str | None = None, + docker: str | None = None, + singularity: str | None = None, + conda: str | None = None, + max_retries: int | None = CaperWorkflowOpts.DEFAULT_MAX_RETRIES, + memory_retry_multiplier: float | None = CaperWorkflowOpts.DEFAULT_MEMORY_RETRY_MULTIPLIER, + gcp_monitoring_script: str | None = CaperWorkflowOpts.DEFAULT_GCP_MONITORING_SCRIPT, + ignore_womtool: bool = False, + no_deepcopy: bool = False, + hold: bool = False, + java_heap_womtool: str = Cromwell.DEFAULT_JAVA_HEAP_WOMTOOL, + dry_run: bool = False, + work_dir: str | None = None, + ) -> dict[str, Any] | None: + """ + Submit a workflow to Cromwell server. 
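# ---- editor's aside (illustrative sketch, not part of this diff) ----
# Hypothetical submission using the typed signature above; paths and server
# details are placeholders:
#
#     client = CaperClientSubmit(server_hostname='localhost', server_port=8000)
#     client.submit(
#         wdl='atac.wdl',
#         inputs='inputs.json',
#         backend='gcp',
#         dry_run=True,  # validate/localize only; returns None without submitting
#     )
# ---- end aside ----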
Args: wdl: @@ -292,6 +336,8 @@ Multiplier for the memory retry feature. See https://cromwell.readthedocs.io/en/develop/cromwell_features/RetryWithMoreMemory/ for details. + gcp_monitoring_script: + GCP monitoring script for resource monitoring in workflow options. ignore_womtool: Disable Womtool validation for WDL/input JSON/imports. no_deepcopy: @@ -315,7 +361,8 @@ """ wdl_file = AutoURI(wdl) if not wdl_file.exists: - raise FileNotFoundError('WDL does not exists. {wdl}'.format(wdl=wdl)) + msg = f'WDL does not exist. {wdl}' + raise FileNotFoundError(msg) if str_label is None and inputs: str_label = AutoURI(inputs).basename_wo_ext @@ -333,9 +380,8 @@ # backend's localization directory. # check if such loc_dir is defined. if self.get_loc_dir(backend) is None: - raise ValueError( - 'loc_dir is not defined for your backend. {b}'.format(b=backend) - ) + msg = f'loc_dir is not defined for your backend. {backend}' + raise ValueError(msg) maybe_remote_file = self.localize_on_backend_if_modified( inputs, backend=backend, recursive=not no_deepcopy, make_md5_file=True @@ -371,10 +417,13 @@ imports = wdl_parser.create_imports_file(work_dir) logger.debug( - 'submit params: wdl={wdl}, imports={imp}, inputs={inp}, ' - 'options={opt}, labels={lbl}, hold={hold}'.format( - wdl=wdl, imp=imports, inp=inputs, opt=options, lbl=labels, hold=hold - ) + 'submit params: wdl=%s, imports=%s, inputs=%s, options=%s, labels=%s, hold=%s', + wdl, + imports, + inputs, + options, + labels, + hold, ) if not ignore_womtool: @@ -386,7 +435,7 @@ ) if dry_run: - return + return None r = self._cromwell_rest_api.submit( source=wdl, @@ -396,5 +445,5 @@ labels=labels, on_hold=hold, ) - logger.info('submit: {r}'.format(r=r)) + logger.info('submit: %s', r) return r diff --git a/caper/caper_init.py b/caper/caper_init.py index 89fc6fb8..f30d82cb 100644 --- a/caper/caper_init.py +++ b/caper/caper_init.py @@ -1,16 +1,9 @@ +"""Initialize Caper configuration files for different backends.""" + import os from .cromwell import Cromwell -from .cromwell_backend import ( - BACKEND_ALIAS_LOCAL, - BACKEND_AWS, - BACKEND_GCP, - BACKEND_LOCAL, - BACKEND_LSF, - BACKEND_PBS, - BACKEND_SGE, - BACKEND_SLURM, -) +from .cromwell_backend import BackendProvider CONF_CONTENTS_TMP_DIR = """ # Local directory for localized files and Cromwell's intermediate files. @@ -25,15 +18,15 @@ # It is not recommended to change it unless your cluster has custom resource settings. # See https://github.com/ENCODE-DCC/caper/blob/master/docs/resource_param.md for details.""" -CONF_CONTENTS_SLURM_PARAM = "" +CONF_CONTENTS_SLURM_PARAM = '' CONF_CONTENTS_SGE_PARAM = """ # Parallel environment of SGE: # Find one with `$ qconf -spl` or ask you admin to add one if not exists. sge-pe= """ -CONF_CONTENTS_PBS_PARAM = "" -CONF_CONTENTS_LSF_PARAM = "" +CONF_CONTENTS_PBS_PARAM = '' +CONF_CONTENTS_LSF_PARAM = '' DEFAULT_CONF_CONTENTS_LOCAL = ( """backend=local @@ -111,22 +104,12 @@ # copy (not recommended): make a copy for a new workflow. gcp-call-caching-dup-strat= -# Use Google Cloud Life Sciences API instead of Genomics API (deprecating). -# Make sure to enable Google Cloud Life Sciences API on your Google Cloud Console -use-google-cloud-life-sciences=true - -# gcp-region is required for Life Sciences API only. # Region is different from zone. Zone is more specific. # Do not define zone here. 
Check supported regions: -# https://cloud.google.com/life-sciences/docs/concepts/locations +# https://cloud.google.com/batch/docs/concepts/locations # e.g. us-central1 gcp-region= -# Comma-separated zones for Genomics API (deprecating). -# This is ignored if use-google-cloud-life-sciences. -# e.g. us-west1-a,us-west1-b,us-west1-c -gcp-zones= - # Number of retrials. This parameter also applies to non-OOM failures. max-retries=1 """ @@ -134,29 +117,31 @@ ) -def init_caper_conf(conf_file, backend): +def init_caper_conf(conf_file: str, backend: BackendProvider) -> None: """Initialize conf file for a given backend. + There are two special backend aliases for two Stanford clusters. These clusters are based on SLURM. Also, download/install Cromwell/Womtool JARs, whose default URL and install dir are defined in class Cromwell. """ - if backend in (BACKEND_LOCAL, BACKEND_ALIAS_LOCAL): + if backend == BackendProvider.LOCAL: contents = DEFAULT_CONF_CONTENTS_LOCAL - elif backend == BACKEND_SLURM: + elif backend == BackendProvider.SLURM: contents = DEFAULT_CONF_CONTENTS_SLURM - elif backend == BACKEND_SGE: + elif backend == BackendProvider.SGE: contents = DEFAULT_CONF_CONTENTS_SGE - elif backend == BACKEND_PBS: + elif backend == BackendProvider.PBS: contents = DEFAULT_CONF_CONTENTS_PBS - elif backend == BACKEND_LSF: + elif backend == BackendProvider.LSF: contents = DEFAULT_CONF_CONTENTS_LSF - elif backend in BACKEND_GCP: + elif backend == BackendProvider.GCP: contents = DEFAULT_CONF_CONTENTS_GCP - elif backend in BACKEND_AWS: + elif backend == BackendProvider.AWS: contents = DEFAULT_CONF_CONTENTS_AWS else: - raise ValueError('Unsupported backend {p}'.format(p=backend)) + msg = f'Unsupported backend {backend}' + raise ValueError(msg) conf_file = os.path.expanduser(conf_file) os.makedirs(os.path.dirname(conf_file), exist_ok=True) @@ -165,7 +150,5 @@ def init_caper_conf(conf_file, backend): fp.write(contents + '\n') cromwell = Cromwell() - fp.write( - '{key}={val}\n'.format(key='cromwell', val=cromwell.install_cromwell()) - ) + fp.write('{key}={val}\n'.format(key='cromwell', val=cromwell.install_cromwell())) fp.write('{key}={val}\n'.format(key='womtool', val=cromwell.install_womtool())) diff --git a/caper/caper_labels.py b/caper/caper_labels.py index 66f59709..3a6c2524 100644 --- a/caper/caper_labels.py +++ b/caper/caper_labels.py @@ -1,3 +1,5 @@ +"""Manage Cromwell workflow labels for tracking and querying.""" + import json import logging import os @@ -16,6 +18,8 @@ class CaperLabels: + """Manage Cromwell workflow labels for tracking and querying.""" + KEY_CAPER_STR_LABEL = 'caper-str-label' KEY_CAPER_USER = 'caper-user' KEY_CAPER_BACKEND = 'caper-backend' @@ -23,20 +27,23 @@ class CaperLabels: def create_file( self, - directory, - backend=None, - custom_labels=None, - str_label=None, - user=None, - basename=BASENAME_LABELS, - ): - """Create labels JSON file. + directory: str, + backend: str | None = None, + custom_labels: str | None = None, + str_label: str | None = None, + user: str | None = None, + basename: str = BASENAME_LABELS, + ) -> str: + """ + Create labels JSON file. Args: directory: Directory to create a labels JSON file. backend: - Backend + Backend. + user: + Username to add to labels. custom_labels: User's labels file to be merged. 
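# ---- editor's aside (illustrative sketch, not part of this diff) ----
# The str_label sanitization in caper_labels.py replaces characters matching
# RE_ILLEGAL_STR_LABEL_CHRS, as the hunk below shows. A generic sketch of the
# same idea (the real regex and substitution live in this module's constants,
# not shown in this diff; the pattern here is hypothetical):
import re

RE_ILLEGAL_DEMO = r'[^A-Za-z0-9\-_]'
str_label = 'my label:v1'
new_str_label = re.sub(RE_ILLEGAL_DEMO, '_', str_label)
assert new_str_label == 'my_label_v1'
# ---- end aside ----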
str_label: @@ -56,21 +63,16 @@ def create_file( template[CaperLabels.KEY_CAPER_BACKEND] = backend if str_label: - new_str_label = re.sub( - RE_ILLEGAL_STR_LABEL_CHRS, SUB_ILLEGAL_STR_LABEL_CHRS, str_label - ) + new_str_label = re.sub(RE_ILLEGAL_STR_LABEL_CHRS, SUB_ILLEGAL_STR_LABEL_CHRS, str_label) if str_label != new_str_label: logger.warning( - 'Found illegal characters in str_label matching with {regex}. ' - 'Replaced with {sub}'.format( - regex=RE_ILLEGAL_STR_LABEL_CHRS, sub=SUB_ILLEGAL_STR_LABEL_CHRS - ) + 'Found illegal characters in str_label matching with %s. Replaced with %s', + RE_ILLEGAL_STR_LABEL_CHRS, + SUB_ILLEGAL_STR_LABEL_CHRS, ) template[CaperLabels.KEY_CAPER_STR_LABEL] = new_str_label - template[CaperLabels.KEY_CAPER_USER] = ( - user if user else pwd.getpwuid(os.getuid())[0] - ) + template[CaperLabels.KEY_CAPER_USER] = user if user else pwd.getpwuid(os.getuid())[0] labels_file = os.path.join(directory, basename) AutoURI(labels_file).write(json.dumps(template, indent=4)) diff --git a/caper/caper_runner.py b/caper/caper_runner.py index 1deb684c..0c77b13c 100644 --- a/caper/caper_runner.py +++ b/caper/caper_runner.py @@ -1,5 +1,9 @@ +"""Runner for executing workflows with Cromwell.""" + import logging import os +from collections.abc import Callable +from typing import TextIO from autouri import AbsPath, AutoURI @@ -9,6 +13,7 @@ from .caper_workflow_opts import CaperWorkflowOpts from .cromwell import Cromwell from .cromwell_backend import ( + BackendProvider, CromwellBackendAws, CromwellBackendBase, CromwellBackendCommon, @@ -22,119 +27,173 @@ ) from .cromwell_metadata import CromwellMetadata from .cromwell_rest_api import CromwellRestAPI +from .nb_subproc_thread import NBSubprocThread +from .server_heartbeat import ServerHeartbeat from .wdl_parser import WDLParser logger = logging.getLogger(__name__) -class WomtoolValidationFailedException(Exception): - pass +class WomtoolValidationFailedException(Exception): # noqa: N818 + """Exception raised when WDL validation fails.""" class CaperRunner(CaperBase): + """Runner for executing WDL workflows with Cromwell.""" + ENV_GOOGLE_CLOUD_PROJECT = 'GOOGLE_CLOUD_PROJECT' DEFAULT_FILE_DB_PREFIX = 'default_caper_file_db' SERVER_TMP_DIR_PREFIX = '.caper_server' def __init__( self, - default_backend, - local_loc_dir=None, - local_out_dir=None, - gcp_loc_dir=None, - aws_loc_dir=None, - cromwell=Cromwell.DEFAULT_CROMWELL, - womtool=Cromwell.DEFAULT_WOMTOOL, - disable_call_caching=False, - max_concurrent_workflows=CromwellBackendCommon.DEFAULT_MAX_CONCURRENT_WORKFLOWS, - memory_retry_error_keys=CromwellBackendCommon.DEFAULT_MEMORY_RETRY_ERROR_KEYS, - max_concurrent_tasks=CromwellBackendBase.DEFAULT_CONCURRENT_JOB_LIMIT, - soft_glob_output=False, - local_hash_strat=CromwellBackendLocal.DEFAULT_LOCAL_HASH_STRAT, - db=CromwellBackendDatabase.DEFAULT_DB, - db_timeout=CromwellBackendDatabase.DEFAULT_DB_TIMEOUT_MS, - mysql_db_ip=CromwellBackendDatabase.DEFAULT_MYSQL_DB_IP, - mysql_db_port=CromwellBackendDatabase.DEFAULT_MYSQL_DB_PORT, - mysql_db_user=CromwellBackendDatabase.DEFAULT_MYSQL_DB_USER, - mysql_db_password=CromwellBackendDatabase.DEFAULT_MYSQL_DB_PASSWORD, - mysql_db_name=CromwellBackendDatabase.DEFAULT_MYSQL_DB_NAME, - postgresql_db_ip=CromwellBackendDatabase.DEFAULT_POSTGRESQL_DB_IP, - postgresql_db_port=CromwellBackendDatabase.DEFAULT_POSTGRESQL_DB_PORT, - postgresql_db_user=CromwellBackendDatabase.DEFAULT_POSTGRESQL_DB_USER, - postgresql_db_password=CromwellBackendDatabase.DEFAULT_POSTGRESQL_DB_PASSWORD, - 
postgresql_db_name=CromwellBackendDatabase.DEFAULT_POSTGRESQL_DB_NAME, - file_db=None, - gcp_prj=None, - gcp_out_dir=None, - gcp_call_caching_dup_strat=CromwellBackendGcp.DEFAULT_CALL_CACHING_DUP_STRAT, - gcp_service_account_key_json=None, - use_google_cloud_life_sciences=False, - gcp_region=CromwellBackendGcp.DEFAULT_REGION, - aws_batch_arn=None, - aws_region=None, - aws_out_dir=None, - aws_call_caching_dup_strat=CromwellBackendAws.DEFAULT_CALL_CACHING_DUP_STRAT, - gcp_zones=None, - slurm_partition=None, - slurm_account=None, - slurm_extra_param=None, - slurm_resource_param=CromwellBackendSlurm.DEFAULT_SLURM_RESOURCE_PARAM, - sge_pe=None, - sge_queue=None, - sge_extra_param=None, - sge_resource_param=CromwellBackendSge.DEFAULT_SGE_RESOURCE_PARAM, - pbs_queue=None, - pbs_extra_param=None, - pbs_resource_param=CromwellBackendPbs.DEFAULT_PBS_RESOURCE_PARAM, - lsf_queue=None, - lsf_extra_param=None, - lsf_resource_param=CromwellBackendLsf.DEFAULT_LSF_RESOURCE_PARAM, - ): - """See docstring of base class for other arguments. + default_backend: BackendProvider, + local_loc_dir: str | None = None, + local_out_dir: str | None = None, + gcp_loc_dir: str | None = None, + aws_loc_dir: str | None = None, + cromwell: str = Cromwell.DEFAULT_CROMWELL, + womtool: str = Cromwell.DEFAULT_WOMTOOL, + disable_call_caching: bool = False, + max_concurrent_workflows: int = CromwellBackendCommon.DEFAULT_MAX_CONCURRENT_WORKFLOWS, + memory_retry_error_keys: tuple[ + str, ... + ] = CromwellBackendCommon.DEFAULT_MEMORY_RETRY_ERROR_KEYS, + max_concurrent_tasks: int = CromwellBackendBase.DEFAULT_CONCURRENT_JOB_LIMIT, + soft_glob_output: bool = False, + local_hash_strat: str = CromwellBackendLocal.DEFAULT_LOCAL_HASH_STRAT, + db: str = CromwellBackendDatabase.DEFAULT_DB, + db_timeout: int = CromwellBackendDatabase.DEFAULT_DB_TIMEOUT_MS, + mysql_db_ip: str = CromwellBackendDatabase.DEFAULT_MYSQL_DB_IP, + mysql_db_port: int = CromwellBackendDatabase.DEFAULT_MYSQL_DB_PORT, + mysql_db_user: str = CromwellBackendDatabase.DEFAULT_MYSQL_DB_USER, + mysql_db_password: str = CromwellBackendDatabase.DEFAULT_MYSQL_DB_PASSWORD, + mysql_db_name: str = CromwellBackendDatabase.DEFAULT_MYSQL_DB_NAME, + postgresql_db_ip: str = CromwellBackendDatabase.DEFAULT_POSTGRESQL_DB_IP, + postgresql_db_port: int = CromwellBackendDatabase.DEFAULT_POSTGRESQL_DB_PORT, + postgresql_db_user: str = CromwellBackendDatabase.DEFAULT_POSTGRESQL_DB_USER, + postgresql_db_password: str = CromwellBackendDatabase.DEFAULT_POSTGRESQL_DB_PASSWORD, + postgresql_db_name: str = CromwellBackendDatabase.DEFAULT_POSTGRESQL_DB_NAME, + file_db: str | None = None, + gcp_prj: str | None = None, + gcp_out_dir: str | None = None, + gcp_call_caching_dup_strat: str = CromwellBackendGcp.DEFAULT_CALL_CACHING_DUP_STRAT, + gcp_service_account_key_json: str | None = None, + gcp_compute_service_account: str | None = None, + gcp_network: str | None = None, + gcp_subnetwork: str | None = None, + gcp_dockerhub_mirror: bool = True, + gcp_dockerhub_mirror_address: str = 'mirror.gcr.io', + gcp_region: str = CromwellBackendGcp.DEFAULT_REGION, + aws_batch_arn: str | None = None, + aws_region: str | None = None, + aws_out_dir: str | None = None, + aws_call_caching_dup_strat: str = CromwellBackendAws.DEFAULT_CALL_CACHING_DUP_STRAT, + gcp_zones: list[str] | None = None, + slurm_partition: str | None = None, + slurm_account: str | None = None, + slurm_extra_param: str | None = None, + slurm_resource_param: str = CromwellBackendSlurm.DEFAULT_SLURM_RESOURCE_PARAM, + sge_pe: str | None = None, + 
sge_queue: str | None = None, + sge_extra_param: str | None = None, + sge_resource_param: str = CromwellBackendSge.DEFAULT_SGE_RESOURCE_PARAM, + pbs_queue: str | None = None, + pbs_extra_param: str | None = None, + pbs_resource_param: str = CromwellBackendPbs.DEFAULT_PBS_RESOURCE_PARAM, + lsf_queue: str | None = None, + lsf_extra_param: str | None = None, + lsf_resource_param: str = CromwellBackendLsf.DEFAULT_LSF_RESOURCE_PARAM, + ) -> None: + """ + See docstring of base class for other arguments. Args: default_backend: Default backend. + local_loc_dir: + Local cache directory for localization. + local_out_dir: + Output directory for local backends. + gcp_loc_dir: + GCP cache directory (gs://) for localization. + aws_loc_dir: + AWS cache directory (s3://) for localization. cromwell: Cromwell JAR URI. womtool: Womtool JAR URI. disable_call_caching: + Disable call-caching (re-using outputs from previous workflows/tasks). max_concurrent_workflows: - memory_retry_error_keys + Limit for concurrent number of workflows. + memory_retry_error_keys: + List of error messages to catch failures due to OOM. max_concurrent_tasks: + Limit for concurrent number of tasks for each workflow. soft_glob_output: + Glob with ln -s instead of hard-linking. local_hash_strat: + Local file hashing strategy for call-caching. db: + Metadata DB type. db_timeout: + DB connection timeout in milliseconds. mysql_db_ip: + MySQL DB hostname. mysql_db_port: + MySQL DB port. mysql_db_user: + MySQL DB username. mysql_db_password: + MySQL DB password. mysql_db_name: + MySQL DB name. postgresql_db_ip: + PostgreSQL DB hostname. postgresql_db_port: + PostgreSQL DB port. postgresql_db_user: + PostgreSQL DB username. postgresql_db_password: + PostgreSQL DB password. postgresql_db_name: + PostgreSQL DB name. file_db: + File DB path prefix for db == "file". gcp_prj: + Google project name. + gcp_out_dir: + Output bucket path for GCP backend (gs://). gcp_call_caching_dup_strat: + Call-caching duplication strategy for GCP backend. gcp_service_account_key_json: This will be added to environment variable GOOGLE_APPLICATION_CREDENTIALS If not match with existing key then error out. - use_google_cloud_life_sciences: - Use Google Cloud Life Sciences API instead of Genomics API - which has beed deprecated. + gcp_compute_service_account: + Service account email to use for Google Cloud Batch compute instances. + If not provided, the default Compute Engine service account will be used. + Ensure that this service account has the `roles/batch.agentReporter` role, so that + VM instances can report their status to Batch. + gcp_network: + VPC network name for GCP Batch backend. Required for VPCs in custom subnet mode. + gcp_subnetwork: + VPC subnetwork name for GCP Batch backend. Required for VPCs in custom subnet mode. + gcp_dockerhub_mirror: + Enable Docker Hub mirroring through Google Artifact Registry. + gcp_dockerhub_mirror_address: + Address of the Docker Hub mirror (default: mirror.gcr.io). gcp_region: - Region for Google Cloud Life Sciences API. - Ignored if not use_google_cloud_life_sciences. - gcp_out_dir: + Region for Google Cloud Batch API. aws_batch_arn: + ARN for AWS Batch. aws_region: + AWS region. aws_out_dir: + Output bucket path for AWS backend (s3://). aws_call_caching_dup_strat: + Call-caching duplication strategy for AWS backend. gcp_zones: For this and all below arguments, see details in CaperWorkflowOpts.__init__. @@ -163,7 +222,7 @@ def __init__( lsf_queue: LSF queue. 
lsf_extra_param: - LSF extra parameter to be appended to qsub command line. + LSF extra parameter to be appended to bsub command line. lsf_resource_param: LSF resource parameters to be passed to bsub. """ @@ -206,13 +265,16 @@ def __init__( memory_retry_error_keys=memory_retry_error_keys, gcp_call_caching_dup_strat=gcp_call_caching_dup_strat, gcp_service_account_key_json=gcp_service_account_key_json, - use_google_cloud_life_sciences=use_google_cloud_life_sciences, + gcp_compute_service_account=gcp_compute_service_account, + gcp_network=gcp_network, + gcp_subnetwork=gcp_subnetwork, + gcp_dockerhub_mirror=gcp_dockerhub_mirror, + gcp_dockerhub_mirror_address=gcp_dockerhub_mirror_address, gcp_region=gcp_region, aws_batch_arn=aws_batch_arn, aws_region=aws_region, aws_out_dir=aws_out_dir, aws_call_caching_dup_strat=aws_call_caching_dup_strat, - gcp_zones=gcp_zones, slurm_partition=slurm_partition, slurm_account=slurm_account, slurm_extra_param=slurm_extra_param, @@ -230,7 +292,6 @@ def __init__( ) self._caper_workflow_opts = CaperWorkflowOpts( - use_google_cloud_life_sciences=use_google_cloud_life_sciences, gcp_zones=gcp_zones, slurm_partition=slurm_partition, slurm_account=slurm_account, @@ -246,54 +307,56 @@ def __init__( self._caper_labels = CaperLabels() - def _set_env_gcp_prj(self, gcp_prj=None, env_name=ENV_GOOGLE_CLOUD_PROJECT): - """Initalizes environment for authentication (storage). + def _set_env_gcp_prj( + self, gcp_prj: str | None = None, env_name: str = ENV_GOOGLE_CLOUD_PROJECT + ) -> None: + """ + Initializes environment for authentication (storage). + Args: gcp_prj: Environment variable GOOGLE_CLOUD_PROJECT will be updated with this. + env_name: + Environment variable name to set for GCP project. """ if gcp_prj: if env_name in os.environ: prj = os.environ[env_name] if prj != gcp_prj: - logger.warning( - 'Env var {env} does not match with ' - 'gcp_prj {prj}.'.format(env=env_name, prj=gcp_prj) - ) - logger.debug( - 'Adding {prj} to env var {env}'.format(prj=gcp_prj, env=env_name) - ) + logger.warning('Env var %s does not match with gcp_prj %s.', env_name, gcp_prj) + logger.debug('Adding %s to env var %s', gcp_prj, env_name) os.environ[env_name] = gcp_prj def run( self, - backend, - wdl, - inputs=None, - options=None, - labels=None, - imports=None, - metadata_output=None, - str_label=None, - user=None, - docker=None, - singularity=None, - conda=None, - custom_backend_conf=None, - max_retries=CaperWorkflowOpts.DEFAULT_MAX_RETRIES, - memory_retry_multiplier=CaperWorkflowOpts.DEFAULT_MEMORY_RETRY_MULTIPLIER, - gcp_monitoring_script=CaperWorkflowOpts.DEFAULT_GCP_MONITORING_SCRIPT, - ignore_womtool=False, - no_deepcopy=False, - fileobj_stdout=None, - fileobj_troubleshoot=None, - work_dir=None, - java_heap_run=Cromwell.DEFAULT_JAVA_HEAP_CROMWELL_RUN, - java_heap_womtool=Cromwell.DEFAULT_JAVA_HEAP_WOMTOOL, - dry_run=False, - ): - """Run a workflow using Cromwell run mode. 
+ backend: str, + wdl: str, + inputs: str | None = None, + options: str | None = None, + labels: str | None = None, + imports: str | None = None, + metadata_output: str | None = None, + str_label: str | None = None, + user: str | None = None, + docker: str | None = None, + singularity: str | None = None, + conda: str | None = None, + custom_backend_conf: str | None = None, + max_retries: int | None = CaperWorkflowOpts.DEFAULT_MAX_RETRIES, + memory_retry_multiplier: float | None = CaperWorkflowOpts.DEFAULT_MEMORY_RETRY_MULTIPLIER, + gcp_monitoring_script: str | None = CaperWorkflowOpts.DEFAULT_GCP_MONITORING_SCRIPT, + ignore_womtool: bool = False, + no_deepcopy: bool = False, + fileobj_stdout: TextIO | None = None, + fileobj_troubleshoot: TextIO | None = None, + work_dir: str | None = None, + java_heap_run: str = Cromwell.DEFAULT_JAVA_HEAP_CROMWELL_RUN, + java_heap_womtool: str = Cromwell.DEFAULT_JAVA_HEAP_WOMTOOL, + dry_run: bool = False, + ) -> NBSubprocThread | None: + """ + Run a workflow using Cromwell run mode. Args: backend: @@ -364,6 +427,8 @@ def run( Multiplier for the memory retry feature. See https://cromwell.readthedocs.io/en/develop/cromwell_features/RetryWithMoreMemory/ for details. + gcp_monitoring_script: + GCP monitoring script for resource monitoring in workflow options. ignore_womtool: Disable Womtool validation for WDL/input JSON/imports. no_deepcopy: @@ -388,22 +453,22 @@ def run( Java heap (java -Xmx) for Womtool. dry_run: Stop before running Java command line for Cromwell. + Returns: metadata_file: URI of metadata JSON file. """ if not AutoURI(wdl).exists: - raise FileNotFoundError('WDL does not exists. {wdl}'.format(wdl=wdl)) + msg = f'WDL does not exist. {wdl}' + raise FileNotFoundError(msg) if str_label is None and inputs: str_label = AutoURI(inputs).basename_wo_ext if work_dir is None: - work_dir = self.create_timestamped_work_dir( - prefix=AutoURI(wdl).basename_wo_ext - ) + work_dir = self.create_timestamped_work_dir(prefix=AutoURI(wdl).basename_wo_ext) - logger.info('Localizing files on work_dir. {d}'.format(d=work_dir)) + logger.info('Localizing files on work_dir. %s', work_dir) if inputs: maybe_remote_file = self.localize_on_backend_if_modified( @@ -422,15 +487,10 @@ def run( if metadata_output: if not AbsPath(metadata_output).is_valid: - raise ValueError( - 'metadata_output is not a valid local abspath. {m}'.format( - m=metadata_output - ) - ) + msg = f'metadata_output is not a valid local abspath. 
{metadata_output}' + raise ValueError(msg) else: - metadata_output = os.path.join( - work_dir, CromwellMetadata.DEFAULT_METADATA_BASENAME - ) + metadata_output = os.path.join(work_dir, CromwellMetadata.DEFAULT_METADATA_BASENAME) backend_conf = self._caper_backend_conf.create_file( directory=work_dir, backend=backend, custom_backend_conf=custom_backend_conf @@ -461,12 +521,8 @@ def run( if not ignore_womtool: self._cromwell.validate(wdl=wdl, inputs=inputs, imports=imports) - logger.info( - 'launching run: wdl={w}, inputs={i}, backend_conf={b}'.format( - w=wdl, i=inputs, b=backend_conf - ) - ) - th = self._cromwell.run( + logger.info('launching run: wdl=%s, inputs=%s, backend_conf=%s', wdl, inputs, backend_conf) + return self._cromwell.run( wdl=wdl, backend_conf=backend_conf, inputs=inputs, @@ -478,78 +534,79 @@ def run( fileobj_troubleshoot=fileobj_troubleshoot, dry_run=dry_run, ) - return th def server( self, - default_backend, - server_port=CromwellRestAPI.DEFAULT_PORT, - server_hostname=None, - server_heartbeat=None, - custom_backend_conf=None, - fileobj_stdout=None, - embed_subworkflow=False, - java_heap_server=Cromwell.DEFAULT_JAVA_HEAP_CROMWELL_SERVER, - auto_write_metadata=True, - work_dir=None, - dry_run=False, - ): + default_backend: BackendProvider, + server_port: int = CromwellRestAPI.DEFAULT_PORT, + server_hostname: str | None = None, + server_heartbeat: ServerHeartbeat | None = None, + custom_backend_conf: str | None = None, + fileobj_stdout: TextIO | None = None, + embed_subworkflow: bool = False, + java_heap_server: str = Cromwell.DEFAULT_JAVA_HEAP_CROMWELL_SERVER, + auto_write_metadata: bool = True, + on_server_start: Callable[[], None] | None = None, + on_status_change: Callable[..., None] | None = None, + work_dir: str | None = None, + dry_run: bool = False, + ) -> NBSubprocThread | None: """Run a Cromwell server. - default_backend: - Default backend. If backend is not specified for a submitted workflow - then default backend will be used. - Choose among Caper's built-in backends. - (aws, gcp, Local, slurm, sge, pbs, lsf). - Or use a backend defined in your custom backend config file - (above "backend_conf" file). - server_heartbeat: - Server heartbeat to write hostname/port of a server. - server_port: - Server port to run Cromwell server. - Make sure to use different port for multiple Cromwell servers on the same - machine. - server_hostname: - Server hostname. If not defined then socket.gethostname() will be used. - If server_heartbeat is given, then this hostname will be written to - the server heartbeat file defined in server_heartbeat. - custom_backend_conf: - Backend config file (HOCON) to override Caper's auto-generated backend config. - fileobj_stdout: - File-like object to write Cromwell's STDOUT. - embed_subworkflow: - Caper stores/updates metadata.JSON file on - each workflow's root directory whenever there is status change - of workflow (or its tasks). - This flag ensures that any subworkflow's metadata JSON will be - embedded in main (this) workflow's metadata JSON. - This is to mimic behavior of Cromwell run mode's -m parameter. - java_heap_server: - Java heap (java -Xmx) for Cromwell server mode. - auto_write_metadata: - Automatic retrieval/writing of metadata.json upon workflow/task's status change. - work_dir: - Local temporary directory to store all temporary files. - Temporary files mean intermediate files used for running Cromwell. - For example, auto-generated backend config file and workflow options file. 
- If this is not defined, then cache directory self._local_loc_dir with a timestamp - will be used. - However, Cromwell Java process itself will run on CWD instead of this directory. - dry_run: - Stop before running Java command line for Cromwell. + + Args: + default_backend: + Default backend. If backend is not specified for a submitted workflow + then default backend will be used. + Choose among Caper's built-in backends. + (aws, gcp, Local, slurm, sge, pbs, lsf). + Or use a backend defined in your custom backend config file + (above "backend_conf" file). + server_port: + Server port to run Cromwell server. + Make sure to use different port for multiple Cromwell servers on the same + machine. + server_hostname: + Server hostname. If not defined then socket.gethostname() will be used. + If server_heartbeat is given, then this hostname will be written to + the server heartbeat file defined in server_heartbeat. + server_heartbeat: + Server heartbeat to write hostname/port of a server. + custom_backend_conf: + Backend config file (HOCON) to override Caper's auto-generated backend config. + fileobj_stdout: + File-like object to write Cromwell's STDOUT. + embed_subworkflow: + Caper stores/updates metadata.JSON file on + each workflow's root directory whenever there is status change + of workflow (or its tasks). + This flag ensures that any subworkflow's metadata JSON will be + embedded in main (this) workflow's metadata JSON. + This is to mimic behavior of Cromwell run mode's -m parameter. + java_heap_server: + Java heap (java -Xmx) for Cromwell server mode. + auto_write_metadata: + Automatic retrieval/writing of metadata.json upon workflow/task's status change. + work_dir: + Local temporary directory to store all temporary files. + Temporary files mean intermediate files used for running Cromwell. + For example, auto-generated backend config file and workflow options file. + If this is not defined, then cache directory self._local_loc_dir with a timestamp + will be used. + However, Cromwell Java process itself will run on CWD instead of this directory. + dry_run: + Stop before running Java command line for Cromwell. 
""" if work_dir is None: - work_dir = self.create_timestamped_work_dir( - prefix=CaperRunner.SERVER_TMP_DIR_PREFIX - ) + work_dir = self.create_timestamped_work_dir(prefix=CaperRunner.SERVER_TMP_DIR_PREFIX) backend_conf = self._caper_backend_conf.create_file( directory=work_dir, backend=default_backend, custom_backend_conf=custom_backend_conf, ) - logger.info('launching server: backend_conf={b}'.format(b=backend_conf)) + logger.info('launching server: backend_conf=%s', backend_conf) - th = self._cromwell.server( + return self._cromwell.server( backend_conf=backend_conf, server_port=server_port, server_hostname=server_hostname, @@ -558,6 +615,7 @@ def server( embed_subworkflow=embed_subworkflow, java_heap_cromwell_server=java_heap_server, auto_write_metadata=auto_write_metadata, + on_server_start=on_server_start, + on_status_change=on_status_change, dry_run=dry_run, ) - return th diff --git a/caper/caper_wdl_parser.py b/caper/caper_wdl_parser.py index 2df298d5..7063f31c 100644 --- a/caper/caper_wdl_parser.py +++ b/caper/caper_wdl_parser.py @@ -1,6 +1,11 @@ +"""WDL parser with Caper-specific functionality.""" + +from __future__ import annotations + import logging +import re -from .wdl_parser import WDLParser +from caper.wdl_parser import WDLParser logger = logging.getLogger(__name__) @@ -8,8 +13,8 @@ class CaperWDLParser(WDLParser): """WDL parser for Caper.""" - RE_WDL_COMMENT_DOCKER = r'^\s*\#\s*CAPER\s+docker\s(.+)' - RE_WDL_COMMENT_SINGULARITY = r'^\s*\#\s*CAPER\s+singularity\s(.+)' + RE_WDL_COMMENT_DOCKER = re.compile(r'^\s*#\s*CAPER\s+docker\s(.+)') + RE_WDL_COMMENT_SINGULARITY = re.compile(r'^\s*#\s*CAPER\s+singularity\s(.+)') WDL_WORKFLOW_META_DOCKER_KEYS = ('default_docker', 'caper_docker') WDL_WORKFLOW_META_SINGULARITY_KEYS = ('default_singularity', 'caper_singularity') WDL_WORKFLOW_META_CONDA_KEYS = ( @@ -19,17 +24,13 @@ class CaperWDLParser(WDLParser): 'caper_conda_env', ) - def __init__(self, wdl): + def __init__(self, wdl: str) -> None: # noqa: D107 super().__init__(wdl) @property - def caper_docker(self): - """Backward compatibility for property name. See property default_docker.""" - return self.default_docker - - @property - def default_docker(self): - """Find a default Docker image in WDL for Caper. + def default_docker(self) -> str | None: + """ + Find a default Docker image in WDL for Caper. Backward compatibililty: Keep using old regex method @@ -40,18 +41,15 @@ def default_docker(self): if docker_key in self.workflow_meta: return self.workflow_meta[docker_key] - ret = self._find_val_of_matched_lines(CaperWDLParser.RE_WDL_COMMENT_DOCKER) + ret = self._find_val_of_matched_lines(self.RE_WDL_COMMENT_DOCKER) if ret: return ret[0].strip('"\'') + return None @property - def caper_singularity(self): - """Backward compatibility for property name. See property default_singularity.""" - return self.default_singularity - - @property - def default_singularity(self): - """Find a default Singularity image in WDL for Caper. + def default_singularity(self) -> str | None: + """ + Find a default Singularity image in WDL for Caper. 
Backward compatibililty: Keep using old regex method @@ -62,14 +60,16 @@ def default_singularity(self): if singularity_key in self.workflow_meta: return self.workflow_meta[singularity_key] - ret = self._find_val_of_matched_lines(CaperWDLParser.RE_WDL_COMMENT_SINGULARITY) + ret = self._find_val_of_matched_lines(self.RE_WDL_COMMENT_SINGULARITY) if ret: return ret[0].strip('"\'') + return None @property - def default_conda(self): + def default_conda(self) -> str | None: """Find a default Conda environment name in WDL for Caper.""" if self.workflow_meta: for conda_key in CaperWDLParser.WDL_WORKFLOW_META_CONDA_KEYS: if conda_key in self.workflow_meta: return self.workflow_meta[conda_key] + return None diff --git a/caper/caper_workflow_opts.py b/caper/caper_workflow_opts.py index 64cc36b3..faafc906 100644 --- a/caper/caper_workflow_opts.py +++ b/caper/caper_workflow_opts.py @@ -1,14 +1,18 @@ +"""Caper workflow options.""" + +from __future__ import annotations + import copy import json import logging import os +from typing import Any from autouri import GCSURI, AutoURI from .caper_wdl_parser import CaperWDLParser from .cromwell_backend import ( - BACKEND_AWS, - BACKEND_GCP, + BackendProvider, ENVIRONMENT_CONDA, ENVIRONMENT_DOCKER, ENVIRONMENT_SINGULARITY, @@ -20,40 +24,43 @@ class CaperWorkflowOpts: + """Manage Cromwell workflow options JSON file.""" + DEFAULT_RUNTIME_ATTRIBUTES = 'default_runtime_attributes' BASENAME_WORKFLOW_OPTS_JSON = 'workflow_opts.json' DEFAULT_MAX_RETRIES = 1 DEFAULT_MEMORY_RETRY_MULTIPLIER = 1.2 - DEFAULT_GCP_MONITORING_SCRIPT = ( - 'gs://caper-data/scripts/resource_monitor/resource_monitor.sh' - ) + DEFAULT_GCP_MONITORING_SCRIPT = 'gs://caper-data/scripts/resource_monitor/resource_monitor.sh' def __init__( self, - use_google_cloud_life_sciences=False, - gcp_zones=None, - slurm_partition=None, - slurm_account=None, - slurm_extra_param=None, - sge_pe=None, - sge_queue=None, - sge_extra_param=None, - pbs_queue=None, - pbs_extra_param=None, - lsf_queue=None, - lsf_extra_param=None, - ): - """Template for a workflows options JSON file. + gcp_zones: list[str] | None = None, + gcp_compute_service_account: str | None = None, + slurm_partition: str | None = None, + slurm_account: str | None = None, + slurm_extra_param: str | None = None, + sge_pe: str | None = None, + sge_queue: str | None = None, + sge_extra_param: str | None = None, + pbs_queue: str | None = None, + pbs_extra_param: str | None = None, + lsf_queue: str | None = None, + lsf_extra_param: str | None = None, + ) -> None: + """ + Template for a workflow options JSON file. + All parameters are optional. + If parameters have been set at the backend level, these workflow-level options will + override them. + Args: - use_google_cloud_life_sciences: - Use Google Cloud Life Sciences API instead of Genomics API - which has beed deprecated. - If this flag is on gcp_zones is ignored. gcp_zones: For gcp backend only. List of GCP zones to run workflows on. + gcp_compute_service_account: + Overrides the service account for Batch VM instances. slurm_partition: For slurm backend only. SLURM partition to submit tasks to. @@ -92,13 +99,15 @@ def __init__( Extra parameters for LSF. This will be appended to "bsub" command line. 
""" - self._template = {CaperWorkflowOpts.DEFAULT_RUNTIME_ATTRIBUTES: dict()} - default_runtime_attributes = self._template[ - CaperWorkflowOpts.DEFAULT_RUNTIME_ATTRIBUTES - ] + self._template: dict[str, Any] = {CaperWorkflowOpts.DEFAULT_RUNTIME_ATTRIBUTES: {}} + default_runtime_attributes = self._template[CaperWorkflowOpts.DEFAULT_RUNTIME_ATTRIBUTES] - if gcp_zones and not use_google_cloud_life_sciences: + if gcp_zones: default_runtime_attributes['zones'] = ' '.join(gcp_zones) + if gcp_compute_service_account: + default_runtime_attributes['google_compute_service_account'] = ( + gcp_compute_service_account + ) if slurm_partition: default_runtime_attributes['slurm_partition'] = slurm_partition @@ -126,20 +135,22 @@ def __init__( def create_file( self, - directory, - wdl, - backend=None, - inputs=None, - custom_options=None, - docker=None, - singularity=None, - conda=None, - max_retries=DEFAULT_MAX_RETRIES, - memory_retry_multiplier=DEFAULT_MEMORY_RETRY_MULTIPLIER, - gcp_monitoring_script=DEFAULT_GCP_MONITORING_SCRIPT, - basename=BASENAME_WORKFLOW_OPTS_JSON, - ): - """Creates Cromwell's workflow options JSON file. + directory: str, + wdl: str, + backend: str | None = None, + inputs: str | None = None, + custom_options: str | None = None, + docker: str | None = None, + singularity: str | None = None, + conda: str | None = None, + max_retries: int | None = DEFAULT_MAX_RETRIES, + memory_retry_multiplier: float | None = DEFAULT_MEMORY_RETRY_MULTIPLIER, + gcp_monitoring_script: str | None = DEFAULT_GCP_MONITORING_SCRIPT, + basename: str = BASENAME_WORKFLOW_OPTS_JSON, + ) -> str: + """ + Creates Cromwell's workflow options JSON file. + Workflow options JSON file sets default values for attributes defined in runtime {} section of WDL's task. For example, docker attribute can be defined here instead of directory @@ -186,12 +197,11 @@ def create_file( Basename for a temporary workflow options JSON file. """ if singularity and docker: - raise ValueError('Cannot use both Singularity and Docker.') + msg = 'Cannot use both Singularity and Docker.' + raise ValueError(msg) template = copy.deepcopy(self._template) - default_runtime_attributes = template[ - CaperWorkflowOpts.DEFAULT_RUNTIME_ATTRIBUTES - ] + default_runtime_attributes = template[CaperWorkflowOpts.DEFAULT_RUNTIME_ATTRIBUTES] if backend: template['backend'] = backend @@ -201,10 +211,11 @@ def create_file( # sanity check for environment flags defined_env_flags = [env for env in (docker, singularity, conda) if env] if len(defined_env_flags) > 1: - raise ValueError( + msg = ( 'docker, singularity and conda are mutually exclusive. ' - 'Define nothing or only one environment.' + 'Define only one or none of these environments.' ) + raise ValueError(msg) if docker is not None: environment = ENVIRONMENT_DOCKER @@ -218,89 +229,65 @@ def create_file( if environment: default_runtime_attributes['environment'] = environment - if docker == '' or backend in (BACKEND_GCP, BACKEND_AWS) and not docker: + if docker == '' or (backend in (BackendProvider.GCP, BackendProvider.AWS) and not docker): # if used as a flag or cloud backend is chosen # try to find "default_docker" from WDL's workflow.meta or "#CAPER docker" from comments docker = wdl_parser.default_docker if docker: - logger.info( - 'Docker image found in WDL metadata. wdl={wdl}, d={d}'.format( - wdl=wdl, d=docker - ) - ) + logger.info('Docker image found in WDL metadata. wdl=%s, d=%s', wdl, docker) else: - logger.info( - "Docker image not found in WDL metadata. 
wdl={wdl}".format(wdl=wdl) - ) + logger.info('Docker image not found in WDL metadata. wdl=%s', wdl) if docker: default_runtime_attributes['docker'] = docker if singularity == '': # if used as a flag - if backend in (BACKEND_GCP, BACKEND_AWS): - raise ValueError( - 'Singularity cannot be used for cloud backend (e.g. aws, gcp).' - ) + if backend in (BackendProvider.GCP, BackendProvider.AWS): + msg = 'Singularity cannot be used for cloud backend (e.g. aws, gcp).' + raise ValueError(msg) singularity = wdl_parser.default_singularity if singularity: logger.info( - 'Singularity image found in WDL metadata. wdl={wdl}, s={s}'.format( - wdl=wdl, s=singularity - ) + 'Singularity image found in WDL metadata. wdl=%s, s=%s', wdl, singularity ) else: - logger.info( - 'Singularity image not found in WDL metadata. wdl={wdl}.'.format( - wdl=wdl - ) - ) + logger.info('Singularity image not found in WDL metadata. wdl=%s.', wdl) if singularity: default_runtime_attributes['singularity'] = singularity if inputs: - default_runtime_attributes['singularity_bindpath'] = find_bindpath( - inputs - ) + default_runtime_attributes['singularity_bindpath'] = find_bindpath(inputs) if conda == '': # if used as a flag - if backend in (BACKEND_GCP, BACKEND_AWS): - raise ValueError( - 'Conda cannot be used for cloud backend (e.g. aws, gcp).' - ) + if backend in (BackendProvider.GCP, BackendProvider.AWS): + msg = 'Conda cannot be used for cloud backend (e.g. aws, gcp).' + raise ValueError(msg) conda = wdl_parser.default_conda if conda: logger.info( - 'Conda environment name found in WDL metadata. wdl={wdl}, s={s}'.format( - wdl=wdl, s=conda - ) + 'Conda environment name found in WDL metadata. wdl=%s, s=%s', wdl, conda ) else: - logger.info( - 'Conda environment name not found in WDL metadata. wdl={wdl}'.format( - wdl=wdl - ) - ) + logger.info('Conda environment name not found in WDL metadata. wdl=%s', wdl) if conda: default_runtime_attributes['conda'] = conda if max_retries is not None: default_runtime_attributes['maxRetries'] = max_retries - # Cromwell's bug in memory-retry feature. + + # Cromwell has a bug in memory-retry feature. # Disabled until it's fixed on Cromwell's side. # if memory_retry_multiplier is not None: - # template['memory_retry_multiplier'] = memory_retry_multiplier + # template['memory_retry_multiplier'] = memory_retry_multiplier # noqa: ERA001 - if gcp_monitoring_script and backend == BACKEND_GCP: + if gcp_monitoring_script and backend == BackendProvider.GCP: if not GCSURI(gcp_monitoring_script).is_valid: - raise ValueError( - 'gcp_monitoring_script is not a valid URI. {uri}'.format( - uri=gcp_monitoring_script - ) - ) + msg = f'gcp_monitoring_script is not a valid URI. 
{gcp_monitoring_script}' + raise ValueError(msg) template['monitoring_script'] = gcp_monitoring_script if custom_options: diff --git a/caper/cli.py b/caper/cli.py index 7c076ecc..a638b231 100644 --- a/caper/cli.py +++ b/caper/cli.py @@ -1,4 +1,8 @@ #!/usr/bin/env python3 +"""Caper command line interface module.""" + +from __future__ import annotations + import copy import csv import json @@ -6,6 +10,7 @@ import os import re import sys +from typing import TYPE_CHECKING, Any, overload from autouri import GCSURI, AutoURI @@ -16,16 +21,18 @@ from .caper_labels import CaperLabels from .caper_runner import CaperRunner from .cli_hpc import subcmd_hpc -from .cromwell_backend import ( - BACKEND_ALIAS_LOCAL, - BACKEND_LOCAL, - CromwellBackendDatabase, -) +from .cromwell_backend import CromwellBackendDatabase from .cromwell_metadata import CromwellMetadata from .dict_tool import flatten_dict from .resource_analysis import LinearResourceAnalysis from .server_heartbeat import ServerHeartbeat +if TYPE_CHECKING: + from argparse import ArgumentParser, Namespace + + from .nb_subproc_thread import NBSubprocThread + + logger = logging.getLogger(__name__) @@ -41,8 +48,10 @@ PRINT_ROW_DELIMITER = '\t' -def get_abspath(path): - """Get abspath from a string. +def get_abspath(path: str) -> str: + """ + Get abspath from a string. + This function is mainly used to make a command line argument an abspath since AutoURI module only works with abspath and full URIs (e.g. /home/there, gs://here/there). @@ -52,13 +61,13 @@ def get_abspath(path): To do so, use this function for local file path strings only (e.g. toy.wdl). Do not use this function for other non-local-path strings (e.g. --docker). """ - if path: - if not AutoURI(path).is_valid: - return os.path.abspath(os.path.expanduser(path)) + if path and not AutoURI(path).is_valid: + return os.path.abspath(os.path.expanduser(path)) return path -def check_local_file_and_rename_if_exists(path, index=0): +def check_local_file_and_rename_if_exists(path: str, index: int = 0) -> str: + """Return a unique path by appending index if file exists.""" org_path = path if index: path = '.'.join([path, str(index)]) @@ -67,30 +76,29 @@ def check_local_file_and_rename_if_exists(path, index=0): return path -def print_version(parser, args): +def print_version(parser: ArgumentParser, args: Namespace) -> None: + """Print version and exit if --version flag is set.""" if args.version: - print(version) + print(version) # noqa: T201 parser.exit() -def init_logging(args): - if args.debug: - log_level = 'DEBUG' - else: - log_level = 'INFO' - logging.basicConfig( - level=log_level, format='%(asctime)s|%(name)s|%(levelname)s| %(message)s' - ) +def init_logging(args: Namespace) -> None: + """Initialize logging configuration.""" + log_level = 'DEBUG' if args.debug else 'INFO' + logging.basicConfig(level=log_level, format='%(asctime)s|%(name)s|%(levelname)s| %(message)s') # suppress filelock logging logging.getLogger('filelock').setLevel('CRITICAL') -def init_autouri(args): +def init_autouri(args: Namespace) -> None: + """Initialize AutoURI settings from args.""" if hasattr(args, 'use_gsutil_for_s3'): GCSURI.init_gcsuri(use_gsutil_for_s3=args.use_gsutil_for_s3) -def check_flags(args): +def check_flags(args: Namespace) -> None: + """Validate container and environment flags.""" singularity_flag = False docker_flag = False conda_flag = False @@ -98,74 +106,71 @@ def check_flags(args): if hasattr(args, 'singularity') and args.singularity is not None: singularity_flag = True if 
args.singularity.endswith(('.wdl', '.cwl')): - raise ValueError( + msg = ( '--singularity ate up positional arguments (e.g. WDL, CWL). ' 'Define --singularity at the end of command line arguments. ' - 'singularity={p}'.format(p=args.singularity) + f'singularity={args.singularity}' ) + raise ValueError(msg) if hasattr(args, 'docker') and args.docker is not None: docker_flag = True if args.docker.endswith(('.wdl', '.cwl')): - raise ValueError( + msg = ( '--docker ate up positional arguments (e.g. WDL, CWL). ' 'Define --docker at the end of command line arguments. ' - 'docker={p}'.format(p=args.docker) + f'docker={args.docker}' ) + raise ValueError(msg) if hasattr(args, 'soft_glob_output') and args.soft_glob_output: - raise ValueError( + msg = ( '--soft-glob-output and --docker are mutually exclusive. ' 'Delocalization from docker container will fail ' 'for soft-linked globbed outputs.' ) + raise ValueError(msg) if hasattr(args, 'conda') and args.conda is not None: conda_flag = True if args.conda.endswith(('.wdl', '.cwl')): - raise ValueError( + msg = ( '--conda ate up positional arguments (e.g. WDL, CWL). ' 'Define --conda at the end of command line arguments. ' - 'conda={p}'.format(p=args.conda) + f'conda={args.conda}' ) + raise ValueError(msg) all_flags = (docker_flag, singularity_flag, conda_flag) if len([flag for flag in all_flags if flag]) > 1: - raise ValueError('--docker, --singularity and --conda are mutually exclusive.') + msg = '--docker, --singularity and --conda are mutually exclusive.' + raise ValueError(msg) -def check_dirs(args): - """Convert local directories (local_out_dir, local_loc_dir) to absolute ones. +def check_dirs(args: Namespace) -> None: + """ + Convert local directories (local_out_dir, local_loc_dir) to absolute ones. + Also, if temporary/cache directory is not defined for each storage, then append ".caper_tmp" on output directory and use it. 
""" if hasattr(args, 'local_out_dir'): args.local_out_dir = get_abspath(args.local_out_dir) if not args.local_loc_dir: - args.local_loc_dir = os.path.join( - args.local_out_dir, CaperRunner.DEFAULT_LOC_DIR_NAME - ) - else: - if not args.local_loc_dir: - args.local_loc_dir = os.path.join( - os.getcwd(), CaperRunner.DEFAULT_LOC_DIR_NAME - ) + args.local_loc_dir = os.path.join(args.local_out_dir, CaperRunner.DEFAULT_LOC_DIR_NAME) + elif not args.local_loc_dir: + args.local_loc_dir = os.path.join(os.getcwd(), CaperRunner.DEFAULT_LOC_DIR_NAME) args.local_loc_dir = get_abspath(args.local_loc_dir) - if hasattr(args, 'gcp_out_dir'): - if args.gcp_out_dir and not args.gcp_loc_dir: - args.gcp_loc_dir = os.path.join( - args.gcp_out_dir, CaperRunner.DEFAULT_LOC_DIR_NAME - ) + if hasattr(args, 'gcp_out_dir') and args.gcp_out_dir and not args.gcp_loc_dir: + args.gcp_loc_dir = os.path.join(args.gcp_out_dir, CaperRunner.DEFAULT_LOC_DIR_NAME) - if hasattr(args, 'aws_out_dir'): - if args.aws_out_dir and not args.aws_loc_dir: - args.aws_loc_dir = os.path.join( - args.aws_out_dir, CaperRunner.DEFAULT_LOC_DIR_NAME - ) + if hasattr(args, 'aws_out_dir') and args.aws_out_dir and not args.aws_loc_dir: + args.aws_loc_dir = os.path.join(args.aws_out_dir, CaperRunner.DEFAULT_LOC_DIR_NAME) -def check_db_path(args): +def check_db_path(args: Namespace) -> None: + """Set up file database path from args.""" if hasattr(args, 'db') and args.db == CromwellBackendDatabase.DB_FILE: args.file_db = get_abspath(args.file_db) @@ -179,17 +184,8 @@ def check_db_path(args): args.file_db = os.path.join(args.local_out_dir, db_filename) -def check_backend(args): - """Check if local backend is in lower cases. - "Local" should be capitalized. i.e. local -> Local. - BACKEND_LOCAL is Local. - BACKEND_ALIAS_LOCAL is local. 
- """ - if hasattr(args, 'backend') and args.backend == BACKEND_ALIAS_LOCAL: - args.backend = BACKEND_LOCAL - - -def runner(args, nonblocking_server=False): +def runner(args: Namespace, nonblocking_server: bool = False) -> NBSubprocThread | None: + """Execute runner subcommand (run or server).""" if args.gcp_zones: args.gcp_zones = re.split(REGEX_DELIMITER_PARAMS, args.gcp_zones) if args.memory_retry_error_keys: @@ -204,6 +200,11 @@ def runner(args, nonblocking_server=False): gcp_loc_dir=args.gcp_loc_dir, aws_loc_dir=args.aws_loc_dir, gcp_service_account_key_json=get_abspath(args.gcp_service_account_key_json), + gcp_compute_service_account=args.gcp_compute_service_account, + gcp_network=args.gcp_network, + gcp_subnetwork=args.gcp_subnetwork, + gcp_dockerhub_mirror=args.gcp_dockerhub_mirror, + gcp_dockerhub_mirror_address=args.gcp_dockerhub_mirror_address, cromwell=get_abspath(args.cromwell), womtool=get_abspath(getattr(args, 'womtool', None)), disable_call_caching=args.disable_call_caching, @@ -226,7 +227,6 @@ def runner(args, nonblocking_server=False): postgresql_db_password=args.postgresql_db_password, postgresql_db_name=args.postgresql_db_name, gcp_prj=args.gcp_prj, - use_google_cloud_life_sciences=args.use_google_cloud_life_sciences, gcp_region=args.gcp_region, gcp_zones=args.gcp_zones, gcp_call_caching_dup_strat=args.gcp_call_caching_dup_strat, @@ -252,16 +252,15 @@ def runner(args, nonblocking_server=False): ) if args.action == 'run': - subcmd_run(c, args) - - elif args.action == 'server': + return subcmd_run(c, args) + if args.action == 'server': return subcmd_server(c, args, nonblocking=nonblocking_server) - - else: - raise ValueError('Unsupported runner action {act}'.format(act=args.action)) + msg = f'Unsupported runner action {args.action}' + raise ValueError(msg) -def client(args): +def client(args: Namespace) -> None: + """Execute client subcommand.""" sh = None if not args.no_server_heartbeat: sh = ServerHeartbeat( @@ -277,11 +276,11 @@ def client(args): gcp_loc_dir=args.gcp_loc_dir, aws_loc_dir=args.aws_loc_dir, gcp_service_account_key_json=get_abspath(args.gcp_service_account_key_json), + gcp_compute_service_account=args.gcp_compute_service_account, server_hostname=args.hostname, server_port=args.port, server_heartbeat=sh, womtool=get_abspath(args.womtool), - use_google_cloud_life_sciences=args.use_google_cloud_life_sciences, gcp_zones=args.gcp_zones, slurm_partition=args.slurm_partition, slurm_account=args.slurm_account, @@ -323,12 +322,29 @@ def client(args): elif args.action == 'cleanup': subcmd_cleanup(c, args) else: - raise ValueError('Unsupported client action {act}'.format(act=args.action)) - - -def subcmd_server(caper_runner, args, nonblocking=False): + msg = f'Unsupported client action {args.action}' + raise ValueError(msg) + + +@overload +def subcmd_server( + caper_runner: CaperRunner, args: Namespace, nonblocking: bool = True +) -> NBSubprocThread: ... +@overload +def subcmd_server( + caper_runner: CaperRunner, args: Namespace, nonblocking: bool = False +) -> None: ... +def subcmd_server( + caper_runner: CaperRunner, args: Namespace, nonblocking: bool = False +) -> NBSubprocThread | None: """ + Run a Cromwell server. + Args: + caper_runner: + CaperRunner instance for running workflows. + args: + Parsed command-line arguments. nonblocking: Make this function return a Thread object instead of blocking (Thread.join()). 
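# NOTE: the @overload pair above differs only in a bool default value
# (nonblocking: bool = True vs. bool = False). Type checkers cannot discriminate
# overloads by defaults alone, so the NBSubprocThread/None split will not narrow
# at call sites and mypy reports the pair as overlapping. A minimal sketch of the
# conventional Literal-based form, reusing names from this file (a suggestion
# under that assumption, not the PR's actual API):

from typing import Literal, overload

@overload
def subcmd_server(
    caper_runner: CaperRunner, args: Namespace, nonblocking: Literal[True]
) -> NBSubprocThread: ...
@overload
def subcmd_server(
    caper_runner: CaperRunner, args: Namespace, nonblocking: Literal[False] = False
) -> None: ...
def subcmd_server(
    caper_runner: CaperRunner, args: Namespace, nonblocking: bool = False
) -> NBSubprocThread | None:
    ...  # single runtime implementation, as in the diff body below

# With Literal[True]/Literal[False], the return type is resolved from the
# argument at each call site instead of always widening to the union.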
@@ -356,10 +372,8 @@ def subcmd_server(caper_runner, args, nonblocking=False): if nonblocking: return caper_runner.server(fileobj_stdout=sys.stdout, **args_from_cli) - cromwell_stdout = check_local_file_and_rename_if_exists( - get_abspath(args.cromwell_stdout) - ) - logger.info('Cromwell stdout: {stdout}'.format(stdout=cromwell_stdout)) + cromwell_stdout = check_local_file_and_rename_if_exists(get_abspath(args.cromwell_stdout)) + logger.info('Cromwell stdout: %s', cromwell_stdout) with open(cromwell_stdout, 'w') as f: try: @@ -368,17 +382,16 @@ def subcmd_server(caper_runner, args, nonblocking=False): thread.join() thread.stop(wait=True) if thread.returncode: - logger.error('Check stdout in {file}'.format(file=cromwell_stdout)) + logger.error('Check stdout in %s', cromwell_stdout) except KeyboardInterrupt: - logger.error(USER_INTERRUPT_WARNING, exc_info=True) + logger.exception(USER_INTERRUPT_WARNING) -def subcmd_run(caper_runner, args): - cromwell_stdout = check_local_file_and_rename_if_exists( - get_abspath(args.cromwell_stdout) - ) - logger.info('Cromwell stdout: {stdout}'.format(stdout=cromwell_stdout)) +def subcmd_run(caper_runner: CaperRunner, args: Namespace) -> None: + """Execute the run subcommand.""" + cromwell_stdout = check_local_file_and_rename_if_exists(get_abspath(args.cromwell_stdout)) + logger.info('Cromwell stdout: %s', cromwell_stdout) with open(cromwell_stdout, 'w') as f: try: @@ -410,13 +423,14 @@ def subcmd_run(caper_runner, args): thread.join() thread.stop(wait=True) if thread.returncode: - logger.error('Check stdout in {file}'.format(file=cromwell_stdout)) + logger.error('Check stdout in %s', cromwell_stdout) except KeyboardInterrupt: - logger.error(USER_INTERRUPT_WARNING, exc_info=True) + logger.exception(USER_INTERRUPT_WARNING) -def subcmd_submit(caper_client, args): +def subcmd_submit(caper_client: CaperClientSubmit, args: Namespace) -> None: + """Execute the submit subcommand.""" caper_client.submit( wdl=get_abspath(args.wdl), backend=args.backend, @@ -439,15 +453,18 @@ def subcmd_submit(caper_client, args): ) -def subcmd_abort(caper_client, args): +def subcmd_abort(caper_client: CaperClient, args: Namespace) -> None: + """Execute the abort subcommand.""" caper_client.abort(args.wf_id_or_label) -def subcmd_unhold(caper_client, args): +def subcmd_unhold(caper_client: CaperClient, args: Namespace) -> None: + """Execute the unhold subcommand.""" caper_client.unhold(args.wf_id_or_label) -def subcmd_list(caper_client, args): +def subcmd_list(caper_client: CaperClient, args: Namespace) -> None: + """Execute the list subcommand.""" workflows = caper_client.list( args.wf_id_or_label, exclude_subworkflow=not args.show_subworkflow ) @@ -466,10 +483,7 @@ def subcmd_list(caper_client, args): parent_workflow_id = w.get('parentWorkflowId') if args.hide_result_before is not None: - if ( - w.get('submission') - and w.get('submission') <= args.hide_result_before - ): + if w.get('submission') and w.get('submission') <= args.hide_result_before: continue for f in formats: if f == 'workflow_id': @@ -496,31 +510,35 @@ def subcmd_list(caper_client, args): logger.debug('Ignored BrokenPipeError.') -def subcmd_metadata(caper_client, args): - m = caper_client.metadata( - wf_ids_or_labels=args.wf_id_or_label, embed_subworkflow=True - ) +def subcmd_metadata(caper_client: CaperClient, args: Namespace) -> None: + """Execute the metadata subcommand.""" + m = caper_client.metadata(wf_ids_or_labels=args.wf_id_or_label, embed_subworkflow=True) if not m: - raise ValueError('Found no workflow 
matching with search query.') - elif len(m) > 1: - raise ValueError('Found multiple workflow matching with search query.') + msg = 'Found no workflow matching with search query.' + raise ValueError(msg) + if len(m) > 1: + msg = 'Found multiple workflow matching with search query.' + raise ValueError(msg) - print(json.dumps(m[0], indent=4)) - -def get_single_cromwell_metadata_obj(caper_client, args, subcmd): +def get_single_cromwell_metadata_obj( + caper_client: CaperClient, args: Namespace, subcmd: str +) -> CromwellMetadata: + """Get a single CromwellMetadata object from file or server query.""" if not args.wf_id_or_label: - raise ValueError( + msg = ( 'Define at least one metadata JSON file or ' 'a search query for workflow ID/string label ' 'if there is a running Caper server.' ) - elif len(args.wf_id_or_label) > 1: - raise ValueError( - 'Multiple files/queries are not allowed for {subcmd}. ' + raise ValueError(msg) + if len(args.wf_id_or_label) > 1: + msg = ( + f'Multiple files/queries are not allowed for {subcmd}. ' 'Define one metadata JSON file or a search query ' - 'for workflow ID/string label.'.format(subcmd=subcmd) + 'for workflow ID/string label.' ) + raise ValueError(msg) metadata_file = AutoURI(get_abspath(args.wf_id_or_label[0])) @@ -531,16 +549,18 @@ def get_single_cromwell_metadata_obj(caper_client, args, subcmd): wf_ids_or_labels=args.wf_id_or_label, embed_subworkflow=True ) if len(metadata_objs) > 1: - raise ValueError('Found multiple workflows matching with search query.') - elif len(metadata_objs) == 0: - raise ValueError('Found no workflow matching with search query.') + msg = 'Found multiple workflows matching with search query.' + raise ValueError(msg) + if len(metadata_objs) == 0: + msg = 'Found no workflow matching with search query.' + raise ValueError(msg) metadata = metadata_objs[0] return CromwellMetadata(metadata) -def split_list_into_file_and_non_file(lst): - """Returns tuple of (list of existing files, list of non-file strings)""" +def split_list_into_file_and_non_file(lst: list[str]) -> tuple[list[str], list[str]]: + """Returns tuple of (list of existing files, list of non-file strings).""" files = [] non_files = [] @@ -553,13 +573,17 @@ def split_list_into_file_and_non_file(lst): return files, non_files -def get_multi_cromwell_metadata_objs(caper_client, args): +def get_multi_cromwell_metadata_objs( + caper_client: CaperClient, args: Namespace +) -> list[CromwellMetadata]: + """Get multiple CromwellMetadata objects from files or server queries.""" if not args.wf_id_or_label: - raise ValueError( + msg = ( 'Define at least one metadata JSON file or ' 'a search query for workflow ID/string label ' 'if there is a running Caper server.' ) + raise ValueError(msg) files, non_files = split_list_into_file_and_non_file(args.wf_id_or_label) @@ -574,21 +598,22 @@ def get_multi_cromwell_metadata_objs(caper_client, args): ) if not all_metadata: - raise ValueError('Found no metadata/workflow matching with search query.') + msg = 'Found no metadata/workflow matching with search query.' 
+ raise ValueError(msg) return [CromwellMetadata(m) for m in all_metadata] -def subcmd_troubleshoot(caper_client, args): +def subcmd_troubleshoot(caper_client: CaperClient, args: Namespace) -> None: + """Execute the troubleshoot subcommand.""" cm = get_single_cromwell_metadata_obj(caper_client, args, 'troubleshoot/debug') sys.stdout.write( - cm.troubleshoot( - show_completed_task=args.show_completed_task, show_stdout=args.show_stdout - ) + cm.troubleshoot(show_completed_task=args.show_completed_task, show_stdout=args.show_stdout) ) -def subcmd_gcp_monitor(caper_client, args): - """Prints out monitoring result either in a TSV format or in a JSON one. +def subcmd_gcp_monitor(caper_client: CaperClient, args: Namespace) -> None: + """ + Prints out monitoring result either in a TSV format or in a JSON one. TSV format: TSV will be a flattened JSON with dot notation. @@ -607,7 +632,7 @@ def subcmd_gcp_monitor(caper_client, args): result.extend(metadata.gcp_monitor()) if args.json_format: - print(json.dumps(result, indent=4)) + pass else: # input_file_sizes is dynamic in length so exclude and then put it back first_data = copy.deepcopy(result[0]) @@ -627,21 +652,23 @@ def subcmd_gcp_monitor(caper_client, args): if len(file_sizes) == 1: row.append(key) else: - row.append(key + '[{idx}]'.format(idx=i)) + row.append(key + f'[{i}]') row.append(str(file_size)) writer.writerow(row) -def read_json(json_file): +def read_json(json_file: str | None) -> dict[str, Any] | None: + """Read JSON file and return a dictionary.""" if json_file: json_contents = AutoURI(get_abspath(json_file)).read() return json.loads(json_contents) + return None -def subcmd_gcp_res_analysis(caper_client, args): - """Solves linear regression problem to find coeffs and intercept - to help optimizing resources for a task based on task's input file size. +def subcmd_gcp_res_analysis(caper_client: CaperClient, args: Namespace) -> None: + """ + Solves linear regression problem to optimize resources for a task based on input file sizes. Prints out found coeffs and intercept along with raw dataset (x, y). - x: input file sizes for a task @@ -660,10 +687,10 @@ def subcmd_gcp_res_analysis(caper_client, args): target_resources=args.target_resources, plot_pdf=get_abspath(args.plot_pdf), ) - print(json.dumps(result, indent=4)) + print(json.dumps(result, indent=4)) # noqa: T201 -def subcmd_cleanup(caper_client, args): +def subcmd_cleanup(caper_client: CaperClient, args: Namespace) -> None: """Cleanup outputs of a workflow.""" cm = get_single_cromwell_metadata_obj(caper_client, args, 'cleanup') cm.cleanup(dry_run=not args.delete, num_threads=args.num_threads, no_lock=True) @@ -674,8 +701,14 @@ def subcmd_cleanup(caper_client, args): ) -def main(args=None, nonblocking_server=False): +@overload +def main(args: list[str] | None = None, nonblocking_server: bool = False) -> None: ... +@overload +def main(args: list[str] | None = None, nonblocking_server: bool = True) -> NBSubprocThread: ... +def main(args: list[str] | None = None, nonblocking_server: bool = False) -> NBSubprocThread | None: """ + Main function for the Caper command line interface. + Args: args: List of command line arguments. 
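# NOTE: a hedged usage sketch for the new main() overloads above. With
# nonblocking_server=True, `caper server` can be driven programmatically (e.g.
# from the integration tests), since main() returns the server thread instead
# of blocking. The port value and the workflow-submission step are illustrative
# assumptions, not taken from this PR:

from caper.cli import main

thread = main(['server', '--port', '8005'], nonblocking_server=True)
try:
    ...  # wait for readiness, then submit workflows against the local server
finally:
    thread.stop(wait=True)  # terminate the Cromwell subprocess and join

# With the default nonblocking_server=False, the same call blocks until the
# Cromwell server process exits.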
@@ -700,16 +733,14 @@ def main(args=None, nonblocking_server=False): check_dirs(parsed_args) check_db_path(parsed_args) - check_backend(parsed_args) if parsed_args.action == 'init': - init_caper_conf(parsed_args.conf, parsed_args.platform) - elif parsed_args.action in ('hpc'): + return init_caper_conf(parsed_args.conf, parsed_args.platform) + if parsed_args.action == 'hpc': return subcmd_hpc(parsed_args) - elif parsed_args.action in ('run', 'server'): + if parsed_args.action in ('run', 'server'): return runner(parsed_args, nonblocking_server=nonblocking_server) - else: - client(parsed_args) + return client(parsed_args) if __name__ == '__main__': diff --git a/caper/cli_hpc.py b/caper/cli_hpc.py index b4bb4578..b34906ec 100644 --- a/caper/cli_hpc.py +++ b/caper/cli_hpc.py @@ -1,13 +1,24 @@ +"""CLI for HPC commands.""" + +from __future__ import annotations + import logging import sys +from typing import TYPE_CHECKING from .hpc import LsfWrapper, PbsWrapper, SgeWrapper, SlurmWrapper +if TYPE_CHECKING: + import argparse + logger = logging.getLogger(__name__) -def make_caper_run_command_for_hpc_submit(): - """Makes `caper run ...` command from `caper hpc submit` command by simply +def make_caper_run_command_for_hpc_submit() -> list[str]: + """ + Make `caper run ...` command from `caper hpc submit` command. + + Makes `caper run ...` command from `caper hpc submit` command by simply replacing `caper hpc submit` with `caper run`. This also escapes double quotes in caper run command. """ @@ -17,17 +28,17 @@ def make_caper_run_command_for_hpc_submit(): new_argv.pop(2) new_argv[1] = 'run' return new_argv - else: - raise ValueError('Wrong HPC command') + msg = 'Wrong HPC command' + raise ValueError(msg) -def subcmd_hpc(args): - if args.hpc_action == 'submit': +def subcmd_hpc(args: argparse.Namespace) -> None: + """Handle 'caper hpc' subcommand.""" + if args.hpc_action == 'submit': if args.leader_job_name is None: - raise ValueError( - 'Define --leader-job-name [LEADER_JOB_NAME] in the command line arguments.' - ) + msg = 'Define --leader-job-name [LEADER_JOB_NAME] in the command line arguments.' 
+ raise ValueError(msg) caper_run_command = make_caper_run_command_for_hpc_submit() if args.backend == 'slurm': @@ -38,22 +49,23 @@ def subcmd_hpc(args): ).submit(args.leader_job_name, caper_run_command) elif args.backend == 'sge': - stdout = SgeWrapper( - args.sge_leader_job_resource_param.split(), args.sge_queue - ).submit(args.leader_job_name, caper_run_command) + stdout = SgeWrapper(args.sge_leader_job_resource_param.split(), args.sge_queue).submit( + args.leader_job_name, caper_run_command + ) elif args.backend == 'pbs': - stdout = PbsWrapper( - args.pbs_leader_job_resource_param.split(), args.pbs_queue - ).submit(args.leader_job_name, caper_run_command) + stdout = PbsWrapper(args.pbs_leader_job_resource_param.split(), args.pbs_queue).submit( + args.leader_job_name, caper_run_command + ) elif args.backend == 'lsf': - stdout = LsfWrapper( - args.lsf_leader_job_resource_param.split(), args.lsf_queue - ).submit(args.leader_job_name, caper_run_command) + stdout = LsfWrapper(args.lsf_leader_job_resource_param.split(), args.lsf_queue).submit( + args.leader_job_name, caper_run_command + ) else: - raise ValueError('Unsupported backend {b} for hpc'.format(b=args.backend)) + msg = f'Unsupported backend {args.backend} for hpc' + raise ValueError(msg) else: if args.backend == 'slurm': hpc_wrapper = SlurmWrapper() @@ -64,7 +76,8 @@ def subcmd_hpc(args): elif args.backend == 'lsf': hpc_wrapper = LsfWrapper() else: - raise ValueError('Unsupported backend {b} for hpc'.format(b=args.backend)) + msg = f'Unsupported backend {args.backend} for hpc' + raise ValueError(msg) if args.hpc_action == 'list': stdout = hpc_wrapper.list() @@ -73,6 +86,7 @@ def subcmd_hpc(args): stdout = hpc_wrapper.abort(args.job_ids) else: - raise ValueError('Unsupported hpc action {act}'.format(act=args.hpc_action)) + msg = f'Unsupported hpc action {args.hpc_action}' + raise ValueError(msg) - print(stdout) + print(stdout) # noqa: T201 diff --git a/caper/cromwell.py b/caper/cromwell.py index 02a04480..5520a665 100644 --- a/caper/cromwell.py +++ b/caper/cromwell.py @@ -1,34 +1,42 @@ +"""Cromwell server and JAR management.""" + import json import logging import os import shutil import socket import tempfile +from collections.abc import Callable +from pathlib import Path +from typing import TextIO from autouri import AbsPath, AutoURI -from .cromwell_metadata import CromwellMetadata -from .cromwell_workflow_monitor import CromwellWorkflowMonitor -from .nb_subproc_thread import NBSubprocThread, is_fileobj_open +from caper.cromwell_metadata import CromwellMetadata +from caper.cromwell_workflow_monitor import CromwellWorkflowMonitor +from caper.nb_subproc_thread import NBSubprocThread, is_fileobj_open +from caper.server_heartbeat import ServerHeartbeat logger = logging.getLogger(__name__) class PortAlreadyInUseError(Exception): - pass + """Exception raised when a port is already in use.""" + +class WomtoolValidationFailed(Exception): # noqa: N818 + """Exception raised when Womtool validation fails.""" -class WomtoolValidationFailed(Exception): - pass +def install_file(f: AutoURI | str, install_dir: str, label: str) -> AutoURI: + """ + Install f locally on install_dir. -def install_file(f, install_dir, label): - """Install f locally on install_dir. If f is already local then skip it. """ if AbsPath(f).is_valid: return AbsPath(f).uri - logger.info('Installing {label}... {f}'.format(label=label, f=f)) + logger.info('Installing %s... 
%s', label, f) path = os.path.join(os.path.expanduser(install_dir), AutoURI(f).basename) return AutoURI(f).cp(path) @@ -36,9 +44,14 @@ def install_file(f, install_dir, label): class Cromwell: """Wraps Cromwell/Womtool.""" - DEFAULT_CROMWELL = 'https://github.com/broadinstitute/cromwell/releases/download/82/cromwell-82.jar' + DEFAULT_CROMWELL_VERSION = '91' + DEFAULT_CROMWELL = ( + 'https://github.com/broadinstitute/cromwell/releases/download/' + f'{DEFAULT_CROMWELL_VERSION}/cromwell-{DEFAULT_CROMWELL_VERSION}.jar' + ) DEFAULT_WOMTOOL = ( - 'https://github.com/broadinstitute/cromwell/releases/download/82/womtool-82.jar' + 'https://github.com/broadinstitute/cromwell/releases/download/' + f'{DEFAULT_CROMWELL_VERSION}/womtool-{DEFAULT_CROMWELL_VERSION}.jar' ) DEFAULT_CROMWELL_INSTALL_DIR = '~/.caper/cromwell_jar' DEFAULT_WOMTOOL_INSTALL_DIR = '~/.caper/womtool_jar' @@ -51,12 +64,14 @@ class Cromwell: def __init__( self, - cromwell=DEFAULT_CROMWELL, - womtool=DEFAULT_WOMTOOL, - cromwell_install_dir=DEFAULT_CROMWELL_INSTALL_DIR, - womtool_install_dir=DEFAULT_WOMTOOL_INSTALL_DIR, - ): + cromwell: AutoURI | str = DEFAULT_CROMWELL, + womtool: AutoURI | str = DEFAULT_WOMTOOL, + cromwell_install_dir: str = DEFAULT_CROMWELL_INSTALL_DIR, + womtool_install_dir: str = DEFAULT_WOMTOOL_INSTALL_DIR, + ) -> None: """ + Initializes the Cromwell class, which handles the main Cromwell/Womtool operations. + Args: cromwell: Cromwell JAR path/URI/URL. @@ -71,28 +86,25 @@ def __init__( self._womtool = womtool if not AbsPath(cromwell_install_dir).is_valid: - raise ValueError( - 'crommwell_install_dir is not a valid absolute ' - 'path. {path}'.format(path=cromwell_install_dir) - ) + msg = f'cromwell_install_dir is not a valid absolute path. {cromwell_install_dir}' + raise ValueError(msg) self._cromwell_install_dir = cromwell_install_dir if not AbsPath(womtool_install_dir).is_valid: - raise ValueError( - 'womtool_install_dir is not a valid absolute ' - 'path. {path}'.format(path=womtool_install_dir) - ) + msg = f'womtool_install_dir is not a valid absolute path. {womtool_install_dir}' + raise ValueError(msg) self._womtool_install_dir = womtool_install_dir def validate( self, - wdl, - inputs=None, - imports=None, - cwd=None, - java_heap_womtool=DEFAULT_JAVA_HEAP_WOMTOOL, - ): - """Validate WDL/inputs/imports using Womtool. + wdl: AutoURI | str, + inputs: AutoURI | str | None = None, + imports: Path | str | None = None, + cwd: str | None = None, + java_heap_womtool: str = DEFAULT_JAVA_HEAP_WOMTOOL, + ) -> None: + """ + Validate WDL/inputs/imports using Womtool. Returns: valid: @@ -102,23 +114,17 @@ def validate( wdl_file = AutoURI(wdl) if not wdl_file.exists: - raise FileNotFoundError( - 'WDL file does not exist. wdl={wdl}'.format(wdl=wdl) - ) + msg = f'WDL file does not exist. wdl={wdl}' + raise FileNotFoundError(msg) - if inputs: - if not AutoURI(inputs).exists: - raise FileNotFoundError( - 'Inputs JSON defined but does not exist. i={i}'.format(i=inputs) - ) + if inputs and not AutoURI(inputs).exists: + msg = f'Inputs JSON defined but does not exist. i={inputs}' + raise FileNotFoundError(msg) with tempfile.TemporaryDirectory() as tmp_d: if imports: if not AutoURI(imports).exists: - raise FileNotFoundError( - 'Imports file defined but does not exist. i={i}'.format( - i=imports - ) - ) + msg = f'Imports file defined but does not exist. 
i={imports}' + raise FileNotFoundError(msg) wdl_ = os.path.join(tmp_d, wdl_file.basename) wdl_file.cp(wdl_) shutil.unpack_archive(imports, tmp_d) @@ -127,7 +133,7 @@ def validate( cmd = [ 'java', - '-Xmx{heap}'.format(heap=java_heap_womtool), + f'-Xmx{java_heap_womtool}', '-jar', '-DLOG_LEVEL={lvl}'.format(lvl='INFO'), self._womtool, @@ -141,49 +147,48 @@ def validate( stderr = '' - def on_stderr(s): + def on_stderr(s: str) -> None: nonlocal stderr stderr += s - th = NBSubprocThread(cmd, cwd=tmp_d, on_stderr=on_stderr, quiet=False) + th = NBSubprocThread([str(s) for s in cmd], cwd=tmp_d, on_stderr=on_stderr, quiet=False) th.start() th.join() if th.returncode: if th.returncode == 127: - raise FileNotFoundError( + msg = ( 'Java executable not found on your system? ' 'Please install Java and try again.' ) - else: - raise WomtoolValidationFailed( - 'RC={rc}\nSTDERR={stderr}'.format( - rc=th.returncode, stderr=stderr - ) - ) + raise FileNotFoundError(msg) + msg = f'RC={th.returncode}\nSTDERR={stderr}' + raise WomtoolValidationFailed(msg) logger.info('Passed Womtool validation.') def run( self, - wdl, - inputs=None, - options=None, - imports=None, - labels=None, - metadata=None, - backend_conf=None, - backend=None, - fileobj_stdout=None, - fileobj_troubleshoot=None, - java_heap_cromwell_run=DEFAULT_JAVA_HEAP_CROMWELL_RUN, - java_heap_womtool=DEFAULT_JAVA_HEAP_WOMTOOL, - work_dir=None, - cwd=None, - on_status_change=None, - dry_run=False, - ): - """Run Cromwell run mode (java -jar cromwell.jar run). + wdl: str, + inputs: str | None = None, + options: str | None = None, + imports: str | None = None, + labels: str | None = None, + metadata: str | None = None, + backend_conf: str | None = None, + backend: str | None = None, + fileobj_stdout: TextIO | None = None, + fileobj_troubleshoot: TextIO | None = None, + java_heap_cromwell_run: str = DEFAULT_JAVA_HEAP_CROMWELL_RUN, + java_heap_womtool: str = DEFAULT_JAVA_HEAP_WOMTOOL, + work_dir: str | None = None, + cwd: str | None = None, + on_status_change: Callable[..., None] | None = None, + dry_run: bool = False, + ) -> NBSubprocThread | None: + """ + Run Cromwell run mode (java -jar cromwell.jar run). + This is a non-blocking function which returns a NBSubprocThread object. So this function itself doesn't return anything. However, its NBSubprocThread object has a return value which is validated @@ -192,6 +197,8 @@ def run( You can simply get it by thread.returnvalue after thread is done. Args: + wdl: + WDL file to run. inputs: input JSON file (-i). options: @@ -205,6 +212,10 @@ def run( backend_conf: backend.conf file (-Dconfig.file=). Default backend defined in this file will be used. + backend: + Backend to run a workflow on. + java_heap_womtool: + Java heap (java -Xmx) for Womtool. If no default backend is defined then "Local" (Cromwell's default) backend will be used. fileobj_stdout: @@ -238,10 +249,12 @@ def run( metadata (dict) of a workflow. dry_run: Dry run. + Returns: th: Thread for Cromwell's run mode. None if dry_run. - Notes: + + Notes: Thread's return value (th.returnvalue) is Cromwell's output metadata dict. 
It is None if Cromwell subprocess itself didn't run, @@ -255,7 +268,7 @@ def run( # LOG_LEVEL must be >=INFO to catch workflow ID from STDOUT cmd = [ 'java', - '-Xmx{}'.format(java_heap_cromwell_run), + f'-Xmx{java_heap_cromwell_run}', '-XX:ParallelGCThreads=1', '-jar', '-DLOG_LEVEL={lvl}'.format(lvl='INFO'), @@ -263,7 +276,7 @@ ] if backend_conf: - cmd += ['-Dconfig.file={}'.format(backend_conf)] + cmd += [f'-Dconfig.file={backend_conf}'] cmd += [self._cromwell, 'run', wdl] if inputs: cmd += ['-i', inputs] @@ -274,18 +287,16 @@ if imports: cmd += ['-p', imports] if metadata is None: - metadata = os.path.join( - work_dir, CromwellMetadata.DEFAULT_METADATA_BASENAME - ) + metadata = os.path.join(work_dir, CromwellMetadata.DEFAULT_METADATA_BASENAME) cmd += ['-m', metadata] - logger.debug('cmd: {cmd}'.format(cmd=' '.join(cmd))) + logger.debug('cmd: %s', ' '.join(str(s) for s in cmd)) if dry_run: - return + return None wm = CromwellWorkflowMonitor(on_status_change=on_status_change, is_server=False) - def on_stdout(stdout): + def on_stdout(stdout: str) -> None: nonlocal wm nonlocal fileobj_stdout @@ -294,7 +305,7 @@ def on_stdout(stdout): fileobj_stdout.flush() wm.update(stdout) - def on_finish(): + def on_finish() -> dict | None: nonlocal metadata nonlocal fileobj_troubleshoot @@ -312,6 +323,7 @@ def on_finish(): # to make it a return value of the thread after it is done (joined) return metadata_dict + return None th = NBSubprocThread( cmd, @@ -326,20 +338,22 @@ def on_finish(): def server( self, - server_port=DEFAULT_SERVER_PORT, - server_hostname=None, - server_heartbeat=None, - backend_conf=None, - fileobj_stdout=None, - embed_subworkflow=False, - java_heap_cromwell_server=DEFAULT_JAVA_HEAP_CROMWELL_SERVER, - auto_write_metadata=True, - on_server_start=None, - on_status_change=None, - cwd=None, - dry_run=False, - ): - """Run Cromwell server mode (java -jar cromwell.jar server). + server_port: int = DEFAULT_SERVER_PORT, + server_hostname: str | None = None, + server_heartbeat: ServerHeartbeat | None = None, + backend_conf: str | None = None, + fileobj_stdout: TextIO | None = None, + embed_subworkflow: bool = False, + java_heap_cromwell_server: str = DEFAULT_JAVA_HEAP_CROMWELL_SERVER, + auto_write_metadata: bool = True, + on_server_start: Callable[[], None] | None = None, + on_status_change: Callable[..., None] | None = None, + cwd: str | None = None, + dry_run: bool = False, + ) -> NBSubprocThread | None: + """ + Run Cromwell server mode (java -jar cromwell.jar server). + This is a non-blocking function that returns a Thread object of Cromwell server. However, this Thread object has a property status that indicates whether the server is started and ready to take submissions. @@ -394,6 +408,7 @@ def server( This will be finally passed to subprocess.Popen(cwd=). dry_run: Dry run. + Returns: th: Thread for Cromwell's server mode. @@ -405,32 +420,28 @@ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) result = sock.connect_ex((Cromwell.LOCALHOST, server_port)) if not result: - raise PortAlreadyInUseError( - 'Server port {p} is already taken. ' - 'Try with a different port'.format(p=server_port) - ) + msg = f'Server port {server_port} is already taken. 
Try with a different port' + raise PortAlreadyInUseError(msg) # LOG_LEVEL must be >=INFO to catch workflow ID from STDOUT cmd = [ 'java', - '-Xmx{}'.format(java_heap_cromwell_server), + f'-Xmx{java_heap_cromwell_server}', '-XX:ParallelGCThreads=1', '-jar', - '-DLOG_LEVEL={lvl}'.format(lvl='INFO'), + '-DLOG_LEVEL=INFO', '-DLOG_MODE=standard', - '-Dwebservice.port={port}'.format(port=server_port), + f'-Dwebservice.port={server_port}', ] if backend_conf: - cmd += ['-Dconfig.file={}'.format(backend_conf)] - logger.debug( - 'backend_conf contents:\n{s}'.format(s=AutoURI(backend_conf).read()) - ) + cmd += [f'-Dconfig.file={backend_conf}'] + logger.debug('backend_conf contents:\n%s', AutoURI(backend_conf).read()) cmd += [self._cromwell, 'server'] - logger.debug('cmd: {cmd}'.format(cmd=' '.join(cmd))) + logger.debug('cmd: %s', ' '.join(cmd)) if dry_run: - return + return None wm = CromwellWorkflowMonitor( server_port=server_port, @@ -441,8 +452,10 @@ def server( on_status_change=on_status_change, ) - def on_stdout(stdout): - """Returns 'server_started' when server is ready to take submissions. + def on_stdout(stdout: str) -> str | None: + """ + Returns 'server_started' when server is ready to take submissions. + Return value of this callback function is to update .status of an NBSubprocThread object. """ @@ -459,8 +472,9 @@ def on_stdout(stdout): if server_heartbeat and not server_heartbeat.is_alive(): server_heartbeat.start(port=server_port, hostname=server_hostname) return 'server_started' + return None - def on_finish(): + def on_finish() -> None: nonlocal server_heartbeat if server_heartbeat: @@ -477,14 +491,10 @@ def on_finish(): return th - def install_cromwell(self): - self._cromwell = install_file( - self._cromwell, self._cromwell_install_dir, 'Cromwell JAR' - ) + def install_cromwell(self) -> str: + self._cromwell = install_file(self._cromwell, self._cromwell_install_dir, 'Cromwell JAR') return self._cromwell - def install_womtool(self): - self._womtool = install_file( - self._womtool, self._womtool_install_dir, 'Womtool JAR' - ) + def install_womtool(self) -> str: + self._womtool = install_file(self._womtool, self._womtool_install_dir, 'Womtool JAR') return self._womtool diff --git a/caper/cromwell_backend.py b/caper/cromwell_backend.py index 1ddd0f6e..9424d8f2 100644 --- a/caper/cromwell_backend.py +++ b/caper/cromwell_backend.py @@ -1,23 +1,36 @@ -import json +"""Cromwell backend configuration.""" + +from __future__ import annotations + import logging from collections import UserDict from copy import deepcopy +from enum import StrEnum from textwrap import dedent +from typing import TYPE_CHECKING, Any, ClassVar, Literal from .dict_tool import merge_dict +if TYPE_CHECKING: + from collections.abc import Sequence + logger = logging.getLogger(__name__) -BACKEND_GCP = 'gcp' -BACKEND_AWS = 'aws' -BACKEND_LOCAL = 'Local' -BACKEND_ALIAS_LOCAL = 'local' -BACKEND_SLURM = 'slurm' -BACKEND_SGE = 'sge' -BACKEND_PBS = 'pbs' -BACKEND_LSF = 'lsf' -DEFAULT_BACKEND = BACKEND_LOCAL +class BackendProvider(StrEnum): + """Backends supported by Cromwell/Caper.""" + + GCP = 'gcp' + AWS = 'aws' + LOCAL = 'local' + SLURM = 'slurm' + SGE = 'sge' + PBS = 'pbs' + LSF = 'lsf' + + +DEFAULT_BACKEND = BackendProvider.LOCAL + ENVIRONMENT_DOCKER = 'docker' ENVIRONMENT_SINGULARITY = 'singularity' @@ -36,6 +49,11 @@ CALL_CACHING_DUP_STRAT_REFERENCE = 'reference' CALL_CACHING_DUP_STRAT_SOFTLINK = 'soft-link' +CachingDuplicationStrategies = Literal['soft-link', 'hard-link', 'copy', 'reference'] 
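A quick sketch of how this Literal alias behaves under a type checker (the `strategy` variable below is illustrative, not part of this diff):

```python
# Sketch: the alias narrows duplication strategies to the four values
# Cromwell understands; a type checker rejects anything else.
from typing import Literal, get_args

CachingDuplicationStrategies = Literal['soft-link', 'hard-link', 'copy', 'reference']

strategy: CachingDuplicationStrategies = 'copy'  # accepted by mypy/pyright
# strategy = 'symlink'  # would be flagged as an invalid Literal value

assert 'reference' in get_args(CachingDuplicationStrategies)
```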
+CachingDuplicationStrategyArgs = ( + CachingDuplicationStrategies | tuple[CachingDuplicationStrategies, ...] +) + LOCAL_HASH_STRAT_FILE = 'file' LOCAL_HASH_STRAT_PATH = 'path' LOCAL_HASH_STRAT_PATH_MTIME = 'path+modtime' @@ -43,20 +61,23 @@ SOFT_GLOB_OUTPUT_CMD = 'ln -sL GLOB_PATTERN GLOB_DIRECTORY 2> /dev/null' -def get_s3_bucket_name(s3_uri): +def get_s3_bucket_name(s3_uri: str) -> str: + """Get the bucket name from an S3 URI.""" return s3_uri.replace('s3://', '', 1).split('/')[0] +type CromwellBackendConfigTemplate = dict[str, Any] + + class CromwellBackendCommon(UserDict): """Basic stanzas for Cromwell backend conf.""" - TEMPLATE = { + TEMPLATE: ClassVar[CromwellBackendConfigTemplate] = { 'backend': {}, 'webservice': {}, 'services': { 'LoadController': { - 'class': 'cromwell.services.loadcontroller.impl' - '.LoadControllerServiceActor', + 'class': 'cromwell.services.loadcontroller.impl.LoadControllerServiceActor', 'config': { # added due to issues on stanford sherlock/scg 'control-frequency': '21474834 seconds' @@ -77,13 +98,21 @@ class CromwellBackendCommon(UserDict): def __init__( self, - default_backend, - disable_call_caching=False, - max_concurrent_workflows=DEFAULT_MAX_CONCURRENT_WORKFLOWS, - memory_retry_error_keys=DEFAULT_MEMORY_RETRY_ERROR_KEYS, - ): + default_backend: BackendProvider | str | None, + disable_call_caching: bool = False, + max_concurrent_workflows: int = DEFAULT_MAX_CONCURRENT_WORKFLOWS, + memory_retry_error_keys: Sequence[str] | None = DEFAULT_MEMORY_RETRY_ERROR_KEYS, + ) -> None: """ + Initializes the common options of the Cromwell backend configuration file. + Args: + default_backend: + Default backend provider. + disable_call_caching: + Disable call-caching (re-using outputs from previous workflows/tasks). + max_concurrent_workflows: + Limit for concurrent number of workflows. memory_retry_error_keys: List of error strings to catch out-of-memory error See https://cromwell.readthedocs.io/en/develop/cromwell_features/RetryWithMoreMemory @@ -98,20 +127,31 @@ def __init__( self['system']['max-concurrent-workflows'] = max_concurrent_workflows # Cromwell's bug in memory-retry feature. # Disabled until it's fixed on Cromwell's side. - # if memory_retry_error_keys: - # if isinstance(memory_retry_error_keys, tuple): - # memory_retry_error_keys = list(memory_retry_error_keys) - # self['system']['memory-retry-error-keys'] = memory_retry_error_keys + if memory_retry_error_keys: + logger.warning( + 'memory_retry_error_keys is not implemented due to upstream Cromwell bugs. ' + 'This argument will be ignored.' + ) + # if isinstance(memory_retry_error_keys, tuple): + # memory_retry_error_keys = list(memory_retry_error_keys) + # self['system']['memory-retry-error-keys'] = memory_retry_error_keys class CromwellBackendServer(UserDict): """Stanzas for Cromwell server.""" - TEMPLATE = {'webservice': {}} + TEMPLATE: ClassVar[CromwellBackendConfigTemplate] = {'webservice': {}} DEFAULT_SERVER_PORT = 8000 - def __init__(self, server_port=DEFAULT_SERVER_PORT): + def __init__(self, server_port: int = DEFAULT_SERVER_PORT) -> None: + """ + Initialize the Cromwell backend server. + + Args: + server_port: + The port for the Cromwell server. 
+ """ super().__init__(deepcopy(CromwellBackendServer.TEMPLATE)) self['webservice']['port'] = server_port @@ -120,7 +160,9 @@ def __init__(self, server_port=DEFAULT_SERVER_PORT): class CromwellBackendDatabase(UserDict): """Common stanzas for Cromwell's metadata database.""" - TEMPLATE = {'database': {'db': {'connectionTimeout': 5000, 'numThreads': 1}}} + TEMPLATE: ClassVar[CromwellBackendConfigTemplate] = { + 'database': {'db': {'connectionTimeout': 5000, 'numThreads': 1}} + } DB_IN_MEMORY = 'in-memory' DB_FILE = 'file' @@ -152,30 +194,30 @@ class CromwellBackendDatabase(UserDict): DEFAULT_MYSQL_DB_IP = 'localhost' DEFAULT_MYSQL_DB_PORT = 3306 DEFAULT_MYSQL_DB_USER = 'cromwell' - DEFAULT_MYSQL_DB_PASSWORD = 'cromwell' + DEFAULT_MYSQL_DB_PASSWORD = 'cromwell' # noqa: S105 DEFAULT_MYSQL_DB_NAME = 'cromwell' DEFAULT_POSTGRESQL_DB_IP = 'localhost' DEFAULT_POSTGRESQL_DB_PORT = 5432 DEFAULT_POSTGRESQL_DB_USER = 'cromwell' - DEFAULT_POSTGRESQL_DB_PASSWORD = 'cromwell' + DEFAULT_POSTGRESQL_DB_PASSWORD = 'cromwell' # noqa: S105 DEFAULT_POSTGRESQL_DB_NAME = 'cromwell' def __init__( self, - db=DEFAULT_DB, - db_timeout=DEFAULT_DB_TIMEOUT_MS, - mysql_db_ip=DEFAULT_MYSQL_DB_IP, - mysql_db_port=DEFAULT_MYSQL_DB_PORT, - mysql_db_user=DEFAULT_MYSQL_DB_USER, - mysql_db_password=DEFAULT_MYSQL_DB_PASSWORD, - mysql_db_name=DEFAULT_MYSQL_DB_NAME, - postgresql_db_ip=DEFAULT_POSTGRESQL_DB_IP, - postgresql_db_port=DEFAULT_POSTGRESQL_DB_PORT, - postgresql_db_user=DEFAULT_POSTGRESQL_DB_USER, - postgresql_db_password=DEFAULT_POSTGRESQL_DB_PASSWORD, - postgresql_db_name=DEFAULT_POSTGRESQL_DB_NAME, - file_db=None, - ): + db: str = DEFAULT_DB, + db_timeout: int = DEFAULT_DB_TIMEOUT_MS, + mysql_db_ip: str = DEFAULT_MYSQL_DB_IP, + mysql_db_port: int = DEFAULT_MYSQL_DB_PORT, + mysql_db_user: str = DEFAULT_MYSQL_DB_USER, + mysql_db_password: str = DEFAULT_MYSQL_DB_PASSWORD, + mysql_db_name: str = DEFAULT_MYSQL_DB_NAME, + postgresql_db_ip: str = DEFAULT_POSTGRESQL_DB_IP, + postgresql_db_port: int = DEFAULT_POSTGRESQL_DB_PORT, + postgresql_db_user: str = DEFAULT_POSTGRESQL_DB_USER, + postgresql_db_password: str = DEFAULT_POSTGRESQL_DB_PASSWORD, + postgresql_db_name: str = DEFAULT_POSTGRESQL_DB_NAME, + file_db: str | None = None, + ) -> None: super().__init__(deepcopy(CromwellBackendDatabase.TEMPLATE)) database = self['database'] @@ -183,9 +225,9 @@ def __init__( db_obj['connectionTimeout'] = db_timeout - if db == CromwellBackendDatabase.DB_FILE: - if not file_db: - raise ValueError('file_db must be defined for db {db}'.format(db=db)) + if db == CromwellBackendDatabase.DB_FILE and file_db is None: + msg = f'file_db must be defined for db {db}' + raise ValueError(msg) if db == CromwellBackendDatabase.DB_IN_MEMORY: pass @@ -213,15 +255,18 @@ def __init__( db_obj['password'] = postgresql_db_password else: - raise ValueError('Unsupported DB type {db}'.format(db=db)) + msg = f'Unsupported DB type {db}' + raise ValueError(msg) class CromwellBackendBase(UserDict): - """Base skeleton backend for all backends""" + """Base skeleton backend for all backends.""" - TEMPLATE = {'backend': {'providers': {}}} - TEMPLATE_BACKEND = {'config': {'default-runtime-attributes': {}, 'filesystems': {}}} - DEFAULT_CALL_CACHING_DUP_STRAT = ( + TEMPLATE: ClassVar[CromwellBackendConfigTemplate] = {'backend': {'providers': {}}} + TEMPLATE_BACKEND: ClassVar[CromwellBackendConfigTemplate] = { + 'config': {'default-runtime-attributes': {}, 'filesystems': {}} + } + DEFAULT_CALL_CACHING_DUP_STRAT: ClassVar[CachingDuplicationStrategyArgs] = ( 
CALL_CACHING_DUP_STRAT_SOFTLINK, CALL_CACHING_DUP_STRAT_HARDLINK, CALL_CACHING_DUP_STRAT_COPY, @@ -230,12 +275,15 @@ class CromwellBackendBase(UserDict): def __init__( self, - backend_name, - max_concurrent_tasks=DEFAULT_CONCURRENT_JOB_LIMIT, - filesystem_name=None, - call_caching_dup_strat=DEFAULT_CALL_CACHING_DUP_STRAT, - ): + backend_name: str, + max_concurrent_tasks: int = DEFAULT_CONCURRENT_JOB_LIMIT, + filesystem_name: str | None = None, + call_caching_dup_strat: CachingDuplicationStrategies + | tuple[CachingDuplicationStrategies, ...] = DEFAULT_CALL_CACHING_DUP_STRAT, + ) -> None: """ + Initialize the Cromwell backend base. + Args: backend_name: Backend's name. @@ -250,7 +298,8 @@ def __init__( super().__init__(deepcopy(CromwellBackendBase.TEMPLATE)) if backend_name is None: - raise ValueError('backend_name must be provided.') + msg = 'backend_name must be provided.' + raise ValueError(msg) self._backend_name = backend_name self.backend = CromwellBackendBase.TEMPLATE_BACKEND @@ -259,99 +308,121 @@ def __init__( config['concurrent-job-limit'] = max_concurrent_tasks if filesystem_name: - if isinstance(call_caching_dup_strat, tuple): - call_caching_dup_strat = list(call_caching_dup_strat) - config['filesystems'][filesystem_name] = { - 'caching': {'duplication-strategy': call_caching_dup_strat} + 'caching': { + 'duplication-strategy': list(call_caching_dup_strat) + if isinstance(call_caching_dup_strat, tuple) + else call_caching_dup_strat + } } @property - def backend(self): + def backend(self) -> CromwellBackendConfigTemplate: return self['backend']['providers'][self._backend_name] @backend.setter - def backend(self, backend): + def backend(self, backend: CromwellBackendConfigTemplate) -> None: self['backend']['providers'][self._backend_name] = deepcopy(backend) - def merge_backend(self, backend): + def merge_backend(self, backend: CromwellBackendConfigTemplate) -> None: merge_dict(self.backend, backend) @property - def backend_config(self): + def backend_config(self) -> CromwellBackendConfigTemplate: return self.backend['config'] @property - def default_runtime_attributes(self): + def default_runtime_attributes(self) -> dict[str, Any]: """Backend's default runtime attributes in self.backend_config.""" return self.backend_config['default-runtime-attributes'] class CromwellBackendGcp(CromwellBackendBase): - TEMPLATE = { + """Google Cloud Platform backend configuration for Cromwell.""" + + TEMPLATE: ClassVar[CromwellBackendConfigTemplate] = { 'google': {'application-name': 'cromwell'}, 'engine': {'filesystems': {FILESYSTEM_GCS: {'auth': 'default'}}}, } - TEMPLATE_BACKEND = { + TEMPLATE_BACKEND: ClassVar[CromwellBackendConfigTemplate] = { 'config': { 'default-runtime-attributes': {}, - 'genomics-api-queries-per-100-seconds': 1000, 'maximum-polling-interval': 600, 'localization-attempts': 3, - 'genomics': { - 'restrict-metadata-access': False, - 'compute-service-account': 'default', - }, + 'batch': {}, } } - ACTOR_FACTORY_V2ALPHA = ( - 'cromwell.backend.google.pipelines.v2alpha1.PipelinesApiLifecycleActorFactory' - ) - ACTOR_FACTORY_V2BETA = ( - 'cromwell.backend.google.pipelines.v2beta.PipelinesApiLifecycleActorFactory' - ) - GENOMICS_ENDPOINT_V2ALPHA = 'https://genomics.googleapis.com/' - GENOMICS_ENDPOINT_V2BETA = 'https://lifesciences.googleapis.com/' + + ACTOR_FACTORY_BATCH = 'cromwell.backend.google.batch.GcpBatchBackendLifecycleActorFactory' DEFAULT_REGION = 'us-central1' - DEFAULT_CALL_CACHING_DUP_STRAT = CALL_CACHING_DUP_STRAT_REFERENCE + DEFAULT_CALL_CACHING_DUP_STRAT: 
ClassVar[CachingDuplicationStrategyArgs] = ( + CALL_CACHING_DUP_STRAT_REFERENCE, + ) + + LOGGING_POLICY_GOOGLE_CLOUD_STORAGE = 'PATH' + LOGGING_POLICY_GOOGLE_CLOUD_LOGGING = 'LOGGING' + + DEFAULT_DOCKERHUB_MIRROR_ADDRESS = 'mirror.gcr.io' def __init__( self, - gcp_prj, - gcp_out_dir, - gcp_service_account_key_json=None, - use_google_cloud_life_sciences=False, - gcp_region=DEFAULT_REGION, - gcp_zones=None, - max_concurrent_tasks=CromwellBackendBase.DEFAULT_CONCURRENT_JOB_LIMIT, - call_caching_dup_strat=DEFAULT_CALL_CACHING_DUP_STRAT, - ): - """ + gcp_prj: str, + gcp_out_dir: str, + gcp_service_account_key_json: str | None = None, + gcp_compute_service_account: str | None = None, + gcp_network: str | None = None, + gcp_subnetwork: str | None = None, + gcp_dockerhub_mirror: bool = True, + gcp_dockerhub_mirror_address: str = DEFAULT_DOCKERHUB_MIRROR_ADDRESS, + gcp_region: str = DEFAULT_REGION, + gcp_logging_policy: str = LOGGING_POLICY_GOOGLE_CLOUD_STORAGE, + max_concurrent_tasks: int = CromwellBackendBase.DEFAULT_CONCURRENT_JOB_LIMIT, + call_caching_dup_strat: CachingDuplicationStrategyArgs = DEFAULT_CALL_CACHING_DUP_STRAT, + ) -> None: + """Initialize GCP backend configuration. + Args: + gcp_prj: + Google project name. + gcp_out_dir: + Output bucket path for GCP backend (gs://). gcp_service_account_key_json: Use this key JSON file to use service_account scheme instead of application_default. - use_google_cloud_life_sciences: - Use Google Cloud Life Sciences API (v2beta) instead of - deprecated Genomics API (v2alpha1). + gcp_compute_service_account: + Set this to override the Batch compute service account. Otherwise, + defaults to the project's compute engine service account. Ensure that + this service account has the `roles/batch.agentReporter` role, so that + VM instances can report their status to Batch. + gcp_network: + VPC network name for GCP Batch backend. Required for VPCs in custom subnet mode. + gcp_subnetwork: + VPC subnetwork name for GCP Batch backend. Required for VPCs in custom subnet mode. + gcp_dockerhub_mirror: + Enable Docker Hub mirroring through Google Artifact Registry. + gcp_dockerhub_mirror_address: + Address of the Docker Hub mirror (default: mirror.gcr.io). gcp_region: - Region for Google Cloud Life Sciences API. - gcp_zones: - List of zones for Genomics API. - Ignored if use_google_cloud_life_sciences. + Region for Google Cloud Batch API. + gcp_logging_policy: + Logging policy for GCP backend (PATH or LOGGING). + max_concurrent_tasks: + Limit for concurrent number of tasks. + call_caching_dup_strat: + Call-caching duplication strategy for GCP backend. 
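To make these Batch-era options concrete, a construction sketch (project, bucket, and VPC names below are placeholders, not values from this change):

```python
# Sketch: build a GCP Batch backend config stanza. All identifiers are
# hypothetical; only the keyword names come from this signature.
from caper.cromwell_backend import CromwellBackendGcp

gcp = CromwellBackendGcp(
    gcp_prj='my-project',
    gcp_out_dir='gs://my-bucket/caper-out',
    gcp_network='my-vpc',          # needed for custom-subnet-mode VPCs
    gcp_subnetwork='my-subnet',
    gcp_dockerhub_mirror=True,     # route Docker Hub pulls via mirror.gcr.io
)
print(gcp['backend']['providers']['gcp']['config']['root'])  # gs://my-bucket/caper-out
```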
""" - if call_caching_dup_strat not in ( - CALL_CACHING_DUP_STRAT_REFERENCE, - CALL_CACHING_DUP_STRAT_COPY, - ): - raise ValueError( - 'Wrong call_caching_dup_strat for GCP: {v}'.format( - v=call_caching_dup_strat - ) - ) + valid_gcp_strategies = (CALL_CACHING_DUP_STRAT_REFERENCE, CALL_CACHING_DUP_STRAT_COPY) + if isinstance(call_caching_dup_strat, tuple): + invalid = [s for s in call_caching_dup_strat if s not in valid_gcp_strategies] + if invalid: + msg = f'Wrong call_caching_dup_strat for GCP: {call_caching_dup_strat}' + raise ValueError(msg) + elif call_caching_dup_strat not in valid_gcp_strategies: + msg = f'Wrong call_caching_dup_strat for GCP: {call_caching_dup_strat}' + raise ValueError(msg) super().__init__( - backend_name=BACKEND_GCP, + backend_name=BackendProvider.GCP, max_concurrent_tasks=max_concurrent_tasks, filesystem_name=FILESYSTEM_GCS, call_caching_dup_strat=call_caching_dup_strat, @@ -360,63 +431,86 @@ def __init__( self.merge_backend(CromwellBackendGcp.TEMPLATE_BACKEND) config = self.backend_config - genomics = config['genomics'] + batch = config['batch'] filesystems = config['filesystems'] + # Canonical Cromwell config structure for GCP Batch places Batch-specific knobs + # under `backend.providers.GCPBATCH.config.batch`. + batch['location'] = gcp_region + batch['logs-policy'] = gcp_logging_policy + + # Fix duplication-strategy: GCS expects a single string, not a list + # The parent class converts tuples to lists, but GCS needs a string + if isinstance(call_caching_dup_strat, tuple) and len(call_caching_dup_strat) == 1: + filesystems[FILESYSTEM_GCS]['caching']['duplication-strategy'] = call_caching_dup_strat[ + 0 + ] + if gcp_service_account_key_json: - genomics['auth'] = 'service-account' - filesystems[FILESYSTEM_GCS]['auth'] = 'service-account' + auth_name = 'service-account' + config['auth'] = auth_name + batch['auth'] = auth_name + filesystems[FILESYSTEM_GCS]['auth'] = auth_name self['google']['auths'] = [ { - 'name': 'service-account', + 'name': auth_name, 'scheme': 'service_account', 'json-file': gcp_service_account_key_json, } ] - # parse service account key JSON to get client_email. 
- with open(gcp_service_account_key_json) as fp: - key_json = json.loads(fp.read()) - genomics['compute-service-account'] = key_json['client_email'] - self['engine']['filesystems'][FILESYSTEM_GCS]['auth'] = 'service-account' + self['engine']['filesystems'][FILESYSTEM_GCS]['auth'] = auth_name else: - genomics['auth'] = 'application-default' - filesystems[FILESYSTEM_GCS]['auth'] = 'application-default' - self['google']['auths'] = [ - {'name': 'application-default', 'scheme': 'application_default'} - ] - self['engine']['filesystems'][FILESYSTEM_GCS][ - 'auth' - ] = 'application-default' - - if use_google_cloud_life_sciences: - self.backend['actor-factory'] = CromwellBackendGcp.ACTOR_FACTORY_V2BETA - genomics['endpoint-url'] = CromwellBackendGcp.GENOMICS_ENDPOINT_V2BETA - genomics['location'] = gcp_region - else: - self.backend['actor-factory'] = CromwellBackendGcp.ACTOR_FACTORY_V2ALPHA - genomics['endpoint-url'] = CromwellBackendGcp.GENOMICS_ENDPOINT_V2ALPHA - if gcp_zones: - self.default_runtime_attributes['zones'] = ' '.join(gcp_zones) + auth_name = 'application-default' + config['auth'] = auth_name + batch['auth'] = auth_name + filesystems[FILESYSTEM_GCS]['auth'] = auth_name + self['google']['auths'] = [{'name': auth_name, 'scheme': 'application_default'}] + self['engine']['filesystems'][FILESYSTEM_GCS]['auth'] = auth_name + + # If service account email is provided, use it for compute-service-account + if gcp_compute_service_account: + batch['compute-service-account'] = gcp_compute_service_account + + # Virtual Private Cloud configuration for custom subnet mode VPCs + if gcp_network or gcp_subnetwork: + vpc_config: dict[str, str] = {} + if gcp_network: + vpc_config['network-name'] = gcp_network + if gcp_subnetwork: + vpc_config['subnetwork-name'] = gcp_subnetwork + config['virtual-private-cloud'] = vpc_config + + # Docker Hub mirroring configuration + if gcp_dockerhub_mirror: + config['docker-mirror'] = { + 'dockerhub': { + 'enabled': True, + 'address': gcp_dockerhub_mirror_address, + } + } + + self.backend['actor-factory'] = CromwellBackendGcp.ACTOR_FACTORY_BATCH config['project'] = gcp_prj self['engine']['filesystems'][FILESYSTEM_GCS]['project'] = gcp_prj if not gcp_out_dir.startswith('gs://'): - raise ValueError( - 'Wrong GCS bucket URI for gcp_out_dir: {v}'.format(v=gcp_out_dir) - ) + msg = f'Wrong GCS bucket URI for gcp_out_dir: {gcp_out_dir}' + raise ValueError(msg) config['root'] = gcp_out_dir class CromwellBackendAws(CromwellBackendBase): - TEMPLATE = { + """AWS Batch backend configuration for Cromwell.""" + + TEMPLATE: ClassVar = { 'aws': { 'application-name': 'cromwell', 'auths': [{'name': 'default', 'scheme': 'default'}], }, 'engine': {'filesystems': {FILESYSTEM_S3: {'auth': 'default'}}}, } - TEMPLATE_BACKEND = { + TEMPLATE_BACKEND: ClassVar = { 'actor-factory': 'cromwell.backend.impl.aws.AwsBatchBackendLifecycleActorFactory', 'config': { 'default-runtime-attributes': {}, @@ -426,25 +520,24 @@ class CromwellBackendAws(CromwellBackendBase): 'filesystems': {FILESYSTEM_S3: {'auth': 'default'}}, }, } - DEFAULT_CALL_CACHING_DUP_STRAT = CALL_CACHING_DUP_STRAT_REFERENCE + DEFAULT_CALL_CACHING_DUP_STRAT: ClassVar[CachingDuplicationStrategyArgs] = ( + CALL_CACHING_DUP_STRAT_REFERENCE + ) def __init__( self, - aws_batch_arn, - aws_region, - aws_out_dir, - max_concurrent_tasks=CromwellBackendBase.DEFAULT_CONCURRENT_JOB_LIMIT, - call_caching_dup_strat=DEFAULT_CALL_CACHING_DUP_STRAT, - ): + aws_batch_arn: str, + aws_region: str, + aws_out_dir: str, + max_concurrent_tasks: int = 
CromwellBackendBase.DEFAULT_CONCURRENT_JOB_LIMIT, + call_caching_dup_strat: CachingDuplicationStrategyArgs = DEFAULT_CALL_CACHING_DUP_STRAT, + ) -> None: if call_caching_dup_strat not in ( CALL_CACHING_DUP_STRAT_REFERENCE, CALL_CACHING_DUP_STRAT_COPY, ): - raise ValueError( - 'Wrong call_caching_dup_strat for S3: {v}'.format( - v=call_caching_dup_strat - ) - ) + msg = f'Wrong call_caching_dup_strat for S3: {call_caching_dup_strat}' + raise ValueError(msg) if call_caching_dup_strat == CALL_CACHING_DUP_STRAT_REFERENCE: logger.warning( 'Warning for aws backend: "reference" mode for call_caching_dup_strat currently ' @@ -454,13 +547,13 @@ def __init__( '"reference" mode on gcp backend works fine. ' 'See the following link for details. ' 'https://github.com/broadinstitute/cromwell/issues/6327. ' - 'It is recommend to clean up previous workflow\'s outputs manually ' + "It is recommended to clean up previous workflow's outputs manually " 'with "caper cleanup WORKFLOW_ID_OR_METADATA_JSON_FILE" or ' 'with AWS CLI. e.g. ' '"aws s3 rm --recursive s3://some-bucket/a/b/c/WORKFLOW_ID". ' ) super().__init__( - backend_name=BACKEND_AWS, + backend_name=BackendProvider.AWS, max_concurrent_tasks=max_concurrent_tasks, filesystem_name=FILESYSTEM_S3, call_caching_dup_strat=call_caching_dup_strat, @@ -468,23 +561,20 @@ merge_dict(self.data, CromwellBackendAws.TEMPLATE) self.merge_backend(CromwellBackendAws.TEMPLATE_BACKEND) - aws = self[BACKEND_AWS] + aws = self[BackendProvider.AWS] aws['region'] = aws_region config = self.backend_config if not aws_out_dir.startswith('s3://'): - raise ValueError( - 'Wrong S3 bucket URI for aws_out_dir: {v}'.format(v=aws_out_dir) - ) + msg = f'Wrong S3 bucket URI for aws_out_dir: {aws_out_dir}' + raise ValueError(msg) config['root'] = aws_out_dir - self.default_runtime_attributes['scriptBucketName'] = get_s3_bucket_name( - aws_out_dir - ) + self.default_runtime_attributes['scriptBucketName'] = get_s3_bucket_name(aws_out_dir) self.default_runtime_attributes['queueArn'] = aws_batch_arn class CromwellBackendLocal(CromwellBackendBase): - """Class constants: + """Class constants for Cromwell backend local. SUBMIT_DOCKER: Cromwell falls back to 'submit_docker' instead of 'submit' if WDL task has @@ -505,7 +595,8 @@ class CromwellBackendLocal(CromwellBackendBase): Possible issue: - 'sed' is used here with a delimiter as hash mark (#) so hash marks in output path can result in error. - - Files globbed by WDL functions other than write_*() will still have paths inside a container. + - Files globbed by WDL functions other than write_*() will still have + paths inside a container. """ RUNTIME_ATTRIBUTES = dedent( """ @@ -615,9 +706,10 @@ class CromwellBackendLocal(CromwellBackendBase): fi """ ) - TEMPLATE_BACKEND = { + TEMPLATE_BACKEND: ClassVar[dict[str, Any]] = { 'actor-factory': 'cromwell.backend.impl.sfs.config.ConfigBackendLifecycleActorFactory', 'config': { + 'run-in-background': True, 'script-epilogue': 'sleep 5', 'filesystems': { FILESYSTEM_LOCAL: { @@ -639,13 +731,14 @@ class CromwellBackendLocal(CromwellBackendBase): def __init__( self, - local_out_dir, - backend_name=BACKEND_LOCAL, - soft_glob_output=False, - local_hash_strat=DEFAULT_LOCAL_HASH_STRAT, - max_concurrent_tasks=CromwellBackendBase.DEFAULT_CONCURRENT_JOB_LIMIT, - ): - """Base class for local backends. 
+ local_out_dir: str, + backend_name: str = BackendProvider.LOCAL, + soft_glob_output: bool = False, + local_hash_strat: str = DEFAULT_LOCAL_HASH_STRAT, + max_concurrent_tasks: int = CromwellBackendBase.DEFAULT_CONCURRENT_JOB_LIMIT, + ) -> None: + """ + Base class for local backends. Used flock to synchronize local Singularity image building. Image building will occur in the first task and other parallel tasks will wait. @@ -669,20 +762,22 @@ def __init__( LOCAL_HASH_STRAT_PATH_MTIME, LOCAL_HASH_START_XXH64, ): - raise ValueError( - 'Wrong local_hash_strat: {strat}'.format(strat=local_hash_strat) - ) + msg = f'Wrong local_hash_strat: {local_hash_strat}' + raise ValueError(msg) caching['hashing-strategy'] = local_hash_strat if soft_glob_output: config['glob-link-command'] = SOFT_GLOB_OUTPUT_CMD if local_out_dir is None: - raise ValueError('local_out_dir must be provided.') + msg = 'local_out_dir must be provided.' + raise ValueError(msg) config['root'] = local_out_dir class CromwellBackendHpc(CromwellBackendLocal): + """Base HPC backend configuration for Cromwell.""" + HPC_RUNTIME_ATTRIBUTES = dedent( """ Int cpu = 1 @@ -693,21 +788,32 @@ class CromwellBackendHpc(CromwellBackendLocal): def __init__( self, - local_out_dir, - backend_name=None, - max_concurrent_tasks=CromwellBackendBase.DEFAULT_CONCURRENT_JOB_LIMIT, - soft_glob_output=False, - local_hash_strat=CromwellBackendLocal.DEFAULT_LOCAL_HASH_STRAT, - check_alive=None, - kill=None, - job_id_regex=None, - submit=None, - runtime_attributes=None, - ): - """Base class for HPCs. + local_out_dir: str, + backend_name: str, + max_concurrent_tasks: int = CromwellBackendBase.DEFAULT_CONCURRENT_JOB_LIMIT, + soft_glob_output: bool = False, + local_hash_strat: str = CromwellBackendLocal.DEFAULT_LOCAL_HASH_STRAT, + check_alive: str | None = None, + kill: str | None = None, + job_id_regex: str | None = None, + submit: str | None = None, + runtime_attributes: str | None = None, + ) -> None: + r"""Base class for HPCs. + No docker support. docker attribute in WDL task's runtime will be just ignored. Args: + local_out_dir: + Output directory for local backends. + backend_name: + Backend name identifier. + max_concurrent_tasks: + Limit for concurrent number of tasks. + soft_glob_output: + Glob with ln -s instead of hard-linking. + local_hash_strat: + Local file hashing strategy for call-caching. check_alive: Shell command lines to check if a job exists. WDL syntax allowed in ${} notation. @@ -742,13 +848,17 @@ def __init__( config = self.backend_config if not check_alive: - raise ValueError('check_alive not defined!') + msg = 'check_alive not defined!' + raise ValueError(msg) if not kill: - raise ValueError('kill not defined!') + msg = 'kill not defined!' + raise ValueError(msg) if not job_id_regex: - raise ValueError('job_id_regex not defined!') + msg = 'job_id_regex not defined!' + raise ValueError(msg) if not submit: - raise ValueError('submit not defined!') + msg = 'submit not defined!' + raise ValueError(msg) config['check-alive'] = check_alive config['kill'] = kill @@ -767,6 +877,8 @@ def __init__( class CromwellBackendSlurm(CromwellBackendHpc): + """SLURM backend configuration for Cromwell.""" + SLURM_RUNTIME_ATTRIBUTES = dedent( """ String? 
slurm_partition @@ -827,16 +939,18 @@ class CromwellBackendSlurm(CromwellBackendHpc): def __init__( self, - local_out_dir, - max_concurrent_tasks=CromwellBackendBase.DEFAULT_CONCURRENT_JOB_LIMIT, - soft_glob_output=False, - local_hash_strat=CromwellBackendLocal.DEFAULT_LOCAL_HASH_STRAT, - slurm_partition=None, - slurm_account=None, - slurm_extra_param=None, - slurm_resource_param=DEFAULT_SLURM_RESOURCE_PARAM, - ): - """SLURM backend. + local_out_dir: str, + max_concurrent_tasks: int = CromwellBackendBase.DEFAULT_CONCURRENT_JOB_LIMIT, + soft_glob_output: bool = False, + local_hash_strat: str = CromwellBackendLocal.DEFAULT_LOCAL_HASH_STRAT, + slurm_partition: str | None = None, + slurm_account: str | None = None, + slurm_extra_param: str | None = None, + slurm_resource_param: str = DEFAULT_SLURM_RESOURCE_PARAM, + ) -> None: + """ + SLURM backend. + Try sbatching up to 3 times every 30 seconds to prevent Cromwell from halting the whole pipeline immediately after the first failure. @@ -849,6 +963,20 @@ def __init__( So 'squeue --noheader -j JOB_ID' is used here and it checks if output is empty Args: + local_out_dir: + Output directory for local backends. + max_concurrent_tasks: + Limit for concurrent number of tasks. + soft_glob_output: + Glob with ln -s instead of hard-linking. + local_hash_strat: + Local file hashing strategy for call-caching. + slurm_partition: + SLURM partition if required to sbatch jobs. + slurm_account: + SLURM account if required to sbatch jobs. + slurm_extra_param: + SLURM extra parameter to be appended to sbatch command line. slurm_resource_param: String of a set of resource parameters for the job submission engine. WDL syntax allowed in ${} notation. @@ -862,7 +990,7 @@ def __init__( super().__init__( local_out_dir=local_out_dir, - backend_name=BACKEND_SLURM, + backend_name=BackendProvider.SLURM, max_concurrent_tasks=max_concurrent_tasks, soft_glob_output=soft_glob_output, local_hash_strat=local_hash_strat, @@ -882,6 +1010,8 @@ def __init__( class CromwellBackendSge(CromwellBackendHpc): + """SGE backend configuration for Cromwell.""" + SGE_RUNTIME_ATTRIBUTES = dedent( """ String? sge_pe @@ -924,18 +1054,33 @@ class CromwellBackendSge(CromwellBackendHpc): def __init__( self, - local_out_dir, - max_concurrent_tasks=CromwellBackendBase.DEFAULT_CONCURRENT_JOB_LIMIT, - soft_glob_output=False, - local_hash_strat=CromwellBackendLocal.DEFAULT_LOCAL_HASH_STRAT, - sge_pe=None, - sge_queue=None, - sge_extra_param=None, - sge_resource_param=DEFAULT_SGE_RESOURCE_PARAM, - ): - """SGE backend. Try qsubbing up to 3 times every 30 second. + local_out_dir: str, + max_concurrent_tasks: int = CromwellBackendBase.DEFAULT_CONCURRENT_JOB_LIMIT, + soft_glob_output: bool = False, + local_hash_strat: str = CromwellBackendLocal.DEFAULT_LOCAL_HASH_STRAT, + sge_pe: str | None = None, + sge_queue: str | None = None, + sge_extra_param: str | None = None, + sge_resource_param: str = DEFAULT_SGE_RESOURCE_PARAM, + ) -> None: + """ + SGE backend. Try qsubbing up to 3 times every 30 seconds. Args: + local_out_dir: + Output directory for local backends. + max_concurrent_tasks: + Limit for concurrent number of tasks. + soft_glob_output: + Glob with ln -s instead of hard-linking. + local_hash_strat: + Local file hashing strategy for call-caching. + sge_pe: + SGE parallel environment (required to run with multiple cpus). + sge_queue: + SGE queue. + sge_extra_param: + SGE extra parameter to be appended to qsub command line. 
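Since the SLURM, SGE, PBS, and LSF classes share one construction pattern, a single hedged sketch covers them (partition, account, and queue names below are placeholders):

```python
# Sketch: HPC backends differ only in their scheduler-specific keywords.
# Directory, partition, and queue names are hypothetical.
from caper.cromwell_backend import CromwellBackendSge, CromwellBackendSlurm

slurm = CromwellBackendSlurm(
    local_out_dir='/data/caper_out',
    slurm_partition='normal',
    slurm_account='my_lab',
)
sge = CromwellBackendSge(
    local_out_dir='/data/caper_out',
    sge_pe='shm',      # parallel environment; required for multi-CPU tasks
    sge_queue='all.q',
)
```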
sge_resource_param: String of a set of resource parameters for the job submission engine. WDL syntax allowed in ${} notation. @@ -948,7 +1093,7 @@ def __init__( super().__init__( local_out_dir=local_out_dir, - backend_name=BACKEND_SGE, + backend_name=BackendProvider.SGE, max_concurrent_tasks=max_concurrent_tasks, soft_glob_output=soft_glob_output, local_hash_strat=local_hash_strat, @@ -968,6 +1113,8 @@ def __init__( class CromwellBackendPbs(CromwellBackendHpc): + """PBS backend configuration for Cromwell.""" + PBS_RUNTIME_ATTRIBUTES = dedent( """ String? pbs_queue @@ -1005,17 +1152,30 @@ class CromwellBackendPbs(CromwellBackendHpc): def __init__( self, - local_out_dir, - max_concurrent_tasks=CromwellBackendBase.DEFAULT_CONCURRENT_JOB_LIMIT, - soft_glob_output=False, - local_hash_strat=CromwellBackendLocal.DEFAULT_LOCAL_HASH_STRAT, - pbs_queue=None, - pbs_extra_param=None, - pbs_resource_param=DEFAULT_PBS_RESOURCE_PARAM, - ): - """PBS backend. Try qsubbing up to 3 times every 30 second. + local_out_dir: str, + max_concurrent_tasks: int = CromwellBackendBase.DEFAULT_CONCURRENT_JOB_LIMIT, + soft_glob_output: bool = False, + local_hash_strat: str = CromwellBackendLocal.DEFAULT_LOCAL_HASH_STRAT, + pbs_queue: str | None = None, + pbs_extra_param: str | None = None, + pbs_resource_param: str = DEFAULT_PBS_RESOURCE_PARAM, + ) -> None: + """ + PBS backend. Try qsubbing up to 3 times every 30 seconds. Args: + local_out_dir: + Output directory for local backends. + max_concurrent_tasks: + Limit for concurrent number of tasks. + soft_glob_output: + Glob with ln -s instead of hard-linking. + local_hash_strat: + Local file hashing strategy for call-caching. + pbs_queue: + PBS queue. + pbs_extra_param: + PBS extra parameter to be appended to qsub command line. pbs_resource_param: String of a set of resource parameters for the job submission engine. WDL syntax allowed in ${} notation. @@ -1028,7 +1188,7 @@ def __init__( super().__init__( local_out_dir=local_out_dir, - backend_name=BACKEND_PBS, + backend_name=BackendProvider.PBS, max_concurrent_tasks=max_concurrent_tasks, soft_glob_output=soft_glob_output, local_hash_strat=local_hash_strat, @@ -1046,6 +1206,8 @@ def __init__( class CromwellBackendLsf(CromwellBackendHpc): + """LSF backend configuration for Cromwell.""" + LSF_RUNTIME_ATTRIBUTES = dedent( """ String? lsf_queue @@ -1084,22 +1246,35 @@ class CromwellBackendLsf(CromwellBackendHpc): def __init__( self, - local_out_dir, - max_concurrent_tasks=CromwellBackendBase.DEFAULT_CONCURRENT_JOB_LIMIT, - soft_glob_output=False, - local_hash_strat=CromwellBackendLocal.DEFAULT_LOCAL_HASH_STRAT, - lsf_queue=None, - lsf_extra_param=None, - lsf_resource_param=DEFAULT_LSF_RESOURCE_PARAM, - ): - """LSF backend. Try bsubbing up to 3 times every 30 second. + local_out_dir: str, + max_concurrent_tasks: int = CromwellBackendBase.DEFAULT_CONCURRENT_JOB_LIMIT, + soft_glob_output: bool = False, + local_hash_strat: str = CromwellBackendLocal.DEFAULT_LOCAL_HASH_STRAT, + lsf_queue: str | None = None, + lsf_extra_param: str | None = None, + lsf_resource_param: str = DEFAULT_LSF_RESOURCE_PARAM, + ) -> None: + """ + LSF backend. Try bsubbing up to 3 times every 30 seconds. Args: + local_out_dir: + Output directory for local backends. + max_concurrent_tasks: + Limit for concurrent number of tasks. + soft_glob_output: + Glob with ln -s instead of hard-linking. + local_hash_strat: + Local file hashing strategy for call-caching. + lsf_queue: + LSF queue. + lsf_extra_param: + LSF extra parameter to be appended to bsub command line. 
lsf_resource_param: String of a set of resource parameters for the job submission engine. WDL syntax allowed in ${} notation. This will be appended to the job submission command line. - e.g. qsub ... THIS_RESOURCE_PARAM + e.g. bsub ... THIS_RESOURCE_PARAM """ submit = CromwellBackendLsf.TEMPLATE_LSF_SUBMIT.format( submit=CromwellBackendLocal.SUBMIT, lsf_resource_param=lsf_resource_param @@ -1107,7 +1282,7 @@ def __init__( super().__init__( local_out_dir=local_out_dir, - backend_name=BACKEND_LSF, + backend_name=BackendProvider.LSF, max_concurrent_tasks=max_concurrent_tasks, soft_glob_output=soft_glob_output, local_hash_strat=local_hash_strat, diff --git a/caper/cromwell_metadata.py b/caper/cromwell_metadata.py index fd41fc59..a1b91a4e 100644 --- a/caper/cromwell_metadata.py +++ b/caper/cromwell_metadata.py @@ -1,130 +1,173 @@ +"""Helpers for reading and analyzing Cromwell metadata.""" + import io import json import logging import os import re from collections import defaultdict +from collections.abc import Callable, Iterable, Iterator +from dataclasses import dataclass +from typing import Any, Self, TypeVar, cast import humanfriendly import numpy as np import pandas as pd from autouri import GCSURI, AbsPath, AutoURI, URIBase +from pandas.errors import EmptyDataError from .dict_tool import recurse_dict_value logger = logging.getLogger(__name__) -def get_workflow_root_from_call(call): +def get_workflow_root_from_call(call: dict) -> str | None: + """Returns workflow's root directory from a call.""" call_root = call.get('callRoot') if call_root: return '/'.join(call_root.split('/')[:-1]) + return None -def get_workflow_id_from_workflow_root(workflow_root): +def get_workflow_id_from_workflow_root(workflow_root: str | None) -> str | None: + """Returns workflow's ID from a workflow's root directory.""" if workflow_root: return workflow_root.split('/')[-1] + return None -def parse_cromwell_disks(s): +def parse_cromwell_disks(s: str | None) -> int | None: """Parses Cromwell's disks in runtime attribute.""" if s: - m = re.findall(r'(\d+)', s) - if m: - return int(m[0]) * 1024 * 1024 * 1024 + matches = re.findall(r'(\d+)', s) + if matches: + return int(matches[0]) * 1024 * 1024 * 1024 + return None -def parse_cromwell_memory(s): +def parse_cromwell_memory(s: str | None) -> int | None: """Parses Cromwell's memory runtime attribute.""" if s: return humanfriendly.parse_size(s) + return None + +T = TypeVar('T') -def convert_type_np_to_py(o): + +def convert_type_np_to_py(o: np.generic) -> Any: """Convert numpy type to Python type.""" if isinstance(o, np.generic): return o.item() raise TypeError +@dataclass(frozen=True, slots=True) +class _CallContext: + """Internal context for a single Cromwell call (task).""" + + call_name: str + call: dict[str, Any] + parent_call_names: tuple[str, ...] 
= () + + class CromwellMetadata: + """Metadata helper for Cromwell workflows.""" + DEFAULT_METADATA_BASENAME = 'metadata.json' DEFAULT_GCP_MONITOR_STAT_METHODS = ('mean', 'std', 'max', 'min', 'last') - def __init__(self, metadata): - """Parses metadata JSON (dict) object or file.""" + def __init__(self, metadata: dict | Self | str) -> None: + """Load metadata from a dict, another instance, or a JSON file path/URI.""" if isinstance(metadata, dict): self._metadata = metadata elif isinstance(metadata, CromwellMetadata): - self._metadata = metadata._metadata + self._metadata = metadata._metadata # noqa: SLF001 else: - s = AutoURI(metadata).read() + s = cast('str', AutoURI(metadata).read()) self._metadata = json.loads(s) @property - def data(self): + def data(self) -> dict: + """Raw metadata dictionary.""" return self._metadata @property - def metadata(self): + def metadata(self) -> dict: + """Alias for raw metadata dictionary.""" return self._metadata @property - def workflow_id(self): + def workflow_id(self) -> str | None: + """Workflow ID string or None if absent.""" return self._metadata.get('id') @property - def workflow_status(self): + def workflow_status(self) -> str | None: + """Workflow status (e.g., Succeeded, Failed) or None if absent.""" return self._metadata.get('status') @property - def workflow_root(self): + def workflow_root(self) -> str | None: + """Best-effort workflow root directory (explicit or inferred).""" if 'workflowRoot' in self._metadata: return self._metadata['workflowRoot'] - else: - workflow_roots = [ - get_workflow_root_from_call(call) for _, call, _ in self.recursed_calls - ] - common_root = os.path.commonprefix(workflow_roots) - if common_root: - guessed_workflow_id = get_workflow_id_from_workflow_root(common_root) - if guessed_workflow_id == self.workflow_id: - return common_root - logger.error( - 'workflowRoot not found in metadata JSON. ' - 'Tried to guess from callRoot of each call but failed.' - ) + workflow_roots = [get_workflow_root_from_call(call) for _, call, _ in self.recursed_calls] + common_root = os.path.commonprefix([r for r in workflow_roots if r]) + if common_root: + guessed_workflow_id = get_workflow_id_from_workflow_root(common_root) + if guessed_workflow_id == self.workflow_id: + return common_root + logger.error( + 'workflowRoot not found in metadata JSON. ' + 'Tried to guess from callRoot of each call but failed.' + ) + return None @property - def failures(self): + def failures(self) -> Any: + """Failures object from metadata, if present.""" return self._metadata.get('failures') @property - def calls(self): + def calls(self) -> Any: + """Calls object from metadata (tasks and subworkflows).""" return self._metadata.get('calls') + def iter_call_contexts( + self, + *, + parent_call_names: tuple[str, ...] = (), + ) -> Iterator[_CallContext]: + """Yield leaf task calls, descending into subWorkflowMetadata.""" + calls = self.calls or {} + for call_name, call_list in calls.items(): + for call in call_list: + sub = call.get('subWorkflowMetadata') + if sub is not None: + yield from CromwellMetadata(sub).iter_call_contexts( + parent_call_names=(*parent_call_names, call_name) + ) + else: + yield _CallContext( + call_name=call_name, + call=call, + parent_call_names=parent_call_names, + ) + @property - def recursed_calls(self): - """Returns a generator for tuples. - - Tuple: - call_name: - Call's name. i.e. key in the original metadata JSON's `calls` dict. - call: - Call object. i.e. value in the original metadata JSON's `calls` dict. 
- parent_call_names: - Tuple of Parent call's names. - """ - return self.recurse_calls( - lambda call_name, call, parent_call_names: ( - call_name, - call, - parent_call_names, - ) - ) + def recursed_calls(self) -> Iterator[tuple[str, dict[str, Any], tuple[str, ...]]]: + """Generator of (call_name, call, parent_call_names) for all leaf tasks.""" + for ctx in self.iter_call_contexts(): + yield (ctx.call_name, ctx.call, ctx.parent_call_names) - def recurse_calls(self, fn_call, parent_call_names=tuple()): - """Recurse on tasks in metadata. + def recurse_calls( + self, + fn_call: Callable[[str, dict[str, Any], tuple[str, ...]], T], + parent_call_names: tuple[str, ...] = (), + ) -> Iterator[T]: + """ + Recurse on tasks in metadata. Args: fn_call: @@ -137,24 +180,19 @@ def recurse_calls(self, fn_call, parent_call_names=tuple()): parent_call_names: Tuple of Parent call's names. e.g. (..., great grand parent, grand parent, parent, ...) + Returns: Generator object for all calls. """ - if not self.calls: - return - - for call_name, call_list in self.calls.items(): - for call in call_list: - if 'subWorkflowMetadata' in call: - subworkflow = call['subWorkflowMetadata'] - subworkflow_metadata = CromwellMetadata(subworkflow) - yield from subworkflow_metadata.recurse_calls( - fn_call, parent_call_names=parent_call_names + (call_name,) - ) - else: - yield fn_call(call_name, call, parent_call_names) + yield from ( + fn_call(ctx.call_name, ctx.call, ctx.parent_call_names) + for ctx in self.iter_call_contexts(parent_call_names=parent_call_names) + ) - def write_on_workflow_root(self, basename=DEFAULT_METADATA_BASENAME): + def write_on_workflow_root(self, basename: str = DEFAULT_METADATA_BASENAME) -> str | None: """Update metadata JSON file on metadata's output root directory.""" root = self.workflow_root @@ -162,115 +200,235 @@ def write_on_workflow_root(self, basename=DEFAULT_METADATA_BASENAME): metadata_file = os.path.join(root, basename) AutoURI(metadata_file).write(json.dumps(self._metadata, indent=4) + '\n') - logger.info('Wrote metadata file. {f}'.format(f=metadata_file)) + logger.info('Wrote metadata file. %s', metadata_file) return metadata_file - - def troubleshoot(self, show_completed_task=False, show_stdout=False): - """Troubleshoots a workflow. - Also, finds failure reasons and prints out STDERR and STDOUT. 
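The callback-style recursion above gives way to the context-based traversal; a usage sketch (the metadata path is a placeholder):

```python
# Sketch: walk every leaf task call, including subworkflow tasks, without
# the old fn_call callback indirection.
from caper.cromwell_metadata import CromwellMetadata

cm = CromwellMetadata('metadata.json')  # hypothetical local metadata file
for ctx in cm.iter_call_contexts():
    parents = '.'.join(ctx.parent_call_names) or '<root>'
    print(parents, ctx.call_name, ctx.call.get('executionStatus'))
```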
+ return None + + @staticmethod + def _get_running_window(call: dict[str, Any]) -> tuple[str | None, str | None]: + """Extract start/end times from Running execution event.""" + for event in call.get('executionEvents', ()): + desc = event.get('description', '') + if desc.startswith('Running'): + return event.get('startTime'), event.get('endTime') + return None, None + + def _format_troubleshoot_call( + self, + call_name: str, + call: dict[str, Any], + parent_call_names: tuple[str, ...], + *, + show_completed_task: bool, + show_stdout: bool, + ) -> str: + """Format a single call's troubleshooting info.""" + status = call.get('executionStatus') + if not (show_completed_task or status not in ('Done', 'Succeeded')): + return '' + + shard_index = call.get('shardIndex') + rc = call.get('returnCode') + job_id = call.get('jobId') + stdout = call.get('stdout') + stderr = call.get('stderr') + stderr_background = f'{stderr}.background' if stderr else None + run_start, run_end = self._get_running_window(call) + + parts: list[str] = [ + f'\n==== NAME={call_name}, STATUS={status}, PARENT={",".join(parent_call_names)}\n', + f'SHARD_IDX={shard_index}, RC={rc}, JOB_ID={job_id}\n', + f'START={run_start}, END={run_end}\n', + f'STDOUT={stdout}\nSTDERR={stderr}\n', + ] + + if stderr: + stderr_uri = AutoURI(stderr) + if stderr_uri.exists: + parts.append(f'STDERR_CONTENTS=\n{stderr_uri.read()}\n') + + if show_stdout and stdout: + stdout_uri = AutoURI(stdout) + if stdout_uri.exists: + parts.append(f'STDOUT_CONTENTS=\n{stdout_uri.read()}\n') + + if stderr_background: + bg_uri = AutoURI(stderr_background) + if bg_uri.exists: + parts.append(f'STDERR_BACKGROUND_CONTENTS=\n{bg_uri.read()}\n') + + return ''.join(parts) + + def troubleshoot(self, *, show_completed_task: bool = False, show_stdout: bool = False) -> str: + """ + Troubleshoot a workflow by finding failed calls and printing out STDERR and STDOUT. Args: show_completed_task: Show STDERR/STDOUT of completed tasks. show_stdout: Show failed task's STDOUT along with STDERR. + Returns: result: Troubleshooting report as a plain string. 
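A usage sketch for the rewritten troubleshooter (note the now keyword-only flags; the metadata path is a placeholder):

```python
# Sketch: print a troubleshooting report for a (possibly failed) workflow.
from caper.cromwell_metadata import CromwellMetadata

cm = CromwellMetadata('metadata.json')  # hypothetical metadata file
report = cm.troubleshoot(show_completed_task=False, show_stdout=True)
print(report)
```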
""" - result = ( - '* Started troubleshooting workflow: id={id}, status={status}\n'.format( - id=self.workflow_id, status=self.workflow_status - ) + header = ( + f'* Started troubleshooting workflow: id={self.workflow_id}, ' + f'status={self.workflow_status}\n' ) + lines: list[str] = [header] if self.workflow_status == 'Succeeded': - result += '* Workflow ran Successfully.\n' + lines.append('* Workflow ran Successfully.\n') + return ''.join(lines) + + if self.failures: + lines.append(f'* Found failures JSON object.\n{json.dumps(self.failures, indent=4)}\n') + + lines.append('* Recursively finding failures in calls (tasks)...\n') + for ctx in self.iter_call_contexts(): + msg = self._format_troubleshoot_call( + ctx.call_name, + ctx.call, + ctx.parent_call_names, + show_completed_task=show_completed_task, + show_stdout=show_stdout, + ) + if msg: + lines.append(msg) + + return ''.join(lines) + + @staticmethod + def _py_scalar(val: Any) -> Any: + """Convert numpy scalar to Python native type.""" + return val.item() if isinstance(val, np.generic) else val + + @staticmethod + def _read_tsv_dataframe(text: str) -> pd.DataFrame | None: + """Parse TSV text into DataFrame; return None if empty or unparseable.""" + try: + return pd.read_csv(io.StringIO(text), delimiter='\t') + except EmptyDataError: + return None + + def _gcs_file_size_cached(self, uri_str: str, cache: dict[str, int]) -> int: + """Get GCS file size with memoization.""" + if uri_str not in cache: + cache[uri_str] = GCSURI(uri_str).size + return cache[uri_str] + + def _collect_input_file_sizes( + self, + inputs: dict[str, Any], + *, + cache: dict[str, int], + ) -> dict[str, list[int]]: + """Collect file sizes for all GCS input files.""" + out: dict[str, list[int]] = defaultdict(list) - else: - if self.failures: - result += '* Found failures JSON object.\n{s}\n'.format( - s=json.dumps(self.failures, indent=4) - ) - - def troubleshoot_call(call_name, call, parent_call_names): - """Returns troubleshooting help message.""" - nonlocal show_completed_task - nonlocal show_stdout - status = call.get('executionStatus') - shard_index = call.get('shardIndex') - rc = call.get('returnCode') - job_id = call.get('jobId') - stdout = call.get('stdout') - stderr = call.get('stderr') - strerr_background = (stderr + '.background') if stderr else None - run_start = None - run_end = None - for event in call.get('executionEvents', []): - if event['description'].startswith('Running'): - run_start = event['startTime'] - run_end = event['endTime'] - break - - help_msg = '' - if show_completed_task or status not in ('Done', 'Succeeded'): - help_msg += ( - '\n==== NAME={name}, STATUS={status}, PARENT={p}\n' - 'SHARD_IDX={shard_idx}, RC={rc}, JOB_ID={job_id}\n' - 'START={start}, END={end}\n' - 'STDOUT={stdout}\nSTDERR={stderr}\n'.format( - name=call_name, - status=status, - p=','.join(parent_call_names), - start=run_start, - end=run_end, - shard_idx=shard_index, - rc=rc, - job_id=job_id, - stdout=stdout, - stderr=stderr, - ) - ) - if stderr: - if AutoURI(stderr).exists: - help_msg += 'STDERR_CONTENTS=\n{s}\n'.format( - s=AutoURI(stderr).read() - ) - if show_stdout and stdout: - if AutoURI(stdout).exists: - help_msg += 'STDOUT_CONTENTS=\n{s}\n'.format( - s=AutoURI(stdout).read() - ) - if strerr_background: - if AutoURI(strerr_background).exists: - help_msg += 'STDERR_BACKGROUND_CONTENTS=\n{s}\n'.format( - s=AutoURI(strerr_background).read() - ) - - return help_msg - - result += '* Recursively finding failures in calls (tasks)...\n' - for help_msg in 
self.recurse_calls(troubleshoot_call): - result += help_msg - - return result def gcp_monitor( self, - task_name=None, - excluded_cols=(0,), - stat_methods=DEFAULT_GCP_MONITOR_STAT_METHODS, - ): - """Recursively parse task(call)'s `monitoringLog` + task_name: str | None = None, + excluded_cols: Iterable[int] = (0,), + stat_methods: Iterable[str] = DEFAULT_GCP_MONITOR_STAT_METHODS, + ) -> list[dict[str, Any]]: + """ + Recursively parse task(call)'s `monitoringLog`. + (`monitoring.log` in task's execution directory) generated by `monitoring_script` defined in workflow options. This feature is gcp backend only. Check the following for details. - https://cromwell.readthedocs.io/en/stable/wf_options/Google/#google-pipelines-api-workflow-options + https://cromwell.readthedocs.io/en/stable/wf_options/Google/ - This functions calculates mean/max/min/last of each column in `monitoring.log` and return - them with task's input file sizes. + This function calculates mean/max/min/last of each column in `monitoring.log` and + returns them with task's input file sizes. Args: task_name: @@ -287,6 +445,7 @@ def gcp_monitor( `last` is to get the last element in data, which usually means the latest data. Some methods in pandas.DataFrame will return `nan` if the number of data row is too small (e.g. `std` requires more than one data row). 
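A usage sketch (only meaningful for gcp-backend metadata whose tasks carry `monitoringLog`; the path is a placeholder):

```python
# Sketch: summarize per-task resource stats parsed from monitoring.log.
from caper.cromwell_metadata import CromwellMetadata

cm = CromwellMetadata('metadata.json')  # hypothetical gcp-backend metadata
for entry in cm.gcp_monitor():
    print(entry['task_name'], entry['instance']['mem'], entry['stats']['max'])
```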
+ Returns: List of mean/std/max/min/last of columns along with size of input files. Note that @@ -351,91 +510,37 @@ def gcp_monitor( ... ] """ - file_size_cache = {} + file_size_cache: dict[str, int] = {} + excluded_set = set(excluded_cols) + stat_tuple = tuple(stat_methods) workflow_id = self.workflow_id - def gcp_monitor_call(call_name, call, parent_call_names): - nonlocal excluded_cols - nonlocal stat_methods - nonlocal file_size_cache - nonlocal workflow_id - nonlocal task_name - - if task_name and task_name != call_name: - return - - monitoring_log = call.get('monitoringLog') - if monitoring_log is None: - return - if not GCSURI(monitoring_log).is_valid: - # This feature is for GCSURI only. - return - if not GCSURI(monitoring_log).exists: - # Workaround for Cromwell-52's bug. - # Call-cached task has `monitoringLog`, but it does not exist. - return - - dataframe = pd.read_csv( - io.StringIO(GCSURI(monitoring_log).read()), delimiter='\t' + results: list[dict[str, Any]] = [] + for ctx in self.iter_call_contexts(): + if task_name and task_name != ctx.call_name: + continue + data = self._gcp_monitor_one_call( + ctx.call_name, + ctx.call, + workflow_id=workflow_id, + excluded_cols=excluded_set, + stat_methods=stat_tuple, + file_size_cache=file_size_cache, ) - rt_attrs = call.get('runtimeAttributes') - - data = { - 'workflow_id': workflow_id, - 'task_name': call_name, - 'shard_idx': call.get('shardIndex'), - 'status': call.get('executionStatus'), - 'attempt': call.get('attempt'), - 'instance': { - 'cpu': int(rt_attrs.get('cpu')), - 'disk': parse_cromwell_disks(rt_attrs.get('disks')), - 'mem': parse_cromwell_memory(rt_attrs.get('memory')), - }, - 'stats': {s: {} for s in stat_methods}, - 'input_file_sizes': defaultdict(list), - } - for i, col_name in enumerate(dataframe.columns): - if i in excluded_cols: - continue - for stat_method in stat_methods: - if dataframe.empty: - val = None - elif stat_method == 'last': - last_idx = dataframe.tail(1).index.item() - val = dataframe[col_name][last_idx] - else: - val = getattr(dataframe[col_name], stat_method)() - data['stats'][stat_method][col_name] = val - - for input_name, input_value in sorted(call['inputs'].items()): - file_sizes_dict = data['input_file_sizes'] - - def add_to_input_files_if_valid(file): - nonlocal file_size_cache - nonlocal file_sizes_dict - nonlocal input_name - - if GCSURI(file).is_valid: - file_size = file_size_cache.get(file) - if file_size is None: - file_size = GCSURI(file).size - file_size_cache[file] = file_size - file_sizes_dict[input_name].append(file_size) - - recurse_dict_value(input_value, add_to_input_files_if_valid) - - return data - - result = list(self.recurse_calls(gcp_monitor_call)) - - # a bit hacky way to recursively convert numpy type into python type - json_str = json.dumps(result, default=convert_type_np_to_py) - return json.loads(json_str) + if data is not None: + results.append(data) + + return results def cleanup( - self, dry_run=False, num_threads=URIBase.DEFAULT_NUM_THREADS, no_lock=False - ): - """Cleans up workflow's root output directory. + self, + *, + dry_run: bool = False, + num_threads: int = URIBase.DEFAULT_NUM_THREADS, + no_lock: bool = False, + ) -> None: + """ + Cleans up workflow's root output directory. Args: dry_run: @@ -450,7 +555,7 @@ def cleanup( root = self.workflow_root if not root: logger.error( - 'workflow\'s root directory cannot be found in metadata JSON. ' + "workflow's root directory cannot be found in metadata JSON. " 'Cannot proceed to cleanup outputs.' 
) return @@ -459,6 +564,4 @@ def cleanup( # num_threads is not available for AbsPath().rmdir() AbsPath(root).rmdir(dry_run=dry_run, no_lock=no_lock) else: - AutoURI(root).rmdir( - dry_run=dry_run, no_lock=no_lock, num_threads=num_threads - ) + AutoURI(root).rmdir(dry_run=dry_run, no_lock=no_lock, num_threads=num_threads) diff --git a/caper/cromwell_rest_api.py b/caper/cromwell_rest_api.py index deb3ba24..9ec8ebd4 100644 --- a/caper/cromwell_rest_api.py +++ b/caper/cromwell_rest_api.py @@ -1,57 +1,64 @@ +"""Cromwell REST API client.""" + +from __future__ import annotations + import fnmatch import io import logging +from collections.abc import Callable, Iterable +from http import HTTPStatus +from typing import Any, ParamSpec, TypedDict, TypeVar, cast from uuid import UUID import requests -from requests.exceptions import ConnectionError, HTTPError from .cromwell_metadata import CromwellMetadata logger = logging.getLogger(__name__) +T = TypeVar('T') +P = ParamSpec('P') + +# Sometimes, Cromwell has taken up to 300 seconds to deliver requests +DEFAULT_REQUEST_TIMEOUT = 300 + + +def requests_error_handler(func: Callable[P, T]) -> Callable[P, T | None]: + """ + Re-raise ConnectionError with help message. -def requests_error_handler(func): - """Re-raise ConnectionError with help message. Continue on HTTP 404 error (server is on but workflow doesn't exist). Otherwise, re-raise from None to hide nested tracebacks. """ - def wrapper(*args, **kwargs): + def wrapper(*args: P.args, **kwargs: P.kwargs) -> T | None: try: return func(*args, **kwargs) - except HTTPError as err: - if err.response.status_code == 404: - logger.error("Workflow doesn't seem to exist.") - return + except requests.exceptions.HTTPError as err: + if err.response.status_code == HTTPStatus.NOT_FOUND: + logger.exception("Method/workflow/endpoint doesn't exist.") + return None - message = ( - '{err}\n\n' - 'Cromwell server is on but got an HTTP error other than 404. ' - ).format(err=err) - raise HTTPError(message) from None + message = f'{err}\n\nCromwell server is on but got an HTTP error other than 404. ' + raise requests.exceptions.HTTPError(message) from None - except ConnectionError as err: + except requests.exceptions.ConnectionError as err: message = ( - '{err}\n\n' - 'Failed to connect to Cromwell server. ' - 'Check if Caper server is running. ' + 'Failed to connect to Cromwell server. Check if Caper server is running. ' 'Also check if hostname and port are correct. ' - 'method={method}, ' - 'url={url}'.format( - err=err, method=err.request.method, url=err.request.url - ) ) - raise ConnectionError(message) from None + if err.request: + message += f'method={err.request.method}, url={err.request.url}' + else: + message += 'No request information available.' + raise requests.exceptions.ConnectionError(message, request=err.request) from err return wrapper -def is_valid_uuid(workflow_id, version=4): - """To validate Cromwell's UUID (lowercase only). - This does not allow uppercase UUIDs. - """ +def is_valid_uuid(workflow_id: str, version: int = 4) -> bool: + """Validate Cromwell's UUID (lowercase only).""" if not isinstance(workflow_id, str): return False if not workflow_id.islower(): @@ -64,27 +71,26 @@ def is_valid_uuid(workflow_id, version=4): return True -def has_wildcard(workflow_id_or_label): - """Check if string or any element in list/tuple has - a wildcard (? or *). +def has_wildcard(workflow_id_or_label: str | Iterable[str] | None) -> bool: + """ + Check if string or any element in list/tuple has a wildcard (? or *). 
Args: workflow_id_or_label: Workflow ID (str) or label (str). Or array (list, tuple) of them. """ - if workflow_id_or_label is None: - return False - if isinstance(workflow_id_or_label, (list, tuple)): - for val in workflow_id_or_label: - if has_wildcard(val): - return True + if not workflow_id_or_label: return False - else: - return '?' in workflow_id_or_label or '*' in workflow_id_or_label + if isinstance(workflow_id_or_label, Iterable) and not isinstance(workflow_id_or_label, str): + return any(has_wildcard(val) for val in workflow_id_or_label) + + return '?' in workflow_id_or_label or '*' in workflow_id_or_label class CromwellRestAPI: + """Cromwell REST API client.""" + QUERY_URL = 'http://{hostname}:{port}' ENDPOINT_BACKEND = '/api/workflows/v1/backends' ENDPOINT_WORKFLOWS = '/api/workflows/v1/query' @@ -97,30 +103,47 @@ class CromwellRestAPI: DEFAULT_PORT = 8000 def __init__( - self, hostname=DEFAULT_HOSTNAME, port=DEFAULT_PORT, user=None, password=None - ): + self, + hostname: str = DEFAULT_HOSTNAME, + port: int = DEFAULT_PORT, + user: str | None = None, + password: str | None = None, + ) -> None: + """ + Initialize the Cromwell REST API client. + + Args: + hostname: + The hostname of the Cromwell server. + port: + The port of the Cromwell server. + user: + The user to authenticate with the Cromwell server. + password: + The password to authenticate with the Cromwell server. + """ self._hostname = hostname self._port = port - self._user = user self._password = password - self.__init_auth() + self._auth = (user, password) if user and password else None def submit( self, - source, - dependencies=None, - inputs=None, - options=None, - labels=None, - on_hold=False, - ): - """Submit a workflow. + source: str, + dependencies: str | None = None, + inputs: str | None = None, + options: str | None = None, + labels: str | None = None, + on_hold: bool = False, + ) -> dict | None: + """ + Submit a workflow. Returns: JSON Response from POST request submit a workflow """ - manifest = {} + manifest: dict = {} with open(source) as fp: manifest['workflowSource'] = io.StringIO(fp.read()) if dependencies: @@ -141,91 +164,113 @@ def submit( manifest['workflowOnHold'] = True r = self.__request_post(CromwellRestAPI.ENDPOINT_SUBMIT, manifest) - logger.debug('submit: {r}'.format(r=r)) + logger.debug('submit: %s', r) return r - def abort(self, workflow_ids=None, labels=None): - """Abort workflows matching workflow IDs or labels + def abort( + self, workflow_ids: list[str] | None = None, labels: dict[str, str] | None = None + ) -> list[dict] | None: + """ + Abort workflows matching workflow IDs or labels. 
Returns: List of JSON responses from POST request for aborting workflows """ - valid_workflow_ids = self.find_valid_workflow_ids( - workflow_ids=workflow_ids, labels=labels - ) + valid_workflow_ids = self.find_valid_workflow_ids(workflow_ids=workflow_ids, labels=labels) if valid_workflow_ids is None: - return + return None result = [] for workflow_id in valid_workflow_ids: - r = self.__request_post( - CromwellRestAPI.ENDPOINT_ABORT.format(wf_id=workflow_id) - ) + r = self.__request_post(CromwellRestAPI.ENDPOINT_ABORT.format(wf_id=workflow_id)) result.append(r) - logger.debug('abort: {r}'.format(r=result)) + logger.debug('abort: %s', result) return result - def release_hold(self, workflow_ids=None, labels=None): - """Release hold of workflows matching workflow IDs or labels + def release_hold( + self, workflow_ids: list[str] | None = None, labels: dict[str, str] | None = None + ) -> list[dict] | None: + """ + Release hold of workflows matching workflow IDs or labels. Returns: List of JSON responses from POST request for releasing hold of workflows """ - valid_workflow_ids = self.find_valid_workflow_ids( - workflow_ids=workflow_ids, labels=labels - ) + valid_workflow_ids = self.find_valid_workflow_ids(workflow_ids=workflow_ids, labels=labels) if valid_workflow_ids is None: - return + return None result = [] for workflow_id in valid_workflow_ids: - r = self.__request_post( - CromwellRestAPI.ENDPOINT_RELEASE_HOLD.format(wf_id=workflow_id) - ) + r = self.__request_post(CromwellRestAPI.ENDPOINT_RELEASE_HOLD.format(wf_id=workflow_id)) result.append(r) - logger.debug('release_hold: {r}'.format(r=result)) + logger.debug('release hold: %s', result) return result - def get_default_backend(self): - """Retrieve default backend name + def get_default_backend(self) -> str: + """ + Retrieve default backend name. Returns: Default backend name """ - return self.get_backends()['defaultBackend'] + if not (backends := self.get_backends()): + msg = "No backends found, unhandled failure since this shouldn't happen." + raise RuntimeError(msg) + return backends['defaultBackend'] - def get_backends(self): - """Retrieve available backend names and default backend name + class _BackendResponse(TypedDict): + """Represents JSON response from Cromwell REST API for backend response.""" + + defaultBackend: str + supportedBackends: list[str] + + def get_backends(self) -> _BackendResponse | None: + """ + Retrieve available backend names and default backend name. Returns: JSON response with keys "defaultBackend" and "supportedBackends" Example: {"defaultBackend":"Local","supportedBackends": ["Local","aws","gcp","pbs","sge","slurm"]} """ - return self.__request_get(CromwellRestAPI.ENDPOINT_BACKEND) + return cast( + 'CromwellRestAPI._BackendResponse | None', + self.__request_get(CromwellRestAPI.ENDPOINT_BACKEND), + ) def find_valid_workflow_ids( - self, workflow_ids=None, labels=None, exclude_subworkflow=True - ): - """Checks if workflow ID in `workflow_ids` are already valid UUIDs (without wildcards). + self, + workflow_ids: list[str] | None = None, + labels: dict[str, str] | None = None, + exclude_subworkflow: bool = True, + ) -> list[str] | None: + """ + Checks if the workflow IDs in `workflow_ids` are already valid UUIDs (without wildcards). + + If so, we don't have to send the server a query to get matching workflow IDs.
""" if not labels and workflow_ids and all(is_valid_uuid(i) for i in workflow_ids): return workflow_ids - else: - workflows = self.find( - workflow_ids=workflow_ids, - labels=labels, - exclude_subworkflow=exclude_subworkflow, - ) - if not workflows: - return - return [w['id'] for w in workflows] + workflows = self.find( + workflow_ids=workflow_ids, + labels=labels, + exclude_subworkflow=exclude_subworkflow, + ) + if not workflows: + return None + return [w['id'] for w in workflows] - def get_metadata(self, workflow_ids=None, labels=None, embed_subworkflow=False): - """Retrieve metadata for workflows matching workflow IDs or labels + def get_metadata( + self, + workflow_ids: list[str] | None = None, + labels: dict[str, str] | None = None, + embed_subworkflow: bool = False, + ) -> list[dict] | None: + """ + Retrieve metadata for workflows matching workflow IDs or labels. Args: workflow_ids: @@ -239,11 +284,9 @@ def get_metadata(self, workflow_ids=None, labels=None, embed_subworkflow=False): Metadata JSON generated with Cromwell run mode includes all subworkflows embedded in main workflow's JSON file. """ - valid_workflow_ids = self.find_valid_workflow_ids( - workflow_ids=workflow_ids, labels=labels - ) + valid_workflow_ids = self.find_valid_workflow_ids(workflow_ids=workflow_ids, labels=labels) if valid_workflow_ids is None: - return + return None result = [] for workflow_id in valid_workflow_ids: @@ -260,51 +303,52 @@ def get_metadata(self, workflow_ids=None, labels=None, embed_subworkflow=False): result.append(cm.metadata) return result - def get_labels(self, workflow_id): - """Get labels JSON for a specified workflow + def get_labels(self, workflow_id: str) -> dict | None: + """ + Get labels JSON for a specified workflow. Returns: Labels JSON for a workflow """ if workflow_id is None or not is_valid_uuid(workflow_id): - return + return None - r = self.__request_get( - CromwellRestAPI.ENDPOINT_LABELS.format(wf_id=workflow_id) - ) + r = self.__request_get(CromwellRestAPI.ENDPOINT_LABELS.format(wf_id=workflow_id)) if r is None: - return + return None return r['labels'] - def get_label(self, workflow_id, key): - """Get a label for a key in a specified workflow + def get_label(self, workflow_id: str, key: str) -> str | None: + """ + Get a label for a key in a specified workflow. Returns: Value for a specified key in labels JSON for a workflow """ labels = self.get_labels(workflow_id) if labels is None: - return + return None if key in labels: return labels[key] + return None - def update_labels(self, workflow_id, labels): - """Update labels for a specified workflow with - a list of (key, val) tuples - """ + def update_labels(self, workflow_id: str, labels: dict[str, str]) -> dict | None: + """Update labels for a specified workflow with a list of (key, val) tuples.""" if workflow_id is None or labels is None: - return - r = self.__request_patch( - CromwellRestAPI.ENDPOINT_LABELS.format(wf_id=workflow_id), labels - ) - logger.debug('update_labels: {r}'.format(r=r)) + return None + r = self.__request_patch(CromwellRestAPI.ENDPOINT_LABELS.format(wf_id=workflow_id), labels) + logger.debug('update labels: %s', r) return r def find_with_wildcard( - self, workflow_ids=None, labels=None, exclude_subworkflow=True - ): - """Retrieves all workflows from Cromwell server. - And then find matching workflows by ID or labels. 
+ self, + workflow_ids: list[str] | None = None, + labels: list[tuple[str, str]] | None = None, + exclude_subworkflow: bool = True, + ) -> list[dict]: + """ + Retrieves all workflows from Cromwell server, then finds workflows by ID or labels. + Wildcards (? and *) are allowed for both parameters. """ result = [] @@ -347,16 +391,20 @@ def find_with_wildcard( result.append(workflow) break logger.debug( - 'find_with_wildcard: workflow_ids={workflow_ids}, ' - 'labels={labels}, result={result}'.format( - workflow_ids=workflow_ids, labels=labels, result=result - ) + 'find with wildcard: workflow_ids=%s, labels=%s, result=%s', + workflow_ids, + labels, + result, ) return result - def find_by_workflow_ids(self, workflow_ids=None, exclude_subworkflow=True): - """Finds workflows by exactly matching workflow IDs (UUIDs). + def find_by_workflow_ids( + self, workflow_ids: list[str] | None = None, exclude_subworkflow: bool = True + ) -> list[dict]: + """ + Finds workflows by exactly matching workflow IDs (UUIDs). + Does OR search for a list of workflow IDs. Invalid UUID in `workflows_ids` will be ignored without warning. Wildcards (? and *) are not allowed. @@ -365,14 +413,15 @@ def find_by_workflow_ids(self, workflow_ids=None, exclude_subworkflow=True): workflow_ids: List of workflow ID (UUID) strings. Lower-case only (Cromwell uses lower-case UUIDs). + exclude_subworkflow: + Whether to exclude subworkflows. + Returns: List of matched workflow JSONs. """ if has_wildcard(workflow_ids): - raise ValueError( - 'Wildcards are not allowed in workflow_ids. ' - 'ids={ids}'.format(ids=workflow_ids) - ) + msg = f'Wildcards are not allowed in workflow_ids. ids={workflow_ids}' + raise ValueError(msg) result = [] if workflow_ids: @@ -389,57 +438,58 @@ def find_by_workflow_ids(self, workflow_ids=None, exclude_subworkflow=True): if resp and resp['results']: result.extend(resp['results']) - logger.debug( - 'find_by_workflow_ids: workflow_ids={workflow_ids}, ' - 'result={result}'.format(workflow_ids=workflow_ids, result=result) - ) + logger.debug('find by workflow ids: workflow_ids=%s, result=%s', workflow_ids, result) return result - def find_by_labels(self, labels=None, exclude_subworkflow=True): - """Finds workflows by exactly matching labels (key, value) tuples. - Does OR search for a list of label key/value pairs. - Wildcards (? and *) are not allowed. + def find_by_labels( + self, labels: list[tuple[str, str]] | None = None, exclude_subworkflow: bool = True + ) -> list[dict]: + """ + Finds workflows by exactly matching labels (key, value) tuples. + + Does OR search for a list of label key/value pairs. Wildcards (? and *) are not allowed. Args: labels: List of labels (key/value pairs). + exclude_subworkflow: + Whether to exclude subworkflows. + Returns: List of matched workflow JSONs. """ if has_wildcard(labels): - raise ValueError( - 'Wildcards are not allowed in labels. ' - 'labels={labels}'.format(labels=labels) - ) + msg = f'Wildcards are not allowed in labels. labels={labels}' + raise ValueError(msg) result = [] if labels: - # reformat labels with `:` notation. exclude pairs with empty value. - labels = [ - '{key}:{val}'.format(key=key, val=val) for key, val in labels if val - ] resp = self.__request_get( CromwellRestAPI.ENDPOINT_WORKFLOWS, params={ 'additionalQueryResultFields': 'labels', 'includeSubworkflows': not exclude_subworkflow, - 'labelor': labels, + # reformat labels with `:` notation. exclude pairs with empty value. 
+ 'labelor': [f'{key}:{val}' for key, val in labels if val], }, ) if resp and resp['results']: result.extend(resp['results']) - logger.debug( - 'find_by_labels: labels={labels}, result={result}'.format( - labels=labels, result=result - ) - ) + logger.debug('find by labels: labels=%s, result=%s', labels, result) return result - def find(self, workflow_ids=None, labels=None, exclude_subworkflow=True): - """Wrapper for the following three find functions. + def find( + self, + workflow_ids: list[str] | None = None, + labels: list[tuple[str, str]] | None = None, + exclude_subworkflow: bool = True, + ) -> list[dict]: + """ + Wrapper for the following three find functions. + - find_with_wildcard - find_by_workflow_ids - find_by_labels @@ -458,13 +508,12 @@ def find(self, workflow_ids=None, labels=None, exclude_subworkflow=True): List of labels (key/value pairs). exclude_subworkflow: Exclude subworkflows. + Returns: List of matched workflow JSONs. """ wildcard_found_in_workflow_ids = has_wildcard(workflow_ids) - wildcard_found_in_labels = has_wildcard( - [val for key, val in labels] if labels else None - ) + wildcard_found_in_labels = has_wildcard([val for key, val in labels] if labels else None) if wildcard_found_in_workflow_ids or wildcard_found_in_labels: return self.find_with_wildcard( workflow_ids=workflow_ids, @@ -492,62 +541,65 @@ def find(self, workflow_ids=None, labels=None, exclude_subworkflow=True): return result - def __init_auth(self): - """Init auth object""" + def __init_auth(self) -> None: + """Init auth object.""" if self._user is not None and self._password is not None: self._auth = (self._user, self._password) else: self._auth = None @requests_error_handler - def __request_get(self, endpoint, params=None): - """GET request + def __request_get(self, endpoint: str, params: dict[str, Any] | None = None) -> dict: + """ + Send a GET request to the Cromwell REST API. Returns: JSON response """ - url = ( - CromwellRestAPI.QUERY_URL.format(hostname=self._hostname, port=self._port) - + endpoint - ) + url = CromwellRestAPI.QUERY_URL.format(hostname=self._hostname, port=self._port) + endpoint resp = requests.get( - url, auth=self._auth, params=params, headers={'accept': 'application/json'} + url, + auth=self._auth, + params=params, + timeout=DEFAULT_REQUEST_TIMEOUT, + headers={'accept': 'application/json'}, ) resp.raise_for_status() return resp.json() @requests_error_handler - def __request_post(self, endpoint, manifest=None): - """POST request + def __request_post(self, endpoint: str, manifest: dict[str, Any] | None = None) -> dict: + """ + Send a POST request to the Cromwell REST API. Returns: JSON response """ - url = ( - CromwellRestAPI.QUERY_URL.format(hostname=self._hostname, port=self._port) - + endpoint - ) + url = CromwellRestAPI.QUERY_URL.format(hostname=self._hostname, port=self._port) + endpoint resp = requests.post( - url, files=manifest, auth=self._auth, headers={'accept': 'application/json'} + url, + files=manifest, + auth=self._auth, + timeout=DEFAULT_REQUEST_TIMEOUT, + headers={'accept': 'application/json'}, ) resp.raise_for_status() return resp.json() @requests_error_handler - def __request_patch(self, endpoint, data): - """POST request + def __request_patch(self, endpoint: str, data: dict[str, Any] | None = None) -> dict | None: + """ + Send a PATCH request to the Cromwell REST API. 
Returns: JSON response """ - url = ( - CromwellRestAPI.QUERY_URL.format(hostname=self._hostname, port=self._port) - + endpoint - ) + url = CromwellRestAPI.QUERY_URL.format(hostname=self._hostname, port=self._port) + endpoint resp = requests.patch( url, data=data, auth=self._auth, + timeout=DEFAULT_REQUEST_TIMEOUT, headers={'accept': 'application/json', 'content-type': 'application/json'}, ) resp.raise_for_status() diff --git a/caper/cromwell_workflow_monitor.py b/caper/cromwell_workflow_monitor.py index a4031595..aa8de1e5 100644 --- a/caper/cromwell_workflow_monitor.py +++ b/caper/cromwell_workflow_monitor.py @@ -1,16 +1,36 @@ +"""Module contains classes to monitor Cromwell's workflow status.""" + import logging import re import time +from collections.abc import Callable, Iterable +from typing import Any -from .cromwell_metadata import CromwellMetadata -from .cromwell_rest_api import CromwellRestAPI +from caper.cromwell_metadata import CromwellMetadata +from caper.cromwell_rest_api import CromwellRestAPI logger = logging.getLogger(__name__) class WorkflowStatusTransition: - def __init__(self, regex, status_transitions, auto_write_metadata=False): + """ + Wrapper for a particular status transition that can happen in the workflow graph. + + Status transitions are parsed from Cromwell's stderr, and are represented by a tuple of + previous and next statuses. This class's parse() method uses the regular expression the + class was initialized with to match its transition from the stderr output. + """ + + def __init__( + self, + regex: re.Pattern, + status_transitions: Iterable[tuple[str | None, str]], + *, + auto_write_metadata: bool = False, + ) -> None: """ + Track a workflow's various status transitions. + Args: regex: Regular expression to catch workflow's status transition from @@ -24,13 +44,20 @@ def __init__(self, regex, status_transitions, auto_write_metadata=False): e.g. [('Submitted', 'Running'),] Iterating over this list, only the first valid transition, where a previous status is matched, found will be used. + auto_write_metadata: + Whether to write metadata on workflow's root directory when a status transition + is detected. """ self._regex = regex self._status_transitions = status_transitions self._auto_write_metadata = auto_write_metadata - def parse(self, line, workflow_status_map): + def parse( + self, line: str, workflow_status_map: dict[str, str] + ) -> tuple[str | None, str | None, bool]: """ + Parse a line to catch a workflow status transition. + Args: line: Line to be parsed to catch status transition. @@ -38,11 +65,12 @@ def parse(self, line, workflow_status_map): Dict of workflow_id (key) and previus_status (value) pairs. This is used to get previous status of a workflow. If None then previous status will be ignored. + Returns: workflow_id: Workflow's string ID. status: - New status after transition. + New status after transition. None if no transition is detected. auto_write_metadata: For this status transition metadataJSON file should be written on workflow's root output directory. 
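For reference, a minimal sketch of how the parse() contract documented above behaves. This is illustrative only and not part of the patch: the stderr line and UUID below are made up, and the regex is a simplified stand-in for the WORKFLOW_UUID_REGEX-based patterns used later in this file.

import re

# One transition rule: any workflow seen in a 'submitted' stderr line
# moves from no previous status (None) to 'Submitted'.
transition = WorkflowStatusTransition(
    regex=re.compile(r'workflow ([0-9a-f-]{36}) submitted'),
    status_transitions=((None, 'Submitted'),),
)

# An empty workflow_status_map means prev_status resolves to None,
# so the (None, 'Submitted') transition applies.
line = 'workflow 01234567-89ab-cdef-0123-456789abcdef submitted'
wf_id, status, write_metadata = transition.parse(line, workflow_status_map={})
# wf_id == '01234567-89ab-cdef-0123-456789abcdef'
# status == 'Submitted'; write_metadata is False (auto_write_metadata defaults to False)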
@@ -50,47 +78,46 @@ def parse(self, line, workflow_status_map): r = re.findall(self._regex, line) if r: wf_id = r[0].strip() - if wf_id in workflow_status_map: - prev_status = workflow_status_map[wf_id] - else: - prev_status = None + prev_status = workflow_status_map.get(wf_id) for st1, st2 in self._status_transitions: if st1 is None or st1 == prev_status: if st1 != st2: - logger.info( - 'Workflow: id={id}, status={status}'.format( - id=wf_id, status=st2 - ) - ) + logger.info('Workflow: id=%s, status=%s', wf_id, st2) return wf_id, st2, self._auto_write_metadata break return None, None, False +WORKFLOW_UUID_REGEX = r'\b[0-9a-f]{8}\b-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-\b[0-9a-f]{12}\b' + + class CromwellWorkflowMonitor: - """Class constants include several regular expressions to catch + """ + Monitor Cromwell's STDERR to track workflow/task status changes. + + Class constants include several regular expressions to catch status changes of workflow/task by Cromwell's STDERR (logging level>=INFO). """ ALL_STATUS_TRANSITIONS = ( WorkflowStatusTransition( - regex=r'workflow (\b[0-9a-f]{8}\b-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-\b[0-9a-f]{12}\b) submitted', + regex=re.compile(rf'workflow ({WORKFLOW_UUID_REGEX}) submitted'), status_transitions=((None, 'Submitted'),), ), WorkflowStatusTransition( - regex=r'started WorkflowActor-(\b[0-9a-f]{8}\b-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-\b[0-9a-f]{12}\b)', + regex=re.compile(rf'started WorkflowActor-({WORKFLOW_UUID_REGEX})'), status_transitions=((None, 'Running'),), ), WorkflowStatusTransition( - regex=r'Workflow (\b[0-9a-f]{8}\b-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-\b[0-9a-f]{12}\b) failed', + regex=re.compile(rf'Workflow ({WORKFLOW_UUID_REGEX}) failed'), status_transitions=((None, 'Failed'),), ), WorkflowStatusTransition( - regex=r'Abort requested for workflow (\b[0-9a-f]{8}\b-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-\b[0-9a-f]{12}\b)\.', + regex=re.compile(rf'Abort requested for workflow ({WORKFLOW_UUID_REGEX})\.'), status_transitions=((None, 'Aborting'),), ), WorkflowStatusTransition( - regex=r'WorkflowActor-(\b[0-9a-f]{8}\b-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-\b[0-9a-f]{12}\b) is in a terminal state', + regex=re.compile(rf'WorkflowActor-({WORKFLOW_UUID_REGEX}) is in a terminal state'), status_transitions=( ('Failed', 'Failed'), ('Aborting', 'Aborted'), @@ -99,7 +126,7 @@ class CromwellWorkflowMonitor: auto_write_metadata=True, ), WorkflowStatusTransition( - regex=r'Workflow actor for (\b[0-9a-f]{8}\b-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-\b[0-9a-f]{12}\b) completed with status', + regex=re.compile(rf'Workflow actor for ({WORKFLOW_UUID_REGEX}) completed with status'), status_transitions=( ('Failed', 'Failed'), ('Aborting', 'Aborted'), @@ -109,13 +136,16 @@ class CromwellWorkflowMonitor: ), ) - RE_CROMWELL_SERVER_START = r'Cromwell \d+ service started on' - RE_TASK_START = r'\[UUID\((\b[0-9a-f]{8})\)(.+):(.+):(\d+)\]: job id: (.+)' - RE_TASK_STATUS_CHANGE = ( - r'\[UUID\((\b[0-9a-f]{8})\)(.+):(.+):(\d+)\]: Status change from (.+) to (.+)' + RE_CROMWELL_SERVER_START = re.compile(r'Cromwell \d+ service started on') + RE_TASK_START = re.compile(r'\[UUID\((\b[0-9a-f]{8})\)(.+):(.+):(\d+)]: job id: (.+)') + RE_TASK_STATUS_CHANGE = re.compile( + r'\[UUID\((\b[0-9a-f]{8})\)(.+):(.+):(\d+)]: Status change from (.+) to (.+)' + ) + RE_TASK_CALL_CACHED = re.compile( + r'\[UUID\((\b[0-9a-f]{8})\)]: ' + r'Job results retrieved \(CallCached\): \'(.+)\' \(scatter index: (.+), attempt (\d+)\)' ) - RE_TASK_CALL_CACHED = r'\[UUID\((\b[0-9a-f]{8})\)\]: Job results retrieved 
\(CallCached\): \'(.+)\' \(scatter index: (.+), attempt (\d+)\)' - RE_SUBWORKFLOW_FOUND = r'(\b[0-9a-f]{8}\b-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-\b[0-9a-f]{12}\b)-SubWorkflowActor-SubWorkflow' + RE_SUBWORKFLOW_FOUND = re.compile(rf'({WORKFLOW_UUID_REGEX})-SubWorkflowActor-SubWorkflow') MAX_RETRY_WRITE_METADATA = 3 INTERVAL_RETRY_WRITE_METADATA = 10.0 @@ -124,15 +154,18 @@ class CromwellWorkflowMonitor: def __init__( self, - is_server=False, - server_hostname=DEFAULT_SERVER_HOSTNAME, - server_port=DEFAULT_SERVER_PORT, - embed_subworkflow=False, - auto_write_metadata=False, - on_status_change=None, - on_server_start=None, - ): - """Parses STDERR from Cromwell to updates workflow/task information. + *, + is_server: bool = False, + server_hostname: str = DEFAULT_SERVER_HOSTNAME, + server_port: int = DEFAULT_SERVER_PORT, + embed_subworkflow: bool = False, + auto_write_metadata: bool = False, + on_status_change: Callable[[dict[str, Any]], None] | None = None, + on_server_start: Callable[[], None] | None = None, + ) -> None: + """ + Parses STDERR from Cromwell to update workflow/task information. + + Also writes/updates metadata.json on each workflow's root directory. Args: @@ -141,13 +174,10 @@ def __init__( for server mode only. It tries to write/update metadata JSON file on workflow's root directory when there is any status change of it. - metadata_basename: - Basename for metadata JSON file to be written on each workflow's - root directory. server_hostname: Cromwell server hostname for Cromwell REST API. This is used to get metadata JSON of a workflow. - server_hostname: + server_port: Cromwell server port for Cromwell REST API. This is used to get metadata JSON of a workflow. embed_subworkflow: @@ -174,28 +204,25 @@ def __init__( This function should not take parameter. """ self._is_server = is_server - - if self._is_server: - self._cromwell_rest_api = CromwellRestAPI( - hostname=server_hostname, port=server_port - ) - else: - self._cromwell_rest_api = None - + self._cromwell_rest_api = ( + CromwellRestAPI(hostname=server_hostname, port=server_port) if is_server else None + ) self._embed_subworkflow = embed_subworkflow self._auto_write_metadata = auto_write_metadata self._on_status_change = on_status_change self._on_server_start = on_server_start - self._workflow_status_map = dict() + self._workflow_status_map = {} self._subworkflows = set() self._is_server_started = False - def is_server_started(self): + def is_server_started(self) -> bool: + """Check if the Cromwell server has started.""" return self._is_server_started - def update(self, stderr): - """Update workflows by parsing Cromwell's stderr. + def update(self, stderr: str) -> None: + """ + Update workflows by parsing Cromwell's stderr.
Args: stderr: @@ -205,17 +232,17 @@ def update(self, stderr): if self._is_server: self._update_server_start(stderr) - updated_workflows, workflows_to_write_metadata = self._update_workflows(stderr) + _updated_workflows, workflows_to_write_metadata = self._update_workflows(stderr) self._update_subworkflows(stderr) self._update_tasks(stderr) for w in workflows_to_write_metadata: self._write_metadata(w) - def _update_server_start(self, stderr): + def _update_server_start(self, stderr: str) -> None: if not self._is_server_started: for line in stderr.split('\n'): - r1 = re.findall(CromwellWorkflowMonitor.RE_CROMWELL_SERVER_START, line) + r1 = re.findall(self.RE_CROMWELL_SERVER_START, line) if r1: self._is_server_started = True if self._on_server_start: @@ -223,12 +250,12 @@ def _update_server_start(self, stderr): logger.info('Cromwell server started. Ready to take submissions.') break - def _update_workflows(self, stderr): + def _update_workflows(self, stderr: str) -> tuple[set[str], set[str]]: """Updates workflow status by parsing Cromwell's stderr lines.""" updated_workflows = set() workflows_to_write_metadata = set() for line in stderr.split('\n'): - for st_transitions in CromwellWorkflowMonitor.ALL_STATUS_TRANSITIONS: + for st_transitions in self.ALL_STATUS_TRANSITIONS: workflow_id, status, auto_write_metadata = st_transitions.parse( line, self._workflow_status_map ) @@ -240,34 +267,34 @@ def _update_workflows(self, stderr): return updated_workflows, workflows_to_write_metadata - def _update_subworkflows(self, stderr): + def _update_subworkflows(self, stderr: str) -> None: for line in stderr.split('\n'): - r_sub = re.findall(CromwellWorkflowMonitor.RE_SUBWORKFLOW_FOUND, line) + r_sub = re.findall(self.RE_SUBWORKFLOW_FOUND, line) if r_sub: subworkflow_id = r_sub[0] if subworkflow_id not in self._subworkflows: - logger.info('Subworkflow found: {id}'.format(id=subworkflow_id)) + logger.info('Subworkflow found: %s', subworkflow_id) self._subworkflows.add(subworkflow_id) - def _update_tasks(self, stderr): + def _update_tasks(self, stderr: str) -> None: """Check if workflow's task status changed by parsing Cromwell's stderr lines.""" for line in stderr.split('\n'): r_common = None - r_start = re.findall(CromwellWorkflowMonitor.RE_TASK_START, line) + job_id = None + status = None + r_start = re.findall(self.RE_TASK_START, line) if r_start: r_common = r_start[0] status = 'Started' job_id = r_common[4] - r_callcached = re.findall(CromwellWorkflowMonitor.RE_TASK_CALL_CACHED, line) + r_callcached = re.findall(self.RE_TASK_CALL_CACHED, line) if r_callcached: r_common = r_callcached[0] status = 'CallCached' job_id = None - r_status_change = re.findall( - CromwellWorkflowMonitor.RE_TASK_STATUS_CHANGE, line - ) + r_status_change = re.findall(self.RE_TASK_STATUS_CHANGE, line) if r_status_change: r_common = r_status_change[0] status = r_common[5] @@ -284,48 +311,51 @@ def _update_tasks(self, stderr): shard_idx = -1 retry = int(r_common[3]) - msg = 'Task: id={id}, task={name}:{shard_idx}, retry={retry}, status={status}'.format( - id=workflow_id, - name=task_name, - shard_idx=shard_idx, - retry=retry - 1, - status=status, + logger.info( + 'Task: id=%s, task=%s:%s, retry=%s, status=%s job_id=%s', + workflow_id, + task_name, + shard_idx, + retry - 1, + status, + job_id, ) - if job_id: - msg += ', job_id={job_id}'.format(job_id=job_id) - logger.info(msg) - def _find_workflow_id_from_short_id(self, short_id): + def _find_workflow_id_from_short_id(self, short_id: str) -> str | None: for w in 
self._subworkflows.union(set(self._workflow_status_map.keys())): if w.startswith(short_id): return w + return None - def _write_metadata(self, workflow_id): + def _write_metadata(self, workflow_id: str) -> None: """Update metadata on Cromwell'e exec root.""" - if not self._is_server or not self._auto_write_metadata: + if not self._is_server or not self._auto_write_metadata or not self._cromwell_rest_api: return if workflow_id in self._subworkflows and self._embed_subworkflow: - logger.debug( - 'Skipped writing metadata JSON file of subworkflow {wf_id}'.format( - wf_id=workflow_id - ) - ) + logger.debug('Skipped writing metadata JSON file of subworkflow %s', workflow_id) return - for trial in range(CromwellWorkflowMonitor.MAX_RETRY_WRITE_METADATA + 1): + for trial in range(self.MAX_RETRY_WRITE_METADATA + 1): try: - time.sleep(CromwellWorkflowMonitor.INTERVAL_RETRY_WRITE_METADATA) + time.sleep(self.INTERVAL_RETRY_WRITE_METADATA) metadata = self._cromwell_rest_api.get_metadata( workflow_ids=[workflow_id], embed_subworkflow=self._embed_subworkflow, - )[0] + ) + if metadata is None: + logger.error( + 'Failed to retrieve metadata from Cromwell server. id=%s', workflow_id + ) + continue + metadata = metadata[0] if self._on_status_change: self._on_status_change(metadata) cm = CromwellMetadata(metadata) cm.write_on_workflow_root() except Exception: - logger.error( - 'Failed to retrieve metadata from Cromwell server. ' - 'trial={t}, id={wf_id}'.format(t=trial, wf_id=workflow_id) + logger.exception( + 'Failed to retrieve metadata from Cromwell server. trial=%s, id=%s', + trial, + workflow_id, ) continue break diff --git a/caper/dict_tool.py b/caper/dict_tool.py index d43c309a..c37c95f2 100644 --- a/caper/dict_tool.py +++ b/caper/dict_tool.py @@ -1,4 +1,5 @@ -"""dictTool: merge/split/flatten/unflatten dict +""" +dictTool: merge/split/flatten/unflatten dict. Author: Jin Lee (leepc12@gmail.com) at ENCODE-DCC @@ -6,23 +7,20 @@ import re from collections import defaultdict - -try: - from collections.abc import MutableMapping -except AttributeError: - from collections import MutableMapping +from collections.abc import Callable, Mapping, MutableMapping +from typing import Any, overload -def merge_dict(a, b): - """Merges b into a recursively. This mutates a and overwrites - items in b on a for conflicts. +def merge_dict[U: MutableMapping[str, Any], W: Mapping[str, Any]](a: U, b: W) -> U: + """ + Merges b into a recursively. - Ref: https://stackoverflow.com/questions/7204805/dictionaries - -of-dictionaries-merge/7205107#7205107 + This mutates a and overwrites items in b on a for conflicts. + Ref: https://stackoverflow.com/questions/7204805/dictionaries-of-dictionaries-merge/7205107#7205107 """ for key in b: if key in a: - if isinstance(a[key], dict) and isinstance(b[key], dict): + if isinstance(a[key], MutableMapping) and isinstance(b[key], MutableMapping): merge_dict(a[key], b[key]) elif a[key] == b[key]: pass @@ -33,14 +31,35 @@ def merge_dict(a, b): return a -def flatten_dict(d, reducer=None, parent_key=()): - """Flattens dict into single-level-tuple-keyed dict with - {(tuple of keys of parents and self): value} +@overload +def flatten_dict[W: Mapping[str, Any]]( + d: W, + reducer: str, + parent_key: tuple[str, ...] = (), +) -> dict[str, Any]: ... +@overload +def flatten_dict[W: Mapping[str, Any]]( + d: W, + reducer: None = None, + parent_key: tuple[str, ...] = (), +) -> dict[tuple[str, ...], Any] | W: ... 
+def flatten_dict[W: Mapping[str, Any]]( + d: W, reducer: str | None = None, parent_key: tuple[str, ...] = () +) -> dict[str, Any] | dict[tuple[str, ...], Any] | W: + """ + Flattens dict into single-level-tuple-keyed dict. + + Result looks like: {(tuple of keys of parents and self): value} Args: + d: + Dictionary/Mapping to flatten. reducer: Character to join keys in a tuple. If None, returns with key as a tuple. + parent_key: + Optional tuple of keys of parents to start from. + Returns: dict of { (key_lvl1, key_lvl2, key_lvl3, ...): value @@ -49,19 +68,28 @@ def flatten_dict(d, reducer=None, parent_key=()): items = [] for k, v in d.items(): new_key = parent_key + (k if isinstance(k, tuple) else (k,)) - if isinstance(v, MutableMapping): + if isinstance(v, Mapping): items.extend(flatten_dict(v, parent_key=new_key).items()) else: items.append((new_key, v)) if reducer: - return {reducer.join(k): v for k, v in type(d)(items).items()} - else: - return type(d)(items) + return {reducer.join(k): v for k, v in items} + return type(d)(items) -def recurse_dict_value(d, fnc): - if isinstance(d, dict): - for k, v in d.items(): + +def recurse_dict_value(d: Mapping[str, Any], fnc: Callable[[Any], None]) -> None: + """ + Recursively apply a function to the values of a dictionary. + + Args: + d: + Dictionary to recurse on. + fnc: + Function to apply to the values of the dictionary. + """ + if isinstance(d, Mapping): + for v in d.values(): recurse_dict_value(v, fnc) elif isinstance(d, (list, tuple)): @@ -71,8 +99,17 @@ def recurse_dict_value(d, fnc): fnc(d) -def unflatten_dict(d_flat): - """Unflattens single-level-tuple-keyed dict into dict""" +def unflatten_dict[U: MutableMapping[str, Any]](d_flat: U) -> U: + """ + Unflattens single-level-tuple-keyed dict into dict. + + Args: + d_flat: + Dictionary/Mapping to unflatten. + + Returns: + Dictionary/Mapping of the same concrete type as d_flat. + """ result = type(d_flat)() for k_tuple, v in d_flat.items(): d_curr = result @@ -85,14 +122,17 @@ def unflatten_dict(d_flat): return result -def split_dict(d, rules=None): - """Splits dict according to "rule" +def split_dict[W: Mapping[str, Any]](d: W, rules: list[tuple[str, str]] | None = None) -> list[W]: + r""" + Splits dict according to "rule". Returns: List of split dict Args: - rule: + d: + Dictionary/Mapping to split. + rules: A list of tuple (RULE_NAME: REGEX) If a key name in an JSON object matches with this REGEX @@ -202,12 +242,17 @@ def split_dict(d, rules=None): d_others[k_tuple] = v if d_others: d_ = unflatten_dict(d_others) - result = [d_] + result + result = [d_, *result] return result -def dict_to_dot_str(d, parent_key='digraph D', indent='', base_indent=''): - """Dict will be converted into DOT like the followings: +def dict_to_dot_str[W: Mapping[str, Any]]( + d: W, parent_key: str = 'digraph D', indent: str = '', base_indent: str = '' +) -> str: + r""" + Converts dict into DOT string. + + Dict will be converted into DOT like the followings: 1) Value string will not be double-quotted in DOT. - make sure to escape double-quotes in a string with special characters (e.g. 
whitespace, # and ;) @@ -272,10 +317,10 @@ def dict_to_dot_str(d, parent_key='digraph D', indent='', base_indent=''): """ result = '' if d is None: - return '{}{};\n'.format(base_indent, parent_key) - elif isinstance(d, str): - return '{}{} = {};\n'.format(base_indent, parent_key, d) - elif isinstance(d, dict): + return f'{base_indent}{parent_key};\n' + if isinstance(d, str): + return f'{base_indent}{parent_key} = {d};\n' + if isinstance(d, Mapping): result += base_indent + parent_key + ' {\n' for k, v in d.items(): result += dict_to_dot_str( @@ -283,8 +328,6 @@ def dict_to_dot_str(d, parent_key='digraph D', indent='', base_indent=''): ) result += base_indent + '}\n' else: - raise ValueError( - 'Unsupported data type: {} ' - '(only str and dict/JSON are allowed).'.format(type(d)) - ) + msg = f'Unsupported data type: {type(d)} (only str and dict/JSON are allowed).' + raise TypeError(msg) return result diff --git a/caper/hocon_string.py index bcac2218..042169db 100644 --- a/caper/hocon_string.py +++ b/caper/hocon_string.py @@ -1,3 +1,5 @@ +"""HOCON string parsing and manipulation with include statement handling.""" + import hashlib import json import logging @@ -20,15 +22,18 @@ HOCONSTRING_INCLUDE_KEY = 'HOCONSTRING_INCLUDE_{id}' -def escape_double_quotes(double_quotes): +def escape_double_quotes(double_quotes: str) -> str: + """Escape double quotes with backslash.""" return double_quotes.replace('"', '\\"') -def unescape_double_quotes(escaped_double_quotes): +def unescape_double_quotes(escaped_double_quotes: str) -> str: + """Unescape backslash-escaped double quotes.""" return escaped_double_quotes.replace('\\"', '"') -def is_valid_include(include): +def is_valid_include(include: str) -> bool: + """Check if include statement matches valid HOCON include format.""" is_valid_format = False for regex in RE_HOCON_INCLUDE: if re.findall(regex, include): @@ -38,13 +43,14 @@ def is_valid_include(include): return is_valid_format -def get_include_key(include_str): +def get_include_key(include_str: str) -> str: """Use md5sum hash of the whole include statement string for a key.""" - return hashlib.md5(include_str.encode()).hexdigest() + return hashlib.md5(include_str.encode()).hexdigest() # noqa: S324 -def wrap_includes(hocon_str): +def wrap_includes(hocon_str: str) -> str: """Convert `include` statement string into key = val format. + Returns '{key} = "{double_quote_escaped_val}"'. """ for regex in RE_HOCON_INCLUDE: @@ -52,42 +58,39 @@ if '\\"' in include: continue - logger.debug('Found include in HOCON: {include}'.format(include=include)) + logger.debug('Found include in HOCON: %s', include) hocon_str = hocon_str.replace( include, - '{key} = "{val}"'.format( - key=HOCONSTRING_INCLUDE_KEY.format(id=get_include_key(include)), - val=escape_double_quotes(include), - ), + f'{HOCONSTRING_INCLUDE_KEY.format(id=get_include_key(include))} = "{escape_double_quotes(include)}"', ) return hocon_str -def unwrap_includes(key_val_str): - """Convert '{key} = "{val}"" formatted string to the original `include` statement string. +def unwrap_includes(key_val_str: str) -> str | None: + """ + Convert '{key} = "{val}"' formatted string to the original `include` statement string. + Args: - key: - HOCONSTRING_INCLUDE_KEY with `id` as md5sum hash of the original - `include` statement string. - val: - Double-quote-escaped `include` statement string.
+ key_val_str: + String in '{key} = "{val}"' format where key is HOCONSTRING_INCLUDE_KEY + with `id` as md5sum hash of the original `include` statement string, + and val is the double-quote-escaped `include` statement string. """ val = re.findall(RE_HOCONSTRING_INCLUDE_VALUE, key_val_str) if val: if len(val) > 1: - raise ValueError( - 'Found multiple matches. Wrong include key=val format? {val}'.format( - val=val - ) - ) + msg = f'Found multiple matches. Wrong include key=val format? {val}' + raise ValueError(msg) return unescape_double_quotes(val[0]) + return None class HOCONString: - def __init__(self, hocon_str): - """Find an `include` statement (VALUE) in HOCON string and then convert it - into a HOCONSTRING_INCLUDE_KEY="VALUE" pair in HOCON. + """HOCON string with include statement handling.""" + + def __init__(self, hocon_str: str) -> None: + """Find an `include` statement and convert it into a key="VALUE" pair. Double-quotes will be escaped with double slashes. Then the VALUE is kept as it is as a value and can be recovered later when @@ -101,18 +104,22 @@ def __init__(self, hocon_str): passed to Cromwell. """ if not isinstance(hocon_str, str): - raise ValueError('HOCONString() takes str type only.') + msg = 'HOCONString() takes str type only.' + raise TypeError(msg) self._hocon_str = wrap_includes(hocon_str) - def __str__(self): + def __str__(self) -> str: # noqa: D105 return self.get_contents() @classmethod - def from_dict(cls, d, include=''): - """Create HOCONString from dict. + def from_dict(cls, d: dict, include: str = '') -> 'HOCONString': + """ + Create HOCONString from dict. Args: + d: + Dictionary to convert to HOCONString. include: `include` statement to be added to the top of the HOCONString. """ @@ -121,15 +128,15 @@ def from_dict(cls, d, include=''): if include: if not is_valid_include(include): - raise ValueError( - 'Wrong HOCON include format. {include}'.format(include=include) - ) + msg = f'Wrong HOCON include format. {include}' + raise ValueError(msg) hocon_str = NEW_LINE.join([include, hocon_str]) return cls(hocon_str=hocon_str) - def to_dict(self, with_include=True): - """Convert HOCON string into dict. + def to_dict(self, with_include: bool = True) -> dict: + """ + Convert HOCON string into dict. Args: with_include: @@ -137,24 +144,24 @@ def to_dict(self, with_include=True): under key HOCONSTRING_INCLUDE_KEY. Otherwise, `include` statements will be excluded. """ - if with_include: - hocon_str = self._hocon_str - else: - hocon_str = self.get_contents(with_include=False) + hocon_str = self._hocon_str if with_include else self.get_contents(with_include=False) c = ConfigFactory.parse_string(hocon_str) j = HOCONConverter.to_json(c) return json.loads(j) - def merge(self, b, update=False): - """Merge self with b and then returns a plain string of merged. + def merge(self, b: 'HOCONString | dict | str', update: bool = False) -> str: + """ + Merge self with b and then returns a plain string of merged. + Args: b: HOCONString, dict, str to be merged. b's `include` statement will always be ignored. update: If True then replace self with a merged one. + Returns: String of merged HOCONs. 
""" @@ -165,7 +172,8 @@ def merge(self, b, update=False): elif isinstance(b, dict): d = b else: - raise TypeError('Unsupported type {t}'.format(t=type(b))) + msg = f'Unsupported type {type(b)}' + raise TypeError(msg) self_d = self.to_dict() merge_dict(self_d, d) @@ -178,9 +186,8 @@ def merge(self, b, update=False): return HOCONString(hocon_str).get_contents() - def get_contents(self, with_include=True): - """Check if `include` statement is stored as a plain string. - If exists, converts it back to HOCON `include` statement. + def get_contents(self, with_include: bool = True) -> str: + """Check if `include` statement is stored and convert back if needed. Args: with_include: (renamed/changed from without_include) @@ -191,11 +198,7 @@ def get_contents(self, with_include=True): hocon_str = self._hocon_str for include_key_val in re.findall(RE_HOCONSTRING_INCLUDE, self._hocon_str): - logger.debug( - 'Found include key in HOCONString: {include_key_val}'.format( - include_key_val=include_key_val - ) - ) + logger.debug('Found include key in HOCONString: %s', include_key_val) if with_include: original_include_str = unwrap_includes(include_key_val) if original_include_str: diff --git a/caper/hpc.py b/caper/hpc.py index 81c6dfba..79969ab0 100644 --- a/caper/hpc.py +++ b/caper/hpc.py @@ -1,12 +1,22 @@ -"""Caper's HPC Wrapper based on job engine's CLI (shell command). -e.g. sbatch, squeue, qsub, qstat """ +Caper's HPC Wrapper based on job engine's CLI (shell command). + +Supports/wraps sbatch, squeue, qsub, qstat +""" + +from __future__ import annotations + import logging import os import subprocess from abc import ABC, abstractmethod from pathlib import Path from tempfile import NamedTemporaryFile +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + import builtins + from collections.abc import Iterable logger = logging.getLogger(__name__) @@ -14,43 +24,45 @@ ILLEGAL_CHARS_IN_JOB_NAME = [',', ' ', '\t'] -def get_user_from_os_environ(): +def get_user_from_os_environ() -> str: + """Returns username from OS environment.""" return os.environ['USER'] -def make_bash_script_contents(contents): +def make_bash_script_contents(contents: str) -> str: + """Wraps a shell command in a bash shebang.""" return f'#!/bin/bash\n{contents}\n' -def make_caper_leader_job_name(job_name): - """Check if job name contains Comma, TAB or whitespace. - They are not allowed since they can be used as separators. +def make_caper_leader_job_name(job_name: str) -> str: + """ + Check if job name contains Comma, TAB or whitespace. + + These characters are not allowed since they can be used as separators. """ for illegal_char in ILLEGAL_CHARS_IN_JOB_NAME: if illegal_char in job_name: - raise ValueError( - 'Illegal character {chr} in job name {job}'.format( - chr=illegal_char, job=job_name - ) - ) + msg = f'Illegal character {illegal_char} in job name {job_name}' + raise ValueError(msg) return CAPER_LEADER_JOB_NAME_PREFIX + job_name class HpcWrapper(ABC): - def __init__( - self, - leader_job_resource_param=[], - ): + """Base class for HPC job engine wrapper.""" + + def __init__(self, leader_job_resource_param: Iterable[str] = ()) -> None: """Base class for HPC job engine wrapper.""" - self._leader_job_resource_param = leader_job_resource_param + self._leader_job_resource_param = list(leader_job_resource_param) + + def submit(self, job_name: str, caper_run_command: Iterable[str]) -> str: + """ + Submits a caper leader job to HPC (e.g. sbatch, qsub). 
- def submit(self, job_name, caper_run_command): - """Submits a caper leader job to HPC (e.g. sbatch, qsub). Such leader job will be prefixed with CAPER_LEADER_JOB_NAME_PREFIX. Returns output STDOUT from submission command. """ - home_dir = f'{str(Path.home())}{os.sep}' + home_dir = f'{Path.home()!s}{os.sep}' with NamedTemporaryFile(prefix=home_dir, suffix='.sh') as shell_script: contents = make_bash_script_contents(' '.join(caper_run_command)) shell_script.write(contents.encode()) @@ -58,48 +70,54 @@ def submit(self, job_name, caper_run_command): return self._submit(job_name, shell_script.name) - def list(self): - """Filters out non-caper jobs from the job list keeping the first line (header). + def list(self) -> str: + """ + Filters out non-caper jobs from the job list, keeping the first line (header). + + Returns the resulting output STDOUT. """ - result = [] lines = self._list().split('\n') # keep header - result.append(lines[0]) + result = [lines[0]] # filter out non-caper lines logger.info('Filtering out non-caper leader jobs...') - for line in lines[1:]: - if CAPER_LEADER_JOB_NAME_PREFIX in line: - result.append(line) + result.extend([line for line in lines[1:] if CAPER_LEADER_JOB_NAME_PREFIX in line]) return '\n'.join(result) - def abort(self, job_ids): + def abort(self, job_ids: Iterable[str]) -> str: """Returns output STDOUT from job engine's abort command (e.g. scancel, qdel).""" return self._abort(job_ids) @abstractmethod - def _submit(self, job_name, shell_script): - pass + def _submit(self, job_name: str, shell_script: str) -> str: + """Submits a caper leader job to HPC (e.g. sbatch, qsub).""" + msg = f'{self.__class__.__name__} does not support submit command.' + raise NotImplementedError(msg) - def _list(self): - pass + @abstractmethod + def _list(self) -> str: + """Returns output STDOUT from job engine's list command.""" + msg = f'{self.__class__.__name__} does not support list command.' + raise NotImplementedError(msg) @abstractmethod - def _abort(self, job_ids): + def _abort(self, job_ids: Iterable[str]) -> str: """Sends SIGINT (or SIGTERM) to Caper for a graceful shutdown.""" - pass + msg = f'{self.__class__.__name__} does not support abort command.'
+ raise NotImplementedError(msg) - def _run_command(self, command): + def _run_command(self, command: builtins.list[str]) -> str: """Runs a shell command line and returns STDOUT.""" - logger.info(f'Running shell command: {" ".join(command)}') + logger.info('Running shell command: %s', ' '.join(command)) return ( - subprocess.run( + subprocess.run( # noqa: S603 command, stdout=subprocess.PIPE, env=os.environ, + check=True, ) .stdout.decode() .strip() @@ -107,36 +125,36 @@ class SlurmWrapper(HpcWrapper): - DEFAULT_LEADER_JOB_RESOURCE_PARAM = ['-t', '48:00:00', '--mem', '4G'] + """Wrapper for SLURM job engine.""" + + DEFAULT_LEADER_JOB_RESOURCE_PARAM = ('-t', '48:00:00', '--mem', '4G') def __init__( self, - leader_job_resource_param=DEFAULT_LEADER_JOB_RESOURCE_PARAM, - slurm_partition=None, - slurm_account=None, - ): - super().__init__( - leader_job_resource_param=leader_job_resource_param, - ) + leader_job_resource_param: Iterable[str] = DEFAULT_LEADER_JOB_RESOURCE_PARAM, + slurm_partition: str | None = None, + slurm_account: str | None = None, + ) -> None: + """Initialize the shared parameters for SLURM job engine.""" + super().__init__(leader_job_resource_param=leader_job_resource_param) slurm_partition_param = ['-p', slurm_partition] if slurm_partition else [] slurm_account_param = ['-A', slurm_account] if slurm_account else [] self._slurm_extra_param = slurm_partition_param + slurm_account_param - def _submit(self, job_name, shell_script): - command = ( - ['sbatch'] - + self._leader_job_resource_param - + self._slurm_extra_param - + [ - '--export=ALL', - '-J', - make_caper_leader_job_name(job_name), - shell_script, - ] - ) + def _submit(self, job_name: str, shell_script: str) -> str: + command = [ + 'sbatch', + *self._leader_job_resource_param, + *self._slurm_extra_param, + '--export=ALL', + '-J', + make_caper_leader_job_name(job_name), + shell_script, + ] return self._run_command(command) - def _list(self): + def _list(self) -> str: + """List SLURM jobs.""" return self._run_command( [ 'squeue', @@ -146,96 +164,122 @@ def _list(self): ] ) - def _abort(self, job_ids): - """Notes: --full is necessary to correctly send SIGINT to the leader job (Cromwell process). - Sending SIGTERM may result in an immediate shutdown of the leaderjob on some clusters. - SIGINT is much better to trigger a graceful shutdown. + def _abort(self, job_ids: Iterable[str]) -> str: """ - return self._run_command(['scancel', '--full', '--signal=SIGINT'] + job_ids) + Abort a SLURM job. + + Notes: --full is necessary to correctly send SIGINT to the leader job (Cromwell + process). Sending SIGTERM may result in an immediate shutdown of the leader job on some + clusters. SIGINT is much better to trigger a graceful shutdown.
+ """ + return self._run_command(['scancel', '--full', '--signal=SIGINT', *job_ids]) class SgeWrapper(HpcWrapper): - DEFAULT_LEADER_JOB_RESOURCE_PARAM = ['-l', 'h_rt=48:00:00,h_vmem=4G'] + """Wrapper for SGE job engine.""" + + DEFAULT_LEADER_JOB_RESOURCE_PARAM = ('-l', 'h_rt=48:00:00,h_vmem=4G') def __init__( self, - leader_job_resource_param=DEFAULT_LEADER_JOB_RESOURCE_PARAM, - sge_queue=None, - ): - super().__init__( - leader_job_resource_param=leader_job_resource_param, - ) + leader_job_resource_param: Iterable[str] = DEFAULT_LEADER_JOB_RESOURCE_PARAM, + sge_queue: str | None = None, + ) -> None: + """Initialize the shared parameters for SGE job engine.""" + super().__init__(leader_job_resource_param=leader_job_resource_param) self._sge_queue_param = ['-q', sge_queue] if sge_queue else [] - def _submit(self, job_name, shell_script): - command = ( - ['qsub'] - + self._leader_job_resource_param - + self._sge_queue_param - + ['-V', '-terse', '-N', make_caper_leader_job_name(job_name), shell_script] - ) + def _submit(self, job_name: str, shell_script: str) -> str: + """Submit a SGE job.""" + command = [ + 'qsub', + *self._leader_job_resource_param, + *self._sge_queue_param, + '-V', + '-terse', + '-N', + make_caper_leader_job_name(job_name), + shell_script, + ] return self._run_command(command) - def _list(self): + def _list(self) -> str: + """List SGE jobs.""" return self._run_command(['qstat', '-u', get_user_from_os_environ()]) - def _abort(self, job_ids): - return self._run_command(['qdel'] + job_ids) + def _abort(self, job_ids: Iterable[str]) -> str: + """Abort a SGE job.""" + return self._run_command(['qdel', *job_ids]) class PbsWrapper(HpcWrapper): - DEFAULT_LEADER_JOB_RESOURCE_PARAM = ['-l', 'walltime=48:00:00,mem=4gb'] + """Wrapper for PBS job engine.""" + + DEFAULT_LEADER_JOB_RESOURCE_PARAM = ('-l', 'walltime=48:00:00,mem=4gb') def __init__( self, - leader_job_resource_param=DEFAULT_LEADER_JOB_RESOURCE_PARAM, - pbs_queue=None, - ): - super().__init__( - leader_job_resource_param=leader_job_resource_param, - ) + leader_job_resource_param: Iterable[str] = DEFAULT_LEADER_JOB_RESOURCE_PARAM, + pbs_queue: str | None = None, + ) -> None: + """Initialize the shared parameters for PBS job engine.""" + super().__init__(leader_job_resource_param=leader_job_resource_param) self._pbs_queue_param = ['-q', pbs_queue] if pbs_queue else [] - def _submit(self, job_name, shell_script): - command = ( - ['qsub'] - + self._leader_job_resource_param - + self._pbs_queue_param - + ['-V', '-N', make_caper_leader_job_name(job_name), shell_script] - ) + def _submit(self, job_name: str, shell_script: str) -> str: + """Submit a PBS job.""" + command = [ + 'qsub', + *self._leader_job_resource_param, + *self._pbs_queue_param, + '-V', + '-N', + make_caper_leader_job_name(job_name), + shell_script, + ] return self._run_command(command) - def _list(self): + def _list(self) -> str: + """List PBS jobs.""" return self._run_command(['qstat', '-u', get_user_from_os_environ()]) - def _abort(self, job_ids): - return self._run_command(['qdel', '-W', '30'] + job_ids) + def _abort(self, job_ids: Iterable[str]) -> str: + """Abort a PBS job.""" + return self._run_command(['qdel', '-W', '30', *job_ids]) class LsfWrapper(HpcWrapper): - DEFAULT_LEADER_JOB_RESOURCE_PARAM = ['-W', '2880', '-M', '4g'] + """Wrapper for LSF job engine.""" + + DEFAULT_LEADER_JOB_RESOURCE_PARAM = ('-W', '2880', '-M', '4g') def __init__( self, - leader_job_resource_param=DEFAULT_LEADER_JOB_RESOURCE_PARAM, - lsf_queue=None, - ): - super().__init__( - 
leader_job_resource_param=leader_job_resource_param, - ) + leader_job_resource_param: Iterable[str] = DEFAULT_LEADER_JOB_RESOURCE_PARAM, + lsf_queue: str | None = None, + ) -> None: + """Initialize the shared parameters for LSF job engine.""" + super().__init__(leader_job_resource_param=leader_job_resource_param) self._lsf_queue_param = ['-q', lsf_queue] if lsf_queue else [] - def _submit(self, job_name, shell_script): - command = ( - ['bsub'] - + self._leader_job_resource_param - + self._lsf_queue_param - + ['-env', 'all', '-J', make_caper_leader_job_name(job_name), shell_script] - ) + def _submit(self, job_name: str, shell_script: str) -> str: + """Submit a LSF job.""" + command = [ + 'bsub', + *self._leader_job_resource_param, + *self._lsf_queue_param, + '-env', + 'all', + '-J', + make_caper_leader_job_name(job_name), + shell_script, + ] return self._run_command(command) - def _list(self): + def _list(self) -> str: + """List LSF jobs.""" return self._run_command(['bjobs', '-u', get_user_from_os_environ()]) - def _abort(self, job_ids): - return self._run_command(['bkill'] + job_ids) + def _abort(self, job_ids: Iterable[str]) -> str: + """Abort a LSF job.""" + return self._run_command(['bkill', *job_ids]) diff --git a/caper/nb_subproc_thread.py b/caper/nb_subproc_thread.py index bd73ab7d..ec8a792a 100644 --- a/caper/nb_subproc_thread.py +++ b/caper/nb_subproc_thread.py @@ -1,22 +1,28 @@ +"""Non-blocking subprocess thread with streaming stdout/stderr.""" + import logging import signal import time +from collections.abc import Callable from subprocess import PIPE, Popen from threading import Thread +from typing import IO, TypeGuard logger = logging.getLogger(__name__) interrupted = False terminated = False -def sigterm_handler(signo, frame): - global terminated +def sigterm_handler(signo: int, frame: object) -> None: # noqa: ARG001 + """Handle SIGTERM signal.""" + global terminated # noqa: PLW0603 logger.info('Received SIGTERM.') terminated = True -def sigint_handler(signo, frame): - global interrupted +def sigint_handler(signo: int, frame: object) -> None: # noqa: ARG001 + """Handle SIGINT signal.""" + global interrupted # noqa: PLW0603 logger.info('Received SIGINT.') interrupted = True @@ -25,29 +31,33 @@ def sigint_handler(signo, frame): signal.signal(signal.SIGINT, sigint_handler) -def is_fileobj_open(fileobj): - return fileobj and not getattr(fileobj, 'closed', False) +def is_fileobj_open(fileobj: IO[bytes] | None) -> TypeGuard[IO[bytes]]: + """Check if file object is not None and not closed.""" + return fileobj is not None and not getattr(fileobj, 'closed', False) class NBSubprocThread(Thread): + """Non-blocking subprocess thread with streaming stdout/stderr.""" + DEFAULT_POLL_INTERVAL_SEC = 0.01 DEFAULT_SUBPROCESS_NAME = 'Subprocess' DEFAULT_STOP_SIGNAL = signal.SIGTERM def __init__( self, - args, - cwd=None, - stdin=None, - on_poll=None, - on_stdout=None, - on_stderr=None, - on_finish=None, - poll_interval=DEFAULT_POLL_INTERVAL_SEC, - quiet=False, - subprocess_name=DEFAULT_SUBPROCESS_NAME, - ): - """Non-blocking STDOUT/STDERR streaming for subprocess.Popen(). 
+        args: list[str],
+        cwd: str | None = None,
+        stdin: IO[bytes] | None = None,
+        on_poll: Callable[[], object] | None = None,
+        on_stdout: Callable[[str], object] | None = None,
+        on_stderr: Callable[[str], object] | None = None,
+        on_finish: Callable[[], object] | None = None,
+        poll_interval: float = DEFAULT_POLL_INTERVAL_SEC,
+        quiet: bool = False,
+        subprocess_name: str = DEFAULT_SUBPROCESS_NAME,
+    ) -> None:
+        """
+        Non-blocking STDOUT/STDERR streaming for subprocess.Popen().

         This class makes two daemonized threads
         for nonblocking streaming of STDOUT/STDERR.
@@ -118,41 +128,49 @@ def __init__(
         self._returnvalue = None

     @property
-    def stdout(self):
+    def stdout(self) -> str:
         return ''.join(self._stdout_list)

     @property
-    def stderr(self):
+    def stderr(self) -> str:
         return ''.join(self._stderr_list)

     @property
-    def returncode(self):
-        """Returns subprocess.Popen.returncode.
+    def returncode(self) -> int | None:
+        """Return subprocess.Popen.returncode.
+
         None if not completed or any general Exception occurs.
         """
         return self._returncode

     @property
-    def status(self):
-        """Updated with return value of on_poll() for every polling.
+    def status(self) -> object:
+        """Return the current status from callback return values.
+
+        Updated with return value of on_poll() for every polling.
         Also updated with return value of on_stdout() or on_stderr()
         if their return values are not None.
         """
         return self._status

     @property
-    def returnvalue(self):
-        """Updated with return value of on_finish()
+    def returnvalue(self) -> object:
+        """Return the value from on_finish() callback.
+
+        Updated with return value of on_finish()
         which is called when a thread is terminated.
         None if thread is still running so that
        on_finish() has not been called yet.
         This works like an actual return value of
         the function ran inside a thread.
         """
         return self._returnvalue

-    def stop(self, stop_signal=DEFAULT_STOP_SIGNAL, wait=False):
-        """Subprocess will be teminated after next polling.
+    def stop(self, stop_signal: signal.Signals = DEFAULT_STOP_SIGNAL, wait: bool = False) -> None:
+        """
+        Subprocess will be terminated after next polling.

         Args:
+            stop_signal:
+                Signal to send to the subprocess for termination.
             wait:
                 Wait for a valid returncode (which is not None).
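For context, the public surface described above is typically exercised like this (a sketch; the command itself is arbitrary):

```python
# Illustrative sketch of NBSubprocThread usage; the subprocess command is arbitrary.
import signal
from caper.nb_subproc_thread import NBSubprocThread

th = NBSubprocThread(
    args=['bash', '-c', 'echo hello; sleep 60'],
    on_stdout=lambda text: print('STDOUT:', text, end=''),
)
th.start()                                     # threading.Thread.start()
th.stop(stop_signal=signal.SIGINT, wait=True)  # graceful shutdown; block until returncode is set
print(th.returncode, th.stdout)
```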
""" @@ -160,11 +178,7 @@ def stop(self, stop_signal=DEFAULT_STOP_SIGNAL, wait=False): self._stop_signal = stop_signal if wait: if self._returncode is None: - logger.info( - '{name}: waiting for a graceful shutdown...'.format( - name=self._subprocess_name - ) - ) + logger.info('%s: waiting for a graceful shutdown...', self._subprocess_name) while True: if self._returncode is not None: return @@ -172,19 +186,16 @@ def stop(self, stop_signal=DEFAULT_STOP_SIGNAL, wait=False): def _popen( self, - args, - cwd=None, - stdin=None, - on_poll=None, - on_stdout=None, - on_stderr=None, - on_finish=None, - ): + args: list[str], + cwd: str | None = None, + stdin: IO[bytes] | None = None, + on_poll: Callable[[], object] | None = None, + on_stdout: Callable[[str], object] | None = None, + on_stderr: Callable[[str], object] | None = None, + on_finish: Callable[[], object] | None = None, + ) -> None: """Wrapper for subprocess.Popen().""" - global terminated - global interrupted - - def read_stdout(stdout_bytes): + def read_stdout(stdout_bytes: bytes) -> None: text = stdout_bytes.decode() if text: self._stdout_list.append(text) @@ -193,7 +204,7 @@ def read_stdout(stdout_bytes): if ret_on_stdout is not None: self._status = ret_on_stdout - def read_stderr(stderr_bytes): + def read_stderr(stderr_bytes: bytes) -> None: text = stderr_bytes.decode() if text: self._stderr_list.append(text) @@ -202,12 +213,12 @@ def read_stderr(stderr_bytes): if ret_on_stderr is not None: self._status = ret_on_stderr - def read_from_stdout_obj(stdout): + def read_from_stdout_obj(stdout: IO[bytes] | None) -> None: if is_fileobj_open(stdout): for line in iter(stdout.readline, b''): read_stdout(line) - def read_from_stderr_obj(stderr): + def read_from_stderr_obj(stderr: IO[bytes] | None) -> None: if is_fileobj_open(stderr): for line in iter(stderr.readline, b''): read_stderr(line) @@ -215,13 +226,9 @@ def read_from_stderr_obj(stderr): self._stop_it = False try: - p = Popen(args, stdout=PIPE, stderr=PIPE, cwd=cwd, stdin=stdin) - thread_stdout = Thread( - target=read_from_stdout_obj, args=(p.stdout,), daemon=True - ) - thread_stderr = Thread( - target=read_from_stderr_obj, args=(p.stderr,), daemon=True - ) + p = Popen(args, stdout=PIPE, stderr=PIPE, cwd=cwd, stdin=stdin) # noqa: S603 + thread_stdout = Thread(target=read_from_stdout_obj, args=(p.stdout,), daemon=True) + thread_stderr = Thread(target=read_from_stderr_obj, args=(p.stderr,), daemon=True) thread_stdout.start() thread_stderr.start() @@ -234,7 +241,7 @@ def read_from_stderr_obj(stderr): self._returncode = p.poll() break - if terminated or interrupted or self._stop_it and self._stop_signal: + if terminated or interrupted or (self._stop_it and self._stop_signal): if terminated: stop_signal = signal.SIGTERM elif interrupted: @@ -243,8 +250,10 @@ def read_from_stderr_obj(stderr): stop_signal = self._stop_signal logger.info( - f'Sending signal {stop_signal} to subprocess. ' - f'name: {self._subprocess_name}, pid: {p.pid}' + 'Sending signal %s to subprocess. name: %s, pid: %s', + stop_signal, + self._subprocess_name, + p.pid, ) p.send_signal(stop_signal) @@ -253,9 +262,9 @@ def read_from_stderr_obj(stderr): time.sleep(self._poll_interval) - except Exception as e: + except Exception: if not self._quiet: - logger.error(e, exc_info=True) + logger.exception('Thread failed') self._returncode = 127 else: @@ -271,12 +280,6 @@ def read_from_stderr_obj(stderr): if not self._quiet: if self._returncode: - logger.error( - '{name} failed. 
returncode={rc}'.format( - name=self._subprocess_name, rc=self._returncode - ) - ) + logger.error('%s failed. returncode=%s', self._subprocess_name, self._returncode) else: - logger.info( - '{name} finished successfully.'.format(name=self._subprocess_name) - ) + logger.info('%s finished successfully.', self._subprocess_name) diff --git a/caper/resource_analysis.py b/caper/resource_analysis.py index 82eb6e1d..8c5ba009 100644 --- a/caper/resource_analysis.py +++ b/caper/resource_analysis.py @@ -1,11 +1,15 @@ +"""Resource analysis for Cromwell workflow execution.""" + import fnmatch import json import logging from abc import ABC, abstractmethod from collections import defaultdict +from collections.abc import Callable +from typing import Any import numpy as np -from matplotlib import pyplot +from matplotlib import pyplot as plt from matplotlib.backends.backend_pdf import PdfPages from sklearn import linear_model @@ -17,6 +21,8 @@ class ResourceAnalysis(ABC): """ + Resource analysis base class. + Class constants: DEFAULT_REDUCE_IN_FILE_VARS: Function to be used for reducing x vector. @@ -28,19 +34,23 @@ class ResourceAnalysis(ABC): DEFAULT_REDUCE_IN_FILE_VARS = sum DEFAULT_TARGET_RESOURCES = ('stats.max.mem', 'stats.max.disk') - def __init__(self): - """Solves y = f(X) in a statistical way where + def __init__(self) -> None: + """ + Solves y = f(X) in a statistical way. + + Where: X is a matrix vector of input file sizes (e.g. size_each([bam, bowtie2_index_tar, ...])) y is a vector of resources (e.g. [max_mem, max_disk, ...]) """ self._task_resources = [] @property - def task_resources(self): + def task_resources(self) -> list[dict[str, Any]]: return self._task_resources - def collect_resource_data(self, metadata_jsons): - """Collect resource data from parsing metadata JSON files. + def collect_resource_data(self, metadata_jsons: list[str | dict | CromwellMetadata]) -> None: + """ + Collect resource data from parsing metadata JSON files. self._task_resources is an extended (across all workflows) list of resource monitoring result from CromwellMetadata.gcp_monitor(): @@ -68,12 +78,14 @@ def collect_resource_data(self, metadata_jsons): def analyze( self, - in_file_vars=None, - reduce_in_file_vars=DEFAULT_REDUCE_IN_FILE_VARS, - target_resources=DEFAULT_TARGET_RESOURCES, - plot_pdf=None, - ): - """Find and analyze all tasks. + in_file_vars: dict[str, list[str]] | None = None, + reduce_in_file_vars: Callable[[list[int]], int] = DEFAULT_REDUCE_IN_FILE_VARS, + target_resources: tuple[str, ...] = DEFAULT_TARGET_RESOURCES, + plot_pdf: str | None = None, + ) -> dict[str, dict[str, dict[str, list[float] | dict[str, list[float]]]]]: + """ + Find and analyze all tasks. + Run `self.collect_resource_data()` first to collect resource data before analysis. Args: @@ -88,6 +100,7 @@ def analyze( Keys (in dot notation) to make vector y. plot_pdf: Local file name for a PDF plot. 
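As a quick illustration of the collect-then-analyze flow described here (a sketch; the metadata file names and the task key are hypothetical):

```python
# Sketch only; metadata paths and the task name are hypothetical.
from caper.resource_analysis import LinearResourceAnalysis

analysis = LinearResourceAnalysis()
# Gather per-task resource data from Cromwell metadata JSON files.
analysis.collect_resource_data(['metadata1.json', 'metadata2.json'])
# Fit resources (max mem/disk by default) against summed input file sizes.
result = analysis.analyze(plot_pdf='resource_analysis.pdf')
print(result['atac.align']['coeffs'])  # per-metric (coefficients, intercept)
```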
+
+        Returns:
             Results in a dict form:
             {
                 TASK_NAME: {
@@ -106,7 +119,7 @@
         if in_file_vars:
             all_tasks = in_file_vars.keys()
         else:
-            all_tasks = list(set([task['task_name'] for task in self.task_resources]))
+            all_tasks = list({task['task_name'] for task in self.task_resources})

         for task_name in all_tasks:
             result[task_name] = self.analyze_task(
@@ -117,20 +130,26 @@
                 plot_pp=plot_pp,
             )

-        if plot_pdf:
+        if plot_pdf and plot_pp:
             plot_pp.close()

         return result

     def analyze_task(
         self,
-        task_name,
-        in_file_vars=None,
-        reduce_in_file_vars=DEFAULT_REDUCE_IN_FILE_VARS,
-        target_resources=DEFAULT_TARGET_RESOURCES,
-        plot_pp=None,
-    ):
-        """Does resource analysis on a task.
+        task_name: str,
+        in_file_vars: list[str] | None = None,
+        reduce_in_file_vars: Callable[[list[int]], int] = DEFAULT_REDUCE_IN_FILE_VARS,
+        target_resources: tuple[str, ...] = DEFAULT_TARGET_RESOURCES,
+        plot_pp: PdfPages | None = None,
+    ) -> dict[str, dict[str, list[float] | dict[str, list[float]]]]:
+        """
+        Does resource analysis on a task.
+
+        Args:
+            task_name:
+                Name of the task to analyze.
+
         Run `self.collect_resource_data()` first to collect resource data
         before analysis. Then you can use such collected data for each task.
@@ -200,7 +219,7 @@
         x_data = defaultdict(list)
         y_data = defaultdict(list)

-        logger.info('Analyzing task={task}'.format(task=task_name))
+        logger.info('Analyzing task=%s', task_name)

         # first look at task's optional/empty input file vars across all workflows
         # e.g. SE (single-ended) pipeline runs does not have fastqs_R2
         # but we want to mix both SE/PE (paired-ended) data.
@@ -249,12 +268,12 @@
             )
             # transpose to reduce(sum by default) file sizes
             # over all in_file_vars
-            tranposed = np.transpose([vec for vec in x_data.values()])
+            tranposed = np.transpose(list(x_data.values()))
             reduced = [reduce_in_file_vars(vec) for vec in tranposed]
             x_data = {key: reduced}

         # transpose it to make x matrix
-        x_matrix = np.transpose([vec for vec in x_data.values()])
+        x_matrix = np.transpose(list(x_data.values()))

         result = {'x': x_data, 'y': y_data, 'coeffs': {}}
         for res_metric, y_vec in y_data.items():
@@ -271,13 +290,31 @@
         return json.loads(json_str)

     @abstractmethod
-    def _solve(self, x_matrix, y_vec, plot_y_label=None, plot_title=None, plot_pp=None):
+    def _solve(
+        self,
+        x_matrix: np.ndarray,
+        y_vec: list[float],
+        plot_y_label: str | None = None,
+        plot_title: str | None = None,
+        plot_pp: PdfPages | None = None,
+    ) -> tuple[list[float], float] | None:
         raise NotImplementedError


 class LinearResourceAnalysis(ResourceAnalysis):
-    def _solve(self, x_matrix, y_vec, plot_y_label=None, plot_title=None, plot_pp=None):
-        """Solve y = A(X) with linear regression.
+    """Linear regression-based resource analysis."""
+
+    def _solve(
+        self,
+        x_matrix: np.ndarray,
+        y_vec: list[float],
+        plot_y_label: str | None = None,
+        plot_title: str | None = None,
+        plot_pp: PdfPages | None = None,
+    ) -> tuple[list[float], float] | None:
+        """
+        Solve y = A(X) with linear regression.
+
+        Also make a scatter plot (for one-dimensional x_matrix only).
         Use `reduce_in_file_vars` in ResourceAnalysis.analyze()
         to reduce a matrix into a vector.
@@ -293,6 +330,7 @@ def _solve(self, x_matrix, y_vec, plot_y_label=None, plot_title=None, plot_pp=No
                 Plot title.
             plot_pp:
                 Matplotlib's PDF backend PdfPages object.
+
         Returns:
             Tuple of (coeffs, intercept).
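The regression here is a plain least-squares fit; as a standalone sketch with invented numbers, mirroring the scikit-learn call used in this method:

```python
# Standalone sketch of the fit performed by _solve(); numbers are invented.
import numpy as np
from sklearn import linear_model

x_matrix = np.array([[1e9], [2e9], [4e9]])  # reduced input file sizes (bytes)
y_vec = [2.1e9, 3.9e9, 7.8e9]               # e.g. observed stats.max.mem (bytes)

model = linear_model.LinearRegression().fit(x_matrix, y_vec)
coeffs, intercept = list(model.coef_), model.intercept_
predicted_mem = model.coef_[0] * 3e9 + model.intercept_  # estimate for a 3 GB input
```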
""" @@ -302,21 +340,21 @@ def _solve(self, x_matrix, y_vec, plot_y_label=None, plot_title=None, plot_pp=No model = linear_model.LinearRegression().fit(x_matrix, y_vec) except ValueError: - logger.error( - 'Failed to solve due to type/dim mismatch? ' - 'Too few data or invalid resource monitoring script? ' - 'title: {title}, y_label: {y_label}, ' - 'y_vec={y_vec}, x_matrix: {x_matrix}'.format( - title=plot_title, - y_label=plot_y_label, - y_vec=y_vec, - x_matrix=x_matrix, - ), - exc_info=True, + logger.exception( + 'Failed to solve due to type/dim mismatch? Too few data or invalid resource ' + 'monitoring script? title: %s, y_label: %s, y_vec=%s, x_matrix: %s', + plot_title, + plot_y_label, + y_vec, + x_matrix, ) - return + return None if plot_pp: + if not plot_title: + plot_title = 'Linear Regression' + if not plot_y_label: + plot_y_label = 'Resource' if x_matrix.shape[1] > 1: logger.warning( 'Cannot make a 2D scatter plot. dim(x_matrix) > 1. ' @@ -325,12 +363,12 @@ def _solve(self, x_matrix, y_vec, plot_y_label=None, plot_title=None, plot_pp=No else: x_vec = x_matrix[:, 0] # scatter plot with a fitting line - pyplot.scatter(x_vec, y_vec, s=np.pi * 3, color=(0, 0, 0), alpha=0.5) - pyplot.plot(x_vec, model.coef_ * x_vec + model.intercept_) - pyplot.title(plot_title) - pyplot.xlabel('input_file_size') - pyplot.ylabel(plot_y_label) - pyplot.savefig(plot_pp, format='pdf') - pyplot.clf() + plt.scatter(x_vec, y_vec, s=np.pi * 3, color=(0, 0, 0), alpha=0.5) + plt.plot(x_vec, model.coef_ * x_vec + model.intercept_) + plt.title(plot_title) + plt.xlabel('input_file_size') + plt.ylabel(plot_y_label) + plt.savefig(plot_pp, format='pdf') # type: ignore[bad-argument-type] PdfPages is actually accepted by savefig + plt.clf() return list(model.coef_), model.intercept_ diff --git a/caper/server_heartbeat.py b/caper/server_heartbeat.py index 3c123d3c..6757978a 100644 --- a/caper/server_heartbeat.py +++ b/caper/server_heartbeat.py @@ -1,29 +1,40 @@ +"""Server heartbeat module for sharing server hostname/port with clients.""" + +from __future__ import annotations + import logging import socket import time from threading import Thread +from typing import TYPE_CHECKING from autouri import AutoURI +if TYPE_CHECKING: + import os + logger = logging.getLogger(__name__) class ServerHeartbeatTimeoutError(Exception): - pass + """Exception raised when a heartbeat file is expired.""" class ServerHeartbeat: + """Server heartbeat to share server's hostname/port with clients.""" + DEFAULT_SERVER_HEARTBEAT_FILE = '~/.caper/default_server_heartbeat' DEFAULT_HEARTBEAT_TIMEOUT_MS = 120000 DEFAULT_INTERVAL_UPDATE_HEARTBEAT_SEC = 60.0 def __init__( self, - heartbeat_file=DEFAULT_SERVER_HEARTBEAT_FILE, - heartbeat_timeout=DEFAULT_HEARTBEAT_TIMEOUT_MS, - interval_update_heartbeat=DEFAULT_INTERVAL_UPDATE_HEARTBEAT_SEC, - ): - """Server heartbeat to share store server's hostname/port with clients. + heartbeat_file: str | os.PathLike[str] = DEFAULT_SERVER_HEARTBEAT_FILE, + heartbeat_timeout: int = DEFAULT_HEARTBEAT_TIMEOUT_MS, + interval_update_heartbeat: float = DEFAULT_INTERVAL_UPDATE_HEARTBEAT_SEC, + ) -> None: + """ + Server heartbeat to share store server's hostname/port with clients. Args: heartbeat_file: @@ -35,16 +46,16 @@ def __init__( interval_update_heartbeat: Period for updtaing a heartbeat file (in seconds). 
""" - self._heartbeat_file = heartbeat_file + self._heartbeat_file = str(heartbeat_file) self._heartbeat_timeout = heartbeat_timeout self._interval_update_heartbeat = interval_update_heartbeat self._stop_it = False self._thread = None - def start(self, port, hostname=None): - """Starts a thread that writes hostname/port of a server - on a heartbeat file. + def start(self, port: int, hostname: str | None = None) -> Thread: + """ + Starts a thread that writes hostname/port of a server on a heartbeat file. Args: port: @@ -57,52 +68,59 @@ def start(self, port, hostname=None): self._thread.start() return self._thread - def is_alive(self): + def is_alive(self) -> bool: + """Check if the heartbeat thread is alive.""" return self._thread.is_alive() if self._thread else False - def stop(self): + def stop(self) -> None: + """Stop the heartbeat thread.""" self._stop_it = True if self._thread: self._thread.join() - def read(self, raise_timeout=False): - """Read from heartbeat file. - If a heartbeat file is not fresh (mtime difference < timeout) - then None is returned. + def read(self, *, raise_timeout: bool = False) -> tuple[str, int] | None: + """ + Read from heartbeat file. + + If a heartbeat file is not fresh (mtime difference < timeout) then None is returned. Returns: Tuple of (hostname, port) + None if a heartbeat file is not fresh (mtime difference < timeout) """ try: u = AutoURI(self._heartbeat_file) - if (time.time() - u.mtime) * 1000.0 > self._heartbeat_timeout: - raise ServerHeartbeatTimeoutError - else: - hostname, port = u.read().strip('\n').split(':') - logger.info( - 'Reading hostname/port from a heartbeat file. {h}:{p}'.format( - h=hostname, p=port - ) - ) - return hostname, int(port) - - except ServerHeartbeatTimeoutError: + if not u.exists: + return None + time_diff_ms = (time.time() - u.mtime) * 1000.0 + content = u.read().strip('\n') + hostname, port = content.split(':') + + except (OSError, ValueError, AttributeError): + logger.exception('Failed to read from a heartbeat file. %s', self._heartbeat_file) + return None + + # Check if heartbeat file has expired (after successful read) + is_expired = time_diff_ms > self._heartbeat_timeout + if is_expired: logger.error( - 'Found a heartbeat file but it has been expired (> timeout)' - '. {f}'.format(f=self._heartbeat_file) + 'Found a heartbeat file but it has been expired (> timeout). %s', + self._heartbeat_file, ) if raise_timeout: - raise - - except Exception: - logger.error( - 'Failed to read from a heartbeat file. {f}'.format( - f=self._heartbeat_file + msg = ( + f'Found a heartbeat file but it has expired timeout ' + f'{self._heartbeat_file} ms > {self._heartbeat_timeout} ms' ) - ) + raise ServerHeartbeatTimeoutError(msg) + return None - def _write_to_file(self, port, hostname=None): + logger.info('Reading hostname/port from a heartbeat file. %s:%s', hostname, port) + return hostname, int(port) + + def _write_to_file(self, port: int, hostname: str | None = None) -> None: + """Write hostname/port to a heartbeat file.""" if not hostname: hostname = socket.gethostname() @@ -110,20 +128,10 @@ def _write_to_file(self, port, hostname=None): while True: try: - logger.debug( - 'Writing heartbeat: {hostname}, {port}'.format( - hostname=hostname, port=port - ) - ) - AutoURI(self._heartbeat_file).write( - '{hostname}:{port}'.format(hostname=hostname, port=port) - ) - except Exception: - logger.error( - 'Failed to write to a heartbeat_file. 
{f}'.format( - f=self._heartbeat_file - ) - ) + logger.debug('Writing heartbeat: %s, %s', hostname, port) + AutoURI(self._heartbeat_file).write(f'{hostname}:{port}') + except OSError: + logger.exception('Failed to write to a heartbeat_file. %s', self._heartbeat_file) cnt = 0 while cnt < self._interval_update_heartbeat: cnt += 1 diff --git a/caper/singularity.py b/caper/singularity.py index 23662cc9..64e67437 100644 --- a/caper/singularity.py +++ b/caper/singularity.py @@ -1,5 +1,8 @@ +"""Singularity container image management and caching.""" + import logging import os +from typing import Literal from autouri import AbsPath, AutoURI, URIBase from autouri.loc_aux import recurse_json @@ -10,8 +13,12 @@ DEFAULT_COMMON_ROOT_SEARCH_LEVEL = 5 -def find_bindpath(json_file, common_root_search_level=DEFAULT_COMMON_ROOT_SEARCH_LEVEL): - """Recursively find paths to be bound for singularity. +def find_bindpath( + json_file: str, common_root_search_level: int = DEFAULT_COMMON_ROOT_SEARCH_LEVEL +) -> str: + """ + Recursively find paths to be bound for singularity. + Find common roots for all files in an input JSON file. This function will recursively visit all values in input JSON and also JSON, TSV, CSV files in the input JSON itself. @@ -40,7 +47,7 @@ def find_bindpath(json_file, common_root_search_level=DEFAULT_COMMON_ROOT_SEARCH json_contents = AutoURI(json_file).read() all_dirnames = [] - def find_dirname(s): + def find_dirname(s: str) -> tuple[None, Literal[False]]: u = AbsPath(s) if u.is_valid: for ext, recurse_fnc_for_ext in URIBase.LOC_RECURSE_EXT_AND_FNC.items(): diff --git a/caper/wdl_parser.py b/caper/wdl_parser.py index 04bae0d9..6cc639ee 100644 --- a/caper/wdl_parser.py +++ b/caper/wdl_parser.py @@ -1,67 +1,88 @@ +"""WDL parsing module.""" + +from __future__ import annotations + import logging import os import re import shutil from tempfile import TemporaryDirectory +from typing import TYPE_CHECKING, Any from autouri import HTTPURL, AbsPath, AutoURI from WDL import parse_document +if TYPE_CHECKING: + from WDL.Tree import Document + logger = logging.getLogger(__name__) class WDLParser: - RE_WDL_IMPORT = r'^\s*import\s+[\"\'](.+)[\"\']\s*' + """Wrapper for miniwdl's WDL parser.""" + + RE_WDL_IMPORT = re.compile(r'^\s*import\s+[\"\'](.+)[\"\']\s*') RECURSION_DEPTH_LIMIT = 20 BASENAME_IMPORTS = 'imports.zip' + _wdl_doc: Document | None - def __init__(self, wdl): + def __init__(self, wdl: str) -> None: """Wraps miniwdl's parse_document().""" u = AutoURI(wdl) if not u.exists: - raise FileNotFoundError('WDL does not exist: wdl={wdl}'.format(wdl=wdl)) + msg = f'WDL does not exist: wdl={wdl}' + raise FileNotFoundError(msg) self._wdl = wdl self._wdl_contents = AutoURI(wdl).read() try: self._wdl_doc = parse_document(self._wdl_contents) except Exception: - logger.error('Failed to parse WDL with miniwdl.') + logger.exception('Failed to parse WDL with miniwdl.') self._wdl_doc = None @property - def contents(self): + def contents(self) -> str: + """String contents of the WDL file.""" return self._wdl_contents @property - def workflow_meta(self): - if self._wdl_doc: + def workflow_meta(self) -> dict[str, Any] | None: + """Retrieve the optional `meta` section from the workflow declaration.""" + if self._wdl_doc and self._wdl_doc.workflow: return self._wdl_doc.workflow.meta + return None @property - def workflow_parameter_meta(self): - if self._wdl_doc: + def workflow_parameter_meta(self) -> dict[str, Any | None] | None: + """Retrieve the optional `parameter_meta` section from the workflow declaration.""" + 
if self._wdl_doc and self._wdl_doc.workflow: return self._wdl_doc.workflow.parameter_meta + return None @property - def imports(self): - """Miniwdl (0.3.7) has a bug for URL imports. + def imports(self) -> list[str]: + """ + Miniwdl (0.3.7) has a bug for URL imports. + Keep using reg-ex to find imports until it's fixed. + Returns: List of URIs of imported subworkflows. """ try: - return [i.uri for i in self._wdl_doc.imports] + return [i.uri for i in self._wdl_doc.imports] if self._wdl_doc else [] except Exception: - pass - return self._find_val_of_matched_lines(WDLParser.RE_WDL_IMPORT) - - def zip_subworkflows(self, zip_file): - """Recursively find/zip imported subworkflow WDLs - This will zip sub-WDLs with relative paths only. - i.e. URIs are ignored. - For this (main) workflow, any URI is allowed. - However, only subworkflows with relative path will be zipped - since there is no way to make directory structure to zip them. + logger.exception('Failed to get imports from WDL with miniwdl.') + return self._find_val_of_matched_lines(self.RE_WDL_IMPORT) + + def zip_subworkflows(self, zip_file: str) -> str | None: + """ + Recursively find/zip imported subworkflow WDLs. + + This will zip sub-WDLs with relative paths only, i.e. URIs are ignored. For this (main) + workflow, any URI is allowed. However, only subworkflows with relative paths will be + zipped since there is no way to make directory structure to zip them. + Returns: Zipped imports file. None if no subworkflows recursively found in WDL. @@ -71,42 +92,57 @@ def zip_subworkflows(self, zip_file): # then will use its original path without loc. wdl = AutoURI(self._wdl).localize_on(tmp_d) # keep directory structure as they imported - num_sub_wf_packed = self.__recurse_zip_subworkflows( + num_sub_wf_packed = self.recurse_zip_subworkflows( root_zip_dir=tmp_d, root_wdl_dir=AutoURI(wdl).dirname ) if num_sub_wf_packed: shutil.make_archive(AutoURI(zip_file).uri_wo_ext, 'zip', tmp_d) return zip_file + return None + + def create_imports_file(self, directory: str, basename: str = BASENAME_IMPORTS) -> str | None: + """ + Wrapper for zip_subworkflows. - def create_imports_file(self, directory, basename=BASENAME_IMPORTS): - """Wrapper for zip_subworkflows. This creates an imports zip file with basename on directory. """ zip_file = os.path.join(directory, basename) if self.zip_subworkflows(zip_file): return zip_file + return None + + def _find_val_of_matched_lines( + self, regex: re.Pattern[str], *, no_strip: bool = False + ) -> list[str]: + """ + Find value of the first line matching regex. - def _find_val_of_matched_lines(self, regex, no_strip=False): - """Find value of the first line matching regex. Args: regex: Regular expression. This should have only one (). no_strip: Do not strip result strings. + Returns: Value of the first line matching regex. """ res = [] for line in self.contents.split('\n'): - r = re.findall(regex, line) + r = regex.findall(line) if len(r) > 0: res.append(r[0] if no_strip else r[0].strip()) return res - def __recurse_zip_subworkflows( - self, root_zip_dir, root_wdl_dir, imported_as_url=False, depth=0 - ): - """Recurse imported sub-WDLs in main-WDL. + def recurse_zip_subworkflows( + self, + root_zip_dir: str, + root_wdl_dir: str, + imported_as_url: bool = False, + depth: int = 0, + ) -> int: + """ + Recurse imported sub-WDLs in main-WDL. + Unlike Cromwell, Womtool does not take imports.zip while validating WDLs. All sub-WDLs should be in a correct directory structure relative to the root WDL. 
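For reference, the import-zipping entry points above can be exercised roughly as follows (a sketch; the paths are hypothetical):

```python
# Hypothetical paths; sketch of the WDLParser API shown in this diff.
from caper.wdl_parser import WDLParser

parser = WDLParser('/path/to/main.wdl')
print(parser.imports)  # URIs of imported sub-WDLs (empty list if none)

# Zip relatively-imported sub-WDLs for Cromwell/Womtool.
zip_file = parser.create_imports_file('/tmp/caper_imports')
if zip_file is None:
    print('No subworkflows found; nothing was zipped.')
```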
@@ -118,24 +154,30 @@ def __recurse_zip_subworkflows(
         WDL's directory.
         Sub-WDLs imported as absolute path are not allowed.
         This can work with "caper run" but not with
         "caper submit" (or Cromwell submit).
+
         Args:
-            depth: Recursion depth
+            root_zip_dir:
+                Root directory for the imports zip file.
+            root_wdl_dir:
+                Root directory containing the main WDL file.
+            imported_as_url:
+                Whether the current WDL was imported as a URL.
+            depth:
+                Recursion depth.
+
         Returns:
             Total number of subworkflows:
             Sub WDL files "recursively" localized on "root_zip_dir".
+
         """
         if depth > WDLParser.RECURSION_DEPTH_LIMIT:
-            raise ValueError(
+            msg = (
                 'Reached recursion depth limit while zipping subworkflows recursively. '
-                'Possible cyclic import or self-refencing in WDLs? wdl={wdl}'.format(
-                    wdl=self._wdl
-                )
+                f'Possible cyclic import or self-referencing in WDLs? wdl={self._wdl}'
             )
+            raise ValueError(msg)

-        if imported_as_url:
-            main_wdl_dir = root_wdl_dir
-        else:
-            main_wdl_dir = AbsPath(self._wdl).dirname
+        main_wdl_dir = root_wdl_dir if imported_as_url else AbsPath(self._wdl).dirname

         num_sub_wf_packed = 0
         for sub_rel_to_parent in self.imports:
@@ -145,34 +187,26 @@ def __recurse_zip_subworkflows(
                 sub_abs = sub_wdl_file.uri
                 imported_as_url_sub = True
             elif isinstance(sub_wdl_file, AbsPath):
-                raise ValueError(
+                msg = (
                     'For sub WDL zipping, absolute path is not allowed for sub WDL. '
-                    'main={main}, sub={sub}'.format(
-                        main=self._wdl, sub=sub_rel_to_parent
-                    )
+                    f'main={self._wdl}, sub={sub_rel_to_parent}'
                 )
+                raise TypeError(msg)
             else:
-                sub_abs = os.path.realpath(
-                    os.path.join(main_wdl_dir, sub_rel_to_parent)
-                )
+                sub_abs = os.path.realpath(os.path.join(main_wdl_dir, sub_rel_to_parent))

                 if not AbsPath(sub_abs).exists:
-                    raise FileNotFoundError(
-                        'Sub WDL does not exist. Did you import main WDL '
-                        'as a URL but sub WDL references a local file? '
-                        'main={main}, sub={sub}, imported_as_url={i}'.format(
-                            main=self._wdl, sub=sub_rel_to_parent, i=imported_as_url
-                        )
+                    msg = (
+                        'Sub WDL does not exist. Did you import main WDL as a URL but sub WDL references a local file? '
+                        f'main={self._wdl}, sub={sub_rel_to_parent}, imported_as_url={imported_as_url}'
                     )
+                    raise FileNotFoundError(msg)

                 if not sub_abs.startswith(root_wdl_dir):
-                    raise ValueError(
-                        'Sub WDL exists but it is out of root WDL directory. '
-                        'Too many "../" in your sub WDL? '
-                        'Or main WDL is imported as an URL but sub WDL '
-                        'has "../"? '
-                        'main={main}, sub={sub}, imported_as_url={i}'.format(
-                            main=self._wdl, sub=sub_rel_to_parent, i=imported_as_url
-                        )
+                    msg = (
+                        'Sub WDL exists but it is out of root WDL directory. Too many "../" in your sub WDL? '
+                        'Or main WDL is imported as an URL but sub WDL has "../"? 
' + f'main={self._wdl}, sub={sub_rel_to_parent}, imported_as_url={imported_as_url}' ) + raise ValueError(msg) # make a copy on zip_dir rel_path = os.path.relpath(sub_abs, root_wdl_dir) @@ -182,7 +216,7 @@ def __recurse_zip_subworkflows( num_sub_wf_packed += 1 imported_as_url_sub = False - num_sub_wf_packed += WDLParser(sub_abs).__recurse_zip_subworkflows( + num_sub_wf_packed += WDLParser(sub_abs).recurse_zip_subworkflows( root_zip_dir=root_zip_dir, root_wdl_dir=root_wdl_dir, imported_as_url=imported_as_url_sub, diff --git a/docs/conf_encode_workshop_2019.md b/docs/conf_encode_workshop_2019.md index 054eb2c9..c3241ccc 100644 --- a/docs/conf_encode_workshop_2019.md +++ b/docs/conf_encode_workshop_2019.md @@ -1,3 +1,9 @@ +# ARCHIVED - 2019 ENCODE Users' Meeting Pipeline Workshop + +> **NOTE**: This document is archived and preserved for historical reference only. It was created for a specific workshop in 2019 and the instructions may no longer be accurate or relevant. For current setup instructions, please see the main [README](../README.md) and [DETAILS](../DETAILS.md). + +--- + # Welcome to the 2019 ENCODE Users' Meeting Pipeline Workshop ## Do this before the workshop diff --git a/docs/conf_gcp.md b/docs/conf_gcp.md index 6d9c867c..2c5ccc3f 100644 --- a/docs/conf_gcp.md +++ b/docs/conf_gcp.md @@ -1,65 +1,78 @@ -Deprecated. Please see [this](../scripts/gcp_caper_server/README.md) instead. +# Configuration for Google Cloud Platform backend (`gcp`) -# DEPRECATED +> **NOTE**: For complete GCP server setup instructions, see [scripts/gcp_caper_server/README.md](../scripts/gcp_caper_server/README.md). -# Configuration for Google Cloud Platform backend (`gcp`) +> **IMPORTANT**: Google Cloud Genomics API and Cloud Life Sciences API have been deprecated and removed. Caper now uses [Google Cloud Batch API](https://cloud.google.com/batch) exclusively. + +## Prerequisites -1. Sign up for a Google account. -2. Go to [Google Project](https://console.developers.google.com/project) page and click "SIGN UP FOR FREE TRIAL" on the top left and agree to terms. -3. Set up a payment method and click "START MY FREE TRIAL". -4. Create a [Google Project](https://console.developers.google.com/project) `[YOUR_PROJECT_NAME]` and choose it on the top of the page. -5. Create a [Google Cloud Storage bucket](https://console.cloud.google.com/storage/browser) `gs://[YOUR_BUCKET_NAME]` by clicking on a button "CREATE BUCKET" and create it to store pipeline outputs. -6. Find and enable following APIs in your [API Manager](https://console.developers.google.com/apis/library). Click a back button on your web brower after enabling each. +1. Sign up for a Google account and set up billing in the [Google Cloud Console](https://console.cloud.google.com/). +2. Create a [Google Project](https://console.developers.google.com/project). +3. Create a [Google Cloud Storage bucket](https://console.cloud.google.com/storage/browser) to store pipeline outputs. +4. Enable the following APIs in your [API Manager](https://console.developers.google.com/apis/library): * Compute Engine API - * Google Cloud Storage (DO NOT click on "Create credentials") + * Google Cloud Storage * Google Cloud Storage JSON API - * Genomics API - * **Google Cloud Life Sciences API** (for Cromwell's new API, i.e. `--use-google-cloud-life-sciences`) + * Cloud Batch API -7. Install [Google Cloud Platform SDK](https://cloud.google.com/sdk/downloads) and authenticate through it. You will be asked to enter verification keys. Get keys from the URLs they provide. +5. 
Set your default Google Cloud Project: ```bash - $ gcloud auth login --no-launch-browser - $ gcloud auth application-default login --no-launch-browser + $ gcloud config set project [YOUR_PROJECT_NAME] ``` -8. If you see permission errors at runtime, then unset environment variable `GOOGLE_APPLICATION_CREDENTIALS` or add it to your BASH startup scripts (`$HOME/.bashrc` or `$HOME/.bash_profile`). - ```bash - unset GOOGLE_APPLICATION_CREDENTIALS - ``` +## Authentication -7. Set your default Google Cloud Project. Pipeline will provision instances on this project. - ```bash - $ gcloud config set project [YOUR_PROJECT_NAME] - ``` +### Recommended: Application Default Credentials (ADC) -# Setting up a Caper server instance +Caper uses [Application Default Credentials](https://cloud.google.com/docs/authentication/application-default-credentials) to authenticate with Google Cloud services. The recommended authentication method depends on your environment: -You will find [this](./conf_encode_workshop_2019.md) useful to set up your own Caper server on Google Cloud Platform. +**On a Compute Engine VM:** -# How to run Caper with a service account +Attach a service account to the VM instance. Applications automatically use the VM's credentials via the metadata server—no additional configuration needed. -On your Google Cloud Console, create a service account (`IAM & Admin` -> `Service Accounts`) with the following roles. You can add roles later in `IAM & Admin` -> `IAM`. - * Service Account User - * Compute Admin - * Genomics Admin - * **Cloud Life Sciences Admin** (for Cromwell's new API, i.e. `--use-google-cloud-life-sciences`) - * Storage Admin (or set it up for an individual bucket) +```bash +# When creating the VM +gcloud compute instances create [INSTANCE_NAME] \ + --service-account=[SERVICE_ACCOUNT_EMAIL] \ + --scopes=cloud-platform +``` -Create a secret key JSON file for your service account. Make sure that your service account has enough permission for provionsing VM instances and write permission on output/work Google Cloud Storage buckets (`--gcp-out-dir` and `--gcp-work-dir`). +**For local development:** -> **IMPORTANT**: Click on the created service account and make sure that `Enable G Suite Domain-wide Delegation` is checked to prevent the following permission error. +Use your Google account credentials: +```bash +$ gcloud auth login --no-launch-browser +$ gcloud auth application-default login --no-launch-browser ``` -400 Bad Request -POST https://lifesciences.googleapis.com/v2beta/projects/99884963860/locations/us-central1/operations/XXXXXXXXXXXXXXXXXXXX:cancel -{ - "code" : 400, - "errors" : [ { - "domain" : "global", - "message" : "Precondition check failed.", - "reason" : "failedPrecondition" - } ], - "message" : "Precondition check failed.", - "status" : "FAILED_PRECONDITION" -} -``` + +### Legacy: Service Account JSON Keys (Not Recommended) + +> **WARNING**: JSON key files pose security risks—they can be leaked, are difficult to rotate, and provide long-lived credentials. Prefer VM-attached service accounts or user credentials instead. + +If you must use JSON keys: +- Store securely with restricted permissions (`chmod 600`) +- Never commit to version control +- Rotate regularly +- Set `GOOGLE_APPLICATION_CREDENTIALS` environment variable + +Consider [Workload Identity Federation](https://cloud.google.com/iam/docs/workload-identity-federation) as a more secure alternative to JSON keys. 
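+If you do use a key file, a minimal setup might look like this (the key path below is illustrative):
+
+```bash
+# Illustrative key path; restrict permissions and point ADC at the key.
+chmod 600 ~/keys/caper-sa.json
+export GOOGLE_APPLICATION_CREDENTIALS=~/keys/caper-sa.json
+
+# Verify that credentials resolve before running Caper.
+gcloud auth application-default print-access-token > /dev/null && echo OK
+```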
+ +## Service Account Permissions + +Create a service account with the following roles: +* Service Account User +* Compute Admin +* Batch Admin +* Storage Admin (or configure per-bucket permissions) + +> **NOTE**: The service account used to launch Batch jobs is different from the Compute Service Account used by Batch VMs to run tasks. You can specify a different Compute Service Account using `--gcp-compute-service-account`. The Compute Service Account needs `roles/batch.agentReporter` to report status back to Batch. + +## Troubleshooting + +If you see permission errors at runtime: + +1. Verify your VM has an attached service account with the correct roles +2. Ensure `GOOGLE_APPLICATION_CREDENTIALS` is unset if using default credentials +3. Check that the service account has access to required GCS buckets +4. Run `gcloud auth application-default print-access-token` to verify credentials are working diff --git a/docs/resource_param.md b/docs/resource_param.md index 7cd08df7..6e0cdf03 100644 --- a/docs/resource_param.md +++ b/docs/resource_param.md @@ -24,7 +24,7 @@ slurm-leader-job-resource-param=-t 48:00:00 --mem 4G # This parameter defines resource parameters for submitting WDL task to job engine. # It is for HPC backends only (slurm, sge, pbs and lsf). # It is not recommended to change it unless your cluster has custom resource settings. -# See https://github.com/ENCODE-DCC/caper/blob/master/docs/resource_param.md for details. +# See the resource_param.md documentation for details. slurm-resource-param=-n 1 --ntasks-per-node=1 --cpus-per-task=${cpu} ${if defined(memory_mb) then "--mem=" else ""}${memory_mb}${if defined(memory_mb) then "M" else ""} ${if defined(time) then "--time=" else ""}${time*60} ${if defined(gpu) then "--gres=gpu:" else ""}${gpu} ``` @@ -43,7 +43,7 @@ sge-pe= # This parameter defines resource parameters for submitting WDL task to job engine. # It is for HPC backends only (slurm, sge, pbs and lsf). # It is not recommended to change it unless your cluster has custom resource settings. -# See https://github.com/ENCODE-DCC/caper/blob/master/docs/resource_param.md for details. +# See the resource_param.md documentation for details. sge-resource-param=${if cpu > 1 then "-pe " + sge_pe + " " else ""} ${if cpu > 1 then cpu else ""} ${true="-l h_vmem=$(expr " false="" defined(memory_mb)}${memory_mb}${true=" / " false="" defined(memory_mb)}${if defined(memory_mb) then cpu else ""}${true=")m" false="" defined(memory_mb)} ${true="-l s_vmem=$(expr " false="" defined(memory_mb)}${memory_mb}${true=" / " false="" defined(memory_mb)}${if defined(memory_mb) then cpu else ""}${true=")m" false="" defined(memory_mb)} ${"-l h_rt=" + time + ":00:00"} ${"-l s_rt=" + time + ":00:00"} ${"-l gpu=" + gpu} ``` @@ -56,7 +56,7 @@ pbs-leader-job-resource-param=-l walltime=48:00:00,mem=4gb # This parameter defines resource parameters for submitting WDL task to job engine. # It is for HPC backends only (slurm, sge, pbs and lsf). # It is not recommended to change it unless your cluster has custom resource settings. -# See https://github.com/ENCODE-DCC/caper/blob/master/docs/resource_param.md for details. +# See the resource_param.md documentation for details. 
pbs-resource-param=${"-lnodes=1:ppn=" + cpu}${if defined(gpu) then ":gpus=" + gpu else ""} ${if defined(memory_mb) then "-l mem=" else ""}${memory_mb}${if defined(memory_mb) then "mb" else ""} ${"-lwalltime=" + time + ":0:0"} ``` @@ -69,6 +69,6 @@ lsf-leader-job-resource-param=-W 2880 -M 4g # This parameter defines resource parameters for submitting WDL task to job engine. # It is for HPC backends only (slurm, sge, pbs and lsf). # It is not recommended to change it unless your cluster has custom resource settings. -# See https://github.com/ENCODE-DCC/caper/blob/master/docs/resource_param.md for details. +# See the resource_param.md documentation for details. lsf-resource-param=${"-n " + cpu} ${if defined(gpu) then "-gpu " + gpu else ""} ${if defined(memory_mb) then "-M " else ""}${memory_mb}${if defined(memory_mb) then "m" else ""} ${"-W " + 60*time} ``` diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..b1909813 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,101 @@ +[project] +name = "caper" +dynamic = ["version"] +description = "Cromwell Assisted Pipeline ExecutoR" +readme = "README.md" +requires-python = ">=3.12" +license = "MIT" +authors = [ + { name = "Jin Lee", email = "leepc12@gmail.com" }, + { name = "Mihir Samdarshi", email = "msamdars@stanford.edu"} +] +maintainers = [ + { name = "Mihir Samdarshi", email = "msamdars@stanford.edu"} +] +keywords = ["cromwell", "pipeline", "workflow", "bioinformatics"] +classifiers = [ + "Programming Language :: Python :: 3", + "Operating System :: POSIX :: Linux", +] +dependencies = [ + "pyhocon>=0.3.53", + "requests>=2.20", + "pyopenssl", + "autouri>=0.4.4", + "miniwdl>=0.7.0", + "humanfriendly", + "numpy>=1.8.2", + "pandas>=1.0", + "scikit-learn>=0.19.2", + "matplotlib>=1.5", + "six>=1.13.0", +] + +[project.urls] +Homepage = "https://github.com/MoTrPAC/caper" +Repository = "https://github.com/MoTrPAC/caper" + +[project.scripts] +caper = "caper.cli:main" + +[build-system] +requires = ["setuptools>=45", "wheel"] +build-backend = "setuptools.build_meta" + +[tool.setuptools] +packages = ["caper"] + +[tool.setuptools.package-data] +caper = ["bin/*", "scripts/*"] + +[tool.setuptools.dynamic] +version = { attr = "caper.__version__" } + +[dependency-groups] +dev = [ + "pytest>=8.4.1", + "ruff", + "pyrefly", +] + +[tool.pytest.ini_options] +minversion = "6.0" +testpaths = ["tests"] +markers = [ + "integration: Integration tests", + "google_cloud: Tests using Google Cloud services (require auth)", + "slow: mark test as slow" +] + +[tool.ruff] +line-length = 100 +show-fixes = true +target-version = "py312" + +[tool.ruff.format] +quote-style = "single" + +[tool.ruff.lint] +select = ["ALL"] +ignore = [ + "COM812", + "ANN401", + "D203", + "D212", + "D401", + "PTH", +] +# Specify per-file ignores for Ruff +[tool.ruff.lint.per-file-ignores] +"tests/*.py" = ["D100", "D102", "D103", "S101", "I001", "SLF001", "ANN"] +# CLI module uses arg() pattern designed for dataclass defaults +"caper/cli/args/*.py" = ["RUF009"] + +[tool.ruff.lint.pydocstyle] +convention = "google" + +[tool.ruff.lint.flake8-quotes] +inline-quotes = "single" + +[tool.pyright] +pythonVersion = "3.12" diff --git a/scripts/aws_caper_server/README.md b/scripts/aws_caper_server/README.md index 93de20e9..2099b122 100644 --- a/scripts/aws_caper_server/README.md +++ b/scripts/aws_caper_server/README.md @@ -169,4 +169,4 @@ https://docs.opendata.aws/genomics-workflows/orchestration/cromwell/cromwell-ove ## Troubleshooting -See [this] for troubleshooting. 
+See the main [DETAILS.md](../../DETAILS.md) documentation for troubleshooting information. diff --git a/scripts/gcp_caper_server/README.md b/scripts/gcp_caper_server/README.md index 14fb2995..f2040c7c 100644 --- a/scripts/gcp_caper_server/README.md +++ b/scripts/gcp_caper_server/README.md @@ -1,56 +1,124 @@ ## Introduction -`create_instance.sh` will create an instance on Google Cloud Compute Engine in Google your project and configure the instance for Caper with PostgreSQL database and Google Cloud Life Sciences API (`v2beta`). +`create_instance.sh` will create an instance on Google Cloud Compute Engine in your project and configure the instance for Caper with PostgreSQL database and Google Cloud Batch API. -> **NOTE**: Google Cloud Life Sciences API is a new API replacing the old deprecating Genomics API (`v2alpha1`). It requires `--gcp-region` to be defined correctly. Check [supported regions](https://cloud.google.com/life-sciences/docs/concepts/locations) for the new API. +> **NOTE**: Google Cloud Genomics API and Cloud Life Sciences API have been deprecated and removed. Caper now uses Google Cloud Batch API exclusively. -## Install Google Cloud SDK SLI +## Prerequisites -Make sure that `gcloud` (Google Cloud SDK CLI) is installed on your system. +### Install Google Cloud CLI -Go to [APIs & Services](https://console.cloud.google.com/apis/dashboard) on your project and enable the following APIs on your Google Cloud console. +Make sure that `gcloud` (Google Cloud CLI) is installed on your local system. See [Install the gcloud CLI](https://cloud.google.com/sdk/docs/install) for instructions. + +### Enable Required APIs + +Go to [APIs & Services](https://console.cloud.google.com/apis/dashboard) on your project and enable the following APIs: * Compute Engine API -* Cloud Storage: DO NOT click on `Create credentials`. +* Cloud Storage * Cloud Storage JSON API -* Google Cloud Life Sciences API +* Cloud Batch API + +### Create a Service Account Go to [Service accounts](https://console.cloud.google.com/iam-admin/serviceaccounts) on your project and create a new service account with the following roles: * Compute Admin -* Storage Admin: You can skip this and individually configure permission on each bucket on the project. -* Cloud Life Sciences Admin (Cromwell's PAPI v2beta) -* **Service Account User** (VERY IMPORTANT). +* Storage Admin (or configure permissions on individual buckets) +* Batch Admin +* **Service Account User** (required for impersonation) + +> **IMPORTANT**: The service account specified above is used to launch Batch jobs. This is different from the Compute Service Account used by the Google Cloud Batch VMs to run the actual tasks. You can specify a different Compute Service Account using the `--gcp-compute-service-account` parameter. The Compute Service Account needs the `roles/batch.agentReporter` role to report status back to Batch. + +## Authentication Methods + +### Recommended: VM-Attached Service Account (Default Credentials) + +The most secure approach is to attach the service account directly to the Compute Engine VM instance. This uses Google's metadata server for authentication and eliminates the need for JSON key files. + +1. When creating the VM instance, attach the service account: + ```bash + gcloud compute instances create [INSTANCE_NAME] \ + --service-account=[SERVICE_ACCOUNT_EMAIL] \ + --scopes=cloud-platform \ + --zone=[ZONE] + ``` -Generate a secret key JSON from the service account and keep it locally on your computer. +2. 
On the VM, applications automatically authenticate using the attached service account via [Application Default Credentials (ADC)](https://cloud.google.com/docs/authentication/application-default-credentials). + +3. No additional authentication setup is needed. Caper will automatically use the VM's credentials. + +**Benefits:** +- No JSON key files to manage or secure +- Credentials are automatically rotated by Google +- No risk of key file exposure +- Works automatically with all Google Cloud client libraries + +### Alternative: User Credentials (for local development) + +For local development or testing, you can use your own Google account: + +```bash +# Authenticate with your Google account +$ gcloud auth login --no-launch-browser + +# Set up Application Default Credentials +$ gcloud auth application-default login --no-launch-browser +``` -> **WARNING**: Such secret JSON file is a master key for important resources on your project. Keep it secure at your own risk. This file will be used for Caper so that it will be trasnferred to the created instance at `/opt/caper/service_account_key.json` visible to all users on the instance. +### Legacy: Service Account JSON Key (Not Recommended) + +> **WARNING**: Using JSON key files is discouraged for production environments. Keys can be leaked, are difficult to rotate, and provide long-lived credentials. + +If you must use a JSON key file: + +1. Generate a key from the service account in the Google Cloud Console +2. Store the key securely with restricted file permissions (`chmod 600`) +3. Pass it to `create_instance.sh` with `--service-account-key-json` + +**Security considerations for JSON keys:** +- Never commit key files to version control +- Rotate keys regularly +- Use short-lived keys when possible +- Consider using [Workload Identity Federation](https://cloud.google.com/iam/docs/workload-identity-federation) as an alternative ## How to create an instance -Run without arguments to see detailed help. Some optional arguments are very important depending on your region/zone. e.g. `--gcp-region` (for provisioning worker instances of Life Sciences API) and `--zone` (for server instance creation only). These regional parameters default to US central region/zones. +Run without arguments to see detailed help: ```bash $ bash create_instance.sh ``` -However, this script is designed to work well with default arguments. Try with positional arguments only first and see if it works. +**Recommended:** Create an instance with a VM-attached service account: ```bash -$ bash create_instance.sh [INSTANCE_NAME] [PROJECT_ID] [GCP_SERVICE_ACCOUNT_KEY_JSON_FILE] [GCP_OUT_DIR] +$ bash create_instance.sh [INSTANCE_NAME] [PROJECT_ID] [GCP_OUT_DIR] \ + --service-account [SERVICE_ACCOUNT_EMAIL] ``` -This script will run Caper server by user `root` in a `screen` named `caper_server` at the end the installation. +Example: +```bash +$ bash create_instance.sh my-caper-server my-gcp-project gs://my-bucket/caper-out \ + --service-account caper-sa@my-gcp-project.iam.gserviceaccount.com +``` +**Legacy:** Create an instance with a JSON key file (not recommended): +```bash +$ bash create_instance.sh [INSTANCE_NAME] [PROJECT_ID] [GCP_OUT_DIR] \ + --service-account-key-json [PATH_TO_KEY_FILE] +``` + +> **NOTE**: Some optional arguments are important depending on your region/zone, e.g., `--gcp-region` (for provisioning worker instances of Batch API) and `--zone` (for server instance creation). These default to US central region/zones. 
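+Before submitting workflows, it can be worth confirming which identity the new instance will authenticate as (the instance/zone names below are illustrative):
+
+```bash
+# SSH into the new instance (names are illustrative).
+gcloud compute ssh my-caper-server --zone=us-central1-a
+
+# On the VM: ask the metadata server which service account ADC resolves to.
+curl -s -H "Metadata-Flavor: Google" \
+  "http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/email"
+```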
## How to stop Caper server -On the instance, attach to the existing screen `caper_server`, stop it with Ctrl + C. +On the instance, attach to the existing screen `caper_server` and stop it with Ctrl + C: ```bash -$ sudo su # log-in as root -$ screen -r caper_server # attach to the screen -# in the screen, press Ctrl + C to send SIGINT to Caper +$ sudo su +$ screen -r caper_server +# Press Ctrl + C to send SIGINT to Caper ``` ## How to start Caper server -On the instance, make a new screen `caper_server`. +On the instance, create a new screen `caper_server`: ```bash $ cd /opt/caper $ screen -dmS caper_server bash -c "caper server > caper_server.log 2>&1" @@ -58,37 +126,34 @@ $ screen -dmS caper_server bash -c "caper server > caper_server.log 2>&1" ## How to submit workflow -Check if `caper list` works without any network errors. +Check if `caper list` works without any network errors: ```bash $ caper list ``` -Submit a workflow. +Submit a workflow: ```bash $ caper submit [WDL] -i input.json ... ``` -Caper will localize big data files on a GCS bucket directory `--gcp-loc-dir`, which defaults to `[GCP_OUT_DIR]/.caper_tmp/` if not defined. e.g. your FASTQs and reference genome data defined in an input JSON. - +Caper will localize big data files on a GCS bucket directory `--gcp-loc-dir`, which defaults to `[GCP_OUT_DIR]/.caper_tmp/` if not defined. ## How to configure Caper -**This section is for advanced users only**. Caper tries to find a default configuration file at `~/.caper/default.conf` which is symlinked from `/opt/caper/default.conf`. `/opt/caper/default.conf` is a globally shared configuration file. Edit this file for both server/client. +Caper looks for a default configuration file at `~/.caper/default.conf`. For shared server setups, this can be symlinked to a global configuration at `/opt/caper/default.conf`. -Everytime a user logs in, symlinking is reset. It is controlled by `/etc/profile.d/gcp-auth.sh`. +To use your own configuration: ```bash -gcloud auth activate-service-account --key-file=/opt/caper/service_account_key.json -mkdir -p ~/.caper -ln -s /opt/caper/default.conf ~/.caper/ 2> /dev/null | true -``` - -If users want to have their own configuration at `~/.caper/default.conf`, simply delete this symlink and make a copy of globally shared one. -```bash -$ rm ~/.caper/default.conf +$ mkdir -p ~/.caper $ cp /opt/caper/default.conf ~/.caper/default.conf +# Edit ~/.caper/default.conf as needed ``` - ## Troubleshooting -See [this] for troubleshooting. +See the main [DETAILS.md](../../DETAILS.md) documentation for troubleshooting information. + +If you see permission errors at runtime, ensure: +1. The VM has an attached service account with the correct roles +2. Or `GOOGLE_APPLICATION_CREDENTIALS` is not set (to use default credentials) +3. The service account has access to the required GCS buckets diff --git a/scripts/gcp_caper_server/TROUBLESHOOTING.md b/scripts/gcp_caper_server/TROUBLESHOOTING.md deleted file mode 100644 index 67606e68..00000000 --- a/scripts/gcp_caper_server/TROUBLESHOOTING.md +++ /dev/null @@ -1,21 +0,0 @@ -## Troubleshooting errors - -If you see permission errors check if the above roles are correctly configured for your service account. - -If you see PAPI errors and Google's HTTP endpoint deprecation warning. Remove Life Sciences API role from your service account and add it back. 
- -If you see the following error then click on your service account on `Service Account` in `IAM` of your Google project and make sure that `Enable G Suite Domain-wide Delegation` is checked. -``` -400 Bad Request -POST https://lifesciences.googleapis.com/v2beta/projects/99884963860/locations/us-central1/operations/XXXXXXXXXXXXXXXXXXXX:cancel -{ - "code" : 400, - "errors" : [ { - "domain" : "global", - "message" : "Precondition check failed.", - "reason" : "failedPrecondition" - } ], - "message" : "Precondition check failed.", - "status" : "FAILED_PRECONDITION" -} -``` diff --git a/scripts/gcp_caper_server/create_instance.sh b/scripts/gcp_caper_server/create_instance.sh index c7dd518c..19ab3786 100755 --- a/scripts/gcp_caper_server/create_instance.sh +++ b/scripts/gcp_caper_server/create_instance.sh @@ -4,19 +4,24 @@ set -eo pipefail if [[ $# -lt 1 ]]; then echo "Automated shell script to create Caper server instance with PostgreSQL on Google Cloud." echo - echo "Usage: ./create_instance.sh [INSTANCE_NAME] [GCP_PRJ]" - echo " [GCP_SERVICE_ACCOUNT_SECRET_JSON_FILE] [GCP_OUT_DIR]" - echo " " + echo "Usage: ./create_instance.sh [INSTANCE_NAME] [GCP_PRJ] [GCP_OUT_DIR] " echo echo "Positional arguments:" echo " [INSTANCE_NAME]: New instance's name." echo " [GCP_PRJ]: Your project's ID on Google Cloud Platform. --gcp-prj in Caper." - echo " [GCP_SERVICE_ACCOUNT_KEY_JSON_FILE]: Service account's secret key JSON file. --gcp-service-account-key-json in Caper." echo " [GCP_OUT_DIR]: gs:// bucket dir path for outputs. --gcp-out-dir in Caper." echo + echo "Authentication (choose one):" + echo " --service-account: (RECOMMENDED) Service account email to attach to the VM." + echo " The VM will authenticate via metadata server (ADC)." + echo " Example: my-sa@my-project.iam.gserviceaccount.com" + echo " --service-account-key-json: (LEGACY) Path to service account JSON key file." + echo " Not recommended for production use." + echo echo "Optional arguments for Caper:" echo " -l, --gcp-loc-dir: gs:// bucket dir path for localization." - echo " --gcp-region: Region for Google Life Sciences API. us-central1 by default. CHECK SUPPORTED REGIONS. This is different from --zone, which is used for instance creation only. us-central1 by default." + echo " --gcp-region: Region for Google Cloud Batch API. us-central1 by default." + echo " --gcp-compute-service-account: Service account for Batch worker VMs (if different from main SA)." echo " --postgresql-db-ip: localhost by default." echo " --postgresql-db-port: 5432 by default." echo " --postgresql-db-user: cromwell by default." @@ -27,16 +32,25 @@ if [[ $# -lt 1 ]]; then echo " -z, --zone: Zone. Check available zones: gcloud compute zones list. us-central1-a by default." echo " -m, --machine-type: Machine type. Check available machine-types: gcloud compute machine-types list. n1-standard-4 by default." echo " -b, --boot-disk-size: Boot disk size. Use a suffix for unit. e.g. GB and MB. 100GB by default." - echo " -u, --username: Username (super user) used for transferring key file to the instance. ubuntu by default." + echo " -u, --username: Username for SSH. ubuntu by default." echo " --boot-disk-type: Boot disk type. pd-standard (Standard persistent disk) by default." - echo " --image: Image. Check available images: gcloud compute images list. ubuntu-2004-focal-v20220118 by default." + echo " --image: Image. Check available images: gcloud compute images list. ubuntu-2204-jammy-v20240119 by default." echo " --image-project: Image project. 
ubuntu-os-cloud by default." echo " --tags: Tags to apply to the new instance. caper-server by default." - echo " --startup-script: Startup script CONTENTS (NOT A FILE). These command lines should sudo-install screen, Java, PostgreSQL, Python3 and pip3. DO NOT INSTALL CAPER HERE. some apt-get command lines by default." + echo " --startup-script: Startup script CONTENTS (NOT A FILE)." + echo + echo "Examples:" + echo " # Recommended: Using VM-attached service account" + echo " ./create_instance.sh my-caper prod-project gs://my-bucket/caper-out \\" + echo " --service-account caper-sa@prod-project.iam.gserviceaccount.com" + echo + echo " # Legacy: Using JSON key file (not recommended)" + echo " ./create_instance.sh my-caper prod-project gs://my-bucket/caper-out \\" + echo " --service-account-key-json ~/keys/service-account.json" echo - if [[ $# -lt 4 ]]; then - echo "Define all positional arguments." + if [[ $# -lt 3 ]]; then + echo "Error: Define all positional arguments." fi exit 1 fi @@ -46,6 +60,16 @@ POSITIONAL=() while [[ $# -gt 0 ]]; do key="$1" case $key in + --service-account) + SERVICE_ACCOUNT_EMAIL="$2" + shift + shift + ;; + --service-account-key-json) + GCP_SERVICE_ACCOUNT_KEY_JSON_FILE="${2/#\~/$HOME}" + shift + shift + ;; -l|--gcp-loc-dir) GCP_LOC_DIR="$2" shift @@ -56,6 +80,11 @@ while [[ $# -gt 0 ]]; do shift shift ;; + --gcp-compute-service-account) + GCP_COMPUTE_SERVICE_ACCOUNT="$2" + shift + shift + ;; --postgresql-db-ip) POSTGRESQL_DB_IP="$2" shift @@ -127,7 +156,7 @@ while [[ $# -gt 0 ]]; do shift ;; -*) - echo "Wrong parameter: $1." + echo "Unknown parameter: $1." shift exit 1 ;; @@ -144,8 +173,22 @@ set -- "${POSITIONAL[@]}" # parse pos args. INSTANCE_NAME="$1" GCP_PRJ="$2" -GCP_SERVICE_ACCOUNT_KEY_JSON_FILE="${3/#\~/$HOME}" -GCP_OUT_DIR="$4" +GCP_OUT_DIR="$3" + +# Determine authentication method +USE_VM_SERVICE_ACCOUNT=false +if [[ -n "$SERVICE_ACCOUNT_EMAIL" ]]; then + USE_VM_SERVICE_ACCOUNT=true + echo "Using VM-attached service account: $SERVICE_ACCOUNT_EMAIL" +elif [[ -n "$GCP_SERVICE_ACCOUNT_KEY_JSON_FILE" ]]; then + echo "WARNING: Using JSON key file authentication (not recommended for production)." + echo "Consider using --service-account for better security." +else + echo "Error: Must specify either --service-account or --service-account-key-json" + echo " --service-account EMAIL (recommended) Attach service account to VM" + echo " --service-account-key-json (legacy) Use JSON key file" + exit 1 +fi # set defaults for opt args. (caper) if [[ -z "$GCP_LOC_DIR" ]]; then @@ -187,7 +230,7 @@ if [[ -z "$BOOT_DISK_TYPE" ]]; then BOOT_DISK_TYPE=pd-standard fi if [[ -z "$IMAGE" ]]; then - IMAGE=ubuntu-2004-focal-v20220118 + IMAGE=ubuntu-2204-jammy-v20240119 fi if [[ -z "$IMAGE_PROJECT" ]]; then IMAGE_PROJECT=ubuntu-os-cloud @@ -196,10 +239,22 @@ if [[ -z "$TAGS" ]]; then TAGS=caper-server fi if [[ -z "$STARTUP_SCRIPT" ]]; then - STARTUP_SCRIPT=""" -sudo apt-get update -sudo apt-get -y install screen python3 python3-pip default-jre postgresql postgresql-contrib acl -""" + STARTUP_SCRIPT="$(cat <<'EOF' +apt-get update +apt-get -y install \ + screen \ + git \ + curl \ + openjdk-17-jre-headless \ + postgresql \ + postgresql-contrib \ + acl \ + software-properties-common + +# Install uv (official installer) and use it to provision Python 3.12 (no PPAs). +env UV_INSTALL_DIR=/usr/local/bin sh -c 'curl -LsSf https://astral.sh/uv/install.sh | sh' +EOF +)" fi # validate all args. 
@@ -215,8 +270,8 @@ if [[ "$GCP_LOC_DIR" != gs://* ]]; then echo "-l, --gcp-loc-dir should be a GCS bucket path starting with gs://" exit 1 fi -if [[ ! -f "$GCP_SERVICE_ACCOUNT_KEY_JSON_FILE" ]]; then - echo "[GCP_SERVICE_ACCOUNT_KEY_JSON_FILE] does not exists." +if [[ "$USE_VM_SERVICE_ACCOUNT" == false && ! -f "$GCP_SERVICE_ACCOUNT_KEY_JSON_FILE" ]]; then + echo "[GCP_SERVICE_ACCOUNT_KEY_JSON_FILE] does not exist: $GCP_SERVICE_ACCOUNT_KEY_JSON_FILE" exit 1 fi if [[ "$POSTGRESQL_DB_IP" == localhost && "$POSTGRESQL_DB_PORT" != 5432 ]]; then @@ -231,21 +286,8 @@ ROOT_CAPER_CONF_DIR=/root/.caper GLOBAL_CAPER_CONF_FILE="$CAPER_CONF_DIR/default.conf" REMOTE_KEY_FILE="$CAPER_CONF_DIR/service_account_key.json" -# prepend more init commands to the startup-script -STARTUP_SCRIPT="""#!/bin/bash -### make caper's directories -sudo mkdir -p $CAPER_CONF_DIR -sudo mkdir -p $CAPER_CONF_DIR/local_loc_dir $CAPER_CONF_DIR/local_out_dir - -### set default permission on caper's directories -sudo chmod 777 -R $CAPER_CONF_DIR -sudo setfacl -R -d -m u::rwX $CAPER_CONF_DIR -sudo setfacl -R -d -m g::rwX $CAPER_CONF_DIR -sudo setfacl -R -d -m o::rwX $CAPER_CONF_DIR - -### make caper conf file -cat < $GLOBAL_CAPER_CONF_FILE -# caper +# Build the caper config content based on authentication method +CAPER_CONFIG_CONTENT="# caper backend=gcp no-server-heartbeat=True # cromwell @@ -258,29 +300,73 @@ local-loc-dir=$CAPER_CONF_DIR/local_loc_dir gcp-prj=$GCP_PRJ gcp-region=$GCP_REGION gcp-out-dir=$GCP_OUT_DIR -gcp-loc-dir=$GCP_LOC_DIR -gcp-service-account-key-json=$REMOTE_KEY_FILE -use-google-cloud-life-sciences=True +gcp-loc-dir=$GCP_LOC_DIR" + +# Add compute service account if specified +if [[ -n "$GCP_COMPUTE_SERVICE_ACCOUNT" ]]; then + CAPER_CONFIG_CONTENT="$CAPER_CONFIG_CONTENT +gcp-compute-service-account=$GCP_COMPUTE_SERVICE_ACCOUNT" +fi + +# Add JSON key path only if using legacy authentication +if [[ "$USE_VM_SERVICE_ACCOUNT" == false ]]; then + CAPER_CONFIG_CONTENT="$CAPER_CONFIG_CONTENT +gcp-service-account-key-json=$REMOTE_KEY_FILE" +fi + +# Add database config +CAPER_CONFIG_CONTENT="$CAPER_CONFIG_CONTENT # metadata DB db=postgresql postgresql-db-ip=$POSTGRESQL_DB_IP postgresql-db-port=$POSTGRESQL_DB_PORT postgresql-db-user=$POSTGRESQL_DB_USER postgresql-db-password=$POSTGRESQL_DB_PASSWORD -postgresql-db-name=$POSTGRESQL_DB_NAME +postgresql-db-name=$POSTGRESQL_DB_NAME" + +# Build GCP auth script content based on authentication method +if [[ "$USE_VM_SERVICE_ACCOUNT" == true ]]; then + # VM-attached service account: no need for explicit auth, just set up symlinks + GCP_AUTH_SCRIPT_CONTENT='# Authentication via VM-attached service account (metadata server) +# No explicit credentials needed - using Application Default Credentials +mkdir -p ~/.caper +ln -sf /opt/caper/default.conf ~/.caper/ 2>/dev/null || true' +else + # Legacy JSON key file authentication + GCP_AUTH_SCRIPT_CONTENT="gcloud auth activate-service-account --key-file=$REMOTE_KEY_FILE +mkdir -p ~/.caper +ln -sf /opt/caper/default.conf ~/.caper/ 2>/dev/null || true +export GOOGLE_APPLICATION_CREDENTIALS=$REMOTE_KEY_FILE" +fi + +# prepend more init commands to the startup-script +STARTUP_SCRIPT="""#!/bin/bash +set -euo pipefail +### make caper's directories +mkdir -p $CAPER_CONF_DIR +mkdir -p $CAPER_CONF_DIR/local_loc_dir $CAPER_CONF_DIR/local_out_dir + +### set default permission on caper's directories +chmod 777 -R $CAPER_CONF_DIR +setfacl -R -d -m u::rwX $CAPER_CONF_DIR +setfacl -R -d -m g::rwX $CAPER_CONF_DIR +setfacl -R -d -m o::rwX $CAPER_CONF_DIR + 
+### make caper conf file +cat <<'EOF' > $GLOBAL_CAPER_CONF_FILE +$CAPER_CONFIG_CONTENT EOF -sudo chmod +r $GLOBAL_CAPER_CONF_FILE +chmod +r $GLOBAL_CAPER_CONF_FILE ### soft-link conf file for root -sudo mkdir -p $ROOT_CAPER_CONF_DIR -sudo ln -s $GLOBAL_CAPER_CONF_FILE $ROOT_CAPER_CONF_DIR +mkdir -p $ROOT_CAPER_CONF_DIR +ln -sf $GLOBAL_CAPER_CONF_FILE $ROOT_CAPER_CONF_DIR ### google auth shared for all users -sudo touch $GCP_AUTH_SH -echo \"gcloud auth activate-service-account --key-file=$REMOTE_KEY_FILE\" > $GCP_AUTH_SH -echo \"mkdir -p ~/.caper\" >> $GCP_AUTH_SH -echo \"ln -s /opt/caper/default.conf ~/.caper/ 2> /dev/null | true\" >> $GCP_AUTH_SH -echo \"export GOOGLE_APPLICATION_CREDENTIALS=$REMOTE_KEY_FILE\" >> $GCP_AUTH_SH +touch $GCP_AUTH_SH +cat <<'AUTHEOF' > $GCP_AUTH_SH +$GCP_AUTH_SCRIPT_CONTENT +AUTHEOF $STARTUP_SCRIPT """ @@ -289,50 +375,77 @@ $STARTUP_SCRIPT STARTUP_SCRIPT="""$STARTUP_SCRIPT ### init PostgreSQL for Cromwell sudo -u postgres createuser root -s -sudo createdb $POSTGRESQL_DB_NAME -sudo psql -d $POSTGRESQL_DB_NAME -c \"create extension lo;\" -sudo psql -d $POSTGRESQL_DB_NAME -c \"create role $POSTGRESQL_DB_USER with superuser login password '$POSTGRESQL_DB_PASSWORD'\" +createdb $POSTGRESQL_DB_NAME +psql -d $POSTGRESQL_DB_NAME -c \"create extension lo;\" +psql -d $POSTGRESQL_DB_NAME -c \"create role $POSTGRESQL_DB_USER with superuser login password '$POSTGRESQL_DB_PASSWORD'\" ### upgrade pip and install caper croo -sudo python3 -m pip install --upgrade pip -sudo pip install --ignore-installed caper croo +# Install CLI tools into isolated uv tool environments and link executables to /usr/local/bin. +mkdir -p /opt/caper/uv-tools +env UV_TOOL_DIR=/opt/caper/uv-tools UV_TOOL_BIN_DIR=/usr/local/bin \\ + /usr/local/bin/uv tool install --python 3.12 git+https://github.com/MoTrPAC/caper +env UV_TOOL_DIR=/opt/caper/uv-tools UV_TOOL_BIN_DIR=/usr/local/bin \\ + /usr/local/bin/uv tool install --python 3.12 croo """ -echo "$(date): Google auth with service account key file." -gcloud auth activate-service-account --key-file="$GCP_SERVICE_ACCOUNT_KEY_JSON_FILE" -export GOOGLE_APPLICATION_CREDENTIALS="$GCP_SERVICE_ACCOUNT_KEY_JSON_FILE" +# Authenticate locally if using JSON key file +if [[ "$USE_VM_SERVICE_ACCOUNT" == false ]]; then + echo "$(date): Google auth with service account key file." + gcloud auth activate-service-account --key-file="$GCP_SERVICE_ACCOUNT_KEY_JSON_FILE" + export GOOGLE_APPLICATION_CREDENTIALS="$GCP_SERVICE_ACCOUNT_KEY_JSON_FILE" +fi echo "$(date): Making a temporary startup script..." echo "$STARTUP_SCRIPT" > tmp_startup_script.sh echo "$(date): Creating an instance..." 
-gcloud --project "$GCP_PRJ" compute instances create \ - "$INSTANCE_NAME" \ - --boot-disk-size="$BOOT_DISK_SIZE" \ - --boot-disk-type="$BOOT_DISK_TYPE" \ - --machine-type="$MACHINE_TYPE" \ - --zone="$ZONE" \ - --image="$IMAGE" \ - --image-project="$IMAGE_PROJECT" \ - --tags="$TAGS" \ - --metadata-from-file startup-script=tmp_startup_script.sh +if [[ "$USE_VM_SERVICE_ACCOUNT" == true ]]; then + # Create VM with attached service account + gcloud --project "$GCP_PRJ" compute instances create \ + "$INSTANCE_NAME" \ + --boot-disk-size="$BOOT_DISK_SIZE" \ + --boot-disk-type="$BOOT_DISK_TYPE" \ + --machine-type="$MACHINE_TYPE" \ + --zone="$ZONE" \ + --image="$IMAGE" \ + --image-project="$IMAGE_PROJECT" \ + --tags="$TAGS" \ + --service-account="$SERVICE_ACCOUNT_EMAIL" \ + --scopes=cloud-platform \ + --metadata-from-file startup-script=tmp_startup_script.sh +else + # Create VM without attached service account (will use JSON key) + gcloud --project "$GCP_PRJ" compute instances create \ + "$INSTANCE_NAME" \ + --boot-disk-size="$BOOT_DISK_SIZE" \ + --boot-disk-type="$BOOT_DISK_TYPE" \ + --machine-type="$MACHINE_TYPE" \ + --zone="$ZONE" \ + --image="$IMAGE" \ + --image-project="$IMAGE_PROJECT" \ + --tags="$TAGS" \ + --metadata-from-file startup-script=tmp_startup_script.sh +fi echo "$(date): Created an instance successfully." echo "$(date): Deleting the temporary startup script..." rm -f tmp_startup_script.sh -while [[ $(gcloud --project "$GCP_PRJ" compute instances describe "${INSTANCE_NAME}" --zone "${ZONE}" --format="value(status)") -ne "RUNNING" ]]; do +while [[ $(gcloud --project "$GCP_PRJ" compute instances describe "${INSTANCE_NAME}" --zone "${ZONE}" --format="value(status)") != "RUNNING" ]]; do echo "$(date): Waiting for 20 seconds for the instance to spin up..." sleep 20 done -echo "$(date): If key file transfer fails for several times then manually transfer it to $REMOTE_KEY_FILE on the instance." -echo "$(date): Transferring service account key file to the instance..." -until gcloud --project "$GCP_PRJ" compute scp "$GCP_SERVICE_ACCOUNT_KEY_JSON_FILE" "$USERNAME"@"$INSTANCE_NAME":"$REMOTE_KEY_FILE" --zone="$ZONE"; do - echo "$(date): Key file transfer failed. Retrying in 20 seconds..." - sleep 20 -done -echo "$(date): Transferred a key file to instance successfully." +# Transfer key file only if using legacy authentication +if [[ "$USE_VM_SERVICE_ACCOUNT" == false ]]; then + echo "$(date): If key file transfer fails for several times then manually transfer it to $REMOTE_KEY_FILE on the instance." + echo "$(date): Transferring service account key file to the instance..." + until gcloud --project "$GCP_PRJ" compute scp "$GCP_SERVICE_ACCOUNT_KEY_JSON_FILE" "$USERNAME"@"$INSTANCE_NAME":"$REMOTE_KEY_FILE" --zone="$ZONE"; do + echo "$(date): Key file transfer failed. Retrying in 20 seconds..." + sleep 20 + done + echo "$(date): Transferred a key file to instance successfully." +fi echo "$(date): Waiting for the instance finishing up installing Caper..." until gcloud --project "$GCP_PRJ" compute ssh --zone="$ZONE" "$USERNAME"@"$INSTANCE_NAME" --command="caper -v"; do @@ -353,7 +466,15 @@ echo "$(date): Caper server is up and ready to take submissions." echo "$(date): You can find Caper server log file at $CAPER_CONF_DIR/caper_server.log." echo "$(date): Cromwell's STDERR will be written to $CAPER_CONF_DIR/cromwell.out*." 
echo +if [[ "$USE_VM_SERVICE_ACCOUNT" == true ]]; then + echo "$(date): Authentication: VM-attached service account ($SERVICE_ACCOUNT_EMAIL)" + echo "$(date): The VM uses Application Default Credentials via the metadata server." +else + echo "$(date): Authentication: JSON key file at $REMOTE_KEY_FILE" + echo "$(date): WARNING: Consider migrating to VM-attached service accounts for better security." +fi +echo echo "$(date): Use the following command line to SSH to the instance." echo -echo "gcloud beta compute ssh --zone $ZONE $INSTANCE_NAME --project $GCP_PRJ" +echo "gcloud compute ssh --zone $ZONE $INSTANCE_NAME --project $GCP_PRJ" echo diff --git a/setup.py b/setup.py deleted file mode 100644 index 64043f27..00000000 --- a/setup.py +++ /dev/null @@ -1,73 +0,0 @@ -import os -import re -from pathlib import Path - -import setuptools - -META_PATH = Path('caper', '__init__.py') -HERE = os.path.abspath(os.path.dirname(__file__)) - - -def read(*parts): - """ - Build an absolute path from *parts* and and return the contents of the - resulting file. Assume UTF-8 encoding. - """ - with Path(HERE, *parts).open(encoding='utf-8') as f: - return f.read() - - -META_FILE = read(META_PATH) - - -def find_meta(meta): - """ - Extract __*meta*__ from META_FILE. - """ - meta_match = re.search( - r"^__{meta}__ = ['\"]([^'\"]*)['\"]".format(meta=meta), META_FILE, re.M - ) - if meta_match: - return meta_match.group(1) - raise - - -with open('README.md', 'r') as fh: - long_description = fh.read() - -setuptools.setup( - name='caper', - version=find_meta('version'), - python_requires='>=3.6', - scripts=[ - 'bin/caper', - 'bin/run_mysql_server_docker.sh', - 'bin/run_mysql_server_singularity.sh', - 'scripts/gcp_caper_server/create_instance.sh', - ], - author='Jin Lee', - author_email='leepc12@gmail.com', - description='Cromwell Assisted Pipeline ExecutoR', - long_description='https://github.com/ENCODE-DCC/caper', - long_description_content_type='text/markdown', - url='https://github.com/ENCODE-DCC/caper', - packages=setuptools.find_packages(exclude=['mysql*', 'docs']), - classifiers=[ - 'Programming Language :: Python :: 3', - 'License :: OSI Approved :: MIT License', - 'Operating System :: POSIX :: Linux', - ], - install_requires=[ - 'pyhocon>=0.3.53', - 'requests>=2.20', - 'pyopenssl', - 'autouri>=0.4.4', - 'miniwdl>=0.7.0', - 'humanfriendly', - 'numpy>=1.8.2', - 'pandas>=1.0', - 'scikit-learn>=0.19.2', - 'matplotlib>=1.5', - 'six>=1.13.0', - ], -) diff --git a/tests/conftest.py b/tests/conftest.py index c849a60a..0bb05f05 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,21 +1,19 @@ #!/usr/bin/env python3 -""" -""" +"""Defines shared fixtures and custom CLI options for Caper's tests.""" + +import os + import pytest from caper.cromwell import Cromwell -def pytest_addoption(parser): - parser.addoption( - '--ci-prefix', default='default_ci_prefix', help='Prefix for CI test.' - ) +def pytest_addoption(parser) -> None: + parser.addoption('--ci-prefix', default='default_ci_prefix', help='Prefix for CI test.') parser.addoption( '--gcs-root', - default='gs://encode-test-caper', - help='GCS root path for CI test. ' - 'This GCS bucket must be publicly accessible ' - '(read access for everyone is enough for testing).', + default='gs://motrpac-test-caper', + help='GCS root path for CI test. ', ) parser.addoption( '--cromwell', @@ -27,11 +25,13 @@ def pytest_addoption(parser): default=Cromwell.DEFAULT_WOMTOOL, help='URI for Womtool JAR. 
Local path is recommended.', ) + parser.addoption('--gcp-prj', help='Project on Google Cloud Platform.') parser.addoption( - '--gcp-prj', default='encode-dcc-1016', help='Project on Google Cloud Platform.' - ) + '--gcp-service-account-key-json', help='JSON key file for GCP service account.' + ) parser.addoption( - '--gcp-service-account-key-json', help='JSON key file for GCP service account.' + '--gcp-compute-service-account', + help='Service account email to use for Google Compute Engine batch jobs.', ) parser.addoption( '--debug-caper', action='store_true', help='Debug-level logging for CLI tests.' @@ -46,7 +46,11 @@ def ci_prefix(request): @pytest.fixture(scope='session') def gcs_root(request): """GCS root to generate test GCS URIs on.""" - return request.config.getoption('--gcs-root').rstrip('/') + root = request.config.getoption('--gcs-root') + if not root.startswith('gs://'): + msg = f'GCS root must start with "gs://" but got {root}' + raise ValueError(msg) + return root.rstrip('/') @pytest.fixture(scope='session') @@ -61,7 +65,11 @@ def womtool(request): @pytest.fixture(scope='session') def gcp_prj(request): - return request.config.getoption('--gcp-prj') + project = request.config.getoption('--gcp-prj') or os.getenv('GOOGLE_CLOUD_PROJECT') + if project is None: + msg = 'Must supply --gcp-prj arg or set GOOGLE_CLOUD_PROJECT env variable' + raise ValueError(msg) + return project @pytest.fixture(scope='session') @@ -69,11 +77,16 @@ def gcp_service_account_key_json(request): return request.config.getoption('--gcp-service-account-key-json') +@pytest.fixture(scope='session') +def gcp_compute_service_account(request): + return request.config.getoption('--gcp-compute-service-account') + + @pytest.fixture(scope='session') def debug_caper(request): return request.config.getoption('--debug-caper') @pytest.fixture(scope='session') -def gcp_res_analysis_metadata(): - return 'gs://caper-data/gcp_resource_analysis/out/atac/e5eab444-cb6c-414a-a090-2c12417be542/metadata.json' +def gcp_res_analysis_metadata(gcs_root) -> str: + return f'{gcs_root}/resource_analysis/metadata.json' diff --git a/tests/example_wdl.py b/tests/example_wdl.py index fa14d40b..317a150b 100644 --- a/tests/example_wdl.py +++ b/tests/example_wdl.py @@ -10,6 +10,7 @@ sub_sub.wdl (imports nothing) inputs.json (inputs JSON file) """ + import json import os from textwrap import dedent @@ -155,10 +156,9 @@ ) -def make_directory_with_wdls(directory, no_sub_wdl=False): +def make_directory_with_wdls(directory, no_sub_wdl=False) -> None: """ - Run Cromwell with WDLs: - main + 1 sub + 1 sub's sub. + Run Cromwell with WDLs: main + 1 sub + 1 sub's sub. Returns: Created root directory @@ -177,10 +177,9 @@ def make_directory_with_wdls(directory, no_sub_wdl=False): AutoURI(sub_sub_wdl).write(SUB_SUB_WDL) -def make_directory_with_failing_wdls(directory, no_sub_wdl=False): +def make_directory_with_failing_wdls(directory, no_sub_wdl=False) -> None: """ - Run Cromwell with WDLs: - main + 1 sub (supposed to fail) + 1 sub's sub. + Run Cromwell with WDLs: main + 1 sub (supposed to fail) + 1 sub's sub. 
Returns: Created root directory diff --git a/tests/pytest.ini b/tests/pytest.ini deleted file mode 100644 index b0c36b2f..00000000 --- a/tests/pytest.ini +++ /dev/null @@ -1,4 +0,0 @@ -[pytest] -markers= - integration - google_cloud diff --git a/tests/test_arg_tool.py b/tests/test_arg_tool.py index fd2534bf..a9c4da12 100644 --- a/tests/test_arg_tool.py +++ b/tests/test_arg_tool.py @@ -79,7 +79,7 @@ def parser_with_subparsers(): return parser, [p_sub_a, p_sub_b] -def test_read_from_conf(tmp_path): +def test_read_from_conf(tmp_path) -> None: c = tmp_path / 'c1.conf' c.write_text(CONF_CONTENTS) @@ -102,8 +102,8 @@ def test_read_from_conf(tmp_path): d2 = read_from_conf(c, no_strip_quote=True) assert d2['param_wo_default'] == '"please_remove_double_quote"' assert d2['param_w_type_wo_default2'] == '"5.0"' - assert d2['flag_w_default2'] == '\'False\'' - assert d2['flag_wo_default'] == '\'FALSE\'' + assert d2['flag_w_default2'] == "'False'" + assert d2['flag_wo_default'] == "'FALSE'" assert d2['flag_wo_default2'] == '"True"' c2 = tmp_path / 'c2.conf' @@ -113,10 +113,8 @@ def test_read_from_conf(tmp_path): d2 = read_from_conf(c2) -def test_update_parsers_defaults_with_conf(tmp_path, parser_wo_subparsers): - """Check if this function correctly updates argparse parser's - default values. - """ +def test_update_parsers_defaults_with_conf(tmp_path, parser_wo_subparsers) -> None: + """Check if this function correctly updates argparse parser's default values.""" val_type = {'param_w_type_wo_default2': float} val_default = {'param_w_type_wo_default3': 'hello', 'param_w_int_default3': 50} @@ -156,10 +154,8 @@ def test_update_parsers_defaults_with_conf(tmp_path, parser_wo_subparsers): def test_update_parsers_defaults_with_conf_with_subparsers( tmp_path, parser_with_subparsers -): - """Check if this function correctly updates argparse parser's - default values. - """ +) -> None: + """Check if this function correctly updates argparse parser's default values.""" p, subparsers = parser_with_subparsers c1 = tmp_path / 'c1.conf' diff --git a/tests/test_caper_labels.py b/tests/test_caper_labels.py index edb8d1d1..5b2820a0 100644 --- a/tests/test_caper_labels.py +++ b/tests/test_caper_labels.py @@ -4,7 +4,7 @@ from caper.caper_labels import CaperLabels -def test_create_file(tmp_path): +def test_create_file(tmp_path) -> None: cl = CaperLabels() backend = 'my_backend' diff --git a/tests/test_caper_wdl_parser.py b/tests/test_caper_wdl_parser.py index e5f54742..3eb6950c 100644 --- a/tests/test_caper_wdl_parser.py +++ b/tests/test_caper_wdl_parser.py @@ -27,10 +27,10 @@ ) -def test_properties(tmp_path): +def test_properties(tmp_path) -> None: """Test the following properties. - - caper_docker - - caper_singularity + - default_docker + - default_singularity. 
""" main_wdl = tmp_path / 'main.wdl' main_wdl.write_text(WDL_CONTENTS) @@ -40,10 +40,10 @@ def test_properties(tmp_path): # test reading from workflow.meta main = CaperWDLParser(str(main_wdl)) - assert main.caper_docker == 'ubuntu:latest' - assert main.caper_singularity == 'docker://ubuntu:latest' + assert main.default_docker == 'ubuntu:latest' + assert main.default_singularity == 'docker://ubuntu:latest' # test reading from comments (old-style) old = CaperWDLParser(str(old_wdl)) - assert old.caper_docker == 'ubuntu:latest' - assert old.caper_singularity == 'docker://ubuntu:latest' + assert old.default_docker == 'ubuntu:latest' + assert old.default_singularity == 'docker://ubuntu:latest' diff --git a/tests/test_caper_workflow_opts.py b/tests/test_caper_workflow_opts.py index 10a29a8a..ca145640 100644 --- a/tests/test_caper_workflow_opts.py +++ b/tests/test_caper_workflow_opts.py @@ -5,12 +5,11 @@ import pytest from caper.caper_workflow_opts import CaperWorkflowOpts -from caper.cromwell_backend import BACKEND_AWS, BACKEND_GCP +from caper.cromwell_backend import BackendProvider -def test_create_file(tmp_path): +def test_create_file(tmp_path) -> None: """Test without docker/singularity.""" - use_google_cloud_life_sciences = False gcp_zones = ['us-west-1', 'us-west-2'] slurm_partition = 'my_partition' slurm_account = 'my_account' @@ -24,7 +23,6 @@ def test_create_file(tmp_path): lsf_extra_param = 'my_extra_param' co = CaperWorkflowOpts( - use_google_cloud_life_sciences=use_google_cloud_life_sciences, gcp_zones=gcp_zones, slurm_partition=slurm_partition, slurm_account=slurm_account, @@ -48,9 +46,7 @@ def test_create_file(tmp_path): custom_options = tmp_path / 'my_custom_options.json' custom_options_dict = { 'backend': 'world', - CaperWorkflowOpts.DEFAULT_RUNTIME_ATTRIBUTES: { - 'slurm_partition': 'not_my_partition' - }, + CaperWorkflowOpts.DEFAULT_RUNTIME_ATTRIBUTES: {'slurm_partition': 'not_my_partition'}, } custom_options.write_text(json.dumps(custom_options_dict, indent=4)) @@ -113,13 +109,11 @@ def test_create_file(tmp_path): assert d['monitoring_script'] == gcp_monitoring_script -def test_create_file_with_google_cloud_life_sciences(tmp_path): - """Test with use_google_cloud_life_sciences flag. - zones should not be written to dra. - """ +def test_create_file_google_cloud(tmp_path) -> None: + """Test with Batch. 
zones should be written to default_runtime_attributes.""" gcp_zones = ['us-west-1', 'us-west-2'] - co = CaperWorkflowOpts(use_google_cloud_life_sciences=True, gcp_zones=gcp_zones) + co = CaperWorkflowOpts(gcp_zones=gcp_zones) wdl = tmp_path / 'test.wdl' wdl.write_text('') @@ -130,10 +124,10 @@ def test_create_file_with_google_cloud_life_sciences(tmp_path): d = json.loads(fp.read()) dra = d[CaperWorkflowOpts.DEFAULT_RUNTIME_ATTRIBUTES] - assert 'zones' not in dra + assert 'zones' in dra -def test_create_file_docker(tmp_path): +def test_create_file_docker(tmp_path) -> None: """Test with docker and docker defined in WDL.""" wdl_contents = dedent( """\ @@ -155,7 +149,7 @@ def test_create_file_docker(tmp_path): f_gcp = co.create_file( directory=str(tmp_path), wdl=str(wdl), - backend=BACKEND_GCP, + backend=BackendProvider.GCP, basename='opts_gcp.json', ) with open(f_gcp) as fp: @@ -167,7 +161,7 @@ def test_create_file_docker(tmp_path): f_aws = co.create_file( directory=str(tmp_path), wdl=str(wdl), - backend=BACKEND_AWS, + backend=BackendProvider.AWS, basename='opts_aws.json', ) with open(f_aws) as fp: @@ -202,7 +196,7 @@ def test_create_file_docker(tmp_path): assert dra_local2['docker'] == 'ubuntu:16' -def test_create_file_singularity(tmp_path): +def test_create_file_singularity(tmp_path) -> None: """Test with singularity and singularity defined in WDL.""" wdl_contents = dedent( """\ @@ -225,7 +219,7 @@ def test_create_file_singularity(tmp_path): f_gcp = co.create_file( directory=str(tmp_path), wdl=str(wdl), - backend=BACKEND_GCP, + backend=BackendProvider.GCP, basename='opts_gcp.json', ) with open(f_gcp) as fp: @@ -237,7 +231,7 @@ def test_create_file_singularity(tmp_path): f_aws = co.create_file( directory=str(tmp_path), wdl=str(wdl), - backend=BACKEND_AWS, + backend=BackendProvider.AWS, basename='opts_aws.json', ) with open(f_aws) as fp: @@ -250,7 +244,7 @@ def test_create_file_singularity(tmp_path): co.create_file( directory=str(tmp_path), wdl=str(wdl), - backend=BACKEND_GCP, + backend=BackendProvider.GCP, singularity='', basename='opts_gcp2.json', ) @@ -258,7 +252,7 @@ def test_create_file_singularity(tmp_path): co.create_file( directory=str(tmp_path), wdl=str(wdl), - backend=BACKEND_AWS, + backend=BackendProvider.AWS, singularity='', basename='opts_aws2.json', ) diff --git a/tests/test_cli_run.py b/tests/test_cli_run.py index 4634f1a1..09fa0220 100644 --- a/tests/test_cli_run.py +++ b/tests/test_cli_run.py @@ -8,6 +8,7 @@ We will use gcp (Google Cloud Platform) backend to test server-client functions. 
""" + import json import os @@ -21,7 +22,7 @@ from .example_wdl import make_directory_with_wdls -def test_wrong_subcmd(): +def test_wrong_subcmd() -> None: cmd = ['wrong_subcmd'] with pytest.raises(SystemExit): cli_main(cmd) @@ -38,16 +39,17 @@ def test_wrong_subcmd(): ['--docker', 'ubuntu:latest', '--soft-glob-output'], ], ) -def test_mutually_exclusive_params(tmp_path, cmd): +def test_mutually_exclusive_params(tmp_path, cmd) -> None: make_directory_with_wdls(str(tmp_path)) - cmd = ['run', str(tmp_path / 'main.wdl')] + cmd + cmd = ['run', str(tmp_path / 'main.wdl'), *cmd] with pytest.raises(ValueError): cli_main(cmd) +@pytest.mark.slow @pytest.mark.integration -def test_run(tmp_path, cromwell, womtool, debug_caper): +def test_run(tmp_path, cromwell, womtool, debug_caper) -> None: """Will test most local parameters (run only) here.""" make_directory_with_wdls(str(tmp_path)) wdl = tmp_path / 'main.wdl' @@ -93,7 +95,8 @@ def test_run(tmp_path, cromwell, womtool, debug_caper): cm = CromwellMetadata(str(tmp_path / 'metadata.json')) # check if metadata JSON and workflowRoot dir exists root_out_dir = cm.data['workflowRoot'] - assert os.path.exists(root_out_dir) and os.path.isdir(root_out_dir) + assert os.path.exists(root_out_dir) + assert os.path.isdir(root_out_dir) # dry-run should not delete anything cm.cleanup(dry_run=True) @@ -103,9 +106,10 @@ def test_run(tmp_path, cromwell, womtool, debug_caper): assert not os.path.exists(root_out_dir) +@pytest.mark.slow @pytest.mark.google_cloud @pytest.mark.integration -def test_run_gcp_with_life_sciences_api( +def test_run_gcp_batch_api( tmp_path, gcs_root, ci_prefix, @@ -113,9 +117,10 @@ def test_run_gcp_with_life_sciences_api( womtool, gcp_prj, gcp_service_account_key_json, + gcp_compute_service_account, debug_caper, -): - """Test run with Google Cloud Life Sciences API""" +) -> None: + """Test run with Google Cloud Batch API.""" out_gcs_bucket = os.path.join(gcs_root, 'caper_out', ci_prefix) tmp_gcs_bucket = os.path.join(gcs_root, 'caper_tmp') @@ -130,7 +135,8 @@ def test_run_gcp_with_life_sciences_api( cmd += ['-m', str(metadata)] if gcp_service_account_key_json: cmd += ['--gcp-service-account-key-json', gcp_service_account_key_json] - cmd += ['--use-google-cloud-life-sciences'] + if gcp_compute_service_account: + cmd += ['--gcp-compute-service-account', gcp_compute_service_account] cmd += ['--gcp-region', 'us-central1'] # --gcp-zones should be ignored cmd += ['--gcp-zones', 'us-west1-a,us-west1-b'] @@ -155,7 +161,6 @@ def test_run_gcp_with_life_sciences_api( cmd += ['--docker', 'ubuntu:latest'] if debug_caper: cmd += ['--debug'] - print(' '.join(cmd)) cli_main(cmd) m_dict = json.loads(metadata.read_text()) diff --git a/tests/test_cli_server_client_gcp.py b/tests/test_cli_server_client_gcp.py index c2b42e54..f3edbbdd 100644 --- a/tests/test_cli_server_client_gcp.py +++ b/tests/test_cli_server_client_gcp.py @@ -1,6 +1,7 @@ """This does not cover all CLI parameters defined in caper/caper_args.py. gcp (Google Cloud Platform) backend is tested here with server/client functions. 
""" + import os import time @@ -17,6 +18,7 @@ TIMEOUT_SERVER_RUN_WORKFLOW = 960 +@pytest.mark.slow @pytest.mark.google_cloud @pytest.mark.integration def test_server_client( @@ -27,9 +29,10 @@ def test_server_client( womtool, gcp_prj, gcp_service_account_key_json, + gcp_compute_service_account, debug_caper, -): - """Test server, client stuffs""" +) -> None: + """Test server, client stuffs.""" # server command line server_port = 8015 @@ -41,6 +44,8 @@ def test_server_client( cmd += ['--backend', 'gcp'] if gcp_service_account_key_json: cmd += ['--gcp-service-account-key-json', gcp_service_account_key_json] + if gcp_compute_service_account: + cmd += ['--gcp-compute-service-account', gcp_compute_service_account] cmd += ['--gcp-prj', gcp_prj] cmd += ['--gcp-zones', 'us-west1-a,us-west1-b'] cmd += ['--gcp-out-dir', out_gcs_bucket] @@ -59,7 +64,6 @@ def test_server_client( cmd += ['--port', str(server_port)] if debug_caper: cmd += ['--debug'] - print(' '.join(cmd)) try: th = cli_main(cmd, nonblocking_server=True) @@ -69,7 +73,8 @@ def test_server_client( while th.status is None: time.sleep(1) if time.time() - t_start > TIMEOUT_SERVER_SPIN_UP: - raise TimeoutError('Timed out waiting for Cromwell server spin-up.') + msg = 'Timed out waiting for Cromwell server spin-up.' + raise TimeoutError(msg) # prepare WDLs and input JSON, imports to be submitted make_directory_with_wdls(str(tmp_path)) @@ -125,7 +130,6 @@ def test_server_client( metadata_json_file = os.path.join(workflow_root, 'metadata.json') else: metadata_json_file = None - print('polling: ', workflow_id, m['status'], metadata_json_file) if m['status'] in ('Failed', 'Succeeded'): if AutoURI(metadata_json_file).exists: @@ -134,7 +138,8 @@ def test_server_client( assert not AutoURI(metadata_json_file).exists if time.time() - t_start > TIMEOUT_SERVER_RUN_WORKFLOW: - raise TimeoutError('Timed out waiting for workflow being done.') + msg = 'Timed out waiting for workflow being done.' + raise TimeoutError(msg) finally: # all done. 
so stop the server diff --git a/tests/test_cromwell.py b/tests/test_cromwell.py index bd2276e7..2db494ad 100644 --- a/tests/test_cromwell.py +++ b/tests/test_cromwell.py @@ -13,8 +13,10 @@ BACKEND_CONF_CONTENTS = """ backend {{ + default = "Local" providers {{ Local {{ + actor-factory = "cromwell.backend.impl.sfs.config.ConfigBackendLifecycleActorFactory" config {{ root = {root} }} @@ -27,7 +29,7 @@ TIMEOUT_SERVER_RUN_WORKFLOW = 960 -def test_validate(tmp_path, cromwell, womtool): +def test_validate(tmp_path, cromwell, womtool) -> None: c = Cromwell(cromwell=cromwell, womtool=womtool) wdl = tmp_path / 'wrong.wdl' @@ -51,7 +53,8 @@ def test_validate(tmp_path, cromwell, womtool): c.validate(str(wdl), str(inputs), imports) -def test_run(tmp_path, cromwell, womtool): +@pytest.mark.slow +def test_run(tmp_path, cromwell, womtool) -> None: fileobj_stdout = sys.stdout c = Cromwell(cromwell=cromwell, womtool=womtool) @@ -114,7 +117,8 @@ def test_run(tmp_path, cromwell, womtool): assert th.returncode == 0 -def test_server(tmp_path, cromwell, womtool): +@pytest.mark.slow +def test_server(tmp_path, cromwell, womtool) -> None: """Test Cromwell.server() method, which returns a Thread object.""" server_port = 8005 fileobj_stdout = sys.stdout @@ -129,21 +133,20 @@ def test_server(tmp_path, cromwell, womtool): is_server_started = False - def on_server_start(): + def on_server_start() -> None: nonlocal is_server_started is_server_started = True workflow_id = None is_workflow_done = False - def on_status_change(metadata): + def on_status_change(metadata) -> None: nonlocal workflow_id nonlocal is_workflow_done - if metadata: - if metadata['id'] == workflow_id: - if metadata['status'] in ('Succeeded', 'Failed'): - is_workflow_done = True + if metadata and metadata['id'] == workflow_id: + if metadata['status'] in ('Succeeded', 'Failed'): + is_workflow_done = True # also tests two callback functions try: @@ -163,7 +166,8 @@ def on_status_change(metadata): while not is_server_started: time.sleep(1) if time.time() - t_start > TIMEOUT_SERVER_SPIN_UP: - raise TimeoutError('Timed out waiting for Cromwell server spin-up.') + msg = 'Timed out waiting for Cromwell server spin-up.' + raise TimeoutError(msg) # another way of checking server is started assert th.status @@ -184,9 +188,9 @@ def on_status_change(metadata): t_start = time.time() while not is_workflow_done: time.sleep(1) - print('polling: ', workflow_id, is_workflow_done) if time.time() - t_start > TIMEOUT_SERVER_RUN_WORKFLOW: - raise TimeoutError('Timed out waiting for workflow being done.') + msg = 'Timed out waiting for workflow being done.' + raise TimeoutError(msg) metadata = cra.get_metadata([workflow_id], embed_subworkflow=True)[0] diff --git a/tests/test_cromwell_backend.py b/tests/test_cromwell_backend.py index 8e776523..dd8e63c4 100644 --- a/tests/test_cromwell_backend.py +++ b/tests/test_cromwell_backend.py @@ -1,14 +1,15 @@ """There are lots of UserDict-based classesi n caper/cromwell_backend.py In this test, only the followings classes with public methods will be tested. - - CromwellBackendBase + - CromwellBackendBase. 
""" + from caper.cromwell_backend import CromwellBackendBase -def test_cromwell_backend_base_backend(): - """Test a property backend's getter, setter""" +def test_cromwell_backend_base_backend() -> None: + """Test a property backend's getter, setter.""" bb1 = CromwellBackendBase('test1') backend_dict = {'a': 1, 'b': '2'} @@ -16,7 +17,7 @@ def test_cromwell_backend_base_backend(): assert bb1.backend == backend_dict -def test_cromwell_backend_base_merge_backend(): +def test_cromwell_backend_base_merge_backend() -> None: bb1 = CromwellBackendBase('test1') bb1.backend = {'a': 1, 'b': '2'} backend_dict = {'c': 3.0, 'd': '4.0'} @@ -25,13 +26,13 @@ def test_cromwell_backend_base_merge_backend(): assert bb1.backend == {'a': 1, 'b': '2', 'c': 3.0, 'd': '4.0'} -def test_cromwell_backend_base_backend_config(): +def test_cromwell_backend_base_backend_config() -> None: bb1 = CromwellBackendBase('test1') bb1.backend = {'config': {'root': 'test/folder'}} assert bb1.backend_config == {'root': 'test/folder'} -def test_cromwell_backend_base_backend_config_dra(): +def test_cromwell_backend_base_backend_config_dra() -> None: bb1 = CromwellBackendBase('test1') bb1.backend = { 'config': { @@ -40,3 +41,119 @@ def test_cromwell_backend_base_backend_config_dra(): } } assert bb1.default_runtime_attributes == {'docker': 'ubuntu:latest'} + + +def test_cromwell_backend_gcp_with_network_and_subnetwork() -> None: + """Test GCP backend with network and subnetwork specified.""" + from caper.cromwell_backend import CromwellBackendGcp + + gcp = CromwellBackendGcp( + gcp_prj='test-project', + gcp_out_dir='gs://test-bucket/output', + gcp_network='my-vpc', + gcp_subnetwork='my-subnet', + ) + config = gcp.backend_config + assert 'virtual-private-cloud' in config + assert config['virtual-private-cloud']['network-name'] == 'my-vpc' + assert config['virtual-private-cloud']['subnetwork-name'] == 'my-subnet' + + +def test_cromwell_backend_gcp_with_network_only() -> None: + """Test GCP backend with only network specified.""" + from caper.cromwell_backend import CromwellBackendGcp + + gcp = CromwellBackendGcp( + gcp_prj='test-project', + gcp_out_dir='gs://test-bucket/output', + gcp_network='my-vpc', + ) + config = gcp.backend_config + assert 'virtual-private-cloud' in config + assert config['virtual-private-cloud']['network-name'] == 'my-vpc' + assert 'subnetwork-name' not in config['virtual-private-cloud'] + + +def test_cromwell_backend_gcp_with_subnetwork_only() -> None: + """Test GCP backend with only subnetwork specified.""" + from caper.cromwell_backend import CromwellBackendGcp + + gcp = CromwellBackendGcp( + gcp_prj='test-project', + gcp_out_dir='gs://test-bucket/output', + gcp_subnetwork='my-subnet', + ) + config = gcp.backend_config + assert 'virtual-private-cloud' in config + assert config['virtual-private-cloud']['subnetwork-name'] == 'my-subnet' + assert 'network-name' not in config['virtual-private-cloud'] + + +def test_cromwell_backend_gcp_without_network_config() -> None: + """Test GCP backend without network configuration (default behavior).""" + from caper.cromwell_backend import CromwellBackendGcp + + gcp = CromwellBackendGcp( + gcp_prj='test-project', + gcp_out_dir='gs://test-bucket/output', + ) + config = gcp.backend_config + assert 'virtual-private-cloud' not in config + + +def test_cromwell_backend_gcp_with_dockerhub_mirror() -> None: + """Test GCP backend with Docker Hub mirroring enabled.""" + from caper.cromwell_backend import CromwellBackendGcp + + gcp = CromwellBackendGcp( + gcp_prj='test-project', + 
gcp_out_dir='gs://test-bucket/output', + gcp_dockerhub_mirror=True, + ) + config = gcp.backend_config + assert 'docker-mirror' in config + assert config['docker-mirror']['dockerhub']['enabled'] is True + assert config['docker-mirror']['dockerhub']['address'] == 'mirror.gcr.io' + + +def test_cromwell_backend_gcp_with_dockerhub_mirror_custom_address() -> None: + """Test GCP backend with Docker Hub mirroring and custom address.""" + from caper.cromwell_backend import CromwellBackendGcp + + gcp = CromwellBackendGcp( + gcp_prj='test-project', + gcp_out_dir='gs://test-bucket/output', + gcp_dockerhub_mirror=True, + gcp_dockerhub_mirror_address='custom-mirror.example.com', + ) + config = gcp.backend_config + assert 'docker-mirror' in config + assert config['docker-mirror']['dockerhub']['enabled'] is True + assert config['docker-mirror']['dockerhub']['address'] == 'custom-mirror.example.com' + + +def test_cromwell_backend_gcp_without_dockerhub_mirror() -> None: + """Test GCP backend with Docker Hub mirroring explicitly disabled.""" + from caper.cromwell_backend import CromwellBackendGcp + + gcp = CromwellBackendGcp( + gcp_prj='test-project', + gcp_out_dir='gs://test-bucket/output', + gcp_dockerhub_mirror=False, + ) + config = gcp.backend_config + assert 'docker-mirror' not in config + + +def test_cromwell_backend_gcp_default_has_dockerhub_mirror() -> None: + """Test GCP backend has Docker Hub mirroring enabled by default.""" + from caper.cromwell_backend import CromwellBackendGcp + + gcp = CromwellBackendGcp( + gcp_prj='test-project', + gcp_out_dir='gs://test-bucket/output', + ) + config = gcp.backend_config + assert 'docker-mirror' in config + assert config['docker-mirror']['dockerhub']['enabled'] is True + assert config['docker-mirror']['dockerhub']['address'] == 'mirror.gcr.io' diff --git a/tests/test_cromwell_metadata.py b/tests/test_cromwell_metadata.py index 992eda5d..0c8e4cf2 100644 --- a/tests/test_cromwell_metadata.py +++ b/tests/test_cromwell_metadata.py @@ -1,6 +1,8 @@ +from typing import Any +import pytest import os import sys - +from pathlib import Path from autouri import AutoURI from caper.cromwell import Cromwell @@ -9,7 +11,8 @@ from .example_wdl import make_directory_with_failing_wdls, make_directory_with_wdls -def test_on_successful_workflow(tmp_path, cromwell, womtool): +@pytest.mark.slow +def test_on_successful_workflow(tmp_path: Path, cromwell: str, womtool: str) -> None: fileobj_stdout = sys.stdout make_directory_with_wdls(str(tmp_path / 'successful')) @@ -22,6 +25,7 @@ def test_on_successful_workflow(tmp_path, cromwell, womtool): fileobj_stdout=fileobj_stdout, cwd=str(tmp_path / 'successful'), ) + assert th is not None th.join() metadata = th.returnvalue assert metadata @@ -41,7 +45,7 @@ def test_on_successful_workflow(tmp_path, cromwell, womtool): ) # test recurse_calls(): test with a simple function - def fnc(call_name, call, parent_call_names): + def fnc(call_name: str, call: dict[str, Any], parent_call_names: tuple[str, ...]) -> None: assert call_name in ('main.t1', 'sub.t2', 'sub_sub.t3') assert call['executionStatus'] == 'Done' if call_name == 'main.t1': @@ -51,7 +55,8 @@ def fnc(call_name, call, parent_call_names): elif call_name == 'sub_sub.t3': assert parent_call_names == ('main.sub', 'sub.sub_sub') else: - raise ValueError('Wrong call_name: {name}'.format(name=call_name)) + msg = f'Wrong call_name: {call_name}' + raise ValueError(msg) list(cm.recurse_calls(fnc)) @@ -66,7 +71,8 @@ def fnc(call_name, call, parent_call_names): assert 
CromwellMetadata(m_file_on_root).metadata == cm.metadata -def test_on_failed_workflow(tmp_path, cromwell, womtool): +@pytest.mark.slow +def test_on_failed_workflow(tmp_path: Path, cromwell: str, womtool: str) -> None: fileobj_stdout = sys.stdout make_directory_with_failing_wdls(str(tmp_path / 'failed')) @@ -80,6 +86,7 @@ def test_on_failed_workflow(tmp_path, cromwell, womtool): fileobj_stdout=fileobj_stdout, cwd=str(tmp_path / 'failed'), ) + assert th is not None th.join() # check failed diff --git a/tests/test_cromwell_rest_api.py b/tests/test_cromwell_rest_api.py index 07f69668..4b54530f 100644 --- a/tests/test_cromwell_rest_api.py +++ b/tests/test_cromwell_rest_api.py @@ -12,7 +12,7 @@ @pytest.mark.parametrize( - 'test_input,expected', + ('test_input', 'expected'), [ ('asldkhjlkasdf289jisdl;sladkjasdflksd', False), ('cromwell-f9c26f2e-f550-4748-a650-5d0d4cab9f3a', False), @@ -21,16 +21,16 @@ ('F9C26f2e-F550-4748-A650-5D0D4cab9f3a', False), ('f9c26f2e', False), ([], False), - (tuple(), False), + ((), False), (None, False), ], ) -def test_is_valid_uuid(test_input, expected): +def test_is_valid_uuid(test_input, expected) -> None: assert is_valid_uuid(test_input) == expected @pytest.mark.parametrize( - 'test_input,expected', + ('test_input', 'expected'), [ ('?????', True), (('lskadfj', 'sdkfjaslf'), False), @@ -40,15 +40,16 @@ def test_is_valid_uuid(test_input, expected): (('*', '?'), True), (('_', '-', 'asdfjkljklasdfjklasdf'), False), ([], False), - (tuple(), False), + ((), False), (None, False), ], ) -def test_has_wildcard(test_input, expected): +def test_has_wildcard(test_input, expected) -> None: assert has_wildcard(test_input) == expected -def test_all(tmp_path, cromwell, womtool): +@pytest.mark.slow +def test_all(tmp_path, cromwell, womtool) -> None: """Test Cromwell.server() method, which returns a Thread object.""" server_port = 8010 fileobj_stdout = sys.stdout @@ -59,27 +60,24 @@ def test_all(tmp_path, cromwell, womtool): o_dir = tmp_path / 'output' o_dir.mkdir() - labels_file = CaperLabels().create_file( - directory=str(tmp_path), str_label=test_label - ) + labels_file = CaperLabels().create_file(directory=str(tmp_path), str_label=test_label) is_server_started = False - def on_server_start(): + def on_server_start() -> None: nonlocal is_server_started is_server_started = True workflow_id = None is_workflow_done = False - def on_status_change(metadata): + def on_status_change(metadata) -> None: nonlocal workflow_id nonlocal is_workflow_done - if metadata: - if metadata['id'] == workflow_id: - if metadata['status'] in ('Succeeded', 'Failed'): - is_workflow_done = True + if metadata and metadata['id'] == workflow_id: + if metadata['status'] in ('Succeeded', 'Failed'): + is_workflow_done = True # also tests two callback functions try: @@ -98,7 +96,8 @@ def on_status_change(metadata): while not is_server_started: time.sleep(1) if time.time() - t_start > 60: - raise TimeoutError('Timed out waiting for Cromwell server spin-up.') + msg = 'Timed out waiting for Cromwell server spin-up.' + raise TimeoutError(msg) # another way of checking server is started assert th.status @@ -129,9 +128,7 @@ def on_status_change(metadata): # find by label workflow_by_label = cra.find(labels=[('caper-str-label', test_label)])[0] # find by workflow ID with wildcard * - workflow_by_id_with_wildcard = cra.find(workflow_ids=[workflow_id[:-10] + '*'])[ - 0 - ] + workflow_by_id_with_wildcard = cra.find(workflow_ids=[workflow_id[:-10] + '*'])[0] # find by label with wildcard ? 
workflow_by_label_with_wildcard = cra.find( labels=[('caper-str-label', test_label[:-1] + '?')] @@ -169,9 +166,9 @@ def on_status_change(metadata): t_start = time.time() while not is_workflow_done: time.sleep(1) - print('polling: ', workflow_id, is_workflow_done) if time.time() - t_start > 120: - raise TimeoutError('Timed out waiting for workflow being done.') + msg = 'Timed out waiting for workflow being done.' + raise TimeoutError(msg) metadata = cra.get_metadata([workflow_id], embed_subworkflow=True)[0] metadata_wo_sub = cra.get_metadata([workflow_id], embed_subworkflow=False)[0] diff --git a/tests/test_dict_tool.py b/tests/test_dict_tool.py index 459172f8..51bc6e71 100644 --- a/tests/test_dict_tool.py +++ b/tests/test_dict_tool.py @@ -9,7 +9,7 @@ ) -def test_merge_dict(): +def test_merge_dict() -> None: d1 = { 'flagstat_qc': {'rep1': {'read1': 100}, 'rep2': {'read2': 400}}, 'etc': {'samstat_qc': {'rep1': {'unmapped': 500, 'mapped': 600}}}, @@ -35,7 +35,7 @@ def test_merge_dict(): } -def test_flatten_dict(): +def test_flatten_dict() -> None: d = { 'flagstat_qc': { 'rep1': {'read1': 100, 'read2': 200}, @@ -52,7 +52,7 @@ def test_flatten_dict(): } -def test_unflatten_dict(): +def test_unflatten_dict() -> None: d_f = { ('flagstat_qc', 'rep1', 'read1'): 100, ('flagstat_qc', 'rep1', 'read2'): 200, @@ -69,7 +69,7 @@ def test_unflatten_dict(): } -def test_split_dict(): +def test_split_dict() -> None: d = { 'flagstat_qc': { 'rep1': {'read1': 100, 'read2': 200}, @@ -100,7 +100,7 @@ def test_split_dict(): assert splits == splits_ref -def test_dict_to_dot_str(): +def test_dict_to_dot_str() -> None: d = { 'rankDir': 'TD', 'start': '[shape=Mdiamond]', diff --git a/tests/test_hocon_string.py b/tests/test_hocon_string.py index bb2d6268..b8621aea 100644 --- a/tests/test_hocon_string.py +++ b/tests/test_hocon_string.py @@ -8,13 +8,13 @@ def get_test_hocon_str(): - hocon_str = dedent( + return dedent( """\ include required(classpath("application")) backend { default = "gcp" providers { - Local { + local { actor-factory = "cromwell.backend.impl.sfs.config.ConfigBackendLifecycleActorFactory" config { default-runtime-attributes { @@ -26,11 +26,10 @@ def get_test_hocon_str(): } }""" ) - return hocon_str def get_test_hocon_str2(): - hocon_str2 = dedent( + return dedent( """\ include required(classpath("application")) backend { @@ -41,7 +40,6 @@ def get_test_hocon_str2(): } }""" ) - return hocon_str2 def get_test_hocon_str_multiple_includes(): @@ -74,7 +72,7 @@ def get_test_dict(with_include=False): 'backend': { 'default': 'gcp', 'providers': { - 'Local': { + 'local': { 'actor-factory': 'cromwell.backend.impl.sfs.config.ConfigBackendLifecycleActorFactory', 'config': { 'default-runtime-attributes': {'docker': 'ubuntu:latest'}, @@ -85,9 +83,9 @@ def get_test_dict(with_include=False): } } if with_include: - base_dict[ - 'HOCONSTRING_INCLUDE_ad5c3c187d5107c099f66681f1896c70' - ] = 'include required(classpath("application"))' + base_dict['HOCONSTRING_INCLUDE_ad5c3c187d5107c099f66681f1896c70'] = ( + 'include required(classpath("application"))' + ) return base_dict @@ -100,60 +98,53 @@ def get_test_dict2(): def get_test_multiple_includes(with_include=False): if with_include: return { - "HOCONSTRING_INCLUDE_ad5c3c187d5107c099f66681f1896c70": "include required(classpath(\"application\"))", - "HOCONSTRING_INCLUDE_61b86ce2e19939719a2e043b923774e4": "include required(file(\"application\"))", - "HOCONSTRING_INCLUDE_543d042c69d8a730bc2b5785ac2f13c9": "include required(url(\"application\"))", - 
"HOCONSTRING_INCLUDE_9456b859a44adad9a3d00ff3fcbbc5ae": "include required(\"application.conf\")", - "level1": { - "HOCONSTRING_INCLUDE_0714deb341d3d6291199d4738656c32b": "include file(\"/srv/test.conf\")", - "level2": { - "HOCONSTRING_INCLUDE_91f31b362d72089d09f6245e912efb30": "include url(\"http://ok.com/test.conf\")", - "level3": { - "HOCONSTRING_INCLUDE_906d6e6eff885e840b705c2e7be3ba2d": "include classpath(\"test\")", - "level4": { - "HOCONSTRING_INCLUDE_c971be2dbb00ef0b44b9e4bf3c57f5cb": "include \"test.conf\"", - "level5": { - "HOCONSTRING_INCLUDE_44cb98470497b76dde0ab244c70870f0": "include \"test.hocon\"" + 'HOCONSTRING_INCLUDE_ad5c3c187d5107c099f66681f1896c70': 'include required(classpath("application"))', + 'HOCONSTRING_INCLUDE_61b86ce2e19939719a2e043b923774e4': 'include required(file("application"))', + 'HOCONSTRING_INCLUDE_543d042c69d8a730bc2b5785ac2f13c9': 'include required(url("application"))', + 'HOCONSTRING_INCLUDE_9456b859a44adad9a3d00ff3fcbbc5ae': 'include required("application.conf")', + 'level1': { + 'HOCONSTRING_INCLUDE_0714deb341d3d6291199d4738656c32b': 'include file("/srv/test.conf")', + 'level2': { + 'HOCONSTRING_INCLUDE_91f31b362d72089d09f6245e912efb30': 'include url("http://ok.com/test.conf")', + 'level3': { + 'HOCONSTRING_INCLUDE_906d6e6eff885e840b705c2e7be3ba2d': 'include classpath("test")', + 'level4': { + 'HOCONSTRING_INCLUDE_c971be2dbb00ef0b44b9e4bf3c57f5cb': 'include "test.conf"', + 'level5': { + 'HOCONSTRING_INCLUDE_44cb98470497b76dde0ab244c70870f0': 'include "test.hocon"' }, }, }, }, }, } - else: - return {'level1': {'level2': {'level3': {'level4': {'level5': {}}}}}} + return {'level1': {'level2': {'level3': {'level4': {'level5': {}}}}}} -def test_from_dict(): +def test_from_dict() -> None: ref_d = get_test_dict() hs = HOCONString.from_dict(ref_d, include=INCLUDE_CROMWELL) - print(str(hs)) - print(get_test_hocon_str()) assert str(hs) == get_test_hocon_str() -def test_to_dict(): +def test_to_dict() -> None: hs = HOCONString(get_test_hocon_str()) assert hs.to_dict(with_include=False) == get_test_dict(with_include=False) assert hs.to_dict(with_include=True) == get_test_dict(with_include=True) hs = HOCONString(get_test_hocon_str_multiple_includes()) - assert hs.to_dict(with_include=False) == get_test_multiple_includes( - with_include=False - ) - assert hs.to_dict(with_include=True) == get_test_multiple_includes( - with_include=True - ) + assert hs.to_dict(with_include=False) == get_test_multiple_includes(with_include=False) + assert hs.to_dict(with_include=True) == get_test_multiple_includes(with_include=True) -def test_merge(): +def test_merge() -> None: s1 = get_test_hocon_str() s2 = get_test_hocon_str2() s3 = get_test_hocon_str_multiple_includes() d1 = get_test_dict() d2 = get_test_dict2() - d3 = get_test_multiple_includes(True) + d3 = get_test_multiple_includes(True) # noqa: FBT003 dm12 = deepcopy(d1) merge_dict(dm12, d2) @@ -182,12 +173,11 @@ def test_merge(): assert hs1_original_str != str(hs1) -def test_get_contents(): +def test_get_contents() -> None: s2 = get_test_hocon_str2() hs2 = HOCONString(s2) assert hs2.get_contents(with_include=True).strip() == s2 assert ( - hs2.get_contents(with_include=False).strip() - == s2.replace(INCLUDE_CROMWELL, '').strip() + hs2.get_contents(with_include=False).strip() == s2.replace(INCLUDE_CROMWELL, '').strip() ) diff --git a/tests/test_nb_subproc_thread.py b/tests/test_nb_subproc_thread.py index 412a3d08..d053831a 100644 --- a/tests/test_nb_subproc_thread.py +++ b/tests/test_nb_subproc_thread.py @@ -1,3 +1,4 @@ +import 
platform import os import time @@ -34,25 +35,23 @@ """ -def on_stdout(stdout): - print('captured stdout:', stdout) +def on_stdout(stdout) -> None: assert stdout.endswith('\n') -def on_stderr(stderr): - print('captured stderr:', stderr) +def on_stderr(stderr) -> None: assert stderr.endswith('\n') -def on_poll(): - print('polling') +def on_poll() -> None: + pass -def on_finish(): +def on_finish() -> str: return 'done' -def test_nb_subproc_thread(tmp_path): +def test_nb_subproc_thread(tmp_path) -> None: sh = tmp_path / 'test.sh' sh.write_text(SH_CONTENTS) @@ -75,7 +74,7 @@ def test_nb_subproc_thread(tmp_path): assert th.returnvalue == 'done' -def test_nb_subproc_thread_stopped(tmp_path): +def test_nb_subproc_thread_stopped(tmp_path) -> None: sh = tmp_path / 'test.sh' sh.write_text(SH_CONTENTS) @@ -91,25 +90,29 @@ def test_nb_subproc_thread_stopped(tmp_path): assert 'hello kitty 4' not in th.stderr -def test_nb_subproc_thread_nonzero_rc(): +def test_nb_subproc_thread_nonzero_rc() -> None: for rc in range(10): - th = NBSubprocThread( - args=['bash', '-c', 'exit {rc}'.format(rc=rc)], on_stderr=on_stderr - ) + th = NBSubprocThread(args=['bash', '-c', f'exit {rc}'], on_stderr=on_stderr) th.start() th.join() assert th.returncode == rc -@pytest.mark.parametrize('test_app,expected_rc', [('cat', 1), ('ls', 2), ('java', 1)]) -def test_nb_subproc_thread_nonzero_rc_for_real_apps(test_app, expected_rc): +# ls on macOS/BSD returns 1 if the file does not exist, Linux returns 2. +# Don't care about windows +LS_RETURN_CODE = 1 if platform.system() == 'Darwin' else 2 + + +@pytest.mark.parametrize( + ('test_app', 'expected_rc'), [('cat', 1), ('ls', LS_RETURN_CODE), ('java', 1)] +) +def test_nb_subproc_thread_nonzero_rc_for_real_apps(test_app, expected_rc) -> None: test_str = 'asdfasf-10190212-zxcv' if os.path.exists(test_str): - raise ValueError('Test string should not be an existing file.') + msg = 'Test string should not be an existing file.' + raise ValueError(msg) - th = NBSubprocThread( - args=[test_app, test_str], on_stdout=on_stdout, on_stderr=on_stderr - ) + th = NBSubprocThread(args=[test_app, test_str], on_stdout=on_stdout, on_stderr=on_stderr) th.start() th.join() assert th.returncode == expected_rc diff --git a/tests/test_resource_analysis.py b/tests/test_resource_analysis.py index 44ffa475..737d4196 100644 --- a/tests/test_resource_analysis.py +++ b/tests/test_resource_analysis.py @@ -1,6 +1,6 @@ """Test is based on a metadata JSON file generated from running atac-seq-pipeline v1.8.0 with the following input JSON. -gs://encode-pipeline-test-samples/encode-atac-seq-pipeline/ENCSR356KRQ_subsampled_caper.json +gs://encode-pipeline-test-samples/encode-atac-seq-pipeline/ENCSR356KRQ_subsampled_caper.json. 
""" import pytest @@ -8,13 +8,14 @@ from caper.resource_analysis import LinearResourceAnalysis, ResourceAnalysis -def test_resource_analysis_abstract_class(gcp_res_analysis_metadata): +def test_resource_analysis_abstract_class(gcp_res_analysis_metadata) -> None: with pytest.raises(TypeError): # abstract base-class ResourceAnalysis() -def test_resource_analysis_analyze_task(gcp_res_analysis_metadata): +@pytest.mark.google_cloud +def test_resource_analysis_analyze_task(gcp_res_analysis_metadata) -> None: analysis = LinearResourceAnalysis() analysis.collect_resource_data([gcp_res_analysis_metadata]) @@ -32,15 +33,9 @@ def test_resource_analysis_analyze_task(gcp_res_analysis_metadata): assert result_align1['coeffs']['stats.mean.cpu_pct'][0][0] == pytest.approx( 1.6844513715565233e-06 ) - assert result_align1['coeffs']['stats.mean.cpu_pct'][1] == pytest.approx( - 42.28561239506905 - ) - assert result_align1['coeffs']['stats.max.mem'][0][0] == pytest.approx( - 48.91222341236991 - ) - assert result_align1['coeffs']['stats.max.mem'][1] == pytest.approx( - 124314029.09791338 - ) + assert result_align1['coeffs']['stats.mean.cpu_pct'][1] == pytest.approx(42.28561239506905) + assert result_align1['coeffs']['stats.max.mem'][0][0] == pytest.approx(48.91222341236991) + assert result_align1['coeffs']['stats.max.mem'][1] == pytest.approx(124314029.09791338) result_align2 = analysis.analyze_task( 'atac.align', in_file_vars=['fastqs_R2'], reduce_in_file_vars=sum @@ -62,7 +57,8 @@ def test_resource_analysis_analyze_task(gcp_res_analysis_metadata): } -def test_resource_analysis_analyze(gcp_res_analysis_metadata): +@pytest.mark.google_cloud +def test_resource_analysis_analyze(gcp_res_analysis_metadata) -> None: """Test method analyze() which analyze all tasks defined in in_file_vars.""" analysis = LinearResourceAnalysis() analysis.collect_resource_data([gcp_res_analysis_metadata]) @@ -77,9 +73,7 @@ def test_resource_analysis_analyze(gcp_res_analysis_metadata): assert result['atac.align*']['x'] == { 'sum(fastqs_R1,fastqs_R2)': [32138224, 39148587, 32138224, 39148587] } - assert result['atac.filter*']['x'] == { - 'sum(bam)': [61315022, 76789196, 61315022, 76789196] - } + assert result['atac.filter*']['x'] == {'sum(bam)': [61315022, 76789196, 61315022, 76789196]} result_all = analysis.analyze() # 38 tasks in total diff --git a/tests/test_server_heartbeat.py b/tests/test_server_heartbeat.py index c11b19c1..5b996594 100644 --- a/tests/test_server_heartbeat.py +++ b/tests/test_server_heartbeat.py @@ -6,12 +6,12 @@ from caper.server_heartbeat import ServerHeartbeat, ServerHeartbeatTimeoutError -def test_server_heartbeat(tmp_path): +def test_server_heartbeat(tmp_path) -> None: """All methods will be tested here. This willl test 3 things: - can read from file - can get hostname of this machine - - can ignore old file (> heartbeat_timeout of 5 sec) + - can ignore old file (> heartbeat_timeout of 5 sec). """ hb_file = tmp_path / 'hb_file' diff --git a/tests/test_singularity.py b/tests/test_singularity.py index 8193ffc0..69138eb8 100644 --- a/tests/test_singularity.py +++ b/tests/test_singularity.py @@ -11,7 +11,7 @@ ) -def test_find_bindpath(tmp_path): +def test_find_bindpath(tmp_path) -> None: """Parse input JSON file to recursively get all the files defined in it. For found local abspaths, find common root directories for those. 
diff --git a/tests/test_wdl_parser.py b/tests/test_wdl_parser.py
index e5301cde..d196a64f 100644
--- a/tests/test_wdl_parser.py
+++ b/tests/test_wdl_parser.py
@@ -21,12 +21,12 @@
 )


-def test_properties(tmp_path):
+def test_properties(tmp_path) -> None:
     """Test the following properties.

     - contents
     - workflow_meta
     - workflow_parameter_meta
-    - imports
+    - imports.
     """
     wdl = tmp_path / 'main.wdl'
     wdl.write_text(MAIN_WDL)
@@ -38,7 +38,7 @@ def test_properties(tmp_path):
     assert wp.imports == ['sub/sub.wdl']


-def test_zip_subworkflows(tmp_path):
+def test_zip_subworkflows(tmp_path) -> None:
     """This actually tests create_imports_file, since create_imports_file
     is merely a wrapper for zip_subworkflows.
     """
diff --git a/uv.lock b/uv.lock
new file mode 100644
index 00000000..706162f0
--- /dev/null
+++ b/uv.lock
@@ -0,0 +1,1701 @@
+version = 1 +revision = 3 +requires-python = ">=3.12" +resolution-markers = [ + "python_full_version >= '3.13'", + "python_full_version < '3.13'", +] + +[[package]] +name = "argcomplete" +version = "3.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/38/61/0b9ae6399dd4a58d8c1b1dc5a27d6f2808023d0b5dd3104bb99f45a33ff6/argcomplete-3.6.3.tar.gz", hash = "sha256:62e8ed4fd6a45864acc8235409461b72c9a28ee785a2011cc5eb78318786c89c", size = 73754, upload-time = "2025-10-20T03:33:34.741Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/74/f5/9373290775639cb67a2fce7f629a1c240dce9f12fe927bc32b2736e16dfc/argcomplete-3.6.3-py3-none-any.whl", hash = "sha256:f5007b3a600ccac5d25bbce33089211dfd49eab4a7718da3f10e3082525a92ce", size = 43846, upload-time = "2025-10-20T03:33:33.021Z" }, +] + +[[package]] +name = "autouri" +version = "0.4.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "awscli" }, + { name = "boto3" }, + { name = "dateparser" }, + { name = "filelock" }, + { name = "google-cloud-storage" }, + { name = "ntplib" }, + { name = "pyopenssl" }, + { name = "requests" }, + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bd/59/bf4c9be16e05039548a47bd80f1c8255ead3e59684972b029bdfb0149b08/autouri-0.4.4.tar.gz", hash = "sha256:ea34958a5cb82ae22931fecc0c9616800ffb6f3ac8f42cad4f17198ced5cb593", size = 40570, upload-time = "2023-05-04T17:35:20.526Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d5/a1/292a2cb4fd57aa2ed40f1196a4e64994d15c2935155f2453fccfacda8858/autouri-0.4.4-py3-none-any.whl", hash = "sha256:b55a9ccb91446fb821c43448fdf865a398e4388e0d8764e550fffd7d4c7733fa", size = 51473, upload-time = "2023-05-04T17:35:11.764Z" }, +] + +[[package]] +name = "awscli" +version = "1.44.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, + { name = "colorama" }, + { name = "docutils" }, + { name = "pyyaml" }, + { name = "rsa" }, + { name = "s3transfer" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8d/b5/7b9dcca5cb9609da0cef603b61b8a90904b58e5b32b6f8ebdd926829325b/awscli-1.44.4.tar.gz", hash = "sha256:3ff2b3a6b7095c8f3afe6b80e11209671ef96c265e89f760ebd4781fba8210b2", size = 1888698, upload-time = "2025-12-19T20:27:11.136Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/e6/9dc506438e7333713be7a418118ba5cfc31ab29d99b0136284d2b9c09a19/awscli-1.44.4-py3-none-any.whl", hash = "sha256:5d6acfef64ce078e8ce6f280490ec0804e4058204e0cf724638a24b4d7fda857", size = 4646890, upload-time = "2025-12-19T20:27:08.141Z" }, +] + +[[package]] +name = "boto3" +version = "1.42.14"
+source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, + { name = "jmespath" }, + { name = "s3transfer" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/09/72/e236ca627bc0461710685f5b7438f759ef3b4106e0e08dda08513a6539ab/boto3-1.42.14.tar.gz", hash = "sha256:a5d005667b480c844ed3f814a59f199ce249d0f5669532a17d06200c0a93119c", size = 112825, upload-time = "2025-12-19T20:27:15.325Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bb/ba/c657ea6f6d63563cc46748202fccd097b51755d17add00ebe4ea27580d06/boto3-1.42.14-py3-none-any.whl", hash = "sha256:bfcc665227bb4432a235cb4adb47719438d6472e5ccbf7f09512046c3f749670", size = 140571, upload-time = "2025-12-19T20:27:13.316Z" }, +] + +[[package]] +name = "botocore" +version = "1.42.14" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jmespath" }, + { name = "python-dateutil" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/35/3f/50c56f093c2c6ce6de1f579726598db1cf9a9cccd3bf8693f73b1cf5e319/botocore-1.42.14.tar.gz", hash = "sha256:cf5bebb580803c6cfd9886902ca24834b42ecaa808da14fb8cd35ad523c9f621", size = 14910547, upload-time = "2025-12-19T20:27:04.431Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ad/94/67a78a8d08359e779894d4b1672658a3c7fcce216b48f06dfbe1de45521d/botocore-1.42.14-py3-none-any.whl", hash = "sha256:efe89adfafa00101390ec2c371d453b3359d5f9690261bc3bd70131e0d453e8e", size = 14583247, upload-time = "2025-12-19T20:27:00.54Z" }, +] + +[[package]] +name = "cachetools" +version = "6.2.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bc/1d/ede8680603f6016887c062a2cf4fc8fdba905866a3ab8831aa8aa651320c/cachetools-6.2.4.tar.gz", hash = "sha256:82c5c05585e70b6ba2d3ae09ea60b79548872185d2f24ae1f2709d37299fd607", size = 31731, upload-time = "2025-12-15T18:24:53.744Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/fc/1d7b80d0eb7b714984ce40efc78859c022cd930e402f599d8ca9e39c78a4/cachetools-6.2.4-py3-none-any.whl", hash = "sha256:69a7a52634fed8b8bf6e24a050fb60bff1c9bd8f6d24572b99c32d4e71e62a51", size = 11551, upload-time = "2025-12-15T18:24:52.332Z" }, +] + +[[package]] +name = "caper" +source = { editable = "." 
} +dependencies = [ + { name = "autouri" }, + { name = "humanfriendly" }, + { name = "matplotlib" }, + { name = "miniwdl" }, + { name = "numpy" }, + { name = "pandas" }, + { name = "pyhocon" }, + { name = "pyopenssl" }, + { name = "requests" }, + { name = "scikit-learn" }, + { name = "six" }, +] + +[package.dev-dependencies] +dev = [ + { name = "pyrefly" }, + { name = "pytest" }, + { name = "ruff" }, +] + +[package.metadata] +requires-dist = [ + { name = "autouri", specifier = ">=0.4.4" }, + { name = "humanfriendly" }, + { name = "matplotlib", specifier = ">=1.5" }, + { name = "miniwdl", specifier = ">=0.7.0" }, + { name = "numpy", specifier = ">=1.8.2" }, + { name = "pandas", specifier = ">=1.0" }, + { name = "pyhocon", specifier = ">=0.3.53" }, + { name = "pyopenssl" }, + { name = "requests", specifier = ">=2.20" }, + { name = "scikit-learn", specifier = ">=0.19.2" }, + { name = "six", specifier = ">=1.13.0" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "pyrefly" }, + { name = "pytest", specifier = ">=8.4.1" }, + { name = "ruff" }, +] + +[[package]] +name = "certifi" +version = "2025.11.12" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/8c/58f469717fa48465e4a50c014a0400602d3c437d7c0c468e17ada824da3a/certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316", size = 160538, upload-time = "2025-11-12T02:54:51.517Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/70/7d/9bc192684cea499815ff478dfcdc13835ddf401365057044fb721ec6bddb/certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b", size = 159438, upload-time = "2025-11-12T02:54:49.735Z" }, +] + +[[package]] +name = "cffi" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser", marker = "implementation_name != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, + { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, + { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, + { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = 
"2025-09-08T23:22:48.677Z" }, + { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, + { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" }, + { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, + { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, + { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, + { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" }, + { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, + { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" }, + { url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" }, + { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" }, + { url = 
"https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" }, + { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" }, + { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" }, + { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" }, + { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" }, + { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" }, + { url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" }, + { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" }, + { url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" }, + { url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" }, + { url = 
"https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" }, + { url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" }, + { url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" }, + { url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" }, + { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" }, + { url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" }, + { url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" }, + { url = "https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" }, + { url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" }, + { url = 
"https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" }, + { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" }, + { url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" }, + { url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" }, + { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" }, + { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" }, + { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" }, + { url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time 
= "2025-10-14T04:40:53.353Z" }, + { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" }, + { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" }, + { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" }, + { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" }, + { url = "https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" }, + { url = "https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" }, + { url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" }, + { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" }, + { url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" }, + { url = 
"https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" }, + { url = "https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" }, + { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694, upload-time = "2025-10-14T04:41:09.23Z" }, + { url = "https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131, upload-time = "2025-10-14T04:41:10.467Z" }, + { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" }, + { url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" }, + { url = "https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" }, + { url = "https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" }, + { url = "https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346, upload-time = "2025-10-14T04:41:16.738Z" }, + { url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" }, + { url = 
"https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" }, + { url = "https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" }, + { url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" }, + { url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" }, + { url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, upload-time = "2025-10-14T04:41:23.754Z" }, + { url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" }, + { url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" }, + { url = "https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" }, + { url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404, upload-time = "2025-10-14T04:41:29.95Z" }, + { url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, upload-time = "2025-10-14T04:41:31.188Z" }, + { url = "https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time = 
"2025-10-14T04:41:32.624Z" }, + { url = "https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd", size = 207746, upload-time = "2025-10-14T04:41:33.773Z" }, + { url = "https://files.pythonhosted.org/packages/10/9a/97c8d48ef10d6cd4fcead2415523221624bf58bcf68a802721a6bc807c8f/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb", size = 147889, upload-time = "2025-10-14T04:41:34.897Z" }, + { url = "https://files.pythonhosted.org/packages/10/bf/979224a919a1b606c82bd2c5fa49b5c6d5727aa47b4312bb27b1734f53cd/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e", size = 143641, upload-time = "2025-10-14T04:41:36.116Z" }, + { url = "https://files.pythonhosted.org/packages/ba/33/0ad65587441fc730dc7bd90e9716b30b4702dc7b617e6ba4997dc8651495/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14", size = 160779, upload-time = "2025-10-14T04:41:37.229Z" }, + { url = "https://files.pythonhosted.org/packages/67/ed/331d6b249259ee71ddea93f6f2f0a56cfebd46938bde6fcc6f7b9a3d0e09/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191", size = 159035, upload-time = "2025-10-14T04:41:38.368Z" }, + { url = "https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838", size = 152542, upload-time = "2025-10-14T04:41:39.862Z" }, + { url = "https://files.pythonhosted.org/packages/16/85/276033dcbcc369eb176594de22728541a925b2632f9716428c851b149e83/charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6", size = 149524, upload-time = "2025-10-14T04:41:41.319Z" }, + { url = "https://files.pythonhosted.org/packages/9e/f2/6a2a1f722b6aba37050e626530a46a68f74e63683947a8acff92569f979a/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e", size = 150395, upload-time = "2025-10-14T04:41:42.539Z" }, + { url = "https://files.pythonhosted.org/packages/60/bb/2186cb2f2bbaea6338cad15ce23a67f9b0672929744381e28b0592676824/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c", size = 143680, upload-time = "2025-10-14T04:41:43.661Z" }, + { url = "https://files.pythonhosted.org/packages/7d/a5/bf6f13b772fbb2a90360eb620d52ed8f796f3c5caee8398c3b2eb7b1c60d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090", size = 162045, upload-time = "2025-10-14T04:41:44.821Z" }, + { url = 
"https://files.pythonhosted.org/packages/df/c5/d1be898bf0dc3ef9030c3825e5d3b83f2c528d207d246cbabe245966808d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152", size = 149687, upload-time = "2025-10-14T04:41:46.442Z" }, + { url = "https://files.pythonhosted.org/packages/a5/42/90c1f7b9341eef50c8a1cb3f098ac43b0508413f33affd762855f67a410e/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828", size = 160014, upload-time = "2025-10-14T04:41:47.631Z" }, + { url = "https://files.pythonhosted.org/packages/76/be/4d3ee471e8145d12795ab655ece37baed0929462a86e72372fd25859047c/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec", size = 154044, upload-time = "2025-10-14T04:41:48.81Z" }, + { url = "https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", size = 99940, upload-time = "2025-10-14T04:41:49.946Z" }, + { url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104, upload-time = "2025-10-14T04:41:51.051Z" }, + { url = "https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743, upload-time = "2025-10-14T04:41:52.122Z" }, + { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "coloredlogs" +version = "15.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "humanfriendly" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cc/c7/eed8f27100517e8c0e6b923d5f0845d0cb99763da6fdee00478f91db7325/coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0", size = 278520, upload-time = "2021-06-11T10:22:45.202Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/06/3d6badcf13db419e25b07041d9c7b4a2c331d3f4e7134445ec5df57714cd/coloredlogs-15.0.1-py2.py3-none-any.whl", hash = 
"sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934", size = 46018, upload-time = "2021-06-11T10:22:42.561Z" }, +] + +[[package]] +name = "contourpy" +version = "1.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/58/01/1253e6698a07380cd31a736d248a3f2a50a7c88779a1813da27503cadc2a/contourpy-1.3.3.tar.gz", hash = "sha256:083e12155b210502d0bca491432bb04d56dc3432f95a979b429f2848c3dbe880", size = 13466174, upload-time = "2025-07-26T12:03:12.549Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/45/adfee365d9ea3d853550b2e735f9d66366701c65db7855cd07621732ccfc/contourpy-1.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b08a32ea2f8e42cf1d4be3169a98dd4be32bafe4f22b6c4cb4ba810fa9e5d2cb", size = 293419, upload-time = "2025-07-26T12:01:21.16Z" }, + { url = "https://files.pythonhosted.org/packages/53/3e/405b59cfa13021a56bba395a6b3aca8cec012b45bf177b0eaf7a202cde2c/contourpy-1.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:556dba8fb6f5d8742f2923fe9457dbdd51e1049c4a43fd3986a0b14a1d815fc6", size = 273979, upload-time = "2025-07-26T12:01:22.448Z" }, + { url = "https://files.pythonhosted.org/packages/d4/1c/a12359b9b2ca3a845e8f7f9ac08bdf776114eb931392fcad91743e2ea17b/contourpy-1.3.3-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92d9abc807cf7d0e047b95ca5d957cf4792fcd04e920ca70d48add15c1a90ea7", size = 332653, upload-time = "2025-07-26T12:01:24.155Z" }, + { url = "https://files.pythonhosted.org/packages/63/12/897aeebfb475b7748ea67b61e045accdfcf0d971f8a588b67108ed7f5512/contourpy-1.3.3-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b2e8faa0ed68cb29af51edd8e24798bb661eac3bd9f65420c1887b6ca89987c8", size = 379536, upload-time = "2025-07-26T12:01:25.91Z" }, + { url = "https://files.pythonhosted.org/packages/43/8a/a8c584b82deb248930ce069e71576fc09bd7174bbd35183b7943fb1064fd/contourpy-1.3.3-cp312-cp312-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:626d60935cf668e70a5ce6ff184fd713e9683fb458898e4249b63be9e28286ea", size = 384397, upload-time = "2025-07-26T12:01:27.152Z" }, + { url = "https://files.pythonhosted.org/packages/cc/8f/ec6289987824b29529d0dfda0d74a07cec60e54b9c92f3c9da4c0ac732de/contourpy-1.3.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d00e655fcef08aba35ec9610536bfe90267d7ab5ba944f7032549c55a146da1", size = 362601, upload-time = "2025-07-26T12:01:28.808Z" }, + { url = "https://files.pythonhosted.org/packages/05/0a/a3fe3be3ee2dceb3e615ebb4df97ae6f3828aa915d3e10549ce016302bd1/contourpy-1.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:451e71b5a7d597379ef572de31eeb909a87246974d960049a9848c3bc6c41bf7", size = 1331288, upload-time = "2025-07-26T12:01:31.198Z" }, + { url = "https://files.pythonhosted.org/packages/33/1d/acad9bd4e97f13f3e2b18a3977fe1b4a37ecf3d38d815333980c6c72e963/contourpy-1.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:459c1f020cd59fcfe6650180678a9993932d80d44ccde1fa1868977438f0b411", size = 1403386, upload-time = "2025-07-26T12:01:33.947Z" }, + { url = "https://files.pythonhosted.org/packages/cf/8f/5847f44a7fddf859704217a99a23a4f6417b10e5ab1256a179264561540e/contourpy-1.3.3-cp312-cp312-win32.whl", hash = "sha256:023b44101dfe49d7d53932be418477dba359649246075c996866106da069af69", size = 185018, upload-time = "2025-07-26T12:01:35.64Z" }, + { url = 
"https://files.pythonhosted.org/packages/19/e8/6026ed58a64563186a9ee3f29f41261fd1828f527dd93d33b60feca63352/contourpy-1.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:8153b8bfc11e1e4d75bcb0bff1db232f9e10b274e0929de9d608027e0d34ff8b", size = 226567, upload-time = "2025-07-26T12:01:36.804Z" }, + { url = "https://files.pythonhosted.org/packages/d1/e2/f05240d2c39a1ed228d8328a78b6f44cd695f7ef47beb3e684cf93604f86/contourpy-1.3.3-cp312-cp312-win_arm64.whl", hash = "sha256:07ce5ed73ecdc4a03ffe3e1b3e3c1166db35ae7584be76f65dbbe28a7791b0cc", size = 193655, upload-time = "2025-07-26T12:01:37.999Z" }, + { url = "https://files.pythonhosted.org/packages/68/35/0167aad910bbdb9599272bd96d01a9ec6852f36b9455cf2ca67bd4cc2d23/contourpy-1.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:177fb367556747a686509d6fef71d221a4b198a3905fe824430e5ea0fda54eb5", size = 293257, upload-time = "2025-07-26T12:01:39.367Z" }, + { url = "https://files.pythonhosted.org/packages/96/e4/7adcd9c8362745b2210728f209bfbcf7d91ba868a2c5f40d8b58f54c509b/contourpy-1.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d002b6f00d73d69333dac9d0b8d5e84d9724ff9ef044fd63c5986e62b7c9e1b1", size = 274034, upload-time = "2025-07-26T12:01:40.645Z" }, + { url = "https://files.pythonhosted.org/packages/73/23/90e31ceeed1de63058a02cb04b12f2de4b40e3bef5e082a7c18d9c8ae281/contourpy-1.3.3-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:348ac1f5d4f1d66d3322420f01d42e43122f43616e0f194fc1c9f5d830c5b286", size = 334672, upload-time = "2025-07-26T12:01:41.942Z" }, + { url = "https://files.pythonhosted.org/packages/ed/93/b43d8acbe67392e659e1d984700e79eb67e2acb2bd7f62012b583a7f1b55/contourpy-1.3.3-cp313-cp313-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:655456777ff65c2c548b7c454af9c6f33f16c8884f11083244b5819cc214f1b5", size = 381234, upload-time = "2025-07-26T12:01:43.499Z" }, + { url = "https://files.pythonhosted.org/packages/46/3b/bec82a3ea06f66711520f75a40c8fc0b113b2a75edb36aa633eb11c4f50f/contourpy-1.3.3-cp313-cp313-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:644a6853d15b2512d67881586bd03f462c7ab755db95f16f14d7e238f2852c67", size = 385169, upload-time = "2025-07-26T12:01:45.219Z" }, + { url = "https://files.pythonhosted.org/packages/4b/32/e0f13a1c5b0f8572d0ec6ae2f6c677b7991fafd95da523159c19eff0696a/contourpy-1.3.3-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4debd64f124ca62069f313a9cb86656ff087786016d76927ae2cf37846b006c9", size = 362859, upload-time = "2025-07-26T12:01:46.519Z" }, + { url = "https://files.pythonhosted.org/packages/33/71/e2a7945b7de4e58af42d708a219f3b2f4cff7386e6b6ab0a0fa0033c49a9/contourpy-1.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a15459b0f4615b00bbd1e91f1b9e19b7e63aea7483d03d804186f278c0af2659", size = 1332062, upload-time = "2025-07-26T12:01:48.964Z" }, + { url = "https://files.pythonhosted.org/packages/12/fc/4e87ac754220ccc0e807284f88e943d6d43b43843614f0a8afa469801db0/contourpy-1.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ca0fdcd73925568ca027e0b17ab07aad764be4706d0a925b89227e447d9737b7", size = 1403932, upload-time = "2025-07-26T12:01:51.979Z" }, + { url = "https://files.pythonhosted.org/packages/a6/2e/adc197a37443f934594112222ac1aa7dc9a98faf9c3842884df9a9d8751d/contourpy-1.3.3-cp313-cp313-win32.whl", hash = "sha256:b20c7c9a3bf701366556e1b1984ed2d0cedf999903c51311417cf5f591d8c78d", size = 185024, upload-time = "2025-07-26T12:01:53.245Z" }, + { url = 
"https://files.pythonhosted.org/packages/18/0b/0098c214843213759692cc638fce7de5c289200a830e5035d1791d7a2338/contourpy-1.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:1cadd8b8969f060ba45ed7c1b714fe69185812ab43bd6b86a9123fe8f99c3263", size = 226578, upload-time = "2025-07-26T12:01:54.422Z" }, + { url = "https://files.pythonhosted.org/packages/8a/9a/2f6024a0c5995243cd63afdeb3651c984f0d2bc727fd98066d40e141ad73/contourpy-1.3.3-cp313-cp313-win_arm64.whl", hash = "sha256:fd914713266421b7536de2bfa8181aa8c699432b6763a0ea64195ebe28bff6a9", size = 193524, upload-time = "2025-07-26T12:01:55.73Z" }, + { url = "https://files.pythonhosted.org/packages/c0/b3/f8a1a86bd3298513f500e5b1f5fd92b69896449f6cab6a146a5d52715479/contourpy-1.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:88df9880d507169449d434c293467418b9f6cbe82edd19284aa0409e7fdb933d", size = 306730, upload-time = "2025-07-26T12:01:57.051Z" }, + { url = "https://files.pythonhosted.org/packages/3f/11/4780db94ae62fc0c2053909b65dc3246bd7cecfc4f8a20d957ad43aa4ad8/contourpy-1.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d06bb1f751ba5d417047db62bca3c8fde202b8c11fb50742ab3ab962c81e8216", size = 287897, upload-time = "2025-07-26T12:01:58.663Z" }, + { url = "https://files.pythonhosted.org/packages/ae/15/e59f5f3ffdd6f3d4daa3e47114c53daabcb18574a26c21f03dc9e4e42ff0/contourpy-1.3.3-cp313-cp313t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e4e6b05a45525357e382909a4c1600444e2a45b4795163d3b22669285591c1ae", size = 326751, upload-time = "2025-07-26T12:02:00.343Z" }, + { url = "https://files.pythonhosted.org/packages/0f/81/03b45cfad088e4770b1dcf72ea78d3802d04200009fb364d18a493857210/contourpy-1.3.3-cp313-cp313t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ab3074b48c4e2cf1a960e6bbeb7f04566bf36b1861d5c9d4d8ac04b82e38ba20", size = 375486, upload-time = "2025-07-26T12:02:02.128Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ba/49923366492ffbdd4486e970d421b289a670ae8cf539c1ea9a09822b371a/contourpy-1.3.3-cp313-cp313t-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c3d53c796f8647d6deb1abe867daeb66dcc8a97e8455efa729516b997b8ed99", size = 388106, upload-time = "2025-07-26T12:02:03.615Z" }, + { url = "https://files.pythonhosted.org/packages/9f/52/5b00ea89525f8f143651f9f03a0df371d3cbd2fccd21ca9b768c7a6500c2/contourpy-1.3.3-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50ed930df7289ff2a8d7afeb9603f8289e5704755c7e5c3bbd929c90c817164b", size = 352548, upload-time = "2025-07-26T12:02:05.165Z" }, + { url = "https://files.pythonhosted.org/packages/32/1d/a209ec1a3a3452d490f6b14dd92e72280c99ae3d1e73da74f8277d4ee08f/contourpy-1.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4feffb6537d64b84877da813a5c30f1422ea5739566abf0bd18065ac040e120a", size = 1322297, upload-time = "2025-07-26T12:02:07.379Z" }, + { url = "https://files.pythonhosted.org/packages/bc/9e/46f0e8ebdd884ca0e8877e46a3f4e633f6c9c8c4f3f6e72be3fe075994aa/contourpy-1.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2b7e9480ffe2b0cd2e787e4df64270e3a0440d9db8dc823312e2c940c167df7e", size = 1391023, upload-time = "2025-07-26T12:02:10.171Z" }, + { url = "https://files.pythonhosted.org/packages/b9/70/f308384a3ae9cd2209e0849f33c913f658d3326900d0ff5d378d6a1422d2/contourpy-1.3.3-cp313-cp313t-win32.whl", hash = "sha256:283edd842a01e3dcd435b1c5116798d661378d83d36d337b8dde1d16a5fc9ba3", size = 196157, upload-time = "2025-07-26T12:02:11.488Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/dd/880f890a6663b84d9e34a6f88cded89d78f0091e0045a284427cb6b18521/contourpy-1.3.3-cp313-cp313t-win_amd64.whl", hash = "sha256:87acf5963fc2b34825e5b6b048f40e3635dd547f590b04d2ab317c2619ef7ae8", size = 240570, upload-time = "2025-07-26T12:02:12.754Z" }, + { url = "https://files.pythonhosted.org/packages/80/99/2adc7d8ffead633234817ef8e9a87115c8a11927a94478f6bb3d3f4d4f7d/contourpy-1.3.3-cp313-cp313t-win_arm64.whl", hash = "sha256:3c30273eb2a55024ff31ba7d052dde990d7d8e5450f4bbb6e913558b3d6c2301", size = 199713, upload-time = "2025-07-26T12:02:14.4Z" }, + { url = "https://files.pythonhosted.org/packages/72/8b/4546f3ab60f78c514ffb7d01a0bd743f90de36f0019d1be84d0a708a580a/contourpy-1.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fde6c716d51c04b1c25d0b90364d0be954624a0ee9d60e23e850e8d48353d07a", size = 292189, upload-time = "2025-07-26T12:02:16.095Z" }, + { url = "https://files.pythonhosted.org/packages/fd/e1/3542a9cb596cadd76fcef413f19c79216e002623158befe6daa03dbfa88c/contourpy-1.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:cbedb772ed74ff5be440fa8eee9bd49f64f6e3fc09436d9c7d8f1c287b121d77", size = 273251, upload-time = "2025-07-26T12:02:17.524Z" }, + { url = "https://files.pythonhosted.org/packages/b1/71/f93e1e9471d189f79d0ce2497007731c1e6bf9ef6d1d61b911430c3db4e5/contourpy-1.3.3-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:22e9b1bd7a9b1d652cd77388465dc358dafcd2e217d35552424aa4f996f524f5", size = 335810, upload-time = "2025-07-26T12:02:18.9Z" }, + { url = "https://files.pythonhosted.org/packages/91/f9/e35f4c1c93f9275d4e38681a80506b5510e9327350c51f8d4a5a724d178c/contourpy-1.3.3-cp314-cp314-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a22738912262aa3e254e4f3cb079a95a67132fc5a063890e224393596902f5a4", size = 382871, upload-time = "2025-07-26T12:02:20.418Z" }, + { url = "https://files.pythonhosted.org/packages/b5/71/47b512f936f66a0a900d81c396a7e60d73419868fba959c61efed7a8ab46/contourpy-1.3.3-cp314-cp314-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:afe5a512f31ee6bd7d0dda52ec9864c984ca3d66664444f2d72e0dc4eb832e36", size = 386264, upload-time = "2025-07-26T12:02:21.916Z" }, + { url = "https://files.pythonhosted.org/packages/04/5f/9ff93450ba96b09c7c2b3f81c94de31c89f92292f1380261bd7195bea4ea/contourpy-1.3.3-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f64836de09927cba6f79dcd00fdd7d5329f3fccc633468507079c829ca4db4e3", size = 363819, upload-time = "2025-07-26T12:02:23.759Z" }, + { url = "https://files.pythonhosted.org/packages/3e/a6/0b185d4cc480ee494945cde102cb0149ae830b5fa17bf855b95f2e70ad13/contourpy-1.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1fd43c3be4c8e5fd6e4f2baeae35ae18176cf2e5cced681cca908addf1cdd53b", size = 1333650, upload-time = "2025-07-26T12:02:26.181Z" }, + { url = "https://files.pythonhosted.org/packages/43/d7/afdc95580ca56f30fbcd3060250f66cedbde69b4547028863abd8aa3b47e/contourpy-1.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6afc576f7b33cf00996e5c1102dc2a8f7cc89e39c0b55df93a0b78c1bd992b36", size = 1404833, upload-time = "2025-07-26T12:02:28.782Z" }, + { url = "https://files.pythonhosted.org/packages/e2/e2/366af18a6d386f41132a48f033cbd2102e9b0cf6345d35ff0826cd984566/contourpy-1.3.3-cp314-cp314-win32.whl", hash = "sha256:66c8a43a4f7b8df8b71ee1840e4211a3c8d93b214b213f590e18a1beca458f7d", size = 189692, upload-time = "2025-07-26T12:02:30.128Z" }, + { url = 
"https://files.pythonhosted.org/packages/7d/c2/57f54b03d0f22d4044b8afb9ca0e184f8b1afd57b4f735c2fa70883dc601/contourpy-1.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:cf9022ef053f2694e31d630feaacb21ea24224be1c3ad0520b13d844274614fd", size = 232424, upload-time = "2025-07-26T12:02:31.395Z" }, + { url = "https://files.pythonhosted.org/packages/18/79/a9416650df9b525737ab521aa181ccc42d56016d2123ddcb7b58e926a42c/contourpy-1.3.3-cp314-cp314-win_arm64.whl", hash = "sha256:95b181891b4c71de4bb404c6621e7e2390745f887f2a026b2d99e92c17892339", size = 198300, upload-time = "2025-07-26T12:02:32.956Z" }, + { url = "https://files.pythonhosted.org/packages/1f/42/38c159a7d0f2b7b9c04c64ab317042bb6952b713ba875c1681529a2932fe/contourpy-1.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:33c82d0138c0a062380332c861387650c82e4cf1747aaa6938b9b6516762e772", size = 306769, upload-time = "2025-07-26T12:02:34.2Z" }, + { url = "https://files.pythonhosted.org/packages/c3/6c/26a8205f24bca10974e77460de68d3d7c63e282e23782f1239f226fcae6f/contourpy-1.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ea37e7b45949df430fe649e5de8351c423430046a2af20b1c1961cae3afcda77", size = 287892, upload-time = "2025-07-26T12:02:35.807Z" }, + { url = "https://files.pythonhosted.org/packages/66/06/8a475c8ab718ebfd7925661747dbb3c3ee9c82ac834ccb3570be49d129f4/contourpy-1.3.3-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d304906ecc71672e9c89e87c4675dc5c2645e1f4269a5063b99b0bb29f232d13", size = 326748, upload-time = "2025-07-26T12:02:37.193Z" }, + { url = "https://files.pythonhosted.org/packages/b4/a3/c5ca9f010a44c223f098fccd8b158bb1cb287378a31ac141f04730dc49be/contourpy-1.3.3-cp314-cp314t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca658cd1a680a5c9ea96dc61cdbae1e85c8f25849843aa799dfd3cb370ad4fbe", size = 375554, upload-time = "2025-07-26T12:02:38.894Z" }, + { url = "https://files.pythonhosted.org/packages/80/5b/68bd33ae63fac658a4145088c1e894405e07584a316738710b636c6d0333/contourpy-1.3.3-cp314-cp314t-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ab2fd90904c503739a75b7c8c5c01160130ba67944a7b77bbf36ef8054576e7f", size = 388118, upload-time = "2025-07-26T12:02:40.642Z" }, + { url = "https://files.pythonhosted.org/packages/40/52/4c285a6435940ae25d7410a6c36bda5145839bc3f0beb20c707cda18b9d2/contourpy-1.3.3-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7301b89040075c30e5768810bc96a8e8d78085b47d8be6e4c3f5a0b4ed478a0", size = 352555, upload-time = "2025-07-26T12:02:42.25Z" }, + { url = "https://files.pythonhosted.org/packages/24/ee/3e81e1dd174f5c7fefe50e85d0892de05ca4e26ef1c9a59c2a57e43b865a/contourpy-1.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:2a2a8b627d5cc6b7c41a4beff6c5ad5eb848c88255fda4a8745f7e901b32d8e4", size = 1322295, upload-time = "2025-07-26T12:02:44.668Z" }, + { url = "https://files.pythonhosted.org/packages/3c/b2/6d913d4d04e14379de429057cd169e5e00f6c2af3bb13e1710bcbdb5da12/contourpy-1.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:fd6ec6be509c787f1caf6b247f0b1ca598bef13f4ddeaa126b7658215529ba0f", size = 1391027, upload-time = "2025-07-26T12:02:47.09Z" }, + { url = "https://files.pythonhosted.org/packages/93/8a/68a4ec5c55a2971213d29a9374913f7e9f18581945a7a31d1a39b5d2dfe5/contourpy-1.3.3-cp314-cp314t-win32.whl", hash = "sha256:e74a9a0f5e3fff48fb5a7f2fd2b9b70a3fe014a67522f79b7cca4c0c7e43c9ae", size = 202428, upload-time = "2025-07-26T12:02:48.691Z" }, + { url = 
"https://files.pythonhosted.org/packages/fa/96/fd9f641ffedc4fa3ace923af73b9d07e869496c9cc7a459103e6e978992f/contourpy-1.3.3-cp314-cp314t-win_amd64.whl", hash = "sha256:13b68d6a62db8eafaebb8039218921399baf6e47bf85006fd8529f2a08ef33fc", size = 250331, upload-time = "2025-07-26T12:02:50.137Z" }, + { url = "https://files.pythonhosted.org/packages/ae/8c/469afb6465b853afff216f9528ffda78a915ff880ed58813ba4faf4ba0b6/contourpy-1.3.3-cp314-cp314t-win_arm64.whl", hash = "sha256:b7448cb5a725bb1e35ce88771b86fba35ef418952474492cf7c764059933ff8b", size = 203831, upload-time = "2025-07-26T12:02:51.449Z" }, +] + +[[package]] +name = "cryptography" +version = "46.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004, upload-time = "2025-10-15T23:16:52.239Z" }, + { url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667, upload-time = "2025-10-15T23:16:54.369Z" }, + { url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" }, + { url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" }, + { url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" }, + { url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" }, + { url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" }, + { url = 
"https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" }, + { url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" }, + { url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" }, + { url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" }, + { url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" }, + { url = "https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988, upload-time = "2025-10-15T23:17:14.65Z" }, + { url = "https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451, upload-time = "2025-10-15T23:17:16.142Z" }, + { url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007, upload-time = "2025-10-15T23:17:18.04Z" }, + { url = "https://files.pythonhosted.org/packages/f5/e2/a510aa736755bffa9d2f75029c229111a1d02f8ecd5de03078f4c18d91a3/cryptography-46.0.3-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:00a5e7e87938e5ff9ff5447ab086a5706a957137e6e433841e9d24f38a065217", size = 7158012, upload-time = "2025-10-15T23:17:19.982Z" }, + { url = "https://files.pythonhosted.org/packages/73/dc/9aa866fbdbb95b02e7f9d086f1fccfeebf8953509b87e3f28fff927ff8a0/cryptography-46.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c8daeb2d2174beb4575b77482320303f3d39b8e81153da4f0fb08eb5fe86a6c5", size = 4288728, upload-time = "2025-10-15T23:17:21.527Z" }, + { url = "https://files.pythonhosted.org/packages/c5/fd/bc1daf8230eaa075184cbbf5f8cd00ba9db4fd32d63fb83da4671b72ed8a/cryptography-46.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39b6755623145ad5eff1dab323f4eae2a32a77a7abef2c5089a04a3d04366715", size = 4435078, upload-time = "2025-10-15T23:17:23.042Z" }, + { url = 
"https://files.pythonhosted.org/packages/82/98/d3bd5407ce4c60017f8ff9e63ffee4200ab3e23fe05b765cab805a7db008/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:db391fa7c66df6762ee3f00c95a89e6d428f4d60e7abc8328f4fe155b5ac6e54", size = 4293460, upload-time = "2025-10-15T23:17:24.885Z" }, + { url = "https://files.pythonhosted.org/packages/26/e9/e23e7900983c2b8af7a08098db406cf989d7f09caea7897e347598d4cd5b/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:78a97cf6a8839a48c49271cdcbd5cf37ca2c1d6b7fdd86cc864f302b5e9bf459", size = 3995237, upload-time = "2025-10-15T23:17:26.449Z" }, + { url = "https://files.pythonhosted.org/packages/91/15/af68c509d4a138cfe299d0d7ddb14afba15233223ebd933b4bbdbc7155d3/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:dfb781ff7eaa91a6f7fd41776ec37c5853c795d3b358d4896fdbb5df168af422", size = 4967344, upload-time = "2025-10-15T23:17:28.06Z" }, + { url = "https://files.pythonhosted.org/packages/ca/e3/8643d077c53868b681af077edf6b3cb58288b5423610f21c62aadcbe99f4/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:6f61efb26e76c45c4a227835ddeae96d83624fb0d29eb5df5b96e14ed1a0afb7", size = 4466564, upload-time = "2025-10-15T23:17:29.665Z" }, + { url = "https://files.pythonhosted.org/packages/0e/43/c1e8726fa59c236ff477ff2b5dc071e54b21e5a1e51aa2cee1676f1c986f/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:23b1a8f26e43f47ceb6d6a43115f33a5a37d57df4ea0ca295b780ae8546e8044", size = 4292415, upload-time = "2025-10-15T23:17:31.686Z" }, + { url = "https://files.pythonhosted.org/packages/42/f9/2f8fefdb1aee8a8e3256a0568cffc4e6d517b256a2fe97a029b3f1b9fe7e/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:b419ae593c86b87014b9be7396b385491ad7f320bde96826d0dd174459e54665", size = 4931457, upload-time = "2025-10-15T23:17:33.478Z" }, + { url = "https://files.pythonhosted.org/packages/79/30/9b54127a9a778ccd6d27c3da7563e9f2d341826075ceab89ae3b41bf5be2/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:50fc3343ac490c6b08c0cf0d704e881d0d660be923fd3076db3e932007e726e3", size = 4466074, upload-time = "2025-10-15T23:17:35.158Z" }, + { url = "https://files.pythonhosted.org/packages/ac/68/b4f4a10928e26c941b1b6a179143af9f4d27d88fe84a6a3c53592d2e76bf/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22d7e97932f511d6b0b04f2bfd818d73dcd5928db509460aaf48384778eb6d20", size = 4420569, upload-time = "2025-10-15T23:17:37.188Z" }, + { url = "https://files.pythonhosted.org/packages/a3/49/3746dab4c0d1979888f125226357d3262a6dd40e114ac29e3d2abdf1ec55/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d55f3dffadd674514ad19451161118fd010988540cee43d8bc20675e775925de", size = 4681941, upload-time = "2025-10-15T23:17:39.236Z" }, + { url = "https://files.pythonhosted.org/packages/fd/30/27654c1dbaf7e4a3531fa1fc77986d04aefa4d6d78259a62c9dc13d7ad36/cryptography-46.0.3-cp314-cp314t-win32.whl", hash = "sha256:8a6e050cb6164d3f830453754094c086ff2d0b2f3a897a1d9820f6139a1f0914", size = 3022339, upload-time = "2025-10-15T23:17:40.888Z" }, + { url = "https://files.pythonhosted.org/packages/f6/30/640f34ccd4d2a1bc88367b54b926b781b5a018d65f404d409aba76a84b1c/cryptography-46.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:760f83faa07f8b64e9c33fc963d790a2edb24efb479e3520c14a45741cd9b2db", size = 3494315, upload-time = "2025-10-15T23:17:42.769Z" }, + { url = 
"https://files.pythonhosted.org/packages/ba/8b/88cc7e3bd0a8e7b861f26981f7b820e1f46aa9d26cc482d0feba0ecb4919/cryptography-46.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:516ea134e703e9fe26bcd1277a4b59ad30586ea90c365a87781d7887a646fe21", size = 2919331, upload-time = "2025-10-15T23:17:44.468Z" }, + { url = "https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248, upload-time = "2025-10-15T23:17:46.294Z" }, + { url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" }, + { url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" }, + { url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" }, + { url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = "2025-10-15T23:17:52.964Z" }, + { url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" }, + { url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" }, + { url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" }, + { url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" }, + { 
url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" }, + { url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" }, + { url = "https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695, upload-time = "2025-10-15T23:18:08.672Z" }, + { url = "https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720, upload-time = "2025-10-15T23:18:10.632Z" }, + { url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" }, +] + +[[package]] +name = "cycler" +version = "0.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a9/95/a3dbbb5028f35eafb79008e7522a75244477d2838f38cbb722248dabc2a8/cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c", size = 7615, upload-time = "2023-10-07T05:32:18.335Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e7/05/c19819d5e3d95294a6f5947fb9b9629efb316b96de511b418c53d245aae6/cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30", size = 8321, upload-time = "2023-10-07T05:32:16.783Z" }, +] + +[[package]] +name = "dateparser" +version = "1.2.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "python-dateutil" }, + { name = "pytz" }, + { name = "regex" }, + { name = "tzlocal" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a9/30/064144f0df1749e7bb5faaa7f52b007d7c2d08ec08fed8411aba87207f68/dateparser-1.2.2.tar.gz", hash = "sha256:986316f17cb8cdc23ea8ce563027c5ef12fc725b6fb1d137c14ca08777c5ecf7", size = 329840, upload-time = "2025-06-26T09:29:23.211Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/87/22/f020c047ae1346613db9322638186468238bcfa8849b4668a22b97faad65/dateparser-1.2.2-py3-none-any.whl", hash = "sha256:5a5d7211a09013499867547023a2a0c91d5a27d15dd4dbcea676ea9fe66f2482", size = 315453, upload-time = "2025-06-26T09:29:21.412Z" }, +] + +[[package]] +name = "docker" +version = "7.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pywin32", marker = "sys_platform == 'win32'" }, + { name = "requests" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/91/9b/4a2ea29aeba62471211598dac5d96825bb49348fa07e906ea930394a83ce/docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c", size = 117834, 
upload-time = "2024-05-23T11:13:57.216Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774, upload-time = "2024-05-23T11:13:55.01Z" }, +] + +[[package]] +name = "docutils" +version = "0.19" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/330ea8d383eb2ce973df34d1239b3b21e91cd8c865d21ff82902d952f91f/docutils-0.19.tar.gz", hash = "sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6", size = 2056383, upload-time = "2022-07-05T20:17:31.045Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/93/69/e391bd51bc08ed9141ecd899a0ddb61ab6465309f1eb470905c0c8868081/docutils-0.19-py3-none-any.whl", hash = "sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc", size = 570472, upload-time = "2022-07-05T20:17:26.388Z" }, +] + +[[package]] +name = "filelock" +version = "3.20.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/23/ce7a1126827cedeb958fc043d61745754464eb56c5937c35bbf2b8e26f34/filelock-3.20.1.tar.gz", hash = "sha256:b8360948b351b80f420878d8516519a2204b07aefcdcfd24912a5d33127f188c", size = 19476, upload-time = "2025-12-15T23:54:28.027Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/7f/a1a97644e39e7316d850784c642093c99df1290a460df4ede27659056834/filelock-3.20.1-py3-none-any.whl", hash = "sha256:15d9e9a67306188a44baa72f569d2bfd803076269365fdea0934385da4dc361a", size = 16666, upload-time = "2025-12-15T23:54:26.874Z" }, +] + +[[package]] +name = "fonttools" +version = "4.61.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ec/ca/cf17b88a8df95691275a3d77dc0a5ad9907f328ae53acbe6795da1b2f5ed/fonttools-4.61.1.tar.gz", hash = "sha256:6675329885c44657f826ef01d9e4fb33b9158e9d93c537d84ad8399539bc6f69", size = 3565756, upload-time = "2025-12-12T17:31:24.246Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6f/16/7decaa24a1bd3a70c607b2e29f0adc6159f36a7e40eaba59846414765fd4/fonttools-4.61.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f3cb4a569029b9f291f88aafc927dd53683757e640081ca8c412781ea144565e", size = 2851593, upload-time = "2025-12-12T17:30:04.225Z" }, + { url = "https://files.pythonhosted.org/packages/94/98/3c4cb97c64713a8cf499b3245c3bf9a2b8fd16a3e375feff2aed78f96259/fonttools-4.61.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:41a7170d042e8c0024703ed13b71893519a1a6d6e18e933e3ec7507a2c26a4b2", size = 2400231, upload-time = "2025-12-12T17:30:06.47Z" }, + { url = "https://files.pythonhosted.org/packages/b7/37/82dbef0f6342eb01f54bca073ac1498433d6ce71e50c3c3282b655733b31/fonttools-4.61.1-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10d88e55330e092940584774ee5e8a6971b01fc2f4d3466a1d6c158230880796", size = 4954103, upload-time = "2025-12-12T17:30:08.432Z" }, + { url = "https://files.pythonhosted.org/packages/6c/44/f3aeac0fa98e7ad527f479e161aca6c3a1e47bb6996b053d45226fe37bf2/fonttools-4.61.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:15acc09befd16a0fb8a8f62bc147e1a82817542d72184acca9ce6e0aeda9fa6d", size = 5004295, upload-time = "2025-12-12T17:30:10.56Z" }, + { url = 
"https://files.pythonhosted.org/packages/14/e8/7424ced75473983b964d09f6747fa09f054a6d656f60e9ac9324cf40c743/fonttools-4.61.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e6bcdf33aec38d16508ce61fd81838f24c83c90a1d1b8c68982857038673d6b8", size = 4944109, upload-time = "2025-12-12T17:30:12.874Z" }, + { url = "https://files.pythonhosted.org/packages/c8/8b/6391b257fa3d0b553d73e778f953a2f0154292a7a7a085e2374b111e5410/fonttools-4.61.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5fade934607a523614726119164ff621e8c30e8fa1ffffbbd358662056ba69f0", size = 5093598, upload-time = "2025-12-12T17:30:15.79Z" }, + { url = "https://files.pythonhosted.org/packages/d9/71/fd2ea96cdc512d92da5678a1c98c267ddd4d8c5130b76d0f7a80f9a9fde8/fonttools-4.61.1-cp312-cp312-win32.whl", hash = "sha256:75da8f28eff26defba42c52986de97b22106cb8f26515b7c22443ebc9c2d3261", size = 2269060, upload-time = "2025-12-12T17:30:18.058Z" }, + { url = "https://files.pythonhosted.org/packages/80/3b/a3e81b71aed5a688e89dfe0e2694b26b78c7d7f39a5ffd8a7d75f54a12a8/fonttools-4.61.1-cp312-cp312-win_amd64.whl", hash = "sha256:497c31ce314219888c0e2fce5ad9178ca83fe5230b01a5006726cdf3ac9f24d9", size = 2319078, upload-time = "2025-12-12T17:30:22.862Z" }, + { url = "https://files.pythonhosted.org/packages/4b/cf/00ba28b0990982530addb8dc3e9e6f2fa9cb5c20df2abdda7baa755e8fe1/fonttools-4.61.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8c56c488ab471628ff3bfa80964372fc13504ece601e0d97a78ee74126b2045c", size = 2846454, upload-time = "2025-12-12T17:30:24.938Z" }, + { url = "https://files.pythonhosted.org/packages/5a/ca/468c9a8446a2103ae645d14fee3f610567b7042aba85031c1c65e3ef7471/fonttools-4.61.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dc492779501fa723b04d0ab1f5be046797fee17d27700476edc7ee9ae535a61e", size = 2398191, upload-time = "2025-12-12T17:30:27.343Z" }, + { url = "https://files.pythonhosted.org/packages/a3/4b/d67eedaed19def5967fade3297fed8161b25ba94699efc124b14fb68cdbc/fonttools-4.61.1-cp313-cp313-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:64102ca87e84261419c3747a0d20f396eb024bdbeb04c2bfb37e2891f5fadcb5", size = 4928410, upload-time = "2025-12-12T17:30:29.771Z" }, + { url = "https://files.pythonhosted.org/packages/b0/8d/6fb3494dfe61a46258cd93d979cf4725ded4eb46c2a4ca35e4490d84daea/fonttools-4.61.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c1b526c8d3f615a7b1867f38a9410849c8f4aef078535742198e942fba0e9bd", size = 4984460, upload-time = "2025-12-12T17:30:32.073Z" }, + { url = "https://files.pythonhosted.org/packages/f7/f1/a47f1d30b3dc00d75e7af762652d4cbc3dff5c2697a0dbd5203c81afd9c3/fonttools-4.61.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:41ed4b5ec103bd306bb68f81dc166e77409e5209443e5773cb4ed837bcc9b0d3", size = 4925800, upload-time = "2025-12-12T17:30:34.339Z" }, + { url = "https://files.pythonhosted.org/packages/a7/01/e6ae64a0981076e8a66906fab01539799546181e32a37a0257b77e4aa88b/fonttools-4.61.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b501c862d4901792adaec7c25b1ecc749e2662543f68bb194c42ba18d6eec98d", size = 5067859, upload-time = "2025-12-12T17:30:36.593Z" }, + { url = "https://files.pythonhosted.org/packages/73/aa/28e40b8d6809a9b5075350a86779163f074d2b617c15d22343fce81918db/fonttools-4.61.1-cp313-cp313-win32.whl", hash = "sha256:4d7092bb38c53bbc78e9255a59158b150bcdc115a1e3b3ce0b5f267dc35dd63c", size = 2267821, upload-time = "2025-12-12T17:30:38.478Z" }, + { url = 
"https://files.pythonhosted.org/packages/1a/59/453c06d1d83dc0951b69ef692d6b9f1846680342927df54e9a1ca91c6f90/fonttools-4.61.1-cp313-cp313-win_amd64.whl", hash = "sha256:21e7c8d76f62ab13c9472ccf74515ca5b9a761d1bde3265152a6dc58700d895b", size = 2318169, upload-time = "2025-12-12T17:30:40.951Z" }, + { url = "https://files.pythonhosted.org/packages/32/8f/4e7bf82c0cbb738d3c2206c920ca34ca74ef9dabde779030145d28665104/fonttools-4.61.1-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:fff4f534200a04b4a36e7ae3cb74493afe807b517a09e99cb4faa89a34ed6ecd", size = 2846094, upload-time = "2025-12-12T17:30:43.511Z" }, + { url = "https://files.pythonhosted.org/packages/71/09/d44e45d0a4f3a651f23a1e9d42de43bc643cce2971b19e784cc67d823676/fonttools-4.61.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:d9203500f7c63545b4ce3799319fe4d9feb1a1b89b28d3cb5abd11b9dd64147e", size = 2396589, upload-time = "2025-12-12T17:30:45.681Z" }, + { url = "https://files.pythonhosted.org/packages/89/18/58c64cafcf8eb677a99ef593121f719e6dcbdb7d1c594ae5a10d4997ca8a/fonttools-4.61.1-cp314-cp314-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fa646ecec9528bef693415c79a86e733c70a4965dd938e9a226b0fc64c9d2e6c", size = 4877892, upload-time = "2025-12-12T17:30:47.709Z" }, + { url = "https://files.pythonhosted.org/packages/8a/ec/9e6b38c7ba1e09eb51db849d5450f4c05b7e78481f662c3b79dbde6f3d04/fonttools-4.61.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:11f35ad7805edba3aac1a3710d104592df59f4b957e30108ae0ba6c10b11dd75", size = 4972884, upload-time = "2025-12-12T17:30:49.656Z" }, + { url = "https://files.pythonhosted.org/packages/5e/87/b5339da8e0256734ba0dbbf5b6cdebb1dd79b01dc8c270989b7bcd465541/fonttools-4.61.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b931ae8f62db78861b0ff1ac017851764602288575d65b8e8ff1963fed419063", size = 4924405, upload-time = "2025-12-12T17:30:51.735Z" }, + { url = "https://files.pythonhosted.org/packages/0b/47/e3409f1e1e69c073a3a6fd8cb886eb18c0bae0ee13db2c8d5e7f8495e8b7/fonttools-4.61.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b148b56f5de675ee16d45e769e69f87623a4944f7443850bf9a9376e628a89d2", size = 5035553, upload-time = "2025-12-12T17:30:54.823Z" }, + { url = "https://files.pythonhosted.org/packages/bf/b6/1f6600161b1073a984294c6c031e1a56ebf95b6164249eecf30012bb2e38/fonttools-4.61.1-cp314-cp314-win32.whl", hash = "sha256:9b666a475a65f4e839d3d10473fad6d47e0a9db14a2f4a224029c5bfde58ad2c", size = 2271915, upload-time = "2025-12-12T17:30:57.913Z" }, + { url = "https://files.pythonhosted.org/packages/52/7b/91e7b01e37cc8eb0e1f770d08305b3655e4f002fc160fb82b3390eabacf5/fonttools-4.61.1-cp314-cp314-win_amd64.whl", hash = "sha256:4f5686e1fe5fce75d82d93c47a438a25bf0d1319d2843a926f741140b2b16e0c", size = 2323487, upload-time = "2025-12-12T17:30:59.804Z" }, + { url = "https://files.pythonhosted.org/packages/39/5c/908ad78e46c61c3e3ed70c3b58ff82ab48437faf84ec84f109592cabbd9f/fonttools-4.61.1-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:e76ce097e3c57c4bcb67c5aa24a0ecdbd9f74ea9219997a707a4061fbe2707aa", size = 2929571, upload-time = "2025-12-12T17:31:02.574Z" }, + { url = "https://files.pythonhosted.org/packages/bd/41/975804132c6dea64cdbfbaa59f3518a21c137a10cccf962805b301ac6ab2/fonttools-4.61.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:9cfef3ab326780c04d6646f68d4b4742aae222e8b8ea1d627c74e38afcbc9d91", size = 2435317, upload-time = "2025-12-12T17:31:04.974Z" }, + { url = 
"https://files.pythonhosted.org/packages/b0/5a/aef2a0a8daf1ebaae4cfd83f84186d4a72ee08fd6a8451289fcd03ffa8a4/fonttools-4.61.1-cp314-cp314t-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:a75c301f96db737e1c5ed5fd7d77d9c34466de16095a266509e13da09751bd19", size = 4882124, upload-time = "2025-12-12T17:31:07.456Z" }, + { url = "https://files.pythonhosted.org/packages/80/33/d6db3485b645b81cea538c9d1c9219d5805f0877fda18777add4671c5240/fonttools-4.61.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:91669ccac46bbc1d09e9273546181919064e8df73488ea087dcac3e2968df9ba", size = 5100391, upload-time = "2025-12-12T17:31:09.732Z" }, + { url = "https://files.pythonhosted.org/packages/6c/d6/675ba631454043c75fcf76f0ca5463eac8eb0666ea1d7badae5fea001155/fonttools-4.61.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c33ab3ca9d3ccd581d58e989d67554e42d8d4ded94ab3ade3508455fe70e65f7", size = 4978800, upload-time = "2025-12-12T17:31:11.681Z" }, + { url = "https://files.pythonhosted.org/packages/7f/33/d3ec753d547a8d2bdaedd390d4a814e8d5b45a093d558f025c6b990b554c/fonttools-4.61.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:664c5a68ec406f6b1547946683008576ef8b38275608e1cee6c061828171c118", size = 5006426, upload-time = "2025-12-12T17:31:13.764Z" }, + { url = "https://files.pythonhosted.org/packages/b4/40/cc11f378b561a67bea850ab50063366a0d1dd3f6d0a30ce0f874b0ad5664/fonttools-4.61.1-cp314-cp314t-win32.whl", hash = "sha256:aed04cabe26f30c1647ef0e8fbb207516fd40fe9472e9439695f5c6998e60ac5", size = 2335377, upload-time = "2025-12-12T17:31:16.49Z" }, + { url = "https://files.pythonhosted.org/packages/e4/ff/c9a2b66b39f8628531ea58b320d66d951267c98c6a38684daa8f50fb02f8/fonttools-4.61.1-cp314-cp314t-win_amd64.whl", hash = "sha256:2180f14c141d2f0f3da43f3a81bc8aa4684860f6b0e6f9e165a4831f24e6a23b", size = 2400613, upload-time = "2025-12-12T17:31:18.769Z" }, + { url = "https://files.pythonhosted.org/packages/c7/4e/ce75a57ff3aebf6fc1f4e9d508b8e5810618a33d900ad6c19eb30b290b97/fonttools-4.61.1-py3-none-any.whl", hash = "sha256:17d2bf5d541add43822bcf0c43d7d847b160c9bb01d15d5007d84e2217aaa371", size = 1148996, upload-time = "2025-12-12T17:31:21.03Z" }, +] + +[[package]] +name = "google-api-core" +version = "2.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-auth" }, + { name = "googleapis-common-protos" }, + { name = "proto-plus" }, + { name = "protobuf" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/da/83d7043169ac2c8c7469f0e375610d78ae2160134bf1b80634c482fa079c/google_api_core-2.28.1.tar.gz", hash = "sha256:2b405df02d68e68ce0fbc138559e6036559e685159d148ae5861013dc201baf8", size = 176759, upload-time = "2025-10-28T21:34:51.529Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ed/d4/90197b416cb61cefd316964fd9e7bd8324bcbafabf40eef14a9f20b81974/google_api_core-2.28.1-py3-none-any.whl", hash = "sha256:4021b0f8ceb77a6fb4de6fde4502cecab45062e66ff4f2895169e0b35bc9466c", size = 173706, upload-time = "2025-10-28T21:34:50.151Z" }, +] + +[[package]] +name = "google-auth" +version = "2.45.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cachetools" }, + { name = "pyasn1-modules" }, + { name = "rsa" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e5/00/3c794502a8b892c404b2dea5b3650eb21bfc7069612fbfd15c7f17c1cb0d/google_auth-2.45.0.tar.gz", hash = 
"sha256:90d3f41b6b72ea72dd9811e765699ee491ab24139f34ebf1ca2b9cc0c38708f3", size = 320708, upload-time = "2025-12-15T22:58:42.889Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c6/97/451d55e05487a5cd6279a01a7e34921858b16f7dc8aa38a2c684743cd2b3/google_auth-2.45.0-py2.py3-none-any.whl", hash = "sha256:82344e86dc00410ef5382d99be677c6043d72e502b625aa4f4afa0bdacca0f36", size = 233312, upload-time = "2025-12-15T22:58:40.777Z" }, +] + +[[package]] +name = "google-cloud-core" +version = "2.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core" }, + { name = "google-auth" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a6/03/ef0bc99d0e0faf4fdbe67ac445e18cdaa74824fd93cd069e7bb6548cb52d/google_cloud_core-2.5.0.tar.gz", hash = "sha256:7c1b7ef5c92311717bd05301aa1a91ffbc565673d3b0b4163a52d8413a186963", size = 36027, upload-time = "2025-10-29T23:17:39.513Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/89/20/bfa472e327c8edee00f04beecc80baeddd2ab33ee0e86fd7654da49d45e9/google_cloud_core-2.5.0-py3-none-any.whl", hash = "sha256:67d977b41ae6c7211ee830c7912e41003ea8194bff15ae7d72fd6f51e57acabc", size = 29469, upload-time = "2025-10-29T23:17:38.548Z" }, +] + +[[package]] +name = "google-cloud-storage" +version = "3.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core" }, + { name = "google-auth" }, + { name = "google-cloud-core" }, + { name = "google-crc32c" }, + { name = "google-resumable-media" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d2/8e/fab2de1a0ab7fdbd452eaae5a9a5c933d0911c26b04efa0c76ddfd921259/google_cloud_storage-3.7.0.tar.gz", hash = "sha256:9ce59c65f4d6e372effcecc0456680a8d73cef4f2dc9212a0704799cb3d69237", size = 17258914, upload-time = "2025-12-09T18:24:48.97Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2d/80/6e5c7c83cea15ed4dfc4843b9df9db0716bc551ac938f7b5dd18a72bd5e4/google_cloud_storage-3.7.0-py3-none-any.whl", hash = "sha256:469bc9540936e02f8a4bfd1619e9dca1e42dec48f95e4204d783b36476a15093", size = 303364, upload-time = "2025-12-09T18:24:47.343Z" }, +] + +[[package]] +name = "google-crc32c" +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/03/41/4b9c02f99e4c5fb477122cd5437403b552873f014616ac1d19ac8221a58d/google_crc32c-1.8.0.tar.gz", hash = "sha256:a428e25fb7691024de47fecfbff7ff957214da51eddded0da0ae0e0f03a2cf79", size = 14192, upload-time = "2025-12-16T00:35:25.142Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/5f/7307325b1198b59324c0fa9807cafb551afb65e831699f2ce211ad5c8240/google_crc32c-1.8.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:4b8286b659c1335172e39563ab0a768b8015e88e08329fa5321f774275fc3113", size = 31300, upload-time = "2025-12-16T00:21:56.723Z" }, + { url = "https://files.pythonhosted.org/packages/21/8e/58c0d5d86e2220e6a37befe7e6a94dd2f6006044b1a33edf1ff6d9f7e319/google_crc32c-1.8.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:2a3dc3318507de089c5384cc74d54318401410f82aa65b2d9cdde9d297aca7cb", size = 30867, upload-time = "2025-12-16T00:38:31.302Z" }, + { url = "https://files.pythonhosted.org/packages/ce/a9/a780cc66f86335a6019f557a8aaca8fbb970728f0efd2430d15ff1beae0e/google_crc32c-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = 
"sha256:14f87e04d613dfa218d6135e81b78272c3b904e2a7053b841481b38a7d901411", size = 33364, upload-time = "2025-12-16T00:40:22.96Z" }, + { url = "https://files.pythonhosted.org/packages/21/3f/3457ea803db0198c9aaca2dd373750972ce28a26f00544b6b85088811939/google_crc32c-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cb5c869c2923d56cb0c8e6bcdd73c009c36ae39b652dbe46a05eb4ef0ad01454", size = 33740, upload-time = "2025-12-16T00:40:23.96Z" }, + { url = "https://files.pythonhosted.org/packages/df/c0/87c2073e0c72515bb8733d4eef7b21548e8d189f094b5dad20b0ecaf64f6/google_crc32c-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:3cc0c8912038065eafa603b238abf252e204accab2a704c63b9e14837a854962", size = 34437, upload-time = "2025-12-16T00:35:21.395Z" }, + { url = "https://files.pythonhosted.org/packages/d1/db/000f15b41724589b0e7bc24bc7a8967898d8d3bc8caf64c513d91ef1f6c0/google_crc32c-1.8.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:3ebb04528e83b2634857f43f9bb8ef5b2bbe7f10f140daeb01b58f972d04736b", size = 31297, upload-time = "2025-12-16T00:23:20.709Z" }, + { url = "https://files.pythonhosted.org/packages/d7/0d/8ebed0c39c53a7e838e2a486da8abb0e52de135f1b376ae2f0b160eb4c1a/google_crc32c-1.8.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:450dc98429d3e33ed2926fc99ee81001928d63460f8538f21a5d6060912a8e27", size = 30867, upload-time = "2025-12-16T00:43:14.628Z" }, + { url = "https://files.pythonhosted.org/packages/ce/42/b468aec74a0354b34c8cbf748db20d6e350a68a2b0912e128cabee49806c/google_crc32c-1.8.0-cp313-cp313-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:3b9776774b24ba76831609ffbabce8cdf6fa2bd5e9df37b594221c7e333a81fa", size = 33344, upload-time = "2025-12-16T00:40:24.742Z" }, + { url = "https://files.pythonhosted.org/packages/1c/e8/b33784d6fc77fb5062a8a7854e43e1e618b87d5ddf610a88025e4de6226e/google_crc32c-1.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:89c17d53d75562edfff86679244830599ee0a48efc216200691de8b02ab6b2b8", size = 33694, upload-time = "2025-12-16T00:40:25.505Z" }, + { url = "https://files.pythonhosted.org/packages/92/b1/d3cbd4d988afb3d8e4db94ca953df429ed6db7282ed0e700d25e6c7bfc8d/google_crc32c-1.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:57a50a9035b75643996fbf224d6661e386c7162d1dfdab9bc4ca790947d1007f", size = 34435, upload-time = "2025-12-16T00:35:22.107Z" }, + { url = "https://files.pythonhosted.org/packages/21/88/8ecf3c2b864a490b9e7010c84fd203ec8cf3b280651106a3a74dd1b0ca72/google_crc32c-1.8.0-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:e6584b12cb06796d285d09e33f63309a09368b9d806a551d8036a4207ea43697", size = 31301, upload-time = "2025-12-16T00:24:48.527Z" }, + { url = "https://files.pythonhosted.org/packages/36/c6/f7ff6c11f5ca215d9f43d3629163727a272eabc356e5c9b2853df2bfe965/google_crc32c-1.8.0-cp314-cp314-macosx_12_0_x86_64.whl", hash = "sha256:f4b51844ef67d6cf2e9425983274da75f18b1597bb2c998e1c0a0e8d46f8f651", size = 30868, upload-time = "2025-12-16T00:48:12.163Z" }, + { url = "https://files.pythonhosted.org/packages/56/15/c25671c7aad70f8179d858c55a6ae8404902abe0cdcf32a29d581792b491/google_crc32c-1.8.0-cp314-cp314-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b0d1a7afc6e8e4635564ba8aa5c0548e3173e41b6384d7711a9123165f582de2", size = 33381, upload-time = "2025-12-16T00:40:26.268Z" }, + { url = 
"https://files.pythonhosted.org/packages/42/fa/f50f51260d7b0ef5d4898af122d8a7ec5a84e2984f676f746445f783705f/google_crc32c-1.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8b3f68782f3cbd1bce027e48768293072813469af6a61a86f6bb4977a4380f21", size = 33734, upload-time = "2025-12-16T00:40:27.028Z" }, + { url = "https://files.pythonhosted.org/packages/08/a5/7b059810934a09fb3ccb657e0843813c1fee1183d3bc2c8041800374aa2c/google_crc32c-1.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:d511b3153e7011a27ab6ee6bb3a5404a55b994dc1a7322c0b87b29606d9790e2", size = 34878, upload-time = "2025-12-16T00:35:23.142Z" }, +] + +[[package]] +name = "google-resumable-media" +version = "2.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-crc32c" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/64/d7/520b62a35b23038ff005e334dba3ffc75fcf583bee26723f1fd8fd4b6919/google_resumable_media-2.8.0.tar.gz", hash = "sha256:f1157ed8b46994d60a1bc432544db62352043113684d4e030ee02e77ebe9a1ae", size = 2163265, upload-time = "2025-11-17T15:38:06.659Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1f/0b/93afde9cfe012260e9fe1522f35c9b72d6ee222f316586b1f23ecf44d518/google_resumable_media-2.8.0-py3-none-any.whl", hash = "sha256:dd14a116af303845a8d932ddae161a26e86cc229645bc98b39f026f9b1717582", size = 81340, upload-time = "2025-11-17T15:38:05.594Z" }, +] + +[[package]] +name = "googleapis-common-protos" +version = "1.72.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e5/7b/adfd75544c415c487b33061fe7ae526165241c1ea133f9a9125a56b39fd8/googleapis_common_protos-1.72.0.tar.gz", hash = "sha256:e55a601c1b32b52d7a3e65f43563e2aa61bcd737998ee672ac9b951cd49319f5", size = 147433, upload-time = "2025-11-06T18:29:24.087Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c4/ab/09169d5a4612a5f92490806649ac8d41e3ec9129c636754575b3553f4ea4/googleapis_common_protos-1.72.0-py3-none-any.whl", hash = "sha256:4299c5a82d5ae1a9702ada957347726b167f9f8d1fc352477702a1e851ff4038", size = 297515, upload-time = "2025-11-06T18:29:13.14Z" }, +] + +[[package]] +name = "humanfriendly" +version = "10.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyreadline3", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cc/3f/2c29224acb2e2df4d2046e4c73ee2662023c58ff5b113c4c1adac0886c43/humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc", size = 360702, upload-time = "2021-09-17T21:40:43.31Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f0/0f/310fb31e39e2d734ccaa2c0fb981ee41f7bd5056ce9bc29b2248bd569169/humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477", size = 86794, upload-time = "2021-09-17T21:40:39.897Z" }, +] + +[[package]] +name = "idna" +version = "3.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, +] + +[[package]] +name = "importlib-metadata" +version = "8.7.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f3/49/3b30cad09e7771a4982d9975a8cbf64f00d4a1ececb53297f1d9a7be1b10/importlib_metadata-8.7.1.tar.gz", hash = "sha256:49fef1ae6440c182052f407c8d34a68f72efc36db9ca90dc0113398f2fdde8bb", size = 57107, upload-time = "2025-12-21T10:00:19.278Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl", hash = "sha256:5a1f80bf1daa489495071efbb095d75a634cf28a8bc299581244063b53176151", size = 27865, upload-time = "2025-12-21T10:00:18.329Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, +] + +[[package]] +name = "jmespath" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/00/2a/e867e8531cf3e36b41201936b7fa7ba7b5702dbef42922193f05c8976cd6/jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe", size = 25843, upload-time = "2022-06-17T18:00:12.224Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", size = 20256, upload-time = "2022-06-17T18:00:10.251Z" }, +] + +[[package]] +name = "joblib" +version = "1.5.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/41/f2/d34e8b3a08a9cc79a50b2208a93dce981fe615b64d5a4d4abee421d898df/joblib-1.5.3.tar.gz", hash = "sha256:8561a3269e6801106863fd0d6d84bb737be9e7631e33aaed3fb9ce5953688da3", size = 331603, upload-time = "2025-12-15T08:41:46.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7b/91/984aca2ec129e2757d1e4e3c81c3fcda9d0f85b74670a094cc443d9ee949/joblib-1.5.3-py3-none-any.whl", hash = "sha256:5fc3c5039fc5ca8c0276333a188bbd59d6b7ab37fe6632daa76bc7f9ec18e713", size = 309071, upload-time = "2025-12-15T08:41:44.973Z" }, +] + +[[package]] +name = "kiwisolver" +version = "1.4.9" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5c/3c/85844f1b0feb11ee581ac23fe5fce65cd049a200c1446708cc1b7f922875/kiwisolver-1.4.9.tar.gz", hash = "sha256:c3b22c26c6fd6811b0ae8363b95ca8ce4ea3c202d3d0975b2914310ceb1bcc4d", size = 97564, upload-time = "2025-08-10T21:27:49.279Z" } +wheels = [ 
+ { url = "https://files.pythonhosted.org/packages/86/c9/13573a747838aeb1c76e3267620daa054f4152444d1f3d1a2324b78255b5/kiwisolver-1.4.9-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ac5a486ac389dddcc5bef4f365b6ae3ffff2c433324fb38dd35e3fab7c957999", size = 123686, upload-time = "2025-08-10T21:26:10.034Z" }, + { url = "https://files.pythonhosted.org/packages/51/ea/2ecf727927f103ffd1739271ca19c424d0e65ea473fbaeea1c014aea93f6/kiwisolver-1.4.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2ba92255faa7309d06fe44c3a4a97efe1c8d640c2a79a5ef728b685762a6fd2", size = 66460, upload-time = "2025-08-10T21:26:11.083Z" }, + { url = "https://files.pythonhosted.org/packages/5b/5a/51f5464373ce2aeb5194508298a508b6f21d3867f499556263c64c621914/kiwisolver-1.4.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a2899935e724dd1074cb568ce7ac0dce28b2cd6ab539c8e001a8578eb106d14", size = 64952, upload-time = "2025-08-10T21:26:12.058Z" }, + { url = "https://files.pythonhosted.org/packages/70/90/6d240beb0f24b74371762873e9b7f499f1e02166a2d9c5801f4dbf8fa12e/kiwisolver-1.4.9-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f6008a4919fdbc0b0097089f67a1eb55d950ed7e90ce2cc3e640abadd2757a04", size = 1474756, upload-time = "2025-08-10T21:26:13.096Z" }, + { url = "https://files.pythonhosted.org/packages/12/42/f36816eaf465220f683fb711efdd1bbf7a7005a2473d0e4ed421389bd26c/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:67bb8b474b4181770f926f7b7d2f8c0248cbcb78b660fdd41a47054b28d2a752", size = 1276404, upload-time = "2025-08-10T21:26:14.457Z" }, + { url = "https://files.pythonhosted.org/packages/2e/64/bc2de94800adc830c476dce44e9b40fd0809cddeef1fde9fcf0f73da301f/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2327a4a30d3ee07d2fbe2e7933e8a37c591663b96ce42a00bc67461a87d7df77", size = 1294410, upload-time = "2025-08-10T21:26:15.73Z" }, + { url = "https://files.pythonhosted.org/packages/5f/42/2dc82330a70aa8e55b6d395b11018045e58d0bb00834502bf11509f79091/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7a08b491ec91b1d5053ac177afe5290adacf1f0f6307d771ccac5de30592d198", size = 1343631, upload-time = "2025-08-10T21:26:17.045Z" }, + { url = "https://files.pythonhosted.org/packages/22/fd/f4c67a6ed1aab149ec5a8a401c323cee7a1cbe364381bb6c9c0d564e0e20/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d8fc5c867c22b828001b6a38d2eaeb88160bf5783c6cb4a5e440efc981ce286d", size = 2224963, upload-time = "2025-08-10T21:26:18.737Z" }, + { url = "https://files.pythonhosted.org/packages/45/aa/76720bd4cb3713314677d9ec94dcc21ced3f1baf4830adde5bb9b2430a5f/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:3b3115b2581ea35bb6d1f24a4c90af37e5d9b49dcff267eeed14c3893c5b86ab", size = 2321295, upload-time = "2025-08-10T21:26:20.11Z" }, + { url = "https://files.pythonhosted.org/packages/80/19/d3ec0d9ab711242f56ae0dc2fc5d70e298bb4a1f9dfab44c027668c673a1/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:858e4c22fb075920b96a291928cb7dea5644e94c0ee4fcd5af7e865655e4ccf2", size = 2487987, upload-time = "2025-08-10T21:26:21.49Z" }, + { url = "https://files.pythonhosted.org/packages/39/e9/61e4813b2c97e86b6fdbd4dd824bf72d28bcd8d4849b8084a357bc0dd64d/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ed0fecd28cc62c54b262e3736f8bb2512d8dcfdc2bcf08be5f47f96bf405b145", size = 2291817, upload-time = 
"2025-08-10T21:26:22.812Z" }, + { url = "https://files.pythonhosted.org/packages/a0/41/85d82b0291db7504da3c2defe35c9a8a5c9803a730f297bd823d11d5fb77/kiwisolver-1.4.9-cp312-cp312-win_amd64.whl", hash = "sha256:f68208a520c3d86ea51acf688a3e3002615a7f0238002cccc17affecc86a8a54", size = 73895, upload-time = "2025-08-10T21:26:24.37Z" }, + { url = "https://files.pythonhosted.org/packages/e2/92/5f3068cf15ee5cb624a0c7596e67e2a0bb2adee33f71c379054a491d07da/kiwisolver-1.4.9-cp312-cp312-win_arm64.whl", hash = "sha256:2c1a4f57df73965f3f14df20b80ee29e6a7930a57d2d9e8491a25f676e197c60", size = 64992, upload-time = "2025-08-10T21:26:25.732Z" }, + { url = "https://files.pythonhosted.org/packages/31/c1/c2686cda909742ab66c7388e9a1a8521a59eb89f8bcfbee28fc980d07e24/kiwisolver-1.4.9-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a5d0432ccf1c7ab14f9949eec60c5d1f924f17c037e9f8b33352fa05799359b8", size = 123681, upload-time = "2025-08-10T21:26:26.725Z" }, + { url = "https://files.pythonhosted.org/packages/ca/f0/f44f50c9f5b1a1860261092e3bc91ecdc9acda848a8b8c6abfda4a24dd5c/kiwisolver-1.4.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efb3a45b35622bb6c16dbfab491a8f5a391fe0e9d45ef32f4df85658232ca0e2", size = 66464, upload-time = "2025-08-10T21:26:27.733Z" }, + { url = "https://files.pythonhosted.org/packages/2d/7a/9d90a151f558e29c3936b8a47ac770235f436f2120aca41a6d5f3d62ae8d/kiwisolver-1.4.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1a12cf6398e8a0a001a059747a1cbf24705e18fe413bc22de7b3d15c67cffe3f", size = 64961, upload-time = "2025-08-10T21:26:28.729Z" }, + { url = "https://files.pythonhosted.org/packages/e9/e9/f218a2cb3a9ffbe324ca29a9e399fa2d2866d7f348ec3a88df87fc248fc5/kiwisolver-1.4.9-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b67e6efbf68e077dd71d1a6b37e43e1a99d0bff1a3d51867d45ee8908b931098", size = 1474607, upload-time = "2025-08-10T21:26:29.798Z" }, + { url = "https://files.pythonhosted.org/packages/d9/28/aac26d4c882f14de59041636292bc838db8961373825df23b8eeb807e198/kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5656aa670507437af0207645273ccdfee4f14bacd7f7c67a4306d0dcaeaf6eed", size = 1276546, upload-time = "2025-08-10T21:26:31.401Z" }, + { url = "https://files.pythonhosted.org/packages/8b/ad/8bfc1c93d4cc565e5069162f610ba2f48ff39b7de4b5b8d93f69f30c4bed/kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:bfc08add558155345129c7803b3671cf195e6a56e7a12f3dde7c57d9b417f525", size = 1294482, upload-time = "2025-08-10T21:26:32.721Z" }, + { url = "https://files.pythonhosted.org/packages/da/f1/6aca55ff798901d8ce403206d00e033191f63d82dd708a186e0ed2067e9c/kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:40092754720b174e6ccf9e845d0d8c7d8e12c3d71e7fc35f55f3813e96376f78", size = 1343720, upload-time = "2025-08-10T21:26:34.032Z" }, + { url = "https://files.pythonhosted.org/packages/d1/91/eed031876c595c81d90d0f6fc681ece250e14bf6998c3d7c419466b523b7/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:497d05f29a1300d14e02e6441cf0f5ee81c1ff5a304b0d9fb77423974684e08b", size = 2224907, upload-time = "2025-08-10T21:26:35.824Z" }, + { url = "https://files.pythonhosted.org/packages/e9/ec/4d1925f2e49617b9cca9c34bfa11adefad49d00db038e692a559454dfb2e/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:bdd1a81a1860476eb41ac4bc1e07b3f07259e6d55bbf739b79c8aaedcf512799", size = 2321334, upload-time = 
"2025-08-10T21:26:37.534Z" }, + { url = "https://files.pythonhosted.org/packages/43/cb/450cd4499356f68802750c6ddc18647b8ea01ffa28f50d20598e0befe6e9/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e6b93f13371d341afee3be9f7c5964e3fe61d5fa30f6a30eb49856935dfe4fc3", size = 2488313, upload-time = "2025-08-10T21:26:39.191Z" }, + { url = "https://files.pythonhosted.org/packages/71/67/fc76242bd99f885651128a5d4fa6083e5524694b7c88b489b1b55fdc491d/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d75aa530ccfaa593da12834b86a0724f58bff12706659baa9227c2ccaa06264c", size = 2291970, upload-time = "2025-08-10T21:26:40.828Z" }, + { url = "https://files.pythonhosted.org/packages/75/bd/f1a5d894000941739f2ae1b65a32892349423ad49c2e6d0771d0bad3fae4/kiwisolver-1.4.9-cp313-cp313-win_amd64.whl", hash = "sha256:dd0a578400839256df88c16abddf9ba14813ec5f21362e1fe65022e00c883d4d", size = 73894, upload-time = "2025-08-10T21:26:42.33Z" }, + { url = "https://files.pythonhosted.org/packages/95/38/dce480814d25b99a391abbddadc78f7c117c6da34be68ca8b02d5848b424/kiwisolver-1.4.9-cp313-cp313-win_arm64.whl", hash = "sha256:d4188e73af84ca82468f09cadc5ac4db578109e52acb4518d8154698d3a87ca2", size = 64995, upload-time = "2025-08-10T21:26:43.889Z" }, + { url = "https://files.pythonhosted.org/packages/e2/37/7d218ce5d92dadc5ebdd9070d903e0c7cf7edfe03f179433ac4d13ce659c/kiwisolver-1.4.9-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:5a0f2724dfd4e3b3ac5a82436a8e6fd16baa7d507117e4279b660fe8ca38a3a1", size = 126510, upload-time = "2025-08-10T21:26:44.915Z" }, + { url = "https://files.pythonhosted.org/packages/23/b0/e85a2b48233daef4b648fb657ebbb6f8367696a2d9548a00b4ee0eb67803/kiwisolver-1.4.9-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:1b11d6a633e4ed84fc0ddafd4ebfd8ea49b3f25082c04ad12b8315c11d504dc1", size = 67903, upload-time = "2025-08-10T21:26:45.934Z" }, + { url = "https://files.pythonhosted.org/packages/44/98/f2425bc0113ad7de24da6bb4dae1343476e95e1d738be7c04d31a5d037fd/kiwisolver-1.4.9-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61874cdb0a36016354853593cffc38e56fc9ca5aa97d2c05d3dcf6922cd55a11", size = 66402, upload-time = "2025-08-10T21:26:47.101Z" }, + { url = "https://files.pythonhosted.org/packages/98/d8/594657886df9f34c4177cc353cc28ca7e6e5eb562d37ccc233bff43bbe2a/kiwisolver-1.4.9-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:60c439763a969a6af93b4881db0eed8fadf93ee98e18cbc35bc8da868d0c4f0c", size = 1582135, upload-time = "2025-08-10T21:26:48.665Z" }, + { url = "https://files.pythonhosted.org/packages/5c/c6/38a115b7170f8b306fc929e166340c24958347308ea3012c2b44e7e295db/kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92a2f997387a1b79a75e7803aa7ded2cfbe2823852ccf1ba3bcf613b62ae3197", size = 1389409, upload-time = "2025-08-10T21:26:50.335Z" }, + { url = "https://files.pythonhosted.org/packages/bf/3b/e04883dace81f24a568bcee6eb3001da4ba05114afa622ec9b6fafdc1f5e/kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a31d512c812daea6d8b3be3b2bfcbeb091dbb09177706569bcfc6240dcf8b41c", size = 1401763, upload-time = "2025-08-10T21:26:51.867Z" }, + { url = "https://files.pythonhosted.org/packages/9f/80/20ace48e33408947af49d7d15c341eaee69e4e0304aab4b7660e234d6288/kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:52a15b0f35dad39862d376df10c5230155243a2c1a436e39eb55623ccbd68185", size = 1453643, upload-time = 
"2025-08-10T21:26:53.592Z" }, + { url = "https://files.pythonhosted.org/packages/64/31/6ce4380a4cd1f515bdda976a1e90e547ccd47b67a1546d63884463c92ca9/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a30fd6fdef1430fd9e1ba7b3398b5ee4e2887783917a687d86ba69985fb08748", size = 2330818, upload-time = "2025-08-10T21:26:55.051Z" }, + { url = "https://files.pythonhosted.org/packages/fa/e9/3f3fcba3bcc7432c795b82646306e822f3fd74df0ee81f0fa067a1f95668/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cc9617b46837c6468197b5945e196ee9ca43057bb7d9d1ae688101e4e1dddf64", size = 2419963, upload-time = "2025-08-10T21:26:56.421Z" }, + { url = "https://files.pythonhosted.org/packages/99/43/7320c50e4133575c66e9f7dadead35ab22d7c012a3b09bb35647792b2a6d/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:0ab74e19f6a2b027ea4f845a78827969af45ce790e6cb3e1ebab71bdf9f215ff", size = 2594639, upload-time = "2025-08-10T21:26:57.882Z" }, + { url = "https://files.pythonhosted.org/packages/65/d6/17ae4a270d4a987ef8a385b906d2bdfc9fce502d6dc0d3aea865b47f548c/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dba5ee5d3981160c28d5490f0d1b7ed730c22470ff7f6cc26cfcfaacb9896a07", size = 2391741, upload-time = "2025-08-10T21:26:59.237Z" }, + { url = "https://files.pythonhosted.org/packages/2a/8f/8f6f491d595a9e5912971f3f863d81baddccc8a4d0c3749d6a0dd9ffc9df/kiwisolver-1.4.9-cp313-cp313t-win_arm64.whl", hash = "sha256:0749fd8f4218ad2e851e11cc4dc05c7cbc0cbc4267bdfdb31782e65aace4ee9c", size = 68646, upload-time = "2025-08-10T21:27:00.52Z" }, + { url = "https://files.pythonhosted.org/packages/6b/32/6cc0fbc9c54d06c2969faa9c1d29f5751a2e51809dd55c69055e62d9b426/kiwisolver-1.4.9-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:9928fe1eb816d11ae170885a74d074f57af3a0d65777ca47e9aeb854a1fba386", size = 123806, upload-time = "2025-08-10T21:27:01.537Z" }, + { url = "https://files.pythonhosted.org/packages/b2/dd/2bfb1d4a4823d92e8cbb420fe024b8d2167f72079b3bb941207c42570bdf/kiwisolver-1.4.9-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d0005b053977e7b43388ddec89fa567f43d4f6d5c2c0affe57de5ebf290dc552", size = 66605, upload-time = "2025-08-10T21:27:03.335Z" }, + { url = "https://files.pythonhosted.org/packages/f7/69/00aafdb4e4509c2ca6064646cba9cd4b37933898f426756adb2cb92ebbed/kiwisolver-1.4.9-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:2635d352d67458b66fd0667c14cb1d4145e9560d503219034a18a87e971ce4f3", size = 64925, upload-time = "2025-08-10T21:27:04.339Z" }, + { url = "https://files.pythonhosted.org/packages/43/dc/51acc6791aa14e5cb6d8a2e28cefb0dc2886d8862795449d021334c0df20/kiwisolver-1.4.9-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:767c23ad1c58c9e827b649a9ab7809fd5fd9db266a9cf02b0e926ddc2c680d58", size = 1472414, upload-time = "2025-08-10T21:27:05.437Z" }, + { url = "https://files.pythonhosted.org/packages/3d/bb/93fa64a81db304ac8a246f834d5094fae4b13baf53c839d6bb6e81177129/kiwisolver-1.4.9-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:72d0eb9fba308b8311685c2268cf7d0a0639a6cd027d8128659f72bdd8a024b4", size = 1281272, upload-time = "2025-08-10T21:27:07.063Z" }, + { url = "https://files.pythonhosted.org/packages/70/e6/6df102916960fb8d05069d4bd92d6d9a8202d5a3e2444494e7cd50f65b7a/kiwisolver-1.4.9-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f68e4f3eeca8fb22cc3d731f9715a13b652795ef657a13df1ad0c7dc0e9731df", size = 1298578, upload-time = 
"2025-08-10T21:27:08.452Z" }, + { url = "https://files.pythonhosted.org/packages/7c/47/e142aaa612f5343736b087864dbaebc53ea8831453fb47e7521fa8658f30/kiwisolver-1.4.9-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d84cd4061ae292d8ac367b2c3fa3aad11cb8625a95d135fe93f286f914f3f5a6", size = 1345607, upload-time = "2025-08-10T21:27:10.125Z" }, + { url = "https://files.pythonhosted.org/packages/54/89/d641a746194a0f4d1a3670fb900d0dbaa786fb98341056814bc3f058fa52/kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a60ea74330b91bd22a29638940d115df9dc00af5035a9a2a6ad9399ffb4ceca5", size = 2230150, upload-time = "2025-08-10T21:27:11.484Z" }, + { url = "https://files.pythonhosted.org/packages/aa/6b/5ee1207198febdf16ac11f78c5ae40861b809cbe0e6d2a8d5b0b3044b199/kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:ce6a3a4e106cf35c2d9c4fa17c05ce0b180db622736845d4315519397a77beaf", size = 2325979, upload-time = "2025-08-10T21:27:12.917Z" }, + { url = "https://files.pythonhosted.org/packages/fc/ff/b269eefd90f4ae14dcc74973d5a0f6d28d3b9bb1afd8c0340513afe6b39a/kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:77937e5e2a38a7b48eef0585114fe7930346993a88060d0bf886086d2aa49ef5", size = 2491456, upload-time = "2025-08-10T21:27:14.353Z" }, + { url = "https://files.pythonhosted.org/packages/fc/d4/10303190bd4d30de547534601e259a4fbf014eed94aae3e5521129215086/kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:24c175051354f4a28c5d6a31c93906dc653e2bf234e8a4bbfb964892078898ce", size = 2294621, upload-time = "2025-08-10T21:27:15.808Z" }, + { url = "https://files.pythonhosted.org/packages/28/e0/a9a90416fce5c0be25742729c2ea52105d62eda6c4be4d803c2a7be1fa50/kiwisolver-1.4.9-cp314-cp314-win_amd64.whl", hash = "sha256:0763515d4df10edf6d06a3c19734e2566368980d21ebec439f33f9eb936c07b7", size = 75417, upload-time = "2025-08-10T21:27:17.436Z" }, + { url = "https://files.pythonhosted.org/packages/1f/10/6949958215b7a9a264299a7db195564e87900f709db9245e4ebdd3c70779/kiwisolver-1.4.9-cp314-cp314-win_arm64.whl", hash = "sha256:0e4e2bf29574a6a7b7f6cb5fa69293b9f96c928949ac4a53ba3f525dffb87f9c", size = 66582, upload-time = "2025-08-10T21:27:18.436Z" }, + { url = "https://files.pythonhosted.org/packages/ec/79/60e53067903d3bc5469b369fe0dfc6b3482e2133e85dae9daa9527535991/kiwisolver-1.4.9-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:d976bbb382b202f71c67f77b0ac11244021cfa3f7dfd9e562eefcea2df711548", size = 126514, upload-time = "2025-08-10T21:27:19.465Z" }, + { url = "https://files.pythonhosted.org/packages/25/d1/4843d3e8d46b072c12a38c97c57fab4608d36e13fe47d47ee96b4d61ba6f/kiwisolver-1.4.9-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2489e4e5d7ef9a1c300a5e0196e43d9c739f066ef23270607d45aba368b91f2d", size = 67905, upload-time = "2025-08-10T21:27:20.51Z" }, + { url = "https://files.pythonhosted.org/packages/8c/ae/29ffcbd239aea8b93108de1278271ae764dfc0d803a5693914975f200596/kiwisolver-1.4.9-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:e2ea9f7ab7fbf18fffb1b5434ce7c69a07582f7acc7717720f1d69f3e806f90c", size = 66399, upload-time = "2025-08-10T21:27:21.496Z" }, + { url = "https://files.pythonhosted.org/packages/a1/ae/d7ba902aa604152c2ceba5d352d7b62106bedbccc8e95c3934d94472bfa3/kiwisolver-1.4.9-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b34e51affded8faee0dfdb705416153819d8ea9250bbbf7ea1b249bdeb5f1122", size = 1582197, upload-time = "2025-08-10T21:27:22.604Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/41/27c70d427eddb8bc7e4f16420a20fefc6f480312122a59a959fdfe0445ad/kiwisolver-1.4.9-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d8aacd3d4b33b772542b2e01beb50187536967b514b00003bdda7589722d2a64", size = 1390125, upload-time = "2025-08-10T21:27:24.036Z" }, + { url = "https://files.pythonhosted.org/packages/41/42/b3799a12bafc76d962ad69083f8b43b12bf4fe78b097b12e105d75c9b8f1/kiwisolver-1.4.9-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7cf974dd4e35fa315563ac99d6287a1024e4dc2077b8a7d7cd3d2fb65d283134", size = 1402612, upload-time = "2025-08-10T21:27:25.773Z" }, + { url = "https://files.pythonhosted.org/packages/d2/b5/a210ea073ea1cfaca1bb5c55a62307d8252f531beb364e18aa1e0888b5a0/kiwisolver-1.4.9-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:85bd218b5ecfbee8c8a82e121802dcb519a86044c9c3b2e4aef02fa05c6da370", size = 1453990, upload-time = "2025-08-10T21:27:27.089Z" }, + { url = "https://files.pythonhosted.org/packages/5f/ce/a829eb8c033e977d7ea03ed32fb3c1781b4fa0433fbadfff29e39c676f32/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0856e241c2d3df4efef7c04a1e46b1936b6120c9bcf36dd216e3acd84bc4fb21", size = 2331601, upload-time = "2025-08-10T21:27:29.343Z" }, + { url = "https://files.pythonhosted.org/packages/e0/4b/b5e97eb142eb9cd0072dacfcdcd31b1c66dc7352b0f7c7255d339c0edf00/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:9af39d6551f97d31a4deebeac6f45b156f9755ddc59c07b402c148f5dbb6482a", size = 2422041, upload-time = "2025-08-10T21:27:30.754Z" }, + { url = "https://files.pythonhosted.org/packages/40/be/8eb4cd53e1b85ba4edc3a9321666f12b83113a178845593307a3e7891f44/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:bb4ae2b57fc1d8cbd1cf7b1d9913803681ffa903e7488012be5b76dedf49297f", size = 2594897, upload-time = "2025-08-10T21:27:32.803Z" }, + { url = "https://files.pythonhosted.org/packages/99/dd/841e9a66c4715477ea0abc78da039832fbb09dac5c35c58dc4c41a407b8a/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:aedff62918805fb62d43a4aa2ecd4482c380dc76cd31bd7c8878588a61bd0369", size = 2391835, upload-time = "2025-08-10T21:27:34.23Z" }, + { url = "https://files.pythonhosted.org/packages/0c/28/4b2e5c47a0da96896fdfdb006340ade064afa1e63675d01ea5ac222b6d52/kiwisolver-1.4.9-cp314-cp314t-win_amd64.whl", hash = "sha256:1fa333e8b2ce4d9660f2cda9c0e1b6bafcfb2457a9d259faa82289e73ec24891", size = 79988, upload-time = "2025-08-10T21:27:35.587Z" }, + { url = "https://files.pythonhosted.org/packages/80/be/3578e8afd18c88cdf9cb4cffde75a96d2be38c5a903f1ed0ceec061bd09e/kiwisolver-1.4.9-cp314-cp314t-win_arm64.whl", hash = "sha256:4a48a2ce79d65d363597ef7b567ce3d14d68783d2b2263d98db3d9477805ba32", size = 70260, upload-time = "2025-08-10T21:27:36.606Z" }, +] + +[[package]] +name = "lark" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/da/34/28fff3ab31ccff1fd4f6c7c7b0ceb2b6968d8ea4950663eadcb5720591a0/lark-1.3.1.tar.gz", hash = "sha256:b426a7a6d6d53189d318f2b6236ab5d6429eaf09259f1ca33eb716eed10d2905", size = 382732, upload-time = "2025-10-27T18:25:56.653Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/82/3d/14ce75ef66813643812f3093ab17e46d3a206942ce7376d31ec2d36229e7/lark-1.3.1-py3-none-any.whl", hash = "sha256:c629b661023a014c37da873b4ff58a817398d12635d3bbb2c5a03be7fe5d1e12", size = 113151, upload-time = 
"2025-10-27T18:25:54.882Z" }, +] + +[[package]] +name = "matplotlib" +version = "3.10.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "contourpy" }, + { name = "cycler" }, + { name = "fonttools" }, + { name = "kiwisolver" }, + { name = "numpy" }, + { name = "packaging" }, + { name = "pillow" }, + { name = "pyparsing" }, + { name = "python-dateutil" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8a/76/d3c6e3a13fe484ebe7718d14e269c9569c4eb0020a968a327acb3b9a8fe6/matplotlib-3.10.8.tar.gz", hash = "sha256:2299372c19d56bcd35cf05a2738308758d32b9eaed2371898d8f5bd33f084aa3", size = 34806269, upload-time = "2025-12-10T22:56:51.155Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9e/67/f997cdcbb514012eb0d10cd2b4b332667997fb5ebe26b8d41d04962fa0e6/matplotlib-3.10.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:64fcc24778ca0404ce0cb7b6b77ae1f4c7231cdd60e6778f999ee05cbd581b9a", size = 8260453, upload-time = "2025-12-10T22:55:30.709Z" }, + { url = "https://files.pythonhosted.org/packages/7e/65/07d5f5c7f7c994f12c768708bd2e17a4f01a2b0f44a1c9eccad872433e2e/matplotlib-3.10.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b9a5ca4ac220a0cdd1ba6bcba3608547117d30468fefce49bb26f55c1a3d5c58", size = 8148321, upload-time = "2025-12-10T22:55:33.265Z" }, + { url = "https://files.pythonhosted.org/packages/3e/f3/c5195b1ae57ef85339fd7285dfb603b22c8b4e79114bae5f4f0fcf688677/matplotlib-3.10.8-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3ab4aabc72de4ff77b3ec33a6d78a68227bf1123465887f9905ba79184a1cc04", size = 8716944, upload-time = "2025-12-10T22:55:34.922Z" }, + { url = "https://files.pythonhosted.org/packages/00/f9/7638f5cc82ec8a7aa005de48622eecc3ed7c9854b96ba15bd76b7fd27574/matplotlib-3.10.8-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:24d50994d8c5816ddc35411e50a86ab05f575e2530c02752e02538122613371f", size = 9550099, upload-time = "2025-12-10T22:55:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/57/61/78cd5920d35b29fd2a0fe894de8adf672ff52939d2e9b43cb83cd5ce1bc7/matplotlib-3.10.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:99eefd13c0dc3b3c1b4d561c1169e65fe47aab7b8158754d7c084088e2329466", size = 9613040, upload-time = "2025-12-10T22:55:38.715Z" }, + { url = "https://files.pythonhosted.org/packages/30/4e/c10f171b6e2f44d9e3a2b96efa38b1677439d79c99357600a62cc1e9594e/matplotlib-3.10.8-cp312-cp312-win_amd64.whl", hash = "sha256:dd80ecb295460a5d9d260df63c43f4afbdd832d725a531f008dad1664f458adf", size = 8142717, upload-time = "2025-12-10T22:55:41.103Z" }, + { url = "https://files.pythonhosted.org/packages/f1/76/934db220026b5fef85f45d51a738b91dea7d70207581063cd9bd8fafcf74/matplotlib-3.10.8-cp312-cp312-win_arm64.whl", hash = "sha256:3c624e43ed56313651bc18a47f838b60d7b8032ed348911c54906b130b20071b", size = 8012751, upload-time = "2025-12-10T22:55:42.684Z" }, + { url = "https://files.pythonhosted.org/packages/3d/b9/15fd5541ef4f5b9a17eefd379356cf12175fe577424e7b1d80676516031a/matplotlib-3.10.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3f2e409836d7f5ac2f1c013110a4d50b9f7edc26328c108915f9075d7d7a91b6", size = 8261076, upload-time = "2025-12-10T22:55:44.648Z" }, + { url = "https://files.pythonhosted.org/packages/8d/a0/2ba3473c1b66b9c74dc7107c67e9008cb1782edbe896d4c899d39ae9cf78/matplotlib-3.10.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:56271f3dac49a88d7fca5060f004d9d22b865f743a12a23b1e937a0be4818ee1", size = 8148794, upload-time = 
"2025-12-10T22:55:46.252Z" }, + { url = "https://files.pythonhosted.org/packages/75/97/a471f1c3eb1fd6f6c24a31a5858f443891d5127e63a7788678d14e249aea/matplotlib-3.10.8-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a0a7f52498f72f13d4a25ea70f35f4cb60642b466cbb0a9be951b5bc3f45a486", size = 8718474, upload-time = "2025-12-10T22:55:47.864Z" }, + { url = "https://files.pythonhosted.org/packages/01/be/cd478f4b66f48256f42927d0acbcd63a26a893136456cd079c0cc24fbabf/matplotlib-3.10.8-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:646d95230efb9ca614a7a594d4fcacde0ac61d25e37dd51710b36477594963ce", size = 9549637, upload-time = "2025-12-10T22:55:50.048Z" }, + { url = "https://files.pythonhosted.org/packages/5d/7c/8dc289776eae5109e268c4fb92baf870678dc048a25d4ac903683b86d5bf/matplotlib-3.10.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f89c151aab2e2e23cb3fe0acad1e8b82841fd265379c4cecd0f3fcb34c15e0f6", size = 9613678, upload-time = "2025-12-10T22:55:52.21Z" }, + { url = "https://files.pythonhosted.org/packages/64/40/37612487cc8a437d4dd261b32ca21fe2d79510fe74af74e1f42becb1bdb8/matplotlib-3.10.8-cp313-cp313-win_amd64.whl", hash = "sha256:e8ea3e2d4066083e264e75c829078f9e149fa119d27e19acd503de65e0b13149", size = 8142686, upload-time = "2025-12-10T22:55:54.253Z" }, + { url = "https://files.pythonhosted.org/packages/66/52/8d8a8730e968185514680c2a6625943f70269509c3dcfc0dcf7d75928cb8/matplotlib-3.10.8-cp313-cp313-win_arm64.whl", hash = "sha256:c108a1d6fa78a50646029cb6d49808ff0fc1330fda87fa6f6250c6b5369b6645", size = 8012917, upload-time = "2025-12-10T22:55:56.268Z" }, + { url = "https://files.pythonhosted.org/packages/b5/27/51fe26e1062f298af5ef66343d8ef460e090a27fea73036c76c35821df04/matplotlib-3.10.8-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:ad3d9833a64cf48cc4300f2b406c3d0f4f4724a91c0bd5640678a6ba7c102077", size = 8305679, upload-time = "2025-12-10T22:55:57.856Z" }, + { url = "https://files.pythonhosted.org/packages/2c/1e/4de865bc591ac8e3062e835f42dd7fe7a93168d519557837f0e37513f629/matplotlib-3.10.8-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:eb3823f11823deade26ce3b9f40dcb4a213da7a670013929f31d5f5ed1055b22", size = 8198336, upload-time = "2025-12-10T22:55:59.371Z" }, + { url = "https://files.pythonhosted.org/packages/c6/cb/2f7b6e75fb4dce87ef91f60cac4f6e34f4c145ab036a22318ec837971300/matplotlib-3.10.8-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d9050fee89a89ed57b4fb2c1bfac9a3d0c57a0d55aed95949eedbc42070fea39", size = 8731653, upload-time = "2025-12-10T22:56:01.032Z" }, + { url = "https://files.pythonhosted.org/packages/46/b3/bd9c57d6ba670a37ab31fb87ec3e8691b947134b201f881665b28cc039ff/matplotlib-3.10.8-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b44d07310e404ba95f8c25aa5536f154c0a8ec473303535949e52eb71d0a1565", size = 9561356, upload-time = "2025-12-10T22:56:02.95Z" }, + { url = "https://files.pythonhosted.org/packages/c0/3d/8b94a481456dfc9dfe6e39e93b5ab376e50998cddfd23f4ae3b431708f16/matplotlib-3.10.8-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:0a33deb84c15ede243aead39f77e990469fff93ad1521163305095b77b72ce4a", size = 9614000, upload-time = "2025-12-10T22:56:05.411Z" }, + { url = "https://files.pythonhosted.org/packages/bd/cd/bc06149fe5585ba800b189a6a654a75f1f127e8aab02fd2be10df7fa500c/matplotlib-3.10.8-cp313-cp313t-win_amd64.whl", hash = "sha256:3a48a78d2786784cc2413e57397981fb45c79e968d99656706018d6e62e57958", size = 8220043, upload-time = 
"2025-12-10T22:56:07.551Z" }, + { url = "https://files.pythonhosted.org/packages/e3/de/b22cf255abec916562cc04eef457c13e58a1990048de0c0c3604d082355e/matplotlib-3.10.8-cp313-cp313t-win_arm64.whl", hash = "sha256:15d30132718972c2c074cd14638c7f4592bd98719e2308bccea40e0538bc0cb5", size = 8062075, upload-time = "2025-12-10T22:56:09.178Z" }, + { url = "https://files.pythonhosted.org/packages/3c/43/9c0ff7a2f11615e516c3b058e1e6e8f9614ddeca53faca06da267c48345d/matplotlib-3.10.8-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b53285e65d4fa4c86399979e956235deb900be5baa7fc1218ea67fbfaeaadd6f", size = 8262481, upload-time = "2025-12-10T22:56:10.885Z" }, + { url = "https://files.pythonhosted.org/packages/6f/ca/e8ae28649fcdf039fda5ef554b40a95f50592a3c47e6f7270c9561c12b07/matplotlib-3.10.8-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:32f8dce744be5569bebe789e46727946041199030db8aeb2954d26013a0eb26b", size = 8151473, upload-time = "2025-12-10T22:56:12.377Z" }, + { url = "https://files.pythonhosted.org/packages/f1/6f/009d129ae70b75e88cbe7e503a12a4c0670e08ed748a902c2568909e9eb5/matplotlib-3.10.8-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4cf267add95b1c88300d96ca837833d4112756045364f5c734a2276038dae27d", size = 9553896, upload-time = "2025-12-10T22:56:14.432Z" }, + { url = "https://files.pythonhosted.org/packages/f5/26/4221a741eb97967bc1fd5e4c52b9aa5a91b2f4ec05b59f6def4d820f9df9/matplotlib-3.10.8-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2cf5bd12cecf46908f286d7838b2abc6c91cda506c0445b8223a7c19a00df008", size = 9824193, upload-time = "2025-12-10T22:56:16.29Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f3/3abf75f38605772cf48a9daf5821cd4f563472f38b4b828c6fba6fa6d06e/matplotlib-3.10.8-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:41703cc95688f2516b480f7f339d8851a6035f18e100ee6a32bc0b8536a12a9c", size = 9615444, upload-time = "2025-12-10T22:56:18.155Z" }, + { url = "https://files.pythonhosted.org/packages/93/a5/de89ac80f10b8dc615807ee1133cd99ac74082581196d4d9590bea10690d/matplotlib-3.10.8-cp314-cp314-win_amd64.whl", hash = "sha256:83d282364ea9f3e52363da262ce32a09dfe241e4080dcedda3c0db059d3c1f11", size = 8272719, upload-time = "2025-12-10T22:56:20.366Z" }, + { url = "https://files.pythonhosted.org/packages/69/ce/b006495c19ccc0a137b48083168a37bd056392dee02f87dba0472f2797fe/matplotlib-3.10.8-cp314-cp314-win_arm64.whl", hash = "sha256:2c1998e92cd5999e295a731bcb2911c75f597d937341f3030cc24ef2733d78a8", size = 8144205, upload-time = "2025-12-10T22:56:22.239Z" }, + { url = "https://files.pythonhosted.org/packages/68/d9/b31116a3a855bd313c6fcdb7226926d59b041f26061c6c5b1be66a08c826/matplotlib-3.10.8-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:b5a2b97dbdc7d4f353ebf343744f1d1f1cca8aa8bfddb4262fcf4306c3761d50", size = 8305785, upload-time = "2025-12-10T22:56:24.218Z" }, + { url = "https://files.pythonhosted.org/packages/1e/90/6effe8103f0272685767ba5f094f453784057072f49b393e3ea178fe70a5/matplotlib-3.10.8-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:3f5c3e4da343bba819f0234186b9004faba952cc420fbc522dc4e103c1985908", size = 8198361, upload-time = "2025-12-10T22:56:26.787Z" }, + { url = "https://files.pythonhosted.org/packages/d7/65/a73188711bea603615fc0baecca1061429ac16940e2385433cc778a9d8e7/matplotlib-3.10.8-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f62550b9a30afde8c1c3ae450e5eb547d579dd69b25c2fc7a1c67f934c1717a", size = 9561357, upload-time = "2025-12-10T22:56:28.953Z" }, + 
{ url = "https://files.pythonhosted.org/packages/f4/3d/b5c5d5d5be8ce63292567f0e2c43dde9953d3ed86ac2de0a72e93c8f07a1/matplotlib-3.10.8-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:495672de149445ec1b772ff2c9ede9b769e3cb4f0d0aa7fa730d7f59e2d4e1c1", size = 9823610, upload-time = "2025-12-10T22:56:31.455Z" }, + { url = "https://files.pythonhosted.org/packages/4d/4b/e7beb6bbd49f6bae727a12b270a2654d13c397576d25bd6786e47033300f/matplotlib-3.10.8-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:595ba4d8fe983b88f0eec8c26a241e16d6376fe1979086232f481f8f3f67494c", size = 9614011, upload-time = "2025-12-10T22:56:33.85Z" }, + { url = "https://files.pythonhosted.org/packages/7c/e6/76f2813d31f032e65f6f797e3f2f6e4aab95b65015924b1c51370395c28a/matplotlib-3.10.8-cp314-cp314t-win_amd64.whl", hash = "sha256:25d380fe8b1dc32cf8f0b1b448470a77afb195438bafdf1d858bfb876f3edf7b", size = 8362801, upload-time = "2025-12-10T22:56:36.107Z" }, + { url = "https://files.pythonhosted.org/packages/5d/49/d651878698a0b67f23aa28e17f45a6d6dd3d3f933fa29087fa4ce5947b5a/matplotlib-3.10.8-cp314-cp314t-win_arm64.whl", hash = "sha256:113bb52413ea508ce954a02c10ffd0d565f9c3bc7f2eddc27dfe1731e71c7b5f", size = 8192560, upload-time = "2025-12-10T22:56:38.008Z" }, +] + +[[package]] +name = "miniwdl" +version = "1.13.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "argcomplete" }, + { name = "coloredlogs" }, + { name = "docker" }, + { name = "importlib-metadata" }, + { name = "lark" }, + { name = "psutil" }, + { name = "pygtail" }, + { name = "python-json-logger" }, + { name = "questionary" }, + { name = "regex" }, + { name = "ruamel-yaml" }, + { name = "xdg" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d2/f0/b53ed90e8da5b44c259afc47e0fead136b073161160537e3dc046524f0fe/miniwdl-1.13.1.tar.gz", hash = "sha256:c9c19a469aa76249612da85db1f66bdb96b6dc50cf26021481724aaf2be96673", size = 482590, upload-time = "2025-10-25T04:07:51.631Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/95/a4/353f7f992bb20bf3b7a36eae6ec7704946b6860aa1081dcc864b496029d8/miniwdl-1.13.1-py3-none-any.whl", hash = "sha256:83f9f49c49742befc7c0a990265289ec6869e08561a425d67810abcb65cf31c9", size = 198705, upload-time = "2025-10-25T04:07:49.534Z" }, +] + +[[package]] +name = "ntplib" +version = "0.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b4/14/6b018fb602602d9f6cc7485cbad7c1be3a85d25cea18c233854f05284aed/ntplib-0.4.0.tar.gz", hash = "sha256:899d8fb5f8c2555213aea95efca02934c7343df6ace9d7628a5176b176906267", size = 7135, upload-time = "2021-05-28T19:08:54.394Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/58/8c/41da70f6feaca807357206a376b6de2001b439c7f78f53473a914a6dbd1e/ntplib-0.4.0-py2.py3-none-any.whl", hash = "sha256:8d27375329ed7ff38755f7b6d4658b28edc147cadf40338a63a0da8133469d60", size = 6849, upload-time = "2021-05-28T19:08:53.323Z" }, +] + +[[package]] +name = "numpy" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a4/7a/6a3d14e205d292b738db449d0de649b373a59edb0d0b4493821d0a3e8718/numpy-2.4.0.tar.gz", hash = "sha256:6e504f7b16118198f138ef31ba24d985b124c2c469fe8467007cf30fd992f934", size = 20685720, upload-time = "2025-12-20T16:18:19.023Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/8b/ff/f6400ffec95de41c74b8e73df32e3fff1830633193a7b1e409be7fb1bb8c/numpy-2.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2a8b6bb8369abefb8bd1801b054ad50e02b3275c8614dc6e5b0373c305291037", size = 16653117, upload-time = "2025-12-20T16:16:06.709Z" }, + { url = "https://files.pythonhosted.org/packages/fd/28/6c23e97450035072e8d830a3c411bf1abd1f42c611ff9d29e3d8f55c6252/numpy-2.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2e284ca13d5a8367e43734148622caf0b261b275673823593e3e3634a6490f83", size = 12369711, upload-time = "2025-12-20T16:16:08.758Z" }, + { url = "https://files.pythonhosted.org/packages/bc/af/acbef97b630ab1bb45e6a7d01d1452e4251aa88ce680ac36e56c272120ec/numpy-2.4.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:49ff32b09f5aa0cd30a20c2b39db3e669c845589f2b7fc910365210887e39344", size = 5198355, upload-time = "2025-12-20T16:16:10.902Z" }, + { url = "https://files.pythonhosted.org/packages/c1/c8/4e0d436b66b826f2e53330adaa6311f5cac9871a5b5c31ad773b27f25a74/numpy-2.4.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:36cbfb13c152b1c7c184ddac43765db8ad672567e7bafff2cc755a09917ed2e6", size = 6545298, upload-time = "2025-12-20T16:16:12.607Z" }, + { url = "https://files.pythonhosted.org/packages/ef/27/e1f5d144ab54eac34875e79037011d511ac57b21b220063310cb96c80fbc/numpy-2.4.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:35ddc8f4914466e6fc954c76527aa91aa763682a4f6d73249ef20b418fe6effb", size = 14398387, upload-time = "2025-12-20T16:16:14.257Z" }, + { url = "https://files.pythonhosted.org/packages/67/64/4cb909dd5ab09a9a5d086eff9586e69e827b88a5585517386879474f4cf7/numpy-2.4.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dc578891de1db95b2a35001b695451767b580bb45753717498213c5ff3c41d63", size = 16363091, upload-time = "2025-12-20T16:16:17.32Z" }, + { url = "https://files.pythonhosted.org/packages/9d/9c/8efe24577523ec6809261859737cf117b0eb6fdb655abdfdc81b2e468ce4/numpy-2.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:98e81648e0b36e325ab67e46b5400a7a6d4a22b8a7c8e8bbfe20e7db7906bf95", size = 16176394, upload-time = "2025-12-20T16:16:19.524Z" }, + { url = "https://files.pythonhosted.org/packages/61/f0/1687441ece7b47a62e45a1f82015352c240765c707928edd8aef875d5951/numpy-2.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d57b5046c120561ba8fa8e4030fbb8b822f3063910fa901ffadf16e2b7128ad6", size = 18287378, upload-time = "2025-12-20T16:16:22.866Z" }, + { url = "https://files.pythonhosted.org/packages/d3/6f/f868765d44e6fc466467ed810ba9d8d6db1add7d4a748abfa2a4c99a3194/numpy-2.4.0-cp312-cp312-win32.whl", hash = "sha256:92190db305a6f48734d3982f2c60fa30d6b5ee9bff10f2887b930d7b40119f4c", size = 5955432, upload-time = "2025-12-20T16:16:25.06Z" }, + { url = "https://files.pythonhosted.org/packages/d4/b5/94c1e79fcbab38d1ca15e13777477b2914dd2d559b410f96949d6637b085/numpy-2.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:680060061adb2d74ce352628cb798cfdec399068aa7f07ba9fb818b2b3305f98", size = 12306201, upload-time = "2025-12-20T16:16:26.979Z" }, + { url = "https://files.pythonhosted.org/packages/70/09/c39dadf0b13bb0768cd29d6a3aaff1fb7c6905ac40e9aaeca26b1c086e06/numpy-2.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:39699233bc72dd482da1415dcb06076e32f60eddc796a796c5fb6c5efce94667", size = 10308234, upload-time = "2025-12-20T16:16:29.417Z" }, + { url = 
"https://files.pythonhosted.org/packages/a7/0d/853fd96372eda07c824d24adf02e8bc92bb3731b43a9b2a39161c3667cc4/numpy-2.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a152d86a3ae00ba5f47b3acf3b827509fd0b6cb7d3259665e63dafbad22a75ea", size = 16649088, upload-time = "2025-12-20T16:16:31.421Z" }, + { url = "https://files.pythonhosted.org/packages/e3/37/cc636f1f2a9f585434e20a3e6e63422f70bfe4f7f6698e941db52ea1ac9a/numpy-2.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:39b19251dec4de8ff8496cd0806cbe27bf0684f765abb1f4809554de93785f2d", size = 12364065, upload-time = "2025-12-20T16:16:33.491Z" }, + { url = "https://files.pythonhosted.org/packages/ed/69/0b78f37ca3690969beee54103ce5f6021709134e8020767e93ba691a72f1/numpy-2.4.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:009bd0ea12d3c784b6639a8457537016ce5172109e585338e11334f6a7bb88ee", size = 5192640, upload-time = "2025-12-20T16:16:35.636Z" }, + { url = "https://files.pythonhosted.org/packages/1d/2a/08569f8252abf590294dbb09a430543ec8f8cc710383abfb3e75cc73aeda/numpy-2.4.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:5fe44e277225fd3dff6882d86d3d447205d43532c3627313d17e754fb3905a0e", size = 6541556, upload-time = "2025-12-20T16:16:37.276Z" }, + { url = "https://files.pythonhosted.org/packages/93/e9/a949885a4e177493d61519377952186b6cbfdf1d6002764c664ba28349b5/numpy-2.4.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f935c4493eda9069851058fa0d9e39dbf6286be690066509305e52912714dbb2", size = 14396562, upload-time = "2025-12-20T16:16:38.953Z" }, + { url = "https://files.pythonhosted.org/packages/99/98/9d4ad53b0e9ef901c2ef1d550d2136f5ac42d3fd2988390a6def32e23e48/numpy-2.4.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8cfa5f29a695cb7438965e6c3e8d06e0416060cf0d709c1b1c1653a939bf5c2a", size = 16351719, upload-time = "2025-12-20T16:16:41.503Z" }, + { url = "https://files.pythonhosted.org/packages/28/de/5f3711a38341d6e8dd619f6353251a0cdd07f3d6d101a8fd46f4ef87f895/numpy-2.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ba0cb30acd3ef11c94dc27fbfba68940652492bc107075e7ffe23057f9425681", size = 16176053, upload-time = "2025-12-20T16:16:44.552Z" }, + { url = "https://files.pythonhosted.org/packages/2a/5b/2a3753dc43916501b4183532e7ace862e13211042bceafa253afb5c71272/numpy-2.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:60e8c196cd82cbbd4f130b5290007e13e6de3eca79f0d4d38014769d96a7c475", size = 18277859, upload-time = "2025-12-20T16:16:47.174Z" }, + { url = "https://files.pythonhosted.org/packages/2c/c5/a18bcdd07a941db3076ef489d036ab16d2bfc2eae0cf27e5a26e29189434/numpy-2.4.0-cp313-cp313-win32.whl", hash = "sha256:5f48cb3e88fbc294dc90e215d86fbaf1c852c63dbdb6c3a3e63f45c4b57f7344", size = 5953849, upload-time = "2025-12-20T16:16:49.554Z" }, + { url = "https://files.pythonhosted.org/packages/4f/f1/719010ff8061da6e8a26e1980cf090412d4f5f8060b31f0c45d77dd67a01/numpy-2.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:a899699294f28f7be8992853c0c60741f16ff199205e2e6cdca155762cbaa59d", size = 12302840, upload-time = "2025-12-20T16:16:51.227Z" }, + { url = "https://files.pythonhosted.org/packages/f5/5a/b3d259083ed8b4d335270c76966cb6cf14a5d1b69e1a608994ac57a659e6/numpy-2.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:9198f447e1dc5647d07c9a6bbe2063cc0132728cc7175b39dbc796da5b54920d", size = 10308509, upload-time = "2025-12-20T16:16:53.313Z" }, + { url = 
"https://files.pythonhosted.org/packages/31/01/95edcffd1bb6c0633df4e808130545c4f07383ab629ac7e316fb44fff677/numpy-2.4.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74623f2ab5cc3f7c886add4f735d1031a1d2be4a4ae63c0546cfd74e7a31ddf6", size = 12491815, upload-time = "2025-12-20T16:16:55.496Z" }, + { url = "https://files.pythonhosted.org/packages/59/ea/5644b8baa92cc1c7163b4b4458c8679852733fa74ca49c942cfa82ded4e0/numpy-2.4.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:0804a8e4ab070d1d35496e65ffd3cf8114c136a2b81f61dfab0de4b218aacfd5", size = 5320321, upload-time = "2025-12-20T16:16:57.468Z" }, + { url = "https://files.pythonhosted.org/packages/26/4e/e10938106d70bc21319bd6a86ae726da37edc802ce35a3a71ecdf1fdfe7f/numpy-2.4.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:02a2038eb27f9443a8b266a66911e926566b5a6ffd1a689b588f7f35b81e7dc3", size = 6641635, upload-time = "2025-12-20T16:16:59.379Z" }, + { url = "https://files.pythonhosted.org/packages/b3/8d/a8828e3eaf5c0b4ab116924df82f24ce3416fa38d0674d8f708ddc6c8aac/numpy-2.4.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1889b3a3f47a7b5bee16bc25a2145bd7cb91897f815ce3499db64c7458b6d91d", size = 14456053, upload-time = "2025-12-20T16:17:01.768Z" }, + { url = "https://files.pythonhosted.org/packages/68/a1/17d97609d87d4520aa5ae2dcfb32305654550ac6a35effb946d303e594ce/numpy-2.4.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85eef4cb5625c47ee6425c58a3502555e10f45ee973da878ac8248ad58c136f3", size = 16401702, upload-time = "2025-12-20T16:17:04.235Z" }, + { url = "https://files.pythonhosted.org/packages/18/32/0f13c1b2d22bea1118356b8b963195446f3af124ed7a5adfa8fdecb1b6ca/numpy-2.4.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6dc8b7e2f4eb184b37655195f421836cfae6f58197b67e3ffc501f1333d993fa", size = 16242493, upload-time = "2025-12-20T16:17:06.856Z" }, + { url = "https://files.pythonhosted.org/packages/ae/23/48f21e3d309fbc137c068a1475358cbd3a901b3987dcfc97a029ab3068e2/numpy-2.4.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:44aba2f0cafd287871a495fb3163408b0bd25bbce135c6f621534a07f4f7875c", size = 18324222, upload-time = "2025-12-20T16:17:09.392Z" }, + { url = "https://files.pythonhosted.org/packages/ac/52/41f3d71296a3dcaa4f456aaa3c6fc8e745b43d0552b6bde56571bb4b4a0f/numpy-2.4.0-cp313-cp313t-win32.whl", hash = "sha256:20c115517513831860c573996e395707aa9fb691eb179200125c250e895fcd93", size = 6076216, upload-time = "2025-12-20T16:17:11.437Z" }, + { url = "https://files.pythonhosted.org/packages/35/ff/46fbfe60ab0710d2a2b16995f708750307d30eccbb4c38371ea9e986866e/numpy-2.4.0-cp313-cp313t-win_amd64.whl", hash = "sha256:b48e35f4ab6f6a7597c46e301126ceba4c44cd3280e3750f85db48b082624fa4", size = 12444263, upload-time = "2025-12-20T16:17:13.182Z" }, + { url = "https://files.pythonhosted.org/packages/a3/e3/9189ab319c01d2ed556c932ccf55064c5d75bb5850d1df7a482ce0badead/numpy-2.4.0-cp313-cp313t-win_arm64.whl", hash = "sha256:4d1cfce39e511069b11e67cd0bd78ceff31443b7c9e5c04db73c7a19f572967c", size = 10378265, upload-time = "2025-12-20T16:17:15.211Z" }, + { url = "https://files.pythonhosted.org/packages/ab/ed/52eac27de39d5e5a6c9aadabe672bc06f55e24a3d9010cd1183948055d76/numpy-2.4.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:c95eb6db2884917d86cde0b4d4cf31adf485c8ec36bf8696dd66fa70de96f36b", size = 16647476, upload-time = "2025-12-20T16:17:17.671Z" }, + { url = 
"https://files.pythonhosted.org/packages/77/c0/990ce1b7fcd4e09aeaa574e2a0a839589e4b08b2ca68070f1acb1fea6736/numpy-2.4.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:65167da969cd1ec3a1df31cb221ca3a19a8aaa25370ecb17d428415e93c1935e", size = 12374563, upload-time = "2025-12-20T16:17:20.216Z" }, + { url = "https://files.pythonhosted.org/packages/37/7c/8c5e389c6ae8f5fd2277a988600d79e9625db3fff011a2d87ac80b881a4c/numpy-2.4.0-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:3de19cfecd1465d0dcf8a5b5ea8b3155b42ed0b639dba4b71e323d74f2a3be5e", size = 5203107, upload-time = "2025-12-20T16:17:22.47Z" }, + { url = "https://files.pythonhosted.org/packages/e6/94/ca5b3bd6a8a70a5eec9a0b8dd7f980c1eff4b8a54970a9a7fef248ef564f/numpy-2.4.0-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:6c05483c3136ac4c91b4e81903cb53a8707d316f488124d0398499a4f8e8ef51", size = 6538067, upload-time = "2025-12-20T16:17:24.001Z" }, + { url = "https://files.pythonhosted.org/packages/79/43/993eb7bb5be6761dde2b3a3a594d689cec83398e3f58f4758010f3b85727/numpy-2.4.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36667db4d6c1cea79c8930ab72fadfb4060feb4bfe724141cd4bd064d2e5f8ce", size = 14411926, upload-time = "2025-12-20T16:17:25.822Z" }, + { url = "https://files.pythonhosted.org/packages/03/75/d4c43b61de473912496317a854dac54f1efec3eeb158438da6884b70bb90/numpy-2.4.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9a818668b674047fd88c4cddada7ab8f1c298812783e8328e956b78dc4807f9f", size = 16354295, upload-time = "2025-12-20T16:17:28.308Z" }, + { url = "https://files.pythonhosted.org/packages/b8/0a/b54615b47ee8736a6461a4bb6749128dd3435c5a759d5663f11f0e9af4ac/numpy-2.4.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1ee32359fb7543b7b7bd0b2f46294db27e29e7bbdf70541e81b190836cd83ded", size = 16190242, upload-time = "2025-12-20T16:17:30.993Z" }, + { url = "https://files.pythonhosted.org/packages/98/ce/ea207769aacad6246525ec6c6bbd66a2bf56c72443dc10e2f90feed29290/numpy-2.4.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e493962256a38f58283de033d8af176c5c91c084ea30f15834f7545451c42059", size = 18280875, upload-time = "2025-12-20T16:17:33.327Z" }, + { url = "https://files.pythonhosted.org/packages/17/ef/ec409437aa962ea372ed601c519a2b141701683ff028f894b7466f0ab42b/numpy-2.4.0-cp314-cp314-win32.whl", hash = "sha256:6bbaebf0d11567fa8926215ae731e1d58e6ec28a8a25235b8a47405d301332db", size = 6002530, upload-time = "2025-12-20T16:17:35.729Z" }, + { url = "https://files.pythonhosted.org/packages/5f/4a/5cb94c787a3ed1ac65e1271b968686521169a7b3ec0b6544bb3ca32960b0/numpy-2.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:3d857f55e7fdf7c38ab96c4558c95b97d1c685be6b05c249f5fdafcbd6f9899e", size = 12435890, upload-time = "2025-12-20T16:17:37.599Z" }, + { url = "https://files.pythonhosted.org/packages/48/a0/04b89db963af9de1104975e2544f30de89adbf75b9e75f7dd2599be12c79/numpy-2.4.0-cp314-cp314-win_arm64.whl", hash = "sha256:bb50ce5fb202a26fd5404620e7ef820ad1ab3558b444cb0b55beb7ef66cd2d63", size = 10591892, upload-time = "2025-12-20T16:17:39.649Z" }, + { url = "https://files.pythonhosted.org/packages/53/e5/d74b5ccf6712c06c7a545025a6a71bfa03bdc7e0568b405b0d655232fd92/numpy-2.4.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:355354388cba60f2132df297e2d53053d4063f79077b67b481d21276d61fc4df", size = 12494312, upload-time = "2025-12-20T16:17:41.714Z" }, + { url = 
"https://files.pythonhosted.org/packages/c2/08/3ca9cc2ddf54dfee7ae9a6479c071092a228c68aef08252aa08dac2af002/numpy-2.4.0-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:1d8f9fde5f6dc1b6fc34df8162f3b3079365468703fee7f31d4e0cc8c63baed9", size = 5322862, upload-time = "2025-12-20T16:17:44.145Z" }, + { url = "https://files.pythonhosted.org/packages/87/74/0bb63a68394c0c1e52670cfff2e309afa41edbe11b3327d9af29e4383f34/numpy-2.4.0-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:e0434aa22c821f44eeb4c650b81c7fbdd8c0122c6c4b5a576a76d5a35625ecd9", size = 6644986, upload-time = "2025-12-20T16:17:46.203Z" }, + { url = "https://files.pythonhosted.org/packages/06/8f/9264d9bdbcf8236af2823623fe2f3981d740fc3461e2787e231d97c38c28/numpy-2.4.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:40483b2f2d3ba7aad426443767ff5632ec3156ef09742b96913787d13c336471", size = 14457958, upload-time = "2025-12-20T16:17:48.017Z" }, + { url = "https://files.pythonhosted.org/packages/8c/d9/f9a69ae564bbc7236a35aa883319364ef5fd41f72aa320cc1cbe66148fe2/numpy-2.4.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6a7664ddd9746e20b7325351fe1a8408d0a2bf9c63b5e898290ddc8f09544", size = 16398394, upload-time = "2025-12-20T16:17:50.409Z" }, + { url = "https://files.pythonhosted.org/packages/34/c7/39241501408dde7f885d241a98caba5421061a2c6d2b2197ac5e3aa842d8/numpy-2.4.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ecb0019d44f4cdb50b676c5d0cb4b1eae8e15d1ed3d3e6639f986fc92b2ec52c", size = 16241044, upload-time = "2025-12-20T16:17:52.661Z" }, + { url = "https://files.pythonhosted.org/packages/7c/95/cae7effd90e065a95e59fe710eeee05d7328ed169776dfdd9f789e032125/numpy-2.4.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d0ffd9e2e4441c96a9c91ec1783285d80bf835b677853fc2770a89d50c1e48ac", size = 18321772, upload-time = "2025-12-20T16:17:54.947Z" }, + { url = "https://files.pythonhosted.org/packages/96/df/3c6c279accd2bfb968a76298e5b276310bd55d243df4fa8ac5816d79347d/numpy-2.4.0-cp314-cp314t-win32.whl", hash = "sha256:77f0d13fa87036d7553bf81f0e1fe3ce68d14c9976c9851744e4d3e91127e95f", size = 6148320, upload-time = "2025-12-20T16:17:57.249Z" }, + { url = "https://files.pythonhosted.org/packages/92/8d/f23033cce252e7a75cae853d17f582e86534c46404dea1c8ee094a9d6d84/numpy-2.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b1f5b45829ac1848893f0ddf5cb326110604d6df96cdc255b0bf9edd154104d4", size = 12623460, upload-time = "2025-12-20T16:17:58.963Z" }, + { url = "https://files.pythonhosted.org/packages/a4/4f/1f8475907d1a7c4ef9020edf7f39ea2422ec896849245f00688e4b268a71/numpy-2.4.0-cp314-cp314t-win_arm64.whl", hash = "sha256:23a3e9d1a6f360267e8fbb38ba5db355a6a7e9be71d7fce7ab3125e88bb646c8", size = 10661799, upload-time = "2025-12-20T16:18:01.078Z" }, +] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, +] + +[[package]] +name = "pandas" +version 
= "2.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, + { name = "python-dateutil" }, + { name = "pytz" }, + { name = "tzdata" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/33/01/d40b85317f86cf08d853a4f495195c73815fdf205eef3993821720274518/pandas-2.3.3.tar.gz", hash = "sha256:e05e1af93b977f7eafa636d043f9f94c7ee3ac81af99c13508215942e64c993b", size = 4495223, upload-time = "2025-09-29T23:34:51.853Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9c/fb/231d89e8637c808b997d172b18e9d4a4bc7bf31296196c260526055d1ea0/pandas-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d21f6d74eb1725c2efaa71a2bfc661a0689579b58e9c0ca58a739ff0b002b53", size = 11597846, upload-time = "2025-09-29T23:19:48.856Z" }, + { url = "https://files.pythonhosted.org/packages/5c/bd/bf8064d9cfa214294356c2d6702b716d3cf3bb24be59287a6a21e24cae6b/pandas-2.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3fd2f887589c7aa868e02632612ba39acb0b8948faf5cc58f0850e165bd46f35", size = 10729618, upload-time = "2025-09-29T23:39:08.659Z" }, + { url = "https://files.pythonhosted.org/packages/57/56/cf2dbe1a3f5271370669475ead12ce77c61726ffd19a35546e31aa8edf4e/pandas-2.3.3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ecaf1e12bdc03c86ad4a7ea848d66c685cb6851d807a26aa245ca3d2017a1908", size = 11737212, upload-time = "2025-09-29T23:19:59.765Z" }, + { url = "https://files.pythonhosted.org/packages/e5/63/cd7d615331b328e287d8233ba9fdf191a9c2d11b6af0c7a59cfcec23de68/pandas-2.3.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b3d11d2fda7eb164ef27ffc14b4fcab16a80e1ce67e9f57e19ec0afaf715ba89", size = 12362693, upload-time = "2025-09-29T23:20:14.098Z" }, + { url = "https://files.pythonhosted.org/packages/a6/de/8b1895b107277d52f2b42d3a6806e69cfef0d5cf1d0ba343470b9d8e0a04/pandas-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a68e15f780eddf2b07d242e17a04aa187a7ee12b40b930bfdd78070556550e98", size = 12771002, upload-time = "2025-09-29T23:20:26.76Z" }, + { url = "https://files.pythonhosted.org/packages/87/21/84072af3187a677c5893b170ba2c8fbe450a6ff911234916da889b698220/pandas-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:371a4ab48e950033bcf52b6527eccb564f52dc826c02afd9a1bc0ab731bba084", size = 13450971, upload-time = "2025-09-29T23:20:41.344Z" }, + { url = "https://files.pythonhosted.org/packages/86/41/585a168330ff063014880a80d744219dbf1dd7a1c706e75ab3425a987384/pandas-2.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:a16dcec078a01eeef8ee61bf64074b4e524a2a3f4b3be9326420cabe59c4778b", size = 10992722, upload-time = "2025-09-29T23:20:54.139Z" }, + { url = "https://files.pythonhosted.org/packages/cd/4b/18b035ee18f97c1040d94debd8f2e737000ad70ccc8f5513f4eefad75f4b/pandas-2.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:56851a737e3470de7fa88e6131f41281ed440d29a9268dcbf0002da5ac366713", size = 11544671, upload-time = "2025-09-29T23:21:05.024Z" }, + { url = "https://files.pythonhosted.org/packages/31/94/72fac03573102779920099bcac1c3b05975c2cb5f01eac609faf34bed1ca/pandas-2.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdcd9d1167f4885211e401b3036c0c8d9e274eee67ea8d0758a256d60704cfe8", size = 10680807, upload-time = "2025-09-29T23:21:15.979Z" }, + { url = "https://files.pythonhosted.org/packages/16/87/9472cf4a487d848476865321de18cc8c920b8cab98453ab79dbbc98db63a/pandas-2.3.3-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:e32e7cc9af0f1cc15548288a51a3b681cc2a219faa838e995f7dc53dbab1062d", size = 11709872, upload-time = "2025-09-29T23:21:27.165Z" }, + { url = "https://files.pythonhosted.org/packages/15/07/284f757f63f8a8d69ed4472bfd85122bd086e637bf4ed09de572d575a693/pandas-2.3.3-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:318d77e0e42a628c04dc56bcef4b40de67918f7041c2b061af1da41dcff670ac", size = 12306371, upload-time = "2025-09-29T23:21:40.532Z" }, + { url = "https://files.pythonhosted.org/packages/33/81/a3afc88fca4aa925804a27d2676d22dcd2031c2ebe08aabd0ae55b9ff282/pandas-2.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4e0a175408804d566144e170d0476b15d78458795bb18f1304fb94160cabf40c", size = 12765333, upload-time = "2025-09-29T23:21:55.77Z" }, + { url = "https://files.pythonhosted.org/packages/8d/0f/b4d4ae743a83742f1153464cf1a8ecfafc3ac59722a0b5c8602310cb7158/pandas-2.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:93c2d9ab0fc11822b5eece72ec9587e172f63cff87c00b062f6e37448ced4493", size = 13418120, upload-time = "2025-09-29T23:22:10.109Z" }, + { url = "https://files.pythonhosted.org/packages/4f/c7/e54682c96a895d0c808453269e0b5928a07a127a15704fedb643e9b0a4c8/pandas-2.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:f8bfc0e12dc78f777f323f55c58649591b2cd0c43534e8355c51d3fede5f4dee", size = 10993991, upload-time = "2025-09-29T23:25:04.889Z" }, + { url = "https://files.pythonhosted.org/packages/f9/ca/3f8d4f49740799189e1395812f3bf23b5e8fc7c190827d55a610da72ce55/pandas-2.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:75ea25f9529fdec2d2e93a42c523962261e567d250b0013b16210e1d40d7c2e5", size = 12048227, upload-time = "2025-09-29T23:22:24.343Z" }, + { url = "https://files.pythonhosted.org/packages/0e/5a/f43efec3e8c0cc92c4663ccad372dbdff72b60bdb56b2749f04aa1d07d7e/pandas-2.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74ecdf1d301e812db96a465a525952f4dde225fdb6d8e5a521d47e1f42041e21", size = 11411056, upload-time = "2025-09-29T23:22:37.762Z" }, + { url = "https://files.pythonhosted.org/packages/46/b1/85331edfc591208c9d1a63a06baa67b21d332e63b7a591a5ba42a10bb507/pandas-2.3.3-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6435cb949cb34ec11cc9860246ccb2fdc9ecd742c12d3304989017d53f039a78", size = 11645189, upload-time = "2025-09-29T23:22:51.688Z" }, + { url = "https://files.pythonhosted.org/packages/44/23/78d645adc35d94d1ac4f2a3c4112ab6f5b8999f4898b8cdf01252f8df4a9/pandas-2.3.3-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:900f47d8f20860de523a1ac881c4c36d65efcb2eb850e6948140fa781736e110", size = 12121912, upload-time = "2025-09-29T23:23:05.042Z" }, + { url = "https://files.pythonhosted.org/packages/53/da/d10013df5e6aaef6b425aa0c32e1fc1f3e431e4bcabd420517dceadce354/pandas-2.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a45c765238e2ed7d7c608fc5bc4a6f88b642f2f01e70c0c23d2224dd21829d86", size = 12712160, upload-time = "2025-09-29T23:23:28.57Z" }, + { url = "https://files.pythonhosted.org/packages/bd/17/e756653095a083d8a37cbd816cb87148debcfcd920129b25f99dd8d04271/pandas-2.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c4fc4c21971a1a9f4bdb4c73978c7f7256caa3e62b323f70d6cb80db583350bc", size = 13199233, upload-time = "2025-09-29T23:24:24.876Z" }, + { url = "https://files.pythonhosted.org/packages/04/fd/74903979833db8390b73b3a8a7d30d146d710bd32703724dd9083950386f/pandas-2.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = 
"sha256:ee15f284898e7b246df8087fc82b87b01686f98ee67d85a17b7ab44143a3a9a0", size = 11540635, upload-time = "2025-09-29T23:25:52.486Z" }, + { url = "https://files.pythonhosted.org/packages/21/00/266d6b357ad5e6d3ad55093a7e8efc7dd245f5a842b584db9f30b0f0a287/pandas-2.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1611aedd912e1ff81ff41c745822980c49ce4a7907537be8692c8dbc31924593", size = 10759079, upload-time = "2025-09-29T23:26:33.204Z" }, + { url = "https://files.pythonhosted.org/packages/ca/05/d01ef80a7a3a12b2f8bbf16daba1e17c98a2f039cbc8e2f77a2c5a63d382/pandas-2.3.3-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d2cefc361461662ac48810cb14365a365ce864afe85ef1f447ff5a1e99ea81c", size = 11814049, upload-time = "2025-09-29T23:27:15.384Z" }, + { url = "https://files.pythonhosted.org/packages/15/b2/0e62f78c0c5ba7e3d2c5945a82456f4fac76c480940f805e0b97fcbc2f65/pandas-2.3.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ee67acbbf05014ea6c763beb097e03cd629961c8a632075eeb34247120abcb4b", size = 12332638, upload-time = "2025-09-29T23:27:51.625Z" }, + { url = "https://files.pythonhosted.org/packages/c5/33/dd70400631b62b9b29c3c93d2feee1d0964dc2bae2e5ad7a6c73a7f25325/pandas-2.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c46467899aaa4da076d5abc11084634e2d197e9460643dd455ac3db5856b24d6", size = 12886834, upload-time = "2025-09-29T23:28:21.289Z" }, + { url = "https://files.pythonhosted.org/packages/d3/18/b5d48f55821228d0d2692b34fd5034bb185e854bdb592e9c640f6290e012/pandas-2.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6253c72c6a1d990a410bc7de641d34053364ef8bcd3126f7e7450125887dffe3", size = 13409925, upload-time = "2025-09-29T23:28:58.261Z" }, + { url = "https://files.pythonhosted.org/packages/a6/3d/124ac75fcd0ecc09b8fdccb0246ef65e35b012030defb0e0eba2cbbbe948/pandas-2.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:1b07204a219b3b7350abaae088f451860223a52cfb8a6c53358e7948735158e5", size = 11109071, upload-time = "2025-09-29T23:32:27.484Z" }, + { url = "https://files.pythonhosted.org/packages/89/9c/0e21c895c38a157e0faa1fb64587a9226d6dd46452cac4532d80c3c4a244/pandas-2.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2462b1a365b6109d275250baaae7b760fd25c726aaca0054649286bcfbb3e8ec", size = 12048504, upload-time = "2025-09-29T23:29:31.47Z" }, + { url = "https://files.pythonhosted.org/packages/d7/82/b69a1c95df796858777b68fbe6a81d37443a33319761d7c652ce77797475/pandas-2.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0242fe9a49aa8b4d78a4fa03acb397a58833ef6199e9aa40a95f027bb3a1b6e7", size = 11410702, upload-time = "2025-09-29T23:29:54.591Z" }, + { url = "https://files.pythonhosted.org/packages/f9/88/702bde3ba0a94b8c73a0181e05144b10f13f29ebfc2150c3a79062a8195d/pandas-2.3.3-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a21d830e78df0a515db2b3d2f5570610f5e6bd2e27749770e8bb7b524b89b450", size = 11634535, upload-time = "2025-09-29T23:30:21.003Z" }, + { url = "https://files.pythonhosted.org/packages/a4/1e/1bac1a839d12e6a82ec6cb40cda2edde64a2013a66963293696bbf31fbbb/pandas-2.3.3-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e3ebdb170b5ef78f19bfb71b0dc5dc58775032361fa188e814959b74d726dd5", size = 12121582, upload-time = "2025-09-29T23:30:43.391Z" }, + { url = "https://files.pythonhosted.org/packages/44/91/483de934193e12a3b1d6ae7c8645d083ff88dec75f46e827562f1e4b4da6/pandas-2.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:d051c0e065b94b7a3cea50eb1ec32e912cd96dba41647eb24104b6c6c14c5788", size = 12699963, upload-time = "2025-09-29T23:31:10.009Z" }, + { url = "https://files.pythonhosted.org/packages/70/44/5191d2e4026f86a2a109053e194d3ba7a31a2d10a9c2348368c63ed4e85a/pandas-2.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3869faf4bd07b3b66a9f462417d0ca3a9df29a9f6abd5d0d0dbab15dac7abe87", size = 13202175, upload-time = "2025-09-29T23:31:59.173Z" }, +] + +[[package]] +name = "pillow" +version = "12.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/cace85a1b0c9775a9f8f5d5423c8261c858760e2466c79b2dd184638b056/pillow-12.0.0.tar.gz", hash = "sha256:87d4f8125c9988bfbed67af47dd7a953e2fc7b0cc1e7800ec6d2080d490bb353", size = 47008828, upload-time = "2025-10-15T18:24:14.008Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/90/4fcce2c22caf044e660a198d740e7fbc14395619e3cb1abad12192c0826c/pillow-12.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:53561a4ddc36facb432fae7a9d8afbfaf94795414f5cdc5fc52f28c1dca90371", size = 5249377, upload-time = "2025-10-15T18:22:05.993Z" }, + { url = "https://files.pythonhosted.org/packages/fd/e0/ed960067543d080691d47d6938ebccbf3976a931c9567ab2fbfab983a5dd/pillow-12.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:71db6b4c1653045dacc1585c1b0d184004f0d7e694c7b34ac165ca70c0838082", size = 4650343, upload-time = "2025-10-15T18:22:07.718Z" }, + { url = "https://files.pythonhosted.org/packages/e7/a1/f81fdeddcb99c044bf7d6faa47e12850f13cee0849537a7d27eeab5534d4/pillow-12.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2fa5f0b6716fc88f11380b88b31fe591a06c6315e955c096c35715788b339e3f", size = 6232981, upload-time = "2025-10-15T18:22:09.287Z" }, + { url = "https://files.pythonhosted.org/packages/88/e1/9098d3ce341a8750b55b0e00c03f1630d6178f38ac191c81c97a3b047b44/pillow-12.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:82240051c6ca513c616f7f9da06e871f61bfd7805f566275841af15015b8f98d", size = 8041399, upload-time = "2025-10-15T18:22:10.872Z" }, + { url = "https://files.pythonhosted.org/packages/a7/62/a22e8d3b602ae8cc01446d0c57a54e982737f44b6f2e1e019a925143771d/pillow-12.0.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:55f818bd74fe2f11d4d7cbc65880a843c4075e0ac7226bc1a23261dbea531953", size = 6347740, upload-time = "2025-10-15T18:22:12.769Z" }, + { url = "https://files.pythonhosted.org/packages/4f/87/424511bdcd02c8d7acf9f65caa09f291a519b16bd83c3fb3374b3d4ae951/pillow-12.0.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b87843e225e74576437fd5b6a4c2205d422754f84a06942cfaf1dc32243e45a8", size = 7040201, upload-time = "2025-10-15T18:22:14.813Z" }, + { url = "https://files.pythonhosted.org/packages/dc/4d/435c8ac688c54d11755aedfdd9f29c9eeddf68d150fe42d1d3dbd2365149/pillow-12.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c607c90ba67533e1b2355b821fef6764d1dd2cbe26b8c1005ae84f7aea25ff79", size = 6462334, upload-time = "2025-10-15T18:22:16.375Z" }, + { url = "https://files.pythonhosted.org/packages/2b/f2/ad34167a8059a59b8ad10bc5c72d4d9b35acc6b7c0877af8ac885b5f2044/pillow-12.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:21f241bdd5080a15bc86d3466a9f6074a9c2c2b314100dd896ac81ee6db2f1ba", size = 7134162, upload-time = "2025-10-15T18:22:17.996Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/b1/a7391df6adacf0a5c2cf6ac1cf1fcc1369e7d439d28f637a847f8803beb3/pillow-12.0.0-cp312-cp312-win32.whl", hash = "sha256:dd333073e0cacdc3089525c7df7d39b211bcdf31fc2824e49d01c6b6187b07d0", size = 6298769, upload-time = "2025-10-15T18:22:19.923Z" }, + { url = "https://files.pythonhosted.org/packages/a2/0b/d87733741526541c909bbf159e338dcace4f982daac6e5a8d6be225ca32d/pillow-12.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9fe611163f6303d1619bbcb653540a4d60f9e55e622d60a3108be0d5b441017a", size = 7001107, upload-time = "2025-10-15T18:22:21.644Z" }, + { url = "https://files.pythonhosted.org/packages/bc/96/aaa61ce33cc98421fb6088af2a03be4157b1e7e0e87087c888e2370a7f45/pillow-12.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:7dfb439562f234f7d57b1ac6bc8fe7f838a4bd49c79230e0f6a1da93e82f1fad", size = 2436012, upload-time = "2025-10-15T18:22:23.621Z" }, + { url = "https://files.pythonhosted.org/packages/62/f2/de993bb2d21b33a98d031ecf6a978e4b61da207bef02f7b43093774c480d/pillow-12.0.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:0869154a2d0546545cde61d1789a6524319fc1897d9ee31218eae7a60ccc5643", size = 4045493, upload-time = "2025-10-15T18:22:25.758Z" }, + { url = "https://files.pythonhosted.org/packages/0e/b6/bc8d0c4c9f6f111a783d045310945deb769b806d7574764234ffd50bc5ea/pillow-12.0.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:a7921c5a6d31b3d756ec980f2f47c0cfdbce0fc48c22a39347a895f41f4a6ea4", size = 4120461, upload-time = "2025-10-15T18:22:27.286Z" }, + { url = "https://files.pythonhosted.org/packages/5d/57/d60d343709366a353dc56adb4ee1e7d8a2cc34e3fbc22905f4167cfec119/pillow-12.0.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:1ee80a59f6ce048ae13cda1abf7fbd2a34ab9ee7d401c46be3ca685d1999a399", size = 3576912, upload-time = "2025-10-15T18:22:28.751Z" }, + { url = "https://files.pythonhosted.org/packages/a4/a4/a0a31467e3f83b94d37568294b01d22b43ae3c5d85f2811769b9c66389dd/pillow-12.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c50f36a62a22d350c96e49ad02d0da41dbd17ddc2e29750dbdba4323f85eb4a5", size = 5249132, upload-time = "2025-10-15T18:22:30.641Z" }, + { url = "https://files.pythonhosted.org/packages/83/06/48eab21dd561de2914242711434c0c0eb992ed08ff3f6107a5f44527f5e9/pillow-12.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5193fde9a5f23c331ea26d0cf171fbf67e3f247585f50c08b3e205c7aeb4589b", size = 4650099, upload-time = "2025-10-15T18:22:32.73Z" }, + { url = "https://files.pythonhosted.org/packages/fc/bd/69ed99fd46a8dba7c1887156d3572fe4484e3f031405fcc5a92e31c04035/pillow-12.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bde737cff1a975b70652b62d626f7785e0480918dece11e8fef3c0cf057351c3", size = 6230808, upload-time = "2025-10-15T18:22:34.337Z" }, + { url = "https://files.pythonhosted.org/packages/ea/94/8fad659bcdbf86ed70099cb60ae40be6acca434bbc8c4c0d4ef356d7e0de/pillow-12.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a6597ff2b61d121172f5844b53f21467f7082f5fb385a9a29c01414463f93b07", size = 8037804, upload-time = "2025-10-15T18:22:36.402Z" }, + { url = "https://files.pythonhosted.org/packages/20/39/c685d05c06deecfd4e2d1950e9a908aa2ca8bc4e6c3b12d93b9cafbd7837/pillow-12.0.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0b817e7035ea7f6b942c13aa03bb554fc44fea70838ea21f8eb31c638326584e", size = 6345553, upload-time = "2025-10-15T18:22:38.066Z" }, + { url = 
"https://files.pythonhosted.org/packages/38/57/755dbd06530a27a5ed74f8cb0a7a44a21722ebf318edbe67ddbd7fb28f88/pillow-12.0.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f4f1231b7dec408e8670264ce63e9c71409d9583dd21d32c163e25213ee2a344", size = 7037729, upload-time = "2025-10-15T18:22:39.769Z" }, + { url = "https://files.pythonhosted.org/packages/ca/b6/7e94f4c41d238615674d06ed677c14883103dce1c52e4af16f000338cfd7/pillow-12.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e51b71417049ad6ab14c49608b4a24d8fb3fe605e5dfabfe523b58064dc3d27", size = 6459789, upload-time = "2025-10-15T18:22:41.437Z" }, + { url = "https://files.pythonhosted.org/packages/9c/14/4448bb0b5e0f22dd865290536d20ec8a23b64e2d04280b89139f09a36bb6/pillow-12.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d120c38a42c234dc9a8c5de7ceaaf899cf33561956acb4941653f8bdc657aa79", size = 7130917, upload-time = "2025-10-15T18:22:43.152Z" }, + { url = "https://files.pythonhosted.org/packages/dd/ca/16c6926cc1c015845745d5c16c9358e24282f1e588237a4c36d2b30f182f/pillow-12.0.0-cp313-cp313-win32.whl", hash = "sha256:4cc6b3b2efff105c6a1656cfe59da4fdde2cda9af1c5e0b58529b24525d0a098", size = 6302391, upload-time = "2025-10-15T18:22:44.753Z" }, + { url = "https://files.pythonhosted.org/packages/6d/2a/dd43dcfd6dae9b6a49ee28a8eedb98c7d5ff2de94a5d834565164667b97b/pillow-12.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:4cf7fed4b4580601c4345ceb5d4cbf5a980d030fd5ad07c4d2ec589f95f09905", size = 7007477, upload-time = "2025-10-15T18:22:46.838Z" }, + { url = "https://files.pythonhosted.org/packages/77/f0/72ea067f4b5ae5ead653053212af05ce3705807906ba3f3e8f58ddf617e6/pillow-12.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:9f0b04c6b8584c2c193babcccc908b38ed29524b29dd464bc8801bf10d746a3a", size = 2435918, upload-time = "2025-10-15T18:22:48.399Z" }, + { url = "https://files.pythonhosted.org/packages/f5/5e/9046b423735c21f0487ea6cb5b10f89ea8f8dfbe32576fe052b5ba9d4e5b/pillow-12.0.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7fa22993bac7b77b78cae22bad1e2a987ddf0d9015c63358032f84a53f23cdc3", size = 5251406, upload-time = "2025-10-15T18:22:49.905Z" }, + { url = "https://files.pythonhosted.org/packages/12/66/982ceebcdb13c97270ef7a56c3969635b4ee7cd45227fa707c94719229c5/pillow-12.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f135c702ac42262573fe9714dfe99c944b4ba307af5eb507abef1667e2cbbced", size = 4653218, upload-time = "2025-10-15T18:22:51.587Z" }, + { url = "https://files.pythonhosted.org/packages/16/b3/81e625524688c31859450119bf12674619429cab3119eec0e30a7a1029cb/pillow-12.0.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c85de1136429c524e55cfa4e033b4a7940ac5c8ee4d9401cc2d1bf48154bbc7b", size = 6266564, upload-time = "2025-10-15T18:22:53.215Z" }, + { url = "https://files.pythonhosted.org/packages/98/59/dfb38f2a41240d2408096e1a76c671d0a105a4a8471b1871c6902719450c/pillow-12.0.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:38df9b4bfd3db902c9c2bd369bcacaf9d935b2fff73709429d95cc41554f7b3d", size = 8069260, upload-time = "2025-10-15T18:22:54.933Z" }, + { url = "https://files.pythonhosted.org/packages/dc/3d/378dbea5cd1874b94c312425ca77b0f47776c78e0df2df751b820c8c1d6c/pillow-12.0.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7d87ef5795da03d742bf49439f9ca4d027cde49c82c5371ba52464aee266699a", size = 6379248, upload-time = "2025-10-15T18:22:56.605Z" }, + { url = 
"https://files.pythonhosted.org/packages/84/b0/d525ef47d71590f1621510327acec75ae58c721dc071b17d8d652ca494d8/pillow-12.0.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aff9e4d82d082ff9513bdd6acd4f5bd359f5b2c870907d2b0a9c5e10d40c88fe", size = 7066043, upload-time = "2025-10-15T18:22:58.53Z" }, + { url = "https://files.pythonhosted.org/packages/61/2c/aced60e9cf9d0cde341d54bf7932c9ffc33ddb4a1595798b3a5150c7ec4e/pillow-12.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:8d8ca2b210ada074d57fcee40c30446c9562e542fc46aedc19baf758a93532ee", size = 6490915, upload-time = "2025-10-15T18:23:00.582Z" }, + { url = "https://files.pythonhosted.org/packages/ef/26/69dcb9b91f4e59f8f34b2332a4a0a951b44f547c4ed39d3e4dcfcff48f89/pillow-12.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:99a7f72fb6249302aa62245680754862a44179b545ded638cf1fef59befb57ef", size = 7157998, upload-time = "2025-10-15T18:23:02.627Z" }, + { url = "https://files.pythonhosted.org/packages/61/2b/726235842220ca95fa441ddf55dd2382b52ab5b8d9c0596fe6b3f23dafe8/pillow-12.0.0-cp313-cp313t-win32.whl", hash = "sha256:4078242472387600b2ce8d93ade8899c12bf33fa89e55ec89fe126e9d6d5d9e9", size = 6306201, upload-time = "2025-10-15T18:23:04.709Z" }, + { url = "https://files.pythonhosted.org/packages/c0/3d/2afaf4e840b2df71344ababf2f8edd75a705ce500e5dc1e7227808312ae1/pillow-12.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2c54c1a783d6d60595d3514f0efe9b37c8808746a66920315bfd34a938d7994b", size = 7013165, upload-time = "2025-10-15T18:23:06.46Z" }, + { url = "https://files.pythonhosted.org/packages/6f/75/3fa09aa5cf6ed04bee3fa575798ddf1ce0bace8edb47249c798077a81f7f/pillow-12.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:26d9f7d2b604cd23aba3e9faf795787456ac25634d82cd060556998e39c6fa47", size = 2437834, upload-time = "2025-10-15T18:23:08.194Z" }, + { url = "https://files.pythonhosted.org/packages/54/2a/9a8c6ba2c2c07b71bec92cf63e03370ca5e5f5c5b119b742bcc0cde3f9c5/pillow-12.0.0-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:beeae3f27f62308f1ddbcfb0690bf44b10732f2ef43758f169d5e9303165d3f9", size = 4045531, upload-time = "2025-10-15T18:23:10.121Z" }, + { url = "https://files.pythonhosted.org/packages/84/54/836fdbf1bfb3d66a59f0189ff0b9f5f666cee09c6188309300df04ad71fa/pillow-12.0.0-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:d4827615da15cd59784ce39d3388275ec093ae3ee8d7f0c089b76fa87af756c2", size = 4120554, upload-time = "2025-10-15T18:23:12.14Z" }, + { url = "https://files.pythonhosted.org/packages/0d/cd/16aec9f0da4793e98e6b54778a5fbce4f375c6646fe662e80600b8797379/pillow-12.0.0-cp314-cp314-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:3e42edad50b6909089750e65c91aa09aaf1e0a71310d383f11321b27c224ed8a", size = 3576812, upload-time = "2025-10-15T18:23:13.962Z" }, + { url = "https://files.pythonhosted.org/packages/f6/b7/13957fda356dc46339298b351cae0d327704986337c3c69bb54628c88155/pillow-12.0.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:e5d8efac84c9afcb40914ab49ba063d94f5dbdf5066db4482c66a992f47a3a3b", size = 5252689, upload-time = "2025-10-15T18:23:15.562Z" }, + { url = "https://files.pythonhosted.org/packages/fc/f5/eae31a306341d8f331f43edb2e9122c7661b975433de5e447939ae61c5da/pillow-12.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:266cd5f2b63ff316d5a1bba46268e603c9caf5606d44f38c2873c380950576ad", size = 4650186, upload-time = "2025-10-15T18:23:17.379Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/62/2a88339aa40c4c77e79108facbd307d6091e2c0eb5b8d3cf4977cfca2fe6/pillow-12.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:58eea5ebe51504057dd95c5b77d21700b77615ab0243d8152793dc00eb4faf01", size = 6230308, upload-time = "2025-10-15T18:23:18.971Z" }, + { url = "https://files.pythonhosted.org/packages/c7/33/5425a8992bcb32d1cb9fa3dd39a89e613d09a22f2c8083b7bf43c455f760/pillow-12.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f13711b1a5ba512d647a0e4ba79280d3a9a045aaf7e0cc6fbe96b91d4cdf6b0c", size = 8039222, upload-time = "2025-10-15T18:23:20.909Z" }, + { url = "https://files.pythonhosted.org/packages/d8/61/3f5d3b35c5728f37953d3eec5b5f3e77111949523bd2dd7f31a851e50690/pillow-12.0.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6846bd2d116ff42cba6b646edf5bf61d37e5cbd256425fa089fee4ff5c07a99e", size = 6346657, upload-time = "2025-10-15T18:23:23.077Z" }, + { url = "https://files.pythonhosted.org/packages/3a/be/ee90a3d79271227e0f0a33c453531efd6ed14b2e708596ba5dd9be948da3/pillow-12.0.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c98fa880d695de164b4135a52fd2e9cd7b7c90a9d8ac5e9e443a24a95ef9248e", size = 7038482, upload-time = "2025-10-15T18:23:25.005Z" }, + { url = "https://files.pythonhosted.org/packages/44/34/a16b6a4d1ad727de390e9bd9f19f5f669e079e5826ec0f329010ddea492f/pillow-12.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fa3ed2a29a9e9d2d488b4da81dcb54720ac3104a20bf0bd273f1e4648aff5af9", size = 6461416, upload-time = "2025-10-15T18:23:27.009Z" }, + { url = "https://files.pythonhosted.org/packages/b6/39/1aa5850d2ade7d7ba9f54e4e4c17077244ff7a2d9e25998c38a29749eb3f/pillow-12.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d034140032870024e6b9892c692fe2968493790dd57208b2c37e3fb35f6df3ab", size = 7131584, upload-time = "2025-10-15T18:23:29.752Z" }, + { url = "https://files.pythonhosted.org/packages/bf/db/4fae862f8fad0167073a7733973bfa955f47e2cac3dc3e3e6257d10fab4a/pillow-12.0.0-cp314-cp314-win32.whl", hash = "sha256:1b1b133e6e16105f524a8dec491e0586d072948ce15c9b914e41cdadd209052b", size = 6400621, upload-time = "2025-10-15T18:23:32.06Z" }, + { url = "https://files.pythonhosted.org/packages/2b/24/b350c31543fb0107ab2599464d7e28e6f856027aadda995022e695313d94/pillow-12.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:8dc232e39d409036af549c86f24aed8273a40ffa459981146829a324e0848b4b", size = 7142916, upload-time = "2025-10-15T18:23:34.71Z" }, + { url = "https://files.pythonhosted.org/packages/0f/9b/0ba5a6fd9351793996ef7487c4fdbde8d3f5f75dbedc093bb598648fddf0/pillow-12.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:d52610d51e265a51518692045e372a4c363056130d922a7351429ac9f27e70b0", size = 2523836, upload-time = "2025-10-15T18:23:36.967Z" }, + { url = "https://files.pythonhosted.org/packages/f5/7a/ceee0840aebc579af529b523d530840338ecf63992395842e54edc805987/pillow-12.0.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:1979f4566bb96c1e50a62d9831e2ea2d1211761e5662afc545fa766f996632f6", size = 5255092, upload-time = "2025-10-15T18:23:38.573Z" }, + { url = "https://files.pythonhosted.org/packages/44/76/20776057b4bfd1aef4eeca992ebde0f53a4dce874f3ae693d0ec90a4f79b/pillow-12.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b2e4b27a6e15b04832fe9bf292b94b5ca156016bbc1ea9c2c20098a0320d6cf6", size = 4653158, upload-time = "2025-10-15T18:23:40.238Z" }, + { url = 
"https://files.pythonhosted.org/packages/82/3f/d9ff92ace07be8836b4e7e87e6a4c7a8318d47c2f1463ffcf121fc57d9cb/pillow-12.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fb3096c30df99fd01c7bf8e544f392103d0795b9f98ba71a8054bcbf56b255f1", size = 6267882, upload-time = "2025-10-15T18:23:42.434Z" }, + { url = "https://files.pythonhosted.org/packages/9f/7a/4f7ff87f00d3ad33ba21af78bfcd2f032107710baf8280e3722ceec28cda/pillow-12.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7438839e9e053ef79f7112c881cef684013855016f928b168b81ed5835f3e75e", size = 8071001, upload-time = "2025-10-15T18:23:44.29Z" }, + { url = "https://files.pythonhosted.org/packages/75/87/fcea108944a52dad8cca0715ae6247e271eb80459364a98518f1e4f480c1/pillow-12.0.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d5c411a8eaa2299322b647cd932586b1427367fd3184ffbb8f7a219ea2041ca", size = 6380146, upload-time = "2025-10-15T18:23:46.065Z" }, + { url = "https://files.pythonhosted.org/packages/91/52/0d31b5e571ef5fd111d2978b84603fce26aba1b6092f28e941cb46570745/pillow-12.0.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d7e091d464ac59d2c7ad8e7e08105eaf9dafbc3883fd7265ffccc2baad6ac925", size = 7067344, upload-time = "2025-10-15T18:23:47.898Z" }, + { url = "https://files.pythonhosted.org/packages/7b/f4/2dd3d721f875f928d48e83bb30a434dee75a2531bca839bb996bb0aa5a91/pillow-12.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:792a2c0be4dcc18af9d4a2dfd8a11a17d5e25274a1062b0ec1c2d79c76f3e7f8", size = 6491864, upload-time = "2025-10-15T18:23:49.607Z" }, + { url = "https://files.pythonhosted.org/packages/30/4b/667dfcf3d61fc309ba5a15b141845cece5915e39b99c1ceab0f34bf1d124/pillow-12.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:afbefa430092f71a9593a99ab6a4e7538bc9eabbf7bf94f91510d3503943edc4", size = 7158911, upload-time = "2025-10-15T18:23:51.351Z" }, + { url = "https://files.pythonhosted.org/packages/a2/2f/16cabcc6426c32218ace36bf0d55955e813f2958afddbf1d391849fee9d1/pillow-12.0.0-cp314-cp314t-win32.whl", hash = "sha256:3830c769decf88f1289680a59d4f4c46c72573446352e2befec9a8512104fa52", size = 6408045, upload-time = "2025-10-15T18:23:53.177Z" }, + { url = "https://files.pythonhosted.org/packages/35/73/e29aa0c9c666cf787628d3f0dcf379f4791fba79f4936d02f8b37165bdf8/pillow-12.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:905b0365b210c73afb0ebe9101a32572152dfd1c144c7e28968a331b9217b94a", size = 7148282, upload-time = "2025-10-15T18:23:55.316Z" }, + { url = "https://files.pythonhosted.org/packages/c1/70/6b41bdcddf541b437bbb9f47f94d2db5d9ddef6c37ccab8c9107743748a4/pillow-12.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:99353a06902c2e43b43e8ff74ee65a7d90307d82370604746738a1e0661ccca7", size = 2525630, upload-time = "2025-10-15T18:23:57.149Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = 
"2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "prompt-toolkit" +version = "3.0.52" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "wcwidth" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a1/96/06e01a7b38dce6fe1db213e061a4602dd6032a8a97ef6c1a862537732421/prompt_toolkit-3.0.52.tar.gz", hash = "sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855", size = 434198, upload-time = "2025-08-27T15:24:02.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/84/03/0d3ce49e2505ae70cf43bc5bb3033955d2fc9f932163e84dc0779cc47f48/prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955", size = 391431, upload-time = "2025-08-27T15:23:59.498Z" }, +] + +[[package]] +name = "proto-plus" +version = "1.27.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/01/89/9cbe2f4bba860e149108b683bc2efec21f14d5f7ed6e25562ad86acbc373/proto_plus-1.27.0.tar.gz", hash = "sha256:873af56dd0d7e91836aee871e5799e1c6f1bda86ac9a983e0bb9f0c266a568c4", size = 56158, upload-time = "2025-12-16T13:46:25.729Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cd/24/3b7a0818484df9c28172857af32c2397b6d8fcd99d9468bd4684f98ebf0a/proto_plus-1.27.0-py3-none-any.whl", hash = "sha256:1baa7f81cf0f8acb8bc1f6d085008ba4171eaf669629d1b6d1673b21ed1c0a82", size = 50205, upload-time = "2025-12-16T13:46:24.76Z" }, +] + +[[package]] +name = "protobuf" +version = "6.33.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/34/44/e49ecff446afeec9d1a66d6bbf9adc21e3c7cea7803a920ca3773379d4f6/protobuf-6.33.2.tar.gz", hash = "sha256:56dc370c91fbb8ac85bc13582c9e373569668a290aa2e66a590c2a0d35ddb9e4", size = 444296, upload-time = "2025-12-06T00:17:53.311Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/91/1e3a34881a88697a7354ffd177e8746e97a722e5e8db101544b47e84afb1/protobuf-6.33.2-cp310-abi3-win32.whl", hash = "sha256:87eb388bd2d0f78febd8f4c8779c79247b26a5befad525008e49a6955787ff3d", size = 425603, upload-time = "2025-12-06T00:17:41.114Z" }, + { url = "https://files.pythonhosted.org/packages/64/20/4d50191997e917ae13ad0a235c8b42d8c1ab9c3e6fd455ca16d416944355/protobuf-6.33.2-cp310-abi3-win_amd64.whl", hash = "sha256:fc2a0e8b05b180e5fc0dd1559fe8ebdae21a27e81ac77728fb6c42b12c7419b4", size = 436930, upload-time = "2025-12-06T00:17:43.278Z" }, + { url = "https://files.pythonhosted.org/packages/b2/ca/7e485da88ba45c920fb3f50ae78de29ab925d9e54ef0de678306abfbb497/protobuf-6.33.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d9b19771ca75935b3a4422957bc518b0cecb978b31d1dd12037b088f6bcc0e43", size = 427621, upload-time = "2025-12-06T00:17:44.445Z" }, + { url = "https://files.pythonhosted.org/packages/7d/4f/f743761e41d3b2b2566748eb76bbff2b43e14d5fcab694f494a16458b05f/protobuf-6.33.2-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:b5d3b5625192214066d99b2b605f5783483575656784de223f00a8d00754fc0e", size = 324460, upload-time = "2025-12-06T00:17:45.678Z" }, + { url = "https://files.pythonhosted.org/packages/b1/fa/26468d00a92824020f6f2090d827078c09c9c587e34cbfd2d0c7911221f8/protobuf-6.33.2-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:8cd7640aee0b7828b6d03ae518b5b4806fdfc1afe8de82f79c3454f8aef29872", size = 339168, upload-time = "2025-12-06T00:17:46.813Z" }, + { url = 
"https://files.pythonhosted.org/packages/56/13/333b8f421738f149d4fe5e49553bc2a2ab75235486259f689b4b91f96cec/protobuf-6.33.2-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:1f8017c48c07ec5859106533b682260ba3d7c5567b1ca1f24297ce03384d1b4f", size = 323270, upload-time = "2025-12-06T00:17:48.253Z" }, + { url = "https://files.pythonhosted.org/packages/0e/15/4f02896cc3df04fc465010a4c6a0cd89810f54617a32a70ef531ed75d61c/protobuf-6.33.2-py3-none-any.whl", hash = "sha256:7636aad9bb01768870266de5dc009de2d1b936771b38a793f73cbbf279c91c5c", size = 170501, upload-time = "2025-12-06T00:17:52.211Z" }, +] + +[[package]] +name = "psutil" +version = "7.1.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e1/88/bdd0a41e5857d5d703287598cbf08dad90aed56774ea52ae071bae9071b6/psutil-7.1.3.tar.gz", hash = "sha256:6c86281738d77335af7aec228328e944b30930899ea760ecf33a4dba66be5e74", size = 489059, upload-time = "2025-11-02T12:25:54.619Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bd/93/0c49e776b8734fef56ec9c5c57f923922f2cf0497d62e0f419465f28f3d0/psutil-7.1.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0005da714eee687b4b8decd3d6cc7c6db36215c9e74e5ad2264b90c3df7d92dc", size = 239751, upload-time = "2025-11-02T12:25:58.161Z" }, + { url = "https://files.pythonhosted.org/packages/6f/8d/b31e39c769e70780f007969815195a55c81a63efebdd4dbe9e7a113adb2f/psutil-7.1.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:19644c85dcb987e35eeeaefdc3915d059dac7bd1167cdcdbf27e0ce2df0c08c0", size = 240368, upload-time = "2025-11-02T12:26:00.491Z" }, + { url = "https://files.pythonhosted.org/packages/62/61/23fd4acc3c9eebbf6b6c78bcd89e5d020cfde4acf0a9233e9d4e3fa698b4/psutil-7.1.3-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95ef04cf2e5ba0ab9eaafc4a11eaae91b44f4ef5541acd2ee91d9108d00d59a7", size = 287134, upload-time = "2025-11-02T12:26:02.613Z" }, + { url = "https://files.pythonhosted.org/packages/30/1c/f921a009ea9ceb51aa355cb0cc118f68d354db36eae18174bab63affb3e6/psutil-7.1.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1068c303be3a72f8e18e412c5b2a8f6d31750fb152f9cb106b54090296c9d251", size = 289904, upload-time = "2025-11-02T12:26:05.207Z" }, + { url = "https://files.pythonhosted.org/packages/a6/82/62d68066e13e46a5116df187d319d1724b3f437ddd0f958756fc052677f4/psutil-7.1.3-cp313-cp313t-win_amd64.whl", hash = "sha256:18349c5c24b06ac5612c0428ec2a0331c26443d259e2a0144a9b24b4395b58fa", size = 249642, upload-time = "2025-11-02T12:26:07.447Z" }, + { url = "https://files.pythonhosted.org/packages/df/ad/c1cd5fe965c14a0392112f68362cfceb5230819dbb5b1888950d18a11d9f/psutil-7.1.3-cp313-cp313t-win_arm64.whl", hash = "sha256:c525ffa774fe4496282fb0b1187725793de3e7c6b29e41562733cae9ada151ee", size = 245518, upload-time = "2025-11-02T12:26:09.719Z" }, + { url = "https://files.pythonhosted.org/packages/2e/bb/6670bded3e3236eb4287c7bcdc167e9fae6e1e9286e437f7111caed2f909/psutil-7.1.3-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:b403da1df4d6d43973dc004d19cee3b848e998ae3154cc8097d139b77156c353", size = 239843, upload-time = "2025-11-02T12:26:11.968Z" }, + { url = "https://files.pythonhosted.org/packages/b8/66/853d50e75a38c9a7370ddbeefabdd3d3116b9c31ef94dc92c6729bc36bec/psutil-7.1.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ad81425efc5e75da3f39b3e636293360ad8d0b49bed7df824c79764fb4ba9b8b", size = 240369, upload-time = "2025-11-02T12:26:14.358Z" 
}, + { url = "https://files.pythonhosted.org/packages/41/bd/313aba97cb5bfb26916dc29cf0646cbe4dd6a89ca69e8c6edce654876d39/psutil-7.1.3-cp314-cp314t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f33a3702e167783a9213db10ad29650ebf383946e91bc77f28a5eb083496bc9", size = 288210, upload-time = "2025-11-02T12:26:16.699Z" }, + { url = "https://files.pythonhosted.org/packages/c2/fa/76e3c06e760927a0cfb5705eb38164254de34e9bd86db656d4dbaa228b04/psutil-7.1.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fac9cd332c67f4422504297889da5ab7e05fd11e3c4392140f7370f4208ded1f", size = 291182, upload-time = "2025-11-02T12:26:18.848Z" }, + { url = "https://files.pythonhosted.org/packages/0f/1d/5774a91607035ee5078b8fd747686ebec28a962f178712de100d00b78a32/psutil-7.1.3-cp314-cp314t-win_amd64.whl", hash = "sha256:3792983e23b69843aea49c8f5b8f115572c5ab64c153bada5270086a2123c7e7", size = 250466, upload-time = "2025-11-02T12:26:21.183Z" }, + { url = "https://files.pythonhosted.org/packages/00/ca/e426584bacb43a5cb1ac91fae1937f478cd8fbe5e4ff96574e698a2c77cd/psutil-7.1.3-cp314-cp314t-win_arm64.whl", hash = "sha256:31d77fcedb7529f27bb3a0472bea9334349f9a04160e8e6e5020f22c59893264", size = 245756, upload-time = "2025-11-02T12:26:23.148Z" }, + { url = "https://files.pythonhosted.org/packages/ef/94/46b9154a800253e7ecff5aaacdf8ebf43db99de4a2dfa18575b02548654e/psutil-7.1.3-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2bdbcd0e58ca14996a42adf3621a6244f1bb2e2e528886959c72cf1e326677ab", size = 238359, upload-time = "2025-11-02T12:26:25.284Z" }, + { url = "https://files.pythonhosted.org/packages/68/3a/9f93cff5c025029a36d9a92fef47220ab4692ee7f2be0fba9f92813d0cb8/psutil-7.1.3-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:bc31fa00f1fbc3c3802141eede66f3a2d51d89716a194bf2cd6fc68310a19880", size = 239171, upload-time = "2025-11-02T12:26:27.23Z" }, + { url = "https://files.pythonhosted.org/packages/ce/b1/5f49af514f76431ba4eea935b8ad3725cdeb397e9245ab919dbc1d1dc20f/psutil-7.1.3-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3bb428f9f05c1225a558f53e30ccbad9930b11c3fc206836242de1091d3e7dd3", size = 263261, upload-time = "2025-11-02T12:26:29.48Z" }, + { url = "https://files.pythonhosted.org/packages/e0/95/992c8816a74016eb095e73585d747e0a8ea21a061ed3689474fabb29a395/psutil-7.1.3-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:56d974e02ca2c8eb4812c3f76c30e28836fffc311d55d979f1465c1feeb2b68b", size = 264635, upload-time = "2025-11-02T12:26:31.74Z" }, + { url = "https://files.pythonhosted.org/packages/55/4c/c3ed1a622b6ae2fd3c945a366e64eb35247a31e4db16cf5095e269e8eb3c/psutil-7.1.3-cp37-abi3-win_amd64.whl", hash = "sha256:f39c2c19fe824b47484b96f9692932248a54c43799a84282cfe58d05a6449efd", size = 247633, upload-time = "2025-11-02T12:26:33.887Z" }, + { url = "https://files.pythonhosted.org/packages/c9/ad/33b2ccec09bf96c2b2ef3f9a6f66baac8253d7565d8839e024a6b905d45d/psutil-7.1.3-cp37-abi3-win_arm64.whl", hash = "sha256:bd0d69cee829226a761e92f28140bec9a5ee9d5b4fb4b0cc589068dbfff559b1", size = 244608, upload-time = "2025-11-02T12:26:36.136Z" }, +] + +[[package]] +name = "pyasn1" +version = "0.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/e9/01f1a64245b89f039897cb0130016d79f77d52669aae6ee7b159a6c4c018/pyasn1-0.6.1.tar.gz", hash = 
"sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034", size = 145322, upload-time = "2024-09-10T22:41:42.55Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/f1/d6a797abb14f6283c0ddff96bbdd46937f64122b8c925cab503dd37f8214/pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629", size = 83135, upload-time = "2024-09-11T16:00:36.122Z" }, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e9/e6/78ebbb10a8c8e4b61a59249394a4a594c1a7af95593dc933a349c8d00964/pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6", size = 307892, upload-time = "2025-03-28T02:41:22.17Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a", size = 181259, upload-time = "2025-03-28T02:41:19.028Z" }, +] + +[[package]] +name = "pycparser" +version = "2.23" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size = 173734, upload-time = "2025-09-09T13:23:47.91Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140, upload-time = "2025-09-09T13:23:46.651Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pygtail" +version = "0.14.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/89/437120e303d5d2c81107ed3415d5f3c9975f7dfdeef9e4440cef364e3bf9/pygtail-0.14.0.tar.gz", hash = "sha256:55616d31a081eaaeb069d0946f2bc7e530ebf505d4c3c050f8e941786a3449d3", size = 13509, upload-time = "2022-11-06T14:54:28.816Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/77/5558c3904229d2e320722ec58d38b82f9ff3063b7eeda49a8821b5595de1/pygtail-0.14.0-py3-none-any.whl", hash = "sha256:3d8e847d4d5c56e3cece1f65577562e63f8b75af7b93bf21b71c3213d369c2a9", size = 14319, upload-time = "2022-11-06T14:54:26.714Z" }, +] + +[[package]] +name = "pyhocon" +version = "0.3.61" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyparsing" }, +] +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/39/2d/cd65dc4fa8c901e6d02b4074771ced04828d71af18b97da24ed1e55507d7/pyhocon-0.3.61-py3-none-any.whl", hash = "sha256:73d0f064af9a7d454949c5557284ce1d716cfd8e1383ecc90095fc575d278df0", size = 25049, upload-time = "2024-05-29T15:09:23.683Z" }, +] + +[[package]] +name = "pyopenssl" +version = "25.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/80/be/97b83a464498a79103036bc74d1038df4a7ef0e402cfaf4d5e113fb14759/pyopenssl-25.3.0.tar.gz", hash = "sha256:c981cb0a3fd84e8602d7afc209522773b94c1c2446a3c710a75b06fe1beae329", size = 184073, upload-time = "2025-09-17T00:32:21.037Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/81/ef2b1dfd1862567d573a4fdbc9f969067621764fbb74338496840a1d2977/pyopenssl-25.3.0-py3-none-any.whl", hash = "sha256:1fda6fc034d5e3d179d39e59c1895c9faeaf40a79de5fc4cbbfbe0d36f4a77b6", size = 57268, upload-time = "2025-09-17T00:32:19.474Z" }, +] + +[[package]] +name = "pyparsing" +version = "3.2.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/a5/181488fc2b9d093e3972d2a472855aae8a03f000592dbfce716a512b3359/pyparsing-3.2.5.tar.gz", hash = "sha256:2df8d5b7b2802ef88e8d016a2eb9c7aeaa923529cd251ed0fe4608275d4105b6", size = 1099274, upload-time = "2025-09-21T04:11:06.277Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/10/5e/1aa9a93198c6b64513c9d7752de7422c06402de6600a8767da1524f9570b/pyparsing-3.2.5-py3-none-any.whl", hash = "sha256:e38a4f02064cf41fe6593d328d0512495ad1f3d8a91c4f73fc401b3079a59a5e", size = 113890, upload-time = "2025-09-21T04:11:04.117Z" }, +] + +[[package]] +name = "pyreadline3" +version = "3.5.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/49/4cea918a08f02817aabae639e3d0ac046fef9f9180518a3ad394e22da148/pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7", size = 99839, upload-time = "2024-09-19T02:40:10.062Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/dc/491b7661614ab97483abf2056be1deee4dc2490ecbf7bff9ab5cdbac86e1/pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6", size = 83178, upload-time = "2024-09-19T02:40:08.598Z" }, +] + +[[package]] +name = "pyrefly" +version = "0.46.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8d/7e/b4f9f26611f72405af16af916f460025bd6d40d00952a046eab0df98bb82/pyrefly-0.46.1.tar.gz", hash = "sha256:ea6db4788cd11eb7fd7ef7f0bdeef4621861cb44cd7d62db073706022669ef4a", size = 4760174, upload-time = "2025-12-23T23:06:10.501Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/16/46/8f9d4400e4d60da6555415351b11cbe5e122cfa299194278ebf7bcf26cd3/pyrefly-0.46.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:f4975d4d2a451423727f70461bd1ef3f18c6c8c0e4bb5acf902add73bdaf6642", size = 11659836, upload-time = "2025-12-23T23:05:49.879Z" }, + { url = "https://files.pythonhosted.org/packages/3b/15/486b8ea769560e65152201df5d887c9f817fa4e536388e86eb6b1ce774f0/pyrefly-0.46.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:bcecd921990e56759e7ec2c91ab69aaf790dcc6c17b36054d0f42789085e0dde", size = 11269185, upload-time = "2025-12-23T23:05:52.423Z" }, 
+ { url = "https://files.pythonhosted.org/packages/33/bc/b5982fc9dfe2abe5d5341a3576aca3c8c5e3af24223b56d256f16df1d31b/pyrefly-0.46.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:262f6ff2c593ec575eb626dbb3309d9fbb1527cbb0dab2b4d6ae8c8f51bf2715", size = 31504426, upload-time = "2025-12-23T23:05:54.692Z" }, + { url = "https://files.pythonhosted.org/packages/05/fd/34c9dec50075bbf471c23ec46ccca4b167490a3418aef351cfd0cdd7feeb/pyrefly-0.46.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:adccc7e54952a3788f7a108abc3177f9ad83b56b052fc0cb1ed7a93da5c69f71", size = 33721544, upload-time = "2025-12-23T23:05:57.465Z" }, + { url = "https://files.pythonhosted.org/packages/fe/2f/7c9dcf8b77ad3e3579fe7a8d2eaf3a2df8a31e8be7bc5561b369e0bc73f8/pyrefly-0.46.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7251eec7366a3a4df5e87214300645c8f7d27b1339cf30b166227aa8f07979d9", size = 34778153, upload-time = "2025-12-23T23:06:00.313Z" }, + { url = "https://files.pythonhosted.org/packages/5d/1f/6ffa777f3a8f1a66a96ff5b6a6f1659e2172662fec5fcbce5db0f3ff617d/pyrefly-0.46.1-py3-none-win32.whl", hash = "sha256:a5babc50ebfc2227e4209697e4e5754a10935cedba3ab26d26fd3e20625b6479", size = 10728406, upload-time = "2025-12-23T23:06:02.863Z" }, + { url = "https://files.pythonhosted.org/packages/f7/34/7faaee043cc6b268010e0124a82bb5793588531e3d4af2e3283588d88eb7/pyrefly-0.46.1-py3-none-win_amd64.whl", hash = "sha256:e2a784530ad8c918ad7f656957c9db8d00e484111298a6601490141cabd9966a", size = 11418624, upload-time = "2025-12-23T23:06:06.16Z" }, + { url = "https://files.pythonhosted.org/packages/58/4f/13e0e1c2136d35c44a034c9606bce2513ed3e896df86985fedd9c1347432/pyrefly-0.46.1-py3-none-win_arm64.whl", hash = "sha256:1835206055454cc2b88bc0b6acb557c2e5bd9ae8df724bb48fd2dc3dc40ffe13", size = 10964488, upload-time = "2025-12-23T23:06:08.165Z" }, +] + +[[package]] +name = "pytest" +version = "9.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = 
"2024-03-01T18:36:18.57Z" }, +] + +[[package]] +name = "python-json-logger" +version = "3.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/de/d3144a0bceede957f961e975f3752760fbe390d57fbe194baf709d8f1f7b/python_json_logger-3.3.0.tar.gz", hash = "sha256:12b7e74b17775e7d565129296105bbe3910842d9d0eb083fc83a6a617aa8df84", size = 16642, upload-time = "2025-03-07T07:08:27.301Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/20/0f2523b9e50a8052bc6a8b732dfc8568abbdc42010aef03a2d750bdab3b2/python_json_logger-3.3.0-py3-none-any.whl", hash = "sha256:dd980fae8cffb24c13caf6e158d3d61c0d6d22342f932cb6e9deedab3d35eec7", size = 15163, upload-time = "2025-03-07T07:08:25.627Z" }, +] + +[[package]] +name = "pytz" +version = "2025.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884, upload-time = "2025-03-25T02:25:00.538Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" }, +] + +[[package]] +name = "pywin32" +version = "311" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543, upload-time = "2025-07-14T20:13:20.765Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040, upload-time = "2025-07-14T20:13:22.543Z" }, + { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102, upload-time = "2025-07-14T20:13:24.682Z" }, + { url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700, upload-time = "2025-07-14T20:13:26.471Z" }, + { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700, upload-time = "2025-07-14T20:13:28.243Z" }, + { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318, upload-time = "2025-07-14T20:13:30.348Z" }, + { url = "https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = 
"sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714, upload-time = "2025-07-14T20:13:32.449Z" }, + { url = "https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", size = 9656800, upload-time = "2025-07-14T20:13:34.312Z" }, + { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540, upload-time = "2025-07-14T20:13:36.379Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, + { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = 
"https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, + { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, + { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, + { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, + { url = 
"https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" }, + { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, + { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, + { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, + { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, + { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, + { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, +] + +[[package]] +name = "questionary" +version = "2.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "prompt-toolkit" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f6/45/eafb0bba0f9988f6a2520f9ca2df2c82ddfa8d67c95d6625452e97b204a5/questionary-2.1.1.tar.gz", hash = "sha256:3d7e980292bb0107abaa79c68dd3eee3c561b83a0f89ae482860b181c8bd412d", size = 25845, upload-time = "2025-08-28T19:00:20.851Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/26/1062c7ec1b053db9e499b4d2d5bc231743201b74051c973dadeac80a8f43/questionary-2.1.1-py3-none-any.whl", hash = 
"sha256:a51af13f345f1cdea62347589fbb6df3b290306ab8930713bfae4d475a7d4a59", size = 36753, upload-time = "2025-08-28T19:00:19.56Z" }, +] + +[[package]] +name = "regex" +version = "2025.11.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cc/a9/546676f25e573a4cf00fe8e119b78a37b6a8fe2dc95cda877b30889c9c45/regex-2025.11.3.tar.gz", hash = "sha256:1fedc720f9bb2494ce31a58a1631f9c82df6a09b49c19517ea5cc280b4541e01", size = 414669, upload-time = "2025-11-03T21:34:22.089Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e8/74/18f04cb53e58e3fb107439699bd8375cf5a835eec81084e0bddbd122e4c2/regex-2025.11.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bc8ab71e2e31b16e40868a40a69007bc305e1109bd4658eb6cad007e0bf67c41", size = 489312, upload-time = "2025-11-03T21:31:34.343Z" }, + { url = "https://files.pythonhosted.org/packages/78/3f/37fcdd0d2b1e78909108a876580485ea37c91e1acf66d3bb8e736348f441/regex-2025.11.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:22b29dda7e1f7062a52359fca6e58e548e28c6686f205e780b02ad8ef710de36", size = 291256, upload-time = "2025-11-03T21:31:35.675Z" }, + { url = "https://files.pythonhosted.org/packages/bf/26/0a575f58eb23b7ebd67a45fccbc02ac030b737b896b7e7a909ffe43ffd6a/regex-2025.11.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3a91e4a29938bc1a082cc28fdea44be420bf2bebe2665343029723892eb073e1", size = 288921, upload-time = "2025-11-03T21:31:37.07Z" }, + { url = "https://files.pythonhosted.org/packages/ea/98/6a8dff667d1af907150432cf5abc05a17ccd32c72a3615410d5365ac167a/regex-2025.11.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08b884f4226602ad40c5d55f52bf91a9df30f513864e0054bad40c0e9cf1afb7", size = 798568, upload-time = "2025-11-03T21:31:38.784Z" }, + { url = "https://files.pythonhosted.org/packages/64/15/92c1db4fa4e12733dd5a526c2dd2b6edcbfe13257e135fc0f6c57f34c173/regex-2025.11.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3e0b11b2b2433d1c39c7c7a30e3f3d0aeeea44c2a8d0bae28f6b95f639927a69", size = 864165, upload-time = "2025-11-03T21:31:40.559Z" }, + { url = "https://files.pythonhosted.org/packages/f9/e7/3ad7da8cdee1ce66c7cd37ab5ab05c463a86ffeb52b1a25fe7bd9293b36c/regex-2025.11.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:87eb52a81ef58c7ba4d45c3ca74e12aa4b4e77816f72ca25258a85b3ea96cb48", size = 912182, upload-time = "2025-11-03T21:31:42.002Z" }, + { url = "https://files.pythonhosted.org/packages/84/bd/9ce9f629fcb714ffc2c3faf62b6766ecb7a585e1e885eb699bcf130a5209/regex-2025.11.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a12ab1f5c29b4e93db518f5e3872116b7e9b1646c9f9f426f777b50d44a09e8c", size = 803501, upload-time = "2025-11-03T21:31:43.815Z" }, + { url = "https://files.pythonhosted.org/packages/7c/0f/8dc2e4349d8e877283e6edd6c12bdcebc20f03744e86f197ab6e4492bf08/regex-2025.11.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7521684c8c7c4f6e88e35ec89680ee1aa8358d3f09d27dfbdf62c446f5d4c695", size = 787842, upload-time = "2025-11-03T21:31:45.353Z" }, + { url = "https://files.pythonhosted.org/packages/f9/73/cff02702960bc185164d5619c0c62a2f598a6abff6695d391b096237d4ab/regex-2025.11.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7fe6e5440584e94cc4b3f5f4d98a25e29ca12dccf8873679a635638349831b98", size = 858519, upload-time = "2025-11-03T21:31:46.814Z" }, + { url 
= "https://files.pythonhosted.org/packages/61/83/0e8d1ae71e15bc1dc36231c90b46ee35f9d52fab2e226b0e039e7ea9c10a/regex-2025.11.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8e026094aa12b43f4fd74576714e987803a315c76edb6b098b9809db5de58f74", size = 850611, upload-time = "2025-11-03T21:31:48.289Z" }, + { url = "https://files.pythonhosted.org/packages/c8/f5/70a5cdd781dcfaa12556f2955bf170cd603cb1c96a1827479f8faea2df97/regex-2025.11.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:435bbad13e57eb5606a68443af62bed3556de2f46deb9f7d4237bc2f1c9fb3a0", size = 789759, upload-time = "2025-11-03T21:31:49.759Z" }, + { url = "https://files.pythonhosted.org/packages/59/9b/7c29be7903c318488983e7d97abcf8ebd3830e4c956c4c540005fcfb0462/regex-2025.11.3-cp312-cp312-win32.whl", hash = "sha256:3839967cf4dc4b985e1570fd8d91078f0c519f30491c60f9ac42a8db039be204", size = 266194, upload-time = "2025-11-03T21:31:51.53Z" }, + { url = "https://files.pythonhosted.org/packages/1a/67/3b92df89f179d7c367be654ab5626ae311cb28f7d5c237b6bb976cd5fbbb/regex-2025.11.3-cp312-cp312-win_amd64.whl", hash = "sha256:e721d1b46e25c481dc5ded6f4b3f66c897c58d2e8cfdf77bbced84339108b0b9", size = 277069, upload-time = "2025-11-03T21:31:53.151Z" }, + { url = "https://files.pythonhosted.org/packages/d7/55/85ba4c066fe5094d35b249c3ce8df0ba623cfd35afb22d6764f23a52a1c5/regex-2025.11.3-cp312-cp312-win_arm64.whl", hash = "sha256:64350685ff08b1d3a6fff33f45a9ca183dc1d58bbfe4981604e70ec9801bbc26", size = 270330, upload-time = "2025-11-03T21:31:54.514Z" }, + { url = "https://files.pythonhosted.org/packages/e1/a7/dda24ebd49da46a197436ad96378f17df30ceb40e52e859fc42cac45b850/regex-2025.11.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c1e448051717a334891f2b9a620fe36776ebf3dd8ec46a0b877c8ae69575feb4", size = 489081, upload-time = "2025-11-03T21:31:55.9Z" }, + { url = "https://files.pythonhosted.org/packages/19/22/af2dc751aacf88089836aa088a1a11c4f21a04707eb1b0478e8e8fb32847/regex-2025.11.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9b5aca4d5dfd7fbfbfbdaf44850fcc7709a01146a797536a8f84952e940cca76", size = 291123, upload-time = "2025-11-03T21:31:57.758Z" }, + { url = "https://files.pythonhosted.org/packages/a3/88/1a3ea5672f4b0a84802ee9891b86743438e7c04eb0b8f8c4e16a42375327/regex-2025.11.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:04d2765516395cf7dda331a244a3282c0f5ae96075f728629287dfa6f76ba70a", size = 288814, upload-time = "2025-11-03T21:32:01.12Z" }, + { url = "https://files.pythonhosted.org/packages/fb/8c/f5987895bf42b8ddeea1b315c9fedcfe07cadee28b9c98cf50d00adcb14d/regex-2025.11.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d9903ca42bfeec4cebedba8022a7c97ad2aab22e09573ce9976ba01b65e4361", size = 798592, upload-time = "2025-11-03T21:32:03.006Z" }, + { url = "https://files.pythonhosted.org/packages/99/2a/6591ebeede78203fa77ee46a1c36649e02df9eaa77a033d1ccdf2fcd5d4e/regex-2025.11.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:639431bdc89d6429f6721625e8129413980ccd62e9d3f496be618a41d205f160", size = 864122, upload-time = "2025-11-03T21:32:04.553Z" }, + { url = "https://files.pythonhosted.org/packages/94/d6/be32a87cf28cf8ed064ff281cfbd49aefd90242a83e4b08b5a86b38e8eb4/regex-2025.11.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f117efad42068f9715677c8523ed2be1518116d1c49b1dd17987716695181efe", size = 912272, upload-time = "2025-11-03T21:32:06.148Z" }, + { url = 
"https://files.pythonhosted.org/packages/62/11/9bcef2d1445665b180ac7f230406ad80671f0fc2a6ffb93493b5dd8cd64c/regex-2025.11.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4aecb6f461316adf9f1f0f6a4a1a3d79e045f9b71ec76055a791affa3b285850", size = 803497, upload-time = "2025-11-03T21:32:08.162Z" }, + { url = "https://files.pythonhosted.org/packages/e5/a7/da0dc273d57f560399aa16d8a68ae7f9b57679476fc7ace46501d455fe84/regex-2025.11.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3b3a5f320136873cc5561098dfab677eea139521cb9a9e8db98b7e64aef44cbc", size = 787892, upload-time = "2025-11-03T21:32:09.769Z" }, + { url = "https://files.pythonhosted.org/packages/da/4b/732a0c5a9736a0b8d6d720d4945a2f1e6f38f87f48f3173559f53e8d5d82/regex-2025.11.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:75fa6f0056e7efb1f42a1c34e58be24072cb9e61a601340cc1196ae92326a4f9", size = 858462, upload-time = "2025-11-03T21:32:11.769Z" }, + { url = "https://files.pythonhosted.org/packages/0c/f5/a2a03df27dc4c2d0c769220f5110ba8c4084b0bfa9ab0f9b4fcfa3d2b0fc/regex-2025.11.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:dbe6095001465294f13f1adcd3311e50dd84e5a71525f20a10bd16689c61ce0b", size = 850528, upload-time = "2025-11-03T21:32:13.906Z" }, + { url = "https://files.pythonhosted.org/packages/d6/09/e1cd5bee3841c7f6eb37d95ca91cdee7100b8f88b81e41c2ef426910891a/regex-2025.11.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:454d9b4ae7881afbc25015b8627c16d88a597479b9dea82b8c6e7e2e07240dc7", size = 789866, upload-time = "2025-11-03T21:32:15.748Z" }, + { url = "https://files.pythonhosted.org/packages/eb/51/702f5ea74e2a9c13d855a6a85b7f80c30f9e72a95493260193c07f3f8d74/regex-2025.11.3-cp313-cp313-win32.whl", hash = "sha256:28ba4d69171fc6e9896337d4fc63a43660002b7da53fc15ac992abcf3410917c", size = 266189, upload-time = "2025-11-03T21:32:17.493Z" }, + { url = "https://files.pythonhosted.org/packages/8b/00/6e29bb314e271a743170e53649db0fdb8e8ff0b64b4f425f5602f4eb9014/regex-2025.11.3-cp313-cp313-win_amd64.whl", hash = "sha256:bac4200befe50c670c405dc33af26dad5a3b6b255dd6c000d92fe4629f9ed6a5", size = 277054, upload-time = "2025-11-03T21:32:19.042Z" }, + { url = "https://files.pythonhosted.org/packages/25/f1/b156ff9f2ec9ac441710764dda95e4edaf5f36aca48246d1eea3f1fd96ec/regex-2025.11.3-cp313-cp313-win_arm64.whl", hash = "sha256:2292cd5a90dab247f9abe892ac584cb24f0f54680c73fcb4a7493c66c2bf2467", size = 270325, upload-time = "2025-11-03T21:32:21.338Z" }, + { url = "https://files.pythonhosted.org/packages/20/28/fd0c63357caefe5680b8ea052131acbd7f456893b69cc2a90cc3e0dc90d4/regex-2025.11.3-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:1eb1ebf6822b756c723e09f5186473d93236c06c579d2cc0671a722d2ab14281", size = 491984, upload-time = "2025-11-03T21:32:23.466Z" }, + { url = "https://files.pythonhosted.org/packages/df/ec/7014c15626ab46b902b3bcc4b28a7bae46d8f281fc7ea9c95e22fcaaa917/regex-2025.11.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:1e00ec2970aab10dc5db34af535f21fcf32b4a31d99e34963419636e2f85ae39", size = 292673, upload-time = "2025-11-03T21:32:25.034Z" }, + { url = "https://files.pythonhosted.org/packages/23/ab/3b952ff7239f20d05f1f99e9e20188513905f218c81d52fb5e78d2bf7634/regex-2025.11.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a4cb042b615245d5ff9b3794f56be4138b5adc35a4166014d31d1814744148c7", size = 291029, upload-time = "2025-11-03T21:32:26.528Z" }, + { url = 
"https://files.pythonhosted.org/packages/21/7e/3dc2749fc684f455f162dcafb8a187b559e2614f3826877d3844a131f37b/regex-2025.11.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44f264d4bf02f3176467d90b294d59bf1db9fe53c141ff772f27a8b456b2a9ed", size = 807437, upload-time = "2025-11-03T21:32:28.363Z" }, + { url = "https://files.pythonhosted.org/packages/1b/0b/d529a85ab349c6a25d1ca783235b6e3eedf187247eab536797021f7126c6/regex-2025.11.3-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7be0277469bf3bd7a34a9c57c1b6a724532a0d235cd0dc4e7f4316f982c28b19", size = 873368, upload-time = "2025-11-03T21:32:30.4Z" }, + { url = "https://files.pythonhosted.org/packages/7d/18/2d868155f8c9e3e9d8f9e10c64e9a9f496bb8f7e037a88a8bed26b435af6/regex-2025.11.3-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0d31e08426ff4b5b650f68839f5af51a92a5b51abd8554a60c2fbc7c71f25d0b", size = 914921, upload-time = "2025-11-03T21:32:32.123Z" }, + { url = "https://files.pythonhosted.org/packages/2d/71/9d72ff0f354fa783fe2ba913c8734c3b433b86406117a8db4ea2bf1c7a2f/regex-2025.11.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e43586ce5bd28f9f285a6e729466841368c4a0353f6fd08d4ce4630843d3648a", size = 812708, upload-time = "2025-11-03T21:32:34.305Z" }, + { url = "https://files.pythonhosted.org/packages/e7/19/ce4bf7f5575c97f82b6e804ffb5c4e940c62609ab2a0d9538d47a7fdf7d4/regex-2025.11.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:0f9397d561a4c16829d4e6ff75202c1c08b68a3bdbfe29dbfcdb31c9830907c6", size = 795472, upload-time = "2025-11-03T21:32:36.364Z" }, + { url = "https://files.pythonhosted.org/packages/03/86/fd1063a176ffb7b2315f9a1b08d17b18118b28d9df163132615b835a26ee/regex-2025.11.3-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:dd16e78eb18ffdb25ee33a0682d17912e8cc8a770e885aeee95020046128f1ce", size = 868341, upload-time = "2025-11-03T21:32:38.042Z" }, + { url = "https://files.pythonhosted.org/packages/12/43/103fb2e9811205e7386366501bc866a164a0430c79dd59eac886a2822950/regex-2025.11.3-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:ffcca5b9efe948ba0661e9df0fa50d2bc4b097c70b9810212d6b62f05d83b2dd", size = 854666, upload-time = "2025-11-03T21:32:40.079Z" }, + { url = "https://files.pythonhosted.org/packages/7d/22/e392e53f3869b75804762c7c848bd2dd2abf2b70fb0e526f58724638bd35/regex-2025.11.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c56b4d162ca2b43318ac671c65bd4d563e841a694ac70e1a976ac38fcf4ca1d2", size = 799473, upload-time = "2025-11-03T21:32:42.148Z" }, + { url = "https://files.pythonhosted.org/packages/4f/f9/8bd6b656592f925b6845fcbb4d57603a3ac2fb2373344ffa1ed70aa6820a/regex-2025.11.3-cp313-cp313t-win32.whl", hash = "sha256:9ddc42e68114e161e51e272f667d640f97e84a2b9ef14b7477c53aac20c2d59a", size = 268792, upload-time = "2025-11-03T21:32:44.13Z" }, + { url = "https://files.pythonhosted.org/packages/e5/87/0e7d603467775ff65cd2aeabf1b5b50cc1c3708556a8b849a2fa4dd1542b/regex-2025.11.3-cp313-cp313t-win_amd64.whl", hash = "sha256:7a7c7fdf755032ffdd72c77e3d8096bdcb0eb92e89e17571a196f03d88b11b3c", size = 280214, upload-time = "2025-11-03T21:32:45.853Z" }, + { url = "https://files.pythonhosted.org/packages/8d/d0/2afc6f8e94e2b64bfb738a7c2b6387ac1699f09f032d363ed9447fd2bb57/regex-2025.11.3-cp313-cp313t-win_arm64.whl", hash = "sha256:df9eb838c44f570283712e7cff14c16329a9f0fb19ca492d21d4b7528ee6821e", size = 271469, 
upload-time = "2025-11-03T21:32:48.026Z" }, + { url = "https://files.pythonhosted.org/packages/31/e9/f6e13de7e0983837f7b6d238ad9458800a874bf37c264f7923e63409944c/regex-2025.11.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:9697a52e57576c83139d7c6f213d64485d3df5bf84807c35fa409e6c970801c6", size = 489089, upload-time = "2025-11-03T21:32:50.027Z" }, + { url = "https://files.pythonhosted.org/packages/a3/5c/261f4a262f1fa65141c1b74b255988bd2fa020cc599e53b080667d591cfc/regex-2025.11.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e18bc3f73bd41243c9b38a6d9f2366cd0e0137a9aebe2d8ff76c5b67d4c0a3f4", size = 291059, upload-time = "2025-11-03T21:32:51.682Z" }, + { url = "https://files.pythonhosted.org/packages/8e/57/f14eeb7f072b0e9a5a090d1712741fd8f214ec193dba773cf5410108bb7d/regex-2025.11.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:61a08bcb0ec14ff4e0ed2044aad948d0659604f824cbd50b55e30b0ec6f09c73", size = 288900, upload-time = "2025-11-03T21:32:53.569Z" }, + { url = "https://files.pythonhosted.org/packages/3c/6b/1d650c45e99a9b327586739d926a1cd4e94666b1bd4af90428b36af66dc7/regex-2025.11.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c9c30003b9347c24bcc210958c5d167b9e4f9be786cb380a7d32f14f9b84674f", size = 799010, upload-time = "2025-11-03T21:32:55.222Z" }, + { url = "https://files.pythonhosted.org/packages/99/ee/d66dcbc6b628ce4e3f7f0cbbb84603aa2fc0ffc878babc857726b8aab2e9/regex-2025.11.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4e1e592789704459900728d88d41a46fe3969b82ab62945560a31732ffc19a6d", size = 864893, upload-time = "2025-11-03T21:32:57.239Z" }, + { url = "https://files.pythonhosted.org/packages/bf/2d/f238229f1caba7ac87a6c4153d79947fb0261415827ae0f77c304260c7d3/regex-2025.11.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6538241f45eb5a25aa575dbba1069ad786f68a4f2773a29a2bd3dd1f9de787be", size = 911522, upload-time = "2025-11-03T21:32:59.274Z" }, + { url = "https://files.pythonhosted.org/packages/bd/3d/22a4eaba214a917c80e04f6025d26143690f0419511e0116508e24b11c9b/regex-2025.11.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce22519c989bb72a7e6b36a199384c53db7722fe669ba891da75907fe3587db", size = 803272, upload-time = "2025-11-03T21:33:01.393Z" }, + { url = "https://files.pythonhosted.org/packages/84/b1/03188f634a409353a84b5ef49754b97dbcc0c0f6fd6c8ede505a8960a0a4/regex-2025.11.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:66d559b21d3640203ab9075797a55165d79017520685fb407b9234d72ab63c62", size = 787958, upload-time = "2025-11-03T21:33:03.379Z" }, + { url = "https://files.pythonhosted.org/packages/99/6a/27d072f7fbf6fadd59c64d210305e1ff865cc3b78b526fd147db768c553b/regex-2025.11.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:669dcfb2e38f9e8c69507bace46f4889e3abbfd9b0c29719202883c0a603598f", size = 859289, upload-time = "2025-11-03T21:33:05.374Z" }, + { url = "https://files.pythonhosted.org/packages/9a/70/1b3878f648e0b6abe023172dacb02157e685564853cc363d9961bcccde4e/regex-2025.11.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:32f74f35ff0f25a5021373ac61442edcb150731fbaa28286bbc8bb1582c89d02", size = 850026, upload-time = "2025-11-03T21:33:07.131Z" }, + { url = "https://files.pythonhosted.org/packages/dd/d5/68e25559b526b8baab8e66839304ede68ff6727237a47727d240006bd0ff/regex-2025.11.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = 
"sha256:e6c7a21dffba883234baefe91bc3388e629779582038f75d2a5be918e250f0ed", size = 789499, upload-time = "2025-11-03T21:33:09.141Z" }, + { url = "https://files.pythonhosted.org/packages/fc/df/43971264857140a350910d4e33df725e8c94dd9dee8d2e4729fa0d63d49e/regex-2025.11.3-cp314-cp314-win32.whl", hash = "sha256:795ea137b1d809eb6836b43748b12634291c0ed55ad50a7d72d21edf1cd565c4", size = 271604, upload-time = "2025-11-03T21:33:10.9Z" }, + { url = "https://files.pythonhosted.org/packages/01/6f/9711b57dc6894a55faf80a4c1b5aa4f8649805cb9c7aef46f7d27e2b9206/regex-2025.11.3-cp314-cp314-win_amd64.whl", hash = "sha256:9f95fbaa0ee1610ec0fc6b26668e9917a582ba80c52cc6d9ada15e30aa9ab9ad", size = 280320, upload-time = "2025-11-03T21:33:12.572Z" }, + { url = "https://files.pythonhosted.org/packages/f1/7e/f6eaa207d4377481f5e1775cdeb5a443b5a59b392d0065f3417d31d80f87/regex-2025.11.3-cp314-cp314-win_arm64.whl", hash = "sha256:dfec44d532be4c07088c3de2876130ff0fbeeacaa89a137decbbb5f665855a0f", size = 273372, upload-time = "2025-11-03T21:33:14.219Z" }, + { url = "https://files.pythonhosted.org/packages/c3/06/49b198550ee0f5e4184271cee87ba4dfd9692c91ec55289e6282f0f86ccf/regex-2025.11.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:ba0d8a5d7f04f73ee7d01d974d47c5834f8a1b0224390e4fe7c12a3a92a78ecc", size = 491985, upload-time = "2025-11-03T21:33:16.555Z" }, + { url = "https://files.pythonhosted.org/packages/ce/bf/abdafade008f0b1c9da10d934034cb670432d6cf6cbe38bbb53a1cfd6cf8/regex-2025.11.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:442d86cf1cfe4faabf97db7d901ef58347efd004934da045c745e7b5bd57ac49", size = 292669, upload-time = "2025-11-03T21:33:18.32Z" }, + { url = "https://files.pythonhosted.org/packages/f9/ef/0c357bb8edbd2ad8e273fcb9e1761bc37b8acbc6e1be050bebd6475f19c1/regex-2025.11.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:fd0a5e563c756de210bb964789b5abe4f114dacae9104a47e1a649b910361536", size = 291030, upload-time = "2025-11-03T21:33:20.048Z" }, + { url = "https://files.pythonhosted.org/packages/79/06/edbb67257596649b8fb088d6aeacbcb248ac195714b18a65e018bf4c0b50/regex-2025.11.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bf3490bcbb985a1ae97b2ce9ad1c0f06a852d5b19dde9b07bdf25bf224248c95", size = 807674, upload-time = "2025-11-03T21:33:21.797Z" }, + { url = "https://files.pythonhosted.org/packages/f4/d9/ad4deccfce0ea336296bd087f1a191543bb99ee1c53093dcd4c64d951d00/regex-2025.11.3-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3809988f0a8b8c9dcc0f92478d6501fac7200b9ec56aecf0ec21f4a2ec4b6009", size = 873451, upload-time = "2025-11-03T21:33:23.741Z" }, + { url = "https://files.pythonhosted.org/packages/13/75/a55a4724c56ef13e3e04acaab29df26582f6978c000ac9cd6810ad1f341f/regex-2025.11.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f4ff94e58e84aedb9c9fce66d4ef9f27a190285b451420f297c9a09f2b9abee9", size = 914980, upload-time = "2025-11-03T21:33:25.999Z" }, + { url = "https://files.pythonhosted.org/packages/67/1e/a1657ee15bd9116f70d4a530c736983eed997b361e20ecd8f5ca3759d5c5/regex-2025.11.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7eb542fd347ce61e1321b0a6b945d5701528dca0cd9759c2e3bb8bd57e47964d", size = 812852, upload-time = "2025-11-03T21:33:27.852Z" }, + { url = 
"https://files.pythonhosted.org/packages/b8/6f/f7516dde5506a588a561d296b2d0044839de06035bb486b326065b4c101e/regex-2025.11.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:d6c2d5919075a1f2e413c00b056ea0c2f065b3f5fe83c3d07d325ab92dce51d6", size = 795566, upload-time = "2025-11-03T21:33:32.364Z" }, + { url = "https://files.pythonhosted.org/packages/d9/dd/3d10b9e170cc16fb34cb2cef91513cf3df65f440b3366030631b2984a264/regex-2025.11.3-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:3f8bf11a4827cc7ce5a53d4ef6cddd5ad25595d3c1435ef08f76825851343154", size = 868463, upload-time = "2025-11-03T21:33:34.459Z" }, + { url = "https://files.pythonhosted.org/packages/f5/8e/935e6beff1695aa9085ff83195daccd72acc82c81793df480f34569330de/regex-2025.11.3-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:22c12d837298651e5550ac1d964e4ff57c3f56965fc1812c90c9fb2028eaf267", size = 854694, upload-time = "2025-11-03T21:33:36.793Z" }, + { url = "https://files.pythonhosted.org/packages/92/12/10650181a040978b2f5720a6a74d44f841371a3d984c2083fc1752e4acf6/regex-2025.11.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:62ba394a3dda9ad41c7c780f60f6e4a70988741415ae96f6d1bf6c239cf01379", size = 799691, upload-time = "2025-11-03T21:33:39.079Z" }, + { url = "https://files.pythonhosted.org/packages/67/90/8f37138181c9a7690e7e4cb388debbd389342db3c7381d636d2875940752/regex-2025.11.3-cp314-cp314t-win32.whl", hash = "sha256:4bf146dca15cdd53224a1bf46d628bd7590e4a07fbb69e720d561aea43a32b38", size = 274583, upload-time = "2025-11-03T21:33:41.302Z" }, + { url = "https://files.pythonhosted.org/packages/8f/cd/867f5ec442d56beb56f5f854f40abcfc75e11d10b11fdb1869dd39c63aaf/regex-2025.11.3-cp314-cp314t-win_amd64.whl", hash = "sha256:adad1a1bcf1c9e76346e091d22d23ac54ef28e1365117d99521631078dfec9de", size = 284286, upload-time = "2025-11-03T21:33:43.324Z" }, + { url = "https://files.pythonhosted.org/packages/20/31/32c0c4610cbc070362bf1d2e4ea86d1ea29014d400a6d6c2486fcfd57766/regex-2025.11.3-cp314-cp314t-win_arm64.whl", hash = "sha256:c54f768482cef41e219720013cd05933b6f971d9562544d691c68699bf2b6801", size = 274741, upload-time = "2025-11-03T21:33:45.557Z" }, +] + +[[package]] +name = "requests" +version = "2.32.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, +] + +[[package]] +name = "rsa" +version = "4.7.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/db/b5/475c45a58650b0580421746504b680cd2db4e81bc941e94ca53785250269/rsa-4.7.2.tar.gz", hash = "sha256:9d689e6ca1b3038bc82bf8d23e944b6b6037bc02301a574935b2dd946e0353b9", size = 39711, upload-time = "2021-02-24T10:55:05.846Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/e9/93/0c0f002031f18b53af7a6166103c02b9c0667be528944137cc954ec921b3/rsa-4.7.2-py3-none-any.whl", hash = "sha256:78f9a9bf4e7be0c5ded4583326e7461e3a3c5aae24073648b4bdfa797d78c9d2", size = 34505, upload-time = "2021-02-24T10:55:03.55Z" }, +] + +[[package]] +name = "ruamel-yaml" +version = "0.18.17" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ruamel-yaml-clib", marker = "python_full_version < '3.15' and platform_python_implementation == 'CPython'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3a/2b/7a1f1ebcd6b3f14febdc003e658778d81e76b40df2267904ee6b13f0c5c6/ruamel_yaml-0.18.17.tar.gz", hash = "sha256:9091cd6e2d93a3a4b157ddb8fabf348c3de7f1fb1381346d985b6b247dcd8d3c", size = 149602, upload-time = "2025-12-17T20:02:55.757Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/fe/b6045c782f1fd1ae317d2a6ca1884857ce5c20f59befe6ab25a8603c43a7/ruamel_yaml-0.18.17-py3-none-any.whl", hash = "sha256:9c8ba9eb3e793efdf924b60d521820869d5bf0cb9c6f1b82d82de8295e290b9d", size = 121594, upload-time = "2025-12-17T20:02:07.657Z" }, +] + +[[package]] +name = "ruamel-yaml-clib" +version = "0.2.15" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ea/97/60fda20e2fb54b83a61ae14648b0817c8f5d84a3821e40bfbdae1437026a/ruamel_yaml_clib-0.2.15.tar.gz", hash = "sha256:46e4cc8c43ef6a94885f72512094e482114a8a706d3c555a34ed4b0d20200600", size = 225794, upload-time = "2025-11-16T16:12:59.761Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/72/4b/5fde11a0722d676e469d3d6f78c6a17591b9c7e0072ca359801c4bd17eee/ruamel_yaml_clib-0.2.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cb15a2e2a90c8475df45c0949793af1ff413acfb0a716b8b94e488ea95ce7cff", size = 149088, upload-time = "2025-11-16T16:13:22.836Z" }, + { url = "https://files.pythonhosted.org/packages/85/82/4d08ac65ecf0ef3b046421985e66301a242804eb9a62c93ca3437dc94ee0/ruamel_yaml_clib-0.2.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:64da03cbe93c1e91af133f5bec37fd24d0d4ba2418eaf970d7166b0a26a148a2", size = 134553, upload-time = "2025-11-16T16:13:24.151Z" }, + { url = "https://files.pythonhosted.org/packages/b9/cb/22366d68b280e281a932403b76da7a988108287adff2bfa5ce881200107a/ruamel_yaml_clib-0.2.15-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f6d3655e95a80325b84c4e14c080b2470fe4f33b6846f288379ce36154993fb1", size = 737468, upload-time = "2025-11-16T20:22:47.335Z" }, + { url = "https://files.pythonhosted.org/packages/71/73/81230babf8c9e33770d43ed9056f603f6f5f9665aea4177a2c30ae48e3f3/ruamel_yaml_clib-0.2.15-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:71845d377c7a47afc6592aacfea738cc8a7e876d586dfba814501d8c53c1ba60", size = 753349, upload-time = "2025-11-16T16:13:26.269Z" }, + { url = "https://files.pythonhosted.org/packages/61/62/150c841f24cda9e30f588ef396ed83f64cfdc13b92d2f925bb96df337ba9/ruamel_yaml_clib-0.2.15-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11e5499db1ccbc7f4b41f0565e4f799d863ea720e01d3e99fa0b7b5fcd7802c9", size = 788211, upload-time = "2025-11-16T16:13:27.441Z" }, + { url = "https://files.pythonhosted.org/packages/30/93/e79bd9cbecc3267499d9ead919bd61f7ddf55d793fb5ef2b1d7d92444f35/ruamel_yaml_clib-0.2.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:4b293a37dc97e2b1e8a1aec62792d1e52027087c8eea4fc7b5abd2bdafdd6642", size = 743203, upload-time = "2025-11-16T16:13:28.671Z" }, + { url = "https://files.pythonhosted.org/packages/8d/06/1eb640065c3a27ce92d76157f8efddb184bd484ed2639b712396a20d6dce/ruamel_yaml_clib-0.2.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:512571ad41bba04eac7268fe33f7f4742210ca26a81fe0c75357fa682636c690", size = 747292, upload-time = "2025-11-16T20:22:48.584Z" }, + { url = "https://files.pythonhosted.org/packages/a5/21/ee353e882350beab65fcc47a91b6bdc512cace4358ee327af2962892ff16/ruamel_yaml_clib-0.2.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e5e9f630c73a490b758bf14d859a39f375e6999aea5ddd2e2e9da89b9953486a", size = 771624, upload-time = "2025-11-16T16:13:29.853Z" }, + { url = "https://files.pythonhosted.org/packages/57/34/cc1b94057aa867c963ecf9ea92ac59198ec2ee3a8d22a126af0b4d4be712/ruamel_yaml_clib-0.2.15-cp312-cp312-win32.whl", hash = "sha256:f4421ab780c37210a07d138e56dd4b51f8642187cdfb433eb687fe8c11de0144", size = 100342, upload-time = "2025-11-16T16:13:31.067Z" }, + { url = "https://files.pythonhosted.org/packages/b3/e5/8925a4208f131b218f9a7e459c0d6fcac8324ae35da269cb437894576366/ruamel_yaml_clib-0.2.15-cp312-cp312-win_amd64.whl", hash = "sha256:2b216904750889133d9222b7b873c199d48ecbb12912aca78970f84a5aa1a4bc", size = 119013, upload-time = "2025-11-16T16:13:32.164Z" }, + { url = "https://files.pythonhosted.org/packages/17/5e/2f970ce4c573dc30c2f95825f2691c96d55560268ddc67603dc6ea2dd08e/ruamel_yaml_clib-0.2.15-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4dcec721fddbb62e60c2801ba08c87010bd6b700054a09998c4d09c08147b8fb", size = 147450, upload-time = "2025-11-16T16:13:33.542Z" }, + { url = "https://files.pythonhosted.org/packages/d6/03/a1baa5b94f71383913f21b96172fb3a2eb5576a4637729adbf7cd9f797f8/ruamel_yaml_clib-0.2.15-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:65f48245279f9bb301d1276f9679b82e4c080a1ae25e679f682ac62446fac471", size = 133139, upload-time = "2025-11-16T16:13:34.587Z" }, + { url = "https://files.pythonhosted.org/packages/dc/19/40d676802390f85784235a05788fd28940923382e3f8b943d25febbb98b7/ruamel_yaml_clib-0.2.15-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:46895c17ead5e22bea5e576f1db7e41cb273e8d062c04a6a49013d9f60996c25", size = 731474, upload-time = "2025-11-16T20:22:49.934Z" }, + { url = "https://files.pythonhosted.org/packages/ce/bb/6ef5abfa43b48dd55c30d53e997f8f978722f02add61efba31380d73e42e/ruamel_yaml_clib-0.2.15-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3eb199178b08956e5be6288ee0b05b2fb0b5c1f309725ad25d9c6ea7e27f962a", size = 748047, upload-time = "2025-11-16T16:13:35.633Z" }, + { url = "https://files.pythonhosted.org/packages/ff/5d/e4f84c9c448613e12bd62e90b23aa127ea4c46b697f3d760acc32cb94f25/ruamel_yaml_clib-0.2.15-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d1032919280ebc04a80e4fb1e93f7a738129857eaec9448310e638c8bccefcf", size = 782129, upload-time = "2025-11-16T16:13:36.781Z" }, + { url = "https://files.pythonhosted.org/packages/de/4b/e98086e88f76c00c88a6bcf15eae27a1454f661a9eb72b111e6bbb69024d/ruamel_yaml_clib-0.2.15-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ab0df0648d86a7ecbd9c632e8f8d6b21bb21b5fc9d9e095c796cacf32a728d2d", size = 736848, upload-time = "2025-11-16T16:13:37.952Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/5c/5964fcd1fd9acc53b7a3a5d9a05ea4f95ead9495d980003a557deb9769c7/ruamel_yaml_clib-0.2.15-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:331fb180858dd8534f0e61aa243b944f25e73a4dae9962bd44c46d1761126bbf", size = 741630, upload-time = "2025-11-16T20:22:51.718Z" }, + { url = "https://files.pythonhosted.org/packages/07/1e/99660f5a30fceb58494598e7d15df883a07292346ef5696f0c0ae5dee8c6/ruamel_yaml_clib-0.2.15-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fd4c928ddf6bce586285daa6d90680b9c291cfd045fc40aad34e445d57b1bf51", size = 766619, upload-time = "2025-11-16T16:13:39.178Z" }, + { url = "https://files.pythonhosted.org/packages/36/2f/fa0344a9327b58b54970e56a27b32416ffbcfe4dcc0700605516708579b2/ruamel_yaml_clib-0.2.15-cp313-cp313-win32.whl", hash = "sha256:bf0846d629e160223805db9fe8cc7aec16aaa11a07310c50c8c7164efa440aec", size = 100171, upload-time = "2025-11-16T16:13:40.456Z" }, + { url = "https://files.pythonhosted.org/packages/06/c4/c124fbcef0684fcf3c9b72374c2a8c35c94464d8694c50f37eef27f5a145/ruamel_yaml_clib-0.2.15-cp313-cp313-win_amd64.whl", hash = "sha256:45702dfbea1420ba3450bb3dd9a80b33f0badd57539c6aac09f42584303e0db6", size = 118845, upload-time = "2025-11-16T16:13:41.481Z" }, + { url = "https://files.pythonhosted.org/packages/3e/bd/ab8459c8bb759c14a146990bf07f632c1cbec0910d4853feeee4be2ab8bb/ruamel_yaml_clib-0.2.15-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:753faf20b3a5906faf1fc50e4ddb8c074cb9b251e00b14c18b28492f933ac8ef", size = 147248, upload-time = "2025-11-16T16:13:42.872Z" }, + { url = "https://files.pythonhosted.org/packages/69/f2/c4cec0a30f1955510fde498aac451d2e52b24afdbcb00204d3a951b772c3/ruamel_yaml_clib-0.2.15-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:480894aee0b29752560a9de46c0e5f84a82602f2bc5c6cde8db9a345319acfdf", size = 133764, upload-time = "2025-11-16T16:13:43.932Z" }, + { url = "https://files.pythonhosted.org/packages/82/c7/2480d062281385a2ea4f7cc9476712446e0c548cd74090bff92b4b49e898/ruamel_yaml_clib-0.2.15-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:4d3b58ab2454b4747442ac76fab66739c72b1e2bb9bd173d7694b9f9dbc9c000", size = 730537, upload-time = "2025-11-16T20:22:52.918Z" }, + { url = "https://files.pythonhosted.org/packages/75/08/e365ee305367559f57ba6179d836ecc3d31c7d3fdff2a40ebf6c32823a1f/ruamel_yaml_clib-0.2.15-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bfd309b316228acecfa30670c3887dcedf9b7a44ea39e2101e75d2654522acd4", size = 746944, upload-time = "2025-11-16T16:13:45.338Z" }, + { url = "https://files.pythonhosted.org/packages/a1/5c/8b56b08db91e569d0a4fbfa3e492ed2026081bdd7e892f63ba1c88a2f548/ruamel_yaml_clib-0.2.15-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2812ff359ec1f30129b62372e5f22a52936fac13d5d21e70373dbca5d64bb97c", size = 778249, upload-time = "2025-11-16T16:13:46.871Z" }, + { url = "https://files.pythonhosted.org/packages/6a/1d/70dbda370bd0e1a92942754c873bd28f513da6198127d1736fa98bb2a16f/ruamel_yaml_clib-0.2.15-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7e74ea87307303ba91073b63e67f2c667e93f05a8c63079ee5b7a5c8d0d7b043", size = 737140, upload-time = "2025-11-16T16:13:48.349Z" }, + { url = "https://files.pythonhosted.org/packages/5b/87/822d95874216922e1120afb9d3fafa795a18fdd0c444f5c4c382f6dac761/ruamel_yaml_clib-0.2.15-cp314-cp314-musllinux_1_2_i686.whl", hash = 
"sha256:713cd68af9dfbe0bb588e144a61aad8dcc00ef92a82d2e87183ca662d242f524", size = 741070, upload-time = "2025-11-16T20:22:54.151Z" }, + { url = "https://files.pythonhosted.org/packages/b9/17/4e01a602693b572149f92c983c1f25bd608df02c3f5cf50fd1f94e124a59/ruamel_yaml_clib-0.2.15-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:542d77b72786a35563f97069b9379ce762944e67055bea293480f7734b2c7e5e", size = 765882, upload-time = "2025-11-16T16:13:49.526Z" }, + { url = "https://files.pythonhosted.org/packages/9f/17/7999399081d39ebb79e807314de6b611e1d1374458924eb2a489c01fc5ad/ruamel_yaml_clib-0.2.15-cp314-cp314-win32.whl", hash = "sha256:424ead8cef3939d690c4b5c85ef5b52155a231ff8b252961b6516ed7cf05f6aa", size = 102567, upload-time = "2025-11-16T16:13:50.78Z" }, + { url = "https://files.pythonhosted.org/packages/d2/67/be582a7370fdc9e6846c5be4888a530dcadd055eef5b932e0e85c33c7d73/ruamel_yaml_clib-0.2.15-cp314-cp314-win_amd64.whl", hash = "sha256:ac9b8d5fa4bb7fd2917ab5027f60d4234345fd366fe39aa711d5dca090aa1467", size = 122847, upload-time = "2025-11-16T16:13:51.807Z" }, +] + +[[package]] +name = "ruff" +version = "0.14.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/57/08/52232a877978dd8f9cf2aeddce3e611b40a63287dfca29b6b8da791f5e8d/ruff-0.14.10.tar.gz", hash = "sha256:9a2e830f075d1a42cd28420d7809ace390832a490ed0966fe373ba288e77aaf4", size = 5859763, upload-time = "2025-12-18T19:28:57.98Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/60/01/933704d69f3f05ee16ef11406b78881733c186fe14b6a46b05cfcaf6d3b2/ruff-0.14.10-py3-none-linux_armv6l.whl", hash = "sha256:7a3ce585f2ade3e1f29ec1b92df13e3da262178df8c8bdf876f48fa0e8316c49", size = 13527080, upload-time = "2025-12-18T19:29:25.642Z" }, + { url = "https://files.pythonhosted.org/packages/df/58/a0349197a7dfa603ffb7f5b0470391efa79ddc327c1e29c4851e85b09cc5/ruff-0.14.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:674f9be9372907f7257c51f1d4fc902cb7cf014b9980152b802794317941f08f", size = 13797320, upload-time = "2025-12-18T19:29:02.571Z" }, + { url = "https://files.pythonhosted.org/packages/7b/82/36be59f00a6082e38c23536df4e71cdbc6af8d7c707eade97fcad5c98235/ruff-0.14.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d85713d522348837ef9df8efca33ccb8bd6fcfc86a2cde3ccb4bc9d28a18003d", size = 12918434, upload-time = "2025-12-18T19:28:51.202Z" }, + { url = "https://files.pythonhosted.org/packages/a6/00/45c62a7f7e34da92a25804f813ebe05c88aa9e0c25e5cb5a7d23dd7450e3/ruff-0.14.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6987ebe0501ae4f4308d7d24e2d0fe3d7a98430f5adfd0f1fead050a740a3a77", size = 13371961, upload-time = "2025-12-18T19:29:04.991Z" }, + { url = "https://files.pythonhosted.org/packages/40/31/a5906d60f0405f7e57045a70f2d57084a93ca7425f22e1d66904769d1628/ruff-0.14.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16a01dfb7b9e4eee556fbfd5392806b1b8550c9b4a9f6acd3dbe6812b193c70a", size = 13275629, upload-time = "2025-12-18T19:29:21.381Z" }, + { url = "https://files.pythonhosted.org/packages/3e/60/61c0087df21894cf9d928dc04bcd4fb10e8b2e8dca7b1a276ba2155b2002/ruff-0.14.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7165d31a925b7a294465fa81be8c12a0e9b60fb02bf177e79067c867e71f8b1f", size = 14029234, upload-time = "2025-12-18T19:29:00.132Z" }, + { url = 
"https://files.pythonhosted.org/packages/44/84/77d911bee3b92348b6e5dab5a0c898d87084ea03ac5dc708f46d88407def/ruff-0.14.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c561695675b972effb0c0a45db233f2c816ff3da8dcfbe7dfc7eed625f218935", size = 15449890, upload-time = "2025-12-18T19:28:53.573Z" }, + { url = "https://files.pythonhosted.org/packages/e9/36/480206eaefa24a7ec321582dda580443a8f0671fdbf6b1c80e9c3e93a16a/ruff-0.14.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4bb98fcbbc61725968893682fd4df8966a34611239c9fd07a1f6a07e7103d08e", size = 15123172, upload-time = "2025-12-18T19:29:23.453Z" }, + { url = "https://files.pythonhosted.org/packages/5c/38/68e414156015ba80cef5473d57919d27dfb62ec804b96180bafdeaf0e090/ruff-0.14.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f24b47993a9d8cb858429e97bdf8544c78029f09b520af615c1d261bf827001d", size = 14460260, upload-time = "2025-12-18T19:29:27.808Z" }, + { url = "https://files.pythonhosted.org/packages/b3/19/9e050c0dca8aba824d67cc0db69fb459c28d8cd3f6855b1405b3f29cc91d/ruff-0.14.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59aabd2e2c4fd614d2862e7939c34a532c04f1084476d6833dddef4afab87e9f", size = 14229978, upload-time = "2025-12-18T19:29:11.32Z" }, + { url = "https://files.pythonhosted.org/packages/51/eb/e8dd1dd6e05b9e695aa9dd420f4577debdd0f87a5ff2fedda33c09e9be8c/ruff-0.14.10-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:213db2b2e44be8625002dbea33bb9c60c66ea2c07c084a00d55732689d697a7f", size = 14338036, upload-time = "2025-12-18T19:29:09.184Z" }, + { url = "https://files.pythonhosted.org/packages/6a/12/f3e3a505db7c19303b70af370d137795fcfec136d670d5de5391e295c134/ruff-0.14.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b914c40ab64865a17a9a5b67911d14df72346a634527240039eb3bd650e5979d", size = 13264051, upload-time = "2025-12-18T19:29:13.431Z" }, + { url = "https://files.pythonhosted.org/packages/08/64/8c3a47eaccfef8ac20e0484e68e0772013eb85802f8a9f7603ca751eb166/ruff-0.14.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1484983559f026788e3a5c07c81ef7d1e97c1c78ed03041a18f75df104c45405", size = 13283998, upload-time = "2025-12-18T19:29:06.994Z" }, + { url = "https://files.pythonhosted.org/packages/12/84/534a5506f4074e5cc0529e5cd96cfc01bb480e460c7edf5af70d2bcae55e/ruff-0.14.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c70427132db492d25f982fffc8d6c7535cc2fd2c83fc8888f05caaa248521e60", size = 13601891, upload-time = "2025-12-18T19:28:55.811Z" }, + { url = "https://files.pythonhosted.org/packages/0d/1e/14c916087d8598917dbad9b2921d340f7884824ad6e9c55de948a93b106d/ruff-0.14.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5bcf45b681e9f1ee6445d317ce1fa9d6cba9a6049542d1c3d5b5958986be8830", size = 14336660, upload-time = "2025-12-18T19:29:16.531Z" }, + { url = "https://files.pythonhosted.org/packages/f2/1c/d7b67ab43f30013b47c12b42d1acd354c195351a3f7a1d67f59e54227ede/ruff-0.14.10-py3-none-win32.whl", hash = "sha256:104c49fc7ab73f3f3a758039adea978869a918f31b73280db175b43a2d9b51d6", size = 13196187, upload-time = "2025-12-18T19:29:19.006Z" }, + { url = "https://files.pythonhosted.org/packages/fb/9c/896c862e13886fae2af961bef3e6312db9ebc6adc2b156fe95e615dee8c1/ruff-0.14.10-py3-none-win_amd64.whl", hash = "sha256:466297bd73638c6bdf06485683e812db1c00c7ac96d4ddd0294a338c62fdc154", size = 14661283, upload-time = "2025-12-18T19:29:30.16Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/31/b0e29d572670dca3674eeee78e418f20bdf97fa8aa9ea71380885e175ca0/ruff-0.14.10-py3-none-win_arm64.whl", hash = "sha256:e51d046cf6dda98a4633b8a8a771451107413b0f07183b2bef03f075599e44e6", size = 13729839, upload-time = "2025-12-18T19:28:48.636Z" }, +] + +[[package]] +name = "s3transfer" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/05/04/74127fc843314818edfa81b5540e26dd537353b123a4edc563109d8f17dd/s3transfer-0.16.0.tar.gz", hash = "sha256:8e990f13268025792229cd52fa10cb7163744bf56e719e0b9cb925ab79abf920", size = 153827, upload-time = "2025-12-01T02:30:59.114Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fc/51/727abb13f44c1fcf6d145979e1535a35794db0f6e450a0cb46aa24732fe2/s3transfer-0.16.0-py3-none-any.whl", hash = "sha256:18e25d66fed509e3868dc1572b3f427ff947dd2c56f844a5bf09481ad3f3b2fe", size = 86830, upload-time = "2025-12-01T02:30:57.729Z" }, +] + +[[package]] +name = "scikit-learn" +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "joblib" }, + { name = "numpy" }, + { name = "scipy" }, + { name = "threadpoolctl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0e/d4/40988bf3b8e34feec1d0e6a051446b1f66225f8529b9309becaeef62b6c4/scikit_learn-1.8.0.tar.gz", hash = "sha256:9bccbb3b40e3de10351f8f5068e105d0f4083b1a65fa07b6634fbc401a6287fd", size = 7335585, upload-time = "2025-12-10T07:08:53.618Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/90/74/e6a7cc4b820e95cc38cf36cd74d5aa2b42e8ffc2d21fe5a9a9c45c1c7630/scikit_learn-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5fb63362b5a7ddab88e52b6dbb47dac3fd7dafeee740dc6c8d8a446ddedade8e", size = 8548242, upload-time = "2025-12-10T07:07:51.568Z" }, + { url = "https://files.pythonhosted.org/packages/49/d8/9be608c6024d021041c7f0b3928d4749a706f4e2c3832bbede4fb4f58c95/scikit_learn-1.8.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:5025ce924beccb28298246e589c691fe1b8c1c96507e6d27d12c5fadd85bfd76", size = 8079075, upload-time = "2025-12-10T07:07:53.697Z" }, + { url = "https://files.pythonhosted.org/packages/dd/47/f187b4636ff80cc63f21cd40b7b2d177134acaa10f6bb73746130ee8c2e5/scikit_learn-1.8.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4496bb2cf7a43ce1a2d7524a79e40bc5da45cf598dbf9545b7e8316ccba47bb4", size = 8660492, upload-time = "2025-12-10T07:07:55.574Z" }, + { url = "https://files.pythonhosted.org/packages/97/74/b7a304feb2b49df9fafa9382d4d09061a96ee9a9449a7cbea7988dda0828/scikit_learn-1.8.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0bcfe4d0d14aec44921545fd2af2338c7471de9cb701f1da4c9d85906ab847a", size = 8931904, upload-time = "2025-12-10T07:07:57.666Z" }, + { url = "https://files.pythonhosted.org/packages/9f/c4/0ab22726a04ede56f689476b760f98f8f46607caecff993017ac1b64aa5d/scikit_learn-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:35c007dedb2ffe38fe3ee7d201ebac4a2deccd2408e8621d53067733e3c74809", size = 8019359, upload-time = "2025-12-10T07:07:59.838Z" }, + { url = "https://files.pythonhosted.org/packages/24/90/344a67811cfd561d7335c1b96ca21455e7e472d281c3c279c4d3f2300236/scikit_learn-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:8c497fff237d7b4e07e9ef1a640887fa4fb765647f86fbe00f969ff6280ce2bb", size = 7641898, upload-time = "2025-12-10T07:08:01.36Z" }, + { url = 
"https://files.pythonhosted.org/packages/03/aa/e22e0768512ce9255eba34775be2e85c2048da73da1193e841707f8f039c/scikit_learn-1.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0d6ae97234d5d7079dc0040990a6f7aeb97cb7fa7e8945f1999a429b23569e0a", size = 8513770, upload-time = "2025-12-10T07:08:03.251Z" }, + { url = "https://files.pythonhosted.org/packages/58/37/31b83b2594105f61a381fc74ca19e8780ee923be2d496fcd8d2e1147bd99/scikit_learn-1.8.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:edec98c5e7c128328124a029bceb09eda2d526997780fef8d65e9a69eead963e", size = 8044458, upload-time = "2025-12-10T07:08:05.336Z" }, + { url = "https://files.pythonhosted.org/packages/2d/5a/3f1caed8765f33eabb723596666da4ebbf43d11e96550fb18bdec42b467b/scikit_learn-1.8.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:74b66d8689d52ed04c271e1329f0c61635bcaf5b926db9b12d58914cdc01fe57", size = 8610341, upload-time = "2025-12-10T07:08:07.732Z" }, + { url = "https://files.pythonhosted.org/packages/38/cf/06896db3f71c75902a8e9943b444a56e727418f6b4b4a90c98c934f51ed4/scikit_learn-1.8.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8fdf95767f989b0cfedb85f7ed8ca215d4be728031f56ff5a519ee1e3276dc2e", size = 8900022, upload-time = "2025-12-10T07:08:09.862Z" }, + { url = "https://files.pythonhosted.org/packages/1c/f9/9b7563caf3ec8873e17a31401858efab6b39a882daf6c1bfa88879c0aa11/scikit_learn-1.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:2de443b9373b3b615aec1bb57f9baa6bb3a9bd093f1269ba95c17d870422b271", size = 7989409, upload-time = "2025-12-10T07:08:12.028Z" }, + { url = "https://files.pythonhosted.org/packages/49/bd/1f4001503650e72c4f6009ac0c4413cb17d2d601cef6f71c0453da2732fc/scikit_learn-1.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:eddde82a035681427cbedded4e6eff5e57fa59216c2e3e90b10b19ab1d0a65c3", size = 7619760, upload-time = "2025-12-10T07:08:13.688Z" }, + { url = "https://files.pythonhosted.org/packages/d2/7d/a630359fc9dcc95496588c8d8e3245cc8fd81980251079bc09c70d41d951/scikit_learn-1.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7cc267b6108f0a1499a734167282c00c4ebf61328566b55ef262d48e9849c735", size = 8826045, upload-time = "2025-12-10T07:08:15.215Z" }, + { url = "https://files.pythonhosted.org/packages/cc/56/a0c86f6930cfcd1c7054a2bc417e26960bb88d32444fe7f71d5c2cfae891/scikit_learn-1.8.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:fe1c011a640a9f0791146011dfd3c7d9669785f9fed2b2a5f9e207536cf5c2fd", size = 8420324, upload-time = "2025-12-10T07:08:17.561Z" }, + { url = "https://files.pythonhosted.org/packages/46/1e/05962ea1cebc1cf3876667ecb14c283ef755bf409993c5946ade3b77e303/scikit_learn-1.8.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:72358cce49465d140cc4e7792015bb1f0296a9742d5622c67e31399b75468b9e", size = 8680651, upload-time = "2025-12-10T07:08:19.952Z" }, + { url = "https://files.pythonhosted.org/packages/fe/56/a85473cd75f200c9759e3a5f0bcab2d116c92a8a02ee08ccd73b870f8bb4/scikit_learn-1.8.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:80832434a6cc114f5219211eec13dcbc16c2bac0e31ef64c6d346cde3cf054cb", size = 8925045, upload-time = "2025-12-10T07:08:22.11Z" }, + { url = "https://files.pythonhosted.org/packages/cc/b7/64d8cfa896c64435ae57f4917a548d7ac7a44762ff9802f75a79b77cb633/scikit_learn-1.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ee787491dbfe082d9c3013f01f5991658b0f38aa8177e4cd4bf434c58f551702", size = 8507994, upload-time = "2025-12-10T07:08:23.943Z" }, + { url 
= "https://files.pythonhosted.org/packages/5e/37/e192ea709551799379958b4c4771ec507347027bb7c942662c7fbeba31cb/scikit_learn-1.8.0-cp313-cp313t-win_arm64.whl", hash = "sha256:bf97c10a3f5a7543f9b88cbf488d33d175e9146115a451ae34568597ba33dcde", size = 7869518, upload-time = "2025-12-10T07:08:25.71Z" }, + { url = "https://files.pythonhosted.org/packages/24/05/1af2c186174cc92dcab2233f327336058c077d38f6fe2aceb08e6ab4d509/scikit_learn-1.8.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:c22a2da7a198c28dd1a6e1136f19c830beab7fdca5b3e5c8bba8394f8a5c45b3", size = 8528667, upload-time = "2025-12-10T07:08:27.541Z" }, + { url = "https://files.pythonhosted.org/packages/a8/25/01c0af38fe969473fb292bba9dc2b8f9b451f3112ff242c647fee3d0dfe7/scikit_learn-1.8.0-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:6b595b07a03069a2b1740dc08c2299993850ea81cce4fe19b2421e0c970de6b7", size = 8066524, upload-time = "2025-12-10T07:08:29.822Z" }, + { url = "https://files.pythonhosted.org/packages/be/ce/a0623350aa0b68647333940ee46fe45086c6060ec604874e38e9ab7d8e6c/scikit_learn-1.8.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:29ffc74089f3d5e87dfca4c2c8450f88bdc61b0fc6ed5d267f3988f19a1309f6", size = 8657133, upload-time = "2025-12-10T07:08:31.865Z" }, + { url = "https://files.pythonhosted.org/packages/b8/cb/861b41341d6f1245e6ca80b1c1a8c4dfce43255b03df034429089ca2a2c5/scikit_learn-1.8.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fb65db5d7531bccf3a4f6bec3462223bea71384e2cda41da0f10b7c292b9e7c4", size = 8923223, upload-time = "2025-12-10T07:08:34.166Z" }, + { url = "https://files.pythonhosted.org/packages/76/18/a8def8f91b18cd1ba6e05dbe02540168cb24d47e8dcf69e8d00b7da42a08/scikit_learn-1.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:56079a99c20d230e873ea40753102102734c5953366972a71d5cb39a32bc40c6", size = 8096518, upload-time = "2025-12-10T07:08:36.339Z" }, + { url = "https://files.pythonhosted.org/packages/d1/77/482076a678458307f0deb44e29891d6022617b2a64c840c725495bee343f/scikit_learn-1.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:3bad7565bc9cf37ce19a7c0d107742b320c1285df7aab1a6e2d28780df167242", size = 7754546, upload-time = "2025-12-10T07:08:38.128Z" }, + { url = "https://files.pythonhosted.org/packages/2d/d1/ef294ca754826daa043b2a104e59960abfab4cf653891037d19dd5b6f3cf/scikit_learn-1.8.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:4511be56637e46c25721e83d1a9cea9614e7badc7040c4d573d75fbe257d6fd7", size = 8848305, upload-time = "2025-12-10T07:08:41.013Z" }, + { url = "https://files.pythonhosted.org/packages/5b/e2/b1f8b05138ee813b8e1a4149f2f0d289547e60851fd1bb268886915adbda/scikit_learn-1.8.0-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:a69525355a641bf8ef136a7fa447672fb54fe8d60cab5538d9eb7c6438543fb9", size = 8432257, upload-time = "2025-12-10T07:08:42.873Z" }, + { url = "https://files.pythonhosted.org/packages/26/11/c32b2138a85dcb0c99f6afd13a70a951bfdff8a6ab42d8160522542fb647/scikit_learn-1.8.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c2656924ec73e5939c76ac4c8b026fc203b83d8900362eb2599d8aee80e4880f", size = 8678673, upload-time = "2025-12-10T07:08:45.362Z" }, + { url = "https://files.pythonhosted.org/packages/c7/57/51f2384575bdec454f4fe4e7a919d696c9ebce914590abf3e52d47607ab8/scikit_learn-1.8.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15fc3b5d19cc2be65404786857f2e13c70c83dd4782676dd6814e3b89dc8f5b9", size = 8922467, upload-time = "2025-12-10T07:08:47.408Z" }, + { 
url = "https://files.pythonhosted.org/packages/35/4d/748c9e2872637a57981a04adc038dacaa16ba8ca887b23e34953f0b3f742/scikit_learn-1.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:00d6f1d66fbcf4eba6e356e1420d33cc06c70a45bb1363cd6f6a8e4ebbbdece2", size = 8774395, upload-time = "2025-12-10T07:08:49.337Z" }, + { url = "https://files.pythonhosted.org/packages/60/22/d7b2ebe4704a5e50790ba089d5c2ae308ab6bb852719e6c3bd4f04c3a363/scikit_learn-1.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:f28dd15c6bb0b66ba09728cf09fd8736c304be29409bd8445a080c1280619e8c", size = 8002647, upload-time = "2025-12-10T07:08:51.601Z" }, +] + +[[package]] +name = "scipy" +version = "1.16.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0a/ca/d8ace4f98322d01abcd52d381134344bf7b431eba7ed8b42bdea5a3c2ac9/scipy-1.16.3.tar.gz", hash = "sha256:01e87659402762f43bd2fee13370553a17ada367d42e7487800bf2916535aecb", size = 30597883, upload-time = "2025-10-28T17:38:54.068Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/40/41/5bf55c3f386b1643812f3a5674edf74b26184378ef0f3e7c7a09a7e2ca7f/scipy-1.16.3-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:81fc5827606858cf71446a5e98715ba0e11f0dbc83d71c7409d05486592a45d6", size = 36659043, upload-time = "2025-10-28T17:32:40.285Z" }, + { url = "https://files.pythonhosted.org/packages/1e/0f/65582071948cfc45d43e9870bf7ca5f0e0684e165d7c9ef4e50d783073eb/scipy-1.16.3-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:c97176013d404c7346bf57874eaac5187d969293bf40497140b0a2b2b7482e07", size = 28898986, upload-time = "2025-10-28T17:32:45.325Z" }, + { url = "https://files.pythonhosted.org/packages/96/5e/36bf3f0ac298187d1ceadde9051177d6a4fe4d507e8f59067dc9dd39e650/scipy-1.16.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:2b71d93c8a9936046866acebc915e2af2e292b883ed6e2cbe5c34beb094b82d9", size = 20889814, upload-time = "2025-10-28T17:32:49.277Z" }, + { url = "https://files.pythonhosted.org/packages/80/35/178d9d0c35394d5d5211bbff7ac4f2986c5488b59506fef9e1de13ea28d3/scipy-1.16.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:3d4a07a8e785d80289dfe66b7c27d8634a773020742ec7187b85ccc4b0e7b686", size = 23565795, upload-time = "2025-10-28T17:32:53.337Z" }, + { url = "https://files.pythonhosted.org/packages/fa/46/d1146ff536d034d02f83c8afc3c4bab2eddb634624d6529a8512f3afc9da/scipy-1.16.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0553371015692a898e1aa858fed67a3576c34edefa6b7ebdb4e9dde49ce5c203", size = 33349476, upload-time = "2025-10-28T17:32:58.353Z" }, + { url = "https://files.pythonhosted.org/packages/79/2e/415119c9ab3e62249e18c2b082c07aff907a273741b3f8160414b0e9193c/scipy-1.16.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:72d1717fd3b5e6ec747327ce9bda32d5463f472c9dce9f54499e81fbd50245a1", size = 35676692, upload-time = "2025-10-28T17:33:03.88Z" }, + { url = "https://files.pythonhosted.org/packages/27/82/df26e44da78bf8d2aeaf7566082260cfa15955a5a6e96e6a29935b64132f/scipy-1.16.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1fb2472e72e24d1530debe6ae078db70fb1605350c88a3d14bc401d6306dbffe", size = 36019345, upload-time = "2025-10-28T17:33:09.773Z" }, + { url = "https://files.pythonhosted.org/packages/82/31/006cbb4b648ba379a95c87262c2855cd0d09453e500937f78b30f02fa1cd/scipy-1.16.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c5192722cffe15f9329a3948c4b1db789fbb1f05c97899187dcf009b283aea70", size = 
38678975, upload-time = "2025-10-28T17:33:15.809Z" }, + { url = "https://files.pythonhosted.org/packages/c2/7f/acbd28c97e990b421af7d6d6cd416358c9c293fc958b8529e0bd5d2a2a19/scipy-1.16.3-cp312-cp312-win_amd64.whl", hash = "sha256:56edc65510d1331dae01ef9b658d428e33ed48b4f77b1d51caf479a0253f96dc", size = 38555926, upload-time = "2025-10-28T17:33:21.388Z" }, + { url = "https://files.pythonhosted.org/packages/ce/69/c5c7807fd007dad4f48e0a5f2153038dc96e8725d3345b9ee31b2b7bed46/scipy-1.16.3-cp312-cp312-win_arm64.whl", hash = "sha256:a8a26c78ef223d3e30920ef759e25625a0ecdd0d60e5a8818b7513c3e5384cf2", size = 25463014, upload-time = "2025-10-28T17:33:25.975Z" }, + { url = "https://files.pythonhosted.org/packages/72/f1/57e8327ab1508272029e27eeef34f2302ffc156b69e7e233e906c2a5c379/scipy-1.16.3-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:d2ec56337675e61b312179a1ad124f5f570c00f920cc75e1000025451b88241c", size = 36617856, upload-time = "2025-10-28T17:33:31.375Z" }, + { url = "https://files.pythonhosted.org/packages/44/13/7e63cfba8a7452eb756306aa2fd9b37a29a323b672b964b4fdeded9a3f21/scipy-1.16.3-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:16b8bc35a4cc24db80a0ec836a9286d0e31b2503cb2fd7ff7fb0e0374a97081d", size = 28874306, upload-time = "2025-10-28T17:33:36.516Z" }, + { url = "https://files.pythonhosted.org/packages/15/65/3a9400efd0228a176e6ec3454b1fa998fbbb5a8defa1672c3f65706987db/scipy-1.16.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:5803c5fadd29de0cf27fa08ccbfe7a9e5d741bf63e4ab1085437266f12460ff9", size = 20865371, upload-time = "2025-10-28T17:33:42.094Z" }, + { url = "https://files.pythonhosted.org/packages/33/d7/eda09adf009a9fb81827194d4dd02d2e4bc752cef16737cc4ef065234031/scipy-1.16.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:b81c27fc41954319a943d43b20e07c40bdcd3ff7cf013f4fb86286faefe546c4", size = 23524877, upload-time = "2025-10-28T17:33:48.483Z" }, + { url = "https://files.pythonhosted.org/packages/7d/6b/3f911e1ebc364cb81320223a3422aab7d26c9c7973109a9cd0f27c64c6c0/scipy-1.16.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0c3b4dd3d9b08dbce0f3440032c52e9e2ab9f96ade2d3943313dfe51a7056959", size = 33342103, upload-time = "2025-10-28T17:33:56.495Z" }, + { url = "https://files.pythonhosted.org/packages/21/f6/4bfb5695d8941e5c570a04d9fcd0d36bce7511b7d78e6e75c8f9791f82d0/scipy-1.16.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7dc1360c06535ea6116a2220f760ae572db9f661aba2d88074fe30ec2aa1ff88", size = 35697297, upload-time = "2025-10-28T17:34:04.722Z" }, + { url = "https://files.pythonhosted.org/packages/04/e1/6496dadbc80d8d896ff72511ecfe2316b50313bfc3ebf07a3f580f08bd8c/scipy-1.16.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:663b8d66a8748051c3ee9c96465fb417509315b99c71550fda2591d7dd634234", size = 36021756, upload-time = "2025-10-28T17:34:13.482Z" }, + { url = "https://files.pythonhosted.org/packages/fe/bd/a8c7799e0136b987bda3e1b23d155bcb31aec68a4a472554df5f0937eef7/scipy-1.16.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eab43fae33a0c39006a88096cd7b4f4ef545ea0447d250d5ac18202d40b6611d", size = 38696566, upload-time = "2025-10-28T17:34:22.384Z" }, + { url = "https://files.pythonhosted.org/packages/cd/01/1204382461fcbfeb05b6161b594f4007e78b6eba9b375382f79153172b4d/scipy-1.16.3-cp313-cp313-win_amd64.whl", hash = "sha256:062246acacbe9f8210de8e751b16fc37458213f124bef161a5a02c7a39284304", size = 38529877, upload-time = "2025-10-28T17:35:51.076Z" }, + { url = 
"https://files.pythonhosted.org/packages/7f/14/9d9fbcaa1260a94f4bb5b64ba9213ceb5d03cd88841fe9fd1ffd47a45b73/scipy-1.16.3-cp313-cp313-win_arm64.whl", hash = "sha256:50a3dbf286dbc7d84f176f9a1574c705f277cb6565069f88f60db9eafdbe3ee2", size = 25455366, upload-time = "2025-10-28T17:35:59.014Z" }, + { url = "https://files.pythonhosted.org/packages/e2/a3/9ec205bd49f42d45d77f1730dbad9ccf146244c1647605cf834b3a8c4f36/scipy-1.16.3-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:fb4b29f4cf8cc5a8d628bc8d8e26d12d7278cd1f219f22698a378c3d67db5e4b", size = 37027931, upload-time = "2025-10-28T17:34:31.451Z" }, + { url = "https://files.pythonhosted.org/packages/25/06/ca9fd1f3a4589cbd825b1447e5db3a8ebb969c1eaf22c8579bd286f51b6d/scipy-1.16.3-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:8d09d72dc92742988b0e7750bddb8060b0c7079606c0d24a8cc8e9c9c11f9079", size = 29400081, upload-time = "2025-10-28T17:34:39.087Z" }, + { url = "https://files.pythonhosted.org/packages/6a/56/933e68210d92657d93fb0e381683bc0e53a965048d7358ff5fbf9e6a1b17/scipy-1.16.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:03192a35e661470197556de24e7cb1330d84b35b94ead65c46ad6f16f6b28f2a", size = 21391244, upload-time = "2025-10-28T17:34:45.234Z" }, + { url = "https://files.pythonhosted.org/packages/a8/7e/779845db03dc1418e215726329674b40576879b91814568757ff0014ad65/scipy-1.16.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:57d01cb6f85e34f0946b33caa66e892aae072b64b034183f3d87c4025802a119", size = 23929753, upload-time = "2025-10-28T17:34:51.793Z" }, + { url = "https://files.pythonhosted.org/packages/4c/4b/f756cf8161d5365dcdef9e5f460ab226c068211030a175d2fc7f3f41ca64/scipy-1.16.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:96491a6a54e995f00a28a3c3badfff58fd093bf26cd5fb34a2188c8c756a3a2c", size = 33496912, upload-time = "2025-10-28T17:34:59.8Z" }, + { url = "https://files.pythonhosted.org/packages/09/b5/222b1e49a58668f23839ca1542a6322bb095ab8d6590d4f71723869a6c2c/scipy-1.16.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cd13e354df9938598af2be05822c323e97132d5e6306b83a3b4ee6724c6e522e", size = 35802371, upload-time = "2025-10-28T17:35:08.173Z" }, + { url = "https://files.pythonhosted.org/packages/c1/8d/5964ef68bb31829bde27611f8c9deeac13764589fe74a75390242b64ca44/scipy-1.16.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:63d3cdacb8a824a295191a723ee5e4ea7768ca5ca5f2838532d9f2e2b3ce2135", size = 36190477, upload-time = "2025-10-28T17:35:16.7Z" }, + { url = "https://files.pythonhosted.org/packages/ab/f2/b31d75cb9b5fa4dd39a0a931ee9b33e7f6f36f23be5ef560bf72e0f92f32/scipy-1.16.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e7efa2681ea410b10dde31a52b18b0154d66f2485328830e45fdf183af5aefc6", size = 38796678, upload-time = "2025-10-28T17:35:26.354Z" }, + { url = "https://files.pythonhosted.org/packages/b4/1e/b3723d8ff64ab548c38d87055483714fefe6ee20e0189b62352b5e015bb1/scipy-1.16.3-cp313-cp313t-win_amd64.whl", hash = "sha256:2d1ae2cf0c350e7705168ff2429962a89ad90c2d49d1dd300686d8b2a5af22fc", size = 38640178, upload-time = "2025-10-28T17:35:35.304Z" }, + { url = "https://files.pythonhosted.org/packages/8e/f3/d854ff38789aca9b0cc23008d607ced9de4f7ab14fa1ca4329f86b3758ca/scipy-1.16.3-cp313-cp313t-win_arm64.whl", hash = "sha256:0c623a54f7b79dd88ef56da19bc2873afec9673a48f3b85b18e4d402bdd29a5a", size = 25803246, upload-time = "2025-10-28T17:35:42.155Z" }, + { url = 
"https://files.pythonhosted.org/packages/99/f6/99b10fd70f2d864c1e29a28bbcaa0c6340f9d8518396542d9ea3b4aaae15/scipy-1.16.3-cp314-cp314-macosx_10_14_x86_64.whl", hash = "sha256:875555ce62743e1d54f06cdf22c1e0bc47b91130ac40fe5d783b6dfa114beeb6", size = 36606469, upload-time = "2025-10-28T17:36:08.741Z" }, + { url = "https://files.pythonhosted.org/packages/4d/74/043b54f2319f48ea940dd025779fa28ee360e6b95acb7cd188fad4391c6b/scipy-1.16.3-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:bb61878c18a470021fb515a843dc7a76961a8daceaaaa8bad1332f1bf4b54657", size = 28872043, upload-time = "2025-10-28T17:36:16.599Z" }, + { url = "https://files.pythonhosted.org/packages/4d/e1/24b7e50cc1c4ee6ffbcb1f27fe9f4c8b40e7911675f6d2d20955f41c6348/scipy-1.16.3-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:f2622206f5559784fa5c4b53a950c3c7c1cf3e84ca1b9c4b6c03f062f289ca26", size = 20862952, upload-time = "2025-10-28T17:36:22.966Z" }, + { url = "https://files.pythonhosted.org/packages/dd/3a/3e8c01a4d742b730df368e063787c6808597ccb38636ed821d10b39ca51b/scipy-1.16.3-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:7f68154688c515cdb541a31ef8eb66d8cd1050605be9dcd74199cbd22ac739bc", size = 23508512, upload-time = "2025-10-28T17:36:29.731Z" }, + { url = "https://files.pythonhosted.org/packages/1f/60/c45a12b98ad591536bfe5330cb3cfe1850d7570259303563b1721564d458/scipy-1.16.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8b3c820ddb80029fe9f43d61b81d8b488d3ef8ca010d15122b152db77dc94c22", size = 33413639, upload-time = "2025-10-28T17:36:37.982Z" }, + { url = "https://files.pythonhosted.org/packages/71/bc/35957d88645476307e4839712642896689df442f3e53b0fa016ecf8a3357/scipy-1.16.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d3837938ae715fc0fe3c39c0202de3a8853aff22ca66781ddc2ade7554b7e2cc", size = 35704729, upload-time = "2025-10-28T17:36:46.547Z" }, + { url = "https://files.pythonhosted.org/packages/3b/15/89105e659041b1ca11c386e9995aefacd513a78493656e57789f9d9eab61/scipy-1.16.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:aadd23f98f9cb069b3bd64ddc900c4d277778242e961751f77a8cb5c4b946fb0", size = 36086251, upload-time = "2025-10-28T17:36:55.161Z" }, + { url = "https://files.pythonhosted.org/packages/1a/87/c0ea673ac9c6cc50b3da2196d860273bc7389aa69b64efa8493bdd25b093/scipy-1.16.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b7c5f1bda1354d6a19bc6af73a649f8285ca63ac6b52e64e658a5a11d4d69800", size = 38716681, upload-time = "2025-10-28T17:37:04.1Z" }, + { url = "https://files.pythonhosted.org/packages/91/06/837893227b043fb9b0d13e4bd7586982d8136cb249ffb3492930dab905b8/scipy-1.16.3-cp314-cp314-win_amd64.whl", hash = "sha256:e5d42a9472e7579e473879a1990327830493a7047506d58d73fc429b84c1d49d", size = 39358423, upload-time = "2025-10-28T17:38:20.005Z" }, + { url = "https://files.pythonhosted.org/packages/95/03/28bce0355e4d34a7c034727505a02d19548549e190bedd13a721e35380b7/scipy-1.16.3-cp314-cp314-win_arm64.whl", hash = "sha256:6020470b9d00245926f2d5bb93b119ca0340f0d564eb6fbaad843eaebf9d690f", size = 26135027, upload-time = "2025-10-28T17:38:24.966Z" }, + { url = "https://files.pythonhosted.org/packages/b2/6f/69f1e2b682efe9de8fe9f91040f0cd32f13cfccba690512ba4c582b0bc29/scipy-1.16.3-cp314-cp314t-macosx_10_14_x86_64.whl", hash = "sha256:e1d27cbcb4602680a49d787d90664fa4974063ac9d4134813332a8c53dbe667c", size = 37028379, upload-time = "2025-10-28T17:37:14.061Z" }, + { url = 
"https://files.pythonhosted.org/packages/7c/2d/e826f31624a5ebbab1cd93d30fd74349914753076ed0593e1d56a98c4fb4/scipy-1.16.3-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:9b9c9c07b6d56a35777a1b4cc8966118fb16cfd8daf6743867d17d36cfad2d40", size = 29400052, upload-time = "2025-10-28T17:37:21.709Z" }, + { url = "https://files.pythonhosted.org/packages/69/27/d24feb80155f41fd1f156bf144e7e049b4e2b9dd06261a242905e3bc7a03/scipy-1.16.3-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:3a4c460301fb2cffb7f88528f30b3127742cff583603aa7dc964a52c463b385d", size = 21391183, upload-time = "2025-10-28T17:37:29.559Z" }, + { url = "https://files.pythonhosted.org/packages/f8/d3/1b229e433074c5738a24277eca520a2319aac7465eea7310ea6ae0e98ae2/scipy-1.16.3-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:f667a4542cc8917af1db06366d3f78a5c8e83badd56409f94d1eac8d8d9133fa", size = 23930174, upload-time = "2025-10-28T17:37:36.306Z" }, + { url = "https://files.pythonhosted.org/packages/16/9d/d9e148b0ec680c0f042581a2be79a28a7ab66c0c4946697f9e7553ead337/scipy-1.16.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f379b54b77a597aa7ee5e697df0d66903e41b9c85a6dd7946159e356319158e8", size = 33497852, upload-time = "2025-10-28T17:37:42.228Z" }, + { url = "https://files.pythonhosted.org/packages/2f/22/4e5f7561e4f98b7bea63cf3fd7934bff1e3182e9f1626b089a679914d5c8/scipy-1.16.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4aff59800a3b7f786b70bfd6ab551001cb553244988d7d6b8299cb1ea653b353", size = 35798595, upload-time = "2025-10-28T17:37:48.102Z" }, + { url = "https://files.pythonhosted.org/packages/83/42/6644d714c179429fc7196857866f219fef25238319b650bb32dde7bf7a48/scipy-1.16.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:da7763f55885045036fabcebd80144b757d3db06ab0861415d1c3b7c69042146", size = 36186269, upload-time = "2025-10-28T17:37:53.72Z" }, + { url = "https://files.pythonhosted.org/packages/ac/70/64b4d7ca92f9cf2e6fc6aaa2eecf80bb9b6b985043a9583f32f8177ea122/scipy-1.16.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ffa6eea95283b2b8079b821dc11f50a17d0571c92b43e2b5b12764dc5f9b285d", size = 38802779, upload-time = "2025-10-28T17:37:59.393Z" }, + { url = "https://files.pythonhosted.org/packages/61/82/8d0e39f62764cce5ffd5284131e109f07cf8955aef9ab8ed4e3aa5e30539/scipy-1.16.3-cp314-cp314t-win_amd64.whl", hash = "sha256:d9f48cafc7ce94cf9b15c6bffdc443a81a27bf7075cf2dcd5c8b40f85d10c4e7", size = 39471128, upload-time = "2025-10-28T17:38:05.259Z" }, + { url = "https://files.pythonhosted.org/packages/64/47/a494741db7280eae6dc033510c319e34d42dd41b7ac0c7ead39354d1a2b5/scipy-1.16.3-cp314-cp314t-win_arm64.whl", hash = "sha256:21d9d6b197227a12dcbf9633320a4e34c6b0e51c57268df255a0942983bac562", size = 26464127, upload-time = "2025-10-28T17:38:11.34Z" }, +] + +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, +] + +[[package]] +name = 
"threadpoolctl" +version = "3.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b7/4d/08c89e34946fce2aec4fbb45c9016efd5f4d7f24af8e5d93296e935631d8/threadpoolctl-3.6.0.tar.gz", hash = "sha256:8ab8b4aa3491d812b623328249fab5302a68d2d71745c8a4c719a2fcaba9f44e", size = 21274, upload-time = "2025-03-13T13:49:23.031Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/d5/f9a850d79b0851d1d4ef6456097579a9005b31fea68726a4ae5f2d82ddd9/threadpoolctl-3.6.0-py3-none-any.whl", hash = "sha256:43a0b8fd5a2928500110039e43a5eed8480b918967083ea48dc3ab9f13c4a7fb", size = 18638, upload-time = "2025-03-13T13:49:21.846Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "tzdata" +version = "2025.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5e/a7/c202b344c5ca7daf398f3b8a477eeb205cf3b6f32e7ec3a6bac0629ca975/tzdata-2025.3.tar.gz", hash = "sha256:de39c2ca5dc7b0344f2eba86f49d614019d29f060fc4ebc8a417896a620b56a7", size = 196772, upload-time = "2025-12-13T17:45:35.667Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl", hash = "sha256:06a47e5700f3081aab02b2e513160914ff0694bce9947d6b76ebd6bf57cfc5d1", size = 348521, upload-time = "2025-12-13T17:45:33.889Z" }, +] + +[[package]] +name = "tzlocal" +version = "5.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "tzdata", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8b/2e/c14812d3d4d9cd1773c6be938f89e5735a1f11a9f184ac3639b93cef35d5/tzlocal-5.3.1.tar.gz", hash = "sha256:cceffc7edecefea1f595541dbd6e990cb1ea3d19bf01b2809f362a03dd7921fd", size = 30761, upload-time = "2025-03-05T21:17:41.549Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/14/e2a54fabd4f08cd7af1c07030603c3356b74da07f7cc056e600436edfa17/tzlocal-5.3.1-py3-none-any.whl", hash = "sha256:eb1a66c3ef5847adf7a834f1be0800581b683b5608e74f86ecbcef8ab91bb85d", size = 18026, upload-time = "2025-03-05T21:17:39.857Z" }, +] + +[[package]] +name = "urllib3" +version = "2.6.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1e/24/a2a2ed9addd907787d7aa0355ba36a6cadf1768b934c652ea78acbd59dcd/urllib3-2.6.2.tar.gz", hash = "sha256:016f9c98bb7e98085cb2b4b17b87d2c702975664e4f060c6532e64d1c1a5e797", size = 432930, upload-time = "2025-12-11T15:56:40.252Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6d/b9/4095b668ea3678bf6a0af005527f39de12fb026516fb3df17495a733b7f8/urllib3-2.6.2-py3-none-any.whl", hash = "sha256:ec21cddfe7724fc7cb4ba4bea7aa8e2ef36f607a4bab81aa6ce42a13dc3f03dd", size = 131182, 
upload-time = "2025-12-11T15:56:38.584Z" }, +] + +[[package]] +name = "wcwidth" +version = "0.2.14" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/24/30/6b0809f4510673dc723187aeaf24c7f5459922d01e2f794277a3dfb90345/wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605", size = 102293, upload-time = "2025-09-22T16:29:53.023Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/b5/123f13c975e9f27ab9c0770f514345bd406d0e8d3b7a0723af9d43f710af/wcwidth-0.2.14-py2.py3-none-any.whl", hash = "sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1", size = 37286, upload-time = "2025-09-22T16:29:51.641Z" }, +] + +[[package]] +name = "xdg" +version = "6.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2a/b9/0e6e6f19fb75cf5e1758f4f33c1256738f718966700cffc0fde2f966218b/xdg-6.0.0.tar.gz", hash = "sha256:24278094f2d45e846d1eb28a2ebb92d7b67fc0cab5249ee3ce88c95f649a1c92", size = 3453, upload-time = "2023-02-27T19:27:44.309Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dd/54/3516c1cf349060fc3578686d271eba242f10ec00b4530c2985af9faac49b/xdg-6.0.0-py3-none-any.whl", hash = "sha256:df3510755b4395157fc04fc3b02467c777f3b3ca383257397f09ab0d4c16f936", size = 3855, upload-time = "2023-02-27T19:27:42.151Z" }, +] + +[[package]] +name = "zipp" +version = "3.23.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, +]