diff --git a/.conda/README.md b/.conda/README.md deleted file mode 100644 index ad97793f..00000000 --- a/.conda/README.md +++ /dev/null @@ -1,32 +0,0 @@ -# Publish to conda - -There are two publishing systems for openfisca conda packages: -- A fully automatic CI that publishes to an _openfisca_ channel. `openfisca-survey-manager` conda package is published to this channel. See below for more information. -- A more complex CI calling Conda-Forge CI, that publishes to the _Conda-Forge_ channel. See https://www.youtube.com/watch?v=N2XwK9BkJpA for an introduction to Conda-Forge. We do not use it for this repository. - -## Automatic upload - -The CI automaticaly builds the conda package from the [PyPi package](https://pypi.org/project/OpenFisca-Survey-Manager/), and uploads it to [anaconda.org](https://anaconda.org/search?q=openfisca-survey-manager). You can check this out in the GitHub Actions configuration file `.github/workflow/workflow.yml` and its `publish-to-conda` step. - -## Manual actions made to make it work the first time - -- Create an account on https://anaconda.org. -- Create a token on https://anaconda.org/openfisca/settings/access with _Allow write access to the API site_. -- Put the token in a CI environment variable named `ANACONDA_TOKEN`. - -⚠️ Warning, the current token expires on 2025/01/14. Check existing tokens and their expiration dates on Anaconda.org website and its [_Access_ section](https://anaconda.org/openfisca/settings/access). - -## Manual actions before initializing the CI or to test the conda packaging - -Before initializing the CI the conda package was created locally. Now, the conda packaging is done by the CI. Nevertheless, if you want to test it, this section describes how a package is built and uploaded. - -To create a conda package for this repository you can check the packaging configuration in `.conda/meta.yaml` and do the following in the project root folder: - -1. 
Build package: - - `conda install -c anaconda conda-build anaconda-client` - (`conda-build` to build the package and [anaconda-client](https://github.com/Anaconda-Platform/anaconda-client) to push the package to anaconda.org) - - `conda build .conda --channel openfisca` - -2. Upload the package to Anaconda.org, but DON'T do it if you don't want to publish your locally built package as an official OpenFisca-Survey-Manager library: - - `anaconda login` - - `anaconda upload openfisca-survey-manager--py_0.tar.bz2` diff --git a/.conda/meta.yaml b/.conda/meta.yaml deleted file mode 100644 index 39eb8759..00000000 --- a/.conda/meta.yaml +++ /dev/null @@ -1,103 +0,0 @@ -############################################################################### -## File describing the package for Anaconda.org -## It uses Jinja2 templating code to retreive information from setup.py -############################################################################### - -{% set name = "OpenFisca-Survey-Manager" %} -{% set data = load_setup_py_data() %} -{% set version = data.get('version') %} - -package: - name: {{ name|lower }} - version: {{ version }} - -# openfisca-survey-manager package source -source: - path: .. - -build: - noarch: python - number: 0 - script: "{{ PYTHON }} -m pip install . 
-vv" - -requirements: - host: - - python - - pip - run: - {% for req in data.get('install_requires', []) %} - {% if req.startswith('tables') %} - # PyPI 'tables' library is named 'pytables' for conda - - {{ req.replace('tables', 'pytables') }} - {% else %} - - {{ req | replace(" ","")}} - {% endif %} - {% endfor %} - - -test: - imports: - - openfisca_survey_manager - requires: - - pip - commands: - - pip check - -outputs: - - name: openfisca-survey-manager - - - name: openfisca-survey-manager-matching - build: - noarch: python - commands: - - echo {{ data.keys() }} - requirements: - host: - - python - run: - {% if 'extra_requires' in data %} - {% for req in data['extra_requires'].get('matching', []) %} - - {{ req }} - {% endfor %} - {% endif %} - - {{ pin_subpackage('openfisca-survey-manager', exact=True) }} - - - name: openfisca-survey-manager-dev - build: - noarch: python - requirements: - host: - - python - run: - {% if 'extra_requires' in data %} - {% for req in data['extra_requires'].get('dev', []) %} - - {{ req }} - {% endfor %} - {% endif %} - - {{ pin_subpackage('openfisca-survey-manager', exact=True) }} - - - name: openfisca-survey-manager-sas - build: - noarch: python - requirements: - host: - - python - run: - {% if 'extra_requires' in data %} - {% for req in data['extra_requires'].get('sas', []) %} - - {{ req }} - {% endfor %} - {% endif %} - - {{ pin_subpackage('openfisca-survey-manager', exact=True) }} - -about: - home: https://fr.openfisca.org/ - license_family: AGPL - license: AGPL-3.0-only - license_file: LICENSE.AGPL.txt - summary: "Survey-Manager module, to work with OpenFisca and survey data." - description: | - OpenFisca is a versatile microsimulation free software. - This repository contains the Survey-Manager module, to work with OpenFisca and survey data. 
- doc_url: https://fr.openfisca.org/ - dev_url: https://github.com/openfisca/openfisca-survey-manager/ diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md deleted file mode 100644 index c9d529a5..00000000 --- a/.github/ISSUE_TEMPLATE.md +++ /dev/null @@ -1,27 +0,0 @@ -Hi there! - -I really enjoy OpenFisca-Survey-Manager, but I recently encountered an issue. - -### Here is what I did: - - -### Here is what I expected to happen: - - -### Here is what actually happened: - - -### Here is data (or links to it) that can help you reproduce this issue: - - - -## Context - -I identify more as a: - -- Economist _(I make microsimulations with real populations)_. -- Developer _(I create tools that use OpenFisca-Survey-Manager)_. -- Commenter _(I make data visualisations)_. -- LexImpact _(I model reforms to make them exist)_. - -(remove this line as well as all items in the list that don't fit your situation) diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md deleted file mode 100644 index c74fa4c7..00000000 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ /dev/null @@ -1,20 +0,0 @@ -Thanks for contributing to OpenFisca-Survey-Manager! Remove this line, as well as any other in the following that don't fit your contribution :) - -#### Breaking changes - -- In _some module_: - - Remove… - -#### New features - -- Introduce `some_function()` - - Allows for… - -#### Deprecations - -- Deprecate `some_function`. - - The functionality is now provided by… - -#### Technical changes - -- Rename `private_function`. 
diff --git a/.github/dependabot.yml b/.github/dependabot.yml deleted file mode 100644 index fc2faa89..00000000 --- a/.github/dependabot.yml +++ /dev/null @@ -1,8 +0,0 @@ -version: 2 -updates: -- package-ecosystem: pip - directory: "/" - schedule: - interval: weekly - labels: - - kind:dependencies diff --git a/.github/has-functional-changes.sh b/.github/has-functional-changes.sh deleted file mode 100755 index 6770500c..00000000 --- a/.github/has-functional-changes.sh +++ /dev/null @@ -1,12 +0,0 @@ -#! /usr/bin/env bash - -IGNORE_DIFF_ON="README.md CONTRIBUTING.md Makefile .gitignore .github/* .conda/*" - -last_tagged_commit=`git describe --tags --abbrev=0 --first-parent` # --first-parent ensures we don't follow tags not published in master through an unlikely intermediary merge commit - -if git diff-index --name-only --exit-code $last_tagged_commit -- . `echo " $IGNORE_DIFF_ON" | sed 's/ / :(exclude)/g'` # Check if any file that has not be listed in IGNORE_DIFF_ON has changed since the last tag was published. -then - echo "No functional changes detected." - exit 1 -else echo "The functional files above were changed." -fi diff --git a/.github/is-version-number-acceptable.sh b/.github/is-version-number-acceptable.sh deleted file mode 100755 index aff071cc..00000000 --- a/.github/is-version-number-acceptable.sh +++ /dev/null @@ -1,45 +0,0 @@ -#! /usr/bin/env bash - -if [[ ${GITHUB_REF#refs/heads/} == master ]] -then - echo "No need for a version check on master." - exit 0 -fi - -if ! $(dirname "$BASH_SOURCE")/has-functional-changes.sh -then - echo "No need for a version update." - exit 0 -fi - -current_version=$(python `dirname "$BASH_SOURCE"`/pyproject_version.py --only_package_version True) # parsing with tomllib is complicated, see https://github.com/python-poetry/poetry/issues/273 -if [ $? 
-eq 0 ]; then - echo "Package version in pyproject.toml : $current_version" -else - echo "ERROR getting current version: $current_version" - exit 3 -fi - -if [[ -z $current_version ]] -then - echo "Error getting current version" - exit 1 -fi - -if git rev-parse --verify --quiet $current_version -then - echo "Version $current_version already exists in commit:" - git --no-pager log -1 $current_version - echo - echo "Update the version number in pyproject.toml before merging this branch into master." - echo "Look at the CONTRIBUTING.md file to learn how the version number should be updated." - exit 2 -fi - -if ! $(dirname "$BASH_SOURCE")/has-functional-changes.sh | grep --quiet CHANGELOG.md -then - echo "CHANGELOG.md has not been modified, while functional changes were made." - echo "Explain what you changed before merging this branch into master." - echo "Look at the CONTRIBUTING.md file to learn how to write the changelog." - exit 2 -fi diff --git a/.github/publish-git-tag.sh b/.github/publish-git-tag.sh deleted file mode 100755 index f60b980c..00000000 --- a/.github/publish-git-tag.sh +++ /dev/null @@ -1,5 +0,0 @@ -#! 
/usr/bin/env bash - -current_version=$(grep '^version =' pyproject.toml | cut -d '"' -f 2) # parsing with tomllib is complicated, see https://github.com/python-poetry/poetry/issues/273 -git tag $current_version -git push --tags # update the repository version diff --git a/.github/pyproject_version.py b/.github/pyproject_version.py deleted file mode 100644 index 1a491419..00000000 --- a/.github/pyproject_version.py +++ /dev/null @@ -1,60 +0,0 @@ -# Read package version in pyproject.toml and replace it in .conda/recipe.yaml - -import argparse -import logging -import re - -logging.basicConfig(level=logging.INFO, format="%(message)s") -PACKAGE_VERSION = "X.X.X" -CORE_VERSION = ">=43,<44" -NUMPY_VERSION = ">=1.24.3,<2" - - -def get_versions(): - """ - Read package version and deps in pyproject.toml - """ - # openfisca_core_api = None - openfisca_survey_manager = None - # numpy = None - with open("./pyproject.toml", "r") as file: - content = file.read() - # Extract the version of openfisca_survey_manager - version_match = re.search(r'^version\s*=\s*"([\d.]*)"', content, re.MULTILINE) - if version_match: - openfisca_survey_manager = version_match.group(1) - else: - raise Exception("Package version not found in pyproject.toml") - # Extract dependencies - # version = re.search(r'openfisca-core\s*(>=\s*[\d\.]*,\s*<\d*)"', content, re.MULTILINE) - # if version: - # openfisca_core_api = version.group(1) - # version = re.search(r'numpy\s*(>=\s*[\d\.]*,\s*<\d*)"', content, re.MULTILINE) - # if version: - # numpy = version.group(1) - # if not openfisca_core_api or not numpy: - # raise Exception('Dependencies not found in pyproject.toml') - return { - "openfisca_survey_manager": openfisca_survey_manager, - # 'openfisca_core_api': openfisca_core_api.replace(' ', ''), - # 'numpy': numpy.replace(' ', ''), - } - - -if __name__ == "__main__": - parser = argparse.ArgumentParser() - parser.add_argument("-r", "--replace", type=bool, default=False, required=False, help="replace in file") - 
parser.add_argument( - "-f", "--filename", type=str, default=".conda/recipe.yaml", help="Path to recipe.yaml, with filename" - ) - parser.add_argument( - "-o", "--only_package_version", type=bool, default=False, help="Only display current package version" - ) - args = parser.parse_args() - info = get_versions() - file = args.filename - if args.only_package_version: - print(f"{info['openfisca_survey_manager']}") # noqa: T201 - exit() - logging.info("Versions :") - print(info) # noqa: T201 diff --git a/.github/workflows/workflow.yml b/.github/workflows/workflow.yml deleted file mode 100644 index a82802eb..00000000 --- a/.github/workflows/workflow.yml +++ /dev/null @@ -1,156 +0,0 @@ -name: OpenFisca-Survey-Manager - -on: - push: - pull_request: - types: [opened, reopened] - -jobs: - build: - runs-on: ${{ matrix.os }} - strategy: - fail-fast: true - matrix: - os: ["ubuntu-24.04"] # On peut ajouter "macos-latest" si besoin - python-version: ["3.9.13", "3.10.6", "3.11.13"] - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - name: Install required build libraries - run: sudo apt-get update && sudo apt-get install -y build-essential libc6-dev - - name: Cache build - id: restore-build - uses: actions/cache@v4 - with: - path: ${{ env.pythonLocation }} - key: build-${{ env.pythonLocation }}-${{ hashFiles('pyproject.toml') }}-${{ github.sha }}-${{ matrix.os }} - restore-keys: | # in case of a cache miss (systematically unless the same commit is built repeatedly), the keys below will be used to restore dependencies from previous builds, and the cache will be stored at the end of the job, making up-to-date dependencies available for all jobs of the workflow; see more at https://docs.github.com/en/actions/advanced-guides/caching-dependencies-to-speed-up-workflows#example-using-the-cache-action - build-${{ env.pythonLocation }}-${{ hashFiles('pyproject.toml') 
}}-${{ matrix.os }} - build-${{ env.pythonLocation }}-${{ matrix.os }} - - name: Build package - run: make build - - name: Cache release - id: restore-release - uses: actions/cache@v4 - with: - path: dist - key: release-${{ env.pythonLocation }}-${{ hashFiles('pyproject.toml') }}-${{ github.sha }}-${{ matrix.os }} - - lint-files: - runs-on: ubuntu-24.04 - needs: [ build ] - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 # Fetch all the tags - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: 3.9.13 - - name: Cache build - id: restore-build - uses: actions/cache@v4 - with: - path: ${{ env.pythonLocation }} - key: build-${{ env.pythonLocation }}-${{ hashFiles('pyproject.toml') }}-${{ github.sha }}-ubuntu-24.04 - - run: pip install -e .[dev] # Need to install linter - - run: make check-syntax-errors - - run: make check-style - - test-python: - runs-on: ${{ matrix.os }} - needs: [ build ] - strategy: - fail-fast: true - matrix: - os: [ "ubuntu-24.04" ] # On peut ajouter "macos-latest" si besoin - python-version: ["3.9.13", "3.10.6", "3.11.13"] - steps: - - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - name: Cache build - id: restore-build - uses: actions/cache@v4 - with: - path: ${{ env.pythonLocation }} - key: build-${{ env.pythonLocation }}-${{ hashFiles('pyproject.toml') }}-${{ github.sha }}-${{ matrix.os }} - - name: install - run: make install - - name: test - run: make test - - name : coverage - run: coveralls --service=github - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - check-version-and-changelog: - runs-on: ubuntu-24.04 - needs: [ lint-files, build ] # TODO: Replace build by test-python - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 # Fetch all the tags - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: 3.9.13 - - name: Check version number has been properly updated 
- run: "${GITHUB_WORKSPACE}/.github/is-version-number-acceptable.sh" - - # GitHub Actions does not have a halt job option, to stop from deploying if no functional changes were found. - # We build a separate job to substitute the halt option. - # The `deploy` job is dependent on the output of the `check-for-functional-changes` job. - check-for-functional-changes: - runs-on: ubuntu-24.04 - if: github.ref == 'refs/heads/master' # Only triggered for the `master` branch - needs: [ check-version-and-changelog ] - outputs: - status: ${{ steps.stop-early.outputs.has_functional_changes_status }} - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 # Fetch all the tags - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: 3.9.13 - - id: stop-early - run: if "${GITHUB_WORKSPACE}/.github/has-functional-changes.sh" ; then echo "has_functional_changes_status=detected_functional_changes" >> $GITHUB_OUTPUT ; fi - - deploy: - runs-on: ubuntu-24.04 - needs: [ check-for-functional-changes ] - if: needs.check-for-functional-changes.outputs.status == 'detected_functional_changes' - env: - PYPI_TOKEN_OPENFISCA_BOT: ${{ secrets.PYPI_TOKEN_OPENFISCA_BOT }} - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 # Fetch all the tags - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: 3.9.13 - - name: Cache build - id: restore-build - uses: actions/cache@v4 - with: - path: ${{ env.pythonLocation }} - key: build-${{ env.pythonLocation }}-${{ hashFiles('pyproject.toml') }}-${{ github.sha }}-ubuntu-24.04 - - name: Cache release - id: restore-release - uses: actions/cache@v4 - with: - path: dist - key: release-${{ env.pythonLocation }}-${{ hashFiles('pyproject.toml') }}-${{ github.sha }}-ubuntu-24.04 - - name: Upload a Python package to PyPi - run: twine upload dist/* --username __token__ --password $PYPI_TOKEN_OPENFISCA_BOT - - name: Publish a git tag - run: "${GITHUB_WORKSPACE}/.github/publish-git-tag.sh" diff --git 
a/.gitignore b/.gitignore deleted file mode 100644 index b188e6f0..00000000 --- a/.gitignore +++ /dev/null @@ -1,104 +0,0 @@ -# Config files -.config -raw_data.ini -config.ini - -# Collections -.collections - -# Fixtures, dumps, personal tests, etc. -openfisca_survey_manager/tests/test_read_dbf.py -openfisca_survey_manager/tests/data_files/dump/ -openfisca_survey_manager/tests/data_files/config.ini -openfisca_survey_manager/tests/data_files/fake.hdf5 -openfisca_survey_manager/tests/data_files/fake.json -openfisca_survey_manager/tests/data_files/test_random_generator.json - -# Vscode config -.vscode - -# ignore tags files -.tags* -tags - -# Ignore temporary h5 files -*.h5 - -# Spyder junk -.spyderworkspace -.spyderproject -.spyproject - -# Ignore emacs saves -*~ - -# Ignore local config file -*_local.ini - -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[cod] - -# C extensions -*.so - -# Pyenv config -.python-version - -# Distribution / packaging -.Python -env/ -bin/ -build/ -develop-eggs/ -dist/ -eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -*.egg-info/ -.installed.cfg -*.egg - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ -.coverage -.cache -nosetests.xml -coverage.xml -.pytest_cache - -# Translations -*.mo - -# Mr Developer -.mr.developer.cfg -.project -.pydevproject - -# Rope -.ropeproject - -# Django stuff: -*.log -*.pot - -# Sphinx documentation -docs/_build/ - -# ctags file -.tags* -/tags -tags -.venv - -# Tests files -*.parquet -test_*.json diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml deleted file mode 100644 index 58d4c40d..00000000 --- a/.pre-commit-config.yaml +++ /dev/null @@ -1,7 +0,0 @@ -repos: - - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.14.1 # Use the version of ruff you have installed or a recent one - hooks: - - id: ruff - args: [--fix, --exit-non-zero-on-fix] - - id: ruff-format \ No newline at end of file diff --git a/.yamllint 
b/.yamllint deleted file mode 100644 index 1db00ce7..00000000 --- a/.yamllint +++ /dev/null @@ -1,9 +0,0 @@ ---- -extends: relaxed - -rules: - truthy: enable - # Disable warning level rules to remove flood in yamllint output - # YAML file should be modified to comply to those rules before activation - indentation: disable - line-length: disable diff --git a/CHANGELOG.md b/CHANGELOG.md deleted file mode 100644 index 5c4bdd91..00000000 --- a/CHANGELOG.md +++ /dev/null @@ -1,940 +0,0 @@ -# Changelog - -# 4.0.0 [#345](https://github.com/openfisca/openfisca-survey-manager/pull/345) - -* New features - - enhance the aggregates table by adding "winners/losers/neutral" categories. The changes include fixing typos (e.g., "looser" to "loser" thus a major version bump), addressing linting issues, correcting indentation and formatting, and improving doctests. - -### 3.2.7 [#344](https://github.com/openfisca/openfisca-survey-manager/pull/344) - -* Technical changes -- Use ruff and isort instead of flake8 and autopep8. - -### 3.2.6 [#343](https://github.com/openfisca/openfisca-survey-manager/pull/343) - -* Technical changes - - Remove sas7dbat dependency - -### 3.2.5 [#324](https://github.com/openfisca/openfisca-survey-manager/pull/324) - -* New features - - Introduce the possibility to use calmar on variables changed with expressions. - -### 3.2.4 [#341](https://github.com/openfisca/openfisca-survey-manager/pull/341) - -* Technical changes - - Add message to make debugging easier - -### 3.2.3 [#340](https://github.com/openfisca/openfisca-survey-manager/pull/340) - -* New features - - Update the build_raw_coicop_nomenclature function, to make it work with latest panda version in the environment - - Add a argument year (1998 or 2016) to the function. - -### 3.2.2 [#339](https://github.com/openfisca/openfisca-survey-manager/pull/339) - -* New features - - Allows for the use of the 2016 coicop nomenclature instead of the 1998 one. 
- - Uses the 2016 one by default but its still possible to choose for the old one -* Technical changes - - Within the assets folder I created the COICOP folder which contains all the 1998 and 2016 nomenclature files in two separate folders. - -## 3.2.0 [#338](https://github.com/openfisca/openfisca-survey-manager/pull/338) - -* Technical changes - - Decrease verbosity. - - Anticipate deprecation in `fillna` and `map` `pandas.DataFrame` methods. - - Fix `compute_aggregate` to handle correctly the case when no weight is defined. - -## 3.1.0 [#337](https://github.com/openfisca/openfisca-survey-manager/pull/337) - -* Technical changes - - Add variability across variable `housing_occupancy_status` in the input dataframe to the test scenario. - -### 3.0.7 [#334](https://github.com/openfisca/openfisca-survey-manager/pull/334) - -* Technical changes - - Fix incomplete enum generation. - -### 3.0.6 [#336](https://github.com/openfisca/openfisca-survey-manager/pull/336) - -* Technical changes - - Fix `test_scenario` to use correct `SurveyScenario` class. - -### 3.0.5 [#335](https://github.com/openfisca/openfisca-survey-manager/pull/335) - -* Technical changes - - Add Python 3.11 to tested versions. - -### 3.0.4 [#326](https://github.com/openfisca/openfisca-survey-manager/pull/326) - -* Technical changes - - Fix get_values of Survey class. - -### 3.0.3 [#329](https://github.com/openfisca/openfisca-survey-manager/pull/329) - -* Technical changes - - Fix dependencies to be able to install package with uv - -### 3.0.2 [#328](https://github.com/openfisca/openfisca-survey-manager/pull/328) - -* Technical changes - - Fix categorical dtype check when initialising a Simulation. - -### 3.0.1 [#322](https://github.com/openfisca/openfisca-survey-manager/pull/322) - -* Technical changes - - Fix build. - -# 3.0.0 [#320](https://github.com/openfisca/openfisca-survey-manager/pull/320) - -* Technical changes - - Migrate config to `pyproject.toml`. - - Comment out conda build. 
- - Add a `openfisa_survey_manager.paths` module to store paths (breaking change). - -### 2.3.5 [#319](https://github.com/openfisca/openfisca-survey-manager/pull/319) - -* Technical changes - - Fix the conda build by moving the Numpy pinning to `install_requires`. - - Add a conda build step in CI to inform the dev about a problem before merging. - -### 2.3.4 [#317](https://github.com/openfisca/openfisca-survey-manager/pull/317) - -* Technical changes - - add `"numpy >=1.24.2, <2.0"` in setup.py to solve conda build - -### 2.3.3 [#316](https://github.com/openfisca/openfisca-survey-manager/pull/315) - -* Technical changes - - Solve an issue caused by #299 when there is a target_entity_count without variables of the same entity. - - Correct the CHANGELOG which had two errors. - - -### 2.3.2 [#315](https://github.com/openfisca/openfisca-survey-manager/pull/315) - -* Technical changes - - Transform input arrays of Enums variables in EnumArray type to improve computation speed due to changes in Openfisca-core 42 - -### 2.3.1 [#300](https://github.com/openfisca/openfisca-survey-manager/pull/300) - -* New features - - Add a new distance function method, the hyperbolic sinus. It allows to cap the weight ratio with a one dimensionnal parametring, and with less computation issues than with the logit method. - - It is described in Calmar 2 documentation - -## 2.3.0 [#299](https://github.com/openfisca/openfisca-survey-manager/pull/299) - -* New features - - Allows for calibration of variables defined in two different entities ; it requires that one is included in the other (ex : individuals and fiscal household : each individual is in only one fiscal household). To use it, a variable identifying in the smaller entity the identifier of the larger one is needed (the name must be given in parameter "id_variable_link"). 
- - Reshape the input of calmar and calibration (with dicts more than dataframes to allow several entities) - - Adapt the tests -The idea comes from Insee's Calmar 2 documentation. - -### 2.2.7 [#311](https://github.com/openfisca/openfisca-survey-manager/pull/311) - -* Bug fix - - Fix filepath and table fetcher when using parquet format. - -### 2.2.6 [#308](https://github.com/openfisca/openfisca-survey-manager/pull/308) - -* Minor change. - - Update OpenFisca-Core to 43.0.0 - -### 2.2.5 [#305](https://github.com/openfisca/openfisca-survey-manager/pull/305) - -* Minor change. - - Add docstrings and types to some tests. - -### 2.2.4 [#304](https://github.com/openfisca/openfisca-survey-manager/pull/304) - -* Minor change. - - Add docstrings and types to some tests. - -### 2.2.3 [#297](https://github.com/openfisca/openfisca-survey-manager/pull/297) - -* Minor change. - - Update configarser to 8.0.0 - -### 2.2.2 [#302](https://github.com/openfisca/openfisca-survey-manager/pull/302) - -* Minor change. - - Update OpenFisca-Core to 42.0.0 - -### 2.2.1 [#298](https://github.com/openfisca/openfisca-survey-manager/pull/298) - -* For `compute_aggregate`, replace the warning by an assert when `weighted = True` (the default) and no weight is defined for the variable. Before, a warning was displayed and the aggregate was computed using uniform weights. - -## 2.2.0 [#295](https://github.com/openfisca/openfisca-survey-manager/pull/295) - -* Improve support for parquet file format : - - If a parquet file is provided to build-collection it will be cleaned an added to the collection as a parquet file. - - If the option `keep_original_parquet_file` is passed the parquet file is kept and added to the collection as is as is. - -## 2.1.0 [#277](https://github.com/openfisca/openfisca-survey-manager/pull/277) - -* New features - - Support parquet file format : - - if a parquet file is provided to build-collection it won't be converted in HDF5 but added to the collection as is. 
- - It is possible to provide a folder containing many files : each files will be used as a separate table. - - Run a simulation on a part of the input datasets (for the moment it works only for two entities simulations) : - - With a batch size option - - With a filter_by option - -### 2.0.10 [#285](https://github.com/openfisca/openfisca-survey-manager/pull/285) - -* Technical changes -- Some arguments for calibration were not consistently named. - -### 2.0.9 [#294](https://github.com/openfisca/openfisca-survey-manager/pull/294) - -* Technical changes - - Add config files directory option in `set_table_in_survey` - -### 2.0.8 [#292](https://github.com/openfisca/openfisca-survey-manager/pull/292) - -* Technical change - - With the security enforcement of PyPi, we have to use token instead of login/password. - -### 2.0.7 [#293](https://github.com/openfisca/openfisca-survey-manager/pull/293) - -* Technical changes - - Improve correction done in https://github.com/openfisca/openfisca-survey-manager/pull/286/files - -### 2.0.6 [#291](https://github.com/openfisca/openfisca-survey-manager/pull/291) - -* Technical changes - - Fix the script check-version-and-changelog.sh to fix issue #288 - - Upgrade CI actions - -### 2.0.5 [#286](https://github.com/openfisca/openfisca-survey-manager/pull/286) - -* New features - - Allows sub-annual weighted aggregates in compute_aggregate when the weights are annual. 
- -### 2.0.4 [#283](https://github.com/openfisca/openfisca-survey-manager/pull/283) - -- Correction function compute_pivot_tables in ReformScenario - -### 2.0.3 [#282](https://github.com/openfisca/openfisca-survey-manager/pull/282) - -- Correction typo error in create_data_frame_by_entity - -### 2.0.2 [#280](https://github.com/openfisca/openfisca-survey-manager/pull/280) - -- Add a try/except for the tests config in openfisca_survey_manager/__init__.py - -### 2.0.1 [#279](https://github.com/openfisca/openfisca-survey-manager/pull/279) - -#### Technical changes - -- Fix names of ids columns when the merge option is True in create_data_frame_by_entity. - -# 2.0.O [#273](https://github.com/openfisca/openfisca-survey-manager/pull/273) - -#### Breaking changes - -This is a major refactoring of the `AbstractSurveyScenario` object and affects other related objects. - -- Refactor `AbstractSurveyScenario` -- Create `ReformScenario` -- Monkey patch `openfiscca_core.simulations.Simulation` and `openfisca_core.simulations.simulation_builder.SimulationBuilder`. -- Adapt `AbstractAggregates` accordingly - -#### Rationale - -The main goal was to separate the different steps to produce an impact analysis on survey or administrative data -and to create a more flexible tools to deal with different use case. -To do so, we performed the following changes: -- Create a generic `AbstractSurveyScenario` that can hande as many simulations as needed. -- Move to the appropriate (lower) level the methods to load the data or perform some calculation, mainly: - - Monkey patch the `Simulation` objects to deal all loading and calculation using `pandas` that are not available in the original `openfisca_core.simulations.Simulation` object which rely solely on `numpy` (and will not change anytime soon for good reason) - - Monkey patch the `SimulationBuilder` to add the needed methods to init the simulation from tabular data. 
-- Create a `ReformScenario` that retains the main characteristics of the old `AbstractSurveyScenario` -- Adapt `AbstractAggregates` to these new scenarios. Might need more refactoring to be more generic, but works with actual use case mainly `openfisca-france-data`. - -#### Migration - -- Users of `AbstractSurveyScebario` should use `ReformScenario`. -- Use attribute `period` instead of `year`. -- The generic simulation initialisation from survey data goes through the method `Simulation.new_from_tax_benefit_system` with a data dict argument with new keys as `collection`, `id_variable_by_entity_key`, `role_variable_by_entity_key`, `used_as_input_variables` to mimic at the simulation level what was done before this PR at the scenario level. - -### 1.1.9 [#274](https://github.com/openfisca/openfisca-survey-manager/pull/274) - -* Technical changes - - Adapt config files location to private Ci-runs of LexImpact (hack like everything dealing with config files location definition). - -### 1.1.8 [#272](https://github.com/openfisca/openfisca-survey-manager/pull/272) - -* Technical changes - - Use `openfisca-core` version >= 41.0.3. - -### 1.1.7 [#271](https://github.com/openfisca/openfisca-survey-manager/pull/271) - -* Technical changes - - Set pandas dependency to version >= 2.0.3, <3.0. 
- -### 1.1.5 [#265](https://github.com/openfisca/openfisca-survey-manager/pull/265) - -* Technical changes - - Use `find_namespace_packages` and instead of `find_packages` in `setup.py` - -### 1.1.4 [#264](https://github.com/openfisca/openfisca-survey-manager/pull/264) - -* Technical changes - - Change importlib metadata import to work with all Python version - -### 1.1.3 [#263](https://github.com/openfisca/openfisca-survey-manager/pull/263) - -* Technical changes - - Use importlib instead of pkg_resources to avoid deprecation warnings - -### 1.1.2 [#262](https://github.com/openfisca/openfisca-survey-manager/pull/262) - -* Technical changes - - Remove old `CircleCI` continuous integration configuration - - Set `README` CI badge to current `GitHub Actions` CI - -### 1.1.1 [#261](https://github.com/openfisca/openfisca-survey-manager/pull/261) - -* Technical changes - - Fix `Conda build` step in `publish-to-conda` GitHub Actions job - - Define `OpenFisca-Survey-Manager` package dependencies once for `PyPI` and `conda` - - Use `setup.py` general requirement and extra requirements for `conda` package - - Adapt `tables` library name to `pytables` for `conda` - - Build `conda` package from repository sources instead of `PyPI` .tar.gz - -## 1.1.0 [#260](https://github.com/openfisca/openfisca-survey-manager/pull/260) - -* New features -- Add options in inflate_parameters and inflate_parameter_leaf: - - `start_update_instant` : Instant of the year when the inflation should start, if different from January 1st - - `round_ndigits` : number of digits in the rounded result -- Adjustment of inflate_parameters to use it with parameter leaf - -### 1.0.2 [#259](https://github.com/openfisca/openfisca-survey-manager/pull/259) - -* Technical changes - - A parameter `config_files_directory` exist but it is not used evrywhere, this PR generalize it. - - Add tests using this parameter. 
- -### 1.0.1 [#257](https://github.com/openfisca/openfisca-survey-manager/pull/257) - -* Technical changes - - In GitHub Actions workflow, fixes the `check-for-functional-changes` → **`deploy`** → `publish-to-conda` jobs sequence - - Fix the activation of the `deploy` job by fixing how it gets `check-for-functional-changes` output status - - Allow the activation of `publish-to-conda` job that needs the `deploy` job - - Add conda configuration files to non functional files for CI - -# 1.0.0 [#252](https://github.com/openfisca/openfisca-survey-manager/pull/252) - -* Technical improvement - - Impacted periods: all. - - Impacted areas: all. - - Details: - - Upgrade every dependencies & use their latest versions - -### 0.47.2 [#249](https://github.com/openfisca/openfisca-survey-manager/pull/249) - -* Technical changes - - Fix `default_config_directory` for use with `openfisca-france-data` in a CI - -### 0.47.1 [#246](https://github.com/openfisca/openfisca-survey-manager/pull/246) - -* Bug fix - - Debug france data ci (fixes 0.47.0) - -## 0.47.0 [#245](https://github.com/openfisca/openfisca-survey-manager/pull/245) - -* Technical changes - - Fix `default_config_directory` for use with `openfisca-france-data` in a CI - -### 0.46.19 [#244](https://github.com/openfisca/openfisca-survey-manager/pull/244) - -* Technical changes - - Bump to publish package - -### 0.46.18 [#243](https://github.com/openfisca/openfisca-survey-manager/pull/243) - -* Technical changes - - Bump to publish package - -### 0.46.17 [#242](https://github.com/openfisca/openfisca-survey-manager/pull/242) - -* Technical changes - - Bug fix in `SurveyCollection.load` - -### 0.46.16 - -* CI test - -### 0.46.15 [#236](https://github.com/openfisca/openfisca-survey-manager/pull/236) - -* Technical changes - - Put back test in CI - - Fix coveralls config fot GitHub Actions - - Add a test for create_data_frame_by_entity - - Bump Actions and Python version to fix warnings - -### 0.46.14 
[#234](https://github.com/openfisca/openfisca-survey-manager/pull/234) - -* Technical changes - - Convert every cells of a column to string. - -### 0.46.13 [#233](https://github.com/openfisca/openfisca-survey-manager/pull/233) - -* Technical changes - - Correcting the code asking for the period before it's instated - - Checking the new period assignment - -### 0.46.12 [#232](https://github.com/openfisca/openfisca-survey-manager/pull/232) - -* Technical changes - - Deal with Nan in Enum variables - -### 0.46.11 [#227](https://github.com/openfisca/openfisca-survey-manager/pull/227) - -* Technical changes - - Add build of a tar.gz - - Add a make entry for build - - Move CI from Circle CI to GitHub Action (Except `make test` that run only on CircleCI) - -### 0.46.10 [#229](https://github.com/openfisca/openfisca-survey-manager/pull/229) - -* Technical changes - - Add tar.gz to PyPi - - Add display readme to PyPi - -### 0.46.9 [#228](https://github.com/openfisca/openfisca-survey-manager/pull/228) - -* Technical changes - - Refactor tables method to mutualize code - - Save variables in table survey data - -### 0.46.8 [#226](https://github.com/openfisca/openfisca-survey-manager/pull/226) - -* Technical changes - - Add a set seed in `mark_weighted_percentiles`, so that when a survey scenario with a baseline and a reform is run, variables which use this function take the same value for a given entity between the baseline and the reform. - -### 0.46.7 [#227](https://github.com/openfisca/openfisca-survey-manager/pull/225) - -* Technical changes - - Handle explicitly SAS related dependecy. - -### 0.46.6 [#224](https://github.com/openfisca/openfisca-survey-manager/pull/224) - -* Bug fix - - Using pyreadstat instead of SAS7BDAT which is no more the canonical way to read sas files into pandas dataframes. 
- -### 0.46.5 [#223](https://github.com/openfisca/openfisca-survey-manager/pull/223) - -* Bug fix - - Deal with HDF5 file opening strict policy in build-collection - -### 0.46.4 [#219](https://github.com/openfisca/openfisca-survey-manager/pull/219) - -* Technical changes - - Better handling of CategoricalDtype in input data - -### 0.46.3 [#217](https://github.com/openfisca/openfisca-survey-manager/pull/217) - -* Bug fix - - Deal with HDF5 file opening strict policy - -### 0.46.2 [#214](https://github.com/openfisca/openfisca-survey-manager/pull/214) - -* New features - - Introduce AbstractSurveyScenario.calculate_series - -### 0.46.1 [#211](https://github.com/openfisca/openfisca-survey-manager/pull/211) - -* Technical changes - - Improve dialect detection for csv files - -## 0.46 [#210](https://github.com/openfisca/openfisca-survey-manager/pull/210) - -* Technical changes - - Hack to deal with encodings and delimiter not detected by pandas.read_csv - -## 0.45 [#143](https://github.com/openfisca/openfisca-survey-manager/pull/143) - -* Technical changes - - In compute_marginal_tax_rate allow for automatic aggregation on group entity when target and varying variables entity are not the same and the varying variable entity is a person one. - -### 0.44.2 [#208](https://github.com/openfisca/openfisca-survey-manager/pull/208) - -* Bug fix - - Fix typo. - -### 0.44.1 [#207](https://github.com/openfisca/openfisca-survey-manager/pull/207) - -* Bug fix - - Fix aggregates export to html. - -## 0.44 [#206](https://github.com/openfisca/openfisca-survey-manager/pull/206) - -* New feature - - Ability to export aggregates to html. - -## 0.43 [#135](https://github.com/openfisca/openfisca-survey-manager/pull/135) - -* New feature - - Introduce aggregates. - -### 0.42.3 [#189](https://github.com/openfisca/openfisca-survey-manager/pull/189) - -* Technical changes - - Accept categorical columns in input data frames to initialize Enum variables. 
- -### 0.42.2 [#204](https://github.com/openfisca/openfisca-survey-manager/pull/204) - -* Technical changes - - Add on sub-periods when creating a quantile on a larger period - -### 0.42.1 [#200](https://github.com/openfisca/openfisca-survey-manager/pull/200) - -* Bug fix - - Let numpy dependence come from openfisca-core - -### 0.42.0 [#198](https://github.com/openfisca/openfisca-survey-manager/pull/198) - -* New feature - - Allow to build collections/surveys from csv files - -### 0.41.3 [#196](https://github.com/openfisca/openfisca-survey-manager/pull/196) - -* Bug fix - - Enforce HDF store closing when done - -### 0.41.2 [#194](https://github.com/openfisca/openfisca-survey-manager/pull/194) - -* Bug fix - - Enforce us of np.array for weights and filters when computing aggregates - -### 0.41.1 [#187](https://github.com/openfisca/openfisca-survey-manager/pull/187) - -* Update dependencies -### 0.41.0 [#185](https://github.com/openfisca/openfisca-survey-manager/pull/186) - -* New features - - Add a method to compute quantile - - Extend the computation of marginal tax rate - -### 0.40.1 [#185](https://github.com/openfisca/openfisca-survey-manager/pull/185) - -* Technical improvement - - Introduce weighted option in `compute_aggregate` and `compute_pivot_table` - - Change `weights` to `alternative_weights` in `compute_aggregate` and `compute_pivot_table` - -### 0.40.0 [#184](https://github.com/openfisca/openfisca-survey-manager/pull/184) - -* Technical improvement - - Add weights keyword argument to `compute_aggregate` and `compute_pivot_table` - -* Improve documentation - - Use googl style in docstring - - Add some docstring - -### 0.39.1 [#178](https://github.com/openfisca/openfisca-survey-manager/pull/178) - -* Bug fix - - Fix inflate that inflated twice when baseline_simulation == simulation - -### 0.39.0 [#170](https://github.com/openfisca/openfisca-survey-manager/pull/170) - -- Add statistical helpers to compute top and bottom shares - -### 0.38.3 
[#XXX](https://github.com/openfisca/openfisca-survey-manager/pull/XXX) - -- Fix _set_used_as_input_variables_by_entity - -### 0.38.2 [#162](https://github.com/openfisca/openfisca-survey-manager/pull/162) - -- Update `pytables` and `numpy` dependencies - -### 0.38.1 [#158](https://github.com/openfisca/openfisca-survey-manager/pull/158) - -- Clarify documentation on configuration directory and build-collection command - -## 0.38.0 [#156](https://github.com/openfisca/openfisca-survey-manager/pull/156) - -* New features - - Introduce `survey_scenario.generate_performance_data(output_dir)` - - This generates a performance graph and CSV tables containing details about execution times of OpenFisca formulas - -### 0.37.3 [#157](https://github.com/openfisca/openfisca-survey-manager/pull/157) - -* Technical changes - - Add `tables` library to default requirements -* Add documentation for users installing, configuring and running the module for the first time - -### 0.37.2 [#155](https://github.com/openfisca/openfisca-survey-manager/pull/155) - -* Technical changes - - Improve error message in build_collection (fix previous version) - -### 0.37.1 [#154](https://github.com/openfisca/openfisca-survey-manager/pull/154) - -* Technical changes - - Improve error message in build_collection - -## 0.37.0 - -* Technical changes - - Add ignorecase argument to Survey.get_values - -### 0.36.3 [#152](https://github.com/openfisca/openfisca-survey-manager/pull/152) - -* Technical changes - - Fix asof for `TaxScale` - - Use `simulation.get_known_periods` instead of `Holder`'s method in `summarize_variable` - -## 0.36.0 [#152](https://github.com/openfisca/openfisca-survey-manager/pull/152) - -* Technical changes - - Create collections directory when it is missing - -### 0.35.2 [#150](https://github.com/openfisca/openfisca-survey-manager/pull/150) - -* Technical changes - - Fix assets inclusion - -### 0.35.1 [#149](https://github.com/openfisca/openfisca-survey-manager/pull/149) - -* Technical 
changes - - Fix deprecation in pandas. - - Fix stripping of coicop categories - -## 0.35 [#148](https://github.com/openfisca/openfisca-survey-manager/pull/148) - -* Introduce some functions to deal with coicop nomenclature - -## 0.34 [#147](https://github.com/openfisca/openfisca-survey-manager/pull/147) - -* Better handling of categorical variables - -## 0.33 [#145](https://github.com/openfisca/openfisca-survey-manager/pull/145) - -* Convert string-like columns to category and save to HDF files in table mode - -### 0.32.1 [#144](https://github.com/openfisca/openfisca-survey-manager/pull/144) - -* Fix typo (remove quotes) in inflate - -## 0.32 [#143](https://github.com/openfisca/openfisca-survey-manager/pull/143) - -* Remove python 2 unicode marks `u"` and `u'`. - -## 0.31 [#140](https://github.com/openfisca/openfisca-survey-manager/pull/140) - -* Group column dropping since DataFrame.drop is expensive. - -### 0.30.1 [#137](https://github.com/openfisca/openfisca-survey-manager/pull/137) - -* Fix bug in input data loader - -## 0.30.0 [#136](https://github.com/openfisca/openfisca-survey-manager/pull/136) - -* Adding description -* Adding function documentation. - -## 0.29.0 [#134](https://github.com/openfisca/openfisca-survey-manager/pull/134) - -* New features - - Introduce compute_marginal_tax_rate. 
- -## 0.28.0 [#133](https://github.com/openfisca/openfisca-survey-manager/pull/133) - -- Fix _set_used_as_input_variables_by_entity -- Add missing custom_input_data_frame before initializing the data -- Fix entity ids setting - -## 0.27.0 [#132](https://github.com/openfisca/openfisca-survey-manager/pull/132) - -* Technical changes - - Fix create_data_frame_by_entity - - Fix some deprecations - -## 0.26.0 - -* New features - - Neutralized variables are now correctly handled by summarize_variable - - Extend testing to doctest - -## 0.25.0 [#126](https://github.com/openfisca/openfisca-survey-manager/pull/126) - -* New features - - create_data_frame_by_entity is able to handle expressions for filtering (filter_by can be an expression) - - This allows compute_aggregate and compute_pivot_table to handle expressions as well for filter_by. - -* Deprecations - - Deprecate helper get_entity - - Deprecate helper get_weights - -## 0.24.0 [#127](https://github.com/openfisca/openfisca-survey-manager/pull/127) - -* Fix a bug in create_data_frame_by_entity - -## 0.23.0 [#124](https://github.com/openfisca/openfisca-survey-manager/pull/124) - -* Rename weight_column_name_by_entity to weight_variable_by_entity - -## 0.22.0 [#123](https://github.com/openfisca/openfisca-survey-manager/pull/123) - -* Add github templates - -## 0.21.0 [#122](https://github.com/openfisca/openfisca-survey-manager/pull/122) - -* Use SimulationBuilder.join_with_persons to initialize entities - -## 0.20.0 [#120](https://github.com/openfisca/openfisca-survey-manager/pull/120) - -* Adapt to SimulationBuilder shipping with openfisca-core v34 - -### 0.19.1 [#107](https://github.com/openfisca/openfisca-survey-manager/pull/107) - -* Fix `set_table_in_survey` - -## 0.19.0 [#103](https://github.com/openfisca/openfisca-survey-manager/pull/103) - -* Add a `--path PATH` option to `build-collection` - -### 0.18.5 [#101](https://github.com/openfisca/openfisca-survey-manager/pull/101) - -* Add documentation to `init_from_data` 
-* Split setters to gain readability - -### 0.18.4 - -* Add badges to help and reassure users/contributors - -### 0.18.3 - -* Update `setup.py` with missing dependencies - -### 0.18.2 - -* Incrementally fixing Calibration - -### 0.18.1 - -* Lower version for pandas dependency - -## 0.18.0 - -* Add a way of creating a raw_data.ini file on Google Colab - -### 0.17.5 - -* Add pyxdg as a core dependency - -### 0.17.4 - -* Fix config.ini initialisation -* Remove ptyhon2 tests - -### 0.17.3 - -* Update MANIFEST.in - -### 0.17.2 - -* Minor change in dependencies to allow more flexibility and use in Binder - -### 0.17.1 - -* Minor Python 2/3 compatibility string issue fixed - -## 0.17 - -* Fix, test and document calmar - -### 0.16.5 - -* Fix asof -* Clean Makefile -* Clean style - -### 0.16.4 - -* Use `simulation.delete_arrays` introduced by openfisca-core version 24.10.0 - -### 0.16.2, 0.16.3 - -* Add stata file conversion helper for survey scenarios - -### 0.16.1 - -* Rename test_random_data_generator method to create_randomly_initialized_survey_scenario -* Improve doc tests -* use pytest instead of nosetest - -## 0.16.0 - -* Provide summarize_variable for enums - -### 0.15.3 - -* Fix asof - -### 0.15.2 - -* Test tagging - -### 0.15.1 - -* Remove travis config file - -## 0.15.0 - -* openfisca-survey-manager can be used with both python 2 and 3 - -### 0.14.2 - -* Use `simulation.set_input` introduced by openfisca-core version 24.5.0 - -### 0.14.1 - -* Use [weightedcalcs](https://github.com/jsvine/weightedcalcs) to compute quantiles - -## 0.14.0 - -* Introduce new tools: `asof` extract from any tax_benefit_system its variables and parameters as of some date - -## 0.13.0 - -* Introduce new option : add the `count_non_zero` value in the `aggfunc` argument of `compute_aggregate` - -## 0.12 - -* Introduce new `SurveyScenario` methods: - - dump_simulations: dumps the `survey_scenario simulations - - restore_simulations: retores previously dumped `survey_scenario simulations - 
-### 0.11.1 - -* Fix travis tests - -## 0.11.0 - -* Add legislation parameters inflator - -### 0.10.1 - -* Cleaner checks for travis use - -### 0.10 - -* Migrate to a new method to pass data to SurveyScenario - -### 0.9.10 - -* Add a difference argument for compute_aggregate (fixes #45) - -### 0.9.9 - -* Add `trace` and `debug` attributes to `AbstractSurveyScenario` to use with `new_simulation` - -### 0.9.8 - -* Create directory for config templates files - -### 0.9.7 - -* Remove unused imports - -### 0.9.6 - -* Add a Quantile class inheriting for Variable - -### 0.9.5 - -* Pandas deprecates the use of sort_index for sort_values -* Numpy [deprecates use of np.float with issubdtype](https://github.com/numpy/numpy/pull/9505) - -### 0.9.4 - -* Fix bug when initialising mono-entity (person-only) TaxBenefitSystem - -### 0.9.3 - -* Fix difference pivot_table computation -* Clarify code (use variable instead of column) and add some doctring - -### 0.9.2 - -* Hack to custom default_config_files_diretory at CASD when using taxipp - -## 0.9.0 - -* Migrate to openfisca-core v20 syntax -* Fix a bug in `create_data_frame_by_entity` - -### 0.8.13 - -* Migrate to openfisca-core v14.1.2 syntax - -### 0.8.12 - -* Fix a bug resulting from pandas [v0.20 pivot_table fix](https://github.com/pandas-dev/pandas/pull/13554) - -### 0.8.11 - -* Decrease logs verbosity by starting using the DEBUG level more often - -### 0.8.10 - -* Fix a bug in `compute_pivot_table` which was no more able to compute non-difference pivot-table - -### 0.8.9 - -* Fix a bug when variables are missing form the tax and benefit system in `create_entity_by_dataframe` - -### 0.8.8 - -* Improve handling of difference option in `create_entity_by_dataframe` - -### 0.8.7 - -* Improve `create_entity_by_dataframe` by adding `expressions` and `merge` options and enhancing `filter_by` - -### 0.8.4 - -* Fix `summarize_variable` when dealing with neutralized variables - -### 0.8.3 - -* Add humanize to dependencies - -### 0.8.2 
- -* Fix a bug when `output_cache` is unset - -### 0.8.1 - -* Add automatic push to PyPi - -## 0.8.0 - -* Improve `compute_aggregates` and `compute_pivot_table` - -### 0.6.1 - -* Fix `config_files_directory` default in utils - -### 0.6.0 - -* Adapat to new syntax (progressive elimination of `entity_key_plural`) - -### 0.5.2 - -* Fix path of entry point build-collection -* Add entry point build-collection - -### 0.5.1 - -* Fix tagging - -## 0.5 - -* Create Changelog.md -* Check version and changelog when pushing diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md deleted file mode 100644 index b0314696..00000000 --- a/CONTRIBUTING.md +++ /dev/null @@ -1,75 +0,0 @@ -Thank you for wanting to contribute to OpenFisca-Survey-Manager! :smiley: - -TL;DR: [GitHub Flow](https://guides.github.com/introduction/flow/), [SemVer](http://semver.org/), sweat on naming and messages. - - -## Pull requests - -We follow the [GitHub Flow](https://guides.github.com/introduction/flow/): all code contributions are submitted via a pull request towards the `master` branch. - -Opening a Pull Request means you want that code to be merged. If you want to only discuss it, send a link to your branch along with your questions through whichever communication channel you prefer. - - -### Peer reviews - -All pull requests must be reviewed by someone else than their original author. - -> In case of a lack of available reviewers, one may review oneself, but only after at least 24 hours have passed without working on the code to review. - -To help reviewers, make sure to add to your PR a **clear text explanation** of your changes. - -In case of breaking changes, you **must** give details about what features were deprecated. - -> You must also provide guidelines to help users adapt their code to be compatible with the new version of the package. - -## Debug tests in VSCode - -To launch the debugger in VSCode, you need to create a `launch.json` file in the `.vscode` folder at the root of the project. 
The content of the file should be the following: - -```json -{ - "version": "0.2.0", - "configurations": [ - { - "name": "Python debug Pytest", - "type": "python", - "request": "launch", - "module": "pytest", - "args": ["${file}"], - "console": "integratedTerminal", - "env": { - "CI": "1", - } - } - - ] -} -``` - -If you have an error "ModuleNotFoundError" create your environment in `.venv` folder and install the requirements. - -## Advertising changes - -### Version number - -We follow the [semantic versioning](http://semver.org/) spec: any change impacts the version number, and the version number conveys API compatibility information **only**. - -Examples: - -#### Patch bump - -- Internal optimization, with no consequence to the package's API. - -#### Minor bump - -- Adding a helper. - -#### Major bump - -- Renaming or deprecating a helper. -- Changing the signature or behaviour of a helper. - - -### Changelog - -Document all changes in the `CHANGELOG.md` file, following the examples already there. diff --git a/LICENSE.AGPL.txt b/LICENSE.AGPL.txt deleted file mode 100644 index 3ffc5678..00000000 --- a/LICENSE.AGPL.txt +++ /dev/null @@ -1,661 +0,0 @@ -GNU AFFERO GENERAL PUBLIC LICENSE - Version 3, 19 November 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The GNU Affero General Public License is a free, copyleft license for -software and other kinds of works, specifically designed to ensure -cooperation with the community in the case of network server software. - - The licenses for most software and other practical works are designed -to take away your freedom to share and change the works. By contrast, -our General Public Licenses are intended to guarantee your freedom to -share and change all versions of a program--to make sure it remains free -software for all its users. 
- - When we speak of free software, we are referring to freedom, not -price. Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -them if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs, and that you know you can do these things. - - Developers that use our General Public Licenses protect your rights -with two steps: (1) assert copyright on the software, and (2) offer -you this License which gives you legal permission to copy, distribute -and/or modify the software. - - A secondary benefit of defending all users' freedom is that -improvements made in alternate versions of the program, if they -receive widespread use, become available for other developers to -incorporate. Many developers of free software are heartened and -encouraged by the resulting cooperation. However, in the case of -software used on network servers, this result may fail to come about. -The GNU General Public License permits making a modified version and -letting the public access it on a server without ever releasing its -source code to the public. - - The GNU Affero General Public License is designed specifically to -ensure that, in such cases, the modified source code becomes available -to the community. It requires the operator of a network server to -provide the source code of the modified version running there to the -users of that server. Therefore, public use of a modified version, on -a publicly accessible server, gives the public access to the source -code of the modified version. - - An older license, called the Affero General Public License and -published by Affero, was designed to accomplish similar goals. This is -a different license, not a version of the Affero GPL, but Affero has -released a new version of the Affero GPL which permits relicensing under -this license. 
- - The precise terms and conditions for copying, distribution and -modification follow. - - TERMS AND CONDITIONS - - 0. Definitions. - - "This License" refers to version 3 of the GNU Affero General Public License. - - "Copyright" also means copyright-like laws that apply to other kinds of -works, such as semiconductor masks. - - "The Program" refers to any copyrightable work licensed under this -License. Each licensee is addressed as "you". "Licensees" and -"recipients" may be individuals or organizations. - - To "modify" a work means to copy from or adapt all or part of the work -in a fashion requiring copyright permission, other than the making of an -exact copy. The resulting work is called a "modified version" of the -earlier work or a work "based on" the earlier work. - - A "covered work" means either the unmodified Program or a work based -on the Program. - - To "propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. Propagation includes copying, -distribution (with or without modification), making available to the -public, and in some countries other activities as well. - - To "convey" a work means any kind of propagation that enables other -parties to make or receive copies. Mere interaction with a user through -a computer network, with no transfer of a copy, is not conveying. - - An interactive user interface displays "Appropriate Legal Notices" -to the extent that it includes a convenient and prominently visible -feature that (1) displays an appropriate copyright notice, and (2) -tells the user that there is no warranty for the work (except to the -extent that warranties are provided), that licensees may convey the -work under this License, and how to view a copy of this License. 
If -the interface presents a list of user commands or options, such as a -menu, a prominent item in the list meets this criterion. - - 1. Source Code. - - The "source code" for a work means the preferred form of the work -for making modifications to it. "Object code" means any non-source -form of a work. - - A "Standard Interface" means an interface that either is an official -standard defined by a recognized standards body, or, in the case of -interfaces specified for a particular programming language, one that -is widely used among developers working in that language. - - The "System Libraries" of an executable work include anything, other -than the work as a whole, that (a) is included in the normal form of -packaging a Major Component, but which is not part of that Major -Component, and (b) serves only to enable use of the work with that -Major Component, or to implement a Standard Interface for which an -implementation is available to the public in source code form. A -"Major Component", in this context, means a major essential component -(kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to -produce the work, or an object code interpreter used to run it. - - The "Corresponding Source" for a work in object code form means all -the source code needed to generate, install, and (for an executable -work) run the object code and to modify the work, including scripts to -control those activities. However, it does not include the work's -System Libraries, or general-purpose tools or generally available free -programs which are used unmodified in performing those activities but -which are not part of the work. 
For example, Corresponding Source -includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically -linked subprograms that the work is specifically designed to require, -such as by intimate data communication or control flow between those -subprograms and other parts of the work. - - The Corresponding Source need not include anything that users -can regenerate automatically from other parts of the Corresponding -Source. - - The Corresponding Source for a work in source code form is that -same work. - - 2. Basic Permissions. - - All rights granted under this License are granted for the term of -copyright on the Program, and are irrevocable provided the stated -conditions are met. This License explicitly affirms your unlimited -permission to run the unmodified Program. The output from running a -covered work is covered by this License only if the output, given its -content, constitutes a covered work. This License acknowledges your -rights of fair use or other equivalent, as provided by copyright law. - - You may make, run and propagate covered works that you do not -convey, without conditions so long as your license otherwise remains -in force. You may convey covered works to others for the sole purpose -of having them make modifications exclusively for you, or provide you -with facilities for running those works, provided that you comply with -the terms of this License in conveying all material for which you do -not control copyright. Those thus making or running the covered works -for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of -your copyrighted material outside their relationship with you. - - Conveying under any other circumstances is permitted solely under -the conditions stated below. Sublicensing is not allowed; section 10 -makes it unnecessary. - - 3. 
Protecting Users' Legal Rights From Anti-Circumvention Law. - - No covered work shall be deemed part of an effective technological -measure under any applicable law fulfilling obligations under article -11 of the WIPO copyright treaty adopted on 20 December 1996, or -similar laws prohibiting or restricting circumvention of such -measures. - - When you convey a covered work, you waive any legal power to forbid -circumvention of technological measures to the extent such circumvention -is effected by exercising rights under this License with respect to -the covered work, and you disclaim any intention to limit operation or -modification of the work as a means of enforcing, against the work's -users, your or third parties' legal rights to forbid circumvention of -technological measures. - - 4. Conveying Verbatim Copies. - - You may convey verbatim copies of the Program's source code as you -receive it, in any medium, provided that you conspicuously and -appropriately publish on each copy an appropriate copyright notice; -keep intact all notices stating that this License and any -non-permissive terms added in accord with section 7 apply to the code; -keep intact all notices of the absence of any warranty; and give all -recipients a copy of this License along with the Program. - - You may charge any price or no price for each copy that you convey, -and you may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. - - You may convey a work based on the Program, or the modifications to -produce it from the Program, in the form of source code under the -terms of section 4, provided that you also meet all of these conditions: - - a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. - - b) The work must carry prominent notices stating that it is - released under this License and any conditions added under section - 7. 
This requirement modifies the requirement in section 4 to - "keep intact all notices". - - c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. - - d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. - - A compilation of a covered work with other separate and independent -works, which are not by their nature extensions of the covered work, -and which are not combined with it such as to form a larger program, -in or on a volume of a storage or distribution medium, is called an -"aggregate" if the compilation and its resulting copyright are not -used to limit the access or legal rights of the compilation's users -beyond what the individual works permit. Inclusion of a covered work -in an aggregate does not cause this License to apply to the other -parts of the aggregate. - - 6. Conveying Non-Source Forms. - - You may convey a covered work in object code form under the terms -of sections 4 and 5, provided that you also convey the -machine-readable Corresponding Source under the terms of this License, -in one of these ways: - - a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. 
- - b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the - Corresponding Source from a network server at no charge. - - c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. - - d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. You need not require recipients to copy the - Corresponding Source along with the object code. If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. 
- - e) Convey the object code using peer-to-peer transmission, provided - you inform other peers where the object code and Corresponding - Source of the work are being offered to the general public at no - charge under subsection 6d. - - A separable portion of the object code, whose source code is excluded -from the Corresponding Source as a System Library, need not be -included in conveying the object code work. - - A "User Product" is either (1) a "consumer product", which means any -tangible personal property which is normally used for personal, family, -or household purposes, or (2) anything designed or sold for incorporation -into a dwelling. In determining whether a product is a consumer product, -doubtful cases shall be resolved in favor of coverage. For a particular -product received by a particular user, "normally used" refers to a -typical or common use of that class of product, regardless of the status -of the particular user or of the way in which the particular user -actually uses, or expects or is expected to use, the product. A product -is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent -the only significant mode of use of the product. - - "Installation Information" for a User Product means any methods, -procedures, authorization keys, or other information required to install -and execute modified versions of a covered work in that User Product from -a modified version of its Corresponding Source. The information must -suffice to ensure that the continued functioning of the modified object -code is in no case prevented or interfered with solely because -modification has been made. 
- - If you convey an object code work under this section in, or with, or -specifically for use in, a User Product, and the conveying occurs as -part of a transaction in which the right of possession and use of the -User Product is transferred to the recipient in perpetuity or for a -fixed term (regardless of how the transaction is characterized), the -Corresponding Source conveyed under this section must be accompanied -by the Installation Information. But this requirement does not apply -if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has -been installed in ROM). - - The requirement to provide Installation Information does not include a -requirement to continue to provide support service, warranty, or updates -for a work that has been modified or installed by the recipient, or for -the User Product in which it has been modified or installed. Access to a -network may be denied when the modification itself materially and -adversely affects the operation of the network or violates the rules and -protocols for communication across the network. - - Corresponding Source conveyed, and Installation Information provided, -in accord with this section must be in a format that is publicly -documented (and with an implementation available to the public in -source code form), and must require no special password or key for -unpacking, reading or copying. - - 7. Additional Terms. - - "Additional permissions" are terms that supplement the terms of this -License by making exceptions from one or more of its conditions. -Additional permissions that are applicable to the entire Program shall -be treated as though they were included in this License, to the extent -that they are valid under applicable law. 
If additional permissions -apply only to part of the Program, that part may be used separately -under those permissions, but the entire Program remains governed by -this License without regard to the additional permissions. - - When you convey a copy of a covered work, you may at your option -remove any additional permissions from that copy, or from any part of -it. (Additional permissions may be written to require their own -removal in certain cases when you modify the work.) You may place -additional permissions on material, added by you to a covered work, -for which you have or can give appropriate copyright permission. - - Notwithstanding any other provision of this License, for material you -add to a covered work, you may (if authorized by the copyright holders of -that material) supplement the terms of this License with terms: - - a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or - - b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or - - c) Prohibiting misrepresentation of the origin of that material, or - requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or - - d) Limiting the use for publicity purposes of names of licensors or - authors of the material; or - - e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or - - f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the material (or modified versions of - it) with contractual assumptions of liability to the recipient, for - any liability that these contractual assumptions directly impose on - those licensors and authors. - - All other non-permissive additional terms are considered "further -restrictions" within the meaning of section 10. 
If the Program as you -received it, or any part of it, contains a notice stating that it is -governed by this License along with a term that is a further -restriction, you may remove that term. If a license document contains -a further restriction but permits relicensing or conveying under this -License, you may add to a covered work material governed by the terms -of that license document, provided that the further restriction does -not survive such relicensing or conveying. - - If you add terms to a covered work in accord with this section, you -must place, in the relevant source files, a statement of the -additional terms that apply to those files, or a notice indicating -where to find the applicable terms. - - Additional terms, permissive or non-permissive, may be stated in the -form of a separately written license, or stated as exceptions; -the above requirements apply either way. - - 8. Termination. - - You may not propagate or modify a covered work except as expressly -provided under this License. Any attempt otherwise to propagate or -modify it is void, and will automatically terminate your rights under -this License (including any patent licenses granted under the third -paragraph of section 11). - - However, if you cease all violation of this License, then your -license from a particular copyright holder is reinstated (a) -provisionally, unless and until the copyright holder explicitly and -finally terminates your license, and (b) permanently, if the copyright -holder fails to notify you of the violation by some reasonable means -prior to 60 days after the cessation. - - Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. 
- - Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. If your rights have been terminated and not permanently -reinstated, you do not qualify to receive new licenses for the same -material under section 10. - - 9. Acceptance Not Required for Having Copies. - - You are not required to accept this License in order to receive or -run a copy of the Program. Ancillary propagation of a covered work -occurring solely as a consequence of using peer-to-peer transmission -to receive a copy likewise does not require acceptance. However, -nothing other than this License grants you permission to propagate or -modify any covered work. These actions infringe copyright if you do -not accept this License. Therefore, by modifying or propagating a -covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - - Each time you convey a covered work, the recipient automatically -receives a license from the original licensors, to run, modify and -propagate that work, subject to this License. You are not responsible -for enforcing compliance by third parties with this License. - - An "entity transaction" is a transaction transferring control of an -organization, or substantially all assets of one, or subdividing an -organization, or merging organizations. If propagation of a covered -work results from an entity transaction, each party to that -transaction who receives a copy of the work also receives whatever -licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the -Corresponding Source of the work from the predecessor in interest, if -the predecessor has it or can get it with reasonable efforts. - - You may not impose any further restrictions on the exercise of the -rights granted or affirmed under this License. 
For example, you may -not impose a license fee, royalty, or other charge for exercise of -rights granted under this License, and you may not initiate litigation -(including a cross-claim or counterclaim in a lawsuit) alleging that -any patent claim is infringed by making, using, selling, offering for -sale, or importing the Program or any portion of it. - - 11. Patents. - - A "contributor" is a copyright holder who authorizes use under this -License of the Program or a work on which the Program is based. The -work thus licensed is called the contributor's "contributor version". - - A contributor's "essential patent claims" are all patent claims -owned or controlled by the contributor, whether already acquired or -hereafter acquired, that would be infringed by some manner, permitted -by this License, of making, using, or selling its contributor version, -but do not include claims that would be infringed only as a -consequence of further modification of the contributor version. For -purposes of this definition, "control" includes the right to grant -patent sublicenses in a manner consistent with the requirements of -this License. - - Each contributor grants you a non-exclusive, worldwide, royalty-free -patent license under the contributor's essential patent claims, to -make, use, sell, offer for sale, import and otherwise run, modify and -propagate the contents of its contributor version. - - In the following three paragraphs, a "patent license" is any express -agreement or commitment, however denominated, not to enforce a patent -(such as an express permission to practice a patent or covenant not to -sue for patent infringement). To "grant" such a patent license to a -party means to make such an agreement or commitment not to enforce a -patent against the party. 
- - If you convey a covered work, knowingly relying on a patent license, -and the Corresponding Source of the work is not available for anyone -to copy, free of charge and under the terms of this License, through a -publicly available network server or other readily accessible means, -then you must either (1) cause the Corresponding Source to be so -available, or (2) arrange to deprive yourself of the benefit of the -patent license for this particular work, or (3) arrange, in a manner -consistent with the requirements of this License, to extend the patent -license to downstream recipients. "Knowingly relying" means you have -actual knowledge that, but for the patent license, your conveying the -covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that -country that you have reason to believe are valid. - - If, pursuant to or in connection with a single transaction or -arrangement, you convey, or propagate by procuring conveyance of, a -covered work, and grant a patent license to some of the parties -receiving the covered work authorizing them to use, propagate, modify -or convey a specific copy of the covered work, then the patent license -you grant is automatically extended to all recipients of the covered -work and works based on it. - - A patent license is "discriminatory" if it does not include within -the scope of its coverage, prohibits the exercise of, or is -conditioned on the non-exercise of one or more of the rights that are -specifically granted under this License. 
You may not convey a covered -work if you are a party to an arrangement with a third party that is -in the business of distributing software, under which you make payment -to the third party based on the extent of your activity of conveying -the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory -patent license (a) in connection with copies of the covered work -conveyed by you (or copies made from those copies), or (b) primarily -for and in connection with specific products or compilations that -contain the covered work, unless you entered into that arrangement, -or that patent license was granted, prior to 28 March 2007. - - Nothing in this License shall be construed as excluding or limiting -any implied license or other defenses to infringement that may -otherwise be available to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - - If conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot convey a -covered work so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you may -not convey it at all. For example, if you agree to terms that obligate you -to collect a royalty for further conveying from those to whom you convey -the Program, the only way you could satisfy both those terms and this -License would be to refrain entirely from conveying the Program. - - 13. Remote Network Interaction; Use with the GNU General Public License. 
- - Notwithstanding any other provision of this License, if you modify the -Program, your modified version must prominently offer all users -interacting with it remotely through a computer network (if your version -supports such interaction) an opportunity to receive the Corresponding -Source of your version by providing access to the Corresponding Source -from a network server at no charge, through some standard or customary -means of facilitating copying of software. This Corresponding Source -shall include the Corresponding Source for any work covered by version 3 -of the GNU General Public License that is incorporated pursuant to the -following paragraph. - - Notwithstanding any other provision of this License, you have -permission to link or combine any covered work with a work licensed -under version 3 of the GNU General Public License into a single -combined work, and to convey the resulting work. The terms of this -License will continue to apply to the part which is the covered work, -but the work with which it is combined will remain governed by version -3 of the GNU General Public License. - - 14. Revised Versions of this License. - - The Free Software Foundation may publish revised and/or new versions of -the GNU Affero General Public License from time to time. Such new versions -will be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - - Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU Affero General -Public License "or any later version" applies to it, you have the -option of following the terms and conditions either of that numbered -version or of any later version published by the Free Software -Foundation. If the Program does not specify a version number of the -GNU Affero General Public License, you may choose any version ever published -by the Free Software Foundation. 
- - If the Program specifies that a proxy can decide which future -versions of the GNU Affero General Public License can be used, that proxy's -public statement of acceptance of a version permanently authorizes you -to choose that version for the Program. - - Later license versions may give you additional or different -permissions. However, no additional obligations are imposed on any -author or copyright holder as a result of your choosing to follow a -later version. - - 15. Disclaimer of Warranty. - - THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY -APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT -HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY -OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM -IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF -ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. Limitation of Liability. - - IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS -THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY -GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE -USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF -DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD -PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), -EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF -SUCH DAMAGES. - - 17. Interpretation of Sections 15 and 16. 
- - If the disclaimer of warranty and limitation of liability provided -above cannot be given local legal effect according to their terms, -reviewing courts shall apply local law that most closely approximates -an absolute waiver of all civil liability in connection with the -Program, unless a warranty or assumption of liability accompanies a -copy of the Program in return for a fee. - - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -state the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. - - - Copyright (C) - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU Affero General Public License as published - by the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU Affero General Public License for more details. - - You should have received a copy of the GNU Affero General Public License - along with this program. If not, see . - -Also add information on how to contact you by electronic and paper mail. - - If your software can interact with users remotely through a computer -network, you should also make sure that it provides a way for users to -get its source. For example, if your program is a web application, its -interface could display a "Source" link that leads users to an archive -of the code. 
There are many ways you could offer source, and different -solutions will be better for different programs; see section 13 for the -specific requirements. - - You should also get your employer (if you work as a programmer) or school, -if any, to sign a "copyright disclaimer" for the program, if necessary. -For more information on this, and how to apply and follow the GNU AGPL, see -. \ No newline at end of file diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index 56f7ebd6..00000000 --- a/MANIFEST.in +++ /dev/null @@ -1,3 +0,0 @@ -recursive-include openfisca_survey_manager/config_files_templates *.ini -include openfisca_survey_manager/tests/data_files/config_template.ini -recursive-include openfisca_survey_manager/assets *.csv diff --git a/Makefile b/Makefile deleted file mode 100644 index 34dbcd15..00000000 --- a/Makefile +++ /dev/null @@ -1,47 +0,0 @@ -all: test - -uninstall: - pip freeze | grep -v "^-e" | sed "s/@.*//" | xargs pip uninstall -y - -clean: - rm -rf build dist - rm -f openfisca_survey_manager/tests/data_files/config.ini - rm -f openfisca_survey_manager/tests/data_files/test_parquet_collection.json - rm -rf openfisca_survey_manager/tests/data_files/test_multiple_parquet_collection - rm -rf openfisca_survey_manager/tests/data_files/test_parquet_collection - rm -rf openfisca_survey_manager/tests/data_files/test_random_generator.json - find . -name '*.pyc' -exec rm \{\} \; - -deps: - pip install build twine - -install: deps - @# Install OpenFisca-Survey-Manager for development. - @# `make install` installs the editable version of OpenFisca-Survey-Manager. - @# This allows contributors to test as they code. - pip install --editable .[dev,sas] - -build: clean deps - @# Install OpenFisca-Survey-Manager for deployment and publishing. - @# `make build` allows us to be be sure tests are run against the packaged version - @# of OpenFisca-Survey-Manager, the same we put in the hands of users and reusers. 
- python -m build - pip uninstall --yes OpenFisca-Survey-Manager - find dist -name "*.whl" -exec pip install {}[dev,sas] \; - -check-syntax-errors: - python -m compileall -q . - -format-style: - isort . - ruff format . - -check-style: - ruff check . - -test: clean check-syntax-errors check-style - @# Launch tests from openfisca_survey_manager/tests directory (and not .) because TaxBenefitSystem must be initialized - @# before parsing source files containing formulas. - rm -rf ./openfisca_survey_manager/tests/data_files/config.ini - rm -rf ./openfisca_survey_manager/tests/data_files/dump - pytest diff --git a/README.md b/README.md deleted file mode 100644 index 1ea634ec..00000000 --- a/README.md +++ /dev/null @@ -1,293 +0,0 @@ -# OpenFisca Survey Manager - -[![Newsletter](https://img.shields.io/badge/newsletter-subscribe!-informational.svg?style=flat)](mailto:contact%40openfisca.org?subject=Subscribe%20to%20your%20newsletter%20%7C%20S'inscrire%20%C3%A0%20votre%20newsletter&body=%5BEnglish%20version%20below%5D%0A%0ABonjour%2C%0A%0AVotre%C2%A0pr%C3%A9sence%C2%A0ici%C2%A0nous%C2%A0ravit%C2%A0!%20%F0%9F%98%83%0A%0AEnvoyez-nous%20cet%20email%20pour%20que%20l'on%20puisse%20vous%20inscrire%20%C3%A0%20la%20newsletter.%20%0A%0AAh%C2%A0!%20Et%20si%20vous%20pouviez%20remplir%20ce%20petit%20questionnaire%2C%20%C3%A7a%20serait%20encore%20mieux%C2%A0!%0Ahttps%3A%2F%2Fgoo.gl%2Fforms%2F45M0VR1TYKD1RGzX2%0A%0AAmiti%C3%A9%2C%0AL%E2%80%99%C3%A9quipe%20OpenFisca%0A%0A%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%20ENGLISH%20VERSION%20%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%0A%0AHi%2C%20%0A%0AWe're%20glad%20to%20see%20you%20here!%20%F0%9F%98%83%0A%0APlease%20send%20us%20this%20email%2C%20so%20we%20can%20subscribe%20you%20to%20the%20newsletter.%0A%0AAlso%2C%20if%20you%20can%20fill%20out%20this%20short%20survey%2C%20even%20better!%0Ahttps%3A%2F%2Fgoo.gl%2Fforms%2FsOg8K1abhhm441LG2%0A%0ACheers%2C%0AThe%20OpenFisca%20Team) 
-[![Twitter](https://img.shields.io/badge/twitter-follow%20us!-9cf.svg?style=flat)](https://twitter.com/intent/follow?screen_name=openfisca) -[![Slack](https://img.shields.io/badge/slack-join%20us!-blueviolet.svg?style=flat)](mailto:contact%40openfisca.org?subject=Join%20you%20on%20Slack%20%7C%20Nous%20rejoindre%20sur%20Slack&body=%5BEnglish%20version%20below%5D%0A%0ABonjour%2C%0A%0AVotre%C2%A0pr%C3%A9sence%C2%A0ici%C2%A0nous%C2%A0ravit%C2%A0!%20%F0%9F%98%83%0A%0ARacontez-nous%20un%20peu%20de%20vous%2C%20et%20du%20pourquoi%20de%20votre%20int%C3%A9r%C3%AAt%20de%20rejoindre%20la%20communaut%C3%A9%20OpenFisca%20sur%20Slack.%0A%0AAh%C2%A0!%20Et%20si%20vous%20pouviez%20remplir%20ce%20petit%20questionnaire%2C%20%C3%A7a%20serait%20encore%20mieux%C2%A0!%0Ahttps%3A%2F%2Fgoo.gl%2Fforms%2F45M0VR1TYKD1RGzX2%0A%0AN%E2%80%99oubliez%20pas%20de%20nous%20envoyer%20cet%20email%C2%A0!%20Sinon%2C%20on%20ne%20pourra%20pas%20vous%20contacter%20ni%20vous%20inviter%20sur%20Slack.%0A%0AAmiti%C3%A9%2C%0AL%E2%80%99%C3%A9quipe%20OpenFisca%0A%0A%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%20ENGLISH%20VERSION%20%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%3D%0A%0AHi%2C%20%0A%0AWe're%20glad%20to%20see%20you%20here!%20%F0%9F%98%83%0A%0APlease%20tell%20us%20a%20bit%20about%20you%20and%20why%20you%20want%20to%20join%20the%20OpenFisca%20community%20on%20Slack.%0A%0AAlso%2C%20if%20you%20can%20fill%20out%20this%20short%20survey%2C%20even%20better!%0Ahttps%3A%2F%2Fgoo.gl%2Fforms%2FsOg8K1abhhm441LG2.%0A%0ADon't%20forget%20to%20send%20us%20this%20email!%20Otherwise%20we%20won't%20be%20able%20to%20contact%20you%20back%2C%20nor%20invite%20you%20on%20Slack.%0A%0ACheers%2C%0AThe%20OpenFisca%20Team) -[![GitHub Actions](https://github.com/openfisca/openfisca-survey-manager/actions/workflows/workflow.yml/badge.svg?branch=master&event=push)](https://github.com/openfisca/openfisca-survey-manager/actions?query=branch%3Amaster) 
-[![Coveralls](https://img.shields.io/coveralls/github/openfisca/openfisca-survey-manager/master.svg?style=flat)](https://coveralls.io/github/openfisca/openfisca-survey-manager?branch=master) -[![Python](https://img.shields.io/pypi/pyversions/openfisca-survey-manager.svg)](https://pypi.python.org/pypi/openfisca-survey-manager) -[![PyPi](https://img.shields.io/pypi/v/openfisca-survey-manager.svg?style=flat)](https://pypi.python.org/pypi/openfisca-survey-manager) - -## [EN] Introduction - -[OpenFisca](https://openfisca.org) is a versatile microsimulation free software. You can check the [online documentation](https://openfisca.org/doc/) for more details. - -This repository contains the Survey-Manager module, to work with OpenFisca and survey data. - -It provides two main features: -* A Python API to access data in [Hierarchical Data Format](https://en.wikipedia.org/wiki/Hierarchical_Data_Format) (HDF) or [Parquet](https://parquet.apache.org/). -* A script that transforms Parquet, SAS, Stata, SPSS, and CSV data files to HDF data files, along with some metadata so they can be used by the Python API. If the format is Parquet, it is kept as is. - -> For France survey data, you might find useful information on the next steps in [openfisca-france-data](https://github.com/openfisca/openfisca-france-data) repository. - -## [FR] Introduction - -[OpenFisca](https://openfisca.org) est un logiciel libre de micro-simulation. Pour plus d'information, vous pouvez consulter la [documentation officielle](https://openfisca.org/doc/). - -Ce dépôt contient le module Survey-Manager. Il facilite l'usage d'OpenFisca avec des données d'enquête. - -Il fournit deux fonctionnalités principales: -* Une API Python permettant l'accès à des données au format [Hierarchical Data Format](https://fr.wikipedia.org/wiki/Hierarchical_Data_Format) (HDF) ou [Parquet](https://parquet.apache.org/). 
-* Un script qui tranforme les fichiers de données aux formats SAS, Stata, SPSS, and CSV data files en fichiers de données au format HDF, avec quelques metadonnées leur permettant d'être utilisés par l'API Python. Si le format est Parquet, il est conservé tel quel. - -> Si vous disposez de données d'enquête sur la France, le dépôt [openfisca-france-data](https://github.com/openfisca/openfisca-france-data) pourrait être utile à vos prochaines étapes de traitement. - -## Environment - -OpenFisca-Survey-Manager runs on Python 3.9. More recent versions should work, but are not tested. - -## Usage - -### Installation - -#### Install with PIP - -If you're developing your own script or looking to run `OpenFisca-Survey-Manager` without editing it, you don't need to get its source code. It just needs to be known by your environment. -To do so, first, install the package with `pip`: - -```shell -pip install --upgrade pip -pip install openfisca-survey-manager -``` - -This should not display any error and end with: - -`Successfully installed [... openfisca-survey-manager-xx.xx.xx ...]` - -It comes with `build-collection` command that we will use in the next steps. - -> If you want to improve this module, please see the `Development` section below. - -#### Install with Conda - -Create an anvironment and install openfisca-survey-manager -``` -conda create -n survey python=3.9 -conda activate survey -conda install -c conda-forge -c openfisca openfisca-survey-manager -``` - -You are ready to go ! - -To exit your environment: -``` -conda deactivate -``` - -### Getting the configuration directory path - -To be able to use OpenFisca-Survey-Manager, you have to create two configuration files: -* `raw_data.ini`, -* and `config.ini`. - -To know where to copy them to, use the following command: - -```shell -build-collection --help -``` - -You should get the following result. 
- -```shell -usage: build-collection [-h] -c COLLECTION [-d] [-m] [-p PATH] [-s SURVEY] - [-v] - -optional arguments: - -h, --help show this help message and exit - -c COLLECTION, --collection COLLECTION - name of collection to build or update - -d, --replace-data erase existing survey data HDF5 file (instead of - failing when HDF5 file already exists) - -m, --replace-metadata - erase existing collection metadata JSON file (instead - of just adding new surveys) - -p PATH, --path PATH path to the config files directory (default = - /your/path/.config/openfisca-survey-manager) - -s SURVEY, --survey SURVEY - name of survey to build or update (default = all) - -v, --verbose increase output verbosity -``` - -Take note of the default configuration directory path in `-p PATH, --path PATH` option's description. This is the directory where you will set your `raw_data.ini` and `config.ini` files. In this example, it is `/Users/you/.config/openfisca-survey-manager`. - -> If you want to use a different path, you can pass the `--path /another/path` option to `build-collection`. This feature is still experimental though. - -### Editing the config files - -Configuration files are INI files (text files). - -The `raw_data.ini` lists your input surveys while `config.ini` specifies the paths to SurveyManager outputs. - -> `raw_data.ini` and `config.ini` must not be committed (they are already ignored by [`.gitignore`](.gitignore)). - -#### raw_data.ini, for inputs configuration - -To initialise your `raw_data.ini` file, you can follow these steps: - - 1. Copy the template file [raw_data_template.ini](openfisca_survey_manager/config_files_templates/raw_data_template.ini) to the configuration directory path you identified in the previous step and rename it to `raw_data.ini`. - Ex: `/your/path/.config/openfisca-survey-manager/raw_data.ini` - - 2. Edit the latter by adding a section title for your survey. 
- For example, if you name your survey `housing_survey`, you should get a line with: - ```ini - [housing_survey] - ``` - - 3. Add a reference to the location of your raw data **directory** (SAS, stata DTA files, SPSS, CSV files). - For paths in Windows, use `/` instead of `\` to separate folders. - You do not need to put quotes, even when the path name contains spaces. - - Your file should look like this: - - ```ini - [housing_survey] - - 2014 = /path/to/your/raw/data/HOUSING_2014 - ``` - -You can also set multiple surveys as follows: - -```ini -[revenue_survey] - -2014 = /path/to/your/raw/data/REVENUE_2014 -2015 = /path/to/your/raw/data/REVENUE_2015 -2016 = /path/to/your/raw/data/REVENUE_2016 - -[housing_survey] - -2014 = /path/to/your/raw/data/HOUSING_2014 -``` - -#### config.ini, for outputs configuration - -To initilalise your `config.ini` file: - - 1. Copy its template file [config_template.ini](openfisca_survey_manager/config_files_templates/config_template.ini) to your configuration directory and rename it to `config.ini`. - Ex: `/your/path/.config/openfisca-survey-manager/config.ini`. - - 2. Define a `collections_directory` path where the SurveyManager will generate your survey inputs and outputs JSON description. - Ex: `/.../openfisca-survey-manager/transformed_housing_survey` - For a `housing_survey`, you will get a `/.../openfisca-survey-manager/transformed_housing_survey/housing_survey.json` file. - - 3. Define an `output_directory` where the generated HDF file will be registered. - This directory could be a sub-directory of your `collections_directory`. - - 4. Define a `tmp_directory` that will store temporay calculation results. Its content will be deleted at the end of the calculation. - This directory could be a sub-directory of your `collections_directory`. 
- -Your `config.ini` file should look similar to this: - - ```ini - [collections] - - collections_directory = /path/to/your/collections/directory - - [data] - - output_directory = /path/to/your/data/output/directory - tmp_directory = /path/to/your/data/tmp/directory - ``` - -> Make sure those directories exist, otherwise the script will fail. - -### Building the HDF5 files - -This step will read your configuration files and you survey data and generate a HDF5 file (`.h5`) for your survey. -To build the HDF5 files, we'll use the [`build-collection`](openfisca_survey_manager/scripts/build_collection.py) script. - -Here is an example for one survey with one serie: our `housing_survey` that knows only 2014 serie. We call our survey as a collection (with `-c` option) and build the HDF5 file with this command: - -```shell -build-collection -c housing_survey -d -m -v -``` - -`-d -m` options put you on the safe side as they remove previous outputs if they exist. - -It will generate: -* A `housing_survey.json` listing a `housing_survey_2014` survey with both: - * your input `tables` and your input file paths in an `informations` key, - * the transformed survey path in a `hdf5_file_path` key. -* Your transformed survey in a `housing_survey_2014.h5` file. - -#### build-collection, what else? - -As `build-collection --help` shows, other options exist. Here are other usage examples. - -If you have multiple series of one survey like the `revenue_survey`, you can run the specific `2015` serie with: - -```shell -build-collection -c revenue_survey -s 2015 -d -m -v -``` - -Or if you want to specify a different configuration directory path: - -```shell -build-collection -p /another/path -c housing_survey -s 2014 -d -m -v -``` - -> The `--path /another/path` option is still experimental though. - -It should work. If it doesn't, please do not hesitate to [open an issue](https://github.com/openfisca/openfisca-survey-manager/issues/new). 
- -### Parquet files - -Parquet files could be used as input files. They will not be converted to HDF5. As Parquet files can only contains one table, we add a `"parquet_file"` key to each table in a survey. This key contains the path to the Parquet file, or the folder containing many parquet files for the same table. - -If using folder you have to name your files with the following pattern: `some_name_-.parquet` and keep only the files for the same table in the same folder. - -If a single file contains all the table, you can have many files for different tables in the same folder. - -## Development - -If you want to contribute to OpenFisca-Survey-Manager, please be welcomed! To install it locally in development mode: - -```shell -git clone https://github.com/openfisca/openfisca-survey-manager.git -cd openfisca-survey-manager -make install -``` - -## Testing - -To run the entire test suite: - -```sh -make test -``` - -To run the entire test suite with the same config as in Continuous Integration (CI): - -```sh -CI=True make test -``` - -## Style - -This repository adheres to a certain coding style, and we invite you to follow it for your contributions to be integrated promptly. - -To run the style checker: - -```sh -make check-style -``` - -To automatically style-format your code changes: - -```sh -make format-style -``` - -To automatically style-format your code changes each time you commit: - -```sh -touch .git/hooks/pre-commit -chmod +x .git/hooks/pre-commit - -tee -a .git/hooks/pre-commit << END -#!/bin/sh -# -# Automatically format your code before committing. 
-exec make format-style -END -``` diff --git a/__init__.py b/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/openfisca_survey_manager/__init__.py b/openfisca_survey_manager/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/openfisca_survey_manager/aggregates.py b/openfisca_survey_manager/aggregates.py deleted file mode 100644 index 98b6a9d1..00000000 --- a/openfisca_survey_manager/aggregates.py +++ /dev/null @@ -1,551 +0,0 @@ -import collections -import logging -import os -from datetime import datetime - -import numpy as np -import pandas as pd - -log = logging.getLogger(__name__) - - -# TODO: -# * Localisation - - -class AbstractAggregates(object): - aggregate_variables = None - amount_unit = 1e6 - currency = None - base_data_frame = None - baseline_simulation = None - beneficiaries_unit = 1e3 - filter_by = None - labels = None - simulation = None - survey_scenario = None - totals_df = None - - def __init__( - self, - survey_scenario=None, - absolute_minimal_detected_variation=0, - relative_minimal_detected_variation=0, - observations_threshold=0, - ): - assert survey_scenario is not None - - self.period = survey_scenario.period - self.survey_scenario = survey_scenario - assert len(survey_scenario.simulations) >= 1 - - self.simulations = survey_scenario.simulations - self.absolute_minimal_detected_variation = absolute_minimal_detected_variation - self.relative_minimal_detected_variation = relative_minimal_detected_variation - self.observations_threshold = observations_threshold - - for name in survey_scenario.tax_benefit_systems: - assert survey_scenario.simulations[name] is not None - - self.weight_variable_by_entity = survey_scenario.weight_variable_by_entity - if self.labels is None: - amount_unit_str = "({} {})".format(self.amount_unit, self.currency) - beneficiaries_unit_str = "({})".format(self.beneficiaries_unit) - self.labels = collections.OrderedDict( - ( - ("label", "Mesure"), - ("entity", "Entité"), - 
("reform_amount", "Dépenses\n" + amount_unit_str), - ("reform_beneficiaries", "Bénéficiaires\n(milliers)"), - ("baseline_amount", "Dépenses initiales\n" + amount_unit_str), - ("baseline_beneficiaries", "Bénéficiaires\ninitiaux\n" + beneficiaries_unit_str), - ("actual_amount", "Dépenses\nréelles\n" + amount_unit_str), - ("actual_beneficiaries", "Bénéficiaires\nréels\n" + beneficiaries_unit_str), - ("absolute_difference_amount", "Diff. absolue\nDépenses\n" + amount_unit_str), - ("absolute_difference_beneficiaries", "Diff absolue\nBénéficiaires\n" + beneficiaries_unit_str), - ("relative_difference_amount", "Diff. relative\nDépenses"), - ("relative_difference_beneficiaries", "Diff. relative\nBénéficiaires"), - ("winners", "Gagnants"), - ("losers", "Perdants"), - ("neutral", "Neutres"), - ) - ) - - def compute_aggregates(self, use_baseline: bool = True, reform: bool = True, actual: bool = True) -> pd.DataFrame: - """ - Compute aggregate amounts. - - Args: - use_baseline (bool, optional): _description_. Defaults to True. - reform (bool, optional): _description_. Defaults to True. - actual (bool, optional): _description_. Defaults to True. 
- - Returns: - pd.DataFrame: The aggregates - """ - filter_by = self.filter_by - if actual: - self.totals_df = self.load_actual_data(period=self.period) - - simulation_types = [] - if use_baseline: - assert self.simulations["baseline"] is not None - simulation_types.append("baseline") - if reform: - simulation_types.append("reform") - if actual: - simulation_types.append("actual") - - data_frame_by_simulation_type = {} - - for simulation_type in simulation_types: - if simulation_type == "actual": - data_frame_by_simulation_type["actual"] = self.totals_df.copy() if self.totals_df is not None else None - else: - use_baseline = simulation_type != "reform" - data_frame = pd.DataFrame() - assert self.aggregate_variables is not None - for variable in self.aggregate_variables: - variable_data_frame = self.compute_variable_aggregates( - variable, use_baseline=use_baseline, filter_by=filter_by - ) - data_frame = pd.concat((data_frame, variable_data_frame)) - - data_frame.rename( - columns={ - "amount": f"{simulation_type}_amount", - "beneficiaries": f"{simulation_type}_beneficiaries", - }, - inplace=True, - ) - data_frame_by_simulation_type[simulation_type] = data_frame - - if use_baseline and reform: - del data_frame_by_simulation_type["reform"]["entity"] - del data_frame_by_simulation_type["reform"]["label"] - - self.base_data_frame = pd.concat( - list(data_frame_by_simulation_type.values()), - axis=1, - sort=True, - ).loc[self.aggregate_variables] - return self.base_data_frame - - def compute_difference( - self, - target: str = "baseline", - default: str = "actual", - amount: bool = True, - beneficiaries: bool = True, - absolute: bool = True, - relative: bool = True, - ) -> pd.DataFrame: - """ - Compute and add relative and/or absolute differences to the data_frame. - - Args: - target (str, optional): Target simulation. Defaults to "baseline". - default (str, optional): Default simulation. Defaults to 'actual'. - amount (bool, optional): Provide amounts. 
Defaults to True. - beneficiaries (bool, optional): Provide beneficiaries. Defaults to True. - absolute (bool, optional): Return absolute values. Defaults to True. - relative (bool, optional): Return relative values. Defaults to True. - - Returns: - pd.DataFrame: The differences - """ - assert relative or absolute - assert amount or beneficiaries - base_data_frame = self.base_data_frame if self.base_data_frame is not None else self.compute_aggregates() - - difference_data_frame = base_data_frame[["label", "entity"]].copy() - # Remove duplicates - difference_data_frame = difference_data_frame.loc[:, ~difference_data_frame.columns.duplicated()].copy() - - quantities = [] - quantities += ["amount"] if amount else None - quantities += ["beneficiaries"] if beneficiaries else None - - for quantity in quantities: - difference_data_frame[f"absolute_difference_{quantity}"] = ( - abs(base_data_frame[f"{target}_{quantity}"]) - base_data_frame[f"{default}_{quantity}"] - ) - difference_data_frame[f"relative_difference_{quantity}"] = ( - abs(base_data_frame[f"{target}_{quantity}"]) - base_data_frame[f"{default}_{quantity}"] - ) / abs(base_data_frame[f"{default}_{quantity}"]) - - return difference_data_frame - - def compute_variable_aggregates( - self, variable: str, use_baseline: bool = False, filter_by: str = None - ) -> pd.DataFrame: - """ - Return aggregate spending, and number of beneficiaries for the relevant entity level. - - Args: - variable (str): Name of the variable aggregated according to its entity - use_baseline (bool, optional): Use the baseline or the reform or the only avalilable simulation when no reform (default). Defaults to False. - filter_by (str, optional): The variable to filter by. Defaults to None. 
- - Returns: - pd.DataFrame: The amount and beneficiaries for the variable - """ - if len(self.simulations) == 1: - simulation = list(self.simulations.values())[0] - elif use_baseline: - simulation = self.simulations["baseline"] - else: - simulation = self.simulations["reform"] - - variables = simulation.tax_benefit_system.variables - variable_instance = variables.get(variable) - - if variable_instance is None: - msg = f"Variable {variable} is not available" - if use_baseline: - msg += " in baseline simulation" - log.info(msg) - return pd.DataFrame( - data={ - "label": variable, - "entity": "Unknown entity", - "amount": 0, - "beneficiaries": 0, - }, - index=[variable], - ) - - entity_key = variable_instance.entity.key - - if self.weight_variable_by_entity is not None: - weight = self.weight_variable_by_entity[entity_key] - assert weight in variables, f"{weight} not a variable of the tax_benefit_system" - weight_array = simulation.calculate(weight, period=self.period).astype("float") - assert not np.isnan(np.sum(weight_array)), f"The are some NaN in weights {weight} for entity {entity_key}" - # amounts and beneficiaries from current data and default data if exists - # Build weights for each entity - else: - log.debug(f"No weight variable defined for entity {entity_key}, using 1 as weight.") - weight = "weight" - weight_array = 1 - - variable_array = simulation.calculate_add(variable, period=self.period).astype("float") - assert np.isfinite(variable_array).all(), ( - f"The are non finite values in variable {variable} for entity {entity_key}" - ) - data = pd.DataFrame( - { - variable: variable_array, - weight: weight_array, - } - ) - if filter_by: - filter_dummy_variable = ( - filter_by if filter_by in variables else self.survey_scenario.filtering_variable_by_entity[entity_key] - ) - filter_dummy_array = simulation.calculate(filter_dummy_variable, period=self.period) - - else: - filter_dummy_array = 1 - - assert np.isfinite(filter_dummy_array).all(), "The are non 
finite values in variable {} for entity {}".format( - filter_dummy_variable, entity_key - ) - - amount = int((data[variable] * data[weight] * filter_dummy_array / self.amount_unit).sum()) - beneficiaries = int( - ((data[variable] != 0) * data[weight] * filter_dummy_array / self.beneficiaries_unit).sum() - ) - variable_data_frame = pd.DataFrame( - data={ - "label": variables[variable].label, - "entity": variables[variable].entity.key, - "amount": amount, - "beneficiaries": beneficiaries, - }, - index=[variable], - ) - - return variable_data_frame - - def create_description(self): - """Create a description dataframe.""" - now = datetime.now() - return pd.DataFrame( - [ - "OpenFisca", - "Calculé le %s à %s" % (now.strftime("%d-%m-%Y"), now.strftime("%H:%M")), - "Système socio-fiscal au %s" % self.simulation.period.start.year, - "Données d'enquêtes de l'année %s" % str(self.data_year), - ] - ) - - def to_csv( - self, - path=None, - absolute=True, - amount=True, - beneficiaries=True, - default="actual", - relative=True, - target="reform", - ): - """Saves the table to csv.""" - assert path is not None - - if os.path.isdir(path): - now = datetime.now() - file_path = os.path.join(path, "Aggregates_%s.%s" % (now.strftime("%d-%m-%Y"), ".csv")) - else: - file_path = path - - df = self.get_data_frame( - absolute=absolute, - amount=amount, - beneficiaries=beneficiaries, - default=default, - relative=relative, - target=target, - ) - df.to_csv(file_path, index=False, header=True) - - def to_excel( - self, - path=None, - absolute=True, - amount=True, - beneficiaries=True, - default="actual", - relative=True, - target="reform", - ): - """Save the table to excel.""" - assert path is not None - - if os.path.isdir(path): - now = datetime.now() - file_path = os.path.join(path, "Aggregates_%s.%s" % (now.strftime("%d-%m-%Y"), ".xlsx")) - else: - file_path = path - - df = self.get_data_frame( - absolute=absolute, - amount=amount, - beneficiaries=beneficiaries, - default=default, - 
relative=relative, - target=target, - ) - writer = pd.ExcelWriter(file_path) - df.to_excel(writer, "aggregates", index=False, header=True) - descr = self.create_description() - descr.to_excel(writer, "description", index=False, header=False) - writer.save() - - def to_html( - self, - path=None, - absolute=True, - amount=True, - beneficiaries=True, - default="actual", - relative=True, - target="reform", - ): - """Get or saves the table to html format.""" - df = self.get_data_frame( - absolute=absolute, - amount=amount, - beneficiaries=beneficiaries, - default=default, - relative=relative, - target=target, - ) - - if path is not None and os.path.isdir(path): - now = datetime.now() - file_path = os.path.join(path, "Aggregates_%s.%s" % (now.strftime("%d-%m-%Y"), ".html")) - else: - file_path = path - - if file_path is not None: - with open(file_path, "w") as html_file: - df.to_html(html_file) - return df.to_html() - - def to_markdown( - self, - path=None, - absolute=True, - amount=True, - beneficiaries=True, - default="actual", - relative=True, - target="reform", - ): - """Get or saves the table to markdown format.""" - df = self.get_data_frame( - absolute=absolute, - amount=amount, - beneficiaries=beneficiaries, - default=default, - relative=relative, - target=target, - ) - - if path is not None and os.path.isdir(path): - now = datetime.now() - file_path = os.path.join(path, "Aggregates_%s.%s" % (now.strftime("%d-%m-%Y"), ".md")) - else: - file_path = path - - if file_path is not None: - with open(file_path, "w") as markdown_file: - df.to_markdown(markdown_file) - - return df.to_markdown() - - def get_calibration_coeffcient(self, target: str = "reform") -> pd.DataFrame: - df = self.compute_aggregates( - actual=True, - use_baseline=target == "baseline", - reform=target == "reform", - ) - return df["{}_amount".format(target)] / df["actual_amount"] - - def get_data_frame( - self, - absolute: bool = True, - amount: bool = True, - beneficiaries: bool = True, - default: str 
= "actual", - formatting: bool = True, - relative: bool = True, - target: str = "reform", - ignore_labels: bool = False, - ): - assert target is None or target in ["reform", "baseline"] - - columns = self.labels.keys() - if (absolute or relative) and (target != default): - difference_data_frame = self.compute_difference( - absolute=absolute, - amount=amount, - beneficiaries=beneficiaries, - default=default, - relative=relative, - target=target, - ) - else: - difference_data_frame = None - - # Removing unwanted columns - if amount is False: - columns = [column for column in columns if "amount" not in columns] - - if beneficiaries is False: - columns = [column for column in columns if "beneficiaries" not in column] - - if absolute is False: - columns = [column for column in columns if "absolute" not in column] - - if relative is False: - columns = [column for column in columns if "relative" not in column] - - for simulation_type in ["reform", "baseline", "actual"]: - if simulation_type not in [target, default]: - columns = [column for column in columns if simulation_type not in column] - - aggregates_data_frame = self.compute_aggregates( - actual="actual" in [target, default], - use_baseline="baseline" in [target, default], - reform="reform" in [target, default], - ) - - if "reform_amount" in aggregates_data_frame.columns and "baseline_amount" in aggregates_data_frame.columns: - winners_losers_df = self.compute_all_winners_losers(filter_by=self.filter_by) - aggregates_data_frame = aggregates_data_frame.join(winners_losers_df) - - ordered_columns = [ - "label", - "entity", - "reform_amount", - "baseline_amount", - "actual_amount", - "absolute_difference_amount", - "relative_difference_amount", - "reform_beneficiaries", - "baseline_beneficiaries", - "actual_beneficiaries", - "absolute_difference_beneficiaries", - "relative_difference_beneficiaries", - "winners", - "losers", - "neutral", - ] - if difference_data_frame is not None: - # Remove eventual duplication - 
difference_data_frame = difference_data_frame.loc[:, ~difference_data_frame.columns.duplicated()].copy() - aggregates_data_frame = aggregates_data_frame.loc[:, ~aggregates_data_frame.columns.duplicated()].copy() - df = aggregates_data_frame.merge(difference_data_frame, how="left")[columns] - else: - columns = [column for column in columns if column in aggregates_data_frame.columns] - df = aggregates_data_frame[columns] - - df = df.reindex(columns=ordered_columns).dropna(axis=1, how="all") - - if formatting: - relative_columns = [column for column in df.columns if "relative" in column] - df[relative_columns] = df[relative_columns].map(lambda x: "{:.2%}".format(x) if str(x) != "nan" else "nan") - for column in df.columns: - if issubclass(np.dtype(df[column]).type, np.number): - df[column] = df[column].apply(lambda x: "{:d}".format(int(round(x))) if str(x) != "nan" else "nan") - - if not ignore_labels: - df = df.rename(columns=self.labels) - - return df - - def load_actual_data(self, period=None): - NotImplementedError - - def compute_winners_losers(self, variable: str, filter_by: str = None): - if "reform" not in self.simulations or "baseline" not in self.simulations: - log.warning("Cannot compute winners and losers without a reform and a baseline simulation.") - return pd.DataFrame() - - reform_simulation = self.simulations["reform"] - baseline_simulation = self.simulations["baseline"] - - variable_instance = reform_simulation.tax_benefit_system.variables.get(variable) - if variable_instance is None: - log.warning(f"Variable {variable} not found in reform simulation.") - return pd.DataFrame() - - stats = reform_simulation.compute_winners_losers( - baseline_simulation=baseline_simulation, - variable=variable, - period=self.period, - filter_by=filter_by, - filtering_variable_by_entity=self.survey_scenario.filtering_variable_by_entity, - absolute_minimal_detected_variation=self.absolute_minimal_detected_variation, - 
relative_minimal_detected_variation=self.relative_minimal_detected_variation, - observations_threshold=self.observations_threshold, - ) - - winners_losers_df = pd.DataFrame( - { - "winners": [stats["above_after"]], - "losers": [stats["lower_after"]], - "neutral": [stats["neutral"]], - }, - index=[variable], - ) - return winners_losers_df - - def compute_all_winners_losers(self, filter_by: str = None): - all_winners_losers = pd.DataFrame() - for variable in self.aggregate_variables: - winners_losers = self.compute_winners_losers(variable, filter_by=filter_by) - all_winners_losers = pd.concat([all_winners_losers, winners_losers]) - return all_winners_losers diff --git a/openfisca_survey_manager/assets/COICOP/1998/nomenclature_coicop1998_source_by_classes.csv b/openfisca_survey_manager/assets/COICOP/1998/nomenclature_coicop1998_source_by_classes.csv deleted file mode 100644 index e9faac51..00000000 --- a/openfisca_survey_manager/assets/COICOP/1998/nomenclature_coicop1998_source_by_classes.csv +++ /dev/null @@ -1,88 +0,0 @@ -COICOP, 1998 - Niveau 3 - Liste des classes; -Code;Libellé -01.1.1;Pain et céréales -01.1.2;Viande -01.1.3;Poissons et crustacés -01.1.4;Lait, fromage et œufs -01.1.5;Huiles et graisses -01.1.6;Fruits -01.1.7;Légumes -01.1.8;Sucre, confitures, chocolat, confiserie et produits glacés -01.1.9;Sel, épices, sauces et produits alimentaires n.d.a. -01.2.1;Café, thé et cacao -01.2.2;Autres boissons non alcoolisées -02.1.1;Alcools -02.1.2;Vins, cidres et champagne -02.1.3;Bières -02.2.1;Tabac -03.1.1;Tissus d'habillement -03.1.2;Vêtements -03.1.3;Autres articles vestimentaires et accessoires d'habillement -03.1.4;Nettoyage, réparation et location de vêtements -03.2.1;Chaussures y.c. 
réparation -04.1.1;Loyers effectifs -04.3.1;Produits pour l'entretien et la réparation courante du logement -04.3.2;Services pour l'entretien et la réparation courante du logement -04.4.1;Fourniture d'eau et autres services liés au logement -04.5.1;Électricité -04.5.2;Gaz -04.5.3;Combustibles liquides -04.5.4;Combustibles solides -04.5.5;Eau chaude, vapeur et glace -05.1.1;Meubles et articles d'ameublement -05.1.2;Tapis et autres revêtements de sol -05.2.1;Articles de ménage en textile -05.3.1;Gros appareils ménagers électriques ou non -05.3.2;Petits appareils électroménagers -05.3.3;Réparation d'appareils ménagers -05.4.1;Verrerie, vaisselle et ustensiles de ménage -05.5.1;Outillage et autres matériels pour la maison et le jardin -05.6.1;Articles de ménage non durables -05.6.2;Services domestiques et autres services pour l'habitation -06.1.1;Préparations et produits pharmaceutiques -06.2.1;Service des médecins -06.2.2;Service des dentistes -06.2.3;Services paramédicaux -07.1.1;Automobiles neuves et d'occasion -07.1.2;Motos, cycles -07.2.1;Pièces détachées et accessoires, hors montage, pour véhicules personnels -07.2.2;Carburants et lubrifiants -07.2.3;Entretien et réparation de véhicules personnels -07.2.4;Autres services liés aux véhicules personnels -07.3.1;Transports ferroviaires de voyageurs -07.3.2;Transports routiers de voyageurs -07.3.3;Transports aériens de voyageurs -07.3.5;Transports combinés de voyageurs -07.3.6;Autres achats de services de transport y.c. transports maritimes et fluviaux -08.1.1;Services postaux -08.1.2;Équipement et services de téléphone et de télécopie -09.1.1;Équipements audio-visuels -09.1.2;Équipements photo et cinéma, instruments d'optique -09.1.3;Matériels de traitement de l'information y.c. 
micro-ordinateurs -09.1.4;Supports d'enregistrements de l'image et du son -09.1.5;Réparation de matériel audiovisuel, photographique et de traitement de l'information -09.2.1;Autres biens durables importants pour les loisirs et la culture y.c. réparation -09.3.1;Jeux et jouets -09.3.2;Équipement de sport, de camping et de loisirs de plein air -09.3.3;Horticulture -09.3.4;Animaux d'agrément y c. services vétérinaires et autres services -09.4.1;Services récréatifs -09.4.2;Services culturels -09.5.1;Livres -09.5.2;Journaux et périodiques -09.5.3;Imprimés divers, articles de papeterie et de dessin -09.6.1;Voyages touristiques tout compris -10.1.1;Services d'éducation -11.1.1;Restauration et cafés -11.1.2;Cantines -11.2.1;Services d'hébergement -12.1.1;Salons de coiffure et esthétique corporelle -12.1.3;Appareils, autres articles et produits pour soins personnels -12.3.1;Horlogerie, bijouterie, joaillerie -12.3.2;Autres effets personnels -12.4.1;Services de protection sociale -12.5.2;Assurance habitation -12.5.3;Assurance santé -12.5.4;Assurance automobile -12.6.1;Services financiers n.d.a. 
-12.7.1;Autres services diff --git a/openfisca_survey_manager/assets/COICOP/1998/nomenclature_coicop1998_source_by_divisions.csv b/openfisca_survey_manager/assets/COICOP/1998/nomenclature_coicop1998_source_by_divisions.csv deleted file mode 100644 index f5b3a853..00000000 --- a/openfisca_survey_manager/assets/COICOP/1998/nomenclature_coicop1998_source_by_divisions.csv +++ /dev/null @@ -1,14 +0,0 @@ -COICOP, 1998 - Niveau 1 - Liste des divisions; -Code;Libellé -01;Produits alimentaires et boissons non alcoolisées -02;Boissons alcoolisées et tabac -03;Habillement et chaussures -04;Logement, eau, gaz, électricité et autres combustibles -05;Ameublement, équipement ménager et entretien courant de la maison -06;Santé -07;Transport -08;Communications -09;Loisirs et culture -10;Éducation -11;Hôtellerie, cafés, restauration -12;Autres biens et services diff --git a/openfisca_survey_manager/assets/COICOP/1998/nomenclature_coicop1998_source_by_groupes.csv b/openfisca_survey_manager/assets/COICOP/1998/nomenclature_coicop1998_source_by_groupes.csv deleted file mode 100644 index 3974a75e..00000000 --- a/openfisca_survey_manager/assets/COICOP/1998/nomenclature_coicop1998_source_by_groupes.csv +++ /dev/null @@ -1,39 +0,0 @@ -COICOP, 1998 - Niveau 2 - Liste des groupes; -Code;Libellé -01.1;Produits alimentaires -01.2;Boissons non alcoolisées -02.1;Boissons alcoolisées -02.2;Tabac -03.1;Habillement -03.2;Chaussures y.c. réparation -04.1;Loyers d'habitation effectifs -04.3;Entretien et réparation courante du logement -04.4;Fourniture d'eau et autres services lies au logement -04.5;Électricité, gaz et autres combustibles -05.1;Meubles, articles d'ameublement, tapis et autres revêt. de sol y.c.réparation -05.2;Articles de ménage en textile -05.3;Appareils ménagers -05.4;Verrerie, vaisselle et ustensiles de ménage -05.5;Outillage et autres matériels pour la maison et le jardin -05.6;Biens et services pour l'entretien courant de l'habitation -06.1;Médicaments et autres prod. 
pharmaceutiques, appareils et mat. thérapeutiques -06.2;Médecine non hospitalière et services paramédicaux (services ambulatoires) -07.1;Achats de véhicules -07.2;Utilisation de véhicules personnels -07.3;Services de transport -08.1;Communications -09.1;Équipements audio-visuels, photographiques et informatiques -09.2;Autres biens durables importants pour les loisirs et la culture y.c. réparation -09.3;Autres articles et équipements de loisirs, fleurs et animaux d'agrément -09.4;Services récréatifs et culturels -09.5;Presse, livres et papeterie -09.6;Voyages touristiques tout compris -10.1;Services d'éducation -11.1;Restauration et cafés -11.2;Services d'hébergement -12.1;Soins personnels -12.3;Effets personnels n.d.a. -12.4;Protection sociale -12.5;Assurances -12.6;Services financiers -12.7;Autres services diff --git a/openfisca_survey_manager/assets/COICOP/1998/nomenclature_coicop1998_source_by_postes.csv b/openfisca_survey_manager/assets/COICOP/1998/nomenclature_coicop1998_source_by_postes.csv deleted file mode 100644 index 6e0c236f..00000000 --- a/openfisca_survey_manager/assets/COICOP/1998/nomenclature_coicop1998_source_by_postes.csv +++ /dev/null @@ -1,307 +0,0 @@ -COICOP, 1998 - Niveau 5 - Liste des postes; -Code;Libellé -01.1.1.1.1;Pain -01.1.1.2.1;Pâtisserie fraîche -01.1.1.2.2;Viennoiserie -01.1.1.3.1;Biscottes, triscottes, pains durs -01.1.1.3.2;Biscuits et gâteaux -01.1.1.3.3;Pâtisserie de conservation -01.1.1.4.1;Farines, semoules, céréales -01.1.1.4.2;Pâtes alimentaires -01.1.1.4.3;Riz -01.1.1.4.4;Plats cuisinés à base de céréales -01.1.2.1.1;Bœuf à rôtir -01.1.2.1.2;Bœuf à braiser et bouillir -01.1.2.1.3;Bœuf haché -01.1.2.2.1;Veau à rôtir -01.1.2.2.2;Veau à braiser et bouillir -01.1.2.3.1;Agneau -01.1.2.3.2;Cheval -01.1.2.4.1;Porc frais, salé, fumé -01.1.2.4.2;Jambon -01.1.2.4.3;Pâté -01.1.2.4.4;Saucisson -01.1.2.4.5;Saucisse -01.1.2.4.6;Autres charcuteries et conserves de viande -01.1.2.5.1;Poulet -01.1.2.5.2;Dinde -01.1.2.5.3;Autres volailles 
-01.1.2.6.1;Lapin, gibier -01.1.2.6.2;Triperie -01.1.2.6.3;Plats cuisinés à base de viande -01.1.3.1.1;Poissons frais -01.1.3.1.2;Crustacés et mollusques frais -01.1.3.2.1;Poissons en conserves, surgelés, séchés, fumés -01.1.3.2.2;Plats cuisinés à base de poisson -01.1.4.1.1;Laits -01.1.4.1.2;Crèmes fraîches -01.1.4.2.1;Yaourts -01.1.4.2.2;Desserts lactés frais -01.1.4.3.1;Fromages frais -01.1.4.3.2;Fromages à pâte molle et à pâte persillée -01.1.4.3.3;Fromages à pâte pressée -01.1.4.3.4;Fromages de chèvre -01.1.4.3.5;Fromages fondus -01.1.4.4.1;Œufs -01.1.5.1.1;Beurres -01.1.5.2.1;Huiles alimentaires -01.1.5.2.2;Margarine végétale et autres graisses -01.1.6.1.1;Fruits frais -01.1.6.2.1;Fruits surgelés, appertisés ou secs -01.1.7.1.1;Légumes frais -01.1.7.2.1;Légumes surgelés, secs et en conserve -01.1.7.2.2;Pommes de terre préparées -01.1.7.2.3;Autres plats cuisinés à base de légumes -01.1.8.1.1;Sucre -01.1.8.1.2;Confitures, compotes et miel -01.1.8.1.3;Confiseries à base de sucre -01.1.8.2.1;Chocolat en tablettes -01.1.8.2.2;Confiseries à base de chocolat -01.1.8.3.1;Crèmes glacées, glaces et sorbets -01.1.9.1.1;Sels, épices, condiments, vinaigres et sauces -01.1.9.2.1;Aliments préparés pour bébés -01.1.9.2.2;Produits diététiques -01.1.9.3.1;Potages préparés -01.1.9.3.2;Desserts et produits pour pâtisserie -01.2.1.1.1;Chocolat en poudre -01.2.1.2.1;Cafés en grains ou moulu -01.2.1.2.2;Café soluble et succédanée de café -01.2.1.3.1;Thés et infusions -01.2.2.1.1;Eaux minérales et de source -01.2.2.2.1;Jus de fruits et de légumes -01.2.2.2.2;Boissons aux fruits non gazeuses, sodas, limonades -01.2.2.2.3;Sirops et concentrés -02.1.1.1.1;Apéritifs anisés à base d'alcool -02.1.1.1.2;Whisky -02.1.1.2.1;Eaux de vie -02.1.1.2.2;Liqueurs -02.1.2.1.1;Vins de consommation courante -02.1.2.1.2;Vins supérieurs -02.1.2.1.3;Vins doux naturels et apéritifs à base de vin -02.1.2.2.1;Champagne et mousseux -02.1.2.2.2;Cidres -02.1.3.1.1;Bières -02.2.1.1.1;Tabac -03.1.1.1.1;Tissus 
d'habillement -03.1.2.1.1;Manteaux, imper. et vêtements en peau pour hommes -03.1.2.1.2;Costumes pour hommes -03.1.2.1.3;Vestes pour hommes -03.1.2.1.4;Pantalons pour hommes -03.1.2.1.5;Vêtements de travail -03.1.2.2.1;Manteaux, imper et vêtements en peau pour femmes -03.1.2.2.2;Tailleurs et ensembles pour femmes -03.1.2.2.3;Jupes -03.1.2.2.4;Robes -03.1.2.2.5;Pantalons pour femmes -03.1.2.2.6;Vestes pour femmes -03.1.2.3.1;Manteaux, imperméables et vestes pour enfants -03.1.2.3.2;Pantalons pour enfants -03.1.2.3.3;Jupes, robes et ensembles pour enfants -03.1.2.4.1;Vêtements de sport ou de loisirs pour hommes et garçonnets -03.1.2.4.2;Vêtements de sport ou de loisir pour femmes et fillettes -03.1.2.5.1;Chemises pour hommes -03.1.2.5.2;Sous-vêtements et vêtements de nuit pour hommes -03.1.2.5.3;Chaussettes pour hommes -03.1.2.5.4;Tricots et pull-overs pour hommes -03.1.2.6.1;Chemisiers et tuniques pour femmes -03.1.2.6.2;Lingerie, vêtements de nuit pour femmes -03.1.2.6.3;Collants, bas et chaussettes pour femmes -03.1.2.6.4;Tricots et pull-overs pour femmes -03.1.2.7.1;Chemises, sous- vêtements et vêtements de nuit enfants -03.1.2.7.2;Collants et chaussettes pour enfants -03.1.2.7.3;Tricots et pull-overs pour enfants -03.1.2.7.4;Layette -03.1.3.1.1;Accessoires du vêtement et vêtements de travail -03.1.3.1.2;Laines et mercerie -03.1.4.1.1;Blanchissage et nettoyage de vêtements -03.1.4.1.2;Réparations de vêtements -03.2.1.1.1;Chaussures de ville pour hommes -03.2.1.1.2;Chaussures de ville pour femmes -03.2.1.1.3;Chaussures de ville pour enfants -03.2.1.2.1;Chaussures de sport -03.2.1.2.2;Autres articles chaussants -03.2.1.2.3;Réparation de chaussures -04.1.1.1.1;Loyers des résidences principales -04.1.1.2.1;Loyers des résidences de vacances et gîtes ruraux -04.3.1.1.1;Peintures, vernis et papiers peints -04.3.1.1.2;Art. 
de quincaillerie, fournitures pour bricolage -04.3.1.1.3;Articles en bois -04.3.2.1.1;Pose de revêtement de sols et murs -04.3.2.2.1;Services d'entretien du logement -04.4.1.1.1;Distribution d'eau -04.4.1.2.1;Enlèvement des ordures ménagères -04.4.1.3.1;Service d'assainissement -04.4.1.4.1;Ramonage, gardiennage et entretien des ascenseurs -04.5.1.1.1;Électricité -04.5.2.1.1;Gaz de ville -04.5.2.2.1;Gaz liquéfiés -04.5.3.1.1;Fiouls domestiques -04.5.4.1.1;Autres combustibles solides -04.5.5.1.1;Chauffage urbain -05.1.1.1.1;Chambres d'enfants -05.1.1.1.2;Chambres d'adultes -05.1.1.1.3;Sommiers -05.1.1.2.1;Étagères et éléments modulables -05.1.1.2.2;Buffets, livings, bahuts, bibliothèques -05.1.1.2.3;Tables et chaises de salle à manger -05.1.1.3.1;Mobilier de cuisine et de salle de bains -05.1.1.4.1;Fauteuils et canapés -05.1.1.5.1;Luminaires -05.1.1.5.2;Articles de décoration -05.1.1.5.3;Meubles de jardin -05.1.1.5.4;Réparation des meubles -05.1.2.1.1;Tapis et autres revêtements de sol -05.2.1.1.1;Linge de lit -05.2.1.1.2;Couvertures et accessoires de literie -05.2.1.1.3;Literie -05.2.1.2.1;Tissus d'ameublement, voilages -05.2.1.2.2;Linge de toilette et de table, torchons -05.3.1.1.1;Lave-linge, sèche-linge -05.3.1.1.2;Lave-vaisselle -05.3.1.2.1;Appareils de cuisson -05.3.1.3.1;Réfrigérateurs, congélateurs -05.3.1.4.1;Aspirateurs et autres gros appareils ménagers -05.3.1.4.2;Appareils de chauffage -05.3.2.1.1; petits appareils électroménagers -05.3.3.1.1;Réparation d'appareils ménagers -05.4.1.1.1;Faïence et porcelaine -05.4.1.1.2;Verrerie -05.4.1.2.1;Couverts et autres ustensiles de cuisine en métal -05.4.1.2.2;Orfèvrerie -05.4.1.3.1;Autres ustensiles de ménage -05.5.1.1.1;Outillage à moteur -05.5.1.1.2;Piles -05.5.1.1.3;Lampes et autres petits matériels électriques -05.5.1.1.4;Outillage à main, ustensiles de jardin -05.5.1.1.5;Autres petits matériels pour la maison -05.6.1.1.1;Petits articles pour l'entretien du logement -05.6.1.1.2;Petits articles de quincaillerie 
et de couture -05.6.1.2.1;Savons de ménage et détergents -05.6.1.2.2;Produits d'entretien de la maison -05.6.2.1.1;Employé de maison -05.6.2.2.1;Locations d'appareils de nettoyage et de bricolage -05.6.2.2.2;Blanchissage et nettoyage d'articles textiles hors vêtements -06.1.1.1.1;Produits pharmaceutiques -06.1.1.2.1;Parapharmacie -06.1.1.3.1;Optique médicale -06.1.1.3.2;Appareils orthopédiques et prothèses -06.2.1.1.1;Médecins -06.2.2.1.1;Dentistes -06.2.3.1.1;Analyses de laboratoire -06.2.3.1.2;Auxiliaires médicaux -06.2.3.1.3;Transports sanitaires -06.2.3.1.4;Cures thermales privées -07.1.1.1.1;Automobiles neuves -07.1.1.2.1;Automobiles d'occasion -07.1.2.1.1;Motocycles -07.1.2.1.2;Cycles -07.2.1.1.1;Pneumatiques -07.2.1.2.1;Autres grosses pièces de rechange pour véhicules -07.2.1.2.2;Accessoires et petites pièces de rechange pour véhicules -07.2.2.1.1;Supercarburants -07.2.2.1.2;Gazole et gpl -07.2.2.2.1;Lubrifiants -07.2.3.1.1;Entretien de véhicules personnels -07.2.3.2.1;Réparation de véhicules personnels (heure de m.o.) -07.2.3.2.2;Réparation de véhicules personnels (forfait pièces et m.o.) -07.2.4.1.1;Péages -07.2.4.1.2;Parkings -07.2.4.2.1;Auto-écoles -07.2.4.2.2;Locations de véhicules privés -07.2.4.2.3;Formalités administratives liées au véhicule personnel -07.3.1.1.1;Transports ferroviaires de voyageurs -07.3.2.1.1;Transports routiers interurbains de voyageurs -07.3.2.1.2;Transports routiers urbains et suburbains de voyageurs -07.3.2.2.1;Taxis -07.3.3.1.1;Transports aériens de voyageurs -07.3.5.1.1;Transports combinés urbains et suburbains de voyageurs -07.3.6.1.1;Déménagement -07.3.6.1.2;Transports maritimes et fluviaux -08.1.1.1.1;Services postaux -08.1.2.1.1;Poste téléphonique -08.1.2.2.1;Services de télécommunications -09.1.1.1.1;Matériel vidéo -09.1.1.1.2;Téléviseurs -09.1.1.1.3;Autres appareils électro-acoustiques -09.1.2.1.1;Appareils de photo, inst. d'optique, d'observation -09.1.2.1.2;Caméscope -09.1.3.1.1;Matériel de traitement de l'info. 
yc micro-ordinateurs -09.1.4.1.1;Disques et cassettes enregistrés -09.1.4.1.2;Disquettes et cassettes vierges -09.1.4.1.3;Produits photographiques -09.1.5.1.1;Réparation d'appareils et d'accessoires récréatifs -09.2.1.1.1;Caravanes -09.2.1.1.2;Bateaux et accessoires -09.2.1.1.3;Instruments de musique et autres articles pour les loisirs -09.3.1.1.1;Jeux et jouets -09.3.2.1.1;Articles de sport, armes et munitions -09.3.2.1.2;Articles de camping -09.3.3.1.1;Fleurs et plantes -09.3.3.2.1;Plants et graines yc engrais -09.3.3.2.2;Compositions florales -09.3.4.1.1;Aliments et produits d'hygiène pour animaux -09.3.4.1.2;Achats d'animaux -09.3.4.1.3;Soins pour animaux d'agrément -09.4.1.1.1;Pratique d'un sport -09.4.1.1.2;Assistance à des manifestations sportives -09.4.1.1.3;Enseignement artistique et autres services récréatifs -09.4.2.1.1;Cinémas -09.4.2.2.1;Théâtres, concerts et music-halls -09.4.2.2.2;Visite de monument ou musée -09.4.2.3.1;Redevance et abonnements télévision -09.4.2.4.1;Travaux photographiques -09.4.2.4.2;Locations d'articles récréatifs -09.5.1.1.1;Livres scolaires et parascolaires, dict., encyclo. -09.5.1.1.2;Ouvrages de littérature générale -09.5.1.1.3;Livres pour la jeunesse, livres pratiques et autres -09.5.2.1.1;Journaux -09.5.2.2.1;Revues -09.5.3.1.1;Autres articles imprimés -09.5.3.1.2;Articles de papeterie -09.5.3.2.1;Stylos, crayons et encres -09.5.3.2.2;Autres fournitures scolaires et de bureau -09.6.1.1.1;Voyages organisés -10.1.1.1.1;Secondaires à financement privé -10.1.1.1.2;Supérieurs à financement privé -10.1.1.1.3;Cours par correspondance -11.1.1.1.1;Repas traditionnel dans un restaurant -11.1.1.1.2;Repas en libre service et restauration rapide -11.1.1.2.1;Cafés et autres boiss. chaudes cons. dans les cafés -11.1.1.2.2;Vin consommé dans les cafés -11.1.1.2.3;Bière et cidre consommés dans les cafés -11.1.1.2.4;Spiritueux consommés dans les cafés -11.1.1.2.5;Boissons non alcoolisées cons. 
dans les cafés -11.1.2.1.1;Repas dans un restaurant scolaire ou universitaire -11.1.2.2.1;Repas dans un restaurant d'entreprise ou d'admin. -11.2.1.1.1;Locations de chambres d'hôtel -11.2.1.1.2;Pensions et demi-pensions dans les hôtels -11.2.1.2.1;Frais d'internat scol., résidences univers., foyers -11.2.1.3.1;Hébergements de vacances ou de loisirs -11.2.1.3.2;Terrains de camping et auberges de jeunesse -12.1.1.1.1;Coiffeurs pour hommes -12.1.1.1.2;Coiffeurs pour femmes -12.1.1.2.1;Autres services d'esthétique corporelle -12.1.3.1.1;Parfums et eaux de toilette -12.1.3.1.2;Produits de maquillage et de soin de la peau -12.1.3.2.1;Savons, dentifrices et produits de toilette -12.1.3.2.2;Shampooings et produits capillaires divers -12.1.3.3.1;Autres articles de toilette -12.1.3.3.2;Articles d'hygiène à base de papier -12.1.3.3.3;Appareils de toilette électriques -12.3.1.1.1;Horlogerie -12.3.1.1.2;Bijouterie, joaillerie -12.3.2.1.1;Maroquinerie et articles de voyage -12.3.2.2.1;Articles de puériculture -12.3.2.2.2;Articles pour fumeurs, articles divers -12.3.2.2.3;Réparation horlogerie ou bijoux -12.4.1.1.1;Crèches, assistantes maternelles -12.4.1.2.1;Maisons de retraite et autres services pour personnes âgées -12.4.1.3.1;Établissements pour adultes handicapés -12.5.2.1.1;Assurance habitation -12.5.3.1.1;Assurance santé complémentaire -12.5.4.1.1;Assurance automobile -12.6.1.1.1;Services financiers -12.7.1.1.1;Services funéraires -12.7.1.2.1;Formalités administratives -12.7.1.2.2;Services juridiques -12.7.1.2.3;Autres services aux ménages diff --git a/openfisca_survey_manager/assets/COICOP/1998/nomenclature_coicop1998_source_by_sous_classes.csv b/openfisca_survey_manager/assets/COICOP/1998/nomenclature_coicop1998_source_by_sous_classes.csv deleted file mode 100644 index 1aa04b50..00000000 --- a/openfisca_survey_manager/assets/COICOP/1998/nomenclature_coicop1998_source_by_sous_classes.csv +++ /dev/null @@ -1,163 +0,0 @@ -COICOP, 1998 - Niveau 4 - Liste des sous-classes; 
-Code;Libellé -01.1.1.1;Pain -01.1.1.2;Pâtisserie fraîche et viennoiserie -01.1.1.3;Gâteaux, pâtisserie de conservation -01.1.1.4;Céréales -01.1.2.1;Viande de bœuf -01.1.2.2;Viande de veau -01.1.2.3;Viande de mouton, de cheval -01.1.2.4;Viande de porc et charcuteries -01.1.2.5;Volaille -01.1.2.6;Autres viandes et plats cuisinés à base de viande -01.1.3.1;Poissons et crustacés frais -01.1.3.2;Poissons et crustacés préparés, en conserves et surgelés -01.1.4.1;Lait et crème -01.1.4.2;Yaourts et desserts lactés -01.1.4.3;Fromages -01.1.4.4;Œufs -01.1.5.1;Beurres -01.1.5.2;Huiles et margarines -01.1.6.1;Fruits frais -01.1.6.2;Fruits surgelés, appertisés ou secs -01.1.7.1;Légumes frais -01.1.7.2;Légumes préparés, surgelés et en conserve -01.1.8.1;Produits à base de sucre -01.1.8.2;Produits à base de chocolat -01.1.8.3;Crèmes glacées, glaces et sorbets -01.1.9.1;Condiments et assaisonnements -01.1.9.2;Aliments préparés pour enfants et produits diététiques -01.1.9.3;Autres produits alimentaires n.d.a. -01.2.1.1;Produits chocolatés -01.2.1.2;Cafés -01.2.1.3;Thés et infusions -01.2.2.1;Eaux minérales et de source -01.2.2.2;Boissons gazeuses, jus et sirops -02.1.1.1;Apéritifs -02.1.1.2;Eaux de vie et liqueurs -02.1.2.1;Vins -02.1.2.2;Champagne, mousseux et cidres -02.1.3.1;Bières -02.2.1.1;Tabac -03.1.1.1;Tissus d'habillement -03.1.2.1;Vêtements de dessus pour hommes -03.1.2.2;Vêtements de dessus pour femmes -03.1.2.3;Vêtements de dessus pour enfants -03.1.2.4;Vêtements de sport -03.1.2.5;Lingerie, bonneterie pour hommes -03.1.2.6;Lingerie, bonneterie pour femmes -03.1.2.7;Lingerie, bonneterie pour enfants -03.1.3.1;Autres articles vestimentaires et accessoires d'habillement -03.1.4.1;Nettoyage et réparation de vêtements -03.2.1.1;Chaussures de ville -03.2.1.2;Autres chaussures y.c. 
réparation -04.1.1.1;Loyers des résidences principales -04.1.1.2; loyers des résidences de vacances et gîtes ruraux -04.3.1.1;Produits pour l'entretien et la réparation courante du logement -04.3.2.1;Pose de revêtements de sols et murs -04.3.2.2;Autres services d'entretien du logement -04.4.1.1;Distribution d'eau -04.4.1.2;Enlèvement des ordures ménagères -04.4.1.3;Assainissement -04.4.1.4;Autres services liés au logement n.d.a. -04.5.1.1;Électricité -04.5.2.1;Gaz de ville -04.5.2.2; gaz liquéfiés -04.5.3.1;Combustibles liquides -04.5.4.1;Combustibles solides -04.5.5.1;Eau chaude, vapeur et glace -05.1.1.1;Mobilier de chambre -05.1.1.2;Mobilier de séjour -05.1.1.3;Mobilier de cuisine et salle de bains -05.1.1.4;Fauteuils et canapés -05.1.1.5;Accessoires du mobilier, meubles de jardin et réparation de meubles -05.1.2.1;Tapis et autres revêtements de sol -05.2.1.1;Articles pour literie -05.2.1.2;Autres articles textiles -05.3.1.1;Appareils de lavage -05.3.1.2;Appareils de cuisson -05.3.1.3;Réfrigérateurs, congélateurs -05.3.1.4;Autres appareils -05.3.2.1;Petits appareils électroménagers -05.3.3.1;Réparation d'appareils ménagers -05.4.1.1;Verrerie, faïence et porcelaine -05.4.1.2;Ustensiles de ménage en métal -05.4.1.3;Autres articles de ménage -05.5.1.1;Outillage et autres matériels pour la maison et le jardin -05.6.1.1;Petits articles pour l'entretien du logement -05.6.1.2;Savons de ménage et produits d'entretien -05.6.2.1;Services domestiques -05.6.2.2;Autres services pour l'habitation -06.1.1.1;Produits pharmaceutiques -06.1.1.2;Parapharmacie -06.1.1.3;Appareils et matériels thérapeutiques -06.2.1.1;Médecins -06.2.2.1;Dentistes -06.2.3.1;Services paramédicaux -07.1.1.1;Automobiles neuves -07.1.1.2;Automobiles d'occasion -07.1.2.1;Motos et cycles -07.2.1.1;Pneumatiques -07.2.1.2;Pièces de rechange et accessoires -07.2.2.1;Carburants -07.2.2.2;Lubrifiants -07.2.3.1;Entretien de véhicules personnels -07.2.3.2;Réparation de véhicules personnels -07.2.4.1;Péages et 
parkings -07.2.4.2;Autres services pour véhicules personnels -07.3.1.1;Transports ferroviaires de voyageurs -07.3.2.1;Transports routiers de voyageurs -07.3.2.2;Taxis -07.3.3.1;Transports aériens de voyageurs -07.3.5.1;Transports combinés de voyageurs -07.3.6.1;Autres achats de services de transport y.c. transports maritimes et fluviaux -08.1.1.1;Services postaux -08.1.2.1;Équipement de téléphone et de télécopie -08.1.2.2;Service de télécommunications -09.1.1.1;Équipements audio-visuels -09.1.2.1;Équipements photo et cinéma -09.1.3.1;Matériels de traitement de l'information y.c. micro-ordinateurs -09.1.4.1;Supports d'enregistrements de l'image et du son -09.1.5.1;Réparation de matériel audiovisuel, photographique et de traitement de l'information -09.2.1.1;Autres biens durables importants pour les loisirs et la culture y.c. réparation -09.3.1.1;Jeux et jouets -09.3.2.1;Équipement de sport, de camping et de loisirs de plein air -09.3.3.1;Fleurs et plantes -09.3.3.2;Plants et graines -09.3.4.1;Animaux d'agrément y c. services -09.4.1.1;Services récréatifs -09.4.2.1;Cinémas -09.4.2.2;Autres spectacles culturels et musées -09.4.2.3;Redevance et abonnements télévision -09.4.2.4;Autres services culturels -09.5.1.1;Livres -09.5.2.1;Journaux -09.5.2.2;Magazines -09.5.3.1;Articles à base de papier -09.5.3.2;Autres articles de bureau -09.6.1.1;Voyages touristiques tout compris -10.1.1.1;Services d'éducation -11.1.1.1;Restauration -11.1.1.2;Consommation dans les cafés -11.1.2.1;Repas dans un restaurant scolaire ou universitaire -11.1.2.2;Repas dans un restaurant d'entreprise ou d'administration -11.2.1.1;Hôtellerie y.c. 
pension -11.2.1.2;Internat scolaire et universitaire -11.2.1.3;Hébergements de vacances -12.1.1.1;Coiffure -12.1.1.2;Autres services d'esthétique -12.1.3.1;Parfumerie et produits de beauté -12.1.3.2;Produits de l'hygiène corporelle -12.1.3.3;Appareils et autres articles de toilette -12.3.1.1;Horlogerie, bijouterie, joaillerie -12.3.2.1;Maroquinerie et articles de voyages -12.3.2.2;Autres effets personnels y.c. réparation -12.4.1.1;Crèches, assistantes maternelles -12.4.1.2;Maisons de retraite et autres services pour personnes âgées -12.4.1.3;Établissements pour adultes handicapés -12.5.2.1;Assurance habitation -12.5.3.1;Assurance santé complémentaire -12.5.4.1;Assurance automobile -12.6.1.1;Services financiers -12.7.1.1;Services funéraires -12.7.1.2;Prestations administratives et privées diverses diff --git a/openfisca_survey_manager/assets/COICOP/2016/nomenclature_coicop2016_source_by_classes.xls b/openfisca_survey_manager/assets/COICOP/2016/nomenclature_coicop2016_source_by_classes.xls deleted file mode 100644 index 051d371e..00000000 Binary files a/openfisca_survey_manager/assets/COICOP/2016/nomenclature_coicop2016_source_by_classes.xls and /dev/null differ diff --git a/openfisca_survey_manager/assets/COICOP/2016/nomenclature_coicop2016_source_by_divisions.xls b/openfisca_survey_manager/assets/COICOP/2016/nomenclature_coicop2016_source_by_divisions.xls deleted file mode 100644 index 74b3615d..00000000 Binary files a/openfisca_survey_manager/assets/COICOP/2016/nomenclature_coicop2016_source_by_divisions.xls and /dev/null differ diff --git a/openfisca_survey_manager/assets/COICOP/2016/nomenclature_coicop2016_source_by_groupes.xls b/openfisca_survey_manager/assets/COICOP/2016/nomenclature_coicop2016_source_by_groupes.xls deleted file mode 100644 index 33970a8c..00000000 Binary files a/openfisca_survey_manager/assets/COICOP/2016/nomenclature_coicop2016_source_by_groupes.xls and /dev/null differ diff --git 
a/openfisca_survey_manager/assets/COICOP/2016/nomenclature_coicop2016_source_by_postes.xls b/openfisca_survey_manager/assets/COICOP/2016/nomenclature_coicop2016_source_by_postes.xls deleted file mode 100644 index e4a3a11a..00000000 Binary files a/openfisca_survey_manager/assets/COICOP/2016/nomenclature_coicop2016_source_by_postes.xls and /dev/null differ diff --git a/openfisca_survey_manager/assets/COICOP/2016/nomenclature_coicop2016_source_by_sous_classes.xls b/openfisca_survey_manager/assets/COICOP/2016/nomenclature_coicop2016_source_by_sous_classes.xls deleted file mode 100644 index d9843c41..00000000 Binary files a/openfisca_survey_manager/assets/COICOP/2016/nomenclature_coicop2016_source_by_sous_classes.xls and /dev/null differ diff --git a/openfisca_survey_manager/calibration.py b/openfisca_survey_manager/calibration.py deleted file mode 100644 index f44eccd2..00000000 --- a/openfisca_survey_manager/calibration.py +++ /dev/null @@ -1,409 +0,0 @@ -import logging -import re - -import numpy -import pandas as pd -from numpy import logical_not -from openfisca_core.model_api import Enum - -from openfisca_survey_manager.calmar import calmar - -log = logging.getLogger(__name__) - - -class Calibration(object): - """An object to calibrate survey data of a SurveyScenario.""" - - filter_by = None - initial_entity_count = None - _initial_weight_name = None - initial_weight_by_entity = {} - target_margins = {} - margins_by_variable = {} - parameters = { - "use_proportions": True, - "initial_weight": None, - "method": None, # 'linear', 'raking ratio', 'logit', 'hyperbolic sinus' - "up": None, - "invlo": None, - "alpha": None, - "id_variable": None, - "id_variable_link": None, - } - period = None - simulation = None - target_entity_count = None - other_entity_count = None - target_entity = None - weight_name = None - entities = None - - def __init__( - self, - simulation, - target_margins, - period, - target_entity_count=None, - other_entity_count=None, - parameters=None, - 
filter_by=None, - entity=None, - ): - target_entity = entity - self.parameters = parameters - self.period = period - self.simulation = simulation - margin_variables = list(target_margins.keys()) if target_margins else [] - search_variable = "[A-Za-z_]+[A-Za-z0-9_]*" - - variable_instance_by_variable_name = simulation.tax_benefit_system.variables - entities = { - variable_instance_by_variable_name[variable].entity.key - for var in margin_variables - for variable in re.findall(search_variable, var) - } - for var in margin_variables: - assert ( - len( - { - variable_instance_by_variable_name[variable].entity.key - for variable in re.findall(search_variable, var) - } - ) - == 1 - ), "An expression use variables that are not based on the same entity" - if entity is not None: - entities.add(entity) - self.entities = list(entities) - - if len(entities) == 0: - assert target_entity_count != 0 - assert target_entity in [entity.key for entity in simulation.tax_benefit_system.entities] - elif len(entities) == 2: - assert "id_variable" in parameters and parameters["id_variable"] is not None, ( - "With two entities involved, an id variable of the largest entity is needed" - ) - assert "id_variable_link" in parameters and parameters["id_variable_link"] is not None, ( - "With two entities involved, an id variable linking entity is needed" - ) - entity_id_variable = variable_instance_by_variable_name[parameters["id_variable"]].entity.key - entity_id_variable_link = variable_instance_by_variable_name[parameters["id_variable_link"]].entity.key - assert entity_id_variable in entities, "'id_variable' do not correspond to a calibrating variable entity" - assert entity_id_variable_link in entities, ( - "'id_variable' do not correspond to a calibrating variable entity" - ) - assert entity_id_variable != entity_id_variable_link, ( - "'id_variable_link' must associate a smaller entity to the id of the greater, 'id_variable'" - ) - id_variable = 
simulation.calculate(parameters["id_variable"], period) - id_variable_link = simulation.calculate(parameters["id_variable_link"], period) - assert numpy.unique(id_variable_link).sort() == numpy.unique(id_variable).sort(), ( - "There is no inclusion of one entity in the other" - ) - assert len(id_variable) < len(id_variable_link), ( - "{} seems to be included in {}, not the opposite. Try reverse 'id_variable' and 'id_variable_link'".format( - entity_id_variable_link, entity_id_variable - ) - ) - target_entity = entity_id_variable - elif len(entities) > 2: - raise NotImplementedError("Cannot handle multiple entities") - else: - target_entity = list(entities)[0] - if "id_variable" in parameters: - assert variable_instance_by_variable_name[parameters["id_variable"]].entity.key == target_entity, ( - "'id_variable' isn't the id of the entity targeted by the calibration variables" - ) - - assert simulation.weight_variable_by_entity is not None - if isinstance(simulation.weight_variable_by_entity, dict): - weight_variable_name = simulation.weight_variable_by_entity[target_entity] - else: - weight_variable_name = simulation.weight_variable_by_entity.get(entity) - self.weight_name = weight_name = weight_variable_name - - self.target_entity = target_entity - period = self.period - - if filter_by: - self.filter_by = simulation.calculate(filter_by, period=period) - else: - self.filter_by = numpy.array(1.0) - - assert weight_name is not None, "A calibration needs a weight variable name to act on" - weight_variable = simulation.tax_benefit_system.variables[weight_name] - weight_variable.unit = "" - self._initial_weight_name = weight_name + "_ini" - self.initial_weight = initial_weight = simulation.calculate(weight_name, period=period) - - self.initial_entity_count = sum(initial_weight * self.filter_by) - self.target_entity_count = target_entity_count - self.other_entity_count = other_entity_count - - self.weight = initial_weight.copy() - - # TODO does not work - for entity, 
weight_variable in simulation.weight_variable_by_entity.items(): - self.initial_weight_by_entity[entity] = simulation.calculate(weight_variable, period=period) - - if target_margins: - for variable, target in target_margins.items(): - self.set_target_margin(variable, target) - - def _build_calmar_data(self) -> dict: - """Build the data dictionnary used as calmar input argument. - - Returns: - dict containing one pd.DataFrame by entity: Data used by calmar, and the name of the aggregating entity - """ - # Select only filtered entities - assert self._initial_weight_name is not None - data = {} - for entity in self.entities: - data[entity] = pd.DataFrame() - data[self.target_entity][self._initial_weight_name] = self.initial_weight * self.filter_by - period = self.period - for variable in self.margins_by_variable: - list_var = re.findall("[A-Za-z_]+[A-Za-z0-9_]*", variable) - assert all(var in self.simulation.tax_benefit_system.variables for var in list_var) - dic_eval = {} - for var in list_var: - dic_eval[var] = self.simulation.adaptative_calculate_variable(var, period=period) - value = eval(variable, {}, dic_eval) - data[self.simulation.tax_benefit_system.variables[list_var[0]].entity.key][variable] = value - - if len(self.entities) == 2: - for entity in self.entities: - if entity == self.target_entity: - data[entity]["id_variable"] = self.simulation.adaptative_calculate_variable( - self.parameters["id_variable"], period=period - ) - else: - data[entity][self.simulation.weight_variable_by_entity[entity]] = self.initial_weight_by_entity[ - entity - ] - data[entity]["id_variable"] = self.simulation.adaptative_calculate_variable( - self.parameters["id_variable_link"], period=period - ) - data["target_entity_name"] = self.target_entity - - return data - - def calibrate(self, inplace=False): - """Apply the calibrations by updating weights and margins. - - Args: - inplace (bool, optional): Whether to return the calibrated or to setthem inplace. Defaults to False. 
- - Returns: - numpy.array: calibrated weights - """ - assert self.margins_by_variable is not None, "Margins by variable should be set" - margins_by_variable = self.margins_by_variable - parameters = self.get_parameters() - - if margins_by_variable is not None: - simple_margins_by_variable = { - variable: margins_by_type["target"] for variable, margins_by_type in margins_by_variable.items() - } - else: - simple_margins_by_variable = {} - - if self.target_entity_count is not None: - simple_margins_by_variable["total_population"] = self.target_entity_count - if self.other_entity_count is not None: - simple_margins_by_variable["total_population_smaller_entity"] = self.other_entity_count - - self._update_weights(simple_margins_by_variable, parameters=parameters) - self._update_margins() - if inplace: - self.set_calibrated_weights() - return - - return self.weight - - def get_parameters(self) -> dict: - """Get the parameters. - - Returns: - dict: Parameters - """ - p = {} - p["method"] = self.parameters.get("method", "linear") - if self.parameters.get("invlo") is not None: - p["lo"] = 1 / self.parameters.get("invlo") - p["up"] = self.parameters.get("up") - if p["method"] == "logit": - assert self.parameters.get("invlo") is not None and self.parameters.get("up") is not None - p["lo"] = 1 / self.parameters.get("invlo") - p["up"] = self.parameters.get("up") - if p["method"] == "hyperbolic sinus": - assert self.parameters.get("alpha") is not None - p["alpha"] = self.parameters.get("alpha") - p["use_proportions"] = self.parameters.get("use_proportions", True) - p["initial_weight"] = self.weight_name + "" - return p - - def set_target_margin(self, variable, target): - """Set variable target margin. 
- - Args: - variable: Target variable - target: Target value - """ - simulation = self.simulation - period = self.period - list_var = re.findall("[A-Za-z_]+[A-Za-z0-9_]*", variable) - assert all(var in simulation.tax_benefit_system.variables for var in list_var) - variable_instance = simulation.tax_benefit_system.variables[list_var[0]] - - filter_by = self.filter_by - target_by_category = None - categorical_variable = (variable_instance.value_type in [bool, Enum] and variable == list_var[0]) or ( - variable_instance.unit in ["years", "months"] and variable == list_var[0] - ) - for var in list_var: - expr_categ = var + "[ ]*[<>=!]+" - true_var = simulation.tax_benefit_system.variables[var] - if [var] != list_var and true_var.value_type in [bool, Enum] or true_var.unit in ["years", "months"]: - assert len(re.findall(expr_categ, variable)) > 0, ( - "A categorical variable is used in an expression without direct condition on its value. Please use inequality operator to transform it into float" - ) - if categorical_variable: - value = simulation.calculate(variable, period=period) - filtered_value = ( - value if (filter_by == numpy.array(1.0) or all(filter_by)) else value[filter_by.astype(bool)] - ) - categories = numpy.sort(numpy.unique(filtered_value)) - target_by_category = dict(zip(categories, target.values())) - - if not self.margins_by_variable: - self.margins_by_variable = {} - if variable not in self.margins_by_variable: - self.margins_by_variable[variable] = {} - self.margins_by_variable[variable]["target"] = target_by_category or target - self._update_margins() - - def reset(self): - """Reset the calibration to its initial state.""" - simulation = self.simulation - simulation.delete_arrays(self.weight_name, self.period) - simulation.set_input(self.weight_name, self.period, numpy.array(self.initial_weight)) - - def set_calibrated_weights(self): - """Modify the weights to use the calibrated weights.""" - period = self.period - simulation = self.simulation - 
simulation.set_input(self.weight_name, period, self.weight) - for weight_name in simulation.weight_variable_by_entity.values(): - weight_variable = simulation.tax_benefit_system.variables[weight_name] - if weight_name == self.weight_name: - weight_variable.unit = "base_weight" # The weight variable is flagged as the one that have changed - # Delete other entites already computed weigths - # to ensure that this weights a recomputed if they derive from - # the calibrated weight variable - elif weight_variable.formulas: - simulation.delete_arrays(weight_variable.name, period) - - def summary(self): - """Summarize margins.""" - margins_df = pd.DataFrame.from_dict(self.margins_by_variable).T - margins_df.loc["entity_count", "actual"] = (self.weight * self.filter_by).sum() - margins_df.loc["entity_count", "initial"] = (self.initial_weight * self.filter_by).sum() - margins_df.loc["entity_count", "target"] = self.target_entity_count - return margins_df - - def _update_margins(self): - """Update margins.""" - for variable in self.margins_by_variable: - simulation = self.simulation - period = self.period - target_entity = self.target_entity - - # These are the varying weights - weight = self.weight - filter_by = self.filter_by - initial_weight = self.initial_weight - - list_var = re.findall("[A-Za-z_]+[A-Za-z0-9_]*", variable) - dic_eval = {} - for var in list_var: - dic_eval[var] = simulation.adaptative_calculate_variable(var, period=period) - value = eval(variable, {}, dic_eval) - - weight_variable = simulation.weight_variable_by_entity[target_entity] - - if ( - len(self.entities) == 2 - and simulation.tax_benefit_system.variables[list_var[0]].entity.key != self.target_entity - ): - value_df = pd.DataFrame(value) - id_variable = self.parameters["id_variable_link"] - value_df[id_variable] = simulation.adaptative_calculate_variable(id_variable, period=period) - value = value_df.groupby(id_variable).sum().to_numpy().flatten() - - if filter_by != 1: - if weight_variable != 
self.weight_name: - NotImplementedError( - "No filtering possible so far when target variable is not on the same entity as varying weights" - ) - - weight = weight[filter_by] - initial_weight = initial_weight[filter_by] - value = value[filter_by] - - margin_items = [ - ("actual", weight), - ("initial", initial_weight), - ] - - variable_instance = simulation.tax_benefit_system.get_variable(list_var[0]) - assert variable_instance is not None - if variable_instance.value_type in [bool, Enum] and variable == list_var[0]: - margin_items.append(("category", value)) - margins_data_frame = pd.DataFrame.from_items(margin_items) - margins_data_frame = margins_data_frame.groupby("category", sort=True).sum() - margin_by_type = margins_data_frame.to_dict() - else: - margin_by_type = { - "actual": (weight * value).sum(), - "initial": (initial_weight * value).sum(), - } - self.margins_by_variable[variable].update(margin_by_type) - - def _update_weights(self, margins, parameters=None): - """Run calmar, stores new weights and returns adjusted margins. - - Args: - margins: margins - parameters: Parameters (Default value = {}) - - Returns: - dict: Updated margins - - """ - if parameters is None: - parameters = {} - - target_entity = self.target_entity - weight_variable = self.simulation.weight_variable_by_entity[target_entity] - - if self.weight_name != weight_variable: - raise NotImplementedError( - "Calmar needs to be adapted. 
Consider using a projected target on the entity with changing weights" - ) - - data = self._build_calmar_data() - - assert self._initial_weight_name is not None - parameters["initial_weight"] = self._initial_weight_name - if self.target_entity_count: - margins["total_population"] = self.target_entity_count - if self.other_entity_count: - margins["total_population_smaller_entity"] = self.other_entity_count - - val_pondfin, lambdasol, updated_margins = calmar(data, margins, **parameters) - # Updating only after filtering weights - self.weight = val_pondfin * self.filter_by + self.weight * (logical_not(self.filter_by)) - - return updated_margins diff --git a/openfisca_survey_manager/calmar.py b/openfisca_survey_manager/calmar.py deleted file mode 100644 index d1a77305..00000000 --- a/openfisca_survey_manager/calmar.py +++ /dev/null @@ -1,404 +0,0 @@ -"""CALMAR: Calibrates weights to satisfy margins constraints.""" - -import logging -import operator - -import pandas as pd -from numpy import array, dot, exp, float64, ones, sqrt, unique, zeros -from numpy import log as ln - -log = logging.getLogger(__name__) - - -def linear(u): - """ - - Args: - u: - - Returns: - - """ - return 1 + u - - -def linear_prime(u): - """ - - Args: - u: - - Returns: - - """ - return ones(u.shape, dtype=float) - - -def raking_ratio(u): - """ - - Args: - u: - - Returns: - - """ - return exp(u) - - -def raking_ratio_prime(u): - """ - - Args: - u: - - Returns: - - """ - return exp(u) - - -def logit(u, low, up): - """ - - Args: - u: - low: - up: - - Returns: - - """ - a = (up - low) / ((1 - low) * (up - 1)) - return (low * (up - 1) + up * (1 - low) * exp(a * u)) / (up - 1 + (1 - low) * exp(a * u)) - - -def logit_prime(u, low, up): - """ - - Args: - u: - low: - up: - - Returns: - - """ - a = (up - low) / ((1 - low) * (up - 1)) - return ( - (a * up * (1 - low) * exp(a * u)) * (up - 1 + (1 - low) * exp(a * u)) - - (low * (up - 1) + up * (1 - low) * exp(a * u)) * (1 - low) * a * exp(a * u) - ) / (up - 1 
+ (1 - low) * exp(a * u)) ** 2 - - -def hyperbolic_sinus(u, alpha): - logarithm = ln(2 * alpha * u + sqrt(4 * (alpha**2) * (u**2) + 1)) - return 0.5 * (logarithm / alpha + sqrt((logarithm / alpha) ** 2 + 4)) - - -def hyperbolic_sinus_prime(u, alpha): - square = sqrt(4 * (alpha**2) * (u**2) + 1) - return 0.5 * ( - ((4 * (alpha**2) * u) / square + 2 * alpha) / (alpha * (square + 2 * alpha * u)) - + ((4 * (alpha**2) * u / square + 2 * alpha) * ln(square + 2 * alpha * u)) - / ((alpha**2) * (square + 2 * alpha * u) * sqrt((ln(square + 2 * alpha * u) ** 2) + 4)) - ) - - -def build_dummies_dict(data): - """ - - Args: - data: - - Returns: - - - """ - unique_val_list = unique(data) - output = {} - for val in unique_val_list: - output[val] = data == val - return output - - -def calmar( - data_in, - margins: dict, - initial_weight: str, - method="linear", - lo=None, - up=None, - alpha=None, - use_proportions: bool = False, - xtol: float = 1.49012e-08, - maxfev: int = 256, -): - """Calibrates weights to satisfy margins constraints. - - Args: - data_in (pd.DataFrame): The observations data by entity + a dictionary to identify the target entity - margins (dict): Margins is a dictionnary containing for each variable as key the following values - - a scalar for numeric variables - - a dictionnary with categories as key and populations as values - - eventually a key named `total_population` with value the total population. If absent it is initialized to the actual total population - - eventually a key named `total_population_smaller_entity` with value the total number of the second entity. If absent it is initialized to the actual total population - - initial_weight (str): Initial weight variable. - method (str, optional): Calibration method. Should be 'linear', 'raking ratio', 'logit' or 'hyperbolic sinus'. Defaults to 'linear'. - lo (float, optional): Lower bound on weights ratio. Mandatory when using logit method. Should be < 1. Defaults to None. 
- up (float, optional): Upper bound on weights ratio. Mandatory when using logit method. Should be > 1. Defaults to None. - alpha (float, optional): Bound on weights ratio. Mandatory when using hyperbolic sinus method. Should be > 0. Defaults to None. - use_proportions (bool, optional): When True use proportions if total population from margins doesn't match total population. Defaults to False. - xtol (float, optional): Relative precision on lagrangian multipliers. Defaults to 1.49012e-08 (fsolve xtol). - maxfev (int, optional): Maximum number of function evaluation. Defaults to 256. - - Raises: - Exception: [description] - Exception: [description] - Exception: [description] - - Returns: - np.array: Margins adjusting weights - float: Lagrangian parameter - dict: Updated margins - - Sources: - https://github.com/InseeFrLab/Calmar2/blob/main/manuel_utilisation.pdf - """ - from scipy.optimize import fsolve - - target_entity = data_in["target_entity_name"] - smaller_entity = None - entities = [target_entity] - for key in data_in: - if key != "target_entity_name" and key != target_entity: - smaller_entity = key - entities += [smaller_entity] - - # remove null weights and keep original data - null_weight_observations = data_in[target_entity][initial_weight].isnull().sum() - if null_weight_observations > 0: - log.info("{} observations have a NaN weight. Not used in the calibration.".format(null_weight_observations)) - - is_non_zero_weight = data_in[target_entity][initial_weight].fillna(0) > 0 - if is_non_zero_weight.sum() > null_weight_observations: - log.info( - "{} observations have a zero weight. 
Not used in the calibration.".format( - (data_in[target_entity][initial_weight].fillna(0) <= 0).sum() - null_weight_observations - ) - ) - - variables = set(margins.keys()).intersection(set(data_in[target_entity].columns)) - for variable in variables: - null_value_observations = data_in[target_entity][variable].isnull().sum() - if null_value_observations > 0: - log.info( - "For variable {}, {} observations have a NaN value. Not used in the calibration.".format( - variable, null_value_observations - ) - ) - is_non_zero_weight = is_non_zero_weight & data_in[target_entity][variable].notnull() - - if not is_non_zero_weight.all(): - log.info("We drop {} observations.".format((~is_non_zero_weight).sum())) - - data = {} - if smaller_entity: - data[smaller_entity] = pd.DataFrame() - for col in data_in[smaller_entity].columns: - data[smaller_entity][col] = data_in[smaller_entity][col].copy() - data[target_entity] = pd.DataFrame() - for col in data_in[target_entity].columns: - data[target_entity][col] = data_in[target_entity].loc[is_non_zero_weight, col].copy() - - if not margins: - raise Exception("Calmar requires non empty dict of margins") - - # choose method - assert method in ["linear", "raking ratio", "logit", "hyperbolic sinus"], ( - "method should be 'linear', 'raking ratio', 'logit' or 'hyperbolic sinus'" - ) - if method == "linear": - F = linear - F_prime = linear_prime - elif method == "raking ratio": - F = raking_ratio - F_prime = raking_ratio_prime - elif method == "logit": - assert up is not None, "When method == 'logit', a value > 1 for up is mandatory" - assert up > 1, "up should be > 1" - assert lo is not None, "When method == 'logit', a value < 1 for lo is mandatory" - assert lo < 1, "lo should be < 1" - - def F(x): - return logit(x, lo, up) - - def F_prime(x): - return logit_prime(x, lo, up) - elif method == "hyperbolic sinus": - assert alpha is not None, "When method == 'hyperbolic sinus', a value > 0 for alpha is mandatory" - assert alpha > 0, "alpha 
should be > 0" - - def F(x): - return hyperbolic_sinus(x, alpha) - - def F_prime(x): - return hyperbolic_sinus_prime(x, alpha) - - margins = margins.copy() - # Construction observations matrix - if "total_population" in margins: - total_population = margins.pop("total_population") - else: - total_population = data[target_entity][initial_weight].fillna(0).sum() - if smaller_entity is not None: - if "total_population_smaller_entity" in margins: - total_population_smaller_entity = margins.pop("total_population_smaller_entity") - else: - total_population_smaller_entity = total_population * len(data[smaller_entity]) / len(data[target_entity]) - else: - total_population_smaller_entity = 0 - - nk = len(data[target_entity][initial_weight]) - # number of Lagrange parameters (at least total population, and potentially total population 2) - nj = 1 + (smaller_entity is not None) - - margins_new = {} - margins_new_dict = {} - for entity in list(entities): - for var, val in margins.items(): - if var in data[entity].columns: - if isinstance(val, dict): - dummies_dict = build_dummies_dict(data[entity][var]) - k, pop = 0, 0 - list_col_to_add = [data[entity]] - for cat, nb in val.items(): - cat_varname = var + "_" + str(cat) - list_col_to_add.append(pd.Series(dummies_dict[cat], name=cat_varname)) - margins_new[cat_varname] = nb - if var not in margins_new_dict: - margins_new_dict[var] = {} - margins_new_dict[var][cat] = nb - pop += nb - k += 1 - nj += 1 - data[entity] = pd.concat(list_col_to_add, axis=1) - # Check total popualtion - population = (entity == target_entity) * total_population + ( - entity != target_entity - ) * total_population_smaller_entity - if pop != population: - if use_proportions: - log.info( - "calmar: categorical variable {} is inconsistent with population; using proportions".format( - var - ) - ) - for cat, nb in val.items(): - cat_varname = var + "_" + str(cat) - margins_new[cat_varname] = nb * population / pop - margins_new_dict[var][cat] = nb * population 
/ pop - else: - raise Exception( - "calmar: categorical variable {} weights sums up to {} != {}".format( - var, pop, population - ) - ) - else: - margins_new[var] = val - margins_new_dict[var] = val - nj += 1 - - # On conserve systematiquement la population - if hasattr(data, "dummy_is_in_pop") or hasattr(data, "dummy_is_in_pop_smaller_entity"): - raise Exception("dummy_is_in_pop and dummy_is_in_pop_smaller_entity are not valid variable names") - - data[target_entity]["dummy_is_in_pop"] = ones(nk) - margins_new["dummy_is_in_pop"] = total_population - if smaller_entity: - data[smaller_entity]["dummy_is_in_pop_smaller_entity"] = ones(len(data[smaller_entity]["id_variable"])) - margins_new["dummy_is_in_pop_smaller_entity"] = total_population_smaller_entity - data_final = data[target_entity] - - if smaller_entity: - liste_col_to_sum = [variable for variable in data[smaller_entity] if variable != "id_variable"] - dic_agg = {} - for variable_to_sum in liste_col_to_sum: - dic_agg[variable_to_sum] = "sum" - data_second = data[smaller_entity].groupby("id_variable").agg(dic_agg) - data_final = pd.merge(data_second, data[target_entity], on="id_variable") - nk = len(data_final[initial_weight]) - - # paramètres de Lagrange initialisés à zéro - lambda0 = zeros(nj) - - # initial weights - d = data_final[initial_weight].values - x = zeros((nk, nj)) # nb obs x nb constraints - xmargins = zeros(nj) - margins_dict = {} - j = 0 - for var, val in margins_new.items(): - x[:, j] = data_final[var] - xmargins[j] = val - margins_dict[var] = val - j += 1 - - # Résolution des équations du premier ordre - def constraint(lambda_): - return dot(d * F(dot(x, lambda_)), x) - xmargins - - def constraint_prime(lambda_): - return dot(d * (x.T * F_prime(dot(x, lambda_))), x) - # le jacobien ci-dessus est constraintprime = @(lambda) x*(d.*Fprime(x'*lambda)*x'); - - tries, ier = 0, 2 - err_max = 1 - conv = 1 - while (ier == 2 or ier == 5 or ier == 4) and not (tries >= 10 or (err_max < 1e-6 and conv < 
1e-8)): - lambdasol, infodict, ier, mesg = fsolve( - constraint, - lambda0, - fprime=constraint_prime, - maxfev=maxfev, - xtol=xtol, - full_output=1, - ) - lambda0 = 1 * lambdasol - tries += 1 - - pondfin = d * F(dot(x, lambdasol)) - rel_error = {} - for var, val in margins_new.items(): # noqa analysis:ignore - rel_error[var] = abs((data_final[var] * pondfin).sum() - margins_dict[var]) / margins_dict[var] - sorted_err = sorted(rel_error.items(), key=operator.itemgetter(1), reverse=True) - - conv = abs(err_max - sorted_err[0][1]) - err_max = sorted_err[0][1] - - if ier == 2 or ier == 5 or ier == 4: - log.debug("optimization converged after {} tries".format(tries)) - - # rebuilding a weight vector with the same size of the initial one - pondfin_out = array(data_in[target_entity][initial_weight], dtype=float64) - pondfin_out[is_non_zero_weight] = pondfin - - del infodict, mesg # TODO better exploit this information - - return pondfin_out, lambdasol, margins_new_dict - - -def check_calmar(margins, margins_new_dict=None): - """ - - Args: - margins: - margins_new_dict: (Default value = None) - - Returns: - - """ - for variable, margin in margins.items(): - if variable != "total_population": - print(variable, margin, abs(margin - margins_new_dict[variable]) / abs(margin)) # noqa analysis:ignore diff --git a/openfisca_survey_manager/coicop.py b/openfisca_survey_manager/coicop.py deleted file mode 100644 index f2cc8326..00000000 --- a/openfisca_survey_manager/coicop.py +++ /dev/null @@ -1,122 +0,0 @@ -import logging -import os - -import pandas as pd - -from openfisca_survey_manager.paths import openfisca_survey_manager_location - -log = logging.getLogger(__name__) - - -legislation_directory = os.path.join( - openfisca_survey_manager_location, - "openfisca_survey_manager", - "assets", -) - - -sub_levels = ["divisions", "groupes", "classes", "sous_classes", "postes"] -divisions = ["0{}".format(i) for i in range(1, 10)] + ["11", "12"] - - -def 
build_coicop_level_nomenclature(level, year=2016, keep_code=False, to_csv=False): - assert level in sub_levels - log.debug("Reading nomenclature coicop {} source data for level {}".format(year, level)) - try: - if year == 1998: - data_frame = pd.read_csv( - os.path.join( - legislation_directory, "COICOP/1998/nomenclature_coicop1998_source_by_{}.csv".format(level) - ), - sep=";", - header=None, - ) - if year == 2016: - data_frame = pd.read_excel( - os.path.join( - legislation_directory, "COICOP/2016/nomenclature_coicop2016_source_by_{}.xls".format(level) - ), - header=None, - ) - - except Exception as e: - log.info("Error when reading nomenclature coicop source data for level {}".format(level)) - raise e - - data_frame.reset_index(inplace=True) - data_frame.rename(columns={0: "code_coicop", 1: "label_{}".format(level[:-1])}, inplace=True) - data_frame = data_frame.iloc[2:].copy() - if year == 2016: - data_frame["code_coicop"] = data_frame["code_coicop"].apply(lambda x: x[1:]) - - index, stop = 0, False - for sub_level in sub_levels: - if stop: - continue - if sub_level == "divisions": - data_frame[sub_level] = data_frame["code_coicop"].str[index : index + 2].astype(int) - index = index + 3 - else: - data_frame[sub_level] = data_frame["code_coicop"].str[index : index + 1].astype(int) - index = index + 2 - - if level == sub_level: - stop = True - - if keep_code or level == "postes": - data_frame["code_coicop"] = data_frame["code_coicop"].str.lstrip("0") - else: - del data_frame["code_coicop"] - - data_frame.reset_index(inplace=True, drop=True) - if to_csv: - data_frame.to_csv( - os.path.join(legislation_directory, "nomenclature_coicop{}_by_{}.csv".format(year, level)), - ) - - return data_frame - - -def build_raw_coicop_nomenclature(year=2016): - """Builds raw COICOP nomenclature from ecoicop levels""" - coicop_nomenclature = None - - for index in range(len(sub_levels) - 1): - level = sub_levels[index] - next_level = sub_levels[index + 1] - on = sub_levels[: index + 
1] - - df_left = ( - coicop_nomenclature if coicop_nomenclature is not None else build_coicop_level_nomenclature(level, year) - ) - df_right = build_coicop_level_nomenclature(next_level, year) - - # Drop any residual 'index' columns to avoid merge conflicts - for df in (df_left, df_right): - if "index" in df.columns: - df.drop(columns=["index"], inplace=True) - - coicop_nomenclature = pd.merge( - df_left, - df_right, - on=on, - how="inner", - validate="one_to_many", # safety check - ) - - # Reorder and select relevant columns - coicop_nomenclature = coicop_nomenclature[ - ["code_coicop"] + [f"label_{sub_level[:-1]}" for sub_level in sub_levels] + sub_levels - ].copy() - - return coicop_nomenclature[ - ["label_division", "label_groupe", "label_classe", "label_sous_classe", "label_poste", "code_coicop"] - ].copy() - - -if __name__ == "__main__": - import sys - - logging.basicConfig(level=logging.INFO, stream=sys.stdout) - raw_coicop_nomenclature = build_raw_coicop_nomenclature() - log.info(raw_coicop_nomenclature) diff --git a/openfisca_survey_manager/config.py b/openfisca_survey_manager/config.py deleted file mode 100644 index d41199a6..00000000 --- a/openfisca_survey_manager/config.py +++ /dev/null @@ -1,21 +0,0 @@ -import configparser -import os - - -class Config(configparser.ConfigParser): - config_ini = None - - def __init__(self, config_files_directory=None): - configparser.ConfigParser.__init__(self) - if config_files_directory is not None: - config_ini = os.path.join(config_files_directory, "config.ini") - assert os.path.exists(config_ini), f"{config_ini} is not a valid path" - self.config_ini = config_ini - self.read([config_ini]) - - def save(self): - assert self.config_ini, "configuration file path is not defined" - assert os.path.exists(self.config_ini) - config_file = open(self.config_ini, "w") - self.write(config_file) - config_file.close() diff --git a/openfisca_survey_manager/config_files_templates/__init__.py 
b/openfisca_survey_manager/config_files_templates/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/openfisca_survey_manager/config_files_templates/config_template.ini b/openfisca_survey_manager/config_files_templates/config_template.ini deleted file mode 100644 index 62df1b2c..00000000 --- a/openfisca_survey_manager/config_files_templates/config_template.ini +++ /dev/null @@ -1,20 +0,0 @@ -# Fill in the following options. You may copy this file to config.ini - -[collections] - -# Mandatory -collections_directory = None - -# Fill in or use build_collection -my_first_collection = None - -# Fill in or use build_collection -my_second_collection = None - - -[data] - -# Mandatory -output_directory = None - -tmp_directory = None diff --git a/openfisca_survey_manager/config_files_templates/raw_data_template.ini b/openfisca_survey_manager/config_files_templates/raw_data_template.ini deleted file mode 100644 index 6eae59eb..00000000 --- a/openfisca_survey_manager/config_files_templates/raw_data_template.ini +++ /dev/null @@ -1,8 +0,0 @@ -# This is an example of a survey collection entry that documents the raw data location - -[my_survey_collection] - -my_first_survey = /path/to/my/first/survey - -my_second_survey = /path/to/my/second/survey - diff --git a/openfisca_survey_manager/google_colab.py b/openfisca_survey_manager/google_colab.py deleted file mode 100644 index ed507396..00000000 --- a/openfisca_survey_manager/google_colab.py +++ /dev/null @@ -1,23 +0,0 @@ -import configparser -import os - -from openfisca_survey_manager.paths import default_config_files_directory as config_files_directory - - -def create_raw_data_ini(value_by_option_by_section=None): - """Creates raw_data.ini configureation file - - Args: - value_by_option_by_section(dict(dict)): Options value by section (Default value = None) - - """ - config_parser = configparser.ConfigParser() - - if value_by_option_by_section is not None: - for section, value_by_option in 
value_by_option_by_section.items(): - config_parser.add_section(section) - for option, value in value_by_option.items(): - config_parser.set(section, option, value) - - with open(os.path.join(config_files_directory, "raw_data.ini"), "w") as raw_data_config_file: - config_parser.write(raw_data_config_file) diff --git a/openfisca_survey_manager/input_dataframe_generator.py b/openfisca_survey_manager/input_dataframe_generator.py deleted file mode 100644 index 58fcafcf..00000000 --- a/openfisca_survey_manager/input_dataframe_generator.py +++ /dev/null @@ -1,317 +0,0 @@ -import configparser -import logging -import os -import random - - -import numpy as np -import pandas as pd -from openfisca_core import periods - -from openfisca_survey_manager.paths import default_config_files_directory, openfisca_survey_manager_location -from openfisca_survey_manager.survey_collections import SurveyCollection -from openfisca_survey_manager.surveys import Survey - -log = logging.getLogger(__name__) - - -def make_input_dataframe_by_entity(tax_benefit_system, nb_persons, nb_groups): - """Generate a dictionnary of dataframes containing nb_persons persons spread in nb_groups groups. 
- - Args: - tax_benefit_system(TaxBenefitSystem): the tax_benefit_system to use - nb_persons(int): the number of persons in the system - nb_groups(int): the number of collective entities in the system - - Returns: - A dictionary whose keys are entities and values the corresponding data frames - - Example: - - >>> from openfisca_survey_manager.input_dataframe_generator import make_input_dataframe_by_entity - >>> from openfisca_country_template import CountryTaxBenefitSystem - >>> tbs = CountryTaxBenefitSystem() - >>> input_dataframe_by_entity = make_input_dataframe_by_entity(tbs, 400, 100) - >>> sorted(input_dataframe_by_entity['person'].columns.tolist()) - ['household_id', 'household_role', 'household_role_index', 'person_id'] - >>> sorted(input_dataframe_by_entity['household'].columns.tolist()) - [] - """ - input_dataframe_by_entity = {} - person_entity = [entity for entity in tax_benefit_system.entities if entity.is_person][0] - person_id = np.arange(nb_persons) - input_dataframe_by_entity = {} - input_dataframe_by_entity[person_entity.key] = pd.DataFrame( - { - person_entity.key + "_id": person_id, - } - ) - input_dataframe_by_entity[person_entity.key].set_index("person_id") - # - seed = 42 - random.seed(seed) - adults = [0] + sorted(random.sample(range(1, nb_persons), nb_groups - 1)) - members_entity_id = np.empty(nb_persons, dtype=int) - # A role index is an index that every person within an entity has. - # For instance, the 'first_parent' has role index 0, the 'second_parent' 1, the first 'child' 2, the second 2, etc. 
- members_role_index = np.empty(nb_persons, dtype=int) - id_group = -1 - for id_person in range(nb_persons): - if id_person in adults: - id_group += 1 - role_index = 0 - else: - role_index = 2 - members_role_index[id_person] = role_index - members_entity_id[id_person] = id_group - - for entity in tax_benefit_system.entities: - if entity.is_person: - continue - key = entity.key - person_dataframe = input_dataframe_by_entity[person_entity.key] - person_dataframe[key + "_id"] = members_entity_id - person_dataframe[key + "_role_index"] = members_role_index - person_dataframe[key + "_role"] = np.where( - members_role_index == 0, entity.flattened_roles[0].key, entity.flattened_roles[-1].key - ) - input_dataframe_by_entity[key] = pd.DataFrame({key + "_id": range(nb_groups)}) - input_dataframe_by_entity[key].set_index(key + "_id", inplace=True) - - return input_dataframe_by_entity - - -def random_data_generator(tax_benefit_system, nb_persons, nb_groups, variable_generators_by_period, collection=None): - """ - Generate randomn values for some variables of a tax-benefit system and store them in a specified collection. - - Args: - tax_benefit_system (TaxBenefitSystem): tax_benefit_system: the tax_benefit_system to use - nb_persons (int): the number of persons in the data - nb_groups (int): the number of collective entities in the data - variable_generators_by_period (dict): parameters of the varaibles for every period - collection (str, optional): collection where to store the input survey. Defaults to None. 
- - Returns: - dict: The entities tables by period - """ - initial_input_dataframe_by_entity = make_input_dataframe_by_entity(tax_benefit_system, nb_persons, nb_groups) - table_by_entity_by_period = {} - for period, variable_generators in sorted(variable_generators_by_period.items()): - input_dataframe_by_entity = initial_input_dataframe_by_entity.copy() - table_by_entity_by_period[period] = table_by_entity = {} - for variable_generator in variable_generators: - variable = variable_generator["variable"] - max_value = variable_generator["max_value"] - condition = variable_generator.get("condition", None) - randomly_init_variable( - tax_benefit_system=tax_benefit_system, - input_dataframe_by_entity=input_dataframe_by_entity, - variable_name=variable, - max_value=max_value, - condition=condition, - ) - - for entity, input_dataframe in input_dataframe_by_entity.items(): - if collection is not None: - set_table_in_survey(input_dataframe, entity, period, collection, survey_name="input") - - table_by_entity[entity] = entity + "_" + str(period) - - return table_by_entity_by_period - - -def randomly_init_variable( - tax_benefit_system, input_dataframe_by_entity, variable_name, max_value, condition=None, seed=None -): - """Initialises a variable with random values (from 0 to max_value). - If a condition vector is provided, only set the value of persons or groups for which condition is True. 
- - Args: - tax_benefit_system(TaxBenefitSystem): A tax benefit system - input_dataframe_by_entity(dict): A dictionnary of entity dataframes - variable_name: The name of the variable to initialize - max_value: Maximum value of the variable - condition: Boolean vector of obersvations to modify (Default value = None) - seed: Random seed used whe ndrawing the values (Default value = None) - - Examples - >>> from openfisca_survey_manager.input_dataframe_generator import make_input_dataframe_by_entity - >>> from openfisca_country_template import CountryTaxBenefitSystem - >>> tbs = CountryTaxBenefitSystem() - >>> input_dataframe_by_entity = make_input_dataframe_by_entity(tbs, 400, 100) - >>> randomly_init_variable(tbs, input_dataframe_by_entity, 'salary', max_value = 50000, condition = "household_role == 'first_parent'") # Randomly set a salaire_net for all persons between 0 and 50000? - >>> sorted(input_dataframe_by_entity['person'].columns.tolist()) - ['household_id', 'household_role', 'household_role_index', 'person_id', 'salary'] - >>> input_dataframe_by_entity['person'].salary.max() <= 50000 - True - >>> len(input_dataframe_by_entity['person'].salary) - 400 - >>> randomly_init_variable(tbs, input_dataframe_by_entity, 'rent', max_value = 1000) - >>> sorted(input_dataframe_by_entity['household'].columns.tolist()) - ['rent'] - >>> input_dataframe_by_entity['household'].rent.max() <= 1000 - True - >>> input_dataframe_by_entity['household'].rent.max() >= 1 - True - >>> len(input_dataframe_by_entity['household'].rent) - 100 - """ - - variable = tax_benefit_system.variables[variable_name] - entity = variable.entity - - condition = True if condition is None else input_dataframe_by_entity[entity.key].eval(condition).values - - if seed is None: - seed = 42 - np.random.seed(seed) - count = len(input_dataframe_by_entity[entity.key]) - value = (np.random.rand(count) * max_value * condition).astype(variable.dtype) - input_dataframe_by_entity[entity.key][variable_name] = value - - 
-def set_table_in_survey( - input_dataframe, - entity, - period, - collection, - survey_name, - survey_label=None, - table_label=None, - table_name=None, - config_files_directory=default_config_files_directory, - source_format=None, - parquet_file=None, -): - period = periods.period(period) - if table_name is None: - table_name = entity + "_" + str(period) - if table_label is None: - table_label = f"Input data for entity {entity} at period {period}" - try: - survey_collection = SurveyCollection.load(collection=collection, config_files_directory=config_files_directory) - except configparser.NoOptionError as e: - log.warning(f"set_table_in_survey configparser.NoOptionError : {e}") - survey_collection = SurveyCollection(name=collection, config_files_directory=config_files_directory) - except configparser.NoSectionError as e: # For tests - log.warning(f"set_table_in_survey configparser.NoSectionError : {e}") - data_dir = os.path.join( - openfisca_survey_manager_location, - "openfisca_survey_manager", - "tests", - "data_files", - ) - survey_collection = SurveyCollection( - name=collection, - config_files_directory=data_dir, - ) - except FileNotFoundError as e: - log.warning(f"set_table_in_survey FileNotFoundError : {e}") - survey_collection = SurveyCollection(name=collection, config_files_directory=config_files_directory) - - try: - survey = survey_collection.get_survey(survey_name) - except AssertionError: - log.info(f"Survey {survey_name} does not exist, it will be created.") - survey = Survey( - name=survey_name, - label=survey_label or None, - survey_collection=survey_collection, - ) - - if survey.hdf5_file_path is None and survey.parquet_file_path is None: - config = survey.survey_collection.config - directory_path = config.get("data", "output_directory") - if not os.path.isdir(directory_path): - log.warning(f"{directory_path} who should be the data directory does not exist: we create the directory") - os.makedirs(directory_path) - if source_format is None: - 
survey.hdf5_file_path = os.path.join(directory_path, survey.name + ".h5") - elif source_format == "parquet": - survey.parquet_file_path = os.path.join(directory_path, survey.name) - if not os.path.isdir(survey.parquet_file_path): - log.warning( - f"{survey.parquet_file_path} who should be the parquet data directory does not exist: we create the directory" - ) - os.makedirs(survey.parquet_file_path) - - assert (survey.hdf5_file_path is not None) or (survey.parquet_file_path is not None) - if source_format == "parquet" and parquet_file is None: - parquet_file = os.path.join(survey.parquet_file_path, table_name + ".parquet") - survey.insert_table(label=table_label, name=table_name, dataframe=input_dataframe, parquet_file=parquet_file) - # If a survey with save name exist it will be overwritten - survey_collection.surveys = [ - kept_survey for kept_survey in survey_collection.surveys if kept_survey.name != survey_name - ] - survey_collection.surveys.append(survey) - collections_directory = survey_collection.config.get("collections", "collections_directory") - assert os.path.isdir( - collections_directory - ), f"""{collections_directory} who should be the collections' directory does not exist. 
-Fix the option collections_directory in the collections section of your config file.""" - collection_json_path = os.path.join(collections_directory, f"{collection}.json") - survey_collection.dump(json_file_path=collection_json_path) - - -def build_input_dataframe_from_test_case( - survey_scenario, test_case_scenario_kwargs, period=None, computed_variables=None -): - if computed_variables is None: - computed_variables = [] - tax_benefit_system = survey_scenario.tax_benefit_system - simulation = tax_benefit_system.new_scenario().init_single_entity(**test_case_scenario_kwargs).new_simulation() - array_by_variable = {} - period = periods.period(period) - - def compute_variable(variable): - if variable not in tax_benefit_system.variables: - return - if period.unit == periods.YEAR: - try: - array_by_variable[variable] = simulation.calculate(variable, period=period) - except Exception as e: - log.debug(e) - try: - array_by_variable[variable] = simulation.calculate_add(variable, period=period) - except Exception as e: - log.debug(e) - array_by_variable[variable] = simulation.calculate(variable, period=period.first_month) - elif period.unit == periods.MONTH: - try: - array_by_variable[variable] = simulation.calculate(variable, period=period) - except ValueError as e: - log.debug(e) - array_by_variable[variable] = simulation.calculate(variable, period=period.this_year) / 12 - - for scenario_key, value_by_variable in test_case_scenario_kwargs.items(): - if scenario_key == "axes": - variables = [test_case_scenario_kwargs["axes"][0][0]["name"]] - - else: - if value_by_variable is None: # empty parent2 for example - continue - if not isinstance(value_by_variable, dict): # enfants - continue - variables = list(value_by_variable.keys()) - - for variable in variables: - compute_variable(variable) - - for variable in computed_variables: - compute_variable(variable) - - for entity in tax_benefit_system.entities: - if entity.is_person: - continue - 
array_by_variable[survey_scenario.id_variable_by_entity_key[entity.key]] = range( - test_case_scenario_kwargs["axes"][0][0]["count"] - ) - - input_data_frame = pd.DataFrame(array_by_variable) - - for entity in tax_benefit_system.entities: - if entity.is_person: - continue - input_data_frame[survey_scenario.role_variable_by_entity_key[entity.key]] = 0 - return input_data_frame diff --git a/openfisca_survey_manager/matching.py b/openfisca_survey_manager/matching.py deleted file mode 100644 index 6b4c51ab..00000000 --- a/openfisca_survey_manager/matching.py +++ /dev/null @@ -1,152 +0,0 @@ -import logging -import os - -import pandas as pd - -from openfisca_survey_manager.paths import openfisca_survey_manager_location - -log = logging.getLogger(__name__) - - -config_files_directory = os.path.join(openfisca_survey_manager_location) - - -def nnd_hotdeck_using_feather(receiver=None, donor=None, matching_variables=None, z_variables=None): - """ - Not working - """ - import feather - - assert receiver is not None and donor is not None - assert matching_variables is not None - - temporary_directory_path = os.path.join(config_files_directory, "tmp") - assert os.path.exists(temporary_directory_path) - receiver_path = os.path.join(temporary_directory_path, "receiver.feather") - donor_path = os.path.join(temporary_directory_path, "donor.feather") - feather.write_dataframe(receiver, receiver_path) - feather.write_dataframe(donor, donor_path) - if isinstance(matching_variables, str): - match_vars = '"{}"'.format(matching_variables) - elif len(matching_variables) == 1: - match_vars = '"{}"'.format(matching_variables[0]) - else: - match_vars = '"{}"'.format("todo") - - r_script = """ -rm(list=ls()) -gc() -devtools::install_github("wesm/feather/R") -library(feather) -library(StatMatch) - -receiver <- read_feather({receiver_path}) -donor <- read_feather({donor_path}) -summary(receiver) -summary(donor) - -# variables -receiver = as.data.frame(receiver) -donor = as.data.frame(donor) 
-gc() -match_vars = {match_vars} -# don_class = c("sexe") -out.nnd <- NND.hotdeck( - data.rec = receiver, data.don = donor, match.vars = match_vars - ) - -# out.nndsummary(out.nnd$mtc.ids) -# head(out.nnd$mtc.ids, 10) -# head(receiver, 10) - -fused.nnd.m <- create.fused( - data.rec = receiver, data.don = donor, - mtc.ids = out.nnd$mtc.ids, - z.vars = "{z_variables}" - ) -summary(fused.nnd.m) -""".format( - receiver_path=receiver_path, - donor_path=donor_path, - match_vars=match_vars, - z_variables=z_variables, - ) - print(r_script) # noqa analysis:ignore - - -def nnd_hotdeck_using_rpy2(receiver=None, donor=None, matching_variables=None, z_variables=None, donor_classes=None): - from rpy2.robjects import pandas2ri - from rpy2.robjects.packages import importr - - assert receiver is not None and donor is not None - assert matching_variables is not None - - pandas2ri.activate() - StatMatch = importr("StatMatch") - - if isinstance(donor_classes, str): - assert donor_classes in receiver, "Donor class not present in receiver" - assert donor_classes in donor, "Donor class not present in donor" - - try: - if donor_classes: - out_NND = StatMatch.NND_hotdeck( - data_rec=receiver, - data_don=donor, - match_vars=pd.Series(matching_variables), - don_class=pd.Series(donor_classes), - ) - else: - out_NND = StatMatch.NND_hotdeck( - data_rec=receiver, - data_don=donor, - match_vars=pd.Series(matching_variables), - # don_class = pd.Series(donor_classes) - ) - except Exception as e: - print(1) # noqa analysis:ignore - print(receiver) # noqa analysis:ignore - print(2) # noqa analysis:ignore - print(donor) # noqa analysis:ignore - print(3) # noqa analysis:ignore - print(pd.Series(matching_variables)) # noqa analysis:ignore - print(e) # noqa analysis:ignore - - # create synthetic data.set, without the - # duplication of the matching variables - - fused_0 = pandas2ri.ri2py( - StatMatch.create_fused(data_rec=receiver, data_don=donor, mtc_ids=out_NND[0], z_vars=pd.Series(z_variables)) - ) - 
- # create synthetic data.set, with the "duplication" - # of the matching variables - - fused_1 = pandas2ri.ri2py( - StatMatch.create_fused( - data_rec=receiver, - data_don=donor, - mtc_ids=out_NND[0], - z_vars=pd.Series(z_variables), - dup_x=True, - match_vars=pd.Series(matching_variables), - ) - ) - - return fused_0, fused_1 - - -if __name__ == "__main__": - log.setLevel(logging.INFO) - - receiver = pd.DataFrame() - donor = pd.DataFrame() - matching_variables = "sexe" - z_variables = "ident" - - nnd_hotdeck_using_feather( - receiver=receiver, - donor=donor, - matching_variables=matching_variables, - z_variables=z_variables, - ) diff --git a/openfisca_survey_manager/paths.py b/openfisca_survey_manager/paths.py deleted file mode 100644 index 487324f4..00000000 --- a/openfisca_survey_manager/paths.py +++ /dev/null @@ -1,80 +0,0 @@ -import logging -import os -import sys -from pathlib import Path - -log = logging.getLogger(__name__) - -default_config_files_directory = None -openfisca_survey_manager_location = Path(__file__).parent.parent - - -# Hack for use at the CASD (shared user) -# Use taxipp/.config/ directory if exists as default_config_files_directory -try: - import taxipp - - taxipp_location = Path(taxipp.__file__).parent.parent - default_config_files_directory = os.path.join(taxipp_location, ".config", "openfisca-survey-manager") -except ImportError: - taxipp_location = None - -if taxipp_location is None or not os.path.exists(default_config_files_directory): - default_config_files_directory = None - - -# Hack for using with france-data on a CI or locally -try: - import openfisca_france_data - - france_data_location = Path(openfisca_france_data.__file__).parent.parent - from xdg import BaseDirectory - - default_config_files_directory = BaseDirectory.save_config_path("openfisca-survey-manager") -except ImportError: - france_data_location = None - -if france_data_location is None or not os.path.exists(default_config_files_directory): - 
default_config_files_directory = None - -# Run CI when testing openfisca-survey-manager for example GitHub Actions -test_config_files_directory = os.path.join( - openfisca_survey_manager_location, - "openfisca_survey_manager", - "tests", - "data_files", -) - -with open(os.path.join(test_config_files_directory, "config_template.ini")) as file: - config_ini = file.read() - -config_ini = config_ini.format(location=openfisca_survey_manager_location) -try: - with open(os.path.join(test_config_files_directory, "config.ini"), "w+") as file: - file.write(config_ini) -except PermissionError: - log.debug(f"config.ini can't be written in the test config files directory{test_config_files_directory}") - pass - -# GitHub Actions test -is_in_ci = "CI" in os.environ or os.environ.get("GITHUB_ACTIONS", "").lower() == "true" -private_run_with_data = False - -if (is_in_ci and default_config_files_directory is None) or ("pytest" in sys.modules): - if "CI_RUNNER_TAGS" in os.environ: - private_run_with_data = ( - "data-in" in os.environ["CI_RUNNER_TAGS"] - # or ("data-out" in os.environ["CI_RUNNER_TAGS"]) - ) - if not private_run_with_data: - default_config_files_directory = test_config_files_directory - -if default_config_files_directory is None: - from xdg import BaseDirectory - - default_config_files_directory = BaseDirectory.save_config_path("openfisca-survey-manager") - - log.debug(f"Using default_config_files_directory = {default_config_files_directory}") - - -assert default_config_files_directory is not None diff --git a/openfisca_survey_manager/read_dbf.py b/openfisca_survey_manager/read_dbf.py deleted file mode 100644 index 5bca5d33..00000000 --- a/openfisca_survey_manager/read_dbf.py +++ /dev/null @@ -1,48 +0,0 @@ -import contextlib - -with contextlib.suppress(ModuleNotFoundError): - import pysal as ps -import pandas as pd - - -def read_dbf(dbf_path, index=None, cols=False, incl_index=False): - """ - Read a dbf file as a pandas.DataFrame, optionally selecting the index - 
variable and which columns are to be loaded. - - __author__ = "Dani Arribas-Bel " - ... - - Arguments - --------- - dbf_path : str - Path to the DBF file to be read - index : str - Name of the column to be used as the index of the DataFrame - cols : list - List with the names of the columns to be read into the - DataFrame. Defaults to False, which reads the whole dbf - incl_index : Boolean - If True index is included in the DataFrame as a - column too. Defaults to False - - Returns - ------- - df : DataFrame - pandas.DataFrame object created - """ - db = ps.open(dbf_path) - if cols: - if incl_index: - cols.append(index) - vars_to_read = cols - else: - vars_to_read = db.header - data = {var: db.by_col(var) for var in vars_to_read} - if index: - index = db.by_col(index) - db.close() - return pd.DataFrame(data, index=index) - else: - db.close() - return pd.DataFrame(data) diff --git a/openfisca_survey_manager/read_sas.py b/openfisca_survey_manager/read_sas.py deleted file mode 100644 index acdca1a4..00000000 --- a/openfisca_survey_manager/read_sas.py +++ /dev/null @@ -1,24 +0,0 @@ -import logging - -from pandas.core.frame import DataFrame - -log = logging.getLogger(__name__) - - -def read_sas(sas_file_path, clean=False) -> DataFrame: - try: - import pyreadstat - - data_frame, _ = pyreadstat.read_sas7bdat(sas_file_path) - except ImportError as e1: - log.info("pyreadstat not available trying SAS7BDAT") - try: - from sas7bdat import SAS7BDAT - - data_frame = SAS7BDAT(sas_file_path).to_data_frame() - except ImportError as e2: - log.info("Neither pyreadstat nor SAS7BDAT are available") - print(e1) # noqa analysis:ignore - raise e2 - - return data_frame diff --git a/openfisca_survey_manager/read_spss.py b/openfisca_survey_manager/read_spss.py deleted file mode 100644 index 0b330064..00000000 --- a/openfisca_survey_manager/read_spss.py +++ /dev/null @@ -1,20 +0,0 @@ -"""Read SPSS data.""" - -import contextlib - -from pandas import DataFrame - -with 
contextlib.suppress(ModuleNotFoundError): - from savReaderWriter import SavReader - - -def read_spss(spss_file_path): - with SavReader(spss_file_path, returnHeader=True) as reader: - for record in reader: - print(record) # noqa analysis:ignore - # records_got.append(record) - - data_frame = DataFrame(list(SavReader(spss_file_path))) - print(data_frame.info()) # noqa analysis:ignore - - return data_frame diff --git a/openfisca_survey_manager/scenarios/__init__.py b/openfisca_survey_manager/scenarios/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/openfisca_survey_manager/scenarios/abstract_scenario.py b/openfisca_survey_manager/scenarios/abstract_scenario.py deleted file mode 100644 index 499c034d..00000000 --- a/openfisca_survey_manager/scenarios/abstract_scenario.py +++ /dev/null @@ -1,766 +0,0 @@ -"Abstract survey scenario definition." - -import logging -import os -from pathlib import Path -from typing import Dict, List, Optional, Union - -import numpy as np -import pandas as pd -from openfisca_core import periods -from openfisca_core.periods import MONTH, YEAR -from openfisca_core.tools.simulation_dumper import dump_simulation, restore_simulation -from openfisca_core.types import Array, Period, TaxBenefitSystem - -from openfisca_survey_manager.calibration import Calibration -from openfisca_survey_manager.simulations import Simulation # noqa analysis:ignore -from openfisca_survey_manager.surveys import Survey - -log = logging.getLogger(__name__) - - -class AbstractSurveyScenario(object): - """Abstract survey scenario.""" - - cache_blacklist = None - collection = None - debug = False - filtering_variable_by_entity = None - id_variable_by_entity_key = None - inflator_by_variable = None # factor used to inflate variable total - input_data_frame = None - input_data_table_by_entity_by_period = None - input_data_table_by_period = None - non_neutralizable_variables = None - period = None - role_variable_by_entity_key = None - simulations = 
None - target_by_variable = None # variable total target to inflate to - tax_benefit_systems = None - trace = False - used_as_input_variables = None - used_as_input_variables_by_entity = None - variation_factor = 0.03 # factor used to compute variation when estimating marginal tax rate - varying_variable = None - weight_variable_by_entity = None - - def build_input_data(self, **kwargs): - """Build input data.""" - NotImplementedError - - def calculate_series(self, variable, period=None, simulation=None): - """Compute variable values for period for a given simulation. - - Args: - variable(str, optional): Variable to compute - period(Period, optional): Period, defaults to None - simulation(str, optional): Simulation to use - - Returns: - pandas.Series: Variable values - - """ - return pd.Series( - data=self.calculate_variable(variable, period, simulation=simulation), - name=variable, - ) - - def calculate_variable(self, variable, period=None, simulation=None): - """Compute variable values for period for a given simulation. - - Args: - variable(str, optional): Variable to compute - period(Period, optional): Period, defaults to None - simulation(str, optional): Simulation to use - - Returns: - numpy.ndarray: Variable values - - """ - if simulation is None: - assert len(self.simulations.keys()) == 1 - simulation = list(self.simulations.values())[0] - else: - simulation = self.simulations[simulation] - assert simulation is not None - return simulation.adaptative_calculate_variable(variable, period=period) - - def calibrate( - self, - period: int = None, - target_margins_by_variable: dict = None, - parameters: dict = None, - target_entity_count: float = None, - other_entity_count: float = None, - entity: str = None, - ): - """Calibrate the scenario data. - - Args: - period (int, optionnal): Period of calibration. Defaults to scenario.year - target_margins_by_variable (dict, optional): Variable targets margins. Defaults to None. 
- parameters (dict, optional): Calibration parameters. Defaults to None. - target_entity_count (float, optional): Total population target. Defaults to None. - other_entity_count (float, optional): Total population target of the second entity. Defaults to None. - entity (str): Entity specified when no variable comes with a target margins but `target_entity_count` is not None. - """ - survey_scenario = self - - if period is None: - assert survey_scenario.period is not None - period = survey_scenario.period - - if parameters is not None: - assert parameters["method"] in ["linear", "raking ratio", "logit", "hyperbolic sinus"], ( - "Incorect parameter value: method should be 'linear', 'raking ratio', 'logit' or 'hyperbolic sinus'" - ) - if parameters["method"] == "logit": - assert parameters["invlo"] is not None - assert parameters["up"] is not None - elif parameters["method"] == "hyperbolic sinus": - assert parameters["alpha"] is not None - else: - parameters = {"method": "logit", "up": 3, "invlo": 3} - - # TODO: filtering using filtering_variable_by_entity - for simulation in self.simulations.values(): - if simulation is None: - continue - calibration = Calibration( - simulation, - target_margins_by_variable, - period, - target_entity_count=target_entity_count, - other_entity_count=other_entity_count, - entity=entity, - parameters=parameters, - # filter_by = self.filter_by, - ) - calibration.calibrate(inplace=True) - simulation.calibration = calibration - - def compute_aggregate( - self, - variable: str = None, - aggfunc: str = "sum", - filter_by: str = None, - period: Optional[Union[int, str, Period]] = None, - simulation: str = None, - baseline_simulation: str = None, - missing_variable_default_value=np.nan, - weighted: bool = True, - alternative_weights: Optional[Union[str, int, float, Array]] = None, - ): - """Compute variable aggregate. - - Args: - variable (str, optional): Variable to aggregate. Defaults to None. - aggfunc (str, optional): Aggregation function. 
Defaults to 'sum'. - filter_by (str, optional): Filter variable or expression to use. Defaults to None. - period (Optional[Union[int, str, Period]], optional): Period. Defaults to None. - simulation(str, optional): Simulation to use - baseline_simulation(str, optional): Baseline simulation to use when computing a difference - missing_variable_default_value (optional): Value to use for missing values. Defaults to np.nan. - weighted (bool, optional): Whether to weight the variable or not. Defaults to True. - alternative_weights (Optional[Union[str, int, float, Array]], optional): Alternative weigh to use. Defaults to None. - filtering_variable_by_entity (Dict, optional): Filtering variable by entity. Defaults to None. - - Returns: - float: Aggregate - """ - assert aggfunc in ["count", "mean", "sum", "count_non_zero"] - assert period is not None - assert variable is not None - if simulation is None: - assert len(self.simulations.keys()) == 1 - simulation = list(self.simulations.values())[0] - else: - simulation = self.simulations[simulation] - - assert simulation is not None, f"Missing {simulation} simulation" - - if baseline_simulation: - baseline_simulation = self.simulations[baseline_simulation] - return simulation.compute_aggregate( - variable=variable, - aggfunc=aggfunc, - filter_by=filter_by, - period=period, - missing_variable_default_value=missing_variable_default_value, - weighted=weighted, - alternative_weights=alternative_weights, - filtering_variable_by_entity=self.filtering_variable_by_entity, - ) - baseline_simulation.compute_aggregate( - variable=variable, - aggfunc=aggfunc, - filter_by=filter_by, - period=period, - missing_variable_default_value=missing_variable_default_value, - weighted=weighted, - alternative_weights=alternative_weights, - filtering_variable_by_entity=self.filtering_variable_by_entity, - ) - - return simulation.compute_aggregate( - variable=variable, - aggfunc=aggfunc, - filter_by=filter_by, - period=period, - 
missing_variable_default_value=missing_variable_default_value, - weighted=weighted, - alternative_weights=alternative_weights, - filtering_variable_by_entity=self.filtering_variable_by_entity, - ) - - def compute_quantiles( - self, - simulation: Simulation, - variable: str, - nquantiles: int = None, - period: Optional[Union[int, str, Period]] = None, - filter_by=None, - weighted: bool = True, - alternative_weights=None, - filtering_variable_by_entity=None, - ) -> List[float]: - """ - Compute quantiles of a variable. - - Args: - simulation (Simulation, optional): Simulation to be used. Defaults to None. - variable (str, optional): Variable which quantiles are computed. Defaults to None. - nquantiles (int, optional): Number of quantiles. Defaults to None. - period (Optional[Union[int, str, Period]], optional): Period. Defaults to None. - missing_variable_default_value (optional): Value to use for missing values. Defaults to np.nan. - weighted (bool, optional): Whether to weight the variable or not. Defaults to True. - alternative_weights (Optional[Union[str, int, float, Array]], optional): Alternative weigh to use. Defaults to None. - filtering_variable_by_entity (Dict, optional): Filtering variable by entity. Defaults to None. - - Returns: - List(float): The quantiles values - """ - assert variable is not None - assert nquantiles is not None - simulation = self.simulations[simulation] - assert simulation is not None, f"Missing {simulation} simulation" - - return simulation.compute_quantiles( - variable=variable, - period=period, - nquantiles=nquantiles, - filter_by=filter_by, - weighted=weighted, - alternative_weights=alternative_weights, - ) - - def compute_marginal_tax_rate( - self, - target_variable: str, - period: Optional[Union[int, str, Period]], - simulation: str = None, - value_for_zero_varying_variable: float = 0.0, - ) -> Array: - """ - Compute marginal a rate of a target (MTR) with respect to a varying variable. 
- - Args: - target_variable (str): the variable which marginal tax rate is computed - period (Optional[Union[int, str, Period]], optional): Period. Defaults to None. - simulation(str, optional): Simulation to use - value_for_zero_varying_variable (float, optional): value of MTR when the varying variable is zero. Defaults to 0. - - Returns: - numpy.array: Vector of marginal rates - """ - varying_variable = self.varying_variable - if simulation is None: - assert len(self.simulations.keys()) == 2 - simulation_name = [name for name in self.simulations if not name.startswith("_modified_")][0] - simulation = self.simulations[simulation_name] - else: - simulation_name = simulation - simulation = self.simulations[simulation_name] - - modified_simulation = self.simulations[f"_modified_{simulation_name}"] - - variables = simulation.tax_benefit_system.variables - assert target_variable in variables - - variables_belong_to_same_entity = ( - variables[varying_variable].entity.key == variables[target_variable].entity.key - ) - varying_variable_belongs_to_person_entity = variables[varying_variable].entity.is_person - - assert variables_belong_to_same_entity or varying_variable_belongs_to_person_entity - - if variables_belong_to_same_entity: - modified_varying = modified_simulation.calculate_add(varying_variable, period=period) - varying = simulation.calculate_add(varying_variable, period=period) - else: - target_variable_entity_key = variables[target_variable].entity.key - - def cast_to_target_entity(simulation: Simulation): - population = simulation.populations[target_variable_entity_key] - df = ( - pd.DataFrame( - { - "members_entity_id": population._members_entity_id, - varying_variable: simulation.calculate_add(varying_variable, period=period), - } - ) - .groupby("members_entity_id") - .sum() - ) - varying_variable_for_target_entity = df.loc[population.ids, varying_variable].values - return varying_variable_for_target_entity - - modified_varying = 
cast_to_target_entity(modified_simulation) - varying = cast_to_target_entity(simulation) - - modified_target = modified_simulation.calculate_add(target_variable, period=period) - target = simulation.calculate_add(target_variable, period=period) - - numerator = modified_target - target - denominator = modified_varying - varying - marginal_rate = 1 - np.divide( - numerator, - denominator, - out=np.full_like(numerator, value_for_zero_varying_variable, dtype=np.floating), - where=(denominator != 0), - ) - - return marginal_rate - - def compute_pivot_table( - self, - aggfunc="mean", - columns=None, - baseline_simulation=None, - filter_by=None, - index=None, - period=None, - simulation=None, - difference=False, - use_baseline_for_columns=None, - values=None, - missing_variable_default_value=np.nan, - concat_axis=None, - weighted=True, - alternative_weights=None, - ): - """Compute a pivot table of agregated values casted along specified index and columns. - - Args: - aggfunc(str, optional): Aggregation function, defaults to 'mean' - columns(list, optional): Variable(s) in columns, defaults to None - difference(bool, optional): Compute difference, defaults to False - filter_by(str, optional): Boolean variable to filter by, defaults to None - index(list, optional): Variable(s) in index (lines), defaults to None - period(Period, optional): Period, defaults to None - simulation(str, optional): Simulation to use - baseline_simulation(str, optional): Baseline simulation to use when computing a difference - use_baseline_for_columns(bool, optional): Use columns from baseline columns values, defaults to None - values(list, optional): Aggregated variable(s) within cells, defaults to None - missing_variable_default_value(float, optional): Default value for missing variables, defaults to np.nan - concat_axis(int, optional): Axis to concatenate along (index = 0, columns = 1), defaults to None - weighted(bool, optional): Whether to weight te aggregates (Default value = True) - 
alternative_weights(str or int or float, optional): Weight variable name or numerical value. Use Simulation's weight_variable_by_entity if None, and if the later is None uses 1 ((Default value = None) - - Returns: - pd.DataFrame: Pivot table - - """ - assert (not difference) or (baseline_simulation is not None), ( - "Can't have difference when not baseline simulation" - ) - - simulation = self.simulations[simulation] - if baseline_simulation: - baseline_simulation = self.simulations[baseline_simulation] - - filtering_variable_by_entity = self.filtering_variable_by_entity - - return simulation.compute_pivot_table( - baseline_simulation=baseline_simulation, - aggfunc=aggfunc, - columns=columns, - difference=difference, - filter_by=filter_by, - index=index, - period=period, - use_baseline_for_columns=use_baseline_for_columns, - values=values, - missing_variable_default_value=missing_variable_default_value, - concat_axis=concat_axis, - weighted=weighted, - alternative_weights=alternative_weights, - filtering_variable_by_entity=filtering_variable_by_entity, - ) - - def compute_winners_losers( - self, - variable, - simulation, - baseline_simulation=None, - filter_by=None, - period=None, - absolute_minimal_detected_variation=0, - relative_minimal_detected_variation=0.01, - observations_threshold=None, - weighted=True, - alternative_weights=None, - ): - simulation = self.simulations[simulation] - if baseline_simulation: - baseline_simulation = self.simulations[baseline_simulation] - - return simulation.compute_winners_losers( - baseline_simulation, - variable=variable, - filter_by=filter_by, - period=period, - absolute_minimal_detected_variation=absolute_minimal_detected_variation, - relative_minimal_detected_variation=relative_minimal_detected_variation, - observations_threshold=observations_threshold, - weighted=weighted, - alternative_weights=alternative_weights, - filtering_variable_by_entity=self.filtering_variable_by_entity, - ) - - def create_data_frame_by_entity( - 
self, variables=None, expressions=None, filter_by=None, index=False, period=None, simulation=None, merge=False - ): - """Create dataframe(s) of computed variable for every entity (eventually merged in a unique dataframe). - - Args: - variables(list, optional): Variable to compute, defaults to None - expressions(str, optional): Expressions to compute, defaults to None - filter_by(str, optional): Boolean variable or expression, defaults to None - index(bool, optional): Index by entity id, defaults to False - period(Period, optional): Period, defaults to None - simulation(str, optional): Simulation to use - merge(bool, optional): Merge all the entities in one data frame, defaults to False - - Returns: - dict or pandas.DataFrame: Dictionnary of dataframes by entities or dataframe with all the computed variables - - """ - if simulation is None: - assert len(self.simulations.keys()) == 1 - simulation = list(self.simulations.values())[0] - else: - simulation = self.simulations[simulation] - - return simulation.create_data_frame_by_entity( - variables=variables, - expressions=expressions, - filter_by=filter_by, - index=index, - period=period, - merge=merge, - ) - - def custom_input_data_frame(self, input_data_frame, **kwargs): - """Customize input data frame. - - Args: - input_data_frame: original input data frame. - kwargs: keyword arguments. - """ - pass - - def dump_data_frame_by_entity(self, variables=None, survey_collection=None, survey_name=None): - assert survey_collection is not None - assert survey_name is not None - assert variables is not None - openfisca_data_frame_by_entity = self.create_data_frame_by_entity(variables=variables) - for entity_key, data_frame in openfisca_data_frame_by_entity.items(): - survey = Survey(name=survey_name) - survey.insert_table(name=entity_key, data_frame=data_frame) - survey_collection.surveys.append(survey) - survey_collection.dump(collection="openfisca") - - def dump_simulations(self, directory: str): - """ - Dump simulations. 
- - Args: - directory (str, optional): Dump directory - """ - assert directory is not None - use_sub_directories = len(self.simulations) >= 2 - - if use_sub_directories: - for simulation_name, simulation in self.simulations.items(): - dump_simulation(simulation, directory=str(Path(directory) / simulation_name)) - else: - assert len(self.simulations.keys()) == 1 - simulation = list(self.simulations.values())[0] - dump_simulation(simulation, directory) - - def generate_performance_data(self, output_dir: str): - if not self.trace: - raise ValueError("Method generate_performance_data cannot be used if trace hasn't been activated.") - - for simulation_name, simulation in self.simulations.items(): - simulation_dir = os.path.join(output_dir, f"{simulation_name}_perf_log") - if not Path(output_dir).exists(): - Path(output_dir).mkdir() - if not Path(simulation_dir).exists(): - Path(simulation_dir).mkdir() - simulation.tracer.generate_performance_graph(simulation_dir) - simulation.tracer.generate_performance_tables(simulation_dir) - - def inflate(self, inflator_by_variable=None, period=None, target_by_variable=None): - assert inflator_by_variable or target_by_variable - assert period is not None - inflator_by_variable = {} if inflator_by_variable is None else inflator_by_variable - target_by_variable = {} if target_by_variable is None else target_by_variable - self.inflator_by_variable = inflator_by_variable - self.target_by_variable = target_by_variable - - for _, simulation in self.simulations.items(): - simulation.inflate(inflator_by_variable, period, target_by_variable) - - def init_from_data( - self, - calibration_kwargs=None, - inflation_kwargs=None, - rebuild_input_data=False, - rebuild_kwargs=None, - data=None, - memory_config=None, - use_marginal_tax_rate=False, - ): - """Initialise a survey scenario from data. - - Args: - rebuild_input_data(bool): Whether or not to clean, format and save data. 
Take a look at :func:`build_input_data` - data(dict): Contains the data, or metadata needed to know where to find it. - use_marginal_tax_rate(bool): True to go into marginal effective tax rate computation mode. - calibration_kwargs(dict): Calibration options (Default value = None) - inflation_kwargs(dict): Inflations options (Default value = None) - rebuild_input_data(bool): Whether to rebuild the data (Default value = False) - rebuild_kwargs: Rebuild options (Default value = None) - """ - # When not ``None``, it'll try to get the data for *period*. - if data is not None: - data_year = data.get("data_year", self.period) - - # When ``True`` it'll assume it is raw data and do all that described supra. - # When ``False``, it'll assume data is ready for consumption. - if rebuild_input_data: - if rebuild_kwargs is not None: - self.build_input_data(year=data_year, **rebuild_kwargs) - else: - self.build_input_data(year=data_year) - - debug = self.debug - trace = self.trace - - if use_marginal_tax_rate: - for name, tax_benefit_system in self.tax_benefit_systems.items(): - assert self.varying_variable in tax_benefit_system.variables, ( - f"Variable {self.varying_variable} is not present tax benefit system named {name}" - ) - - # Inverting reform and baseline because we are more likely - # to use baseline input in reform than the other way around - self.simulations = {} - for simulation_name, _ in self.tax_benefit_systems.items(): - self.new_simulation(simulation_name, debug=debug, data=data, trace=trace, memory_config=memory_config) - if use_marginal_tax_rate: - self.new_simulation( - simulation_name, - debug=debug, - data=data, - trace=trace, - memory_config=memory_config, - marginal_tax_rate_only=True, - ) - - if calibration_kwargs is not None: - assert set(calibration_kwargs.keys()).issubset( - {"target_margins_by_variable", "parameters", "target_entity_count", "other_entity_count", "entity"} - ) - - if inflation_kwargs is not None: - assert 
set(inflation_kwargs.keys()).issubset({"inflator_by_variable", "target_by_variable", "period"}) - - if calibration_kwargs: - self.calibrate(**calibration_kwargs) - - if inflation_kwargs: - self.inflate(**inflation_kwargs) - - def new_simulation( - self, simulation_name, debug=False, trace=False, data=None, memory_config=None, marginal_tax_rate_only=False - ): - tax_benefit_system = self.tax_benefit_systems[simulation_name] - assert tax_benefit_system is not None - - period = periods.period(self.period) - - if "custom_initialize" in dir(self): - custom_initialize = None if marginal_tax_rate_only else self.custom_initialize - else: - custom_initialize = None - - data["collection"] = self.collection - data["id_variable_by_entity_key"] = self.id_variable_by_entity_key - data["role_variable_by_entity_key"] = self.role_variable_by_entity_key - data["used_as_input_variables"] = self.used_as_input_variables - - simulation = Simulation.new_from_tax_benefit_system( - tax_benefit_system=tax_benefit_system, - debug=debug, - trace=trace, - data=data, - memory_config=memory_config, - period=period, - custom_initialize=custom_initialize, - ) - - if marginal_tax_rate_only: - self._apply_modification(simulation, period) - if custom_initialize: - custom_initialize(simulation) - self.simulations[f"_modified_{simulation_name}"] = simulation - else: - self.simulations[simulation_name] = simulation - - simulation.weight_variable_by_entity = self.weight_variable_by_entity - - if self.period is not None: - simulation.period = periods.period(self.period) - - return simulation - - def memory_usage(self): - """Print memory usage.""" - for simulation_name, simulation in self.simulations.items(): - print(f"simulation : {simulation_name}") # noqa analysis:ignore - simulation.print_memory_usage() - - def neutralize_variables(self, tax_benefit_system): - """Neutralizes input variables not in input dataframe and keep some crucial variables. 
- - Args: - tax_benefit_system: The TaxBenefitSystem variables belongs to - - """ - for variable_name, variable in tax_benefit_system.variables.items(): - if variable.formulas: - continue - if self.used_as_input_variables and (variable_name in self.used_as_input_variables): - continue - if self.non_neutralizable_variables and (variable_name in self.non_neutralizable_variables): - continue - if self.weight_variable_by_entity and variable_name in list(self.weight_variable_by_entity.values()): - continue - - tax_benefit_system.neutralize_variable(variable_name) - - def restore_simulations(self, directory, **kwargs): - """Restores SurveyScenario's simulations. - - Args: - directory: Directory to restore simulations from - kwargs: Restoration options - - """ - assert Path(directory).exists(), "Cannot restore simulations from non existent directory" - use_sub_directories = len(self.tax_benefit_systems) >= 2 - - self.simulations = {} - if use_sub_directories: - for simulation_name, tax_benefit_system in self.tax_benefit_systems.items(): - simulation = restore_simulation(str(Path(directory) / simulation_name), tax_benefit_system, **kwargs) - simulation.id_variable_by_entity_key = self.id_variable_by_entity_key - self.simulations[simulation_name] = simulation - else: - simulation = restore_simulation(directory, list(self.tax_benefit_systems.values())[0], **kwargs) - simulation.id_variable_by_entity_key = self.id_variable_by_entity_key - self.simulations["unique_simulation"] = simulation - - def set_input_data_frame(self, input_data_frame): - """Set the input dataframe. - - Args: - input_data_frame (pd.DataFrame): Input data frame - - """ - self.input_data_frame = input_data_frame - - def set_tax_benefit_systems(self, tax_benefit_systems: Dict[str, TaxBenefitSystem]): - """ - Set the tax and benefit systems of the scenario. 
- - Args: - tax_benefit_systems (Dict[str, TaxBenefitSystem]): The tax benefit systems - """ - for tax_benefit_system in tax_benefit_systems.values(): - assert tax_benefit_system is not None - if self.cache_blacklist is not None: - tax_benefit_system.cache_blacklist = self.cache_blacklist - # - self.tax_benefit_systems = tax_benefit_systems - - def set_weight_variable_by_entity(self, weight_variable_by_entity=None): - if weight_variable_by_entity is not None: - self.weight_variable_by_entity = weight_variable_by_entity - - if self.simulations is not None: - for simulation in self.simulations.values(): - simulation.set_weight_variable_by_entity(self.weight_variable_by_entity) - - def summarize_variable(self, variable=None, weighted=False, force_compute=False): - """Print a summary of a variable including its memory usage for all the siulations. - - Args: - variable(string): The variable being summarized - weighted(bool): Whether the produced statistics should be weigthted or not - force_compute(bool): Whether the computation of the variable should be forced - - Example: - >>> from openfisca_survey_manager.tests.test_scenario import create_randomly_initialized_survey_scenario - >>> survey_scenario = create_randomly_initialized_survey_scenario(collection = None) - >>> survey_scenario.summarize_variable(variable = "housing_occupancy_status", force_compute = True) - - housing_occupancy_status: 1 periods * 5 cells * item size 2 (int16, default = HousingOccupancyStatus.tenant) = 10B - Details: - 2017-01: owner = 1.00e+00 (20.0%), tenant = 1.00e+00 (20.0%), free_lodger = 2.00e+00 (40.0%), homeless = 1.00e+00 (20.0%). 
- >>> survey_scenario.summarize_variable(variable = "rent", force_compute = True) - - rent: 1 periods * 5 cells * item size 4 (float32, default = 0) = 20B - Details: - 2017-01: mean = 562.3850708007812, min = 156.01864624023438, max = 950.7142944335938, mass = 2.81e+03, default = 0.0%, median = 598.6585083007812 - >>> survey_scenario.tax_benefit_systems["baseline"].neutralize_variable('age') - >>> survey_scenario.summarize_variable(variable = "age") - - age: neutralized variable (int64, default = 0) - """ - for _simulation_name, simulation in self.simulations.items(): - simulation.summarize_variable(variable, weighted, force_compute) - - def _apply_modification(self, simulation, period): - period = periods.period(period) - varying_variable = self.varying_variable - definition_period = simulation.tax_benefit_system.variables[varying_variable].definition_period - - def set_variable(varying_variable, varying_variable_value, period_): - delta = self.variation_factor * varying_variable_value - new_variable_value = varying_variable_value + delta - simulation.delete_arrays(varying_variable, period_) - simulation.set_input(varying_variable, period_, new_variable_value) - - if period.unit == definition_period: - varying_variable_value = simulation.calculate(varying_variable, period=period) - set_variable(varying_variable, varying_variable_value, period) - - elif (definition_period == MONTH) and (period.unit == YEAR and period.size_in_months == 12): - varying_variable_value = simulation.calculate_add(varying_variable, period=period) - for period_ in [periods.Period(("month", period.start.offset(month, "month"), 1)) for month in range(12)]: - set_variable(varying_variable, varying_variable_value / 12, period_) - else: - ValueError() diff --git a/openfisca_survey_manager/scenarios/reform_scenario.py b/openfisca_survey_manager/scenarios/reform_scenario.py deleted file mode 100644 index bea3606b..00000000 --- a/openfisca_survey_manager/scenarios/reform_scenario.py +++ /dev/null 
@@ -1,291 +0,0 @@ -"""Abstract survey scenario definition.""" - -import logging -from typing import Optional, Union - -import numpy as np -import pandas as pd -from openfisca_core.types import Array, Period - -from openfisca_survey_manager.scenarios.abstract_scenario import AbstractSurveyScenario -from openfisca_survey_manager.simulations import Simulation - -log = logging.getLogger(__name__) - - -class ReformScenario(AbstractSurveyScenario): - """Reform survey scenario.""" - - def _get_simulation(self, use_baseline: bool = False): - """ - Get relevant simulation - - Args: - use_baseline (bool, optional): Whether to get baseline or reform simulation. Defaults to False. - """ - - if len(self.simulations) == 1: - return list(self.simulations.values())[0] - - simulation_name = "baseline" if use_baseline else "reform" - simulation = self.simulations[simulation_name] - assert simulation is not None, f"{simulation_name} does not exist" - return simulation - - def build_input_data(self, **kwargs): - """Build input data.""" - NotImplementedError - - def calculate_series(self, variable, period=None, use_baseline=False): - """Compute variable values for period and baseline or reform tax benefit and system. - - Args: - variable(str, optional): Variable to compute - period(Period, optional): Period, defaults to None - use_baseline(bool, optional): Use baseline tax and benefit system, defaults to False - - Returns: - pandas.Series: Variable values - - """ - return pd.Series( - data=self.calculate_variable(variable, period, use_baseline), - name=variable, - ) - - def calculate_variable(self, variable, period=None, use_baseline=False): - """Compute variable values for period and baseline or reform tax benefit and system. 
- - Args: - variable(str, optional): Variable to compute - period(Period, optional): Period, defaults to None - use_baseline(bool, optional): Use baseline tax and benefit system, defaults to False - - Returns: - numpy.ndarray: Variable values - - """ - simulation = self._get_simulation(use_baseline) - return simulation.adaptative_calculate_variable(variable, period=period) - - def compute_aggregate( - self, - variable: str = None, - aggfunc: str = "sum", - filter_by: str = None, - period: Optional[Union[int, str, Period]] = None, - use_baseline: bool = False, - difference: bool = False, - missing_variable_default_value=np.nan, - weighted: bool = True, - alternative_weights: Optional[Union[str, int, float, Array]] = None, - ): - """Compute variable aggregate. - - Args: - variable (str, optional): Variable to aggregate. Defaults to None. - aggfunc (str, optional): Aggregation function. Defaults to 'sum'. - filter_by (str, optional): Filter variable or expression to use. Defaults to None. - period (Optional[Union[int, str, Period]], optional): Period. Defaults to None. - use_baseline: Use baseline simulation. Defaults to False. - missing_variable_default_value (optional): Value to use for missing values. Defaults to np.nan. - weighted (bool, optional): Whether to weight the variable or not. Defaults to True. - alternative_weights (Optional[Union[str, int, float, Array]], optional): Alternative weigh to use. Defaults to None. - filtering_variable_by_entity (Dict, optional): Filtering variable by entity. Defaults to None. 
- - Returns: - float: Aggregate - """ - assert aggfunc in ["count", "mean", "sum", "count_non_zero"] - assert period is not None - assert not (difference and use_baseline), "Can't have difference and use_baseline both set to True" - - if difference: - return self.compute_aggregate( - variable=variable, - aggfunc=aggfunc, - filter_by=filter_by, - period=period, - use_baseline=False, - missing_variable_default_value=missing_variable_default_value, - weighted=weighted, - alternative_weights=alternative_weights, - ) - self.compute_aggregate( - variable=variable, - aggfunc=aggfunc, - filter_by=filter_by, - period=period, - use_baseline=True, - missing_variable_default_value=missing_variable_default_value, - weighted=weighted, - alternative_weights=alternative_weights, - ) - - assert variable is not None - simulation = self._get_simulation(use_baseline) - return simulation.compute_aggregate( - variable=variable, - aggfunc=aggfunc, - filter_by=filter_by, - period=period, - missing_variable_default_value=missing_variable_default_value, - weighted=weighted, - alternative_weights=alternative_weights, - filtering_variable_by_entity=self.filtering_variable_by_entity, - ) - - def compute_quantiles( - self, - variable: str = None, - nquantiles=None, - period=None, - use_baseline=False, - filter_by=None, - weighted=True, - alternative_weights=None, - ): - assert variable is not None - assert nquantiles is not None - simulation = self._get_simulation(use_baseline) - - return simulation.compute_quantiles( - variable=variable, - period=period, - nquantiles=nquantiles, - filter_by=filter_by, - weighted=weighted, - alternative_weights=alternative_weights, - ) - - def compute_marginal_tax_rate( - self, - target_variable: str, - period: Optional[Union[int, str, Period]], - use_baseline: bool = False, - value_for_zero_varying_variable: float = 0.0, - ) -> Array: - """ - Compute marginal a rate of a target (MTR) with respect to a varying variable. 
- - Args: - target_variable (str): the variable which marginal tax rate is computed - period (Optional[Union[int, str, Period]], optional): Period. Defaults to None. - use_baseline: Use baseline simulation. Defaults to False. - value_for_zero_varying_variable (float, optional): value of MTR when the varying variable is zero. Defaults to 0. - - Returns: - numpy.array: Vector of marginal rates - """ - if use_baseline: - return super(ReformScenario, self).compute_marginal_tax_rate( - target_variable=target_variable, - period=period, - simulation="baseline", - value_for_zero_varying_variable=value_for_zero_varying_variable, - ) - else: - return super(ReformScenario, self).compute_marginal_tax_rate( - target_variable=target_variable, - period=period, - simulation="reform", - value_for_zero_varying_variable=value_for_zero_varying_variable, - ) - - def compute_pivot_table( - self, - aggfunc="mean", - columns=None, - difference=False, - filter_by=None, - index=None, - period=None, - use_baseline=False, - use_baseline_for_columns=None, - values=None, - missing_variable_default_value=np.nan, - concat_axis=None, - weighted=True, - alternative_weights=None, - ): - filtering_variable_by_entity = self.filtering_variable_by_entity - - return Simulation.compute_pivot_table( - aggfunc=aggfunc, - columns=columns, - baseline_simulation=self._get_simulation(use_baseline=True), - filter_by=filter_by, - index=index, - period=period, - simulation=self._get_simulation(use_baseline), - difference=difference, - use_baseline_for_columns=use_baseline_for_columns, - values=values, - missing_variable_default_value=missing_variable_default_value, - concat_axis=concat_axis, - weighted=weighted, - alternative_weights=alternative_weights, - filtering_variable_by_entity=filtering_variable_by_entity, - ) - - def compute_winners_losers( - self, - variable=None, - filter_by=None, - period=None, - absolute_minimal_detected_variation=0, - relative_minimal_detected_variation=0.01, - 
observations_threshold=None, - weighted=True, - alternative_weights=None, - ): - return super(ReformScenario, self).compute_winners_losers( - simulation="reform", - baseline_simulation="baseline", - variable=variable, - filter_by=filter_by, - period=period, - absolute_minimal_detected_variation=getattr( - self, "absolute_minimal_detected_variation", absolute_minimal_detected_variation - ), - relative_minimal_detected_variation=getattr( - self, "relative_minimal_detected_variation", relative_minimal_detected_variation - ), - observations_threshold=getattr(self, "observations_threshold", observations_threshold), - weighted=weighted, - alternative_weights=alternative_weights, - ) - - def create_data_frame_by_entity( - self, - variables=None, - expressions=None, - filter_by=None, - index=False, - period=None, - use_baseline=False, - merge=False, - ): - """Create dataframe(s) of computed variable for every entity (eventually merged in a unique dataframe). - - Args: - variables(list, optional): Variable to compute, defaults to None - expressions(str, optional): Expressions to compute, defaults to None - filter_by(str, optional): Boolean variable or expression, defaults to None - index(bool, optional): Index by entity id, defaults to False - period(Period, optional): Period, defaults to None - use_baseline(bool, optional): Use baseline tax and benefit system, defaults to False - merge(bool, optional): Merge all the entities in one data frame, defaults to False - - Returns: - dict or pandas.DataFrame: Dictionnary of dataframes by entities or dataframe with all the computed variables - - """ - simulation = self._get_simulation(use_baseline) - return simulation.create_data_frame_by_entity( - variables=variables, - expressions=expressions, - filter_by=filter_by, - index=index, - period=period, - merge=merge, - ) diff --git a/openfisca_survey_manager/scripts/__init__.py b/openfisca_survey_manager/scripts/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git 
a/openfisca_survey_manager/scripts/build_collection.py b/openfisca_survey_manager/scripts/build_collection.py deleted file mode 100755 index 49aa2ed6..00000000 --- a/openfisca_survey_manager/scripts/build_collection.py +++ /dev/null @@ -1,285 +0,0 @@ -#! /usr/bin/env python - - -"""Build or update a collection from raw surveys data.""" - -import argparse -import configparser -import datetime -import logging -import os -import pdb -import re -import shutil -import sys -from pathlib import Path - -from openfisca_survey_manager.paths import default_config_files_directory, openfisca_survey_manager_location -from openfisca_survey_manager.survey_collections import SurveyCollection -from openfisca_survey_manager.surveys import Survey - -app_name = Path(__file__).stem -log = logging.getLogger(app_name) - - -def add_survey_to_collection( - survey_name=None, survey_collection=None, sas_files=None, stata_files=None, csv_files=None, parquet_files=None -): - if sas_files is None: - sas_files = [] - if stata_files is None: - stata_files = [] - if csv_files is None: - csv_files = [] - if parquet_files is None: - parquet_files = [] - - assert survey_collection is not None - overwrite = True - label = survey_name - - for test_survey in survey_collection.surveys: - if test_survey.name == survey_name: - survey = survey_collection.get_survey(survey_name) - if overwrite: - survey = Survey( - name=survey_name, - label=label, - csv_files=csv_files, - sas_files=sas_files, - stata_files=stata_files, - parquet_files=parquet_files, - survey_collection=survey_collection, - ) - else: - survey = survey_collection.get(survey_name) - survey.label = label - survey.informations.update( - { - "csv_files": csv_files, - "sas_files": sas_files, - "stata_files": stata_files, - "parquet_files": parquet_files, - } - ) - survey_collection.surveys = [ - kept_survey for kept_survey in survey_collection.surveys if kept_survey.name != survey_name - ] - survey_collection.surveys.append(survey) - - -def 
create_data_file_by_format(directory_path=None): - """Browse subdirectories to extract stata and sas files.""" - stata_files = [] - sas_files = [] - csv_files = [] - parquet_files = [] - - for root, _subdirs, files in os.walk(directory_path): - # reference _subdirs to avoid "not accessed" warnings - _ = _subdirs - for file_name in files: - file_path = Path(root) / file_name - if file_path.suffix == ".csv": - log.info(f"Found csv file {file_path}") - csv_files.append(str(file_path)) - if file_path.suffix == ".dta": - log.info(f"Found stata file {file_path}") - stata_files.append(str(file_path)) - if file_path.suffix == ".sas7bdat": - log.info(f"Found sas file {file_path}") - sas_files.append(str(file_path)) - if file_path.suffix == ".parquet": - log.info(f"Found parquet file {file_path}") - relative = str(file_path)[str(file_path).find(directory_path) :] - if ("/" in relative or "\\" in relative) and re.match(r".*-\d$", str(file_name)): - # Keep only the folder name if parquet files are in subfolders and name contains "-" - file_path = file_path.parent - parquet_files.append(str(file_path)) - return {"csv": csv_files, "stata": stata_files, "sas": sas_files, "parquet": parquet_files} - - -def build_survey_collection( - config_files_directory: str, - collection_name=None, - replace_metadata=False, - replace_data=False, - data_directory_path_by_survey_suffix=None, - source_format="sas", - keep_original_parquet_file=False, - encoding=None, -): - assert collection_name is not None - assert data_directory_path_by_survey_suffix is not None - surveys_name = list(data_directory_path_by_survey_suffix.keys()) - assert surveys_name is not None, "A list of surveys to process is needed" - - if replace_metadata: - survey_collection = SurveyCollection(name=collection_name, config_files_directory=config_files_directory) - else: - try: - survey_collection = SurveyCollection.load( - collection=collection_name, config_files_directory=config_files_directory - ) - except 
configparser.NoOptionError: - survey_collection = SurveyCollection(name=collection_name, config_files_directory=config_files_directory) - - for survey_suffix, data_directory_path in data_directory_path_by_survey_suffix.items(): - assert Path(data_directory_path).is_dir(), "{} is not a valid directory path".format(data_directory_path) - - data_file_by_format = create_data_file_by_format(data_directory_path) - survey_name = "{}_{}".format(collection_name, survey_suffix) - # Save the originals files list in the survey collection - add_survey_to_collection( - survey_name=survey_name, - survey_collection=survey_collection, - csv_files=data_file_by_format.get("csv"), - sas_files=data_file_by_format.get("sas"), - stata_files=data_file_by_format.get("stata"), - parquet_files=data_file_by_format.get("parquet"), - ) - - valid_source_format = [ - _format for _format in list(data_file_by_format.keys()) if data_file_by_format.get((_format)) - ] - log.info("Valid source formats are: {}".format(valid_source_format)) - source_format = valid_source_format[0] - log.info("Using the following format: {}".format(source_format)) - collections_directory = survey_collection.config.get("collections", "collections_directory") - if Path(collections_directory).is_dir() is False: - log.info( - "{} who should be the collections' directory does not exist. 
Creating directory.".format( - collections_directory - ) - ) - Path(collections_directory).mkdir() - collection_json_path = Path(collections_directory) / "{}.json".format(collection_name) - survey_collection.dump(json_file_path=collection_json_path) - surveys = [] - for survey in survey_collection.surveys: - if survey.name.endswith(str(survey_suffix)) and survey.name.startswith(collection_name): - surveys.append(survey) - survey_collection.fill_store( - source_format=source_format, - surveys=surveys, - overwrite=replace_data, - keep_original_parquet_file=keep_original_parquet_file, - encoding=encoding, - ) - return survey_collection - - -def check_template_config_files(config_files_directory: str): - """ - Create template config files if they do not exist. - """ - raw_data_ini_path = Path(config_files_directory) / "raw_data.ini" - config_ini_path = Path(config_files_directory) / "config.ini" - raw_data_template_ini_path = Path(config_files_directory) / "raw_data_template.ini" - config_template_ini_path = Path(config_files_directory) / "config_template.ini" - - if Path(config_files_directory).exists(): - config_files_do_not_exist = not (raw_data_ini_path.exists() and config_ini_path.exists()) - templates_config_files_do_not_exist = not ( - raw_data_template_ini_path.exists() and config_template_ini_path.exists() - ) - - if config_files_do_not_exist: - if templates_config_files_do_not_exist: - log.info("Creating configuration template files in {}".format(config_files_directory)) - template_files = ["raw_data_template.ini", "config_template.ini"] - templates_config_files_directory = ( - Path(openfisca_survey_manager_location) / "openfisca_survey_manager" / "config_files_templates" - ) - for template_file in template_files: - shutil.copy( - templates_config_files_directory / template_file, - Path(config_files_directory) / template_file, - ) - print("Rename and fill the template files in {}".format(config_files_directory)) # noqa analysis:ignore - return False - else: - 
Path(config_files_directory).mkdir(parents=True) - return check_template_config_files(config_files_directory) - - return True - - -def main(): - parser = argparse.ArgumentParser() - parser.add_argument("-c", "--collection", help="name of collection to build or update", required=True) - parser.add_argument( - "-d", - "--replace-data", - action="store_true", - default=False, - help="erase existing survey data HDF5 file (instead of failing when HDF5 file already exists)", - ) - parser.add_argument( - "-m", - "--replace-metadata", - action="store_true", - default=False, - help="erase existing collection metadata JSON file (instead of just adding new surveys)", - ) - parser.add_argument( - "-p", "--path", help=f"path to the config files directory (default = {default_config_files_directory})" - ) - parser.add_argument("-s", "--survey", help="name of survey to build or update (default = all)") - parser.add_argument( - "-k", - "--keep_original_parquet_file", - action="store_true", - default=False, - help="Keep original and point to original parquet files", - ) - parser.add_argument("-v", "--verbose", action="store_true", default=False, help="increase output verbosity") - parser.add_argument("-e", "--encoding", default=None, help="encoding to be used") - - args = parser.parse_args() - logging.basicConfig(level=logging.DEBUG if args.verbose else logging.WARNING, stream=sys.stdout) - - config_files_directory = args.path or default_config_files_directory - - if not check_template_config_files(config_files_directory=config_files_directory): - return - - config_parser = configparser.ConfigParser() - config_parser.read(Path(config_files_directory) / "raw_data.ini") - assert config_parser.has_section(args.collection), ( - "{} is an unkown collection. 
Please add a section to raw_data.ini configuration file".format(args.collection) - ) - data_directory_path_by_survey_suffix = dict(config_parser.items(args.collection)) - if args.survey is not None: - assert args.survey in data_directory_path_by_survey_suffix, "Unknown survey data directory for {}".format( - args.collection - ) - data_directory_path_by_survey_suffix = { - args.survey: data_directory_path_by_survey_suffix[args.survey], - } - - start_time = datetime.datetime.now() - - try: - build_survey_collection( - collection_name=args.collection, - data_directory_path_by_survey_suffix=data_directory_path_by_survey_suffix, - replace_metadata=args.replace_metadata, - replace_data=args.replace_data, - source_format="sas", - config_files_directory=config_files_directory, - keep_original_parquet_file=args.keep_original_parquet_file, - encoding=args.encoding, - ) - except Exception as e: - log.info(e) - pdb.post_mortem(sys.exc_info()[2]) - raise e - - log.info("The program has been executed in {}".format(datetime.datetime.now() - start_time)) - - return 0 - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/openfisca_survey_manager/simulation_builder.py b/openfisca_survey_manager/simulation_builder.py deleted file mode 100644 index 63eb6f02..00000000 --- a/openfisca_survey_manager/simulation_builder.py +++ /dev/null @@ -1,297 +0,0 @@ -import logging -from typing import Dict, List - -from openfisca_core.model_api import MONTH, YEAR -from openfisca_core.simulations.simulation_builder import SimulationBuilder - -SimulationBuilder.id_variable_by_entity_key = None -SimulationBuilder.role_variable_by_entity_key = None -SimulationBuilder.used_as_input_variables = None -SimulationBuilder.used_as_input_variables_by_entity = None - - -log = logging.getLogger(__name__) - - -# Helpers - - -def diagnose_variable_mismatch(used_as_input_variables, input_data_frame): - """Diagnose variables mismatch. 
- - Args: - used_as_input_variables(lsit): List of variable to test presence - input_data_frame: DataFrame in which to test variables presence - - """ - variables_mismatch = ( - set(used_as_input_variables).difference(set(input_data_frame.columns)) if used_as_input_variables else None - ) - if variables_mismatch: - log.info( - f"The following variables used as input variables are not present in the input data frame: \n {sorted(variables_mismatch)}" - ) - if variables_mismatch: - log.debug("The following variables are used as input variables: \n {}".format(sorted(used_as_input_variables))) - log.debug( - "The input_data_frame contains the following variables: \n {}".format(sorted(input_data_frame.columns)) - ) - - -# SimulationBuilder monkey-patched methods - - -def _set_id_variable_by_entity_key(builder) -> Dict[str, str]: - """Identify and sets the correct ids for the different entities.""" - if builder.id_variable_by_entity_key is None: - log.debug("Use default id_variable names") - builder.id_variable_by_entity_key = { - entity.key: entity.key + "_id" for entity in builder.tax_benefit_system.entities - } - - return builder.id_variable_by_entity_key - - -def _set_role_variable_by_entity_key(builder) -> Dict[str, str]: - """Identify and sets the correct roles for the different entities.""" - if builder.role_variable_by_entity_key is None: - builder.role_variable_by_entity_key = { - entity.key: entity.key + "_role_index" for entity in builder.tax_benefit_system.entities - } - - return builder.role_variable_by_entity_key - - -def _set_used_as_input_variables_by_entity(builder) -> Dict[str, List[str]]: - """Identify and sets the correct input variables for the different entities.""" - if builder.used_as_input_variables_by_entity is not None: - return - - tax_benefit_system = builder.tax_benefit_system - - assert set(builder.used_as_input_variables) <= set(tax_benefit_system.variables.keys()), ( - "Some variables used as input variables are not part of the tax benefit 
system:\n {}".format( - set(builder.used_as_input_variables).difference(set(tax_benefit_system.variables.keys())) - ) - ) - - builder.used_as_input_variables_by_entity = {} - - for entity in tax_benefit_system.entities: - builder.used_as_input_variables_by_entity[entity.key] = [ - variable - for variable in builder.used_as_input_variables - if tax_benefit_system.get_variable(variable).entity.key == entity.key - ] - - return builder.used_as_input_variables_by_entity - - -def filter_input_variables(builder, input_data_frame, tax_benefit_system): - """Filter the input data frame from variables that won't be used or are set to be computed. - - Args: - input_data_frame: Input dataframe (Default value = None) - - Returns: - pd.DataFrame: filtered dataframe - - """ - assert input_data_frame is not None - id_variable_by_entity_key = builder.id_variable_by_entity_key - role_variable_by_entity_key = builder.role_variable_by_entity_key - used_as_input_variables = builder.used_as_input_variables - - variables = tax_benefit_system.variables - - id_variables = [id_variable_by_entity_key[_entity.key] for _entity in tax_benefit_system.group_entities] - role_variables = [role_variable_by_entity_key[_entity.key] for _entity in tax_benefit_system.group_entities] - - log.debug("Variable used_as_input_variables in filter: \n {}".format(used_as_input_variables)) - - unknown_columns = [] - for column_name in input_data_frame: - if column_name in id_variables + role_variables: - continue - if column_name not in variables: - unknown_columns.append(column_name) - - input_data_frame.drop(unknown_columns, axis=1, inplace=True) - - if unknown_columns: - log.debug("The following unknown columns {}, are dropped from input table".format(sorted(unknown_columns))) - - used_columns = [] - dropped_columns = [] - for column_name in input_data_frame: - if column_name in id_variables + role_variables: - continue - variable = variables[column_name] - # Keeping the calculated variables that are 
initialized by the input data - if variable.formulas: - if column_name in used_as_input_variables: - used_columns.append(column_name) - continue - - dropped_columns.append(column_name) - - input_data_frame.drop(dropped_columns, axis=1, inplace=True) - - if used_columns: - log.debug( - "These columns are not dropped because present in used_as_input_variables:\n {}".format( - sorted(used_columns) - ) - ) - if dropped_columns: - log.debug( - "These columns in survey are set to be calculated, we drop them from the input table:\n {}".format( - sorted(dropped_columns) - ) - ) - - log.debug(f"Keeping the following variables in the input_data_frame:\n {sorted(input_data_frame.columns)}") - return input_data_frame - - -def init_all_entities(builder, input_data_frame, period=None): - assert period is not None - log.debug(f"Initialasing simulation using input_data_frame for period {period}") - builder._set_id_variable_by_entity_key() - builder._set_role_variable_by_entity_key() - - if period.unit == YEAR: # 1. year - simulation = builder.init_simulation_with_data_frame( - input_data_frame=input_data_frame, - period=period, - ) - elif period.unit == MONTH and period.size == 3: # 2. quarter - for offset in range(period.size): - period_item = period.first_month.offset(offset, MONTH) - simulation = builder.init_simulation_with_data_frame( - input_data_frame=input_data_frame, - period=period_item, - ) - elif period.unit == MONTH and period.size == 1: # 3. months - simulation = builder.init_simulation_with_data_frame( - input_data_frame=input_data_frame, - period=period, - ) - else: - raise ValueError("Invalid period {}".format(period)) - - simulation.id_variable_by_entity_key = builder.id_variable_by_entity_key - return simulation - - -def init_entity_structure(builder, entity, input_data_frame): - """Initialize sthe simulation with tax_benefit_system entities and input_data_frame. 
- - Args: - tax_benefit_system(TaxBenfitSystem): The TaxBenefitSystem to get the structure from - entity(Entity): The entity to initialize structure - input_data_frame(pd.DataFrame): The input - builder(Builder): The builder - - """ - tax_benefit_system = builder.tax_benefit_system - builder._set_id_variable_by_entity_key() - builder._set_role_variable_by_entity_key() - builder._set_used_as_input_variables_by_entity() - - input_data_frame = builder.filter_input_variables(input_data_frame, tax_benefit_system) - - id_variables = [builder.id_variable_by_entity_key[_entity.key] for _entity in tax_benefit_system.group_entities] - role_variables = [ - builder.role_variable_by_entity_key[_entity.key] for _entity in tax_benefit_system.group_entities - ] - - if entity.is_person: - for id_variable in id_variables + role_variables: - assert id_variable in input_data_frame.columns, "Variable {} is not present in input dataframe".format( - id_variable - ) - - ids = range(len(input_data_frame)) - if entity.is_person: - builder.declare_person_entity(entity.key, ids) - for group_entity in tax_benefit_system.group_entities: - _key = group_entity.key - _id_variable = builder.id_variable_by_entity_key[_key] - _role_variable = builder.role_variable_by_entity_key[_key] - group_population = builder.declare_entity( - _key, input_data_frame[_id_variable].drop_duplicates().sort_values().values - ) - builder.join_with_persons( - group_population, - input_data_frame[_id_variable].astype("int").values, - input_data_frame[_role_variable].astype("int").values, - ) - - -def init_simulation_with_data_frame(builder, input_data_frame, period): - """Initialize the simulation period with current input_data_frame for an entity if specified.""" - used_as_input_variables = builder.used_as_input_variables - id_variable_by_entity_key = builder.id_variable_by_entity_key - role_variable_by_entity_key = builder.role_variable_by_entity_key - tax_benefit_system = builder.tax_benefit_system - assert 
tax_benefit_system is not None - - diagnose_variable_mismatch(used_as_input_variables, input_data_frame) - - id_variables = [id_variable_by_entity_key[_entity.key] for _entity in tax_benefit_system.group_entities] - role_variables = [role_variable_by_entity_key[_entity.key] for _entity in tax_benefit_system.group_entities] - - for id_variable in id_variables + role_variables: - assert id_variable in input_data_frame.columns, "Variable {} is not present in input dataframe".format( - id_variable - ) - - input_data_frame = builder.filter_input_variables(input_data_frame, tax_benefit_system) - - index_by_entity_key = {} - - for entity in tax_benefit_system.entities: - builder.init_entity_structure(entity, input_data_frame) - - if entity.is_person: - continue - - else: - index_by_entity_key[entity.key] = ( - input_data_frame.loc[ - input_data_frame[role_variable_by_entity_key[entity.key]] == 0, - id_variable_by_entity_key[entity.key], - ] - .sort_values() - .index - ) - - for column_name, column_serie in input_data_frame.items(): - if role_variable_by_entity_key is not None and column_name in role_variable_by_entity_key.values(): - continue - - if id_variable_by_entity_key is not None and column_name in id_variable_by_entity_key.values(): - continue - - simulation = builder.build(tax_benefit_system) - entity = tax_benefit_system.variables[column_name].entity - if entity.is_person: - simulation.init_variable_in_entity(entity.key, column_name, column_serie, period) - else: - simulation.init_variable_in_entity( - entity.key, column_name, column_serie[index_by_entity_key[entity.key]], period - ) - - assert builder.id_variable_by_entity_key is not None - simulation.id_variable_by_entity_key = builder.id_variable_by_entity_key - return simulation - - -SimulationBuilder._set_id_variable_by_entity_key = _set_id_variable_by_entity_key -SimulationBuilder._set_role_variable_by_entity_key = _set_role_variable_by_entity_key -SimulationBuilder._set_used_as_input_variables_by_entity = 
_set_used_as_input_variables_by_entity -SimulationBuilder.filter_input_variables = filter_input_variables -SimulationBuilder.init_all_entities = init_all_entities -SimulationBuilder.init_entity_structure = init_entity_structure -SimulationBuilder.init_simulation_with_data_frame = init_simulation_with_data_frame diff --git a/openfisca_survey_manager/simulations.py b/openfisca_survey_manager/simulations.py deleted file mode 100644 index bcabd049..00000000 --- a/openfisca_survey_manager/simulations.py +++ /dev/null @@ -1,1433 +0,0 @@ -"""Monkey-patch openfisca_core.simulations.Simulation to work with pandas.""" - -import logging -import re -import warnings -from typing import Callable, Dict, List, Optional, Union - -import humanize -import numpy as np -import pandas as pd -from numpy import logical_or as or_ -from openfisca_core import periods -from openfisca_core.indexed_enums import Enum, EnumArray -from openfisca_core.memory_config import MemoryConfig -from openfisca_core.periods import ETERNITY, MONTH, YEAR -from openfisca_core.simulations import Simulation -from openfisca_core.types import Array, Period, TaxBenefitSystem -from openfisca_core.types import CoreEntity as Entity - -from openfisca_survey_manager.simulation_builder import SimulationBuilder, diagnose_variable_mismatch -from openfisca_survey_manager.statshelpers import mark_weighted_percentiles -from openfisca_survey_manager.survey_collections import SurveyCollection -from openfisca_survey_manager.utils import do_nothing, load_table - -log = logging.getLogger(__name__) - - -# Helpers - - -def assert_variables_in_same_entity(tax_benefit_system: TaxBenefitSystem, variables: List): - """ - Assert that variables are in the same entity. 
- - Args: - tax_benefit_system (TaxBenefitSystem): Host tax benefit system - variables (List): Variables supposed to belong to the same entity - - Returns: - str: Common entity of the variables - """ - entity = None - for variable_name in variables: - variable = tax_benefit_system.variables.get(variable_name) - assert variable - if entity is None: - entity = variable.entity - assert variable.entity == entity, "{} are not from the same entity: {} doesn't belong to {}".format( - variables, variable_name, entity.key - ) - return entity.key - - -def get_words(text: str): - return re.compile("[A-Za-z_]+[A-Za-z0-9_]*").findall(text) - - -# Main functions - - -def adaptative_calculate_variable( - simulation: Simulation, variable: str, period: Optional[Union[int, str, Period]] -) -> Array: - """ - Calculate variable by adpating it definition period to the target period. - - Args: - simulation (Simulation): Simulation to suse - variable (str): Variable to be computed - period (Optional[Union[int, str, Period]]): Target period - - Returns: - Array: Values of the variable on the target period - """ - if not isinstance(period, periods.Period): - period = periods.period(str(period)) - - tax_benefit_system = simulation.tax_benefit_system - assert tax_benefit_system is not None - - assert variable in tax_benefit_system.variables, "{} is not a valid variable".format(variable) - period_size_independent = tax_benefit_system.get_variable(variable).is_period_size_independent - definition_period = tax_benefit_system.get_variable(variable).definition_period - - if period_size_independent is False and definition_period != "eternity": - values = simulation.calculate_add(variable, period=period) - elif period_size_independent is True and definition_period == "month" and period.size_in_months > 1: - values = simulation.calculate(variable, period=period.first_month) - elif period_size_independent is True and definition_period == "month" and period.size_in_months == 1: - values = 
simulation.calculate(variable, period=period) - elif period_size_independent is True and definition_period == "year" and period.size_in_months > 12: - values = simulation.calculate(variable, period=period.start.offset("first-of", "year").period("year")) - elif period_size_independent is True and definition_period == "year" and period.size_in_months == 12: - values = simulation.calculate(variable, period=period) - elif period_size_independent is True and definition_period == "year": - values = simulation.calculate(variable, period=period.this_year) - elif definition_period == "eternity": - values = simulation.calculate(variable, period=period) - else: - values = None - assert values is not None, f"Unspecified calculation period for variable {variable}" - - return values - - -def compute_aggregate( - simulation: Simulation, - variable: str = None, - aggfunc: str = "sum", - filter_by: str = None, - period: Optional[Union[int, str, Period]] = None, - missing_variable_default_value=np.nan, - weighted: bool = True, - alternative_weights: Optional[Union[str, int, float, Array]] = None, - filtering_variable_by_entity: Dict = None, -) -> Optional[Union[None, float]]: - """ - Compute aggregate of a variable. - - Args: - simulation (Simulation): Simulation to use for the computation - variable (str, optional): Variable to aggregate. Defaults to None. - aggfunc (str, optional): Aggregation function. Defaults to 'sum'. - filter_by (str, optional): Filter variable or expression to use. Defaults to None. - period (Optional[Union[int, str, Period]], optional): Period. Defaults to None. - missing_variable_default_value (optional): Value to use for missing values. Defaults to np.nan. - weighted (bool, optional): Whether to weight the variable or not. Defaults to True. - alternative_weights (Optional[Union[str, int, float, Array]], optional): Alternative weigh to use. Defaults to None. - filtering_variable_by_entity (Dict, optional): Filtering variable by entity. Defaults to None. 
- - Returns: - float: Aggregate - """ - weight_variable_by_entity = simulation.weight_variable_by_entity - tax_benefit_system = simulation.tax_benefit_system - - if period is None: - period = simulation.period - - assert variable in tax_benefit_system.variables, f"{variable} is not a variable of the tax benefit system" - entity_key = tax_benefit_system.variables[variable].entity.key - - if filter_by is None and filtering_variable_by_entity is not None: - filter_by_variable = filtering_variable_by_entity.get(entity_key) - - if filter_by: - filter_by_variable = get_words(filter_by)[0] - assert filter_by_variable in tax_benefit_system.variables, ( - f"{filter_by_variable} is not a variable of the tax benefit system" - ) - entity_key = tax_benefit_system.variables[variable].entity.key - filter_by_entity_key = tax_benefit_system.variables[filter_by_variable].entity.key - assert filter_by_entity_key == entity_key, ( - f"You tried to compute agregates for variable '{variable}', of entity {entity_key}" - f" filtering by variable '{filter_by_variable}', of entity {filter_by_entity_key}. This is not possible." - f" Please choose a filter-by variable of same entity as '{variable}'." - ) - - expressions = [] - if filter_by is not None: - if filter_by in tax_benefit_system.variables: - filter_entity_key = tax_benefit_system.variables.get(filter_by).entity.key - assert filter_entity_key == entity_key, ( - "You tried to compute agregates for variable '{0}', of entity {1}" - " filtering by variable '{2}', of entity {3}. This is not possible." 
- " Please choose a filter-by variable of same entity as '{0}'.".format( - variable, entity_key, filter_by_variable, filter_by_entity_key - ) - ) - else: - filter_entity_key = assert_variables_in_same_entity(tax_benefit_system, get_words(filter_by)) - expressions.extend([filter_by]) - assert filter_entity_key == entity_key - else: - filter_dummy = np.array(1.0) - - uniform_weight = np.array(1.0) - weight_variable = None - if weighted: - assert or_(alternative_weights, weight_variable_by_entity), ( - "The weighted option is set at True but there is no weight variable for entity {} nor alternative weights. Either define a weight variable or switch to unweighted".format( - entity_key - ) - ) - if alternative_weights: - if isinstance(alternative_weights, str): - assert alternative_weights in tax_benefit_system.variables, ( - f"{alternative_weights} is not a valid variable of the tax benefit system" - ) - weight_variable = alternative_weights - - elif (type(alternative_weights) is int) or (type(alternative_weights) is float): - weight_variable = None - uniform_weight = float(alternative_weights) - elif weight_variable_by_entity: - weight_variable = weight_variable_by_entity[entity_key] - - if variable in simulation.tax_benefit_system.variables: - value = simulation.adaptative_calculate_variable(variable=variable, period=period) - else: - log.debug("Variable {} not found. 
Assigning {}".format(variable, missing_variable_default_value)) - return missing_variable_default_value - - weight = ( - simulation.adaptative_calculate_variable(weight_variable, period=period).astype(float) - if weight_variable - else uniform_weight - ) - if weight_variable: - assert any(weight != 0), "Weights shall not be all zeroes" - else: - assert uniform_weight != 0 - - if filter_by is not None: - expression_data_frame = simulation.create_data_frame_by_entity( - variables=get_words(filter_by), period=period, index=False - )[entity_key] - for expression in expressions: - expression_data_frame[expression] = expression_data_frame.eval(expression) - - filter_dummy = expression_data_frame[filter_by] - else: - filter_dummy = 1.0 - - if aggfunc == "sum": - aggregate = (value * weight * filter_dummy).sum() - elif aggfunc == "mean": - aggregate = (value * weight * filter_dummy).sum() / (weight * filter_dummy).sum() - elif aggfunc == "count": - aggregate = (weight * filter_dummy).sum() - elif aggfunc == "count_non_zero": - aggregate = (weight * (value != 0) * filter_dummy).sum() - else: - aggregate = None - - return aggregate - - -def compute_quantiles( - simulation: Simulation, - variable: str, - nquantiles: int = None, - period: Optional[Union[int, str, Period]] = None, - filter_by=None, - weighted: bool = True, - alternative_weights=None, - filtering_variable_by_entity=None, -) -> List[float]: - """ - Compute quantiles of a variable. - - Args: - simulation (Simulation, optional): Simulation to be used. Defaults to None. - variable (str, optional): Variable which quantiles are computed. Defaults to None. - nquantiles (int, optional): Number of quantiles. Defaults to None. - period (Optional[Union[int, str, Period]], optional): Period. Defaults to None. - missing_variable_default_value (optional): Value to use for missing values. Defaults to np.nan. - weighted (bool, optional): Whether to weight the variable or not. Defaults to True. 
- alternative_weights (Optional[Union[str, int, float, Array]], optional): Alternative weigh to use. Defaults to None. - filtering_variable_by_entity (Dict, optional): Filtering variable by entity. Defaults to None. - - Returns: - List(float): The quantiles values - """ - weight_variable_by_entity = simulation.weight_variable_by_entity - weight_variable = None - entity_key = simulation.tax_benefit_system.variables[variable].entity.key - if weight_variable_by_entity: - weight_variable = weight_variable_by_entity[entity_key] - - variable_values = simulation.adaptative_calculate_variable(variable, period) - if weighted: - assert (alternative_weights is not None) or (weight_variable is not None) - weight = ( - alternative_weights if alternative_weights is not None else simulation.calculate(weight_variable, period) - ) - else: - weight = np.ones(len(variable_values)) - - if filtering_variable_by_entity is not None and filter_by is None: - filter_by = filtering_variable_by_entity.get(entity_key) - - if filter_by is not None: - filter_entity_key = simulation.tax_benefit_system.variables.get(filter_by).entity.key - assert filter_entity_key == entity_key - filter_dummy = simulation.calculate(filter_by, period=period).astype(bool) - - variable_values = variable_values[filter_dummy].copy() - weight = weight[filter_dummy].copy() - - labels = np.arange(1, nquantiles + 1) - method = 2 - _, values = mark_weighted_percentiles(variable_values, labels, weight, method, return_quantiles=True) - return values - - -def compute_pivot_table( - simulation: Simulation = None, - baseline_simulation: Simulation = None, - aggfunc="mean", - columns: Optional[List[str]] = None, - difference: bool = False, - filter_by=None, - index: Optional[List[str]] = None, - period: Optional[Union[int, str, Period]] = None, - use_baseline_for_columns: bool = None, - values: Optional[List[str]] = None, - missing_variable_default_value=np.nan, - concat_axis: Optional[int] = None, - weighted: bool = True, - 
alternative_weights=None, - filtering_variable_by_entity=None, -): - """ - Compute pivot table. - - Args: - simulation (Simulation, optional): Main simulation. Defaults to None. - baseline_simulation (Simulation, optional): Baseline simulation. Defaults to None. - aggfunc (str, optional): Aggregation function. Defaults to 'mean'. - columns (List[str], optional): Variables to use in columns. Defaults to None. - difference (bool, optional): Whether to compute the difference with baseline. Defaults to False. - filter_by (str, optional): Filter variable or expression to use. Defaults to None. - index (List[str], optional): _description_. Defaults to None. - period (Optional[Union[int, str, Period]], optional): Period. Defaults to None. - use_baseline_for_columns (bool, optional): _description_. Defaults to None. - values (List[str], optional): _description_. Defaults to None. - missing_variable_default_value (optional): _description_. Defaults to np.nan. - concat_axis (int, optional): _description_. Defaults to None. - weighted (bool, optional): Whether to weight the variable or not. Defaults to True. - alternative_weights (Optional[Union[str, int, float, Array]], optional): Alternative weigh to use. Defaults to None. - filtering_variable_by_entity (Dict, optional): Filtering variable by entity. Defaults to None. 
- - Returns: - _type_: _description_ - """ - weight_variable_by_entity = simulation.weight_variable_by_entity - - admissible_aggfuncs = ["max", "mean", "min", "sum", "count", "sum_abs"] - assert aggfunc in admissible_aggfuncs - assert columns or index or values - - if baseline_simulation is not None: - tax_benefit_system = baseline_simulation.tax_benefit_system - else: - tax_benefit_system = simulation.tax_benefit_system - - assert period is not None - - if isinstance(columns, str): - columns = [columns] - elif columns is None: - columns = [] - assert isinstance(columns, list) - - if isinstance(index, str): - index = [index] - elif index is None: - index = [] - assert isinstance(index, list) - - if isinstance(values, str): - values = [values] - elif values is None: - values = [] - assert isinstance(values, list) - - entity_key = None - for axe in [columns, index, values]: - if (len(axe) != 0) and (entity_key is None): - entity_key = tax_benefit_system.variables[axe[0]].entity.key - continue - - if filter_by is None and filtering_variable_by_entity is not None: - filter_by = filtering_variable_by_entity.get(entity_key) - - variables = set(index + columns) - - # Select the entity weight corresponding to the variables that will provide values - uniform_weight = 1.0 - weight_variable = None - if weighted: - if alternative_weights: - if isinstance(alternative_weights, str): - assert alternative_weights in tax_benefit_system.variables, ( - f"{alternative_weights} is not a valid variable of the tax benefit system" - ) - weight_variable = alternative_weights - - elif (type(alternative_weights) is int) or (type(alternative_weights) is float): - weight_variable = None - uniform_weight = float(alternative_weights) - - else: - if weight_variable_by_entity: - weight_variable = weight_variable_by_entity[entity_key] - variables.add(weight_variable) - - else: - log.warn( - "There is no weight variable for entity {} nor alternative weights. 
Switch to unweighted".format( - entity_key - ) - ) - - expressions = [] - if filter_by is not None: - if filter_by in tax_benefit_system.variables: - variables.add(filter_by) - filter_entity_key = tax_benefit_system.variables.get(filter_by).entity.key - assert filter_entity_key == entity_key - else: - filter_entity_key = assert_variables_in_same_entity(tax_benefit_system, get_words(filter_by)) - expressions.extend([filter_by]) - assert filter_entity_key == entity_key - else: - filter_dummy = np.array(1.0) - - for expression in expressions: - expression_variables = get_words(expression) - expression_entity_key = assert_variables_in_same_entity(tax_benefit_system, expression_variables) - assert expression_entity_key == entity_key - for variable in expression_variables: - variables.add(variable) - - for variable in variables | set(values): - if variable in tax_benefit_system.variables: - assert tax_benefit_system.variables[variable].entity.key == entity_key, ( - "The variable {} does not belong to entity {}".format( - variable, - entity_key, - ) - ) - - if difference: - assert simulation is not None and baseline_simulation is not None - reform_data_frame = simulation.create_data_frame_by_entity(values, period=period, index=False)[ - entity_key - ].fillna(missing_variable_default_value) - baseline_data_frame = baseline_simulation.create_data_frame_by_entity(values, period=period, index=False)[ - entity_key - ].fillna(missing_variable_default_value) - for value_variable in values: - if value_variable not in baseline_data_frame: - baseline_data_frame[value_variable] = missing_variable_default_value - if value_variable not in reform_data_frame: - reform_data_frame[value_variable] = missing_variable_default_value - - data_frame = reform_data_frame - baseline_data_frame - - else: - if values: - data_frame = simulation.create_data_frame_by_entity(values, period=period, index=False)[entity_key] - for value_variable in values: - if value_variable not in data_frame: - 
data_frame[value_variable] = missing_variable_default_value - else: - data_frame = None - - use_baseline_data = difference or use_baseline_for_columns - - # use baseline if explicited or when computing difference - if use_baseline_data: - baseline_vars_data_frame = baseline_simulation.create_data_frame_by_entity( - variables=variables, period=period, index=False - )[entity_key] - else: - baseline_vars_data_frame = simulation.create_data_frame_by_entity( - variables=variables, period=period, index=False - )[entity_key] - - for expression in expressions: - baseline_vars_data_frame[expression] = baseline_vars_data_frame.eval(expression) - if filter_by is not None: - filter_dummy = baseline_vars_data_frame[filter_by] - if weight_variable is None: - weight_variable = "weight" - baseline_vars_data_frame[weight_variable] = uniform_weight - baseline_vars_data_frame[weight_variable] = baseline_vars_data_frame[weight_variable] * filter_dummy - # We drop variables that are in values from baseline_vars_data_frame - dropped_columns = [column for column in baseline_vars_data_frame.columns if column in values] - baseline_vars_data_frame.drop(columns=dropped_columns, inplace=True) - - data_frame = pd.concat( - [baseline_vars_data_frame, data_frame], - axis=1, - ) - - if values: - data_frame_by_value = {} - for value in values: - if aggfunc in ["mean", "sum", "sum_abs", "count"]: - data_frame[value] = ( - data_frame[value] * data_frame[weight_variable] - if aggfunc != "sum_abs" - else data_frame[value].abs() * data_frame[weight_variable] - ) - data_frame[value] = data_frame[value].fillna(missing_variable_default_value) - pivot_sum = data_frame.pivot_table(index=index, columns=columns, values=value, aggfunc="sum") - pivot_mass = data_frame.pivot_table( - index=index, columns=columns, values=weight_variable, aggfunc="sum" - ) - if aggfunc == "mean": - try: # Deal with a pivot_table pandas bug https://github.com/pandas-dev/pandas/issues/17038 - result = pivot_sum / 
pivot_mass.loc[weight_variable] - except KeyError: - result = pivot_sum / pivot_mass - elif aggfunc in ["sum", "sum_abs"]: - result = pivot_sum - elif aggfunc == "count": - result = pivot_mass.rename(columns={weight_variable: value}, index={weight_variable: value}) - - elif aggfunc in ["min", "max"]: - data_frame[value].fillna(missing_variable_default_value, inplace=True) - result = data_frame.pivot_table(index=index, columns=columns, values=value, aggfunc=aggfunc) - - data_frame_by_value[value] = result - - if len(list(data_frame_by_value.keys())) > 1: - if concat_axis is None: - return data_frame_by_value - else: - assert concat_axis in [0, 1] - return pd.concat(data_frame_by_value.values(), axis=concat_axis) - else: - return next(iter(data_frame_by_value.values())) - - else: - assert aggfunc == "count", "Can only use count for aggfunc if no values" - return data_frame.pivot_table(index=index, columns=columns, values=weight_variable, aggfunc="sum") - - -def create_data_frame_by_entity( - simulation: Simulation, - variables: Optional[List] = None, - expressions: Optional[List[str]] = None, - filter_by=None, - index: bool = False, - period: Optional[Union[int, str, Period]] = None, - merge: bool = False, -) -> Union[pd.DataFrame, Dict]: - """ - Create dataframe(s) of variables for the whole selected population. - - Args: - simulation (Simulation): Simulation to use. - variables (Optional[List], optional): Variables to retrieve, None means all. Defaults to None. - expressions (Optional[List[str]], optional): _description_. Defaults to None. - filter_by (str, optional): Filter variable or expression to use. Defaults to None. - index (bool, optional): Whether to use index (id) variables. Defaults to False. - period (Optional[Union[int, str, Period]], optional): Period of the computation. Defaults to None. - merge (bool, optional): Wheter to merge the datafrales into one. Defaults to False. 
- - Returns: - pd.DataFrame of Dict: Dataframe(s) with the variables values - """ - assert simulation is not None - id_variable_by_entity_key = simulation.id_variable_by_entity_key - tax_benefit_system = simulation.tax_benefit_system - assert tax_benefit_system is not None - - if period is None: - period = simulation.period - - assert variables or index or expressions or filter_by - - if merge: - index = True - if expressions is None: - expressions = [] - - if filter_by is not None: - if filter_by in tax_benefit_system.variables: - variables.append(filter_by) - filter_entity_key = tax_benefit_system.variables.get(filter_by).entity.key - else: - filter_entity_key = assert_variables_in_same_entity(tax_benefit_system, get_words(filter_by)) - expressions.append(filter_by) - - expressions_by_entity_key = {} - for expression in expressions: - expression_variables = get_words(expression) - entity_key = assert_variables_in_same_entity(tax_benefit_system, expression_variables) - if entity_key in expressions_by_entity_key: - expressions_by_entity_key[entity_key].append(expression) - else: - expressions_by_entity_key[entity_key] = [expression] - variables += expression_variables - - variables = set(variables) - - missing_variables = set(variables).difference(set(tax_benefit_system.variables.keys())) - if missing_variables: - log.info(f"These variables aren't part of the tax-benefit system: {missing_variables} and thus ignored") - - columns_to_fetch = [ - tax_benefit_system.variables.get(variable_name) - for variable_name in variables - if tax_benefit_system.variables.get(variable_name) is not None - ] - - assert len(columns_to_fetch) >= 1, "None of the requested variables {} are in the tax-benefit-system {}".format( - variables, list(tax_benefit_system.variables.keys()) - ) - - assert simulation is not None - - openfisca_data_frame_by_entity_key = {} - non_person_entities = [] - - for entity in tax_benefit_system.entities: - entity_key = entity.key - column_names = 
[column.name for column in columns_to_fetch if column.entity.key == entity_key] - openfisca_data_frame_by_entity_key[entity_key] = pd.DataFrame( - { - column_name: simulation.adaptative_calculate_variable(column_name, period=period) - for column_name in column_names - } - ) - if entity.is_person: - person_entity = entity - else: - non_person_entities.append(entity) - - if index: - person_data_frame = openfisca_data_frame_by_entity_key.get(person_entity.key) - person_data_frame.index.name = id_variable_by_entity_key.get("person", "person_id") - if person_data_frame is None: - person_data_frame = pd.DataFrame() - for entity in non_person_entities: - entity_key_id = id_variable_by_entity_key[entity.key] - person_data_frame[entity_key_id] = simulation.populations[entity.key].members_entity_id - flattened_roles = entity.flattened_roles - index_by_role = {flattened_roles[index]: index for index in range(len(flattened_roles))} - person_data_frame["{}_{}".format(entity.key, "role")] = pd.Series( - simulation.populations[entity.key].members_role - ).map(index_by_role) - person_data_frame["{}_{}".format(entity.key, "position")] = simulation.populations[ - entity.key - ].members_position - - # Set index names as entity_id - openfisca_data_frame_by_entity_key[entity.key].index.name = entity_key_id - openfisca_data_frame_by_entity_key[entity.key].reset_index(inplace=True) - person_data_frame.reset_index(inplace=True) - - for entity_key, expressions in expressions_by_entity_key.items(): - data_frame = openfisca_data_frame_by_entity_key[entity_key] - for expression in expressions: - data_frame[expression] = data_frame.eval(expression) - - if filter_by is not None: - openfisca_data_frame_by_entity_key[filter_entity_key] = ( - openfisca_data_frame_by_entity_key[filter_entity_key] - .loc[openfisca_data_frame_by_entity_key[filter_entity_key][filter_by]] - .copy() - ) - - if not merge: - return openfisca_data_frame_by_entity_key - else: - for entity_key, openfisca_data_frame in 
openfisca_data_frame_by_entity_key.items(): - if entity_key != person_entity.key: - entity_key_id = id_variable_by_entity_key[entity_key] - if len(openfisca_data_frame) > 0: - person_data_frame = person_data_frame.merge( - openfisca_data_frame.reset_index(), - left_on=entity_key_id, - right_on=entity_key_id, - ) - return person_data_frame - - -class SecretViolationError(Exception): - """Raised if the result of the simulation do not comform with regulators rules.""" - - pass - - -def compute_winners_losers( - simulation: Simulation, - baseline_simulation: Simulation, - variable: str, - filter_by: Optional[str] = None, - period: Optional[Union[int, str, Period]] = None, - absolute_minimal_detected_variation: float = 0, - relative_minimal_detected_variation: float = 0.01, - observations_threshold: int = None, - weighted: bool = True, - alternative_weights=None, - filtering_variable_by_entity=None, -) -> Dict[str, int]: - """ - Compute the number of winners and losers for a given variable. - - Args: - simulation (_type_): The main simulation. - baseline_simulation (_type_): The baseline simulation - variable (str): The variable to use. - filter_by (str, optional): The variable or expression to be used as a filter. Defaults to None. - period (Optional[Union[int, str, Period]], optional): The period of the simulation. Defaults to None. - absolute_minimal_detected_variation (float, optional): Absolute minimal variation to be detected, in ratio. Ie 0.5 means 5% of variation wont be counted.. - relative_minimal_detected_variation (float, optional): Relative minimal variation to be detected, in ratio. Defaults to .01. - observations_threshold (int, optional): Number of observations needed to avoid a statistical secret violation. Defaults to None. - weighted (bool, optional): Whether to use weights. Defaults to True. - alternative_weights (Optional[Union[str, int, float, Array]], optional): Alternative weigh to use. Defaults to None. 
- filtering_variable_by_entity (_type_, optional): The variable to be used as a filter for each entity. Defaults to None. - - Raises: - SecretViolationError: Raised when statistical secret is violated. - - Returns: - Dict[str, int]: Statistics about winners and losers between the main simulation and the baseline. - """ - weight_variable_by_entity = simulation.weight_variable_by_entity - entity_key = baseline_simulation.tax_benefit_system.variables[variable].entity.key - - # Get the results of the simulation - after = simulation.adaptative_calculate_variable(variable, period=period) - before = baseline_simulation.adaptative_calculate_variable(variable, period=period) - - # Filter if needed - if filtering_variable_by_entity is not None and filter_by is None: - filter_by = filtering_variable_by_entity.get(entity_key) - - if filter_by is not None: - filter_entity_key = baseline_simulation.tax_benefit_system.variables.get(filter_by).entity.key - assert filter_entity_key == entity_key - filter_dummy = baseline_simulation.calculate(filter_by, period=period).astype(bool) - - after = after[filter_dummy].copy() - before = before[filter_dummy].copy() - - # Define weights - weight = np.ones(len(after)) - if weighted: - if alternative_weights is not None: - weight = alternative_weights - elif weight_variable_by_entity is not None: - weight_variable = weight_variable_by_entity[entity_key] - weight = baseline_simulation.calculate(weight_variable, period=period) - else: - log.warn( - "There is no weight variable for entity {} nor alternative weights. 
Switch to unweighted".format( - entity_key - ) - ) - - # Compute the weigthed number of zeros or non zeros - value_by_simulation = {"after": after, "before": before} - stats_by_simulation = {} - for simulation_prefix, value in value_by_simulation.items(): - stats = {} - stats["count_zero"] = (weight.astype("float64") * (absolute_minimal_detected_variation > np.abs(value))).sum() - stats["count_non_zero"] = sum(weight.astype("float64")) - stats["count_zero"] - stats_by_simulation[simulation_prefix] = stats - del stats - - # Compute the number of entity above or below after - after_value = after - before_value = before - with np.errstate(divide="ignore", invalid="ignore"): - above_after = ((after_value - before_value) / np.abs(before_value)) > relative_minimal_detected_variation - almost_zero_before = np.abs(before_value) < absolute_minimal_detected_variation - above_after[almost_zero_before * (after_value >= 0)] = (after_value >= absolute_minimal_detected_variation)[ - almost_zero_before * (after_value >= 0) - ] - with np.errstate(divide="ignore", invalid="ignore"): - below_after = ((after_value - before_value) / np.abs(before_value)) < -relative_minimal_detected_variation - below_after[almost_zero_before * (after_value < 0)] = (after_value < -absolute_minimal_detected_variation)[ - almost_zero_before * (after_value < 0) - ] - - # Check if there is a secret violation, without weights - if observations_threshold is not None: - not_legit_below = (below_after.sum() < observations_threshold) & (below_after.sum() > 0) - not_legit_above = (above_after.sum() < observations_threshold) & (above_after.sum() > 0) - if not_legit_below | not_legit_above: - raise SecretViolationError("Not enough observations involved") - - # Apply weights - above_after_count = (above_after.astype("float64") * weight.astype("float64")).sum() - below_after_count = (below_after.astype("float64") * weight.astype("float64")).sum() - total = sum(weight) - neutral = total - above_after_count - 
below_after_count - - return { - "total": total, - "non_zero_before": stats_by_simulation["before"]["count_non_zero"], - "non_zero_after": stats_by_simulation["after"]["count_non_zero"], - "above_after": above_after_count, - "lower_after": below_after_count, - "neutral": neutral, - "tolerance_factor_used": relative_minimal_detected_variation, - "weight_factor": 1, - } - - -def init_entity_data( - simulation: Simulation, - entity: Entity, - filtered_input_data_frame: pd.DataFrame, - period: Period, - used_as_input_variables_by_entity: Dict, -): - """ - Initialize entity in simulation at some period with input provided by a dataframe. - - Args: - simulation (Simulation): The simulation to initialize. - entity (Entity): The entity which variables to initialize. - filtered_input_data_frame (pd.DataFrame): The dataframe with the variables values. - period (Period): The period to initialize. - used_as_input_variables_by_entity (Dict): The variable to be used to initialize each entity. - """ - used_as_input_variables = used_as_input_variables_by_entity[entity.key] - input_data_frame = filtered_input_data_frame - # input_data_frame = self.filter_input_variables(input_data_frame = input_data_frame) - diagnose_variable_mismatch(used_as_input_variables, input_data_frame) - - for column_name, column_serie in input_data_frame.items(): - variable_instance = simulation.tax_benefit_system.variables.get(column_name) - if variable_instance is None: - log.debug(f"Ignoring {column_name} in input data") - continue - - if variable_instance.entity.key != entity.key: - log.debug( - f"Ignoring variable {column_name} which is not part of entity {entity.key} but {variable_instance.entity.key}" - ) - continue - init_variable_in_entity(simulation, entity.key, column_name, column_serie, period) - - -def inflate( - simulation: Simulation, - inflator_by_variable: Optional[Dict] = None, - period: Optional[Union[int, str, Period]] = None, - target_by_variable: Optional[Dict] = None, -): - 
tax_benefit_system = simulation.tax_benefit_system - for variable_name in set(inflator_by_variable.keys()).union(set(target_by_variable.keys())): - assert variable_name in tax_benefit_system.variables, ( - "Variable {} is not a valid variable of the tax-benefit system".format(variable_name) - ) - if variable_name in target_by_variable: - inflator = inflator_by_variable[variable_name] = target_by_variable[ - variable_name - ] / simulation.compute_aggregate(variable=variable_name, period=period) - log.debug( - f"Using {inflator} as inflator for {variable_name} to reach the target {target_by_variable[variable_name]} " - ) - else: - assert variable_name in inflator_by_variable, "variable_name is not in inflator_by_variable" - log.debug( - f"Using inflator {inflator_by_variable[variable_name]} for {variable_name}. The target is thus {inflator_by_variable[variable_name] * simulation.compute_aggregate(variable=variable_name, period=period)}" - ) - inflator = inflator_by_variable[variable_name] - - array = simulation.calculate_add(variable_name, period=period) - assert array is not None - simulation.delete_arrays(variable_name, period=period) # delete existing arrays - simulation.set_input(variable_name, period, inflator * array) # insert inflated array - - -def _load_table_for_survey( - config_files_directory, collection, survey, table, batch_size=None, batch_index=None, filter_by=None -): - if survey is not None: - input_data_frame = load_table( - config_files_directory=config_files_directory, - collection=collection, - survey=survey, - table=table, - batch_size=batch_size, - batch_index=batch_index, - filter_by=filter_by, - ) - else: - input_data_frame = load_table( - config_files_directory=config_files_directory, - collection=collection, - survey="input", - table=table, - batch_size=batch_size, - batch_index=batch_index, - filter_by=filter_by, - ) - return input_data_frame - - -def _input_data_table_by_entity_by_period_monolithic( - tax_benefit_system, - simulation, - 
period, - input_data_table_by_entity, - builder, - custom_input_data_frame, - config_files_directory, - collection, - survey=None, -): - """ - Initialize simulation with input data from a table for each entity and period. - """ - period = periods.period(period) - simulation_datasets = {} - entities = tax_benefit_system.entities - for entity in entities: - # Read all tables for the entity - log.debug(f"init_simulation - {period=} {entity.key=}") - table = input_data_table_by_entity.get(entity.key) - filter_by = input_data_table_by_entity.get("filter_by", None) - if table is None: - continue - input_data_frame = _load_table_for_survey( - config_files_directory, collection, survey, table, filter_by=filter_by - ) - simulation_datasets[entity.key] = input_data_frame - - if simulation is None: - # Instantiate simulation only for the fist period - # Next period will reuse the same simulation - for entity in entities: - table = input_data_table_by_entity.get(entity.key) - if table is None: - continue - custom_input_data_frame(input_data_frame, period=period, entity=entity.key) - builder.init_entity_structure(entity, simulation_datasets[entity.key]) # TODO complete args - simulation = builder.build(tax_benefit_system) - simulation.id_variable_by_entity_key = ( - builder.id_variable_by_entity_key - ) # Should be propagated to enhanced build - - for entity in entities: - # Load data in the simulation - table = input_data_table_by_entity.get(entity.key) - if table is None: - continue - log.debug(f"init_simulation - {entity.key=} {len(input_data_frame)=}") - simulation.init_entity_data( - entity, simulation_datasets[entity.key], period, builder.used_as_input_variables_by_entity - ) - del simulation_datasets[entity.key] - return simulation - - -def _input_data_table_by_entity_by_period_batch( - tax_benefit_system, - simulation, - period, - input_data_table_by_entity, - builder, - custom_input_data_frame, - config_files_directory, - collection, - survey=None, -): - """ - 
Initialize simulation with input data from a table for each entity and period. - """ - period = periods.period(period) - batch_size = input_data_table_by_entity.get("batch_size") - batch_index = input_data_table_by_entity.get("batch_index", 0) - batch_entity = input_data_table_by_entity.get("batch_entity") - batch_entity_key = input_data_table_by_entity.get("batch_entity_key") - filtered_entity = input_data_table_by_entity.get("filtered_entity") - filtered_entity_on_key = input_data_table_by_entity.get("filtered_entity_on_key") - if not batch_entity or not batch_entity_key or not filtered_entity or not filtered_entity_on_key: - raise ValueError("batch_entity, batch_entity_key, filtered_entity and filtered_entity_on_key are required") - simulation_datasets = { - batch_entity: { - "table_key": batch_entity_key, - "input_data_frame": None, - "entity": None, - }, - filtered_entity: { - "table_key": filtered_entity_on_key, - "input_data_frame": None, - "entity": None, - }, - } - batch_entity_ids = None - entities = tax_benefit_system.entities - - if len(entities) > 2: - # Batch mode could work only with batch_entity and filtered_entity, and no others - warnings.warn( - f"survey-manager.simulation._input_data_table_by_entity_by_period_batch : Your TaxBenefitSystem has {len(entities)} entities but we will only load {batch_entity} and {filtered_entity}.", - stacklevel=2, - ) - - for entity_name, entity_data in simulation_datasets.items(): - # Find Identity object from TaxBenefitSystem - for entity in entities: - if entity.key == entity_name: - entity_data["entity"] = entity - break - - # Load the batch entity - table = input_data_table_by_entity[batch_entity] - input_data_frame = _load_table_for_survey( - config_files_directory, collection, survey, table, batch_size, batch_index - ) - batch_entity_ids = input_data_frame[batch_entity_key].to_list() - simulation_datasets[batch_entity]["input_data_frame"] = input_data_frame - - # Load the filtered entity - table = 
input_data_table_by_entity[filtered_entity] - filter_by = [(filtered_entity_on_key, "in", batch_entity_ids)] - input_data_frame = _load_table_for_survey(config_files_directory, collection, survey, table, filter_by=filter_by) - simulation_datasets[filtered_entity]["input_data_frame"] = input_data_frame - - if simulation is None: - for entity_name, entity_data in simulation_datasets.items(): - custom_input_data_frame(entity_data["input_data_frame"], period=period, entity=entity_name) - builder.init_entity_structure(entity_data["entity"], entity_data["input_data_frame"]) - simulation = builder.build(tax_benefit_system) - simulation.id_variable_by_entity_key = ( - builder.id_variable_by_entity_key - ) # Should be propagated to enhanced build - for _entity_name, entity_data in simulation_datasets.items(): - simulation.init_entity_data( - entity_data["entity"], entity_data["input_data_frame"], period, builder.used_as_input_variables_by_entity - ) - return simulation - - -def init_simulation(tax_benefit_system, period, data): - builder = SimulationBuilder() - builder.create_entities(tax_benefit_system) - - collection = data.get("collection") - custom_input_data_frame = data.get("custom_input_data_frame", do_nothing) - data_year = data.get("data_year") - survey = data.get("survey") - config_files_directory = data.get("config_files_directory") - builder.used_as_input_variables = data.get("used_as_input_variables") - builder.id_variable_by_entity_key = data.get("id_variable_by_entity_key") - builder.role_variable_by_entity_key = data.get("role_variable_by_entity_key") - builder.tax_benefit_system = tax_benefit_system - - default_source_types = [ - "input_data_frame", - "input_data_table", - "input_data_frame_by_entity", - "input_data_frame_by_entity_by_period", - "input_data_table_by_entity_by_period", - "input_data_table_by_period", - ] - source_types = [source_type_ for source_type_ in default_source_types if data.get(source_type_, None) is not None] - assert 
len(source_types) < 2, "There are too many data source types" - assert len(source_types) >= 1, "There should be one data source type included in {}".format(default_source_types) - source_type = source_types[0] - source = data[source_type] - - if source_type == "input_data_frame_by_entity": - assert data_year is not None - source_type = "input_data_frame_by_entity_by_period" - source = {periods.period(data_year): source} - - input_data_survey_prefix = data.get("input_data_survey_prefix") if data is not None else None - - if source_type == "input_data_frame": - simulation = builder.init_all_entities(source, period) - - if source_type == "input_data_table": - # Case 1: fill simulation with a unique input_data_frame given by the attribute - if input_data_survey_prefix is not None: - openfisca_survey_collection = SurveyCollection.load(collection=collection) - openfisca_survey = openfisca_survey_collection.get_survey( - "{}_{}".format(input_data_survey_prefix, data_year) - ) - input_data_frame = openfisca_survey.get_values(table="input").reset_index(drop=True) - else: - NotImplementedError - - custom_input_data_frame(input_data_frame, period=period) - simulation = builder.init_all_entities(input_data_frame, builder, period) # monolithic dataframes - - elif source_type == "input_data_table_by_period": - # Case 2: fill simulation with input_data_frame by period containing all entity variables - input_data_table_by_period = data.get("input_data_table_by_period") - for period, table in input_data_table_by_period.items(): - period = periods.period(period) - log.debug("From survey {} loading table {}".format(survey, table)) - input_data_frame = load_table( - config_files_directory=config_files_directory, - collection=collection, - survey=survey, - input_data_survey_prefix=input_data_survey_prefix, - table=table, - ) - custom_input_data_frame(input_data_frame, period=period) - simulation = builder.init_all_entities(input_data_frame, builder, period) # monolithic dataframes - - 
elif source_type == "input_data_frame_by_entity_by_period": - for period, input_data_frame_by_entity in source.items(): - period = periods.period(period) - for entity in tax_benefit_system.entities: - input_data_frame = input_data_frame_by_entity.get(entity.key) - if input_data_frame is None: - continue - custom_input_data_frame(input_data_frame, period=period, entity=entity.key) - builder.init_entity_structure(entity, input_data_frame) # TODO complete args - - simulation = builder.build(tax_benefit_system) - simulation.id_variable_by_entity_key = ( - builder.id_variable_by_entity_key - ) # Should be propagated to enhanced build - - for period, input_data_frame_by_entity in source.items(): - for entity in tax_benefit_system.entities: - input_data_frame = input_data_frame_by_entity.get(entity.key) - if input_data_frame is None: - log.debug("No input_data_frame found for entity {} at period {}".format(entity, period)) - continue - custom_input_data_frame(input_data_frame, period=period, entity=entity.key) - simulation.init_entity_data( - entity, input_data_frame, period, builder.used_as_input_variables_by_entity - ) - - elif source_type == "input_data_table_by_entity_by_period": - # Case 3: fill simulation with input_data_table by entity_by_period containing a dictionnary - # of all periods containing a dictionnary of entity variables - input_data_table_by_entity_by_period = source - simulation = None - for period, input_data_table_by_entity in input_data_table_by_entity_by_period.items(): - if input_data_table_by_entity.get("batch_size"): - simulation = _input_data_table_by_entity_by_period_batch( - tax_benefit_system, - simulation, - period, - input_data_table_by_entity, - builder, - custom_input_data_frame, - config_files_directory, - collection, - survey, - ) - else: - simulation = _input_data_table_by_entity_by_period_monolithic( - tax_benefit_system, - simulation, - period, - input_data_table_by_entity, - builder, - custom_input_data_frame, - 
config_files_directory, - collection, - survey, - ) - - else: - pass - - if data_year is not None: - simulation.period = periods.period(data_year) - - return simulation - - -def init_variable_in_entity(simulation: Simulation, entity, variable_name, series, period): - variable = simulation.tax_benefit_system.variables[variable_name] - - # np.issubdtype cannot handles categorical variables - if (not isinstance(series.dtype, pd.CategoricalDtype)) and np.issubdtype(series.values.dtype, np.floating): - if series.isnull().any(): - log.debug( - "There are {} NaN values for {} non NaN values in variable {}".format( - series.isnull().sum(), series.notnull().sum(), variable_name - ) - ) - log.debug( - "We convert these NaN values of variable {} to {} its default value".format( - variable_name, variable.default_value - ) - ) - series = pd.to_numeric(series).fillna(variable.default_value).astype(variable.value_type) - assert series.notnull().all(), "There are {} NaN values for {} non NaN values in variable {}".format( - series.isnull().sum(), series.notnull().sum(), variable_name - ) - - enum_variable_imputed_as_enum = variable.value_type == Enum and ( - isinstance(series.dtype, pd.CategoricalDtype) - or not (np.issubdtype(series.values.dtype, np.integer) or np.issubdtype(series.values.dtype, float)) - ) - - if enum_variable_imputed_as_enum: - if series.isnull().any(): - log.debug( - "There are {} NaN values ({}% of the array) in variable {}".format( - series.isnull().sum(), series.isnull().mean() * 100, variable_name - ) - ) - log.debug( - "We convert these NaN values of variable {} to {} its default value".format( - variable_name, variable.default_value._name_ - ) - ) - series.fillna(variable.default_value._name_, inplace=True) - possible_values = variable.possible_values - if isinstance(series.dtype, pd.CategoricalDtype): - series = series.cat.codes - else: - msg = "There are errors with {}".format(variable_name) - assert 
series.isin(list(possible_values._member_names_)).all(), msg - series = series.apply(lambda v: variable.possible_values[v].index) - - if series.values.dtype != variable.dtype: - log.debug("Converting {} from dtype {} to {}".format(variable_name, series.values.dtype, variable.dtype)) - - array = series.values.astype(variable.dtype) - np_array = np.array(array, dtype=variable.dtype) - - if (variable.value_type == Enum) and ( - np.issubdtype(series.values.dtype, np.integer) or np.issubdtype(series.values.dtype, float) - ): - np_array = EnumArray(np_array, variable.possible_values) - - if variable.definition_period == YEAR and period.unit == MONTH: - # Some variables defined for a year are present in month/quarter dataframes - # Cleaning the dataframe would probably be better in the long run - log.warn( - f"Trying to set a monthly value for variable {variable_name}, which is defined on a year. The montly values you provided will be summed." - ) - - if simulation.get_array(variable_name, period.this_year) is not None: - array_sum = simulation.get_array(variable_name, period.this_year) + np_array - simulation.set_input(variable_name, period.this_year, array_sum) - else: - simulation.set_input(variable_name, period.this_year, np_array) - - else: - simulation.set_input(variable_name, period, np_array) - - -def new_from_tax_benefit_system( - tax_benefit_system: TaxBenefitSystem, - debug: bool = False, - trace: bool = False, - data: Dict = None, - memory_config: MemoryConfig = None, - period: Optional[Union[int, str, Period]] = None, - custom_initialize: Callable = None, -) -> Simulation: - """ - Create and initialize a simulation from a tax and benefit system and data. - - Args: - tax_benefit_system (TaxBenefitSystem): The tax and benefit system - debug (bool, optional): Whether to activate debugging. Defaults to False. - trace (bool, optional): Whether to activate tracing. Defaults to False. - data (Dict, optional): The information about data. Defaults to None. 
- memory_config (MemoryConfig, optional): The memory handling config. Defaults to None. - period (Optional[Union[int, str, Period]], optional): The period of the simulation. Defaults to None. - custom_initialize (Callable, optional): The post-processing initialization function. Defaults to None. - - Returns: - Simulation: The completely initialized function - """ - - simulation = Simulation.init_simulation(tax_benefit_system, period, data) - simulation.debug = debug - simulation.trace = trace - simulation.opt_out_cache = simulation.tax_benefit_system.cache_blacklist is not None - simulation.memory_config = memory_config - - if custom_initialize: - custom_initialize(simulation) - - return simulation - - -def print_memory_usage(simulation: Simulation): - """ - Print memory usage. - - Args: - simulation (Simulation): The simulation which memory usage is to be printed - """ - memory_usage_by_variable = simulation.get_memory_usage()["by_variable"] - try: - usage_stats = simulation.tracer.usage_stats - except AttributeError: - log.warning( - "The simulation trace mode is not activated. You need to activate it to get stats about variable usage (hits)." - ) - usage_stats = None - infos_lines = [] - - for variable, infos in memory_usage_by_variable.items(): - hits = usage_stats[variable]["nb_requests"] if usage_stats else None - infos_lines.append( - ( - infos["total_nb_bytes"], - variable, - "{}: {} periods * {} cells * item size {} ({}) = {} with {} hits".format( - variable, - infos["nb_arrays"], - infos["nb_cells_by_array"], - infos["cell_size"], - infos["dtype"], - humanize.naturalsize(infos["total_nb_bytes"], gnu=True), - hits, - ), - ) - ) - infos_lines.sort() - for _, _, line in infos_lines: - print(line.rjust(100)) # noqa analysis:ignore - - -def set_weight_variable_by_entity( - simulation: Simulation, - weight_variable_by_entity: Dict, -): - """ - Set weight variable for each entity. - - Args: - simulation (Simulation): The simulation concerned. 
- weight_variable_by_entity (Dict): The weight variable for each entity. - """ - simulation.weight_variable_by_entity = weight_variable_by_entity - - -def summarize_variable(simulation: Simulation, variable=None, weighted=False, force_compute=False): - """Print a summary of a variable including its memory usage. - - Args: - variable(string): The variable being summarized - use_baseline(bool): The tax-benefit-system considered - weighted(bool): Whether the produced statistics should be weigthted or not - force_compute(bool): Whether the computation of the variable should be forced - """ - tax_benefit_system = simulation.tax_benefit_system - variable_instance = tax_benefit_system.variables.get(variable) - assert variable_instance is not None, "{} is not a valid variable".format(variable) - - default_value = variable_instance.default_value - value_type = variable_instance.value_type - - if variable_instance.is_neutralized: - print("") # noqa analysis:ignore - print("{}: neutralized variable ({}, default = {})".format(variable, str(np.dtype(value_type)), default_value)) # noqa analysis:ignore - return - - if weighted: - weight_variable = simulation.weight_variable_by_entity[variable_instance.entity.key] - weights = simulation.calculate(weight_variable, simulation.period) - - infos = simulation.get_memory_usage(variables=[variable])["by_variable"].get(variable) - if not infos: - if force_compute: - simulation.adaptative_calculate_variable(variable=variable, period=simulation.period) - simulation.summarize_variable(variable=variable, weighted=weighted) - return - else: - print("{} is not computed yet. 
Use keyword argument force_compute = True".format(variable)) # noqa analysis:ignore - return - - header_line = "{}: {} periods * {} cells * item size {} ({}, default = {}) = {}".format( - variable, - infos["nb_arrays"], - infos["nb_cells_by_array"], - infos["cell_size"], - str(np.dtype(infos["dtype"])), - default_value, - humanize.naturalsize(infos["total_nb_bytes"], gnu=True), - ) - print("") # noqa analysis:ignore - print(header_line) # noqa analysis:ignore - print("Details:") # noqa analysis:ignore - holder = simulation.get_holder(variable) - if holder is not None: - if holder.variable.definition_period == ETERNITY: - array = holder.get_array(ETERNITY) - else: - for period in sorted(simulation.get_known_periods(variable)): - array = holder.get_array(period) - if array.shape == (): # noqa analysis:ignore - print("{}: always = {}".format(period, array)) # noqa analysis:ignore - continue - - if value_type == Enum: - possible_values = variable_instance.possible_values - categories_by_index = dict( - zip(range(len(possible_values._member_names_)), possible_values._member_names_) - ) - categories_type = pd.api.types.CategoricalDtype( - categories=possible_values._member_names_, ordered=True - ) - df = pd.DataFrame({variable: array}).replace(categories_by_index).astype(categories_type) - df["weights"] = weights if weighted else 1 - groupby = df.groupby(variable)["weights"].sum() - total = groupby.sum() - expr = [" {} = {:.2e} ({:.1%})".format(index, row, row / total) for index, row in groupby.items()] - print("{}: {}.".format(period, ",".join(expr))) # noqa analysis:ignore - continue - - # Handle numeric types - default_array = np.array(default_value, dtype=infos["dtype"]) - total = np.sum(array * weights) if weighted else np.sum(array) - print( # noqa T201 - "{}: mean = {}, min = {}, max = {}, mass = {:.2e}, default = {:.1%}, median = {}".format( - period, - (array * weights).sum() / weights.sum() if weighted else array.mean(), - array.min(), - array.max(), - total, - 
(array == default_array).sum() / len(array), - np.median(array), - ) - ) - - -# Monkey patching - -Simulation.adaptative_calculate_variable = adaptative_calculate_variable -Simulation.compute_aggregate = compute_aggregate -Simulation.compute_pivot_table = compute_pivot_table -Simulation.create_data_frame_by_entity = create_data_frame_by_entity -Simulation.compute_quantiles = compute_quantiles -Simulation.compute_winners_losers = compute_winners_losers -Simulation.new_from_tax_benefit_system = new_from_tax_benefit_system -Simulation.inflate = inflate -Simulation.init_entity_data = init_entity_data -Simulation.init_simulation = init_simulation -Simulation.init_variable_in_entity = init_variable_in_entity -Simulation.print_memory_usage = print_memory_usage -Simulation.set_weight_variable_by_entity = set_weight_variable_by_entity -Simulation.summarize_variable = summarize_variable diff --git a/openfisca_survey_manager/statshelpers.py b/openfisca_survey_manager/statshelpers.py deleted file mode 100644 index 605195aa..00000000 --- a/openfisca_survey_manager/statshelpers.py +++ /dev/null @@ -1,371 +0,0 @@ -import numpy as np -import pandas as pd -import weightedcalcs as wc -import wquantiles -from numpy import argsort, asarray, cumsum, linspace, ones, repeat, zeros -from numpy import logical_and as and_ - - -def gini(values, weights=None): - """Computes Gini coefficient (normalized to 1). - # Using fastgini formula : - # i=N j=i - # SUM W_i*(SUM W_j*X_j - W_i*X_i/2) - # i=1 j=1 - # G = 1 - 2* ---------------------------------- - # i=N i=N - # SUM W_i*X_i * SUM W_i - # i=1 i=1 - # where observations are sorted in ascending order of X. 
- # From http://fmwww.bc.edu/RePec/bocode/f/fastgini.html - - Args: - values: Vector of values - weights: Weights vector (Default value = None) - - Returns: - float: Gini - """ - if weights is None: - weights = ones(len(values)) - - df = pd.DataFrame({"x": values, "w": weights}) - df = df.sort_values(by="x") - x = df["x"] - w = df["w"] - wx = w * x - - cdf = cumsum(wx) - 0.5 * wx - numerator = (w * cdf).sum() - denominator = ((wx).sum()) * (w.sum()) - gini = 1 - 2 * (numerator / denominator) - - return gini - - -def kakwani(values, ineq_axis, weights=None): - """Computes the Kakwani index - - Args: - values: Vector of values - ineq_axis: Inequality axis - weights: Weights vector (Default value = None) - - Returns: - float: Kakwani index - """ - from scipy.integrate import simps - - if weights is None: - weights = ones(len(values)) - - PLCx, PLCy = pseudo_lorenz(values, ineq_axis, weights) - LCx, LCy = lorenz(ineq_axis, weights) - - del PLCx - - return simps((LCy - PLCy), LCx) - - -def lorenz(values, weights=None): - """Computes Lorenz curve coordinates (x, y) - - Args: - values: Vector of values - weights: Weights vector (Default value = None) - - Returns: - (np.array, np.array): Lorenz curve coordinates - """ - if weights is None: - weights = ones(len(values)) - - df = pd.DataFrame({"v": values, "w": weights}) - df = df.sort_values(by="v") - x = cumsum(df["w"]) - x = x / float(x[-1:]) - y = cumsum(df["v"] * df["w"]) - y = y / float(y[-1:]) - - return x, y - - -def mark_weighted_percentiles(a, labels, weights, method, return_quantiles=False): - """ - - Args: - a: - labels: - weights: - method: - return_quantiles: (Default value = False) - - Returns: - - """ - # from http://pastebin.com/KTLip9ee - # a is an input array of values. - # weights is an input array of weights, so weights[i] goes with a[i] - # labels are the names you want to give to the xtiles - # method refers to which weighted algorithm. - # 1 for wikipedia, 2 for the stackexchange post. 
- - # The code outputs an array the same shape as 'a', but with - # labels[i] inserted into spot j if a[j] falls in x-tile i. - # The number of xtiles requested is inferred from the length of 'labels'. - - np.random.seed(42) - - # First method, "vanilla" weights from Wikipedia article. - if method == 1: - # Sort the values and apply the same sort to the weights. - N = len(a) - sort_indx = argsort(a) - tmp_a = a[sort_indx].copy() - tmp_weights = weights[sort_indx].copy() - - # 'labels' stores the name of the x-tiles the user wants, - # and it is assumed to be linearly spaced between 0 and 1 - # so 5 labels implies quintiles, for example. - num_categories = len(labels) - breaks = linspace(0, 1, num_categories + 1) - - # Compute the percentile values at each explicit data point in a. - cu_weights = cumsum(tmp_weights) - p_vals = (1.0 / cu_weights[-1]) * (cu_weights - 0.5 * tmp_weights) - - # Set up the output array. - ret = repeat(0, len(a)) - if len(a) < num_categories: - return ret - - # Set up the array for the values at the breakpoints. - quantiles = [] - - # Find the two indices that bracket the breakpoint percentiles. - # then do interpolation on the two a_vals for those indices, using - # interp-weights that involve the cumulative sum of weights. - for brk in breaks: - if brk <= p_vals[0]: - i_low = 0 - i_high = 0 - elif brk >= p_vals[-1]: - i_low = N - 1 - i_high = N - 1 - else: - for ii in range(N - 1): - if (p_vals[ii] <= brk) and (brk < p_vals[ii + 1]): - i_low = ii - i_high = ii + 1 - - if i_low == i_high: - v = tmp_a[i_low] - else: - # If there are two brackets, then apply the formula as per Wikipedia. - v = tmp_a[i_low] + ((brk - p_vals[i_low]) / (p_vals[i_high] - p_vals[i_low])) * ( - tmp_a[i_high] - tmp_a[i_low] - ) - - # Append the result. - quantiles.append(v) - - # Now that the weighted breakpoints are set, just categorize - # the elements of a with logical indexing. 
- for i in range(0, len(quantiles) - 1): - lower = quantiles[i] - upper = quantiles[i + 1] - ret[and_(a >= lower, a < upper)] = labels[i] - - # make sure upper and lower indices are marked - ret[a <= quantiles[0]] = labels[0] - ret[a >= quantiles[-1]] = labels[-1] - - return ret - - # The stats.stackexchange suggestion. - elif method == 2: - N = len(a) - sort_indx = argsort(a) - tmp_a = a[sort_indx].copy() - tmp_weights = weights[sort_indx].copy() - - num_categories = len(labels) - breaks = linspace(0, 1, num_categories + 1) - - cu_weights = cumsum(tmp_weights) - - # Formula from stats.stackexchange.com post. - s_vals = [0.0] - for ii in range(1, N): - s_vals.append(ii * tmp_weights[ii] + (N - 1) * cu_weights[ii - 1]) - s_vals = asarray(s_vals) - - # Normalized s_vals for comapring with the breakpoint. - norm_s_vals = (1.0 / s_vals[-1]) * s_vals - - # Set up the output variable. - ret = repeat(0, N) - if num_categories > N: - return ret - - # Set up space for the values at the breakpoints. - quantiles = [] - - # Find the two indices that bracket the breakpoint percentiles. - # then do interpolation on the two a_vals for those indices, using - # interp-weights that involve the cumulative sum of weights. - for brk in breaks: - if brk <= norm_s_vals[0]: - i_low = 0 - i_high = 0 - elif brk >= norm_s_vals[-1]: - i_low = N - 1 - i_high = N - 1 - else: - for ii in range(N - 1): - if (norm_s_vals[ii] <= brk) and (brk < norm_s_vals[ii + 1]): - i_low = ii - i_high = ii + 1 - - if i_low == i_high: - v = tmp_a[i_low] - else: - # Interpolate as in the method 1 method, but using the s_vals instead. - v = tmp_a[i_low] + (((brk * s_vals[-1]) - s_vals[i_low]) / (s_vals[i_high] - s_vals[i_low])) * ( - tmp_a[i_high] - tmp_a[i_low] - ) - quantiles.append(v) - - # Now that the weighted breakpoints are set, just categorize - # the elements of a as usual. 
- for i in range(0, len(quantiles) - 1): - lower = quantiles[i] - upper = quantiles[i + 1] - ret[and_(a >= lower, a < upper)] = labels[i] - - # make sure upper and lower indices are marked - ret[a <= quantiles[0]] = labels[0] - ret[a >= quantiles[-1]] = labels[-1] - - if return_quantiles: - return ret, quantiles - else: - return ret - - -def pseudo_lorenz(values, ineq_axis, weights=None): - """Computes The pseudo Lorenz Curve coordinates - - Args: - values: - ineq_axis: - weights: (Default value = None) - - Returns: - - """ - if weights is None: - weights = ones(len(values)) - df = pd.DataFrame({"v": values, "a": ineq_axis, "w": weights}) - df = df.sort_values(by="a") - x = cumsum(df["w"]) - x = x / float(x[-1:]) - y = cumsum(df["v"] * df["w"]) - y = y / float(y[-1:]) - - return x, y - - -def bottom_share(values, rank_from_bottom, weights=None): - """ - - Args: - values(np.array): Vector of values - rank_from_bottom(float): Rank from bottom (bottom is 0 and top is 1) - weights(np.array): Weights vector (Default value = None) - - Returns: - - """ - if weights is None: - weights = ones(len(values)) - - calc = wc.Calculator("weights") - data_frame = pd.DataFrame( - { - "weights": weights, - "data": values, - } - ) - quantile = calc.quantile(data_frame, "data", rank_from_bottom) - - return ((data_frame["data"] < quantile) * data_frame["data"] * data_frame["weights"]).sum() / ( - data_frame["data"] * data_frame["weights"] - ).sum() - - -def top_share(values, rank_from_top, weights=None): - """ - - Args: - values(np.array): Vector of values - rank_from_top(float): Rank from top (bottom is 1 and top is 0) - weights(np.array): Weights vector (Default value = None) - - Returns: - - """ - if weights is None: - weights = ones(len(values)) - - calc = wc.Calculator("weights") - data_frame = pd.DataFrame( - { - "weights": weights, - "data": values, - } - ) - quantile = calc.quantile(data_frame, "data", 1 - rank_from_top) - return ((data_frame["data"] >= quantile) * 
data_frame["data"] * data_frame["weights"]).sum() / ( - data_frame["data"] * data_frame["weights"] - ).sum() - - -def weighted_quantiles(data, labels, weights, return_quantiles=False): - num_categories = len(labels) - breaks = linspace(0, 1, num_categories + 1) - quantiles = [wquantiles.quantile_1D(data, weights, mybreak) for mybreak in breaks[1:]] - ret = zeros(len(data)) - for i in range(0, len(quantiles) - 1): - lower = quantiles[i] - upper = quantiles[i + 1] - ret[and_(data >= lower, data < upper)] = labels[i] - - if return_quantiles: - return ret + 1, quantiles - else: - return ret + 1 - - -def weightedcalcs_quantiles(data, labels, weights, return_quantiles=False): - calc = wc.Calculator("weights") - num_categories = len(labels) - breaks = linspace(0, 1, num_categories + 1) - data_frame = pd.DataFrame( - { - "weights": weights, - "data": data, - } - ) - quantiles = [calc.quantile(data_frame, "data", mybreak) for mybreak in breaks[1:]] - - ret = zeros(len(data)) - for i in range(0, len(quantiles) - 1): - lower = quantiles[i] - upper = quantiles[i + 1] - ret[and_(data > lower, data <= upper)] = labels[i] - - if return_quantiles: - return ret + 1, quantiles - else: - return ret + 1 diff --git a/openfisca_survey_manager/survey_collections.py b/openfisca_survey_manager/survey_collections.py deleted file mode 100644 index 9afb3f74..00000000 --- a/openfisca_survey_manager/survey_collections.py +++ /dev/null @@ -1,146 +0,0 @@ -import codecs -import collections -import json -import logging -from pathlib import Path - -from openfisca_survey_manager.config import Config -from openfisca_survey_manager.paths import default_config_files_directory -from openfisca_survey_manager.surveys import Survey - -log = logging.getLogger(__name__) - - -class SurveyCollection(object): - """A collection of Surveys""" - - config = None - json_file_path = None - label = None - name = None - surveys = [] - - def __init__( - self, config_files_directory=default_config_files_directory, 
label=None, name=None, json_file_path=None - ): - log.debug("Initializing SurveyCollection from config file found in {} ..".format(config_files_directory)) - config = Config(config_files_directory=config_files_directory) - if label is not None: - self.label = label - if name is not None: - self.name = name - if json_file_path is not None: - self.json_file_path = json_file_path - if "collections" not in config.sections(): - config["collections"] = {} - config.set("collections", self.name, str(self.json_file_path)) - config.save() - elif config is not None: - if config.has_option("collections", self.name): - self.json_file_path = config.get("collections", self.name) - elif config.get("collections", "collections_directory") is not None: - self.json_file_path = str(Path(config.get("collections", "collections_directory")) / (name + ".json")) - - self.config = config - - def __repr__(self): - header = """{} -Survey collection of {} -Contains the following surveys : -""".format(self.name, self.label) - surveys = [" {} : {} \n".format(survey.name, survey.label) for survey in self.surveys] - return header + "".join(surveys) - - def dump(self, config_files_directory=None, json_file_path=None): - """ - Dump the survey collection to a json file - And set the json file path in the config file - """ - if self.config is not None: - config = self.config - else: - if config_files_directory is not None: - pass - else: - config_files_directory = default_config_files_directory - self.config = Config(config_files_directory=config_files_directory) - - if json_file_path is None: - assert self.json_file_path is not None, "A json_file_path shoud be provided" - else: - self.json_file_path = json_file_path - - config.set("collections", self.name, str(self.json_file_path)) - config.save() - with codecs.open(self.json_file_path, "w", encoding="utf-8") as _file: - json.dump(self.to_json(), _file, ensure_ascii=False, indent=2) - - def fill_store( - self, - source_format=None, - surveys=None, - 
tables=None, - overwrite=False, - keep_original_parquet_file=False, - encoding=None, - ): - if surveys is None: - surveys = self.surveys - for survey in surveys: - survey.fill_store( - source_format=source_format, - tables=tables, - overwrite=overwrite, - keep_original_parquet_file=keep_original_parquet_file, - encoding=encoding, - ) - self.dump() - - def get_survey(self, survey_name): - available_surveys_names = [survey.name for survey in self.surveys] - assert survey_name in available_surveys_names, ( - "Survey {} cannot be found for survey collection {}.\nAvailable surveys are :{}".format( - survey_name, self.name, available_surveys_names - ) - ) - return [survey for survey in self.surveys if survey.name == survey_name].pop() - - @classmethod - def load(cls, json_file_path=None, collection=None, config_files_directory=default_config_files_directory): - assert Path(config_files_directory).exists() - config = Config(config_files_directory=config_files_directory) - if json_file_path is None: - assert collection is not None, "A collection is needed" - try: - json_file_path = config.get("collections", collection) - except Exception as error: - msg = "Looking for config file in {}".format(config_files_directory) - log.debug(msg) - log.error(error) - raise Exception(msg) from error - - with Path(json_file_path).open("r") as _file: - self_json = json.load(_file) - name = self_json["name"] - - self = cls(config_files_directory=config_files_directory, name=name) - self.config = config - with Path(json_file_path).open("r") as _file: - self_json = json.load(_file) - self.json_file_path = json_file_path - self.label = self_json.get("label") - self.name = self_json.get("name") - - surveys = self_json["surveys"] - for survey_name, survey_json in surveys.items(): - survey = Survey(name=survey_name) - self.surveys.append(survey.create_from_json(survey_json)) - return self - - def to_json(self): - self_json = collections.OrderedDict(()) - self_json["name"] = self.name - 
self_json["surveys"] = collections.OrderedDict(()) - for survey in self.surveys: - self_json["surveys"][survey.name] = survey.to_json() - return self_json diff --git a/openfisca_survey_manager/surveys.py b/openfisca_survey_manager/surveys.py deleted file mode 100644 index fafdc749..00000000 --- a/openfisca_survey_manager/surveys.py +++ /dev/null @@ -1,382 +0,0 @@ -#! /usr/bin/env python - - -import collections -import logging -import re -from pathlib import Path - -import pandas -import pyarrow as pa -import pyarrow.parquet as pq -import yaml - -from .tables import Table - -ident_re = re.compile(r"(?i)ident\d{2,4}$") # noqa - -log = logging.getLogger(__name__) - - -source_format_by_extension = { - "csv": "csv", - "sas7bdat": "sas", - "dta": "stata", - "Rdata": "Rdata", - "spss": "sav", - "parquet": "parquet", -} - -admissible_source_formats = list(source_format_by_extension.values()) - - -class NoMoreDataError(Exception): - # Exception when the user ask for more data than available in file - pass - - -class Survey(object): - """An object to describe survey data""" - - hdf5_file_path = None - parquet_file_path = None - informations = {} - label = None - name = None - tables = collections.OrderedDict() - tables_index = {} - survey_collection = None - - def __init__( - self, name=None, label=None, hdf5_file_path=None, parquet_file_path=None, survey_collection=None, **kwargs - ): - assert name is not None, "A survey should have a name" - self.name = name - self.tables = {} - - if label is not None: - self.label = label - - if hdf5_file_path is not None: - self.hdf5_file_path = hdf5_file_path - - if parquet_file_path is not None: - self.parquet_file_path = parquet_file_path - - if survey_collection is not None: - self.survey_collection = survey_collection - - self.informations = kwargs - - def __repr__(self): - header = f"""{self.name} : survey data {self.label} -Contains the following tables : \n""" - tables = yaml.safe_dump(list(self.tables.keys()), 
default_flow_style=False) - informations = yaml.safe_dump(self.informations, default_flow_style=False) - return header + tables + informations - - @classmethod - def create_from_json(cls, survey_json): - self = cls( - name=survey_json.get("name"), - label=survey_json.get("label"), - hdf5_file_path=survey_json.get("hdf5_file_path"), - parquet_file_path=survey_json.get("parquet_file_path"), - **survey_json.get("informations", {}), - ) - self.tables = survey_json.get("tables") - return self - - def dump(self): - assert self.survey_collection is not None - self.survey_collection.dump() - - def fill_store( - self, - source_format=None, - tables=None, - overwrite=True, - keep_original_parquet_file=False, - encoding=None, - store_format="hdf5", - ): - """ - Convert data from the source files to store format either hdf5 or parquet. - If the source is in parquet, the data is not converted. - """ - assert self.survey_collection is not None - assert isinstance(overwrite, (bool, list)) - survey = self - # Create folder if it does not exist - config = survey.survey_collection.config - directory_path = config.get("data", "output_directory") - if not Path(directory_path).is_dir(): - log.warn( - f"{directory_path} who should be the store data directory does not exist: we create the directory" - ) - Path(directory_path).mkdir(parents=True) - - if source_format == "parquet": - store_format = "parquet" - - if store_format == "hdf5" and survey.hdf5_file_path is None: - survey.hdf5_file_path = str(Path(directory_path) / (survey.name + ".h5")) - - if store_format == "parquet" and survey.parquet_file_path is None: - survey.parquet_file_path = str(Path(directory_path) / survey.name) - - self.store_format = store_format - - if source_format is not None: - assert source_format in admissible_source_formats, f"Data source format {source_format} is unknown" - source_formats = [source_format] - else: - source_formats = admissible_source_formats - - for source_format in source_formats: - files = 
f"{source_format}_files" - for data_file in survey.informations.get(files, []): - name = Path(data_file).stem - extension = Path(data_file).suffix - if tables is None or name in tables: - if keep_original_parquet_file: - # Use folder instead of files if numeric at end of file - if re.match(r".*-\d$", name): - name = name.split("-")[0] - parquet_file = str(Path(data_file).parent) - # Get the parent folder - survey.parquet_file_path = str(Path(data_file).parent.parent) - else: - parquet_file = data_file - survey.parquet_file_path = str(Path(data_file).parent) - table = Table( - label=name, - name=name, - source_format=source_format_by_extension[extension[1:]], - survey=survey, - parquet_file=parquet_file, - ) - table.read_parquet_columns(data_file) - - else: - table = Table( - label=name, - name=name, - source_format=source_format_by_extension[extension[1:]], - survey=survey, - ) - table.fill_store( - data_file, - clean=True, - overwrite=overwrite if isinstance(overwrite, bool) else table.name in overwrite, - encoding=encoding, - ) - self.dump() - - def get_value(self, variable, table, lowercase=False, ignorecase=False): - """Get variable value from a survey table. - - Args: - variable: variable to retrieve - table(str): name of the table - lowercase(bool, optional, optional): lowercase variable names, defaults to False - ignorecase: ignore case of table name, defaults to False - - Returns: - pd.DataFrame: dataframe containing the variable - - """ - return self.get_values([variable], table) - - def get_values( - self, - variables=None, - table=None, - lowercase=False, - ignorecase=False, - rename_ident=True, - batch_size=None, - batch_index=0, - filter_by=None, - ) -> pandas.DataFrame: - """Get variables values from a survey table. 
- - Args: - variables(list, optional, optional): variables to retrieve, defaults to None (retrieve all variables) - table(str, optional, optional): name of the table, defaults to None - ignorecase: ignore case of table name, defaults to False - lowercase(bool, optional, optional): lowercase variable names, defaults to False - rename_ident(bool, optional, optional): rename ident+yr (e.g. ident08) into ident, defaults to True - batch_size(int, optional, optional): batch size for parquet file, defaults to None - batch_index(int, optional, optional): batch index for parquet file, defaults to 0 - - Returns: - pd.DataFrame: dataframe containing the variables - - Raises: - Exception: - - """ - if self.parquet_file_path is None and self.hdf5_file_path is None: - raise Exception(f"No data file found for survey {self.name}") - if self.hdf5_file_path is not None: - assert Path(self.hdf5_file_path).exists(), ( - f"{self.hdf5_file_path} is not a valid path. This could happen because your data were not builded yet. Please consider using a rebuild option in your code." - ) - store = pandas.HDFStore(self.hdf5_file_path, "r") - if ignorecase: - keys = store.keys() - eligible_tables = [] - for string in keys: - match = re.findall(table, string, re.IGNORECASE) - if match: - eligible_tables.append(match[0]) - if len(eligible_tables) > 1: - raise ValueError( - f"{table} is ambiguous since the following tables are available: {eligible_tables}" - ) - elif len(eligible_tables) == 0: - raise ValueError(f"No eligible available table in {keys}") - else: - table = eligible_tables[0] - try: - df = store.select(table) - except KeyError: - log.error(f"No table {table} in the file {self.hdf5_file_path}") - log.error( - f"This could happen because your data were not builded yet. 
Available tables are: {store.keys()}" - ) - store.close() - raise - - store.close() - - elif self.parquet_file_path is not None: - if table is None: - raise Exception("A table name is needed to retrieve data from a parquet file") - for table_name, table_content in self.tables.items(): - if table == table_name: - parquet_file = table_content.get("parquet_file") - # Is parquet_file a folder or a file? - if Path(parquet_file).is_dir(): - # find first parquet file in folder - for file in Path(parquet_file).iterdir(): - if file.suffix == ".parquet": - one_parquet_file = str(Path(parquet_file) / file) - break - else: - raise Exception(f"No parquet file found in {parquet_file}") - else: - one_parquet_file = parquet_file - parquet_schema = pq.read_schema(one_parquet_file) - assert len(parquet_schema.names) >= 1, ( - f"The parquet file {table_content.get('parquet_file')} is empty" - ) - if variables is None: - variables = table_content.get("variables") - if filter_by: - df = pq.ParquetDataset(parquet_file, filters=filter_by).read(columns=variables).to_pandas() - elif batch_size: - if Path(parquet_file).is_dir(): - parquet_file = [str(p) for p in Path(parquet_file).glob("*.parquet")] - else: - parquet_file = [parquet_file] - # Initialize an empty list to store the Parquet tables - tables = [] - # Loop through the file paths and read each Parquet file - for file_path in parquet_file: - table = pq.read_table(file_path, columns=variables) - tables.append(table) - - # Concatenate the tables if needed - final_table = pa.concat_tables(tables) if len(tables) > 1 else tables[0] - record_batches = final_table.to_batches(max_chunksize=batch_size) - if len(record_batches) <= batch_index: - raise NoMoreDataError( - f"Batch {batch_index} not found in {table_name}. 
Max index is {len(record_batches)}" - ) - df = record_batches[batch_index].to_pandas() - # iter_parquet = parquet_file.iter_batches(batch_size=batch_size, columns=variables) - # index = 0 - # while True: - # try: - # batch = next(iter_parquet) - # except StopIteration: - # raise NoMoreDataError(f"Batch {batch_index} not found in {table_name}. Max index is {index}") - # break - # if batch_index == index: - # df = batch.to_pandas() - # break - # index += 1 - else: - df = pq.ParquetDataset(parquet_file).read(columns=variables).to_pandas() - break - else: - raise Exception(f"No table {table} found in {self.parquet_file_path}") - - if lowercase: - columns = {column_name: column_name.lower() for column_name in df} - df.rename(columns=columns, inplace=True) - - if rename_ident is True: - for column_name in df: - if ident_re.match(str(column_name)) is not None: - df.rename(columns={column_name: "ident"}, inplace=True) - log.info(f"{column_name} column have been replaced by ident") - break - - if variables is None: - return df - else: - diff = set(variables) - set(df.columns) - if diff: - raise Exception(f"The following variable(s) {diff} are missing") - variables = list(set(variables).intersection(df.columns)) - df = df[variables] - return df - - def insert_table(self, label=None, name=None, **kwargs): - """Insert a table in the Survey object. 
- - If a pandas dataframe is provided, it is saved in the store file - """ - parquet_file = kwargs.pop("parquet_file", None) - data_frame = kwargs.pop("data_frame", None) - if data_frame is None: - # Try without underscore - data_frame = kwargs.pop("dataframe", None) - - if data_frame is not None: - assert isinstance(data_frame, pandas.DataFrame) - variables = kwargs.pop("variables", None) - if variables is not None: - assert set(variables) < set(data_frame.columns) - else: - variables = list(data_frame.columns) - if label is None: - label = name - table = Table(label=label, name=name, survey=self, variables=variables, parquet_file=parquet_file) - assert (table.survey.hdf5_file_path is not None) or (table.survey.parquet_file_path is not None) - if parquet_file is not None: - log.debug(f"Saving table {name} in {table.survey.parquet_file_path}") - data_frame.to_parquet(parquet_file) - else: - log.debug(f"Saving table {name} in {table.survey.hdf5_file_path}") - to_hdf_kwargs = kwargs.pop("to_hdf_kwargs", {}) - table.save_data_frame_to_hdf5(data_frame, **to_hdf_kwargs) - - if name not in self.tables: - self.tables[name] = {} - for key, val in kwargs.items(): - self.tables[name][key] = val - - def to_json(self): - """Convert the survey to a JSON object.""" - self_json = collections.OrderedDict(()) - self_json["hdf5_file_path"] = self.hdf5_file_path - self_json["parquet_file_path"] = self.parquet_file_path - self_json["label"] = self.label - self_json["name"] = self.name - self_json["tables"] = self.tables - self_json["informations"] = collections.OrderedDict(sorted(self.informations.items())) - return self_json diff --git a/openfisca_survey_manager/tables.py b/openfisca_survey_manager/tables.py deleted file mode 100644 index 0f89daa5..00000000 --- a/openfisca_survey_manager/tables.py +++ /dev/null @@ -1,316 +0,0 @@ -"""Tables.""" - -import collections -import csv -import datetime -import errno -import gc -import logging -import os -from pathlib import Path - -import 
pandas -from chardet.universaldetector import UniversalDetector -from pyarrow import parquet as pq - -from openfisca_survey_manager import read_sas - -try: - from openfisca_survey_manager.read_spss import read_spss -except ImportError: - read_spss = None - - -log = logging.getLogger(__name__) - - -reader_by_source_format = { - # Rdata = pandas.rpy.common.load_data, - "csv": pandas.read_csv, - "sas": read_sas.read_sas, - "spss": read_spss, - "stata": pandas.read_stata, - "parquet": pandas.read_parquet, -} - - -class Table(object): - """A table of a survey.""" - - label = None - name = None - source_format = None - survey = None - variables = None - parquet_file = None - - def __init__( - self, survey=None, name=None, label=None, source_format=None, variables=None, parquet_file=None, **kwargs - ): - assert name is not None, "A table should have a name" - self.name = name - self.label = label - self.source_format = source_format - self.variables = variables - self.parquet_file = parquet_file - self.informations = kwargs - - from .surveys import Survey # Keep it here to avoid infinite recursion - - assert isinstance(survey, Survey), f"survey is of type {type(survey)} and not {Survey}" - self.survey = survey - if not survey.tables: - survey.tables = collections.OrderedDict() - - survey.tables[name] = collections.OrderedDict( - source_format=source_format, - variables=variables, - parquet_file=parquet_file, - ) - - def _check_and_log(self, data_file_path, store_file_path): - """ - Check if the file exists and log the insertion. 
- - Args: - data_file_path: Data file path - store_file_path: Store file or dir path - - Raises: - Exception: File not found - """ - assert store_file_path is not None, "Store file path cannot be None" - if not Path(data_file_path).is_file(): - raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), data_file_path) - - log.info( - f"Inserting table {self.name} from file {data_file_path} in store file {store_file_path} at point {self.name}" - ) - - def _is_stored(self): - if self.survey.hdf5_file_path is not None: - store = pandas.HDFStore(self.survey.hdf5_file_path) - if self.name in store: - log.info(f"Exiting without overwriting {self.name} in {self.survey.hdf5_file_path}") - store.close() - return True - - store.close() - return False - else: - return False - - def _save(self, data_frame: pandas.DataFrame = None, store_format="hdf5"): - """ - Save a data frame in the store according to is format (HDF5 or Parque). - """ - assert data_frame is not None - variables = self.variables - - if variables: - stored_variables = list(set(variables).intersection(set(data_frame.columns))) - log.info(f"The folloging variables are stored: {stored_variables}") - if set(stored_variables) != set(variables): - log.info( - f"variables wanted by the user that were not available: {list(set(variables) - set(stored_variables))}" - ) - data_frame = data_frame[stored_variables].copy() - - assert store_format in ["hdf5", "parquet"], f"invalid store_format: {store_format}" - if store_format == "hdf5": - self.save_data_frame_to_hdf5(data_frame) - else: - parquet_file_path = self.survey.parquet_file_path - log.info(f"Inserting table {self.name} in Parquet file {parquet_file_path}") - self.save_data_frame_to_parquet(data_frame) - gc.collect() - - def fill_store(self, data_file, overwrite: bool = False, clean: bool = False, **kwargs): - """ - Fill the store (HDF5 or parquet file) with the table. - Read the `data_file` in parameter and save it to the store. 
- - Args: - data_file (_type_, optional): The data file path. Defaults to None. - overwrite (bool, optional): Overwrite the data. Defaults to False. - clean (bool, optional): Clean the raw data befoe saving. Defaults to False. - store_format (str, optional): _description_. Defaults to "hdf5". - - Raises: - e: Skip file if error - """ - if not overwrite and self._is_stored(): - log.info(f"Exiting without overwriting {self.name} in {self.survey.hdf5_file_path}") - return - - start_table_time = datetime.datetime.now() - if self.source_format in ["sas", "parquet"]: - del kwargs["encoding"] - data_frame = self.read_source(data_file, **kwargs) - try: - if clean: - clean_data_frame(data_frame) - self._save(data_frame=data_frame, store_format=self.survey.store_format) - log.info(f"File {data_file} has been processed in {datetime.datetime.now() - start_table_time}") - except Exception as e: - log.info(f"Skipping file {data_file} because of following error \n {e}") - raise e - - def read_parquet_columns(self, parquet_file=None) -> list: - """ - Initialize the table from a parquet file. 
- """ - if parquet_file is None: - parquet_file = self.parquet_file - log.info(f"Initializing table {self.name} from parquet file {parquet_file}") - self.source_format = "parquet" - parquet_schema = pq.read_schema(parquet_file) - self.variables = parquet_schema.names - self.survey.tables[self.name]["variables"] = self.variables - return self.variables - - def read_source(self, data_file, **kwargs): - source_format = self.source_format - store_file_path = ( - self.survey.hdf5_file_path if self.survey.store_format == "hdf5" else self.survey.parquet_file_path - ) - - self._check_and_log(data_file, store_file_path=store_file_path) - reader = reader_by_source_format[source_format] - try: - if source_format == "csv": - try: - data_frame = reader(data_file, **kwargs) - - if len(data_frame.columns) == 1 and ";" in len(data_frame.columns[0]): - raise ValueError( - "A ';' is present in the unique column name. Looks like we got the wrong separator." - ) - - except Exception: - log.debug(f"Failing to read {data_file}, Trying to infer encoding and dialect/separator") - - # Detect encoding - detector = UniversalDetector() - with Path(data_file).open("rb") as csvfile: - for line in csvfile: - detector.feed(line) - if detector.done: - break - detector.close() - - encoding = detector.result["encoding"] - confidence = detector.result["confidence"] - - # Sniff dialect - try: - with Path(data_file).open("r", newline="", encoding=encoding) as csvfile: - dialect = csv.Sniffer().sniff(csvfile.read(1024), delimiters=";,") - except Exception: - # Sometimes the sniffer fails, we switch back to the default ... 
of french statistical data - dialect = None - delimiter = ";" - - log.debug( - f"dialect.delimiter = {dialect.delimiter if dialect is not None else delimiter}, encoding = {encoding}, confidence = {confidence}" - ) - kwargs["engine"] = "python" - if dialect: - kwargs["dialect"] = dialect - else: - kwargs["delimiter"] = delimiter - kwargs["encoding"] = encoding - data_frame = reader(data_file, **kwargs) - - else: - data_frame = reader(data_file, **kwargs) - - except Exception as e: - log.info(f"Error while reading {data_file}") - raise e - - gc.collect() - return data_frame - - def save_data_frame_to_hdf5(self, data_frame, **kwargs): - """Save a data frame in the HDF5 file format.""" - hdf5_file_path = self.survey.hdf5_file_path - log.info(f"Inserting table {self.name} in HDF file {hdf5_file_path}") - store_path = self.name - try: - data_frame.to_hdf(hdf5_file_path, store_path, append=False, **kwargs) - except (TypeError, NotImplementedError): - log.info(f"Type problem(s) when creating {store_path} in {hdf5_file_path}") - dtypes = data_frame.dtypes - # Checking for strings - converted_dtypes = dtypes.isin(["mixed", "unicode"]) - if converted_dtypes.any(): - log.info(f"The following types are converted to strings \n {dtypes[converted_dtypes]}") - # Conversion to strings - for column in dtypes[converted_dtypes].index: - data_frame[column] = data_frame[column].copy().astype(str) - - # Checking for remaining categories - dtypes = data_frame.dtypes - converted_dtypes = dtypes.isin(["category"]) - if not converted_dtypes.empty: # With category table format is needed - log.info( - f"The following types are added as category using the table format\n {dtypes[converted_dtypes]}" - ) - data_frame.to_hdf(hdf5_file_path, store_path, append=False, format="table", **kwargs) - - self.variables = list(data_frame.columns) - - def save_data_frame_to_parquet(self, data_frame): - """Save a data frame in the Parquet file format.""" - parquet_file_path = self.survey.parquet_file_path - - 
if not Path(parquet_file_path).is_dir(): - log.warn( - f"{parquet_file_path} where to store table {self.name} data does not exist: we create the directory" - ) - Path(parquet_file_path).mkdir(parents=True) - self.parquet_file = parquet_file_path + "/" + self.name - data_frame.to_parquet(self.parquet_file) - self.variables = list(data_frame.columns) - - self.survey.tables[self.name]["parquet_file"] = self.parquet_file - self.survey.tables[self.name]["variables"] = self.variables - - -def clean_data_frame(data_frame): - """Clean a data frame. - - The following steps are executed: - - drop empty columns - - replace empty strings with zeros - - convert string columns to integers - """ - data_frame.columns = data_frame.columns.str.lower() - object_column_names = list(data_frame.select_dtypes(include=["object"]).columns) - log.info(f"The following variables are to be cleaned or left as strings : \n {object_column_names}") - for column_name in object_column_names: - if data_frame[column_name].isnull().all(): # - log.info(f"Drop empty column {column_name}") - data_frame.drop(column_name, axis=1, inplace=True) - continue - - values = [str(value) for value in data_frame[column_name].value_counts()] - empty_string_present = "" in values - if empty_string_present: - values.remove("") - all_digits = all(value.strip().isdigit() for value in values) - no_zero = all(value != 0 for value in values) - if all_digits and no_zero: - log.info(f"Replacing empty string with zero for variable {column_name}") - data_frame.replace( - to_replace={column_name: {"": 0}}, - inplace=True, - ) - log.info(f"Converting string variable {column_name} to integer") - try: - data_frame[column_name] = data_frame[column_name].astype("int") - except (OverflowError, ValueError): - log.info( - f"OverflowError when converting {column_name} to int. 
Keeping as {data_frame[column_name].dtype}" - ) diff --git a/openfisca_survey_manager/temporary.py b/openfisca_survey_manager/temporary.py deleted file mode 100644 index bb0717b4..00000000 --- a/openfisca_survey_manager/temporary.py +++ /dev/null @@ -1,90 +0,0 @@ -import gc -import logging -from pathlib import Path -from configparser import ConfigParser - -from pandas import HDFStore - -from openfisca_survey_manager.paths import default_config_files_directory - -log = logging.getLogger(__name__) - - -temporary_store_by_file_path = {} - - -def temporary_store_decorator(config_files_directory=default_config_files_directory, file_name=None): - parser = ConfigParser() - config_ini = str(Path(config_files_directory) / "config.ini") - assert Path(config_ini).exists(), "{} is not a valid path".format(config_ini) - read_config_file_name = parser.read([config_ini]) - tmp_directory = parser.get("data", "tmp_directory") - assert tmp_directory is not None, "tmp_directory is not set: {!r} in {}".format( - tmp_directory, read_config_file_name - ) - assert Path(tmp_directory).is_absolute(), "tmp_directory should be an absolut path: {!r} in {}".format( - tmp_directory, read_config_file_name - ) - if not Path(tmp_directory).is_dir(): - log.info("tmp_directory does not exist: {!r} in {}. 
Creating it.".format(tmp_directory, read_config_file_name)) - Path(tmp_directory).mkdir(parents=True) - - assert file_name is not None - if not file_name.endswith(".h5"): - file_name = "{}.h5".format(file_name) - file_path = str(Path(tmp_directory) / file_name) - - def actual_decorator(func): - def func_wrapper(*args, **kwargs): - just_openned = False - temporary_store = temporary_store_by_file_path.get(file_path) - if temporary_store is None: - temporary_store = HDFStore(file_path) - temporary_store_by_file_path[file_path] = temporary_store - just_openned = True - - try: - return func(*args, temporary_store=temporary_store, **kwargs) - except Exception as e: - raise e - finally: - gc.collect() - if just_openned: - temporary_store.close() - del temporary_store_by_file_path[file_path] - - return func_wrapper - - return actual_decorator - - -def get_store(config_files_directory=default_config_files_directory, file_name=None): - parser = ConfigParser() - config_ini = str(Path(config_files_directory) / "config.ini") - parser.read(config_ini) - tmp_directory = parser.get("data", "tmp_directory") - assert file_name is not None - if not file_name.endswith(".h5"): - file_name = "{}.h5".format(file_name) - file_path = str(Path(tmp_directory) / file_name) - return HDFStore(file_path) - - -def save_hdf_r_readable( - data_frame, config_files_directory=default_config_files_directory, file_name=None, file_path=None -): - if file_path is None: - parser = ConfigParser() - config_ini = str(Path(config_files_directory) / "config.ini") - parser.read(config_ini) - tmp_directory = parser.get("data", "tmp_directory") - if file_name is not None: - if not file_name.endswith(".h5"): - file_name = "{}.h5".format(file_name) - file_path = str(Path(tmp_directory) / file_name) - else: - file_path = str(Path(tmp_directory) / "temp.h5") - - store = HDFStore(file_path, "w", complib=str("zlib"), complevel=5) - store.put("dataframe", data_frame, data_columns=data_frame.columns) - store.close() diff 
--git a/openfisca_survey_manager/tests/__init__.py b/openfisca_survey_manager/tests/__init__.py deleted file mode 100644 index 24069b5e..00000000 --- a/openfisca_survey_manager/tests/__init__.py +++ /dev/null @@ -1,45 +0,0 @@ -from openfisca_core.model_api import ETERNITY, Reform, Variable # noqa analysis:ignore -from openfisca_country_template import CountryTaxBenefitSystem -from openfisca_country_template.entities import Household, Person - - -class Plugin(Reform): - def apply(self): - class person_weight(Variable): - is_period_size_independent = True - value_type = float - entity = Person - label = "Person weight" - definition_period = ETERNITY - - def formula(person, period): - return person.household("household_weight", period) - - class household_weight(Variable): - is_period_size_independent = True - value_type = float - entity = Household - label = "Household weight" - definition_period = ETERNITY - - class household_id(Variable): - is_period_size_independent = True - value_type = float - entity = Household - label = "Household id" - definition_period = ETERNITY - - class household_id_ind(Variable): - is_period_size_independent = True - value_type = float - entity = Person - label = "Household id of person" - definition_period = ETERNITY - - self.add_variable(person_weight) - self.add_variable(household_weight) - self.add_variable(household_id) - self.add_variable(household_id_ind) - - -tax_benefit_system = Plugin(CountryTaxBenefitSystem()) diff --git a/openfisca_survey_manager/tests/data_files/__init__.py b/openfisca_survey_manager/tests/data_files/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/openfisca_survey_manager/tests/data_files/config_template.ini b/openfisca_survey_manager/tests/data_files/config_template.ini deleted file mode 100644 index 92d24e40..00000000 --- a/openfisca_survey_manager/tests/data_files/config_template.ini +++ /dev/null @@ -1,10 +0,0 @@ -[collections] -collections_directory = 
{location}/openfisca_survey_manager/tests/data_files -fake = {location}/openfisca_survey_manager/tests/data_files/fake.json -test_parquet_collection = {location}/openfisca_survey_manager/tests/data_files/test_parquet_collection.json -test_random_generator = {location}/openfisca_survey_manager/tests/data_files/test_random_generator.json - -[data] -input_directory = {location}/openfisca_survey_manager/tests/data_files -output_directory = {location}/openfisca_survey_manager/tests/data_files -tmp_directory = {location}/openfisca_survey_manager/tests/data_files diff --git a/openfisca_survey_manager/tests/data_files/help.sas7bdat b/openfisca_survey_manager/tests/data_files/help.sas7bdat deleted file mode 100644 index d58ecf11..00000000 Binary files a/openfisca_survey_manager/tests/data_files/help.sas7bdat and /dev/null differ diff --git a/openfisca_survey_manager/tests/test_add_survey_to_collection.py b/openfisca_survey_manager/tests/test_add_survey_to_collection.py deleted file mode 100644 index f4f71894..00000000 --- a/openfisca_survey_manager/tests/test_add_survey_to_collection.py +++ /dev/null @@ -1,143 +0,0 @@ -import os - -import pandas as pd -import pytest - -from openfisca_survey_manager.input_dataframe_generator import set_table_in_survey -from openfisca_survey_manager.paths import openfisca_survey_manager_location -from openfisca_survey_manager.scripts.build_collection import add_survey_to_collection -from openfisca_survey_manager.survey_collections import SurveyCollection - - -@pytest.mark.order(after="test_write_parquet.py::TestWriteParquet::test_write_parquet_one_file_per_entity") -def test_add_survey_to_collection_parquet(): - name = "fake" - survey_name = "test_parquet" - data_dir = os.path.join( - openfisca_survey_manager_location, - "openfisca_survey_manager", - "tests", - "data_files", - ) - survey_collection = SurveyCollection(name=name) - saved_fake_survey_file_path = os.path.join(data_dir, "test.parquet") - add_survey_to_collection( - 
survey_name=survey_name, - survey_collection=survey_collection, - parquet_files=[saved_fake_survey_file_path], - ) - ordered_dict = survey_collection.to_json() - assert survey_name in list(ordered_dict["surveys"].keys()) - - -@pytest.mark.order(after="test_write_parquet.py::TestWriteParquet::test_write_parquet_one_file_per_entity") -def test_set_table_in_survey_parquet(): - data_dir = os.path.join( - openfisca_survey_manager_location, - "openfisca_survey_manager/tests/data_files", - ) - filepath = os.path.join(data_dir, "test_parquet_collection", "household.parquet") - input_dataframe = pd.read_parquet(filepath) - survey_name = "test_parquet" - collection = "fake" - set_table_in_survey( - input_dataframe, - entity="foyer", - period="2023", - collection=collection, - survey_name=survey_name, - config_files_directory=data_dir, - ) - - # Read survey - survey_collection = SurveyCollection.load(config_files_directory=data_dir, collection=collection) - survey = survey_collection.get_survey(survey_name) - table = survey.get_values(table="foyer_2023", ignorecase=True) - assert len(table) == 4 - assert (table.columns == ["household_id", "rent", "household_weight", "accommodation_size"]).all() - assert table.household_weight.sum() == 2950 - - -def test_add_survey_to_collection(): - name = "fake" - survey_name = "fake_survey" - data_dir = os.path.join( - openfisca_survey_manager_location, - "openfisca_survey_manager", - "tests", - "data_files", - ) - survey_collection = SurveyCollection(name=name) - saved_fake_survey_file_path = os.path.join(data_dir, "help.sas7bdat") - add_survey_to_collection( - survey_name=survey_name, - survey_collection=survey_collection, - sas_files=[saved_fake_survey_file_path], - stata_files=[], - ) - ordered_dict = survey_collection.to_json() - assert survey_name in list(ordered_dict["surveys"].keys()) - - -@pytest.mark.order(after="test_surveys.py::test_survey") -def test_set_table_in_survey_first_year(): - data_dir = os.path.join( - 
openfisca_survey_manager_location, - "openfisca_survey_manager/tests/data_files", - ) - input_dataframe = pd.DataFrame({"rfr": [1_000, 2_000, 100_000]}) - survey_name = "test_set_table_in_survey_2020" - collection = "fake" - set_table_in_survey( - input_dataframe, - entity="foyer", - period="2020", - collection=collection, - survey_name=survey_name, - config_files_directory=data_dir, - ) - - # Read survey - survey_collection = SurveyCollection.load(config_files_directory=data_dir, collection=collection) - survey = survey_collection.get_survey(survey_name) - table = survey.get_values(table="foyer_2020", ignorecase=True) - assert len(table) == 3 - assert table.columns == ["rfr"] - assert table.rfr.sum() == 103000 - - -@pytest.mark.order(after="test_set_table_in_survey_first_year") -def test_set_table_in_survey_second_year(): - data_dir = os.path.join( - openfisca_survey_manager_location, - "openfisca_survey_manager/tests/data_files", - ) - input_dataframe = pd.DataFrame({"rfr": [1_021, 2_021, 100_021]}) - survey_name = "test_set_table_in_survey_2021" - collection = "fake" - set_table_in_survey( - input_dataframe, - entity="foyer", - period="2021", - collection=collection, - survey_name=survey_name, - config_files_directory=data_dir, - ) - - # Read first survey - survey_name = "test_set_table_in_survey_2020" - survey_collection = SurveyCollection.load(config_files_directory=data_dir, collection=collection) - survey = survey_collection.get_survey(survey_name) - table = survey.get_values(table="foyer_2020", ignorecase=True) - assert len(table) == 3 - assert table.columns == ["rfr"] - assert table.rfr.sum() == 103000 - - # Read second survey - survey_name = "test_set_table_in_survey_2021" - survey_collection = SurveyCollection.load(config_files_directory=data_dir, collection=collection) - survey = survey_collection.get_survey(survey_name) - table = survey.get_values(table="foyer_2021", ignorecase=True) - assert len(table) == 3 - assert table.columns == ["rfr"] - assert 
table.rfr.sum() == 103063 diff --git a/openfisca_survey_manager/tests/test_aggregates.py b/openfisca_survey_manager/tests/test_aggregates.py deleted file mode 100644 index 5a88674e..00000000 --- a/openfisca_survey_manager/tests/test_aggregates.py +++ /dev/null @@ -1,50 +0,0 @@ -import pytest -from openfisca_country_template.reforms.modify_social_security_taxation import modify_social_security_taxation - -from openfisca_survey_manager.aggregates import AbstractAggregates -from openfisca_survey_manager.tests.test_scenario import create_randomly_initialized_survey_scenario - - -@pytest.fixture -def aggregates_test_setup(): - survey_scenario = create_randomly_initialized_survey_scenario(reform=modify_social_security_taxation) - aggregates = AbstractAggregates(survey_scenario=survey_scenario) - aggregates.amount_unit = 1.0 - aggregates.beneficiaries_unit = 1.0 - return survey_scenario, aggregates - - -def test_aggregates(aggregates_test_setup): - survey_scenario, aggregates = aggregates_test_setup - period = "2017-01" - variables = ["social_security_contribution", "salary"] - aggregates.aggregate_variables = variables - - df = aggregates.compute_aggregates(reform=True, actual=False) - - for variable in variables: - aggregate_before = survey_scenario.compute_aggregate(variable, period=period, use_baseline=True) - aggregate_after = survey_scenario.compute_aggregate(variable, period=period) - assert df.loc[variable, "baseline_amount"] == int(aggregate_before) - assert df.loc[variable, "reform_amount"] == int(aggregate_after) - - -def test_aggregates_winners_losers(aggregates_test_setup): - survey_scenario, aggregates = aggregates_test_setup - period = "2017-01" - variable = "social_security_contribution" - aggregates.aggregate_variables = [variable] - - df = aggregates.get_data_frame(target="reform", default="baseline") - - assert "Gagnants" in df.columns - assert "Perdants" in df.columns - assert "Neutres" in df.columns - - stats = 
survey_scenario.simulations["reform"].compute_winners_losers( - baseline_simulation=survey_scenario.simulations["baseline"], variable=variable, period=period - ) - - assert df.loc[0, "Gagnants"] == str(int(round(stats["above_after"]))) - assert df.loc[0, "Perdants"] == str(int(round(stats["lower_after"]))) - assert df.loc[0, "Neutres"] == str(int(round(stats["neutral"]))) diff --git a/openfisca_survey_manager/tests/test_calibration.py b/openfisca_survey_manager/tests/test_calibration.py deleted file mode 100644 index 3e9a4048..00000000 --- a/openfisca_survey_manager/tests/test_calibration.py +++ /dev/null @@ -1,161 +0,0 @@ -from openfisca_core import periods -from openfisca_core.tools import assert_near - -from openfisca_survey_manager.calibration import Calibration -from openfisca_survey_manager.paths import default_config_files_directory -from openfisca_survey_manager.scenarios.abstract_scenario import AbstractSurveyScenario -from openfisca_survey_manager.tests import tax_benefit_system -from openfisca_survey_manager.tests.test_scenario import ( - create_randomly_initialized_survey_scenario, - generate_input_input_dataframe_by_entity, -) - - -def test_calibration_variable_entity_is_weight_entity(): - survey_scenario = create_randomly_initialized_survey_scenario(collection=None) - period = "2017-01" - survey_scenario.period = period - person_weight_before = survey_scenario.calculate_series("person_weight", period) - # 'initial_rent_aggregate' is assigned to but never used initial_rent_aggregate = survey_scenario.compute_aggregate("rent", period = period) - target_rent_aggregate = 200000 - - survey_scenario.calibrate( - period, - target_margins_by_variable={ - "rent": target_rent_aggregate, - }, - parameters={"method": "raking ratio"}, - ) - - for _, simulation in survey_scenario.simulations.items(): - assert all(simulation.calibration.weight != simulation.calibration.initial_weight) - - assert_near(survey_scenario.compute_aggregate("rent", period=period), 
target_rent_aggregate) - - # See if propagation to derived weights is done well - person_weight_after = survey_scenario.calculate_series("person_weight", period) - assert (person_weight_after != person_weight_before).all() - - -def test_calibration_variable_entity_is_not_weight_entity(): - survey_scenario = create_randomly_initialized_survey_scenario(collection=None) - period = "2017-01" - survey_scenario.period = period - target_rent_aggregate = 200000 - target_salary_aggregate = 1e7 - - survey_scenario.calibrate( - period, - target_margins_by_variable={"rent": target_rent_aggregate, "salary": target_salary_aggregate}, - parameters={"method": "raking ratio", "id_variable": "household_id", "id_variable_link": "household_id_ind"}, - other_entity_count=700, - target_entity_count=300, - ) - assert_near( - survey_scenario.compute_aggregate("rent", period=period), target_rent_aggregate, relative_error_margin=0.1 - ) - assert_near( - survey_scenario.compute_aggregate("salary", period=period), target_salary_aggregate, relative_error_margin=0.1 - ) - dataframe_by_entity = survey_scenario.create_data_frame_by_entity( - ["household_weight", "person_weight"], merge=False - ) - assert_near(sum(dataframe_by_entity["household"]["household_weight"]), 300, relative_error_margin=0.1) - assert_near(sum(dataframe_by_entity["person"]["person_weight"]), 700, relative_error_margin=0.1) - - -def test_simulation_calibration_variable_entity_is_weight_entity(): - survey_scenario = create_randomly_initialized_survey_scenario(collection=None) - period = "2017-01" - survey_scenario.period = period - simulation = list(survey_scenario.simulations.values())[0] - person_weight_before = simulation.calculate("person_weight", period) - - # initial_rent_aggregate = simulation.compute_aggregate("rent", period = period) - target_rent_aggregate = 200000 - - calibration = Calibration( - simulation, - period="2017-01", - target_margins={ - "rent": target_rent_aggregate, - }, - target_entity_count=300, - 
parameters={"method": "raking ratio"}, - ) - - calibration.calibrate(inplace=True) - assert all(calibration.weight != calibration.initial_weight) - - assert_near(simulation.compute_aggregate("rent", period=period), target_rent_aggregate) - - # See if propagation to derived weights is done well - person_weight_after = simulation.calculate("person_weight", period) - assert all(person_weight_after != person_weight_before) - assert calibration.initial_entity_count != calibration.target_entity_count - assert simulation.calculate("household_weight", period).sum() == calibration.target_entity_count - - -def test_simulation_calibration_variable_entity_is_weight_entity_with_hyperbolic_sinus(): - survey_scenario = create_randomly_initialized_survey_scenario(collection=None) - period = "2017-01" - survey_scenario.period = period - simulation = list(survey_scenario.simulations.values())[0] - person_weight_before = simulation.calculate("person_weight", period) - - # initial_rent_aggregate = simulation.compute_aggregate("rent", period = period) - target_rent_aggregate = 200000 - - calibration = Calibration( - simulation, - period="2017-01", - target_margins={ - "rent": target_rent_aggregate, - }, - target_entity_count=300, - parameters={"method": "hyperbolic sinus", "alpha": 1.2}, - ) - - calibration.calibrate(inplace=True) - assert all(calibration.weight != calibration.initial_weight) - - assert_near(simulation.compute_aggregate("rent", period=period), target_rent_aggregate) - - # See if propagation to derived weights is done well - person_weight_after = simulation.calculate("person_weight", period) - assert all(person_weight_after != person_weight_before) - assert calibration.initial_entity_count != calibration.target_entity_count - assert simulation.calculate("household_weight", period).sum() == calibration.target_entity_count - - -def test_simulation_calibration_input_from_data(): - input_data_frame_by_entity = generate_input_input_dataframe_by_entity(10, 5, 5000, 1000) - 
survey_scenario = AbstractSurveyScenario() - weight_variable_by_entity = { - "person": "person_weight", - "household": "household_weight", - } - survey_scenario.set_tax_benefit_systems({"baseline": tax_benefit_system}) - survey_scenario.period = "2017-01" - survey_scenario.used_as_input_variables = ["salary", "rent", "household_weight"] - period = periods.period("2017-01") - target_rent_aggregate = 200000 - - data = { - "input_data_frame_by_entity_by_period": {period: input_data_frame_by_entity}, - "config_files_directory": default_config_files_directory, - } - calibration_kwargs = { - "target_margins_by_variable": {"rent": target_rent_aggregate}, - "target_entity_count": 300, - "parameters": {"method": "logit", "up": 4, "invlo": 4}, - } - survey_scenario.set_weight_variable_by_entity(weight_variable_by_entity) - assert survey_scenario.weight_variable_by_entity == weight_variable_by_entity - survey_scenario.init_from_data(data=data, calibration_kwargs=calibration_kwargs) - for simulation_name, simulation in survey_scenario.simulations.items(): - assert simulation.weight_variable_by_entity == weight_variable_by_entity, ( - f"{simulation_name} weight_variable_by_entity does not match {weight_variable_by_entity}" - ) - assert (survey_scenario.calculate_series("household_weight", period, simulation=simulation_name) != 0).all() - return survey_scenario diff --git a/openfisca_survey_manager/tests/test_calmar.py b/openfisca_survey_manager/tests/test_calmar.py deleted file mode 100644 index 7f4e2cba..00000000 --- a/openfisca_survey_manager/tests/test_calmar.py +++ /dev/null @@ -1,178 +0,0 @@ -""" -Example from page 23 of calmar documentation -https://www.insee.fr/fr/statistiques/fichier/2021902/doccalmar.pdf -""" - -import numpy as np -import pandas as pd - -from openfisca_survey_manager.calmar import calmar - - -def create_input_dataframe(entities=1): - columns = ["X", "Y", "Z", "POND", "id_variable"] - index = [ - "A", - "B", - "C", - "D", - "E", - "F", - "G", - "H", - 
"I", - "J", - "K", - ] - df1 = pd.DataFrame(columns=columns, index=index) - values_by_index = { - "A": [1, 1, 1, 10, "A"], - "B": [1, 2, 2, 0, "B"], - "C": [1, 2, 3, np.nan, "C"], - "D": [2, 1, 1, 11, "D"], - "E": [2, 1, 3, 13, "E"], - "F": [2, 2, 2, 7, "F"], - "G": [2, 2, 2, 8, "G"], - "H": [1, 2, 2, 8, "H"], - "I": [2, 1, 2, 9, "I"], - "J": [np.nan, 2, 2, 10, "J"], - "K": [2, 2, 2, 14, "K"], - } - for index, values in values_by_index.items(): - df1.loc[index] = pd.Series(dict(zip(columns, values))) - df1["Z"] = df1.Z.astype(float) - df1["X"] = df1.X.astype(float) - df1["POND"] = df1.POND.astype(float) - df = {"main_entity": df1, "target_entity_name": "main_entity"} - - if entities == 2: - columns2 = ["A", "B", "C", "D", "id_variable"] - index2 = [ - 0, - 1, - 2, - 3, - 4, - 5, - 6, - 7, - 8, - 9, - 10, - 11, - 12, - 13, - 14, - 15, - 16, - ] - df2 = pd.DataFrame(columns=columns2, index=index2) - values_by_index2 = { - 0: [1, 1, 1, 10, "A"], - 1: [1, 2, 0, 0, "A"], - 2: [1, 2, 3, np.nan, "B"], - 3: [2, 1, 1, 11, "C"], - 4: [2, 1, 3, 13, "C"], - 5: [2, 2, 2, 7, "C"], - 6: [2, 2, 5, 8, "D"], - 7: [1, 2, 2, 8, "E"], - 8: [2, 1, 2, 9, "E"], - 9: [np.nan, 2, 2, 10, "F"], - 10: [2, 2, 2, 14, "G"], - 11: [1, 2, 3, 7, "G"], - 12: [2, 2, 8, 7, "H"], - 13: [1, 1, 3, 7, "I"], - 14: [1, 2, 4, 7, "I"], - 15: [2, 1, 0, 7, "J"], - 16: [2, 2, 3, 7, "K"], - } - for index, values in values_by_index2.items(): - df2.loc[index] = pd.Series(dict(zip(columns2, values))) - - df = {"main_entity": df1, "target_entity_name": "main_entity", "second_entity": df2} - return df - - -def create_margins(entities=1): - margins_by_variable = { - "X": { - 1: 20, - 2: 60, - }, - "Y": { - 1: 30, - 2: 50, - }, - "Z": 140.0, - "Z > 0": 80, - "(Z > 0) * (X > 0)": 80, - } - if entities == 2: - margins_by_variable["C"] = 85 - margins_by_variable["total_population"] = 80 - margins_by_variable["total_population_smaller_entity"] = 120 - return margins_by_variable - - -def test_calmar(): - target_weight_ratio = 
pd.Series( - [ - 1.01683, - np.nan, - 1.22897, - np.nan, - 1.14602, - 0.49456, - 0.21342, - 1.38511, - 1.38511, - 1.38511, - 1.00000, - ] - ) - - data = create_input_dataframe(1) - margins_by_variable = create_margins(1) - pondfin_out, lambdasol, margins_new_dict = calmar( - data, margins_by_variable, method="raking ratio", initial_weight="POND" - ) - - data[data["target_entity_name"]]["weight_ratio"] = pondfin_out / data[data["target_entity_name"]].POND - weight_ratio = data[data["target_entity_name"]].sort_values(["X", "Y", "Z"])["weight_ratio"].round(5) - null_target_weight_ratio = target_weight_ratio.isnull() - - assert weight_ratio.loc[null_target_weight_ratio.values].isnull().all(), "Error on Nan" - - assert ( - target_weight_ratio.loc[~null_target_weight_ratio.values].values - == weight_ratio.loc[~null_target_weight_ratio.values].values - ).all(), "Errors on non NaN values" - - -def test_calmar_2_entities(): - data = create_input_dataframe(2) - margins_by_variable = create_margins(2) - pondfin_out, lambdasol, margins_new_dict = calmar( - data, - margins_by_variable, - method="raking ratio", - initial_weight="POND", - ) - pondfin_out[np.isnan(pondfin_out)] = 0 - - data["main_entity"]["final_pond"] = pondfin_out - pondfin_ind = data["main_entity"].merge(data["second_entity"], on="id_variable")["final_pond"] - - assert -3 < sum(pondfin_out) - 80 < 3 - assert -5 < sum(pondfin_ind) - 120 < 5 - - -if __name__ == "__main__": - import logging - import sys - - log = logging.getLogger(__name__) - verbose = True - logging.basicConfig(level=logging.DEBUG, stream=sys.stdout) - test_calmar() - test_calmar_2_entities() diff --git a/openfisca_survey_manager/tests/test_compute_aggregate.py b/openfisca_survey_manager/tests/test_compute_aggregate.py deleted file mode 100644 index 4073fbe5..00000000 --- a/openfisca_survey_manager/tests/test_compute_aggregate.py +++ /dev/null @@ -1,47 +0,0 @@ -from openfisca_country_template.reforms.modify_social_security_taxation import 
modify_social_security_taxation - -from openfisca_survey_manager.tests.test_scenario import create_randomly_initialized_survey_scenario - - -def test_compute_aggregate(): - survey_scenario = create_randomly_initialized_survey_scenario(reform=modify_social_security_taxation) - period = "2017-01" - variable = "social_security_contribution" - - aggregate_after = survey_scenario.compute_aggregate(variable, period=period) - aggregate_before = survey_scenario.compute_aggregate(variable, period=period, use_baseline=True) - - assert aggregate_after > aggregate_before - - survey_scenario.calculate_variable("social_security_contribution", period=period) - survey_scenario.calculate_variable("salary", period=period, use_baseline=True) - - assert ( - survey_scenario.compute_aggregate( - "social_security_contribution", - period=period, - filter_by="salary < 3000", - ) - == 0 - ) - - assert ( - survey_scenario.compute_aggregate( - "social_security_contribution", - period=period, - filter_by="3000 < salary < 10000", - ).astype(int) - == 34489 - ) - - del survey_scenario.weight_variable_by_entity - survey_scenario.set_weight_variable_by_entity() - assert ( - survey_scenario.compute_aggregate( - "social_security_contribution", - period=period, - filter_by="3000 < salary < 10000", - weighted=False, - ).astype(int) - == 576 - ) diff --git a/openfisca_survey_manager/tests/test_compute_pivot_table.py b/openfisca_survey_manager/tests/test_compute_pivot_table.py deleted file mode 100644 index 75706e21..00000000 --- a/openfisca_survey_manager/tests/test_compute_pivot_table.py +++ /dev/null @@ -1,24 +0,0 @@ -from openfisca_country_template.reforms.modify_social_security_taxation import modify_social_security_taxation - -from openfisca_survey_manager.tests.test_scenario import create_randomly_initialized_survey_scenario - - -def test_compute_pivot_table(): - survey_scenario = create_randomly_initialized_survey_scenario(reform=modify_social_security_taxation) - period = "2017-01" - - return 
survey_scenario.compute_pivot_table( - aggfunc="mean", - columns=["age"], - difference=False, - filter_by=None, - index=None, - period=period, - use_baseline=True, - use_baseline_for_columns=True, - values=["salary"], - missing_variable_default_value=0, - concat_axis=None, - weighted=True, - alternative_weights=None, - ) diff --git a/openfisca_survey_manager/tests/test_compute_winners_losers.py b/openfisca_survey_manager/tests/test_compute_winners_losers.py deleted file mode 100644 index b9a41a2d..00000000 --- a/openfisca_survey_manager/tests/test_compute_winners_losers.py +++ /dev/null @@ -1,113 +0,0 @@ -import pytest -from openfisca_country_template.reforms.modify_social_security_taxation import modify_social_security_taxation -from openfisca_survey_manager.tests.test_scenario import create_randomly_initialized_survey_scenario -from openfisca_survey_manager.simulations import SecretViolationError - - -def test_compute_winners_losers_basics(): - survey_scenario = create_randomly_initialized_survey_scenario() - del survey_scenario.weight_variable_by_entity - survey_scenario.set_weight_variable_by_entity() - period = survey_scenario.period - variable = "pension" - - simulation = survey_scenario.simulations["baseline"] - baseline_simulation = survey_scenario.simulations["baseline"] - - simulation.adaptative_calculate_variable(variable, period=period) - absolute_minimal_detected_variation = 1 - relative_minimal_detected_variation = 0.05 - observations_threshold = 1 - - winners_losers = simulation.compute_winners_losers( - baseline_simulation, - variable, - period=period, - absolute_minimal_detected_variation=absolute_minimal_detected_variation, - relative_minimal_detected_variation=relative_minimal_detected_variation, - observations_threshold=observations_threshold, - ) - assert winners_losers == { - "total": 10.0, - "non_zero_before": 0.0, - "non_zero_after": 0.0, - "above_after": 0.0, - "lower_after": 0.0, - "neutral": 10.0, - "tolerance_factor_used": 0.05, - 
"weight_factor": 1, - } - - -def test_compute_winners_losers(): - survey_scenario = create_randomly_initialized_survey_scenario(reform=modify_social_security_taxation) - del survey_scenario.weight_variable_by_entity - survey_scenario.set_weight_variable_by_entity() - period = survey_scenario.period - variable = "social_security_contribution" - - simulation = survey_scenario.simulations["reform"] - baseline_simulation = survey_scenario.simulations["baseline"] - - survey_scenario.absolute_minimal_detected_variation = 0.9 - survey_scenario.relative_minimal_detected_variation = 0.05 - survey_scenario.observations_threshold = 1 - - winners_losers_scenario = survey_scenario.compute_winners_losers( - variable, - period=period, - ) - - assert winners_losers_scenario == { - "total": 10.0, - "non_zero_before": 10.0, - "non_zero_after": 9.0, - "above_after": 9.0, - "lower_after": 1.0, - "neutral": 0.0, - "tolerance_factor_used": 0.05, - "weight_factor": 1, - } - - observations_threshold = 10 - - absolute_minimal_detected_variation = 0.9 - relative_minimal_detected_variation = 0.05 - - with pytest.raises(SecretViolationError): - simulation.compute_winners_losers( - baseline_simulation, - variable, - period=period, - absolute_minimal_detected_variation=absolute_minimal_detected_variation, - relative_minimal_detected_variation=relative_minimal_detected_variation, - observations_threshold=observations_threshold, - ) - - -def test_compute_winners_losers_with_new_attributes(): - survey_scenario = create_randomly_initialized_survey_scenario(reform=modify_social_security_taxation) - survey_scenario.absolute_minimal_detected_variation = 100 - survey_scenario.relative_minimal_detected_variation = 0.1 - survey_scenario.observations_threshold = 1 - - del survey_scenario.weight_variable_by_entity - survey_scenario.set_weight_variable_by_entity() - period = survey_scenario.period - variable = "social_security_contribution" - - winners_losers_scenario = 
survey_scenario.compute_winners_losers( - variable, - period=period, - ) - - assert winners_losers_scenario == { - "total": 10.0, - "non_zero_before": 9.0, - "non_zero_after": 9.0, - "above_after": 9.0, - "lower_after": 1.0, - "neutral": 0.0, - "tolerance_factor_used": 0.1, - "weight_factor": 1, - } diff --git a/openfisca_survey_manager/tests/test_create_data_frame_by_entity.py b/openfisca_survey_manager/tests/test_create_data_frame_by_entity.py deleted file mode 100644 index 16e0e3b0..00000000 --- a/openfisca_survey_manager/tests/test_create_data_frame_by_entity.py +++ /dev/null @@ -1,35 +0,0 @@ -import logging - -from openfisca_survey_manager.tests.test_scenario import create_randomly_initialized_survey_scenario - -log = logging.getLogger(__name__) - - -def test_create_data_frame_by_entity(): - survey_scenario = create_randomly_initialized_survey_scenario() - period = "2017-01" - df_by_entity = survey_scenario.create_data_frame_by_entity( - variables=["salary", "rent"], - period=period, - ) - salary = survey_scenario.calculate_variable("salary", period=period) - rent = survey_scenario.calculate_variable("rent", period=period) - for entity, df in df_by_entity.items(): - assert not df.empty, f"{entity} dataframe is empty" - assert (df_by_entity["person"]["salary"] == salary).all().all() - assert (df_by_entity["household"]["rent"] == rent).all().all() - - -def test_create_data_frame_by_entity_with_index(): - survey_scenario = create_randomly_initialized_survey_scenario() - period = "2017-01" - data_frame_by_entity = survey_scenario.create_data_frame_by_entity( - variables=["salary", "rent", "person_id"], period=period, index=True - ) - for entity, input_dataframe in data_frame_by_entity.items(): - print(f"{entity} for {period}") # noqa T201 - print(input_dataframe.columns) # noqa T201 - if entity == "person": - assert "person_id" in input_dataframe.columns.to_list() - if entity == "household": - assert "household_id" in input_dataframe.columns.to_list() diff --git 
a/openfisca_survey_manager/tests/test_enum.py b/openfisca_survey_manager/tests/test_enum.py deleted file mode 100644 index 8189acb8..00000000 --- a/openfisca_survey_manager/tests/test_enum.py +++ /dev/null @@ -1,26 +0,0 @@ -import pandas as pd -from openfisca_country_template.variables.housing import HousingOccupancyStatus - -from openfisca_survey_manager.scenarios.abstract_scenario import AbstractSurveyScenario -from openfisca_survey_manager.tests import tax_benefit_system - - -def test_generation(): - survey_scenario = AbstractSurveyScenario() - survey_scenario.set_tax_benefit_systems({"baseline": tax_benefit_system}) - survey_scenario.period = "2025-06" - survey_scenario.used_as_input_variables = ["housing_occupancy_status"] - - statuses = [HousingOccupancyStatus.free_lodger, HousingOccupancyStatus.tenant] - data = { - "input_data_frame": pd.DataFrame( - { - "housing_occupancy_status": pd.Series([v.name for v in statuses]), - "household_id": [0, 1], - "household_role_index": [0, 0], - } - ) - } - survey_scenario.init_from_data(data=data) - result = survey_scenario.calculate_variable("housing_occupancy_status", survey_scenario.period) - assert (result == pd.Series([v.index for v in statuses])).all() diff --git a/openfisca_survey_manager/tests/test_legislation_inflator.py b/openfisca_survey_manager/tests/test_legislation_inflator.py deleted file mode 100644 index a3aef506..00000000 --- a/openfisca_survey_manager/tests/test_legislation_inflator.py +++ /dev/null @@ -1,109 +0,0 @@ -from openfisca_core import periods -from openfisca_country_template import CountryTaxBenefitSystem - -from openfisca_survey_manager.utils import inflate_parameters, parameters_asof - - -def test_asof_simple_annual_parameter(): - """ - Test parameters_asof on a simple parameter - """ - tax_benefit_system = CountryTaxBenefitSystem() - parameters = tax_benefit_system.parameters - income_tax_rate_2014 = parameters.taxes.income_tax_rate(2014) - income_tax_rate_2015 = 
parameters.taxes.income_tax_rate(2015) - assert income_tax_rate_2015 != income_tax_rate_2014 - parameters_asof(parameters, instant="2014") - assert parameters.taxes.income_tax_rate(2014) == income_tax_rate_2014, "{} != {}".format( - parameters.taxes.income_tax_rate(2014), income_tax_rate_2014 - ) - assert parameters.taxes.income_tax_rate(2015) == income_tax_rate_2014, "{} != {}".format( - parameters.taxes.income_tax_rate(2015), income_tax_rate_2014 - ) - - -def test_asof_scale_parameters(): - """ - Test parameters_asof on a scale parameter - """ - tax_benefit_system = CountryTaxBenefitSystem() - parameters = tax_benefit_system.parameters - social_security_contribution_2016 = parameters.taxes.social_security_contribution(2016).thresholds[1] - social_security_contribution_2017 = parameters.taxes.social_security_contribution(2017).thresholds[1] - assert social_security_contribution_2016 != social_security_contribution_2017 - parameters_asof(parameters, instant="2016") - assert parameters.taxes.social_security_contribution(2016).thresholds[1] == social_security_contribution_2016 - assert parameters.taxes.social_security_contribution(2017).thresholds[1] == social_security_contribution_2016 - - -def test_inflate_simple_parameter(): - """ - Test parameters inflator on a simple parameter as the basic income - """ - tax_benefit_system = CountryTaxBenefitSystem() - parameters = tax_benefit_system.parameters - basic_income_2016 = parameters.benefits.basic_income(2016) - basic_income_2017 = parameters.benefits.basic_income(2017) - assert basic_income_2017 == basic_income_2016 - inflate_parameters(parameters, inflator=0.1, base_year=2016, last_year=2017) - - assert basic_income_2016 == parameters.benefits.basic_income(2016) - assert 1.1 * basic_income_2016 == parameters.benefits.basic_income(2017) - - -def test_inflate_scale(): - """ - Test parameters inflator on a scale parameter as the social security contributions tax_scale - """ - tax_benefit_system = 
CountryTaxBenefitSystem() - parameters = tax_benefit_system.parameters - inflate_parameters(parameters, inflator=0.3, base_year=2015, last_year=2016) - for threshold_2016, threshold_2015 in zip( - parameters.taxes.social_security_contribution(2016).thresholds, - parameters.taxes.social_security_contribution(2015).thresholds, - ): - assert threshold_2016 == threshold_2015 * 1.3 - - -def test_inflate_scale_with_changing_number_of_brackets(): - """ - Test parameters inflator on a scale parameter when the number of brackets changes - - Use parameters_asof to use the present legislation the future pre-inflated legislation - Test on the social security contributions tax_scale - """ - tax_benefit_system = CountryTaxBenefitSystem() - parameters = tax_benefit_system.parameters - parameters_asof(parameters, instant=periods.instant(2016)) # Remove post 2016 legislation changes - inflate_parameters(parameters, inflator=0.3, base_year=2016, last_year=2017) - for threshold_2017, threshold_2016 in zip( - parameters.taxes.social_security_contribution(2017).thresholds, - parameters.taxes.social_security_contribution(2016).thresholds, - ): - assert threshold_2017 == threshold_2016 * 1.3, "{} != {}".format(threshold_2017, threshold_2016 * 1.3) - - -def test_inflate_start_instant_option(): - """ - Test parameters inflator with a specific start_instant - """ - tax_benefit_system = CountryTaxBenefitSystem() - parameters = tax_benefit_system.parameters - parameters_asof(parameters, instant=periods.instant(2022)) # Remove post 2022 legislation changes - inflate_parameters(parameters, inflator=0.3, base_year=2022, last_year=2023, start_instant="2023-07-01") - for threshold_2023_06, threshold_2023_07, threshold_2022 in zip( - parameters.taxes.social_security_contribution("2023-06").thresholds, - parameters.taxes.social_security_contribution("2023-07").thresholds, - parameters.taxes.social_security_contribution(2022).thresholds, - ): - assert threshold_2023_07 == threshold_2022 * 1.3, "{} != 
{}".format(threshold_2023_07, threshold_2022 * 1.3) - assert threshold_2023_06 == threshold_2022, "{} != {}".format(threshold_2023_06, threshold_2022) - - -if __name__ == "__main__": - test_inflate_simple_parameter() - test_inflate_scale() - test_inflate_scale_with_changing_number_of_brackets() - test_asof_simple_annual_parameter() - test_asof_scale_parameters() - test_inflate_start_instant_option() diff --git a/openfisca_survey_manager/tests/test_marginal_tax_rate.py b/openfisca_survey_manager/tests/test_marginal_tax_rate.py deleted file mode 100644 index bb0d3c7b..00000000 --- a/openfisca_survey_manager/tests/test_marginal_tax_rate.py +++ /dev/null @@ -1,30 +0,0 @@ -import logging - -from openfisca_core.tools import assert_near -from openfisca_country_template import CountryTaxBenefitSystem - -from openfisca_survey_manager.tests.test_scenario import create_randomly_initialized_survey_scenario - -log = logging.getLogger(__name__) - - -tax_benefit_system = CountryTaxBenefitSystem() - - -def test_compute_marginal_tax_rate(): - survey_scenario = create_randomly_initialized_survey_scenario(use_marginal_tax_rate=True) - assert "_modified_baseline" in survey_scenario.simulations - assert_near( - survey_scenario.compute_marginal_tax_rate(target_variable="income_tax", period=2017, simulation="baseline"), - (1 - 0.15), - relative_error_margin=1e-6, - ) - # survey_scenario.compute_marginal_tax_rate(target_variable = 'disposable_income', period = 2017, simulation = "baseline") - - -if __name__ == "__main__": - import sys - - log = logging.getLogger(__name__) - logging.basicConfig(level=logging.DEBUG, stream=sys.stdout) - test_compute_marginal_tax_rate() diff --git a/openfisca_survey_manager/tests/test_matching.py b/openfisca_survey_manager/tests/test_matching.py deleted file mode 100644 index cca00116..00000000 --- a/openfisca_survey_manager/tests/test_matching.py +++ /dev/null @@ -1,142 +0,0 @@ -"""Tests for the matching functionality in OpenFisca Survey Manager.""" - 
-import pandas as pd - -from openfisca_survey_manager.matching import nnd_hotdeck_using_rpy2 - -try: - import rpy2 - from rpy2.robjects import pandas2ri, r - from rpy2.robjects.packages import importr -except ImportError: - rpy2 = None - - -def test_reproduction(): - """Test the reproduction of examples from the StatMatch documentation.""" - if rpy2 is None: - return - - # Reproducing examples from StatMatch documenation - # https://cran.r-project.org/web/packages/StatMatch/StatMatch.pdf - - r.data("iris") - - pandas2ri.activate() - # or explcitly do: - # iris = pandas2ri.ri2py(r['iris']) - - iris = r["iris"] - - # lab = list([1:15, 51:65, 101:115) - # recipient data.frame - iris_rec = pd.concat( - [ - iris.loc[1:15], - iris.loc[51:65], - iris.loc[101:115], - ] - ) - iris_rec.columns - del iris_rec["Petal.Width"] - - # donor data.frame - iris_don = pd.concat( - [ - iris.loc[16:50], - iris.loc[66:100], - iris.loc[116:150], - ] - ) - del iris_rec["Petal.Length"] - - # Now iris.rec and iris.don have the variables - # "Sepal.Length", "Sepal.Width" and "Species" - # in common. 
- # "Petal.Length" is available only in iris.rec - # "Petal.Width" is available only in iris.don - # find the closest donors using NND hot deck; - # distances are computed on "Sepal.Length" and "Sepal.Width" - - StatMatch = importr("StatMatch") - - out_NND = StatMatch.NND_hotdeck( - data_rec=iris_rec, - data_don=iris_don, - match_vars=pd.Series(["Sepal.Length", "Sepal.Width"]), - don_class="Species", - ) - - # create synthetic data.set, without the - # duplication of the matching variables - fused_0 = pandas2ri.ri2py( - StatMatch.create_fused(data_rec=iris_rec, data_don=iris_don, mtc_ids=out_NND[0], z_vars="Petal.Width") - ) - - # create synthetic data.set, with the "duplication" - # of the matching variables - fused_1 = pandas2ri.ri2py( - StatMatch.create_fused( - data_rec=iris_rec, - data_don=iris_don, - mtc_ids=out_NND[0], - z_vars="Petal.Width", - dup_x=True, - match_vars=pd.Series(["Sepal.Length", "Sepal.Width"]), - ) - ) - del fused_0, fused_1 - - -def test_nnd_hotdeck_using_rpy2(): - """Test the nnd_hotdeck_using_rpy2 function with iris dataset.""" - if rpy2 is None: - print("rpy2 is absent: skipping test") # noqa analysis:ignore - return - - r.data("iris") - - pandas2ri.activate() - # or explcitly do: - # iris = pandas2ri.ri2py(r['iris']) - - iris = r["iris"] - - # lab = list([1:15, 51:65, 101:115) - # recipient data.frame - iris_rec = pd.concat( - [ - iris.loc[1:15], - iris.loc[51:65], - iris.loc[101:115], - ] - ) - iris_rec.columns - del iris_rec["Petal.Width"] - - # donor data.frame - iris_don = pd.concat( - [ - iris.loc[16:50], - iris.loc[66:100], - iris.loc[116:150], - ] - ) - del iris_rec["Petal.Length"] - - # Now iris.rec and iris.don have the variables - # "Sepal.Length", "Sepal.Width" and "Species" - # in common. 
- # "Petal.Length" is available only in iris.rec - # "Petal.Width" is available only in iris.don - - # find the closest donors using NND hot deck; - # distances are computed on "Sepal.Length" and "Sepal.Width" - - x, y = nnd_hotdeck_using_rpy2( - receiver=iris_rec, - donor=iris_don, - donor_classes="Species", - z_variables="Petal.Width", - matching_variables=["Sepal.Length", "Sepal.Width"], - ) diff --git a/openfisca_survey_manager/tests/test_parquet.py b/openfisca_survey_manager/tests/test_parquet.py deleted file mode 100644 index c6d02828..00000000 --- a/openfisca_survey_manager/tests/test_parquet.py +++ /dev/null @@ -1,257 +0,0 @@ -""" -Test the ability to store parquet files in collections, without converting them to HDF5. -""" - -import logging -import os -from unittest import TestCase - -import pandas as pd -import pytest -from openfisca_core import periods - -from openfisca_survey_manager.paths import default_config_files_directory, openfisca_survey_manager_location -from openfisca_survey_manager.scenarios.abstract_scenario import AbstractSurveyScenario -from openfisca_survey_manager.scripts.build_collection import add_survey_to_collection, build_survey_collection -from openfisca_survey_manager.survey_collections import SurveyCollection -from openfisca_survey_manager.surveys import NoMoreDataError -from openfisca_survey_manager.tests import tax_benefit_system - -logger = logging.getLogger(__name__) - - -@pytest.mark.order(after="test_write_parquet.py::TestWriteParquet::test_write_parquet_one_file_per_entity") -class TestParquet(TestCase): - """Tests for Parquet file operations.""" - - data_dir = os.path.join( - openfisca_survey_manager_location, - "openfisca_survey_manager", - "tests", - "data_files", - ) - collection_name = "test_parquet_collection" - survey_name = "test_parquet_survey" - - def test_add_survey_to_collection_parquet(self): - """Test adding a parquet survey to a collection.""" - survey_collection = SurveyCollection(name=self.collection_name) 
- survey_file_path = os.path.join(self.data_dir, self.collection_name) - add_survey_to_collection( - survey_name=self.survey_name, - survey_collection=survey_collection, - parquet_files=[survey_file_path], - ) - ordered_dict = survey_collection.to_json() - assert self.survey_name in list(ordered_dict["surveys"].keys()) - - def test_build_collection(self): - """Test building a survey collection from parquet files.""" - collection_name = self.collection_name - json_file = os.path.join( - self.data_dir, - collection_name + ".json", - ) - with open(json_file, "w") as f: - f.write( - """ - { - "label": "Test parquet collection", - "name": "collection_name", - "surveys": { - } - } - """.replace("collection_name", collection_name) - ) - data_directory_path_by_survey_suffix = { - "2020": os.path.join(self.data_dir, collection_name), - } - build_survey_collection( - collection_name=collection_name, - data_directory_path_by_survey_suffix=data_directory_path_by_survey_suffix, - replace_metadata=False, - replace_data=False, - source_format="parquet", - config_files_directory=self.data_dir, - ) - - @pytest.mark.order(after="test_build_collection") - def test_load_single_parquet_monolithic(self): - """Test loading all the data from parquet files in memory.""" - # Create survey scenario - survey_scenario = AbstractSurveyScenario() - survey_name = self.collection_name + "_2020" - survey_collection = SurveyCollection.load( - config_files_directory=self.data_dir, - collection=self.collection_name, - ) - survey = survey_collection.get_survey(survey_name) - table = survey.get_values(table="household", ignorecase=True) - input_data_frame_by_entity = table - survey_scenario.set_tax_benefit_systems({"baseline": tax_benefit_system}) - survey_scenario.period = 2020 - survey_scenario.used_as_input_variables = ["rent"] - survey_scenario.collection = self.collection_name - period = periods.period("2020-01") - results = { - "rent": [], - "housing_tax": [], - "income_tax": [], - } - data = { - 
"collection": self.collection_name, - "survey": survey_name, - "input_data_table_by_entity_by_period": { - period: { - "household": "household", - "person": "person", - } - }, - "config_files_directory": self.data_dir, - } - survey_scenario.init_from_data(data=data) - - simulation = survey_scenario.simulations["baseline"] - sim_res = simulation.calculate("housing_tax", period.this_year).flatten().tolist() - results["housing_tax"] += sim_res - sim_res = simulation.calculate("rent", period).flatten().tolist() - results["rent"] += sim_res - sim_res = simulation.calculate("income_tax", period).flatten().tolist() - results["income_tax"] += sim_res - - logger.debug(f"{results=}") - assert len(results["rent"]) == 4 - assert (results["rent"] == input_data_frame_by_entity["rent"]).all() - assert results["housing_tax"] == [500.0, 1000.0, 1500.0, 2000.0] - assert results["income_tax"] == [195.00001525878906, 3.0, 510.0000305175781, 600.0, 750.0] - - def test_load_multiple_parquet_monolithic(self): - """Test loading all data from parquet files in memory.""" - collection_name = "test_multiple_parquet_collection" - data_dir = os.path.join(self.data_dir, collection_name) - # Create survey scenario - survey_scenario = AbstractSurveyScenario() - survey_name = collection_name + "_2020" - survey_collection = SurveyCollection.load( - config_files_directory=data_dir, - collection=collection_name, - ) - survey = survey_collection.get_survey(survey_name) - table = survey.get_values(table="household", ignorecase=True) - input_data_frame_by_entity = table - survey_scenario.set_tax_benefit_systems({"baseline": tax_benefit_system}) - survey_scenario.period = 2020 - survey_scenario.used_as_input_variables = ["rent"] - survey_scenario.collection = collection_name - period = periods.period("2020-01") - results = { - "rent": [], - "housing_tax": [], - "income_tax": [], - } - data = { - "collection": collection_name, - "survey": survey_name, - "input_data_table_by_entity_by_period": { - period: { 
- "household": "household", - "person": "person", - } - }, - "config_files_directory": data_dir, - } - survey_scenario.init_from_data(data=data) - - simulation = survey_scenario.simulations["baseline"] - sim_res = simulation.calculate("housing_tax", period.this_year).flatten().tolist() - results["housing_tax"] += sim_res - sim_res = simulation.calculate("rent", period).flatten().tolist() - results["rent"] += sim_res - sim_res = simulation.calculate("income_tax", period).flatten().tolist() - results["income_tax"] += sim_res - - logger.debug(f"{results=}") - assert len(results["rent"]) == 4 - assert (results["rent"] == input_data_frame_by_entity["rent"]).all() - assert results["housing_tax"] == [500.0, 1000.0, 1500.0, 2000.0] - assert results["income_tax"] == [195.00001525878906, 3.0, 510.0000305175781, 600.0, 750.0] - - def test_load_parquet_batch(self): - """ - Test the batch loading of data from parquet files. - - This allow loading larger than memory datasets. - """ - df = pd.read_parquet(os.path.join(self.data_dir, self.collection_name, "household.parquet")) - assert len(df) == 4 - assert (df.columns == ["household_id", "rent", "household_weight", "accommodation_size"]).all() - assert df.rent.sum() == 10300 - - collection_name = "test_multiple_parquet_collection" - data_dir = os.path.join(self.data_dir, collection_name) - # Create survey scenario - survey_scenario = AbstractSurveyScenario() - survey_scenario.set_tax_benefit_systems({"baseline": tax_benefit_system}) - survey_scenario.period = 2020 - survey_scenario.used_as_input_variables = ["rent"] - survey_scenario.collection = collection_name - period = periods.period("2020-01") - results = { - "rent": [], - "housing_tax": [], - "income_tax": [], - } - batch_size = 2 - batch_index = 0 - while True: - try: - data = { - "collection": collection_name, - "survey": collection_name + "_2020", - "input_data_table_by_entity_by_period": { - period: { - "household": "household", - "person": "person", - "batch_size": 
batch_size, - "batch_index": batch_index, - "batch_entity": "household", - "batch_entity_key": "household_id", - "filtered_entity": "person", - "filtered_entity_on_key": "household_id", - } - }, - "config_files_directory": data_dir, - } - survey_scenario.init_from_data(data=data) - - simulation = survey_scenario.simulations["baseline"] - sim_res = simulation.calculate("housing_tax", period.this_year).flatten().tolist() - results["housing_tax"] += sim_res - sim_res = simulation.calculate("rent", period).flatten().tolist() - results["rent"] += sim_res - sim_res = simulation.calculate("income_tax", period).flatten().tolist() - results["income_tax"] += sim_res - logger.debug("XXXXXXXXXXXXXXXXXXXXXx Next batch XXXXXXXXXXXXXXXXXXXXXx") - batch_index += 1 - except NoMoreDataError: - logger.debug("No more data") - break - logger.debug(f"{results=}") - assert len(results["rent"]) == 4 - # We check the sum as in CI the results are not in the same order - assert sum(results["rent"]) == df.rent.sum() - assert sum(results["housing_tax"]) == sum([500.0, 1000.0, 1500.0, 2000.0]) - self.assertAlmostEqual( - sum(results["income_tax"]), sum([195.00001525878906, 3.0, 510.0000305175781, 600.0, 750.0]) - ) - - -if __name__ == "__main__": - # openfisca_survey_manager_location = Path(__file__).parent.parent - # os.environ["CI"] = "True" - logger.debug(openfisca_survey_manager_location) - logger.debug(f"Default config files directory: {default_config_files_directory}") - test = TestParquet() - test.test_build_collection() - test.test_load_parquet_batch() - logger.debug("Done") diff --git a/openfisca_survey_manager/tests/test_quantile.py b/openfisca_survey_manager/tests/test_quantile.py deleted file mode 100644 index f1b01310..00000000 --- a/openfisca_survey_manager/tests/test_quantile.py +++ /dev/null @@ -1,146 +0,0 @@ -import numpy as np -import pandas as pd -from openfisca_core.entities import build_entity -from openfisca_core.model_api import YEAR, Variable -from 
openfisca_core.taxbenefitsystems import TaxBenefitSystem - -from openfisca_survey_manager.paths import default_config_files_directory -from openfisca_survey_manager.scenarios.abstract_scenario import AbstractSurveyScenario -from openfisca_survey_manager.statshelpers import mark_weighted_percentiles -from openfisca_survey_manager.variables import quantile - -Individu = build_entity( - key="individu", - plural="individus", - label="Individu", - is_person=True, -) - -entities = [Individu] - - -class salaire(Variable): - value_type = float - entity = Individu - label = "Salaire" - definition_period = YEAR - - -class decile_salaire_from_quantile(Variable): - entity = Individu - value_type = int - label = "Décile de salaire nouveau calcul" - definition_period = YEAR - formula = quantile(q=10, variable="salaire") - - -class decile_salaire(Variable): - value_type = int - entity = Individu - label = "Décile de salaire" - definition_period = YEAR - - def formula(individu, period): - salaire = individu("salaire", period) - labels = np.arange(1, 11) - weights = 1.0 * np.ones(shape=len(salaire)) - decile, _ = mark_weighted_percentiles( - salaire, # + np.random.uniform(size = len(salaire)) - 0.5, - labels, - weights, - method=2, - return_quantiles=True, - ) - return decile - - -class QuantileTestTaxBenefitSystem(TaxBenefitSystem): - """PPDLand tax and benefit system""" - - CURRENCY = "" - - def __init__(self): - super(QuantileTestTaxBenefitSystem, self).__init__(entities) - for variable in [ - decile_salaire_from_quantile, - decile_salaire, - salaire, - ]: - self.add_variable(variable) - - -class QuantileTestSurveyScenario(AbstractSurveyScenario): - def __init__(self, input_data_frame=None, tax_benefit_system=None, baseline_tax_benefit_system=None, period=None): - super(QuantileTestSurveyScenario, self).__init__() - assert input_data_frame is not None - assert period is not None - self.period = period - if tax_benefit_system is None: - tax_benefit_system = 
QuantileTestTaxBenefitSystem() - - tax_benefit_systems = ( - {"reform": tax_benefit_system, "baseline": baseline_tax_benefit_system} - if baseline_tax_benefit_system - else {"baseline": tax_benefit_system} - ) - - self.set_tax_benefit_systems(tax_benefit_systems) - - self.used_as_input_variables = list( - set(tax_benefit_system.variables.keys()).intersection(set(input_data_frame.columns)) - ) - data = {"input_data_frame": input_data_frame, "config_files_directory": default_config_files_directory} - self.init_from_data(data=data) - - -def create_input_dataframe(size=9): - """ - Create input dataframe with variable salaire and pension_retraite - """ - np.random.seed(216) - household_weight = 1.0 - number_of_indididual = size - size = int(number_of_indididual / household_weight) - salaire = np.linspace(0, 100, size) - return pd.DataFrame( - { - "salaire": salaire, - } - ) - - -def test_quantile(): - size = 10000 - input_data_frame = create_input_dataframe(size=size) - survey_scenario = QuantileTestSurveyScenario( - input_data_frame=input_data_frame, - tax_benefit_system=QuantileTestTaxBenefitSystem(), - period=2017, - ) - data = np.linspace(1, 11 - 1e-5, size) - target = np.floor(data) - result = survey_scenario.calculate_variable( - variable="decile_salaire_from_quantile", - period="2017", - simulation="baseline", - ) - assert all( - (result == target) + (abs(result - target + 1) < 0.001) # Finite size problem handling - ), "{} != {}, \n{} , \n{},".format( - result[result != target], - target[result != target], - data[result != target], - abs(result - target + 1)[result != target], - ) - - # No reason that method coincides so close to the quantiles thresholds - # assert all(survey_scenario.calculate_variable( - # variable = 'decile_salaire_from_quantile', period = '2017' - # ) == survey_scenario.calculate_variable( - # variable = 'decile_salaire', period = '2017' - # ) - # ) - - -if __name__ == "__main__": - test_quantile() diff --git 
a/openfisca_survey_manager/tests/test_read_sas.py b/openfisca_survey_manager/tests/test_read_sas.py deleted file mode 100644 index f3b94673..00000000 --- a/openfisca_survey_manager/tests/test_read_sas.py +++ /dev/null @@ -1,27 +0,0 @@ -# OpenFisca -- A versatile microsimulation software -# By: OpenFisca Team -# -# Copyright (C) 2011, 2012, 2013, 2014, 2015 OpenFisca Team -import os - -from pandas.testing import assert_frame_equal - -from openfisca_survey_manager.paths import openfisca_survey_manager_location -from openfisca_survey_manager.read_sas import read_sas - - -def test(): - sas_file_path = os.path.join( - openfisca_survey_manager_location, - "openfisca_survey_manager", - "tests", - "data_files", - "help.sas7bdat", - ) - data_frame = read_sas(sas_file_path, clean=False) - data_frame_clean = read_sas(sas_file_path, clean=True) - - assert_frame_equal(data_frame, data_frame_clean) - - assert len(data_frame.columns) == 88 - assert len(data_frame) == 453 diff --git a/openfisca_survey_manager/tests/test_scenario.py b/openfisca_survey_manager/tests/test_scenario.py deleted file mode 100644 index 850cf277..00000000 --- a/openfisca_survey_manager/tests/test_scenario.py +++ /dev/null @@ -1,572 +0,0 @@ -"""Tests for the survey scenario functionality in OpenFisca Survey Manager.""" - -import logging -import os -import shutil -from typing import Any, Callable, Dict, List, Optional - -import pytest -from openfisca_core import periods -from openfisca_core.tools import assert_near - -from openfisca_survey_manager.input_dataframe_generator import ( - make_input_dataframe_by_entity, - random_data_generator, - randomly_init_variable, -) -from openfisca_survey_manager.paths import ( - default_config_files_directory, - openfisca_survey_manager_location, -) -from openfisca_survey_manager.scenarios.abstract_scenario import AbstractSurveyScenario -from openfisca_survey_manager.scenarios.reform_scenario import ReformScenario -from openfisca_survey_manager.tests import 
tax_benefit_system - -log = logging.getLogger(__name__) - - -def create_randomly_initialized_survey_scenario( - nb_persons: int = 10, - nb_groups: int = 5, - salary_max_value: float = 50000, - rent_max_value: float = 1000, - collection: Optional[str] = "test_random_generator", - use_marginal_tax_rate: bool = False, - reform: Optional[Callable] = None, -) -> AbstractSurveyScenario: - """ - Create a randomly initialized survey scenario. - - Args: - nb_persons (int): Number of persons - nb_groups (int): Number of groups - salary_max_value (float): Maximum salary value - rent_max_value (float): Maximum rent value - collection (Optional[str]): Collection name - use_marginal_tax_rate (bool): Use marginal tax rate - reform (Optional[Callable]): Reform function - - Returns: - AbstractSurveyScenario: Initialized survey scenario - """ - if collection is not None: - return create_randomly_initialized_survey_scenario_from_table( - nb_persons, - nb_groups, - salary_max_value, - rent_max_value, - collection, - use_marginal_tax_rate, - reform=reform, - ) - else: - return create_randomly_initialized_survey_scenario_from_data_frame( - nb_persons, - nb_groups, - salary_max_value, - rent_max_value, - use_marginal_tax_rate, - reform=reform, - ) - - -def create_randomly_initialized_survey_scenario_from_table( - nb_persons: int, - nb_groups: int, - salary_max_value: float, - rent_max_value: float, - collection: str, - use_marginal_tax_rate: bool, - reform: Optional[Callable] = None, -) -> AbstractSurveyScenario: - """ - Create a randomly initialized survey scenario from a table. 
- - Args: - nb_persons (int): Number of persons - nb_groups (int): Number of groups - salary_max_value (float): Maximum salary value - rent_max_value (float): Maximum rent value - collection (str): Collection name - use_marginal_tax_rate (bool): Use marginal tax rate - reform (Optional[Callable]): Reform function - - Returns: - AbstractSurveyScenario: Initialized survey scenario - """ - variable_generators_by_period = { - periods.period("2017-01"): [ - { - "variable": "salary", - "max_value": salary_max_value, - }, - { - "variable": "rent", - "max_value": rent_max_value, - }, - { - "variable": "household_weight", - "max_value": 100, - }, - ], - periods.period("2018-01"): [ - { - "variable": "salary", - "max_value": salary_max_value, - }, - { - "variable": "rent", - "max_value": rent_max_value, - }, - ], - } - table_by_entity_by_period = random_data_generator( - tax_benefit_system, - nb_persons, - nb_groups, - variable_generators_by_period, - collection, - ) - if reform is None: - survey_scenario = AbstractSurveyScenario() - survey_scenario.set_tax_benefit_systems({"baseline": tax_benefit_system}) - else: - survey_scenario = ReformScenario() - survey_scenario.set_tax_benefit_systems( - { - "reform": reform(tax_benefit_system), - "baseline": tax_benefit_system, - } - ) - - survey_scenario.used_as_input_variables = [ - "salary", - "rent", - "housing_occupancy_status", - "household_weight", - ] - survey_scenario.period = 2017 - survey_scenario.collection = collection - data = { - "survey": "input", - "input_data_table_by_entity_by_period": table_by_entity_by_period, - "config_files_directory": default_config_files_directory, - } - if use_marginal_tax_rate: - survey_scenario.varying_variable = "salary" - - survey_scenario.weight_variable_by_entity = { - "person": "person_weight", - "household": "household_weight", - } - survey_scenario.init_from_data(data=data, use_marginal_tax_rate=use_marginal_tax_rate) - return survey_scenario - - -def 
create_randomly_initialized_survey_scenario_from_data_frame( - nb_persons: int, - nb_groups: int, - salary_max_value: float, - rent_max_value: float, - use_marginal_tax_rate: bool = False, - reform: Optional[Callable] = None, -) -> AbstractSurveyScenario: - """ - Create a randomly initialized survey scenario from a data frame. - - Args: - nb_persons (int): Number of persons - nb_groups (int): Number of groups - salary_max_value (float): Maximum salary value - rent_max_value (float): Maximum rent value - use_marginal_tax_rate (bool): Use marginal tax rate - reform (Optional[Callable]): Reform function - - Returns: - AbstractSurveyScenario: Initialized survey scenario - """ - input_data_frame_by_entity = generate_input_input_dataframe_by_entity( - nb_persons, nb_groups, salary_max_value, rent_max_value - ) - for entity in input_data_frame_by_entity: - if entity == "person": - input_data_frame_by_entity[entity]["household_id_ind"] = input_data_frame_by_entity[entity]["household_id"] - if entity == "household": - input_data_frame_by_entity[entity]["household_id"] = input_data_frame_by_entity[entity].index - - weight_variable_by_entity = { - "person": "person_weight", - "household": "household_weight", - } - if reform is None: - survey_scenario = AbstractSurveyScenario() - survey_scenario.set_tax_benefit_systems({"baseline": tax_benefit_system}) - else: - survey_scenario = ReformScenario() - survey_scenario.set_tax_benefit_systems( - { - "reform": reform(tax_benefit_system), - "baseline": tax_benefit_system, - } - ) - survey_scenario.period = 2017 - survey_scenario.used_as_input_variables = [ - "salary", - "rent", - "household_weight", - "household_id", - "household_id_ind", - ] - period = periods.period("2017-01") - - data = { - "input_data_frame_by_entity_by_period": {period: input_data_frame_by_entity}, - "config_files_directory": default_config_files_directory, - } - survey_scenario.set_weight_variable_by_entity(weight_variable_by_entity) - assert 
survey_scenario.weight_variable_by_entity == weight_variable_by_entity - survey_scenario.init_from_data(data=data, use_marginal_tax_rate=use_marginal_tax_rate) - if isinstance(survey_scenario, ReformScenario): - assert (survey_scenario.calculate_series("household_weight", period, use_baseline=True) != 0).all() - else: - for simulation_name, simulation in survey_scenario.simulations.items(): - assert simulation.weight_variable_by_entity == weight_variable_by_entity, ( - f"{simulation_name} weight_variable_by_entity does not match {weight_variable_by_entity}" - ) - assert ( - survey_scenario.calculate_series("household_weight", period, simulation=simulation_name) != 0 - ).all() - return survey_scenario - - -def generate_input_input_dataframe_by_entity( - nb_persons: int, nb_groups: int, salary_max_value: float, rent_max_value: float -) -> Dict[str, Any]: - """ - Generate input dataframe by entity with randomly initialized variables. - - Args: - nb_persons (int): Number of persons - nb_groups (int): Number of groups - salary_max_value (float): Maximum salary value - rent_max_value (float): Maximum rent value - - Returns: - Dict[str, Any]: Input dataframe by entity - """ - input_dataframe_by_entity = make_input_dataframe_by_entity(tax_benefit_system, nb_persons, nb_groups) - randomly_init_variable( - tax_benefit_system, - input_dataframe_by_entity, - "salary", - max_value=salary_max_value, - condition="household_role == 'first_parent'", - ) - randomly_init_variable(tax_benefit_system, input_dataframe_by_entity, "rent", max_value=rent_max_value) - randomly_init_variable( - tax_benefit_system, - input_dataframe_by_entity, - "household_weight", - max_value=100, - ) - randomly_init_variable( - tax_benefit_system, - input_dataframe_by_entity, - "housing_occupancy_status", - max_value=4, - ) - return input_dataframe_by_entity - - -def test_input_dataframe_generator( - nb_persons: int = 10, - nb_groups: int = 5, - salary_max_value: float = 50000, - rent_max_value: float = 
1000, -) -> None: - """Test the input dataframe generator function.""" - input_dataframe_by_entity = generate_input_input_dataframe_by_entity( - nb_persons, nb_groups, salary_max_value, rent_max_value - ) - assert (input_dataframe_by_entity["person"]["household_role"] == "first_parent").sum() == 5 - assert ( - input_dataframe_by_entity["person"].loc[ - input_dataframe_by_entity["person"]["household_role"] != "first_parent", - "salary", - ] - == 0 - ).all() - - assert ( - input_dataframe_by_entity["person"].loc[ - input_dataframe_by_entity["person"]["household_role"] == "first_parent", - "salary", - ] - > 0 - ).all() - assert ( - input_dataframe_by_entity["person"].loc[ - input_dataframe_by_entity["person"]["household_role"] == "first_parent", - "salary", - ] - <= salary_max_value - ).all() - - assert (input_dataframe_by_entity["household"]["rent"] > 0).all() - assert (input_dataframe_by_entity["household"]["rent"] < rent_max_value).all() - assert input_dataframe_by_entity["household"]["housing_occupancy_status"].isin([0, 1, 2, 3]).all() - - -def test_init_from_data( - nb_persons: int = 10, - nb_groups: int = 5, - salary_max_value: float = 50000, - rent_max_value: float = 1000, -) -> None: - """ - Test the initialization of data in the survey scenario. - - Args: - nb_persons: Number of persons to generate in the test data. - nb_groups: Number of household groups to generate. - salary_max_value: Maximum value for randomly generated salaries. - rent_max_value: Maximum value for randomly generated rents. - """ - # We check that the attribute `used_as_input_variables` matches the list of variables - # that are used in the calculation of the simulations. 
Other variables are not used in the calculation, - # being present in the input dataset but not in the output dataset (the simulation dataset) - - # Set up test : the minimum necessary data to perform an `init_from_data` - survey_scenario = AbstractSurveyScenario() - assert survey_scenario.simulations is None - # Generate some data and its period - input_data_frame_by_entity = generate_input_input_dataframe_by_entity( - nb_persons, nb_groups, salary_max_value, rent_max_value - ) - period = periods.period("2017-01") - # Creating a data object associated to its period, and we give it a name - data_in = {"input_data_frame_by_entity_by_period": {period: input_data_frame_by_entity}} - # data_in = copy.deepcopy(data_in) # Pour comparer avec la sortie de `init_from_data` - table_ind = input_data_frame_by_entity["person"].copy(deep=True) - table_men = input_data_frame_by_entity["household"].copy(deep=True) - # print(table_ind) - - # We must add a TBS to the scenario to indicate what are the entities - survey_scenario.set_tax_benefit_systems({"baseline": tax_benefit_system}) - assert len(survey_scenario.tax_benefit_systems) == 1 - assert list(survey_scenario.tax_benefit_systems.keys()) == ["baseline"] - assert survey_scenario.simulations is None - # We must add the `used_as_input_variables` even though they don't seem necessary - survey_scenario.used_as_input_variables = ["salary", "rent", "household_weight"] - # We must add the year to initiate a .new_simulation - survey_scenario.period = 2017 - # Then we can input the data+period dict inside the scenario - survey_scenario.init_from_data(data=data_in) - assert len(survey_scenario.simulations) == 1 - # We are looking for the dataframes inside the survey_scenario - all_var = list(set(list(table_ind.columns) + list(table_men.columns))) - # print('Variables', all_var) - data_out = survey_scenario.create_data_frame_by_entity(variables=all_var, period=period, merge=False) - - # 1 - Has the data object changed ? 
We only compare variables because Id's and others are lost in the process - for cols in table_ind: - if cols in data_out["person"]: - pass - else: - print("Columns lost in person table: ", cols) # noqa T201 - assert data_out["person"]["salary"].equals(table_ind["salary"]) - - for cols in table_men: - if cols in data_out["household"]: - pass - else: - print("Columns lost in household table: ", cols) # noqa T201 - assert data_out["household"]["rent"].equals(table_men["rent"]) - - -def test_survey_scenario_input_dataframe_import( - nb_persons: int = 10, - nb_groups: int = 5, - salary_max_value: float = 50000, - rent_max_value: float = 1000, -) -> None: - """ - Test the import of input dataframes into a survey scenario. - - Args: - nb_persons: Number of persons to generate. - nb_groups: Number of household groups. - salary_max_value: Maximum salary value. - rent_max_value: Maximum rent value. - """ - input_data_frame_by_entity = generate_input_input_dataframe_by_entity( - nb_persons, nb_groups, salary_max_value, rent_max_value - ) - survey_scenario = AbstractSurveyScenario() - survey_scenario.set_tax_benefit_systems({"baseline": tax_benefit_system}) - survey_scenario.period = 2017 - survey_scenario.used_as_input_variables = ["salary", "rent"] - period = periods.period("2017-01") - data = { - "input_data_frame_by_entity_by_period": {period: input_data_frame_by_entity}, - "config_files_directory": default_config_files_directory, - } - survey_scenario.init_from_data(data=data) - - simulation = survey_scenario.simulations["baseline"] - assert (simulation.calculate("salary", period) == input_data_frame_by_entity["person"]["salary"]).all() - assert (simulation.calculate("rent", period) == input_data_frame_by_entity["household"]["rent"]).all() - - -def test_survey_scenario_input_dataframe_import_scrambled_ids( - nb_persons: int = 10, - nb_groups: int = 5, - salary_max_value: float = 50000, - rent_max_value: float = 1000, -) -> None: - """ - Test survey scenario input 
dataframe import with scrambled IDs. - - Args: - nb_persons: Number of persons to generate. - nb_groups: Number of household groups. - salary_max_value: Maximum salary value. - rent_max_value: Maximum rent value. - """ - input_data_frame_by_entity = generate_input_input_dataframe_by_entity( - nb_persons, nb_groups, salary_max_value, rent_max_value - ) # Un dataframe d'exemple que l'on injecte - input_data_frame_by_entity["person"]["household_id"] = 4 - input_data_frame_by_entity["person"]["household_id"] - survey_scenario = AbstractSurveyScenario() - survey_scenario.set_tax_benefit_systems({"baseline": tax_benefit_system}) - survey_scenario.period = 2017 - survey_scenario.used_as_input_variables = ["salary", "rent"] - period = periods.period("2017-01") - data = { - "input_data_frame_by_entity_by_period": {period: input_data_frame_by_entity}, - "config_files_directory": default_config_files_directory, - } - survey_scenario.init_from_data(data=data) - simulation = survey_scenario.simulations["baseline"] - period = periods.period("2017-01") - assert (simulation.calculate("salary", period) == input_data_frame_by_entity["person"]["salary"]).all() - assert (simulation.calculate("rent", period) == input_data_frame_by_entity["household"]["rent"]).all() - - -def test_dump_survey_scenario() -> None: - """Test the dump and restore functionality of survey scenarios.""" - survey_scenario = create_randomly_initialized_survey_scenario() - directory = os.path.join( - openfisca_survey_manager_location, - "openfisca_survey_manager", - "tests", - "data_files", - "dump", - ) - if os.path.exists(directory): - shutil.rmtree(directory) - - survey_scenario.dump_simulations(directory=directory) - period = "2017-01" - df = survey_scenario.create_data_frame_by_entity(variables=["salary", "rent"], period=period) - household = df["household"] - person = df["person"] - assert not household.empty - assert not person.empty - del survey_scenario - survey_scenario = AbstractSurveyScenario() - 
survey_scenario.set_tax_benefit_systems({"baseline": tax_benefit_system}) - survey_scenario.used_as_input_variables = ["salary", "rent"] - survey_scenario.period = 2017 - survey_scenario.restore_simulations(directory=directory) - df2 = survey_scenario.create_data_frame_by_entity(variables=["salary", "rent"], period="2017-01") - - assert (df2["household"] == household).all().all() - assert (df2["person"] == person).all().all() - - -@pytest.mark.order(before="test_add_survey_to_collection.py::test_add_survey_to_collection") -def test_inflate() -> None: - """Test the inflate method of the survey scenario.""" - survey_scenario = create_randomly_initialized_survey_scenario(collection=None) - period = "2017-01" - inflator = 2.42 - inflator_by_variable = {"rent": inflator} - - rent_before_inflate = survey_scenario.compute_aggregate("rent", period=period) - survey_scenario.inflate(inflator_by_variable=inflator_by_variable, period=period) - rent_after_inflate = survey_scenario.compute_aggregate("rent", period=period) - assert_near( - rent_after_inflate, - inflator * rent_before_inflate, - relative_error_margin=1e-6, - message="Failing inflate with inflator_by_variable: rent_after_inflate = {} != {} = rent_before_inflate ({}) x inflator ({})".format( - rent_after_inflate, - rent_before_inflate * inflator, - rent_before_inflate, - inflator, - ), - ) - - target = 3e5 - target_by_variable = {"salary": target} - salary_before_inflate = survey_scenario.compute_aggregate("salary", period=period) - survey_scenario.inflate(target_by_variable=target_by_variable, period=period) - - salary_after_inflate = survey_scenario.compute_aggregate("salary", period=period) - assert_near( - salary_after_inflate, - target, - relative_error_margin=1e-6, - message="Failing inflate with inflator_by_variable: salary_after_inflate = {} != {} = target (salary_before_inflate = {})\n".format( - salary_after_inflate, - target, - salary_before_inflate, - ), - ) - - 
-@pytest.mark.order(before="test_add_survey_to_collection.py::test_add_survey_to_collection") -def test_compute_pivot_table() -> None: - """Test the compute_pivot_table method of the survey scenario.""" - survey_scenario = create_randomly_initialized_survey_scenario(collection=None) - period = "2017-01" - pivot_table = survey_scenario.compute_pivot_table( - columns=["age"], values=["salary"], period=period, simulation="baseline" - ) - - assert pivot_table.index == "salary" - assert pivot_table.values.round() == 21748 - - del survey_scenario.weight_variable_by_entity - survey_scenario.set_weight_variable_by_entity() - pivot_table = survey_scenario.compute_pivot_table( - columns=["age"], values=["salary"], period=period, simulation="baseline" - ) - - assert pivot_table.values.round() == 13570.0 - - -def test_compute_quantile() -> List[float]: - """Test the compute_quantiles method of the survey scenario.""" - survey_scenario = create_randomly_initialized_survey_scenario() - period = "2017-01" - quintiles = survey_scenario.compute_quantiles( - variable="salary", - nquantiles=5, - period=period, - weighted=False, - simulation="baseline", - ) - return quintiles - - -if __name__ == "__main__": - import sys - - log = logging.getLogger(__name__) - logging.basicConfig(level=logging.DEBUG, stream=sys.stdout) - quintiles = test_compute_quantile() - # pivot_table = test_compute_pivot_table() - # test_inflate() - # test_create_data_frame_by_entity() diff --git a/openfisca_survey_manager/tests/test_summarize_variables.py b/openfisca_survey_manager/tests/test_summarize_variables.py deleted file mode 100644 index 085b0f29..00000000 --- a/openfisca_survey_manager/tests/test_summarize_variables.py +++ /dev/null @@ -1,22 +0,0 @@ -import logging - -from openfisca_country_template import CountryTaxBenefitSystem - -from openfisca_survey_manager.tests.test_scenario import create_randomly_initialized_survey_scenario - -log = logging.getLogger(__name__) - - -tax_benefit_system = 
CountryTaxBenefitSystem() - - -def test_summarize_variable(): - survey_scenario = create_randomly_initialized_survey_scenario() - survey_scenario.summarize_variable(variable="rent", force_compute=True) - survey_scenario.summarize_variable(variable="housing_occupancy_status", force_compute=True) - - -if __name__ == "__main__": - # log = logging.getLogger(__name__) - # logging.basicConfig(level = logging.DEBUG, stream = sys.stdout) - test_summarize_variable() diff --git a/openfisca_survey_manager/tests/test_surveys.py b/openfisca_survey_manager/tests/test_surveys.py deleted file mode 100644 index f1eb20af..00000000 --- a/openfisca_survey_manager/tests/test_surveys.py +++ /dev/null @@ -1,74 +0,0 @@ -import os - -from openfisca_survey_manager.paths import openfisca_survey_manager_location -from openfisca_survey_manager.survey_collections import SurveyCollection -from openfisca_survey_manager.surveys import Survey - - -def test_survey_parquet(): - name = "fake" - data_dir = os.path.join( - openfisca_survey_manager_location, - "openfisca_survey_manager", - "tests", - "data_files", - ) - - survey_collection = SurveyCollection( - name=name, config_files_directory=data_dir, json_file_path=os.path.join(data_dir, "fake.json") - ) - - saved_fake_survey_hdf5_file_path = os.path.join(data_dir, "fake.hdf5") - saved_fake_survey_file_path = os.path.join(data_dir, "test.parquet") - survey = Survey( - hdf5_file_path=saved_fake_survey_hdf5_file_path, - name="fake_survey", - sas_files=[saved_fake_survey_file_path], - survey_collection=survey_collection, - ) - survey.insert_table(name="test_parquet") - survey.fill_store(source_format="parquet") - - -def test_survey(): - name = "fake" - data_dir = os.path.join( - openfisca_survey_manager_location, - "openfisca_survey_manager", - "tests", - "data_files", - ) - - survey_collection = SurveyCollection( - name=name, config_files_directory=data_dir, json_file_path=os.path.join(data_dir, "fake.json") - ) - - saved_fake_survey_hdf5_file_path = 
os.path.join(data_dir, "fake.hdf5") - saved_fake_survey_file_path = os.path.join(data_dir, "help.sas7bdat") - survey = Survey( - hdf5_file_path=saved_fake_survey_hdf5_file_path, - name="fake_survey", - sas_files=[saved_fake_survey_file_path], - survey_collection=survey_collection, - ) - survey.insert_table(name="help") - survey.fill_store(source_format="sas") - - -def test_survey_load(): - survey_name = "test_set_table_in_survey_2021" - collection = "fake" - data_dir = os.path.join( - openfisca_survey_manager_location, - "openfisca_survey_manager", - "tests", - "data_files", - ) - survey_collection = SurveyCollection.load(collection=collection, config_files_directory=data_dir) - survey = survey_collection.get_survey(survey_name) - for table_name, _ in survey.tables.items(): - assert table_name == "foyer_2021" - - -if __name__ == "__main__": - test_survey() diff --git a/openfisca_survey_manager/tests/test_tax_benefit_system_asof.py b/openfisca_survey_manager/tests/test_tax_benefit_system_asof.py deleted file mode 100644 index 994f7f44..00000000 --- a/openfisca_survey_manager/tests/test_tax_benefit_system_asof.py +++ /dev/null @@ -1,46 +0,0 @@ -from openfisca_core import periods -from openfisca_core.parameters import ParameterNode, Scale -from openfisca_country_template import CountryTaxBenefitSystem - -from openfisca_survey_manager.utils import parameters_asof, variables_asof - - -def check_max_instant_leaf(sub_parameter, instant): - for parameter_at_instant in sub_parameter.values_list: - assert periods.instant(parameter_at_instant.instant_str) <= instant, ( - f"Error for {sub_parameter.name}: \n {sub_parameter.values_list}" - ) - - -def check_max_instant(parameters, instant): - for _, sub_parameter in parameters.children.items(): - if isinstance(sub_parameter, ParameterNode): - check_max_instant(sub_parameter, instant) - else: - if isinstance(sub_parameter, Scale): - for bracket in sub_parameter.brackets: - threshold = bracket.children["threshold"] - rate = 
bracket.children["rate"] - check_max_instant_leaf(threshold, instant) - check_max_instant_leaf(rate, instant) - else: - check_max_instant_leaf(sub_parameter, instant) - - -def test_parameters_as_of(): - tax_benefit_system = CountryTaxBenefitSystem() - parameters = tax_benefit_system.parameters - instant = periods.instant("2012-12-31") - parameters_asof(parameters, instant) - check_max_instant(parameters, instant) - - -def test_variables_as_of(): - tax_benefit_system = CountryTaxBenefitSystem() - instant = periods.instant("2015-12-31") - variables_asof(tax_benefit_system, instant) - - -if __name__ == "__main__": - test_parameters_as_of() - test_variables_as_of() diff --git a/openfisca_survey_manager/tests/test_top_bottom_share.py b/openfisca_survey_manager/tests/test_top_bottom_share.py deleted file mode 100644 index 05497b56..00000000 --- a/openfisca_survey_manager/tests/test_top_bottom_share.py +++ /dev/null @@ -1,17 +0,0 @@ -import numpy as np - -from openfisca_survey_manager.statshelpers import bottom_share, top_share - -size = 1000 -x = np.ones(size) + np.random.uniform(0, 0.00000001, size) - - -def test_bottom_share(): - np.testing.assert_almost_equal( - bottom_share(x, 0.4), - 0.4, - ) - - -def test_to_share(): - np.testing.assert_almost_equal(top_share(x, 0.1), 0.1) diff --git a/openfisca_survey_manager/tests/test_write_parquet.py b/openfisca_survey_manager/tests/test_write_parquet.py deleted file mode 100644 index 06091721..00000000 --- a/openfisca_survey_manager/tests/test_write_parquet.py +++ /dev/null @@ -1,144 +0,0 @@ -import os -import unittest - -import pandas as pd - -from openfisca_survey_manager.paths import openfisca_survey_manager_location -from openfisca_survey_manager.scripts.build_collection import build_survey_collection - - -class TestWriteParquet(unittest.TestCase): - def test_write_parquet_one_file_per_entity(self): - data_dir = os.path.join( - openfisca_survey_manager_location, - "openfisca_survey_manager", - "tests", - "data_files", - 
"test_parquet_collection", - ) - os.makedirs(data_dir, exist_ok=True) - df = pd.DataFrame( - { - "household_id": [1, 2, 3, 4], - "rent": [1100, 2200, 3_000, 4_000], - "household_weight": [550, 1500, 700, 200], - "accommodation_size": [50, 100, 150, 200], - } - ) - filepath = os.path.join(data_dir, "household.parquet") - df.to_parquet(filepath) - df = pd.DataFrame( - { - "person_id": [11, 22, 33, 44, 55], - "household_id": [1, 1, 2, 3, 4], - "salary": [1300, 20, 3400, 4_000, 5_000], - "person_weight": [500, 50, 1500, 700, 200], - "household_role_index": [0, 1, 0, 0, 0], - } - ) - filepath = os.path.join(data_dir, "person.parquet") - df.to_parquet(filepath) - df2 = pd.read_parquet(filepath) - assert df.equals(df2) - - def test_write_parquet_multiple_files_per_entity(self): - collection_name = "test_multiple_parquet_collection" - data_dir = os.path.join( - openfisca_survey_manager_location, - "openfisca_survey_manager", - "tests", - "data_files", - collection_name, - ) - os.makedirs(os.path.join(data_dir, "person"), exist_ok=True) - os.makedirs(os.path.join(data_dir, "household"), exist_ok=True) - # Create a file config.ini in the current directory - config = os.path.join( - data_dir, - "config.ini", - ) - with open(config, "w") as f: - f.write( - f""" - [collections] - collections_directory = {data_dir} - {collection_name} = {data_dir}/{collection_name}.json - - [data] - output_directory = {data_dir} - tmp_directory = /tmp - """ - ) - # Create a file test_parquet_collection.json in the current directory - json_file = os.path.join( - data_dir, - collection_name + ".json", - ) - with open(json_file, "w") as f: - f.write( - """ - { - "label": "Test parquet collection", - "name": "collection_name", - "surveys": { - } - } - """.replace("collection_name", collection_name) - ) - df = pd.DataFrame( - { - "household_id": [1, 2], - "rent": [1100, 2200], - "household_weight": [550, 1500], - "accommodation_size": [50, 100], - } - ) - filepath = os.path.join(data_dir, 
"household", "household-0.parquet") - df.to_parquet(filepath) - df = pd.DataFrame( - { - "household_id": [3, 4], - "rent": [3_000, 4_000], - "household_weight": [700, 200], - "accommodation_size": [150, 200], - } - ) - filepath = os.path.join(data_dir, "household", "household-1.parquet") - df.to_parquet(filepath) - df = pd.DataFrame( - { - "person_id": [11, 22], - "household_id": [1, 1], - "salary": [1300, 20], - "person_weight": [500, 50], - "household_role_index": [0, 1], - } - ) - filepath = os.path.join(data_dir, "person", "person-0.parquet") - df.to_parquet(filepath) - df = pd.DataFrame( - { - "person_id": [33, 44, 55], - "household_id": [2, 3, 4], - "salary": [3400, 4_000, 5_000], - "person_weight": [1500, 700, 200], - "household_role_index": [0, 0, 0], - } - ) - filepath = os.path.join(data_dir, "person", "person-1.parquet") - df.to_parquet(filepath) - df2 = pd.read_parquet(filepath) - assert df.equals(df2) - collection_name = collection_name - data_directory_path_by_survey_suffix = { - "2020": os.path.join(data_dir), - } - build_survey_collection( - collection_name=collection_name, - data_directory_path_by_survey_suffix=data_directory_path_by_survey_suffix, - replace_metadata=False, - replace_data=False, - source_format="parquet", - config_files_directory=data_dir, - keep_original_parquet_file=True, - ) diff --git a/openfisca_survey_manager/utils.py b/openfisca_survey_manager/utils.py deleted file mode 100644 index aaae69d3..00000000 --- a/openfisca_survey_manager/utils.py +++ /dev/null @@ -1,293 +0,0 @@ -import logging -import os -from typing import List, Optional - -import pandas as pd -from openfisca_core import periods -from openfisca_core.parameters import ParameterNode, Scale - -from openfisca_survey_manager.survey_collections import SurveyCollection - -log = logging.getLogger(__name__) - - -def do_nothing(*args, **kwargs): - return None - - -def inflate_parameters( - parameters, inflator, base_year, last_year=None, ignore_missing_units=False, 
start_instant=None, round_ndigits=2 -): - """ - Inflate a Parameter node or a Parameter lead according for the years between base_year and last_year - ::parameters:: af Parameter node or a Parameter leaf - ::inflator:: rate used to inflate the parameter. The rate is unique for all the years - ::base_year:: base year of the parameter - ::last_year:: last year of inflation - ::ignore_missing_units:: if True, a parameter leaf without unit in metadata will not be inflate - ::start_instant :: Instant of the year when the update should start, if None will be Junuary 1st - ::round_ndigits:: Number of digits to keep in the rounded result - """ - - if (last_year is not None) and (last_year > base_year + 1): - for year in range( - base_year + 1, last_year + 1 - ): # For each year we inflate with the same inflator rate. Example : base_year + 1 : paramaters = paramaters * inflator ; base_year + 2 : parameters = parameters * inflator * inflator - inflate_parameters( - parameters, - inflator, - year - 1, - last_year=year, - ignore_missing_units=ignore_missing_units, - start_instant=start_instant, - round_ndigits=round_ndigits, - ) - - else: - if last_year is None: - last_year = base_year + 1 - - assert last_year == base_year + 1 - - if isinstance(parameters, ParameterNode): - for sub_parameter in parameters.children.values(): - inflate_parameters( - sub_parameter, - inflator, - base_year, - last_year, - ignore_missing_units=ignore_missing_units, - start_instant=start_instant, - round_ndigits=round_ndigits, - ) - else: - acceptable_units = [ - "rate_unit", - "threshold_unit", - "unit", - ] - if ignore_missing_units: - if not hasattr(parameters, "metadata"): - return - # Empty intersection: not unit present in metadata - if not bool(set(parameters.metadata.keys()) & set(acceptable_units)): - return - assert hasattr(parameters, "metadata"), "{} doesn't have metadata".format(parameters.name) - unit_types = set(parameters.metadata.keys()).intersection(set(acceptable_units)) - assert 
unit_types, ( - "No admissible unit in metadata for parameter {}. You may consider using the option 'ignore_missing_units' from the inflate_paramaters() function.".format( - parameters.name - ) - ) - if len(unit_types) > 1: - assert unit_types == {"threshold_unit", "rate_unit"}, ( - "Too much admissible units in metadata for parameter {}".format(parameters.name) - ) - unit_by_type = {unit_type: parameters.metadata[unit_type] for unit_type in unit_types} - for unit_type in unit_by_type: - if parameters.metadata[unit_type].startswith("currency"): - inflate_parameter_leaf( - parameters, - base_year, - inflator, - unit_type=unit_type, - start_instant=start_instant, - round_ndigits=round_ndigits, - ) - - -def inflate_parameter_leaf(sub_parameter, base_year, inflator, unit_type="unit", start_instant=None, round_ndigits=2): - """ - Inflate a Parameter leaf according to unit type for the year after base_year - ::sub_parameter:: af Parameter leaf - ::base_year:: base year of the parameter - ::inflator:: rate used to inflate the parameter - ::unit_type:: unit supposed by default. 
Other admissible unit types are threshold_unit and rate_unit - ::start_instant:: Instant of the year when the update should start, if None will be Junuary 1st - ::round_ndigits:: Number of digits to keep in the rounded result - """ - if isinstance(sub_parameter, Scale): - if unit_type == "threshold_unit": - for bracket in sub_parameter.brackets: - threshold = bracket.children["threshold"] - inflate_parameter_leaf( - threshold, base_year, inflator, start_instant=start_instant, round_ndigits=round_ndigits - ) - return - else: - # Remove new values for year > base_year - kept_instants_str = [ - parameter_at_instant.instant_str - for parameter_at_instant in sub_parameter.values_list - if periods.instant(parameter_at_instant.instant_str).year <= base_year - ] - if not kept_instants_str: - return - - last_admissible_instant_str = max(kept_instants_str) - sub_parameter.update(start=last_admissible_instant_str, value=sub_parameter(last_admissible_instant_str)) - if start_instant is not None: - assert periods.instant(start_instant).year == (base_year + 1), ( - "Year of start_instant should be base_year + 1" - ) - value = ( - round(sub_parameter("{}-12-31".format(base_year)) * (1 + inflator), round_ndigits) - if sub_parameter("{}-12-31".format(base_year)) is not None - else None - ) - sub_parameter.update( - start=start_instant, - value=value, - ) - else: - restricted_to_base_year_value_list = [ - parameter_at_instant - for parameter_at_instant in sub_parameter.values_list - if periods.instant(parameter_at_instant.instant_str).year == base_year - ] - # When value is changed in the base year - if restricted_to_base_year_value_list: - for parameter_at_instant in reversed(restricted_to_base_year_value_list): - if parameter_at_instant.instant_str.startswith(str(base_year)): - value = ( - round(parameter_at_instant.value * (1 + inflator), round_ndigits) - if parameter_at_instant.value is not None - else None - ) - sub_parameter.update( - 
start=parameter_at_instant.instant_str.replace(str(base_year), str(base_year + 1)), - value=value, - ) - # Or use the value at that instant even when it is defined earlier tahn the base year - else: - value = ( - round(sub_parameter("{}-12-31".format(base_year)) * (1 + inflator), round_ndigits) - if sub_parameter("{}-12-31".format(base_year)) is not None - else None - ) - sub_parameter.update( - start="{}-01-01".format(base_year + 1), - value=value, - ) - - -def asof(tax_benefit_system, instant): - parameters = tax_benefit_system.parameters - parameters_asof(parameters, instant) - variables_asof(tax_benefit_system, instant) - - -def leaf_asof(sub_parameter, instant): - kept_instants_str = [ - parameter_at_instant.instant_str - for parameter_at_instant in sub_parameter.values_list - if periods.instant(parameter_at_instant.instant_str) <= instant - ] - if not kept_instants_str: - sub_parameter.values_list = [] - return - - last_admissible_instant_str = max(kept_instants_str) - sub_parameter.update(start=last_admissible_instant_str, value=sub_parameter(last_admissible_instant_str)) - return - - -def parameters_asof(parameters, instant): - if isinstance(instant, str): - instant = periods.instant(instant) - assert isinstance(instant, periods.Instant) - - for sub_parameter in parameters.children.values(): - if isinstance(sub_parameter, ParameterNode): - parameters_asof(sub_parameter, instant) - else: - if isinstance(sub_parameter, Scale): - for bracket in sub_parameter.brackets: - threshold = bracket.children["threshold"] - rate = bracket.children.get("rate") - amount = bracket.children.get("amount") - leaf_asof(threshold, instant) - if rate: - leaf_asof(rate, instant) - if amount: - leaf_asof(amount, instant) - else: - leaf_asof(sub_parameter, instant) - - -def variables_asof(tax_benefit_system, instant, variables_list=None): - if isinstance(instant, str): - instant = periods.instant(instant) - assert isinstance(instant, periods.Instant) - - if variables_list is None: - 
variables_list = tax_benefit_system.variables.keys() - - for variable_name, variable in tax_benefit_system.variables.items(): - if variable_name in variables_list: - formulas = variable.formulas - for instant_str in list(formulas.keys()): - if periods.instant(instant_str) > instant: - del formulas[instant_str] - - if variable.end is not None and periods.instant(variable.end) >= instant: - variable.end = None - - -def stata_files_to_data_frames(data, period=None): - assert period is not None - period = periods.period(period) - - stata_file_by_entity = data.get("stata_file_by_entity") - if stata_file_by_entity is None: - return - - variables_from_stata_files = [] - input_data_frame_by_entity_by_period = {} - input_data_frame_by_entity_by_period[periods.period(period)] = input_data_frame_by_entity = {} - for entity, file_path in stata_file_by_entity.items(): - assert os.path.exists(file_path), "Invalid file path: {}".format(file_path) - entity_data_frame = input_data_frame_by_entity[entity] = pd.read_stata(file_path) - variables_from_stata_files += list(entity_data_frame.columns) - data["input_data_frame_by_entity_by_period"] = input_data_frame_by_entity_by_period - - return variables_from_stata_files - - -def load_table( - config_files_directory, - variables: Optional[List] = None, - collection: Optional[str] = None, - survey: Optional[str] = None, - input_data_survey_prefix: Optional[str] = None, - data_year=None, - table: Optional[str] = None, - batch_size=None, - batch_index=0, - filter_by=None, -) -> pd.DataFrame: - """ - Load values from table from a survey in a collection. - - Args: - config_files_directory : _description_. - variables (List, optional): List of the variables to retrieve in the table. Defaults to None to get all the variables. - collection (str, optional): Collection. Defaults to None. - survey (str, optional): Survey. Defaults to None. - input_data_survey_prefix (str, optional): Prefix of the survey to be combined with data year. 
Defaults to None. - data_year (_type_, optional): Year of the survey data. Defaults to None. - table (str, optional): Table. Defaults to None. - - Returns: - pandas.DataFrame: A table with the retrieved variables - """ - survey_collection = SurveyCollection.load(collection=collection, config_files_directory=config_files_directory) - survey = survey if survey is not None else f"{input_data_survey_prefix}_{data_year}" - survey_ = survey_collection.get_survey(survey) - log.debug("Loading table {} in survey {} from collection {}".format(table, survey, collection)) - if batch_size: - return survey_.get_values( - table=table, variables=variables, batch_size=batch_size, batch_index=batch_index, filter_by=filter_by - ) - else: - return survey_.get_values(table=table, variables=variables, filter_by=filter_by) diff --git a/openfisca_survey_manager/variables.py b/openfisca_survey_manager/variables.py deleted file mode 100644 index f91cbcc3..00000000 --- a/openfisca_survey_manager/variables.py +++ /dev/null @@ -1,88 +0,0 @@ -import logging - -from numpy import arange -from openfisca_core.model_api import ADD, YEAR, Variable, where - -from openfisca_survey_manager.statshelpers import mark_weighted_percentiles, weightedcalcs_quantiles - -log = logging.getLogger(__name__) - - -def create_quantile(x, nquantiles, weight_variable, entity_name): - class quantile(Variable): - value_type = int - entity = entity_name - label = "Quantile" - definition_period = YEAR - - def formula(entity, period): - try: - variable = entity(x, period) - except ValueError as e: - log.debug(f"Caught {e}") - log.debug(f"Computing on whole period {period} via the ADD option") - variable = entity(x, period, options=[ADD]) - - weight = entity(weight_variable, period) - labels = arange(1, nquantiles + 1) - method = 2 - if len(weight) == 1: - return weight * 0 - quantile, values = mark_weighted_percentiles(variable, labels, weight, method, return_quantiles=True) - del values - return quantile - - return quantile 
- - -def quantile(q, variable, weight_variable=None, filter_variable=None): - """ - Return quantile of a variable with weight provided by a specific wieght variable potentially filtered - """ - - def formula(entity, period): - value = entity(variable, period) - if weight_variable is not None: - weight = entity(weight_variable, period) - weight = entity.filled_array(1) - if filter_variable is not None: - filter_value = entity(filter_variable, period) - weight = filter_value * weight - - labels = arange(1, q + 1) - quantile, _ = weightedcalcs_quantiles( - value, - labels, - weight, - return_quantiles=True, - ) - if filter_variable is not None: - quantile = where(weight > 0, quantile, -1) - return quantile - - return formula - - -def old_quantile(q, variable, weight_variable=None, filter_variable=None): - def formula(entity, period): - value = entity(variable, period) - if weight_variable is not None: - weight = entity(weight_variable, period) - weight = entity.filled_array(1) - if filter_variable is not None: - filter_value = entity(filter_variable, period) - weight = filter_value * weight - - labels = arange(1, q + 1) - quantile, _ = mark_weighted_percentiles( - value, - labels, - weight, - method=2, # * filter, - return_quantiles=True, - ) - if filter_variable is not None: - quantile = where(weight > 0, quantile, -1) - return quantile - - return formula diff --git a/pyproject.toml b/pyproject.toml deleted file mode 100644 index 54aa7087..00000000 --- a/pyproject.toml +++ /dev/null @@ -1,148 +0,0 @@ -[project] -name = "OpenFisca-Survey-Manager" -version = "4.0.0" -description = "A tool for managing survey/administrative data and import them in OpenFisca" -readme = "README.md" -keywords = ["microsimulation", "tax", "benefit", "rac", "rules-as-code", "survey", "data"] -authors = [ - {name = "OpenFisca Team", email = "contact@openfisca.fr"}, -] -license = "AGPL-3.0-or-later" -classifiers = [ - "Development Status :: 5 - Production/Stable", - "Operating System :: 
POSIX", - "Programming Language :: Python", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Topic :: Scientific/Engineering :: Information Analysis", -] -requires-python = ">= 3.9" -dependencies = [ - 'chardet >=5.1.0, < 6.0', - 'configparser >=5.3.0, < 8.0', - 'humanize >=4.6.0, < 5.0', - 'numpy >=1.24.2, <2.0', - 'openfisca-core >=43.0.0, <44.0.0', - 'pandas >=2.0.3, < 3.0', - 'pyarrow >=13.0.0, < 19.0.0', - 'pyxdg >=0.28, < 0.29', - 'PyYAML >=6.0, < 7.0', - 'tables >=3.8.0, < 4.0', - 'tabulate >=0.9.0, < 0.10.0', - 'weightedcalcs >=0.1.2, < 0.2.0', - 'wquantiles >=0.6, < 0.7', -] - -[project.urls] -Homepage = "https://github.com/openfisca/openfisca-survey-manager" -Repository = "https://github.com/openfisca/openfisca-survey-manager" -Issues = "https://github.com/openfisca/openfisca-survey-manager/issues" -Changelog = "https://github.com/openfisca/openfisca-survey-manager/blob/main/CHANGELOG.md" - -[build-system] -requires = ["setuptools", "setuptools-scm"] -build-backend = "setuptools.build_meta" - -[project.scripts] -# Command-line scripts -build-collection = "openfisca_survey_manager.scripts.build_collection:main" - -[project.optional-dependencies] -matching = [ - # 'feather', - 'rpy2 >=3.5.10, < 4.0' - ] -dev = [ - 'coveralls >=3.3.1, < 5.0', - 'openfisca-country-template >=7.1.5, <8.0.0', - 'pyreadstat >=1.2.1, < 1.3', - 'pytest', # Let OpenFisca-Core decide pytest version - 'pytest-cov', - 'scipy >=1.10.1, < 2.0', - 'pytest-order', - 'ruff', - 'isort', - 'pre-commit', - 'ipdb', - 'pdbpp' -] -casd = [ - 'pytest', - 'scipy >=1.10.1, < 2.0', - 'ruff', - 'isort' -] -sas = [ - 'pyreadstat >=1.2.1, < 1.3', - 'sas7bdat >=2.2.3, < 3.0' -] - -[tool.ruff] -line-length = 119 -extend-exclude = ["openfisca_survey_manager/tests/data_files/*", "openfisca_survey_manager/tests/storage/*"] - -[tool.ruff.lint] -select = ["E", "F", "W", "I001", "B", "A", "C4", "T20", "SIM", "PTH", "N"] 
-ignore = [ - "N801", # Class name should use CapWords convention (ignored for Variable children) - "N805", # First argument of a method should be named `self` (ignored for Variable children) - "E501", # Line too long (will be fixed manually or ignored if unavoidable) - "B018", # Found useless expression (will be fixed manually or ignored if intended) - "N806", # Variable in function should be lowercase (ignored for specific cases like StatMatch, PLCx, PLCy, LCx, LCy, N) - "F401", # Unused imports (will be cleaned up manually) - "F821", # Undefined name (will be fixed manually, likely missing import or instantiation) - "I001", # Import block is un-sorted or un-formatted (will be fixed by isort) - "PTH118", # os.path.join() should be replaced by Path with / operator (will be fixed manually) - "PTH110", # os.path.exists() should be replaced by Path.exists() (will be fixed manually) - "PTH123", # open() should be replaced by Path.open() (will be fixed manually) - "PTH103", # os.makedirs() should be replaced by Path.mkdir(parents=True) (will be fixed manually) - "PTH117", # os.path.isabs() should be replaced by Path.is_absolute() (will be fixed manually) - "PTH112", # os.path.isdir() should be replaced by Path.is_dir() (will be fixed manually) - "PTH208", # Use pathlib.Path.iterdir() instead (will be fixed manually) - "PTH207", # Replace glob with Path.glob or Path.rglob (will be fixed manually) - "PTH113", # os.path.isfile() should be replaced by Path.is_file() (will be fixed manually) - "PTH119", # os.path.basename() should be replaced by Path.name (will be fixed manually) - "PTH120", # os.path.dirname() should be replaced by Path.parent (will be fixed manually) - "PTH206", # Replace .split(os.sep) with Path.parts (will be fixed manually) - "N999", # Invalid module name (ignored for __init__.py files) - "N802", # Function name should be lowercase (ignored for mathematical functions F and F_prime) - "SIM113", # Use enumerate() for index variable in for loop (ignored 
for simple counters) - "SIM115", # Use a context manager for opening files (ignored for compatibility) -] - -[tool.isort] -profile = "black" -line_length = 119 - -[tool.pylint.message_control] -disable = ["all"] -enable = ["C0115", "C0116", "R0401"] -score = ["no"] - -[tool.pytest.ini_options] -addopts = "--doctest-modules --disable-pytest-warnings --showlocals" -testpaths = "openfisca_survey_manager" -doctest_optionflags = "ELLIPSIS IGNORE_EXCEPTION_DETAIL NUMBER NORMALIZE_WHITESPACE" -python_files = ["**/*.py"] - - -[tool.coverage.paths] -source = [ - ".", - "*/site-packages" -] - -[tool.coverage.run] -branch = true -source = ["openfisca_survey_manager"] - -[tool.coverage.report] -fail_under = 75 -show_missing = true -skip_covered = true -skip_empty = true - - -[tool.setuptools.packages.find] -include = ["openfisca_survey_manager*"] diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 02be0b85..00000000 --- a/setup.cfg +++ /dev/null @@ -1,10 +0,0 @@ -[mypy] -ignore_missing_imports = True -install_types = True -non_interactive = True - -[mypy-openfisca_survey_manager.tests.*] -ignore_errors = True - -[mypy-openfisca_survey_manager.scripts.*] -ignore_errors = True diff --git a/uv.lock b/uv.lock deleted file mode 100644 index 3a4b6efc..00000000 --- a/uv.lock +++ /dev/null @@ -1,2333 +0,0 @@ -version = 1 -revision = 1 -requires-python = ">=3.9" -resolution-markers = [ - "python_full_version >= '3.12'", - "python_full_version == '3.11.*'", - "python_full_version == '3.10.*'", - "python_full_version < '3.10'", -] - -[[package]] -name = "asttokens" -version = "3.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/4a/e7/82da0a03e7ba5141f05cce0d302e6eed121ae055e0456ca228bf693984bc/asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7", size = 61978 } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/25/8a/c46dcc25341b5bce5472c718902eb3d38600a903b14fa6aeecef3f21a46f/asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2", size = 26918 }, -] - -[[package]] -name = "blinker" -version = "1.9.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/21/28/9b3f50ce0e048515135495f198351908d99540d69bfdc8c1d15b73dc55ce/blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf", size = 22460 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/10/cb/f2ad4230dc2eb1a74edf38f1a38b9b52277f75bef262d8908e60d957e13c/blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc", size = 8458 }, -] - -[[package]] -name = "blosc2" -version = "2.5.1" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -dependencies = [ - { name = "msgpack", marker = "python_full_version < '3.10'" }, - { name = "ndindex", marker = "python_full_version < '3.10'" }, - { name = "numpy", marker = "python_full_version < '3.10'" }, - { name = "py-cpuinfo", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/f7/60/5bc8601f8ffcd5d8787b346898de8a0b454d031c3e158e3bbc312003984e/blosc2-2.5.1.tar.gz", hash = "sha256:47d5df50e7286edf81e629ece35f87f13f55c13c5e8545832188c420c75d1659", size = 4676483 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b2/75/4511f1f9cea0aad8c5464736720a9de1e762e60e359cde6b7d69186e97b0/blosc2-2.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c861262b7fe317c1614a9b59b6c9edf409532b4a6aaf5b2f4ad0d79c6f800b57", size = 4417584 }, - { url = "https://files.pythonhosted.org/packages/c0/8a/c7ce322f90750eb7fdde0948e5c6b9c47ed03e276f2481b701f9d2360d3a/blosc2-2.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:8f35b5d69a7a41e9d5054297d2540c25f8af5ea3c62e4a80ca7359292d783c04", size = 3471945 }, - { url = "https://files.pythonhosted.org/packages/32/72/32bbaf583eea51b27c91ddb145de017d970ec0a835ec70e966925b00c9cf/blosc2-2.5.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:546fa39f397dd54b13d7c42a4f890afaf16c70fe478712070942d464c440ce03", size = 4210024 }, - { url = "https://files.pythonhosted.org/packages/a5/8c/d4ab68a40004f93c1fd20dceb1899e54477425542bffa1f5ebab2647956f/blosc2-2.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5455af77e7e94159bb4966cae554f232ca2d52bb80cd3f878ecef39cf569da2a", size = 4374211 }, - { url = "https://files.pythonhosted.org/packages/16/7d/c81b1caa0aded9f3e63c4dbab7a334681ded12ad80a18de34e7199939df5/blosc2-2.5.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b4dc4f595bf95c350c50bb77a8749cdd08a5dc2bdf3bdb18983d49a52d60b595", size = 4467025 }, - { url = "https://files.pythonhosted.org/packages/9b/cd/56150f9367516f039b62923a5cb243771e2a1f440de0232b195f6e79f1ab/blosc2-2.5.1-cp310-cp310-win32.whl", hash = "sha256:873483bd5c6afb8d139039180ee57b74373232e87b032cb80389fd8bb883ea8e", size = 1948573 }, - { url = "https://files.pythonhosted.org/packages/67/ff/0cf994e434b5ae860546cfe6941c58bdcdc27ed10009caffac9a29b66651/blosc2-2.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:d5a7ef00b82fbca069e949335f9c92ce7cbe2039a9fa2e2bd4f5f418043d6262", size = 2343858 }, - { url = "https://files.pythonhosted.org/packages/c3/f4/25a82b7191109beb0fc20c580f35667a2e86087ebac2f6a9e50469f618d7/blosc2-2.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:da826d42d616f8a939f27e1501b40e764fded66bc80177eeaefcebdbf3b3afb8", size = 4419509 }, - { url = "https://files.pythonhosted.org/packages/03/50/b1d2bad6b9f77c3540b511b5dee77f213c8b179e942bbc34f5c410b92ede/blosc2-2.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ae2e0c5dc8561a6b17842ee4320b49621434c20e622c9e9f5c67c9c6eb3b06a3", size = 3472870 }, - 
{ url = "https://files.pythonhosted.org/packages/17/26/d34d521fc60b164738d986144642c58675bcdc344cc9fbb8c08e58bb11ab/blosc2-2.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af3cab9c12a4364c643266ee7d9583b526c0f484a291d72ec6efb09ea7ffbbf9", size = 4208463 }, - { url = "https://files.pythonhosted.org/packages/2a/00/35169a0765044be69a3bfb73441ff8b9ea05d92d4660be622bbf90c46913/blosc2-2.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f03a723130cf07e4309fe34b1360c868f4376e862f8ff664eb40d019fdd3f6", size = 4371942 }, - { url = "https://files.pythonhosted.org/packages/8f/9b/923e29af25f4c34b5b331829a87bc8397b18e8a361551230b70e6b0a785a/blosc2-2.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0fd109eef815ea1e50fde4f676388aa2f3bb5543502d125fb63f16ec7a014464", size = 4468495 }, - { url = "https://files.pythonhosted.org/packages/9f/39/21d6d1dc00d765f5c1671f8c0b323096f0dde360e8e1d8005160679b4831/blosc2-2.5.1-cp311-cp311-win32.whl", hash = "sha256:1a3edc3256bad04d3db30c9de7eac3a820f96e741fc754cdabb6a9991e5c37e8", size = 1948151 }, - { url = "https://files.pythonhosted.org/packages/d1/48/7d4be4e57342ec60c05d11478257b235cf33dc96c31049fb47e6ae98b652/blosc2-2.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:e7499e277c13334d54f84e74f429f32341f99f7b978deaf9a7c2e963904cb48c", size = 2344506 }, - { url = "https://files.pythonhosted.org/packages/5b/4d/00824754a3b5b5c6ffb92a043b5122be6202039fc76a25fa913fe0d4e235/blosc2-2.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ab849d3adaeb035f2f16cf495cff1792b28d58dfb3de21b9459ee355c6bb8df3", size = 4414447 }, - { url = "https://files.pythonhosted.org/packages/9b/21/9a887c9fdc46a5cfe4aa250eac4ed4f18213bf1996e113c1d1c662e31678/blosc2-2.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dd66e60dafcc93d4c1f815d726d76f9fb067ecc9106a6c661010e709135c79ce", size = 3470989 }, - { url = 
"https://files.pythonhosted.org/packages/92/8d/b13ea33ea4d5da344d1170638b2d7b3cc63a921b97f9a8184128fd78aacd/blosc2-2.5.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb5fcd1775b3884d9825aa51fb45253f45cfa21c77f4135fad5dc5db710c2a34", size = 4190080 }, - { url = "https://files.pythonhosted.org/packages/33/bf/52c8385aa71ed8c42296016d48c80dcc41dd005e951a83970bde4e6d4ff2/blosc2-2.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19f79071a336fcf1eda01cd0171291a4ab82b16cf9a15d2b4d26c010146f13b5", size = 4355536 }, - { url = "https://files.pythonhosted.org/packages/78/21/f1815f711f98c04eba5116d55041b2f64e17e8a9437d7c832d1168ed2fa8/blosc2-2.5.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:956a63231f1b448803e9b4bc3e704ea424c89fc14418d99093472c74f19c19e1", size = 4448554 }, - { url = "https://files.pythonhosted.org/packages/a1/e6/e05e987db27986ca8bdaaf442b94212e116a1eb937a8cd35250548704aa6/blosc2-2.5.1-cp312-cp312-win32.whl", hash = "sha256:5856e57e0e81f9018f1a12e803b9f768fa5533175092d72d165ac60069c7d2ab", size = 1940131 }, - { url = "https://files.pythonhosted.org/packages/29/80/871cf959e5e0d3d2a177caf7b9f4b714850a50c33bb5cedd86a95c6e05b2/blosc2-2.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:585d780c5e85f251dec72b75a47666e4a261dbfe1d228769bca545e9fe07f480", size = 2338243 }, - { url = "https://files.pythonhosted.org/packages/c9/4b/799dcec670a1d511d11a40e919241c26a273cfd2305a4f3fe7444c39758a/blosc2-2.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0cb9a6ac1abc466c12bdc90052f17545512de8f854e672a1ea4d2b40292323f5", size = 4417609 }, - { url = "https://files.pythonhosted.org/packages/07/cb/e97de5b2c40b6c2106d5b68c1d823707a16386fc08781a6c7c609000866e/blosc2-2.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3def4650faa1db43143d821228ef58797108cc95d6698c4b1581909cc2b149ca", size = 3471971 }, - { url = 
"https://files.pythonhosted.org/packages/65/c4/0c9740c4f5efb7adde87ed8b5af88c171a3ec007df912cc2fbfb2736963d/blosc2-2.5.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf6efecc1a22da26c73ff5c60d0dc086db1e7edcceb6b360dd193cda893bef28", size = 4209984 }, - { url = "https://files.pythonhosted.org/packages/a6/e2/a1367ad174006e5625a1c8ef1b526fc135adc6f546bfc46264ac8e508ebe/blosc2-2.5.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b473472b977b770aab3bf20d0feeee84ecd5bb8b15a675287e090ce818c1cd40", size = 4374484 }, - { url = "https://files.pythonhosted.org/packages/62/88/a185ab2a50b930011396b601ae8e82e83dfa24597e5f1fd91f483f178a94/blosc2-2.5.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7afe59d35d93bf8da7db8de43f4d8aef277514de43953c1e5e416ca839b9023a", size = 4466917 }, - { url = "https://files.pythonhosted.org/packages/b5/7c/6a7d46a6094a100e1bdfba7e1df584971efea00732006fdddcf0320d4846/blosc2-2.5.1-cp39-cp39-win32.whl", hash = "sha256:4315ae8d467fe91efa0dbe22004e967008f5fe021ebb3945518f5213d7c4511f", size = 1948282 }, - { url = "https://files.pythonhosted.org/packages/5c/c1/f80115c66a181e2ff18027818d3e1f37a3d5133b350b3e25189e53e28cd8/blosc2-2.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:73eb5e569a91fbe67f7dd78efe6a1ca9a54afff2c847db5dfa675bfd6a424f60", size = 2343778 }, -] - -[[package]] -name = "blosc2" -version = "3.5.1" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.12'", - "python_full_version == '3.11.*'", - "python_full_version == '3.10.*'", -] -dependencies = [ - { name = "msgpack", marker = "python_full_version >= '3.10'" }, - { name = "ndindex", marker = "python_full_version >= '3.10'" }, - { name = "numexpr", version = "2.11.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10' and platform_machine != 'wasm32'" }, - { name = "numpy", marker = "python_full_version >= '3.10'" }, - { name = 
"platformdirs", marker = "python_full_version >= '3.10'" }, - { name = "py-cpuinfo", marker = "python_full_version >= '3.10' and platform_machine != 'wasm32'" }, - { name = "requests", marker = "python_full_version >= '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/10/a0/1abec67127972fabf07bc1b7208324399fb25576ba3a2104738dbd40fc8a/blosc2-3.5.1.tar.gz", hash = "sha256:5d72f7a9a8b3b523c588be9d66e9e7f2463483716c4c01e5056c1f7e37167f85", size = 3653626 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/55/4c/89aa228bf6e64b0daa1b7e485d404601e8e136c3834f87909f2803cb60fa/blosc2-3.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ff031df2707cd7143b7c744bf10c55932151001cc5ff9d79a899714714732a2d", size = 4001801 }, - { url = "https://files.pythonhosted.org/packages/fe/d3/22581f254299c27a81a2fb2356665e15fc0e7186f626e508d82514d22b26/blosc2-3.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0fc7976024a89d490911ce9ef78968369acca908f0f771cf00e072a457ee342d", size = 3376036 }, - { url = "https://files.pythonhosted.org/packages/24/f1/c4112715dece9e0eb837f63c53ce1cd5e889afbe19712222ef0a5da3690d/blosc2-3.5.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0fcc6c64315121f02555a398e6fbaf4b71edc39a4ca7567e089ce1af7288e9ed", size = 4291062 }, - { url = "https://files.pythonhosted.org/packages/84/a2/8019187fb5a1789203dd96eea0d4311bc56f3386ddfe9e723357acb8a7bd/blosc2-3.5.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d3174273decf69b9e61b2a57d4cad5bf651de04d22526568f5a5e3036631831f", size = 4433984 }, - { url = "https://files.pythonhosted.org/packages/a1/d7/e8e98d5a2252d8aafd84a4a1431bd44612159ce339dd9a2199087998d8ea/blosc2-3.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:a4dcaf581cd7ec91d91f379e7e3dc7d5dd6c300e88ae162b04b106d21f10f5c1", size = 2224498 }, - { url = 
"https://files.pythonhosted.org/packages/fa/18/2001026c7bdac8fb3d9df6f31d3c4e2a2f22f73870509ddd163e36b0e521/blosc2-3.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ebf860878dd7a4fb41dcb90b1c6b3c5db7e34dc64ab51bea3679534af4a54a4a", size = 4004794 }, - { url = "https://files.pythonhosted.org/packages/a4/4e/0f94eb790fb86bfd0f4d92496bc515db849987a7b9cbb10c49a9cf3a0037/blosc2-3.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1a9fc80970d5bb135bd9579db26d96ec79739bd1f187bbaf52e04102e7c664de", size = 3379124 }, - { url = "https://files.pythonhosted.org/packages/ee/4e/d3d80033a20e97a1d5c3eccfecce343144407334e8b7f17b1532987a4974/blosc2-3.5.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fcf6658546da153fc8537dbf8edab1b22d6cedba32ec8c2a9a4165e450375b28", size = 4295187 }, - { url = "https://files.pythonhosted.org/packages/a6/31/171ed90aa74936f1335cb471d95915232328b280d4888e1a8c9cca1fd3f0/blosc2-3.5.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d77e0abe67675ec21ce812a4da3d01406dd4942c8929a5a78d91b2f558dd8c9e", size = 4437993 }, - { url = "https://files.pythonhosted.org/packages/b4/59/e3fb8114b369a6f7467e642e7e9818fb77ca47a177de767eb2414f8678ec/blosc2-3.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:0e6f5ccfb817363191577dd0e40839788f664891908b82c2dcd969fb37e0e954", size = 2222769 }, - { url = "https://files.pythonhosted.org/packages/d4/ab/bbc1771dc66815277c3d3e316d0f2864209bc15421608ee13ce955b9de3b/blosc2-3.5.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:780213c917d9ac28b52a09ac82baa761262e8421688937b67f6516987a96fc58", size = 4012189 }, - { url = "https://files.pythonhosted.org/packages/20/84/dafbb363539313c5d5e2f4c082c42a652c5767831e7e1fe77eb509aed73e/blosc2-3.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6e3d1d0885e955e184efae81168448106644d5a0a60c0911b98e94fe9d756fa9", size = 3372462 }, - { url = 
"https://files.pythonhosted.org/packages/07/7f/8e77b054ce2eff7b7c9810378cfcc78f73a4e45c2bf122833395f5dce61d/blosc2-3.5.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d7c23e48414283dcfa5676abc6a4be9fb92a7b372a20f7acf90438abc69a746e", size = 4278243 }, - { url = "https://files.pythonhosted.org/packages/a6/fd/df0d0c7aec2d135b9c8df9d3d8136c84d614d04595f051b1f5b8aeab238d/blosc2-3.5.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5df464652ddcae9ef8d8b3354ad224b1e147187dde5b6470e0e9f95bec328756", size = 4420536 }, - { url = "https://files.pythonhosted.org/packages/6b/64/a7497c8ba3aceed462d9cbe8227aeee20fbf77415702a0ced11a03fd0d5a/blosc2-3.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:530965c444186bfb92a4406ab4c2d801939df35a5c58658131000f68ce1a37ec", size = 2212429 }, - { url = "https://files.pythonhosted.org/packages/c1/17/38adc448a44c8d0e6f3700de4c072d5cbf3cdd7dffdfe62ccf179c985641/blosc2-3.5.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:44b4e51fd0b5a7e6ccae8053e133037f5984f6990a1e9b787376f4ec42aaf5bf", size = 4010715 }, - { url = "https://files.pythonhosted.org/packages/0c/3b/bd2803d030c204984f8b14ce9919bcb67b6582141898a28887ae1fa58d80/blosc2-3.5.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b59805aa56ff1f9e6e4ba2f293e54cdf356b64db983bb019dbdbac72f88f2e38", size = 3371665 }, - { url = "https://files.pythonhosted.org/packages/2b/c8/b86f3ecfecff76cab9c3c1f54fcbb05ab654b6d0a940b9b614b896f8105f/blosc2-3.5.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:485ca015db7352fa0e3b78b912c66ec495d60b94cab36a82abfb9f87715e6abd", size = 4280046 }, - { url = "https://files.pythonhosted.org/packages/c9/38/8815054e054e45fda3c9d1e25ac77e6387d10d8e33c64bcf79f432b3c4fa/blosc2-3.5.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f88cfa1f1d64d77278c7fae0ab8c42291b7b2e8f36fcee154a0c55388e697fe9", size = 4422773 }, - { url = 
"https://files.pythonhosted.org/packages/b1/f0/55b5090c8fb6e9a12810cf141cd59875718148a78f62a4da818045f7e4b6/blosc2-3.5.1-cp313-cp313-win_amd64.whl", hash = "sha256:9d36c4a8489c0f8719040a7543918e2701821340b8643d508c450d0d012259bf", size = 2213487 }, -] - -[[package]] -name = "certifi" -version = "2025.7.9" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/de/8a/c729b6b60c66a38f590c4e774decc4b2ec7b0576be8f1aa984a53ffa812a/certifi-2025.7.9.tar.gz", hash = "sha256:c1d2ec05395148ee10cf672ffc28cd37ea0ab0d99f9cc74c43e588cbd111b079", size = 160386 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/66/f3/80a3f974c8b535d394ff960a11ac20368e06b736da395b551a49ce950cce/certifi-2025.7.9-py3-none-any.whl", hash = "sha256:d842783a14f8fdd646895ac26f719a061408834473cfc10203f6a575beb15d39", size = 159230 }, -] - -[[package]] -name = "cffi" -version = "1.17.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pycparser" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/90/07/f44ca684db4e4f08a3fdc6eeb9a0d15dc6883efc7b8c90357fdbf74e186c/cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14", size = 182191 }, - { url = "https://files.pythonhosted.org/packages/08/fd/cc2fedbd887223f9f5d170c96e57cbf655df9831a6546c1727ae13fa977a/cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67", size = 178592 }, - { url = 
"https://files.pythonhosted.org/packages/de/cc/4635c320081c78d6ffc2cab0a76025b691a91204f4aa317d568ff9280a2d/cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382", size = 426024 }, - { url = "https://files.pythonhosted.org/packages/b6/7b/3b2b250f3aab91abe5f8a51ada1b717935fdaec53f790ad4100fe2ec64d1/cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702", size = 448188 }, - { url = "https://files.pythonhosted.org/packages/d3/48/1b9283ebbf0ec065148d8de05d647a986c5f22586b18120020452fff8f5d/cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3", size = 455571 }, - { url = "https://files.pythonhosted.org/packages/40/87/3b8452525437b40f39ca7ff70276679772ee7e8b394934ff60e63b7b090c/cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6", size = 436687 }, - { url = "https://files.pythonhosted.org/packages/8d/fb/4da72871d177d63649ac449aec2e8a29efe0274035880c7af59101ca2232/cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17", size = 446211 }, - { url = "https://files.pythonhosted.org/packages/ab/a0/62f00bcb411332106c02b663b26f3545a9ef136f80d5df746c05878f8c4b/cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8", size = 461325 }, - { url = "https://files.pythonhosted.org/packages/36/83/76127035ed2e7e27b0787604d99da630ac3123bfb02d8e80c633f218a11d/cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e", size 
= 438784 }, - { url = "https://files.pythonhosted.org/packages/21/81/a6cd025db2f08ac88b901b745c163d884641909641f9b826e8cb87645942/cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be", size = 461564 }, - { url = "https://files.pythonhosted.org/packages/f8/fe/4d41c2f200c4a457933dbd98d3cf4e911870877bd94d9656cc0fcb390681/cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c", size = 171804 }, - { url = "https://files.pythonhosted.org/packages/d1/b6/0b0f5ab93b0df4acc49cae758c81fe4e5ef26c3ae2e10cc69249dfd8b3ab/cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15", size = 181299 }, - { url = "https://files.pythonhosted.org/packages/6b/f4/927e3a8899e52a27fa57a48607ff7dc91a9ebe97399b357b85a0c7892e00/cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401", size = 182264 }, - { url = "https://files.pythonhosted.org/packages/6c/f5/6c3a8efe5f503175aaddcbea6ad0d2c96dad6f5abb205750d1b3df44ef29/cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf", size = 178651 }, - { url = "https://files.pythonhosted.org/packages/94/dd/a3f0118e688d1b1a57553da23b16bdade96d2f9bcda4d32e7d2838047ff7/cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4", size = 445259 }, - { url = "https://files.pythonhosted.org/packages/2e/ea/70ce63780f096e16ce8588efe039d3c4f91deb1dc01e9c73a287939c79a6/cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41", size = 469200 }, - { url = 
"https://files.pythonhosted.org/packages/1c/a0/a4fa9f4f781bda074c3ddd57a572b060fa0df7655d2a4247bbe277200146/cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1", size = 477235 }, - { url = "https://files.pythonhosted.org/packages/62/12/ce8710b5b8affbcdd5c6e367217c242524ad17a02fe5beec3ee339f69f85/cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6", size = 459721 }, - { url = "https://files.pythonhosted.org/packages/ff/6b/d45873c5e0242196f042d555526f92aa9e0c32355a1be1ff8c27f077fd37/cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d", size = 467242 }, - { url = "https://files.pythonhosted.org/packages/1a/52/d9a0e523a572fbccf2955f5abe883cfa8bcc570d7faeee06336fbd50c9fc/cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6", size = 477999 }, - { url = "https://files.pythonhosted.org/packages/44/74/f2a2460684a1a2d00ca799ad880d54652841a780c4c97b87754f660c7603/cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f", size = 454242 }, - { url = "https://files.pythonhosted.org/packages/f8/4a/34599cac7dfcd888ff54e801afe06a19c17787dfd94495ab0c8d35fe99fb/cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b", size = 478604 }, - { url = "https://files.pythonhosted.org/packages/34/33/e1b8a1ba29025adbdcda5fb3a36f94c03d771c1b7b12f726ff7fef2ebe36/cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655", size = 171727 }, - { url = 
"https://files.pythonhosted.org/packages/3d/97/50228be003bb2802627d28ec0627837ac0bf35c90cf769812056f235b2d1/cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0", size = 181400 }, - { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178 }, - { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840 }, - { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803 }, - { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850 }, - { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729 }, - { url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256 }, - { url = 
"https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424 }, - { url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568 }, - { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736 }, - { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448 }, - { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976 }, - { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989 }, - { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802 }, - { url = 
"https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792 }, - { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893 }, - { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810 }, - { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200 }, - { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447 }, - { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358 }, - { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", 
size = 488469 }, - { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475 }, - { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009 }, - { url = "https://files.pythonhosted.org/packages/b9/ea/8bb50596b8ffbc49ddd7a1ad305035daa770202a6b782fc164647c2673ad/cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16", size = 182220 }, - { url = "https://files.pythonhosted.org/packages/ae/11/e77c8cd24f58285a82c23af484cf5b124a376b32644e445960d1a4654c3a/cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36", size = 178605 }, - { url = "https://files.pythonhosted.org/packages/ed/65/25a8dc32c53bf5b7b6c2686b42ae2ad58743f7ff644844af7cdb29b49361/cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8", size = 424910 }, - { url = "https://files.pythonhosted.org/packages/42/7a/9d086fab7c66bd7c4d0f27c57a1b6b068ced810afc498cc8c49e0088661c/cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576", size = 447200 }, - { url = "https://files.pythonhosted.org/packages/da/63/1785ced118ce92a993b0ec9e0d0ac8dc3e5dbfbcaa81135be56c69cabbb6/cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87", size = 454565 }, - { url = 
"https://files.pythonhosted.org/packages/74/06/90b8a44abf3556599cdec107f7290277ae8901a58f75e6fe8f970cd72418/cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0", size = 435635 }, - { url = "https://files.pythonhosted.org/packages/bd/62/a1f468e5708a70b1d86ead5bab5520861d9c7eacce4a885ded9faa7729c3/cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3", size = 445218 }, - { url = "https://files.pythonhosted.org/packages/5b/95/b34462f3ccb09c2594aa782d90a90b045de4ff1f70148ee79c69d37a0a5a/cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595", size = 460486 }, - { url = "https://files.pythonhosted.org/packages/fc/fc/a1e4bebd8d680febd29cf6c8a40067182b64f00c7d105f8f26b5bc54317b/cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a", size = 437911 }, - { url = "https://files.pythonhosted.org/packages/e6/c3/21cab7a6154b6a5ea330ae80de386e7665254835b9e98ecc1340b3a7de9a/cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e", size = 460632 }, - { url = "https://files.pythonhosted.org/packages/cb/b5/fd9f8b5a84010ca169ee49f4e4ad6f8c05f4e3545b72ee041dbbcb159882/cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7", size = 171820 }, - { url = "https://files.pythonhosted.org/packages/8c/52/b08750ce0bce45c143e1b5d7357ee8c55341b52bdef4b0f081af1eb248c2/cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662", size = 181290 }, -] - -[[package]] -name = "cfgv" -version = "3.4.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249 }, -] - -[[package]] -name = "chardet" -version = "5.2.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f3/0d/f7b6ab21ec75897ed80c17d79b15951a719226b9fababf1e40ea74d69079/chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7", size = 2069618 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/38/6f/f5fbc992a329ee4e0f288c1fe0e2ad9485ed064cac731ed2fe47dcc38cbf/chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970", size = 199385 }, -] - -[[package]] -name = "charset-normalizer" -version = "3.4.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 126367 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/95/28/9901804da60055b406e1a1c5ba7aac1276fb77f1dde635aabfc7fd84b8ab/charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941", size = 201818 }, - { url = "https://files.pythonhosted.org/packages/d9/9b/892a8c8af9110935e5adcbb06d9c6fe741b6bb02608c6513983048ba1a18/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd", size = 144649 }, - { url = "https://files.pythonhosted.org/packages/7b/a5/4179abd063ff6414223575e008593861d62abfc22455b5d1a44995b7c101/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6", size = 155045 }, - { url = "https://files.pythonhosted.org/packages/3b/95/bc08c7dfeddd26b4be8c8287b9bb055716f31077c8b0ea1cd09553794665/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d", size = 147356 }, - { url = "https://files.pythonhosted.org/packages/a8/2d/7a5b635aa65284bf3eab7653e8b4151ab420ecbae918d3e359d1947b4d61/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86", size = 149471 }, - { url = "https://files.pythonhosted.org/packages/ae/38/51fc6ac74251fd331a8cfdb7ec57beba8c23fd5493f1050f71c87ef77ed0/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c", size = 151317 }, - { url = "https://files.pythonhosted.org/packages/b7/17/edee1e32215ee6e9e46c3e482645b46575a44a2d72c7dfd49e49f60ce6bf/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0", size = 146368 }, - { url = "https://files.pythonhosted.org/packages/26/2c/ea3e66f2b5f21fd00b2825c94cafb8c326ea6240cd80a91eb09e4a285830/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef", size = 154491 }, - { url = 
"https://files.pythonhosted.org/packages/52/47/7be7fa972422ad062e909fd62460d45c3ef4c141805b7078dbab15904ff7/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6", size = 157695 }, - { url = "https://files.pythonhosted.org/packages/2f/42/9f02c194da282b2b340f28e5fb60762de1151387a36842a92b533685c61e/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366", size = 154849 }, - { url = "https://files.pythonhosted.org/packages/67/44/89cacd6628f31fb0b63201a618049be4be2a7435a31b55b5eb1c3674547a/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db", size = 150091 }, - { url = "https://files.pythonhosted.org/packages/1f/79/4b8da9f712bc079c0f16b6d67b099b0b8d808c2292c937f267d816ec5ecc/charset_normalizer-3.4.2-cp310-cp310-win32.whl", hash = "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a", size = 98445 }, - { url = "https://files.pythonhosted.org/packages/7d/d7/96970afb4fb66497a40761cdf7bd4f6fca0fc7bafde3a84f836c1f57a926/charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509", size = 105782 }, - { url = "https://files.pythonhosted.org/packages/05/85/4c40d00dcc6284a1c1ad5de5e0996b06f39d8232f1031cd23c2f5c07ee86/charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2", size = 198794 }, - { url = "https://files.pythonhosted.org/packages/41/d9/7a6c0b9db952598e97e93cbdfcb91bacd89b9b88c7c983250a77c008703c/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645", size = 142846 }, - { url = 
"https://files.pythonhosted.org/packages/66/82/a37989cda2ace7e37f36c1a8ed16c58cf48965a79c2142713244bf945c89/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd", size = 153350 }, - { url = "https://files.pythonhosted.org/packages/df/68/a576b31b694d07b53807269d05ec3f6f1093e9545e8607121995ba7a8313/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8", size = 145657 }, - { url = "https://files.pythonhosted.org/packages/92/9b/ad67f03d74554bed3aefd56fe836e1623a50780f7c998d00ca128924a499/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f", size = 147260 }, - { url = "https://files.pythonhosted.org/packages/a6/e6/8aebae25e328160b20e31a7e9929b1578bbdc7f42e66f46595a432f8539e/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7", size = 149164 }, - { url = "https://files.pythonhosted.org/packages/8b/f2/b3c2f07dbcc248805f10e67a0262c93308cfa149a4cd3d1fe01f593e5fd2/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9", size = 144571 }, - { url = "https://files.pythonhosted.org/packages/60/5b/c3f3a94bc345bc211622ea59b4bed9ae63c00920e2e8f11824aa5708e8b7/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544", size = 151952 }, - { url = "https://files.pythonhosted.org/packages/e2/4d/ff460c8b474122334c2fa394a3f99a04cf11c646da895f81402ae54f5c42/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = 
"sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82", size = 155959 }, - { url = "https://files.pythonhosted.org/packages/a2/2b/b964c6a2fda88611a1fe3d4c400d39c66a42d6c169c924818c848f922415/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0", size = 153030 }, - { url = "https://files.pythonhosted.org/packages/59/2e/d3b9811db26a5ebf444bc0fa4f4be5aa6d76fc6e1c0fd537b16c14e849b6/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5", size = 148015 }, - { url = "https://files.pythonhosted.org/packages/90/07/c5fd7c11eafd561bb51220d600a788f1c8d77c5eef37ee49454cc5c35575/charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a", size = 98106 }, - { url = "https://files.pythonhosted.org/packages/a8/05/5e33dbef7e2f773d672b6d79f10ec633d4a71cd96db6673625838a4fd532/charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28", size = 105402 }, - { url = "https://files.pythonhosted.org/packages/d7/a4/37f4d6035c89cac7930395a35cc0f1b872e652eaafb76a6075943754f095/charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", size = 199936 }, - { url = "https://files.pythonhosted.org/packages/ee/8a/1a5e33b73e0d9287274f899d967907cd0bf9c343e651755d9307e0dbf2b3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", size = 143790 }, - { url = "https://files.pythonhosted.org/packages/66/52/59521f1d8e6ab1482164fa21409c5ef44da3e9f653c13ba71becdd98dec3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", size = 153924 }, - { url = "https://files.pythonhosted.org/packages/86/2d/fb55fdf41964ec782febbf33cb64be480a6b8f16ded2dbe8db27a405c09f/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", size = 146626 }, - { url = "https://files.pythonhosted.org/packages/8c/73/6ede2ec59bce19b3edf4209d70004253ec5f4e319f9a2e3f2f15601ed5f7/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", size = 148567 }, - { url = "https://files.pythonhosted.org/packages/09/14/957d03c6dc343c04904530b6bef4e5efae5ec7d7990a7cbb868e4595ee30/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", size = 150957 }, - { url = "https://files.pythonhosted.org/packages/0d/c8/8174d0e5c10ccebdcb1b53cc959591c4c722a3ad92461a273e86b9f5a302/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", size = 145408 }, - { url = "https://files.pythonhosted.org/packages/58/aa/8904b84bc8084ac19dc52feb4f5952c6df03ffb460a887b42615ee1382e8/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", size = 153399 }, - { url = "https://files.pythonhosted.org/packages/c2/26/89ee1f0e264d201cb65cf054aca6038c03b1a0c6b4ae998070392a3ce605/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", size = 156815 }, - { url = 
"https://files.pythonhosted.org/packages/fd/07/68e95b4b345bad3dbbd3a8681737b4338ff2c9df29856a6d6d23ac4c73cb/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", size = 154537 }, - { url = "https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", size = 149565 }, - { url = "https://files.pythonhosted.org/packages/37/a0/2410e5e6032a174c95e0806b1a6585eb21e12f445ebe239fac441995226a/charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", size = 98357 }, - { url = "https://files.pythonhosted.org/packages/6c/4f/c02d5c493967af3eda9c771ad4d2bbc8df6f99ddbeb37ceea6e8716a32bc/charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", size = 105776 }, - { url = "https://files.pythonhosted.org/packages/ea/12/a93df3366ed32db1d907d7593a94f1fe6293903e3e92967bebd6950ed12c/charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0", size = 199622 }, - { url = "https://files.pythonhosted.org/packages/04/93/bf204e6f344c39d9937d3c13c8cd5bbfc266472e51fc8c07cb7f64fcd2de/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf", size = 143435 }, - { url = "https://files.pythonhosted.org/packages/22/2a/ea8a2095b0bafa6c5b5a55ffdc2f924455233ee7b91c69b7edfcc9e02284/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e", size = 153653 }, - { url = 
"https://files.pythonhosted.org/packages/b6/57/1b090ff183d13cef485dfbe272e2fe57622a76694061353c59da52c9a659/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1", size = 146231 }, - { url = "https://files.pythonhosted.org/packages/e2/28/ffc026b26f441fc67bd21ab7f03b313ab3fe46714a14b516f931abe1a2d8/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c", size = 148243 }, - { url = "https://files.pythonhosted.org/packages/c0/0f/9abe9bd191629c33e69e47c6ef45ef99773320e9ad8e9cb08b8ab4a8d4cb/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691", size = 150442 }, - { url = "https://files.pythonhosted.org/packages/67/7c/a123bbcedca91d5916c056407f89a7f5e8fdfce12ba825d7d6b9954a1a3c/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0", size = 145147 }, - { url = "https://files.pythonhosted.org/packages/ec/fe/1ac556fa4899d967b83e9893788e86b6af4d83e4726511eaaad035e36595/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b", size = 153057 }, - { url = "https://files.pythonhosted.org/packages/2b/ff/acfc0b0a70b19e3e54febdd5301a98b72fa07635e56f24f60502e954c461/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff", size = 156454 }, - { url = "https://files.pythonhosted.org/packages/92/08/95b458ce9c740d0645feb0e96cea1f5ec946ea9c580a94adfe0b617f3573/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = 
"sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b", size = 154174 }, - { url = "https://files.pythonhosted.org/packages/78/be/8392efc43487ac051eee6c36d5fbd63032d78f7728cb37aebcc98191f1ff/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148", size = 149166 }, - { url = "https://files.pythonhosted.org/packages/44/96/392abd49b094d30b91d9fbda6a69519e95802250b777841cf3bda8fe136c/charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7", size = 98064 }, - { url = "https://files.pythonhosted.org/packages/e9/b0/0200da600134e001d91851ddc797809e2fe0ea72de90e09bec5a2fbdaccb/charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980", size = 105641 }, - { url = "https://files.pythonhosted.org/packages/28/f8/dfb01ff6cc9af38552c69c9027501ff5a5117c4cc18dcd27cb5259fa1888/charset_normalizer-3.4.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:005fa3432484527f9732ebd315da8da8001593e2cf46a3d817669f062c3d9ed4", size = 201671 }, - { url = "https://files.pythonhosted.org/packages/32/fb/74e26ee556a9dbfe3bd264289b67be1e6d616329403036f6507bb9f3f29c/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e92fca20c46e9f5e1bb485887d074918b13543b1c2a1185e69bb8d17ab6236a7", size = 144744 }, - { url = "https://files.pythonhosted.org/packages/ad/06/8499ee5aa7addc6f6d72e068691826ff093329fe59891e83b092ae4c851c/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50bf98d5e563b83cc29471fa114366e6806bc06bc7a25fd59641e41445327836", size = 154993 }, - { url = "https://files.pythonhosted.org/packages/f1/a2/5e4c187680728219254ef107a6949c60ee0e9a916a5dadb148c7ae82459c/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash 
= "sha256:721c76e84fe669be19c5791da68232ca2e05ba5185575086e384352e2c309597", size = 147382 }, - { url = "https://files.pythonhosted.org/packages/4c/fe/56aca740dda674f0cc1ba1418c4d84534be51f639b5f98f538b332dc9a95/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d8fd25b7f4675d0c47cf95b594d4e7b158aca33b76aa63d07186e13c0e0ab7", size = 149536 }, - { url = "https://files.pythonhosted.org/packages/53/13/db2e7779f892386b589173dd689c1b1e304621c5792046edd8a978cbf9e0/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3daeac64d5b371dea99714f08ffc2c208522ec6b06fbc7866a450dd446f5c0f", size = 151349 }, - { url = "https://files.pythonhosted.org/packages/69/35/e52ab9a276186f729bce7a0638585d2982f50402046e4b0faa5d2c3ef2da/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dccab8d5fa1ef9bfba0590ecf4d46df048d18ffe3eec01eeb73a42e0d9e7a8ba", size = 146365 }, - { url = "https://files.pythonhosted.org/packages/a6/d8/af7333f732fc2e7635867d56cb7c349c28c7094910c72267586947561b4b/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:aaf27faa992bfee0264dc1f03f4c75e9fcdda66a519db6b957a3f826e285cf12", size = 154499 }, - { url = "https://files.pythonhosted.org/packages/7a/3d/a5b2e48acef264d71e036ff30bcc49e51bde80219bb628ba3e00cf59baac/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:eb30abc20df9ab0814b5a2524f23d75dcf83cde762c161917a2b4b7b55b1e518", size = 157735 }, - { url = "https://files.pythonhosted.org/packages/85/d8/23e2c112532a29f3eef374375a8684a4f3b8e784f62b01da931186f43494/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c72fbbe68c6f32f251bdc08b8611c7b3060612236e960ef848e0a517ddbe76c5", size = 154786 }, - { url = 
"https://files.pythonhosted.org/packages/c7/57/93e0169f08ecc20fe82d12254a200dfaceddc1c12a4077bf454ecc597e33/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:982bb1e8b4ffda883b3d0a521e23abcd6fd17418f6d2c4118d257a10199c0ce3", size = 150203 }, - { url = "https://files.pythonhosted.org/packages/2c/9d/9bf2b005138e7e060d7ebdec7503d0ef3240141587651f4b445bdf7286c2/charset_normalizer-3.4.2-cp39-cp39-win32.whl", hash = "sha256:43e0933a0eff183ee85833f341ec567c0980dae57c464d8a508e1b2ceb336471", size = 98436 }, - { url = "https://files.pythonhosted.org/packages/6d/24/5849d46cf4311bbf21b424c443b09b459f5b436b1558c04e45dbb7cc478b/charset_normalizer-3.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:d11b54acf878eef558599658b0ffca78138c8c3655cf4f3a4a673c437e67732e", size = 105772 }, - { url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", size = 52626 }, -] - -[[package]] -name = "click" -version = "8.1.8" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -dependencies = [ - { name = "colorama", marker = "python_full_version < '3.10' and sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188 }, -] - -[[package]] -name = "click" -version = "8.2.1" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= 
'3.12'", - "python_full_version == '3.11.*'", - "python_full_version == '3.10.*'", -] -dependencies = [ - { name = "colorama", marker = "python_full_version >= '3.10' and sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215 }, -] - -[[package]] -name = "colorama" -version = "0.4.6" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, -] - -[[package]] -name = "configparser" -version = "7.2.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8b/ac/ea19242153b5e8be412a726a70e82c7b5c1537c83f61b20995b2eda3dcd7/configparser-7.2.0.tar.gz", hash = "sha256:b629cc8ae916e3afbd36d1b3d093f34193d851e11998920fdcfc4552218b7b70", size = 51273 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/09/fe/f61e7129e9e689d9e40bbf8a36fb90f04eceb477f4617c02c6a18463e81f/configparser-7.2.0-py3-none-any.whl", hash = "sha256:fee5e1f3db4156dcd0ed95bc4edfa3580475537711f67a819c966b389d09ce62", size = 17232 }, -] - -[[package]] -name = "coverage" -version = 
"7.9.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/04/b7/c0465ca253df10a9e8dae0692a4ae6e9726d245390aaef92360e1d6d3832/coverage-7.9.2.tar.gz", hash = "sha256:997024fa51e3290264ffd7492ec97d0690293ccd2b45a6cd7d82d945a4a80c8b", size = 813556 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a1/0d/5c2114fd776c207bd55068ae8dc1bef63ecd1b767b3389984a8e58f2b926/coverage-7.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:66283a192a14a3854b2e7f3418d7db05cdf411012ab7ff5db98ff3b181e1f912", size = 212039 }, - { url = "https://files.pythonhosted.org/packages/cf/ad/dc51f40492dc2d5fcd31bb44577bc0cc8920757d6bc5d3e4293146524ef9/coverage-7.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4e01d138540ef34fcf35c1aa24d06c3de2a4cffa349e29a10056544f35cca15f", size = 212428 }, - { url = "https://files.pythonhosted.org/packages/a2/a3/55cb3ff1b36f00df04439c3993d8529193cdf165a2467bf1402539070f16/coverage-7.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f22627c1fe2745ee98d3ab87679ca73a97e75ca75eb5faee48660d060875465f", size = 241534 }, - { url = "https://files.pythonhosted.org/packages/eb/c9/a8410b91b6be4f6e9c2e9f0dce93749b6b40b751d7065b4410bf89cb654b/coverage-7.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b1c2d8363247b46bd51f393f86c94096e64a1cf6906803fa8d5a9d03784bdbf", size = 239408 }, - { url = "https://files.pythonhosted.org/packages/ff/c4/6f3e56d467c612b9070ae71d5d3b114c0b899b5788e1ca3c93068ccb7018/coverage-7.9.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c10c882b114faf82dbd33e876d0cbd5e1d1ebc0d2a74ceef642c6152f3f4d547", size = 240552 }, - { url = "https://files.pythonhosted.org/packages/fd/20/04eda789d15af1ce79bce5cc5fd64057c3a0ac08fd0576377a3096c24663/coverage-7.9.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:de3c0378bdf7066c3988d66cd5232d161e933b87103b014ab1b0b4676098fa45", size = 240464 }, - { url = "https://files.pythonhosted.org/packages/a9/5a/217b32c94cc1a0b90f253514815332d08ec0812194a1ce9cca97dda1cd20/coverage-7.9.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:1e2f097eae0e5991e7623958a24ced3282676c93c013dde41399ff63e230fcf2", size = 239134 }, - { url = "https://files.pythonhosted.org/packages/34/73/1d019c48f413465eb5d3b6898b6279e87141c80049f7dbf73fd020138549/coverage-7.9.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:28dc1f67e83a14e7079b6cea4d314bc8b24d1aed42d3582ff89c0295f09b181e", size = 239405 }, - { url = "https://files.pythonhosted.org/packages/49/6c/a2beca7aa2595dad0c0d3f350382c381c92400efe5261e2631f734a0e3fe/coverage-7.9.2-cp310-cp310-win32.whl", hash = "sha256:bf7d773da6af9e10dbddacbf4e5cab13d06d0ed93561d44dae0188a42c65be7e", size = 214519 }, - { url = "https://files.pythonhosted.org/packages/fc/c8/91e5e4a21f9a51e2c7cdd86e587ae01a4fcff06fc3fa8cde4d6f7cf68df4/coverage-7.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:0c0378ba787681ab1897f7c89b415bd56b0b2d9a47e5a3d8dc0ea55aac118d6c", size = 215400 }, - { url = "https://files.pythonhosted.org/packages/39/40/916786453bcfafa4c788abee4ccd6f592b5b5eca0cd61a32a4e5a7ef6e02/coverage-7.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a7a56a2964a9687b6aba5b5ced6971af308ef6f79a91043c05dd4ee3ebc3e9ba", size = 212152 }, - { url = "https://files.pythonhosted.org/packages/9f/66/cc13bae303284b546a030762957322bbbff1ee6b6cb8dc70a40f8a78512f/coverage-7.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:123d589f32c11d9be7fe2e66d823a236fe759b0096f5db3fb1b75b2fa414a4fa", size = 212540 }, - { url = "https://files.pythonhosted.org/packages/0f/3c/d56a764b2e5a3d43257c36af4a62c379df44636817bb5f89265de4bf8bd7/coverage-7.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:333b2e0ca576a7dbd66e85ab402e35c03b0b22f525eed82681c4b866e2e2653a", size = 245097 }, - { url = 
"https://files.pythonhosted.org/packages/b1/46/bd064ea8b3c94eb4ca5d90e34d15b806cba091ffb2b8e89a0d7066c45791/coverage-7.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:326802760da234baf9f2f85a39e4a4b5861b94f6c8d95251f699e4f73b1835dc", size = 242812 }, - { url = "https://files.pythonhosted.org/packages/43/02/d91992c2b29bc7afb729463bc918ebe5f361be7f1daae93375a5759d1e28/coverage-7.9.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19e7be4cfec248df38ce40968c95d3952fbffd57b400d4b9bb580f28179556d2", size = 244617 }, - { url = "https://files.pythonhosted.org/packages/b7/4f/8fadff6bf56595a16d2d6e33415841b0163ac660873ed9a4e9046194f779/coverage-7.9.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0b4a4cb73b9f2b891c1788711408ef9707666501ba23684387277ededab1097c", size = 244263 }, - { url = "https://files.pythonhosted.org/packages/9b/d2/e0be7446a2bba11739edb9f9ba4eff30b30d8257370e237418eb44a14d11/coverage-7.9.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:2c8937fa16c8c9fbbd9f118588756e7bcdc7e16a470766a9aef912dd3f117dbd", size = 242314 }, - { url = "https://files.pythonhosted.org/packages/9d/7d/dcbac9345000121b8b57a3094c2dfcf1ccc52d8a14a40c1d4bc89f936f80/coverage-7.9.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:42da2280c4d30c57a9b578bafd1d4494fa6c056d4c419d9689e66d775539be74", size = 242904 }, - { url = "https://files.pythonhosted.org/packages/41/58/11e8db0a0c0510cf31bbbdc8caf5d74a358b696302a45948d7c768dfd1cf/coverage-7.9.2-cp311-cp311-win32.whl", hash = "sha256:14fa8d3da147f5fdf9d298cacc18791818f3f1a9f542c8958b80c228320e90c6", size = 214553 }, - { url = "https://files.pythonhosted.org/packages/3a/7d/751794ec8907a15e257136e48dc1021b1f671220ecccfd6c4eaf30802714/coverage-7.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:549cab4892fc82004f9739963163fd3aac7a7b0df430669b75b86d293d2df2a7", size = 215441 }, - { url = 
"https://files.pythonhosted.org/packages/62/5b/34abcedf7b946c1c9e15b44f326cb5b0da852885312b30e916f674913428/coverage-7.9.2-cp311-cp311-win_arm64.whl", hash = "sha256:c2667a2b913e307f06aa4e5677f01a9746cd08e4b35e14ebcde6420a9ebb4c62", size = 213873 }, - { url = "https://files.pythonhosted.org/packages/53/d7/7deefc6fd4f0f1d4c58051f4004e366afc9e7ab60217ac393f247a1de70a/coverage-7.9.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ae9eb07f1cfacd9cfe8eaee6f4ff4b8a289a668c39c165cd0c8548484920ffc0", size = 212344 }, - { url = "https://files.pythonhosted.org/packages/95/0c/ee03c95d32be4d519e6a02e601267769ce2e9a91fc8faa1b540e3626c680/coverage-7.9.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9ce85551f9a1119f02adc46d3014b5ee3f765deac166acf20dbb851ceb79b6f3", size = 212580 }, - { url = "https://files.pythonhosted.org/packages/8b/9f/826fa4b544b27620086211b87a52ca67592622e1f3af9e0a62c87aea153a/coverage-7.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8f6389ac977c5fb322e0e38885fbbf901743f79d47f50db706e7644dcdcb6e1", size = 246383 }, - { url = "https://files.pythonhosted.org/packages/7f/b3/4477aafe2a546427b58b9c540665feff874f4db651f4d3cb21b308b3a6d2/coverage-7.9.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff0d9eae8cdfcd58fe7893b88993723583a6ce4dfbfd9f29e001922544f95615", size = 243400 }, - { url = "https://files.pythonhosted.org/packages/f8/c2/efffa43778490c226d9d434827702f2dfbc8041d79101a795f11cbb2cf1e/coverage-7.9.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fae939811e14e53ed8a9818dad51d434a41ee09df9305663735f2e2d2d7d959b", size = 245591 }, - { url = "https://files.pythonhosted.org/packages/c6/e7/a59888e882c9a5f0192d8627a30ae57910d5d449c80229b55e7643c078c4/coverage-7.9.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:31991156251ec202c798501e0a42bbdf2169dcb0f137b1f5c0f4267f3fc68ef9", size 
= 245402 }, - { url = "https://files.pythonhosted.org/packages/92/a5/72fcd653ae3d214927edc100ce67440ed8a0a1e3576b8d5e6d066ed239db/coverage-7.9.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d0d67963f9cbfc7c7f96d4ac74ed60ecbebd2ea6eeb51887af0f8dce205e545f", size = 243583 }, - { url = "https://files.pythonhosted.org/packages/5c/f5/84e70e4df28f4a131d580d7d510aa1ffd95037293da66fd20d446090a13b/coverage-7.9.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:49b752a2858b10580969ec6af6f090a9a440a64a301ac1528d7ca5f7ed497f4d", size = 244815 }, - { url = "https://files.pythonhosted.org/packages/39/e7/d73d7cbdbd09fdcf4642655ae843ad403d9cbda55d725721965f3580a314/coverage-7.9.2-cp312-cp312-win32.whl", hash = "sha256:88d7598b8ee130f32f8a43198ee02edd16d7f77692fa056cb779616bbea1b355", size = 214719 }, - { url = "https://files.pythonhosted.org/packages/9f/d6/7486dcc3474e2e6ad26a2af2db7e7c162ccd889c4c68fa14ea8ec189c9e9/coverage-7.9.2-cp312-cp312-win_amd64.whl", hash = "sha256:9dfb070f830739ee49d7c83e4941cc767e503e4394fdecb3b54bfdac1d7662c0", size = 215509 }, - { url = "https://files.pythonhosted.org/packages/b7/34/0439f1ae2593b0346164d907cdf96a529b40b7721a45fdcf8b03c95fcd90/coverage-7.9.2-cp312-cp312-win_arm64.whl", hash = "sha256:4e2c058aef613e79df00e86b6d42a641c877211384ce5bd07585ed7ba71ab31b", size = 213910 }, - { url = "https://files.pythonhosted.org/packages/94/9d/7a8edf7acbcaa5e5c489a646226bed9591ee1c5e6a84733c0140e9ce1ae1/coverage-7.9.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:985abe7f242e0d7bba228ab01070fde1d6c8fa12f142e43debe9ed1dde686038", size = 212367 }, - { url = "https://files.pythonhosted.org/packages/e8/9e/5cd6f130150712301f7e40fb5865c1bc27b97689ec57297e568d972eec3c/coverage-7.9.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82c3939264a76d44fde7f213924021ed31f55ef28111a19649fec90c0f109e6d", size = 212632 }, - { url = 
"https://files.pythonhosted.org/packages/a8/de/6287a2c2036f9fd991c61cefa8c64e57390e30c894ad3aa52fac4c1e14a8/coverage-7.9.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae5d563e970dbe04382f736ec214ef48103d1b875967c89d83c6e3f21706d5b3", size = 245793 }, - { url = "https://files.pythonhosted.org/packages/06/cc/9b5a9961d8160e3cb0b558c71f8051fe08aa2dd4b502ee937225da564ed1/coverage-7.9.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdd612e59baed2a93c8843c9a7cb902260f181370f1d772f4842987535071d14", size = 243006 }, - { url = "https://files.pythonhosted.org/packages/49/d9/4616b787d9f597d6443f5588619c1c9f659e1f5fc9eebf63699eb6d34b78/coverage-7.9.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:256ea87cb2a1ed992bcdfc349d8042dcea1b80436f4ddf6e246d6bee4b5d73b6", size = 244990 }, - { url = "https://files.pythonhosted.org/packages/48/83/801cdc10f137b2d02b005a761661649ffa60eb173dcdaeb77f571e4dc192/coverage-7.9.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f44ae036b63c8ea432f610534a2668b0c3aee810e7037ab9d8ff6883de480f5b", size = 245157 }, - { url = "https://files.pythonhosted.org/packages/c8/a4/41911ed7e9d3ceb0ffb019e7635468df7499f5cc3edca5f7dfc078e9c5ec/coverage-7.9.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:82d76ad87c932935417a19b10cfe7abb15fd3f923cfe47dbdaa74ef4e503752d", size = 243128 }, - { url = "https://files.pythonhosted.org/packages/10/41/344543b71d31ac9cb00a664d5d0c9ef134a0fe87cb7d8430003b20fa0b7d/coverage-7.9.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:619317bb86de4193debc712b9e59d5cffd91dc1d178627ab2a77b9870deb2868", size = 244511 }, - { url = "https://files.pythonhosted.org/packages/d5/81/3b68c77e4812105e2a060f6946ba9e6f898ddcdc0d2bfc8b4b152a9ae522/coverage-7.9.2-cp313-cp313-win32.whl", hash = "sha256:0a07757de9feb1dfafd16ab651e0f628fd7ce551604d1bf23e47e1ddca93f08a", size = 
214765 }, - { url = "https://files.pythonhosted.org/packages/06/a2/7fac400f6a346bb1a4004eb2a76fbff0e242cd48926a2ce37a22a6a1d917/coverage-7.9.2-cp313-cp313-win_amd64.whl", hash = "sha256:115db3d1f4d3f35f5bb021e270edd85011934ff97c8797216b62f461dd69374b", size = 215536 }, - { url = "https://files.pythonhosted.org/packages/08/47/2c6c215452b4f90d87017e61ea0fd9e0486bb734cb515e3de56e2c32075f/coverage-7.9.2-cp313-cp313-win_arm64.whl", hash = "sha256:48f82f889c80af8b2a7bb6e158d95a3fbec6a3453a1004d04e4f3b5945a02694", size = 213943 }, - { url = "https://files.pythonhosted.org/packages/a3/46/e211e942b22d6af5e0f323faa8a9bc7c447a1cf1923b64c47523f36ed488/coverage-7.9.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:55a28954545f9d2f96870b40f6c3386a59ba8ed50caf2d949676dac3ecab99f5", size = 213088 }, - { url = "https://files.pythonhosted.org/packages/d2/2f/762551f97e124442eccd907bf8b0de54348635b8866a73567eb4e6417acf/coverage-7.9.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cdef6504637731a63c133bb2e6f0f0214e2748495ec15fe42d1e219d1b133f0b", size = 213298 }, - { url = "https://files.pythonhosted.org/packages/7a/b7/76d2d132b7baf7360ed69be0bcab968f151fa31abe6d067f0384439d9edb/coverage-7.9.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bcd5ebe66c7a97273d5d2ddd4ad0ed2e706b39630ed4b53e713d360626c3dbb3", size = 256541 }, - { url = "https://files.pythonhosted.org/packages/a0/17/392b219837d7ad47d8e5974ce5f8dc3deb9f99a53b3bd4d123602f960c81/coverage-7.9.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9303aed20872d7a3c9cb39c5d2b9bdbe44e3a9a1aecb52920f7e7495410dfab8", size = 252761 }, - { url = "https://files.pythonhosted.org/packages/d5/77/4256d3577fe1b0daa8d3836a1ebe68eaa07dd2cbaf20cf5ab1115d6949d4/coverage-7.9.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:bc18ea9e417a04d1920a9a76fe9ebd2f43ca505b81994598482f938d5c315f46", size = 254917 }, - { url = "https://files.pythonhosted.org/packages/53/99/fc1a008eef1805e1ddb123cf17af864743354479ea5129a8f838c433cc2c/coverage-7.9.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6406cff19880aaaadc932152242523e892faff224da29e241ce2fca329866584", size = 256147 }, - { url = "https://files.pythonhosted.org/packages/92/c0/f63bf667e18b7f88c2bdb3160870e277c4874ced87e21426128d70aa741f/coverage-7.9.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2d0d4f6ecdf37fcc19c88fec3e2277d5dee740fb51ffdd69b9579b8c31e4232e", size = 254261 }, - { url = "https://files.pythonhosted.org/packages/8c/32/37dd1c42ce3016ff8ec9e4b607650d2e34845c0585d3518b2a93b4830c1a/coverage-7.9.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c33624f50cf8de418ab2b4d6ca9eda96dc45b2c4231336bac91454520e8d1fac", size = 255099 }, - { url = "https://files.pythonhosted.org/packages/da/2e/af6b86f7c95441ce82f035b3affe1cd147f727bbd92f563be35e2d585683/coverage-7.9.2-cp313-cp313t-win32.whl", hash = "sha256:1df6b76e737c6a92210eebcb2390af59a141f9e9430210595251fbaf02d46926", size = 215440 }, - { url = "https://files.pythonhosted.org/packages/4d/bb/8a785d91b308867f6b2e36e41c569b367c00b70c17f54b13ac29bcd2d8c8/coverage-7.9.2-cp313-cp313t-win_amd64.whl", hash = "sha256:f5fd54310b92741ebe00d9c0d1d7b2b27463952c022da6d47c175d246a98d1bd", size = 216537 }, - { url = "https://files.pythonhosted.org/packages/1d/a0/a6bffb5e0f41a47279fd45a8f3155bf193f77990ae1c30f9c224b61cacb0/coverage-7.9.2-cp313-cp313t-win_arm64.whl", hash = "sha256:c48c2375287108c887ee87d13b4070a381c6537d30e8487b24ec721bf2a781cb", size = 214398 }, - { url = "https://files.pythonhosted.org/packages/62/ab/b4b06662ccaa00ca7bbee967b7035a33a58b41efb92d8c89a6c523a2ccd5/coverage-7.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ddc39510ac922a5c4c27849b739f875d3e1d9e590d1e7b64c98dadf037a16cce", size = 212037 }, - { url = 
"https://files.pythonhosted.org/packages/bb/5e/04619995657acc898d15bfad42b510344b3a74d4d5bc34f2e279d46c781c/coverage-7.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a535c0c7364acd55229749c2b3e5eebf141865de3a8f697076a3291985f02d30", size = 212412 }, - { url = "https://files.pythonhosted.org/packages/14/e7/1465710224dc6d31c534e7714cbd907210622a044adc81c810e72eea873f/coverage-7.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df0f9ef28e0f20c767ccdccfc5ae5f83a6f4a2fbdfbcbcc8487a8a78771168c8", size = 241164 }, - { url = "https://files.pythonhosted.org/packages/ab/f2/44c6fbd2794afeb9ab6c0a14d3c088ab1dae3dff3df2624609981237bbb4/coverage-7.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f3da12e0ccbcb348969221d29441ac714bbddc4d74e13923d3d5a7a0bebef7a", size = 239032 }, - { url = "https://files.pythonhosted.org/packages/6a/d2/7a79845429c0aa2e6788bc45c26a2e3052fa91082c9ea1dea56fb531952c/coverage-7.9.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a17eaf46f56ae0f870f14a3cbc2e4632fe3771eab7f687eda1ee59b73d09fe4", size = 240148 }, - { url = "https://files.pythonhosted.org/packages/9c/7d/2731d1b4c9c672d82d30d218224dfc62939cf3800bc8aba0258fefb191f5/coverage-7.9.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:669135a9d25df55d1ed56a11bf555f37c922cf08d80799d4f65d77d7d6123fcf", size = 239875 }, - { url = "https://files.pythonhosted.org/packages/1b/83/685958715429a9da09cf172c15750ca5c795dd7259466f2645403696557b/coverage-7.9.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:9d3a700304d01a627df9db4322dc082a0ce1e8fc74ac238e2af39ced4c083193", size = 238127 }, - { url = "https://files.pythonhosted.org/packages/34/ff/161a4313308b3783126790adfae1970adbe4886fda8788792e435249910a/coverage-7.9.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:71ae8b53855644a0b1579d4041304ddc9995c7b21c8a1f16753c4d8903b4dfed", size = 
239064 }, - { url = "https://files.pythonhosted.org/packages/17/14/fe33f41b2e80811021de059621f44c01ebe4d6b08bdb82d54a514488e933/coverage-7.9.2-cp39-cp39-win32.whl", hash = "sha256:dd7a57b33b5cf27acb491e890720af45db05589a80c1ffc798462a765be6d4d7", size = 214522 }, - { url = "https://files.pythonhosted.org/packages/6e/30/63d850ec31b5c6f6a7b4e853016375b846258300320eda29376e2786ceeb/coverage-7.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:f65bb452e579d5540c8b37ec105dd54d8b9307b07bcaa186818c104ffda22441", size = 215419 }, - { url = "https://files.pythonhosted.org/packages/d7/85/f8bbefac27d286386961c25515431482a425967e23d3698b75a250872924/coverage-7.9.2-pp39.pp310.pp311-none-any.whl", hash = "sha256:8a1166db2fb62473285bcb092f586e081e92656c7dfa8e9f62b4d39d7e6b5050", size = 204013 }, - { url = "https://files.pythonhosted.org/packages/3c/38/bbe2e63902847cf79036ecc75550d0698af31c91c7575352eb25190d0fb3/coverage-7.9.2-py3-none-any.whl", hash = "sha256:e425cd5b00f6fc0ed7cdbd766c70be8baab4b7839e4d4fe5fac48581dd968ea4", size = 204005 }, -] - -[package.optional-dependencies] -toml = [ - { name = "tomli", marker = "python_full_version <= '3.11'" }, -] - -[[package]] -name = "coveralls" -version = "4.0.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "coverage", extra = ["toml"] }, - { name = "docopt" }, - { name = "requests" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/61/75/a454fb443eb6a053833f61603a432ffbd7dd6ae53a11159bacfadb9d6219/coveralls-4.0.1.tar.gz", hash = "sha256:7b2a0a2bcef94f295e3cf28dcc55ca40b71c77d1c2446b538e85f0f7bc21aa69", size = 12419 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/63/e5/6708c75e2a4cfca929302d4d9b53b862c6dc65bd75e6933ea3d20016d41d/coveralls-4.0.1-py3-none-any.whl", hash = "sha256:7a6b1fa9848332c7b2221afb20f3df90272ac0167060f41b5fe90429b30b1809", size = 13599 }, -] - -[[package]] -name = "decorator" -version = "5.2.1" -source = { registry = "https://pypi.org/simple" } -sdist = { 
url = "https://files.pythonhosted.org/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360", size = 56711 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190 }, -] - -[[package]] -name = "distlib" -version = "0.4.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/96/8e/709914eb2b5749865801041647dc7f4e6d00b549cfe88b65ca192995f07c/distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d", size = 614605 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047 }, -] - -[[package]] -name = "docopt" -version = "0.6.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/55/8f8cab2afd404cf578136ef2cc5dfb50baa1761b68c9da1fb1e4eed343c9/docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491", size = 25901 } - -[[package]] -name = "dpath" -version = "2.2.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b5/ce/e1fd64d36e4a5717bd5e6b2ad188f5eaa2e902fde871ea73a79875793fc9/dpath-2.2.0.tar.gz", hash = "sha256:34f7e630dc55ea3f219e555726f5da4b4b25f2200319c8e6902c394258dd6a3e", size = 28266 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/05/d1/8952806fbf9583004ab479d8f58a9496c3d35f6b6009ddd458bdd9978eaf/dpath-2.2.0-py3-none-any.whl", hash = 
"sha256:b330a375ded0a0d2ed404440f6c6a715deae5313af40bbb01c8a41d891900576", size = 17618 }, -] - -[[package]] -name = "exceptiongroup" -version = "1.3.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674 }, -] - -[[package]] -name = "executing" -version = "2.2.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cc/28/c14e053b6762b1044f34a13aab6859bbf40456d37d23aa286ac24cfd9a5d/executing-2.2.1.tar.gz", hash = "sha256:3632cc370565f6648cc328b32435bd120a1e4ebb20c77e3fdde9a13cd1e533c4", size = 1129488 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/ea/53f2148663b321f21b5a606bd5f191517cf40b7072c0497d3c92c4a13b1e/executing-2.2.1-py2.py3-none-any.whl", hash = "sha256:760643d3452b4d777d295bb167ccc74c64a81df23fb5e08eff250c425a4b2017", size = 28317 }, -] - -[[package]] -name = "fancycompleter" -version = "0.11.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pyreadline3", marker = "python_full_version < '3.13' and sys_platform == 'win32'" }, - { name = "pyrepl", marker = "python_full_version < '3.13'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/4e/4c/d11187dee93eff89d082afda79b63c79320ae1347e49485a38f05ad359d0/fancycompleter-0.11.1.tar.gz", hash = "sha256:5b4ad65d76b32b1259251516d0f1cb2d82832b1ff8506697a707284780757f69", size = 341776 } 
-wheels = [ - { url = "https://files.pythonhosted.org/packages/30/c3/6f0e3896f193528bbd2b4d2122d4be8108a37efab0b8475855556a8c4afa/fancycompleter-0.11.1-py3-none-any.whl", hash = "sha256:44243d7fab37087208ca5acacf8f74c0aa4d733d04d593857873af7513cdf8a6", size = 11207 }, -] - -[[package]] -name = "filelock" -version = "3.19.1" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -sdist = { url = "https://files.pythonhosted.org/packages/40/bb/0ab3e58d22305b6f5440629d20683af28959bf793d98d11950e305c1c326/filelock-3.19.1.tar.gz", hash = "sha256:66eda1888b0171c998b35be2bcc0f6d75c388a7ce20c3f3f37aa8e96c2dddf58", size = 17687 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/42/14/42b2651a2f46b022ccd948bca9f2d5af0fd8929c4eec235b8d6d844fbe67/filelock-3.19.1-py3-none-any.whl", hash = "sha256:d38e30481def20772f5baf097c122c3babc4fcdb7e14e57049eb9d88c6dc017d", size = 15988 }, -] - -[[package]] -name = "filelock" -version = "3.20.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.12'", - "python_full_version == '3.11.*'", - "python_full_version == '3.10.*'", -] -sdist = { url = "https://files.pythonhosted.org/packages/58/46/0028a82567109b5ef6e4d2a1f04a583fb513e6cf9527fcdd09afd817deeb/filelock-3.20.0.tar.gz", hash = "sha256:711e943b4ec6be42e1d4e6690b48dc175c822967466bb31c0c293f34334c13f4", size = 18922 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/76/91/7216b27286936c16f5b4d0c530087e4a54eead683e6b0b73dd0c64844af6/filelock-3.20.0-py3-none-any.whl", hash = "sha256:339b4732ffda5cd79b13f4e2711a31b0365ce445d95d243bb996273d072546a2", size = 16054 }, -] - -[[package]] -name = "flask" -version = "2.3.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "blinker" }, - { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = 
"click", version = "8.2.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, - { name = "itsdangerous" }, - { name = "jinja2" }, - { name = "werkzeug" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/46/b7/4ace17e37abd9c21715dea5ee11774a25e404c486a7893fa18e764326ead/flask-2.3.3.tar.gz", hash = "sha256:09c347a92aa7ff4a8e7f3206795f30d826654baf38b873d0744cd571ca609efc", size = 672756 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fd/56/26f0be8adc2b4257df20c1c4260ddd0aa396cf8e75d90ab2f7ff99bc34f9/flask-2.3.3-py3-none-any.whl", hash = "sha256:f69fcd559dc907ed196ab9df0e48471709175e696d6e698dd4dbe940f96ce66b", size = 96112 }, -] - -[[package]] -name = "flask-cors" -version = "3.0.10" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "flask" }, - { name = "six" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/cf/25/e3b2553d22ed542be807739556c69621ad2ab276ae8d5d2560f4ed20f652/Flask-Cors-3.0.10.tar.gz", hash = "sha256:b60839393f3b84a0f3746f6cdca56c1ad7426aa738b70d6c61375857823181de", size = 30867 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/db/84/901e700de86604b1c4ef4b57110d4e947c218b9997adf5d38fa7da493bce/Flask_Cors-3.0.10-py2.py3-none-any.whl", hash = "sha256:74efc975af1194fc7891ff5cd85b0f7478be4f7f59fe158102e91abb72bb4438", size = 14067 }, -] - -[[package]] -name = "gunicorn" -version = "21.2.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "packaging" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/06/89/acd9879fa6a5309b4bf16a5a8855f1e58f26d38e0c18ede9b3a70996b021/gunicorn-21.2.0.tar.gz", hash = "sha256:88ec8bff1d634f98e61b9f65bc4bf3cd918a90806c6f5c48bc5603849ec81033", size = 3632557 } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/0e/2a/c3a878eccb100ccddf45c50b6b8db8cf3301a6adede6e31d48e8531cab13/gunicorn-21.2.0-py3-none-any.whl", hash = "sha256:3213aa5e8c24949e792bcacfc176fef362e7aac80b76c56f6b5122bf350722f0", size = 80176 }, -] - -[[package]] -name = "humanize" -version = "4.12.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/22/d1/bbc4d251187a43f69844f7fd8941426549bbe4723e8ff0a7441796b0789f/humanize-4.12.3.tar.gz", hash = "sha256:8430be3a615106fdfceb0b2c1b41c4c98c6b0fc5cc59663a5539b111dd325fb0", size = 80514 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/1e/62a2ec3104394a2975a2629eec89276ede9dbe717092f6966fcf963e1bf0/humanize-4.12.3-py3-none-any.whl", hash = "sha256:2cbf6370af06568fa6d2da77c86edb7886f3160ecd19ee1ffef07979efc597f6", size = 128487 }, -] - -[[package]] -name = "identify" -version = "2.6.15" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ff/e7/685de97986c916a6d93b3876139e00eef26ad5bbbd61925d670ae8013449/identify-2.6.15.tar.gz", hash = "sha256:e4f4864b96c6557ef2a1e1c951771838f4edc9df3a72ec7118b338801b11c7bf", size = 99311 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0f/1c/e5fd8f973d4f375adb21565739498e2e9a1e54c858a97b9a8ccfdc81da9b/identify-2.6.15-py2.py3-none-any.whl", hash = "sha256:1181ef7608e00704db228516541eb83a88a9f94433a8c80bb9b5bd54b1d81757", size = 99183 }, -] - -[[package]] -name = "idna" -version = "3.10" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = 
"sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, -] - -[[package]] -name = "importlib-metadata" -version = "8.7.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "zipp", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656 }, -] - -[[package]] -name = "iniconfig" -version = "2.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050 }, -] - -[[package]] -name = "ipdb" -version = "0.13.13" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "decorator" }, - { name = "ipython", version = "8.18.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "ipython", version = "8.37.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, - { name = "ipython", version = "9.6.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { 
name = "tomli", marker = "python_full_version < '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/3d/1b/7e07e7b752017f7693a0f4d41c13e5ca29ce8cbcfdcc1fd6c4ad8c0a27a0/ipdb-0.13.13.tar.gz", hash = "sha256:e3ac6018ef05126d442af680aad863006ec19d02290561ac88b8b1c0b0cfc726", size = 17042 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0c/4c/b075da0092003d9a55cf2ecc1cae9384a1ca4f650d51b00fc59875fe76f6/ipdb-0.13.13-py3-none-any.whl", hash = "sha256:45529994741c4ab6d2388bfa5d7b725c2cf7fe9deffabdb8a6113aa5ed449ed4", size = 12130 }, -] - -[[package]] -name = "ipython" -version = "8.18.1" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -dependencies = [ - { name = "colorama", marker = "python_full_version < '3.10' and sys_platform == 'win32'" }, - { name = "decorator", marker = "python_full_version < '3.10'" }, - { name = "exceptiongroup", marker = "python_full_version < '3.10'" }, - { name = "jedi", marker = "python_full_version < '3.10'" }, - { name = "matplotlib-inline", marker = "python_full_version < '3.10'" }, - { name = "pexpect", marker = "python_full_version < '3.10' and sys_platform != 'win32'" }, - { name = "prompt-toolkit", marker = "python_full_version < '3.10'" }, - { name = "pygments", marker = "python_full_version < '3.10'" }, - { name = "stack-data", marker = "python_full_version < '3.10'" }, - { name = "traitlets", marker = "python_full_version < '3.10'" }, - { name = "typing-extensions", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b1/b9/3ba6c45a6df813c09a48bac313c22ff83efa26cbb55011218d925a46e2ad/ipython-8.18.1.tar.gz", hash = "sha256:ca6f079bb33457c66e233e4580ebfc4128855b4cf6370dddd73842a9563e8a27", size = 5486330 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/47/6b/d9fdcdef2eb6a23f391251fde8781c38d42acd82abe84d054cb74f7863b0/ipython-8.18.1-py3-none-any.whl", hash = 
"sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397", size = 808161 }, -] - -[[package]] -name = "ipython" -version = "8.37.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version == '3.10.*'", -] -dependencies = [ - { name = "colorama", marker = "python_full_version == '3.10.*' and sys_platform == 'win32'" }, - { name = "decorator", marker = "python_full_version == '3.10.*'" }, - { name = "exceptiongroup", marker = "python_full_version == '3.10.*'" }, - { name = "jedi", marker = "python_full_version == '3.10.*'" }, - { name = "matplotlib-inline", marker = "python_full_version == '3.10.*'" }, - { name = "pexpect", marker = "python_full_version == '3.10.*' and sys_platform != 'emscripten' and sys_platform != 'win32'" }, - { name = "prompt-toolkit", marker = "python_full_version == '3.10.*'" }, - { name = "pygments", marker = "python_full_version == '3.10.*'" }, - { name = "stack-data", marker = "python_full_version == '3.10.*'" }, - { name = "traitlets", marker = "python_full_version == '3.10.*'" }, - { name = "typing-extensions", marker = "python_full_version == '3.10.*'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/85/31/10ac88f3357fc276dc8a64e8880c82e80e7459326ae1d0a211b40abf6665/ipython-8.37.0.tar.gz", hash = "sha256:ca815841e1a41a1e6b73a0b08f3038af9b2252564d01fc405356d34033012216", size = 5606088 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/91/d0/274fbf7b0b12643cbbc001ce13e6a5b1607ac4929d1b11c72460152c9fc3/ipython-8.37.0-py3-none-any.whl", hash = "sha256:ed87326596b878932dbcb171e3e698845434d8c61b8d8cd474bf663041a9dcf2", size = 831864 }, -] - -[[package]] -name = "ipython" -version = "9.6.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.12'", - "python_full_version == '3.11.*'", -] -dependencies = [ - { name = "colorama", marker = "python_full_version >= '3.11' and sys_platform == 'win32'" }, - { name 
= "decorator", marker = "python_full_version >= '3.11'" }, - { name = "ipython-pygments-lexers", marker = "python_full_version >= '3.11'" }, - { name = "jedi", marker = "python_full_version >= '3.11'" }, - { name = "matplotlib-inline", marker = "python_full_version >= '3.11'" }, - { name = "pexpect", marker = "python_full_version >= '3.11' and sys_platform != 'emscripten' and sys_platform != 'win32'" }, - { name = "prompt-toolkit", marker = "python_full_version >= '3.11'" }, - { name = "pygments", marker = "python_full_version >= '3.11'" }, - { name = "stack-data", marker = "python_full_version >= '3.11'" }, - { name = "traitlets", marker = "python_full_version >= '3.11'" }, - { name = "typing-extensions", marker = "python_full_version == '3.11.*'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/2a/34/29b18c62e39ee2f7a6a3bba7efd952729d8aadd45ca17efc34453b717665/ipython-9.6.0.tar.gz", hash = "sha256:5603d6d5d356378be5043e69441a072b50a5b33b4503428c77b04cb8ce7bc731", size = 4396932 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/48/c5/d5e07995077e48220269c28a221e168c91123ad5ceee44d548f54a057fc0/ipython-9.6.0-py3-none-any.whl", hash = "sha256:5f77efafc886d2f023442479b8149e7d86547ad0a979e9da9f045d252f648196", size = 616170 }, -] - -[[package]] -name = "ipython-pygments-lexers" -version = "1.1.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pygments", marker = "python_full_version >= '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ef/4c/5dd1d8af08107f88c7f741ead7a40854b8ac24ddf9ae850afbcf698aa552/ipython_pygments_lexers-1.1.1.tar.gz", hash = "sha256:09c0138009e56b6854f9535736f4171d855c8c08a563a0dcd8022f78355c7e81", size = 8393 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d9/33/1f075bf72b0b747cb3288d011319aaf64083cf2efef8354174e3ed4540e2/ipython_pygments_lexers-1.1.1-py3-none-any.whl", hash = 
"sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c", size = 8074 }, -] - -[[package]] -name = "isort" -version = "6.1.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -dependencies = [ - { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/1e/82/fa43935523efdfcce6abbae9da7f372b627b27142c3419fcf13bf5b0c397/isort-6.1.0.tar.gz", hash = "sha256:9b8f96a14cfee0677e78e941ff62f03769a06d412aabb9e2a90487b3b7e8d481", size = 824325 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7f/cc/9b681a170efab4868a032631dea1e8446d8ec718a7f657b94d49d1a12643/isort-6.1.0-py3-none-any.whl", hash = "sha256:58d8927ecce74e5087aef019f778d4081a3b6c98f15a80ba35782ca8a2097784", size = 94329 }, -] - -[[package]] -name = "isort" -version = "7.0.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.12'", - "python_full_version == '3.11.*'", - "python_full_version == '3.10.*'", -] -sdist = { url = "https://files.pythonhosted.org/packages/63/53/4f3c058e3bace40282876f9b553343376ee687f3c35a525dc79dbd450f88/isort-7.0.0.tar.gz", hash = "sha256:5513527951aadb3ac4292a41a16cbc50dd1642432f5e8c20057d414bdafb4187", size = 805049 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7f/ed/e3705d6d02b4f7aea715a353c8ce193efd0b5db13e204df895d38734c244/isort-7.0.0-py3-none-any.whl", hash = "sha256:1bcabac8bc3c36c7fb7b98a76c8abb18e0f841a3ba81decac7691008592499c1", size = 94672 }, -] - -[[package]] -name = "itsdangerous" -version = "2.2.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9c/cb/8ac0172223afbccb63986cc25049b154ecfb5e85932587206f42317be31d/itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173", size = 54410 } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef", size = 16234 }, -] - -[[package]] -name = "jedi" -version = "0.19.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "parso" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/72/3a/79a912fbd4d8dd6fbb02bf69afd3bb72cf0c729bb3063c6f4498603db17a/jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0", size = 1231287 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c0/5a/9cac0c82afec3d09ccd97c8b6502d48f165f9124db81b4bcb90b4af974ee/jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9", size = 1572278 }, -] - -[[package]] -name = "jinja2" -version = "3.1.6" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "markupsafe" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899 }, -] - -[[package]] -name = "markupsafe" -version = "3.0.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537 } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/04/90/d08277ce111dd22f77149fd1a5d4653eeb3b3eaacbdfcbae5afb2600eebd/MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8", size = 14357 }, - { url = "https://files.pythonhosted.org/packages/04/e1/6e2194baeae0bca1fae6629dc0cbbb968d4d941469cbab11a3872edff374/MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158", size = 12393 }, - { url = "https://files.pythonhosted.org/packages/1d/69/35fa85a8ece0a437493dc61ce0bb6d459dcba482c34197e3efc829aa357f/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579", size = 21732 }, - { url = "https://files.pythonhosted.org/packages/22/35/137da042dfb4720b638d2937c38a9c2df83fe32d20e8c8f3185dbfef05f7/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d", size = 20866 }, - { url = "https://files.pythonhosted.org/packages/29/28/6d029a903727a1b62edb51863232152fd335d602def598dade38996887f0/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb", size = 20964 }, - { url = "https://files.pythonhosted.org/packages/cc/cd/07438f95f83e8bc028279909d9c9bd39e24149b0d60053a97b2bc4f8aa51/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b", size = 21977 }, - { url = "https://files.pythonhosted.org/packages/29/01/84b57395b4cc062f9c4c55ce0df7d3108ca32397299d9df00fedd9117d3d/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c", size = 21366 }, - { url 
= "https://files.pythonhosted.org/packages/bd/6e/61ebf08d8940553afff20d1fb1ba7294b6f8d279df9fd0c0db911b4bbcfd/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171", size = 21091 }, - { url = "https://files.pythonhosted.org/packages/11/23/ffbf53694e8c94ebd1e7e491de185124277964344733c45481f32ede2499/MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50", size = 15065 }, - { url = "https://files.pythonhosted.org/packages/44/06/e7175d06dd6e9172d4a69a72592cb3f7a996a9c396eee29082826449bbc3/MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a", size = 15514 }, - { url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353 }, - { url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392 }, - { url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984 }, - { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120 }, - { url = 
"https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032 }, - { url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057 }, - { url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359 }, - { url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306 }, - { url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094 }, - { url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521 }, - { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274 }, - { url = 
"https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348 }, - { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149 }, - { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118 }, - { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993 }, - { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178 }, - { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319 }, - { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352 }, - { url = 
"https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097 }, - { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601 }, - { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274 }, - { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352 }, - { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122 }, - { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085 }, - { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978 }, - { url = 
"https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208 }, - { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357 }, - { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344 }, - { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101 }, - { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603 }, - { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510 }, - { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486 }, - { url = 
"https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480 }, - { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914 }, - { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796 }, - { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473 }, - { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114 }, - { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098 }, - { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208 }, - { url = 
"https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739 }, - { url = "https://files.pythonhosted.org/packages/a7/ea/9b1530c3fdeeca613faeb0fb5cbcf2389d816072fab72a71b45749ef6062/MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a", size = 14344 }, - { url = "https://files.pythonhosted.org/packages/4b/c2/fbdbfe48848e7112ab05e627e718e854d20192b674952d9042ebd8c9e5de/MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff", size = 12389 }, - { url = "https://files.pythonhosted.org/packages/f0/25/7a7c6e4dbd4f867d95d94ca15449e91e52856f6ed1905d58ef1de5e211d0/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13", size = 21607 }, - { url = "https://files.pythonhosted.org/packages/53/8f/f339c98a178f3c1e545622206b40986a4c3307fe39f70ccd3d9df9a9e425/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144", size = 20728 }, - { url = "https://files.pythonhosted.org/packages/1a/03/8496a1a78308456dbd50b23a385c69b41f2e9661c67ea1329849a598a8f9/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29", size = 20826 }, - { url = "https://files.pythonhosted.org/packages/e6/cf/0a490a4bd363048c3022f2f475c8c05582179bb179defcee4766fb3dcc18/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0", size = 21843 }, - { url = 
"https://files.pythonhosted.org/packages/19/a3/34187a78613920dfd3cdf68ef6ce5e99c4f3417f035694074beb8848cd77/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0", size = 21219 }, - { url = "https://files.pythonhosted.org/packages/17/d8/5811082f85bb88410ad7e452263af048d685669bbbfb7b595e8689152498/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178", size = 20946 }, - { url = "https://files.pythonhosted.org/packages/7c/31/bd635fb5989440d9365c5e3c47556cfea121c7803f5034ac843e8f37c2f2/MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f", size = 15063 }, - { url = "https://files.pythonhosted.org/packages/b3/73/085399401383ce949f727afec55ec3abd76648d04b9f22e1c0e99cb4bec3/MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a", size = 15506 }, -] - -[[package]] -name = "matplotlib-inline" -version = "0.1.7" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "traitlets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/99/5b/a36a337438a14116b16480db471ad061c36c3694df7c2084a0da7ba538b7/matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90", size = 8159 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8f/8e/9ad090d3553c280a8060fbf6e24dc1c0c29704ee7d1c372f0c174aa59285/matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca", size = 9899 }, -] - -[[package]] -name = "msgpack" -version = "1.1.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/45/b1/ea4f68038a18c77c9467400d166d74c4ffa536f34761f7983a104357e614/msgpack-1.1.1.tar.gz", hash = 
"sha256:77b79ce34a2bdab2594f490c8e80dd62a02d650b91a75159a63ec413b8d104cd", size = 173555 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/33/52/f30da112c1dc92cf64f57d08a273ac771e7b29dea10b4b30369b2d7e8546/msgpack-1.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:353b6fc0c36fde68b661a12949d7d49f8f51ff5fa019c1e47c87c4ff34b080ed", size = 81799 }, - { url = "https://files.pythonhosted.org/packages/e4/35/7bfc0def2f04ab4145f7f108e3563f9b4abae4ab0ed78a61f350518cc4d2/msgpack-1.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:79c408fcf76a958491b4e3b103d1c417044544b68e96d06432a189b43d1215c8", size = 78278 }, - { url = "https://files.pythonhosted.org/packages/e8/c5/df5d6c1c39856bc55f800bf82778fd4c11370667f9b9e9d51b2f5da88f20/msgpack-1.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78426096939c2c7482bf31ef15ca219a9e24460289c00dd0b94411040bb73ad2", size = 402805 }, - { url = "https://files.pythonhosted.org/packages/20/8e/0bb8c977efecfe6ea7116e2ed73a78a8d32a947f94d272586cf02a9757db/msgpack-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b17ba27727a36cb73aabacaa44b13090feb88a01d012c0f4be70c00f75048b4", size = 408642 }, - { url = "https://files.pythonhosted.org/packages/59/a1/731d52c1aeec52006be6d1f8027c49fdc2cfc3ab7cbe7c28335b2910d7b6/msgpack-1.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a17ac1ea6ec3c7687d70201cfda3b1e8061466f28f686c24f627cae4ea8efd0", size = 395143 }, - { url = "https://files.pythonhosted.org/packages/2b/92/b42911c52cda2ba67a6418ffa7d08969edf2e760b09015593c8a8a27a97d/msgpack-1.1.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:88d1e966c9235c1d4e2afac21ca83933ba59537e2e2727a999bf3f515ca2af26", size = 395986 }, - { url = "https://files.pythonhosted.org/packages/61/dc/8ae165337e70118d4dab651b8b562dd5066dd1e6dd57b038f32ebc3e2f07/msgpack-1.1.1-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:f6d58656842e1b2ddbe07f43f56b10a60f2ba5826164910968f5933e5178af75", size = 402682 }, - { url = "https://files.pythonhosted.org/packages/58/27/555851cb98dcbd6ce041df1eacb25ac30646575e9cd125681aa2f4b1b6f1/msgpack-1.1.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:96decdfc4adcbc087f5ea7ebdcfd3dee9a13358cae6e81d54be962efc38f6338", size = 406368 }, - { url = "https://files.pythonhosted.org/packages/d4/64/39a26add4ce16f24e99eabb9005e44c663db00e3fce17d4ae1ae9d61df99/msgpack-1.1.1-cp310-cp310-win32.whl", hash = "sha256:6640fd979ca9a212e4bcdf6eb74051ade2c690b862b679bfcb60ae46e6dc4bfd", size = 65004 }, - { url = "https://files.pythonhosted.org/packages/7d/18/73dfa3e9d5d7450d39debde5b0d848139f7de23bd637a4506e36c9800fd6/msgpack-1.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:8b65b53204fe1bd037c40c4148d00ef918eb2108d24c9aaa20bc31f9810ce0a8", size = 71548 }, - { url = "https://files.pythonhosted.org/packages/7f/83/97f24bf9848af23fe2ba04380388216defc49a8af6da0c28cc636d722502/msgpack-1.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:71ef05c1726884e44f8b1d1773604ab5d4d17729d8491403a705e649116c9558", size = 82728 }, - { url = "https://files.pythonhosted.org/packages/aa/7f/2eaa388267a78401f6e182662b08a588ef4f3de6f0eab1ec09736a7aaa2b/msgpack-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:36043272c6aede309d29d56851f8841ba907a1a3d04435e43e8a19928e243c1d", size = 79279 }, - { url = "https://files.pythonhosted.org/packages/f8/46/31eb60f4452c96161e4dfd26dbca562b4ec68c72e4ad07d9566d7ea35e8a/msgpack-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a32747b1b39c3ac27d0670122b57e6e57f28eefb725e0b625618d1b59bf9d1e0", size = 423859 }, - { url = "https://files.pythonhosted.org/packages/45/16/a20fa8c32825cc7ae8457fab45670c7a8996d7746ce80ce41cc51e3b2bd7/msgpack-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a8b10fdb84a43e50d38057b06901ec9da52baac6983d3f709d8507f3889d43f", size = 429975 }, - { 
url = "https://files.pythonhosted.org/packages/86/ea/6c958e07692367feeb1a1594d35e22b62f7f476f3c568b002a5ea09d443d/msgpack-1.1.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba0c325c3f485dc54ec298d8b024e134acf07c10d494ffa24373bea729acf704", size = 413528 }, - { url = "https://files.pythonhosted.org/packages/75/05/ac84063c5dae79722bda9f68b878dc31fc3059adb8633c79f1e82c2cd946/msgpack-1.1.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:88daaf7d146e48ec71212ce21109b66e06a98e5e44dca47d853cbfe171d6c8d2", size = 413338 }, - { url = "https://files.pythonhosted.org/packages/69/e8/fe86b082c781d3e1c09ca0f4dacd457ede60a13119b6ce939efe2ea77b76/msgpack-1.1.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8b55ea20dc59b181d3f47103f113e6f28a5e1c89fd5b67b9140edb442ab67f2", size = 422658 }, - { url = "https://files.pythonhosted.org/packages/3b/2b/bafc9924df52d8f3bb7c00d24e57be477f4d0f967c0a31ef5e2225e035c7/msgpack-1.1.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4a28e8072ae9779f20427af07f53bbb8b4aa81151054e882aee333b158da8752", size = 427124 }, - { url = "https://files.pythonhosted.org/packages/a2/3b/1f717e17e53e0ed0b68fa59e9188f3f610c79d7151f0e52ff3cd8eb6b2dc/msgpack-1.1.1-cp311-cp311-win32.whl", hash = "sha256:7da8831f9a0fdb526621ba09a281fadc58ea12701bc709e7b8cbc362feabc295", size = 65016 }, - { url = "https://files.pythonhosted.org/packages/48/45/9d1780768d3b249accecc5a38c725eb1e203d44a191f7b7ff1941f7df60c/msgpack-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:5fd1b58e1431008a57247d6e7cc4faa41c3607e8e7d4aaf81f7c29ea013cb458", size = 72267 }, - { url = "https://files.pythonhosted.org/packages/e3/26/389b9c593eda2b8551b2e7126ad3a06af6f9b44274eb3a4f054d48ff7e47/msgpack-1.1.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ae497b11f4c21558d95de9f64fff7053544f4d1a17731c866143ed6bb4591238", size = 82359 }, - { url = 
"https://files.pythonhosted.org/packages/ab/65/7d1de38c8a22cf8b1551469159d4b6cf49be2126adc2482de50976084d78/msgpack-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:33be9ab121df9b6b461ff91baac6f2731f83d9b27ed948c5b9d1978ae28bf157", size = 79172 }, - { url = "https://files.pythonhosted.org/packages/0f/bd/cacf208b64d9577a62c74b677e1ada005caa9b69a05a599889d6fc2ab20a/msgpack-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f64ae8fe7ffba251fecb8408540c34ee9df1c26674c50c4544d72dbf792e5ce", size = 425013 }, - { url = "https://files.pythonhosted.org/packages/4d/ec/fd869e2567cc9c01278a736cfd1697941ba0d4b81a43e0aa2e8d71dab208/msgpack-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a494554874691720ba5891c9b0b39474ba43ffb1aaf32a5dac874effb1619e1a", size = 426905 }, - { url = "https://files.pythonhosted.org/packages/55/2a/35860f33229075bce803a5593d046d8b489d7ba2fc85701e714fc1aaf898/msgpack-1.1.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb643284ab0ed26f6957d969fe0dd8bb17beb567beb8998140b5e38a90974f6c", size = 407336 }, - { url = "https://files.pythonhosted.org/packages/8c/16/69ed8f3ada150bf92745fb4921bd621fd2cdf5a42e25eb50bcc57a5328f0/msgpack-1.1.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d275a9e3c81b1093c060c3837e580c37f47c51eca031f7b5fb76f7b8470f5f9b", size = 409485 }, - { url = "https://files.pythonhosted.org/packages/c6/b6/0c398039e4c6d0b2e37c61d7e0e9d13439f91f780686deb8ee64ecf1ae71/msgpack-1.1.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4fd6b577e4541676e0cc9ddc1709d25014d3ad9a66caa19962c4f5de30fc09ef", size = 412182 }, - { url = "https://files.pythonhosted.org/packages/b8/d0/0cf4a6ecb9bc960d624c93effaeaae75cbf00b3bc4a54f35c8507273cda1/msgpack-1.1.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bb29aaa613c0a1c40d1af111abf025f1732cab333f96f285d6a93b934738a68a", size = 419883 }, - { url = 
"https://files.pythonhosted.org/packages/62/83/9697c211720fa71a2dfb632cad6196a8af3abea56eece220fde4674dc44b/msgpack-1.1.1-cp312-cp312-win32.whl", hash = "sha256:870b9a626280c86cff9c576ec0d9cbcc54a1e5ebda9cd26dab12baf41fee218c", size = 65406 }, - { url = "https://files.pythonhosted.org/packages/c0/23/0abb886e80eab08f5e8c485d6f13924028602829f63b8f5fa25a06636628/msgpack-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:5692095123007180dca3e788bb4c399cc26626da51629a31d40207cb262e67f4", size = 72558 }, - { url = "https://files.pythonhosted.org/packages/a1/38/561f01cf3577430b59b340b51329803d3a5bf6a45864a55f4ef308ac11e3/msgpack-1.1.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3765afa6bd4832fc11c3749be4ba4b69a0e8d7b728f78e68120a157a4c5d41f0", size = 81677 }, - { url = "https://files.pythonhosted.org/packages/09/48/54a89579ea36b6ae0ee001cba8c61f776451fad3c9306cd80f5b5c55be87/msgpack-1.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8ddb2bcfd1a8b9e431c8d6f4f7db0773084e107730ecf3472f1dfe9ad583f3d9", size = 78603 }, - { url = "https://files.pythonhosted.org/packages/a0/60/daba2699b308e95ae792cdc2ef092a38eb5ee422f9d2fbd4101526d8a210/msgpack-1.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:196a736f0526a03653d829d7d4c5500a97eea3648aebfd4b6743875f28aa2af8", size = 420504 }, - { url = "https://files.pythonhosted.org/packages/20/22/2ebae7ae43cd8f2debc35c631172ddf14e2a87ffcc04cf43ff9df9fff0d3/msgpack-1.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d592d06e3cc2f537ceeeb23d38799c6ad83255289bb84c2e5792e5a8dea268a", size = 423749 }, - { url = "https://files.pythonhosted.org/packages/40/1b/54c08dd5452427e1179a40b4b607e37e2664bca1c790c60c442c8e972e47/msgpack-1.1.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4df2311b0ce24f06ba253fda361f938dfecd7b961576f9be3f3fbd60e87130ac", size = 404458 }, - { url = 
"https://files.pythonhosted.org/packages/2e/60/6bb17e9ffb080616a51f09928fdd5cac1353c9becc6c4a8abd4e57269a16/msgpack-1.1.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e4141c5a32b5e37905b5940aacbc59739f036930367d7acce7a64e4dec1f5e0b", size = 405976 }, - { url = "https://files.pythonhosted.org/packages/ee/97/88983e266572e8707c1f4b99c8fd04f9eb97b43f2db40e3172d87d8642db/msgpack-1.1.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b1ce7f41670c5a69e1389420436f41385b1aa2504c3b0c30620764b15dded2e7", size = 408607 }, - { url = "https://files.pythonhosted.org/packages/bc/66/36c78af2efaffcc15a5a61ae0df53a1d025f2680122e2a9eb8442fed3ae4/msgpack-1.1.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4147151acabb9caed4e474c3344181e91ff7a388b888f1e19ea04f7e73dc7ad5", size = 424172 }, - { url = "https://files.pythonhosted.org/packages/8c/87/a75eb622b555708fe0427fab96056d39d4c9892b0c784b3a721088c7ee37/msgpack-1.1.1-cp313-cp313-win32.whl", hash = "sha256:500e85823a27d6d9bba1d057c871b4210c1dd6fb01fbb764e37e4e8847376323", size = 65347 }, - { url = "https://files.pythonhosted.org/packages/ca/91/7dc28d5e2a11a5ad804cf2b7f7a5fcb1eb5a4966d66a5d2b41aee6376543/msgpack-1.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:6d489fba546295983abd142812bda76b57e33d0b9f5d5b71c09a583285506f69", size = 72341 }, - { url = "https://files.pythonhosted.org/packages/1f/bd/0792be119d7fe7dc2148689ef65c90507d82d20a204aab3b98c74a1f8684/msgpack-1.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5be6b6bc52fad84d010cb45433720327ce886009d862f46b26d4d154001994b", size = 81882 }, - { url = "https://files.pythonhosted.org/packages/75/77/ce06c8e26a816ae8730a8e030d263c5289adcaff9f0476f9b270bdd7c5c2/msgpack-1.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3a89cd8c087ea67e64844287ea52888239cbd2940884eafd2dcd25754fb72232", size = 78414 }, - { url = 
"https://files.pythonhosted.org/packages/73/27/190576c497677fb4a0d05d896b24aea6cdccd910f206aaa7b511901befed/msgpack-1.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d75f3807a9900a7d575d8d6674a3a47e9f227e8716256f35bc6f03fc597ffbf", size = 400927 }, - { url = "https://files.pythonhosted.org/packages/ed/af/6a0aa5a06762e70726ec3c10fb966600d84a7220b52635cb0ab2dc64d32f/msgpack-1.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d182dac0221eb8faef2e6f44701812b467c02674a322c739355c39e94730cdbf", size = 405903 }, - { url = "https://files.pythonhosted.org/packages/1e/80/3f3da358cecbbe8eb12360814bd1277d59d2608485934742a074d99894a9/msgpack-1.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b13fe0fb4aac1aa5320cd693b297fe6fdef0e7bea5518cbc2dd5299f873ae90", size = 393192 }, - { url = "https://files.pythonhosted.org/packages/98/c6/3a0ec7fdebbb4f3f8f254696cd91d491c29c501dbebd86286c17e8f68cd7/msgpack-1.1.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:435807eeb1bc791ceb3247d13c79868deb22184e1fc4224808750f0d7d1affc1", size = 393851 }, - { url = "https://files.pythonhosted.org/packages/39/37/df50d5f8e68514b60fbe70f6e8337ea2b32ae2be030871bcd9d1cf7d4b62/msgpack-1.1.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4835d17af722609a45e16037bb1d4d78b7bdf19d6c0128116d178956618c4e88", size = 400292 }, - { url = "https://files.pythonhosted.org/packages/fc/ec/1e067292e02d2ceb4c8cb5ba222c4f7bb28730eef5676740609dc2627e0f/msgpack-1.1.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a8ef6e342c137888ebbfb233e02b8fbd689bb5b5fcc59b34711ac47ebd504478", size = 401873 }, - { url = "https://files.pythonhosted.org/packages/d3/31/e8c9c6b5b58d64c9efa99c8d181fcc25f38ead357b0360379fbc8a4234ad/msgpack-1.1.1-cp39-cp39-win32.whl", hash = "sha256:61abccf9de335d9efd149e2fff97ed5974f2481b3353772e8e2dd3402ba2bd57", size = 65028 }, - { url = 
"https://files.pythonhosted.org/packages/20/d6/cd62cded572e5e25892747a5d27850170bcd03c855e9c69c538e024de6f9/msgpack-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:40eae974c873b2992fd36424a5d9407f93e97656d999f43fca9d29f820899084", size = 71700 }, -] - -[[package]] -name = "ndindex" -version = "1.10.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/dc/a0/f584c0b6b998e4981201a1383200663a725f556f439cf58d02a093cb9f91/ndindex-1.10.0.tar.gz", hash = "sha256:20e3a2f0a8ed4646abf0f13296aab0b5b9cc8c5bc182b71b5945e76eb6f558bb", size = 258688 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/61/de/45af0f9b0abe5795228ca79577541c1c79b664996a5c9d15df21789e2ced/ndindex-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3d96dc319c39dce679d85a997f4eeb439f6de73c0793956b66598954ca61365c", size = 162311 }, - { url = "https://files.pythonhosted.org/packages/d9/dd/d950718536c3898580c3f903888209d75057659b862b3d8d8af17bdb4fa8/ndindex-1.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b082de3c042b6da7ca327f17d088de3695333c30e0f9717d2ed5de5dc4d70802", size = 161621 }, - { url = "https://files.pythonhosted.org/packages/bd/00/462ef86c63590e1f2e56d31ce46e9f13ae6aebd7506d33c08b927d2f1594/ndindex-1.10.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:69cf517d138f47163d6c94cd9ccaafb91606a2aab386c05aaa0718975da09c88", size = 482241 }, - { url = "https://files.pythonhosted.org/packages/e3/a6/975bfec7bec7f274853b0c33953b5f2df4ad51f62d1aab0c7142fee98261/ndindex-1.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9cea2a5f7a432dafadb6c5732a9af3e7139adbf9085320f284885fe5d4776e4", size = 501603 }, - { url = "https://files.pythonhosted.org/packages/13/4a/8d39f8ab1d20cd246360d7af707107bc4a332c6758ea45780a5bff6ec29f/ndindex-1.10.0-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:a3d2ea706c80e21022f6661524efb0aeed89a714a8fda4712df8d4a90ef507f5", size = 1620040 }, - { url = "https://files.pythonhosted.org/packages/87/83/ba24c57073c29ba3f69c52767bec64dc818e90ac23f6ee43c98172b9f888/ndindex-1.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d5b3b8f99970ce40fbff1e55ad9ddf9ea708e82ace91271784e7ff1d08707c4c", size = 1529863 }, - { url = "https://files.pythonhosted.org/packages/cd/2c/61e88acae938898994a6cfe83716db0e440f44f7b0c821a7adb2ab4cedbd/ndindex-1.10.0-cp310-cp310-win32.whl", hash = "sha256:6a5a401b867530fe4f1022cc8d578c8092cfdc726348e6d1569ec91881da365f", size = 149122 }, - { url = "https://files.pythonhosted.org/packages/a9/61/2bc88b2b5f71649f9e07fcf3509ce8eb187adbb3e787e4600b28ce00139c/ndindex-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:88504651ddcb6733ba0caf0cdfc214d8ba9f140609b69f6566ad143322ce5a96", size = 156550 }, - { url = "https://files.pythonhosted.org/packages/b4/1c/a53253d68bb269e5591c39b96ae2c4dd671132a82f63d70aea486f76d70c/ndindex-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2e42198c8636eaf468cf28b7e1700738de37841853f5f15a0671bad4c3876a85", size = 162556 }, - { url = "https://files.pythonhosted.org/packages/0d/2a/4e268ff5992d4b42755ee19cf46c3e954632aadd57810db7173fe945ad47/ndindex-1.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ec9865e787eababc9aa1be973bf8545c044e2b68297fe37adf7aeefe0ec61f59", size = 161769 }, - { url = "https://files.pythonhosted.org/packages/14/67/28ef988483e1ff446873150979b20fa87833c711fbe3a816e0e6a3e6e7d3/ndindex-1.10.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72377bc5d15229eeefa73a4370212d0bdb8992c76c2228df0771e0dcdeb5354a", size = 504542 }, - { url = "https://files.pythonhosted.org/packages/79/d8/a4638485d17e5a236a7f8687a63229b4cc4737d018d8f8bdf18983419d5b/ndindex-1.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6a8c9f85a1d6497a1fc3a8ac7faf64eef600f95d4330566ae7468e59b6da28d7", size = 528179 }, - { url = "https://files.pythonhosted.org/packages/40/2a/a7c119db8332b85fa6886104ac388a771dd2b0ec35e4b2443d555c5e0e00/ndindex-1.10.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:560211699c4fa370c30edace212b4b61950934c3c9a7b3964f52f2dd09c6913a", size = 1642463 }, - { url = "https://files.pythonhosted.org/packages/14/9a/41dd8270e9b0a411221c1c584fb088f0d43d750d596cf02e1f8b528c426d/ndindex-1.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:68e4ed3b5816d22cddf71478197c62ea2453a8f7dea0da57b52ce8b537c7a0c3", size = 1553373 }, - { url = "https://files.pythonhosted.org/packages/6e/36/4d42edfc5f350b83801a473721927c4c01c210014bb2ea1a754e232871d3/ndindex-1.10.0-cp311-cp311-win32.whl", hash = "sha256:52adf006f99f21913300d93d8b08fdd9d12796ee2dc7a1737acd1beea5f7e7af", size = 148975 }, - { url = "https://files.pythonhosted.org/packages/e9/b3/ec2b3447e49d69f033edb003761d3e2e01f2e5fe8ab397140099920405aa/ndindex-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:b90559638d35dd3c7f3f46dced6a306935866f86ba5cbd35190ef954334c33b9", size = 156723 }, - { url = "https://files.pythonhosted.org/packages/e5/cb/c44335f5aa81d54d2c06ea0076cc394a9d247ad8bf7dd63c87dec10d2e1f/ndindex-1.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:50f9c49659d91b19964da9ee96d5cb18f5102dc1b31ea5ca085f0b4d905cdc60", size = 162959 }, - { url = "https://files.pythonhosted.org/packages/42/f5/2bff167479b589a21288f8f150ca2dbbb5d20e3eb264515eafc5ff1c58f8/ndindex-1.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3e58c340b829275d2a2ac8fc468fca6dd1ca78a7351824dabf4a52cf0a79f648", size = 161618 }, - { url = "https://files.pythonhosted.org/packages/69/ed/1e921acc45f18b6ade332af772496b5a3681856c13b3a0bc3f5a46630b4e/ndindex-1.10.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd170addae6e4322438cc9ac1ae0cbf0d8f7bea25716fdbef53c4964ee84a64a", 
size = 521535 }, - { url = "https://files.pythonhosted.org/packages/ec/4a/0b6a4c8c06803efe531fc57d008294bd12a95b94c9ca4922f87cee2c3829/ndindex-1.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b33b378d1ec4d2e041d7d14a2d6d05f74a6ef0f9273985930ad0b993d86e8064", size = 546226 }, - { url = "https://files.pythonhosted.org/packages/4e/94/f8fb6e28660428bb359ffaf088409228fb9033db76ca6363fcf60d31ec13/ndindex-1.10.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c1eb9aa7ad4dd561dfb94b8c069677c59032f7c663e53ab05f97aa20c1643d1b", size = 1660328 }, - { url = "https://files.pythonhosted.org/packages/df/8e/a70ba950fff63d0a3a7142a53ff160cb03076a95964adb057be75a9c9be5/ndindex-1.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d490499a09e9cb78d02801d39d7da21e4975f09c78d0e1095a881adf20d0d4e7", size = 1576545 }, - { url = "https://files.pythonhosted.org/packages/d4/17/2a415224e7e35c7e36ffa1f58ef515f7653b118f0098c0f76f3e765b2826/ndindex-1.10.0-cp312-cp312-win32.whl", hash = "sha256:2c65d448210f8e3763e12d9a138195de77b383164d819080eaf64e832c2933bc", size = 149056 }, - { url = "https://files.pythonhosted.org/packages/37/e7/4f955c90e86c025ef04234adfa34ee5053f3dfc835b7d632e7c38ab713fc/ndindex-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:d8a9bfac1ce127bf55ad73b62ec57a415d5489db7a76056905a449f8346b69a3", size = 157017 }, - { url = "https://files.pythonhosted.org/packages/03/ee/8f7aa7dde0f2d947c2e4034f4c58b308bf1f48a18780183e7f84298a573c/ndindex-1.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:50b579a0c57a4072fc97848f1d0db8cb228ca73d050c8bc9d4e7cf2e75510829", size = 161193 }, - { url = "https://files.pythonhosted.org/packages/9b/3b/9f2a49b5d3a558e9cd067e0911e1bb8d8d553e1d689bb9a9119c775636b9/ndindex-1.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0956611e29f51857a54ba0750568ebdbf0eacfad4a262253af2522e77b476369", size = 159952 }, - { url = 
"https://files.pythonhosted.org/packages/76/b9/93273d8dd7a2e155af6ed0bad2f2618202794ffe537184b25ff666cf8e31/ndindex-1.10.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f82aada1f194c5ea11943ca89532cf449881de8c9c2c48b8baa43d467486fdb2", size = 502466 }, - { url = "https://files.pythonhosted.org/packages/b5/07/c64b0c8416f604f6990da5d1fa97c9de1278a4eec1efcc63b71053b4f0c0/ndindex-1.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38a56a16edbd62ef039b93e393047e66238d02dbc1e95e95b79c0bdd0a4785f7", size = 526910 }, - { url = "https://files.pythonhosted.org/packages/b3/a5/316f13eeda944db14015a6edaebd88fc83b196d86cae9f576be319b93873/ndindex-1.10.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8b11a3b8fd983adafea988b2a7e51fe8c0be819639b16506a472429069158f6d", size = 1642168 }, - { url = "https://files.pythonhosted.org/packages/f3/13/4c1cf1b6280669f32e9960215d6cbed027084b0bb423c924095f247f3185/ndindex-1.10.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:be7cfaed1e7a72c7e0bbc4a0e1965d3cc8207cb3d56bd351c0cb2b2d94db0bdd", size = 1557347 }, - { url = "https://files.pythonhosted.org/packages/2d/ac/36124ca146aaa6e84ac479e06a81b5ae9ebde2e3b4b2c77c49492bcfebae/ndindex-1.10.0-cp313-cp313-win32.whl", hash = "sha256:f779a0c20ffd617535bf57c7437d5521d5453daf2e0db0d148301df6b24c0932", size = 148623 }, - { url = "https://files.pythonhosted.org/packages/23/38/13169cc35be65a6683784c5a1f2c7e6d2219f58fb56abe9d13ef762a634a/ndindex-1.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:1ef8d71e0ddf0c6e39e64f1e328a37ebefcca1b89218a4068c353851bcb4cb0f", size = 156188 }, - { url = "https://files.pythonhosted.org/packages/29/f6/ba98045516f39b0414d03c466e7c46b79290cd54a73ff961b9081bc66a6e/ndindex-1.10.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:6fcefeefc48815dd8e99999999477d91d4287d8034b1c81084042a49976b212c", size = 167198 }, - { url = 
"https://files.pythonhosted.org/packages/ca/14/4c8b1256009cda78387e6e3035d4b86582d98b557e56f7ee8f58df3e57b4/ndindex-1.10.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:882367d3d5a4d20155c23d890bf01ffbac78019eee09a9456ff3322f62eb34c1", size = 167324 }, - { url = "https://files.pythonhosted.org/packages/c5/34/a1e8117c0fe5a862da9e7f0162233340c7a9bbd728161a06cd0ad856514e/ndindex-1.10.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f04b3eeced5a10f1c00197ee93c913a691467c752306c0d97e6df9c02af4e6d", size = 608219 }, - { url = "https://files.pythonhosted.org/packages/19/6c/f9b449d0d9db404637d026798a208b677c04c349ab740db33ab78065603d/ndindex-1.10.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:cb68232e58ca6cc92ddc8cdddcff8dcdfa5de030e89de8457e5d43de77bcc331", size = 1639541 }, - { url = "https://files.pythonhosted.org/packages/2c/14/0bfe948a092ddba3c23f18a6f4e3fc2029adfc3e433e634410ba98b7700f/ndindex-1.10.0-cp313-cp313t-win32.whl", hash = "sha256:af8ecd5a0221482e9b467918b90e78f85241572102fdcf0a941ef087e7dcf2e4", size = 157843 }, - { url = "https://files.pythonhosted.org/packages/50/49/0e7d831e918db3e8819f7327e835e4b106fe91ed0c865e96fb952f936b7f/ndindex-1.10.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2fb32342379547032fd25dbf5bfc7003ebc1bde582779e9a171373a738d6fb8b", size = 166116 }, - { url = "https://files.pythonhosted.org/packages/77/85/66a504a430e28cf0ed12a5092253814114dae2b842ec2894e5a81ee6df45/ndindex-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4542247a231a8a998636109d6241bc692b583de06afa3fc2b8fbbbd6424bb965", size = 163972 }, - { url = "https://files.pythonhosted.org/packages/bb/59/ced90fe3e5d2a695310334162dec2f7bf00659a1d2c760e089cef1e58dbe/ndindex-1.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:442ee28aaaa471bcc845406d7f006715e685087217e1d58b516b8d58a8c778eb", size = 162983 }, - { url = 
"https://files.pythonhosted.org/packages/2c/97/d7e08e27ac65a1869659985ec4e026f154d3b11e5298bdc97cdbec9b0aae/ndindex-1.10.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f0274114cc986f017093d1f92e8c3c38cc4aaf7ed39facecdbb4bf636d1bc95", size = 489636 }, - { url = "https://files.pythonhosted.org/packages/84/45/0beba49f64c9e06398c037b580467bf1001d186dc73de5aeafbad55e1ca7/ndindex-1.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009dde7c7216ea784744ed1d4bc1606c39f6d1b192395377e95a657b59e524c7", size = 510063 }, - { url = "https://files.pythonhosted.org/packages/d7/57/255a54840d0cac8f3e735e2ce2bba8bbe8dc33bf80b909d1cc39b9f97fce/ndindex-1.10.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:660cae3a9fae4fbdb12417fa4efe1f476c37793a5230a0fb28ea9cf38ad71658", size = 1628733 }, - { url = "https://files.pythonhosted.org/packages/2d/27/da2fd1a1e2b670e3fab1ab217561b3bb6fe38b6b52f0edb56762f2a80f6f/ndindex-1.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:304de39ba6bbea3bf0b4069c6240c87e9697fe355128324bc74cb6e92813df58", size = 1538961 }, - { url = "https://files.pythonhosted.org/packages/e7/18/eecb8a9df326b861fe021d1401026d7694f7d910c9f9baef4ae95448d3fe/ndindex-1.10.0-cp39-cp39-win32.whl", hash = "sha256:bf7aba235a994a0c488d9edfd00558a96eef3ae0f18c1504d9e50677ba82fbd5", size = 149916 }, - { url = "https://files.pythonhosted.org/packages/89/de/83add3ed9a94c6e5f7401d2a3547a51ef9c89f69ff9b8fd8a8ece598886d/ndindex-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:041be825a10e4bd7368e481a7893a96e96680a9623860e58d65c843e7734cccc", size = 157607 }, - { url = "https://files.pythonhosted.org/packages/b0/61/afde1bf918386625e477a7ac0fa518ca83f9239e2675affccf8d48d05e25/ndindex-1.10.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1851d2d490413edc5c5734f5f74e8d3b59cfc23eae561d10bd4db6e4162dcf02", size = 146659 }, - { url = 
"https://files.pythonhosted.org/packages/63/22/90a3e3aa613d4d7e5432e8d7cf0188049f61b34b104eef7f014b7e35a3aa/ndindex-1.10.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:490c577e6915f8d2d045239a14e70b1dfd14b703028a41f6a3713821598d0db8", size = 146160 }, - { url = "https://files.pythonhosted.org/packages/80/a5/677dc41756ac9b2ac3bd0b458abda4dee0c74ee1c6560be3a1b36cc2c9d1/ndindex-1.10.0-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:21f4c61db28b7ba8dc03548a3b2c3561feb8d61f7293dfc310df52aa2676463f", size = 163067 }, - { url = "https://files.pythonhosted.org/packages/01/8d/319499a3f9da41695a75243b8fd8576d42c1e382f5dc935b885f590a42be/ndindex-1.10.0-pp310-pypy310_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd41c7cce386bc21a38a2153427ce47f92d6bdb097dc3c5c42fa24e75090c8f", size = 160109 }, - { url = "https://files.pythonhosted.org/packages/7c/66/a6721aac78028ee1dd35106a20a2f5c940f17587bc8c8fb9d98040eeddec/ndindex-1.10.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1ba5f6d09ad320e0045ea39d7efd66a21d73cd4875d114be08e7ba6204a8feb7", size = 148094 }, - { url = "https://files.pythonhosted.org/packages/c3/61/1333424bdfcebdcea63f5ed86ac98dccaf07ebb7e1463ca845a06e321d91/ndindex-1.10.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:aa17ea725f85af9285b298f72ccc8012949c0916d4426b0215d1c556dd995246", size = 146929 }, - { url = "https://files.pythonhosted.org/packages/eb/7c/0813615d958ec78c521b9c09518b1f49ec553a0bec0646b5f4ebbf33bdcb/ndindex-1.10.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:219fdef9d6a557913fd92418275088b46c727944356f3fe59f4f72d62efd6f3d", size = 146417 }, - { url = "https://files.pythonhosted.org/packages/d8/a1/b340a47409253f05c78d400f98b43477549ad1a1f7a5358acb784c79ed48/ndindex-1.10.0-pp311-pypy311_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", 
hash = "sha256:f1962137fcb69c00e2db42d5d045f9b7413fc37f44b143e7ae4a8c2c68ba3832", size = 163867 }, - { url = "https://files.pythonhosted.org/packages/02/24/e5192ffb87070e9ff2328d715e5aa3a7f6b673e86c1ee8f48136815564e1/ndindex-1.10.0-pp311-pypy311_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18c9c8271926fb16c59e827b61bb77f45ee31a824eaa50b386edcd77a6a7c9a3", size = 160644 }, - { url = "https://files.pythonhosted.org/packages/09/c5/b894cc961460e608b869d91164e9f825e3bb0579defb37c0eea61dce584e/ndindex-1.10.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:76e4fb082c83ccbc67c7a64b80e33bc5dfe9379f30c3b40a865914ae79947071", size = 147721 }, - { url = "https://files.pythonhosted.org/packages/09/90/0a8e43e9ade1ff08dfe60ffe8bbc7751dbb6d19ffb45717b126445aa4ac7/ndindex-1.10.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:074be9fc105e87f0eab6b464e78ecea6ffbb78d8bee07c5bbc2476f7900796ce", size = 146460 }, - { url = "https://files.pythonhosted.org/packages/70/ce/8796f89989ad6fc3603991bb20e55f28a9279ea905a9d63c127a0440df7c/ndindex-1.10.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:058002945c1e956761d9a9ae620a1ae1c01b613fc9808d09e6f65efd977e169d", size = 145977 }, - { url = "https://files.pythonhosted.org/packages/19/5a/67d0e21d21ccf19d396cb84dbf65b089f7076406400281e0b0bfc269602a/ndindex-1.10.0-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:004cfdde6106891e63f1a22e5d53d2884c277fd749839c6869e0bfffe5be2ed0", size = 162824 }, - { url = "https://files.pythonhosted.org/packages/bc/ac/72dbdbedce254317e83bcaec269f166450cf10cf5130296c0a7ca65a1bb9/ndindex-1.10.0-pp39-pypy39_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0a9bbd54500cebe3d481611acd420a46099ce65181b81e8347e11a6bac68445", size = 159876 }, - { url = 
"https://files.pythonhosted.org/packages/d4/84/5d74108950086ea209672dff368c42bd47e9d2d4890438e763a906a4a401/ndindex-1.10.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:367a2af7fbb84395a9d66f92dd0c4287b86e7b2c60cb96505c13ff36571bc9af", size = 147961 }, -] - -[[package]] -name = "nodeenv" -version = "1.9.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314 }, -] - -[[package]] -name = "numexpr" -version = "2.10.2" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -dependencies = [ - { name = "numpy", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/21/67/c7415cf04ebe418193cfd6595ae03e3a64d76dac7b9c010098b39cc7992e/numexpr-2.10.2.tar.gz", hash = "sha256:b0aff6b48ebc99d2f54f27b5f73a58cb92fde650aeff1b397c71c8788b4fff1a", size = 106787 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fd/dc/bd84219318826d138b7e729ac3ffce3c706ab9d810ce74326a55c7252dd1/numexpr-2.10.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5b0e82d2109c1d9e63fcd5ea177d80a11b881157ab61178ddbdebd4c561ea46", size = 145011 }, - { url = "https://files.pythonhosted.org/packages/31/6a/b1f08141283327478a57490c0ab3f26a634d4741ff33b9e22f760a7cedb0/numexpr-2.10.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3fc2b8035a0c2cdc352e58c3875cb668836018065cbf5752cb531015d9a568d8", size = 134777 }, - { url = 
"https://files.pythonhosted.org/packages/7c/d6/6641864b0446ce472330de7644c78f90bd7e55d902046b44161f92721279/numexpr-2.10.2-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0db5ff5183935d1612653559c319922143e8fa3019007696571b13135f216458", size = 408893 }, - { url = "https://files.pythonhosted.org/packages/25/ab/cb5809cb1f66431632d63dc028c58cb91492725c74dddc4b97ba62e88a92/numexpr-2.10.2-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15f59655458056fdb3a621b1bb8e071581ccf7e823916c7568bb7c9a3e393025", size = 397305 }, - { url = "https://files.pythonhosted.org/packages/9c/a0/29bcb31a9debb743e3dc46bacd55f4f6ee6a77d95eda5c8dca19a29c0627/numexpr-2.10.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ce8cccf944339051e44a49a124a06287fe3066d0acbff33d1aa5aee10a96abb7", size = 1378789 }, - { url = "https://files.pythonhosted.org/packages/cc/72/415262a7bdda706c41bf8254311a5ca13d3b8532341ab478be4583d7061a/numexpr-2.10.2-cp310-cp310-win32.whl", hash = "sha256:ba85371c9a8d03e115f4dfb6d25dfbce05387002b9bc85016af939a1da9624f0", size = 151935 }, - { url = "https://files.pythonhosted.org/packages/71/fa/0124f0c2a502a0bac4553c8a171c551f154cf80a83a15e40d30c43e48a7e/numexpr-2.10.2-cp310-cp310-win_amd64.whl", hash = "sha256:deb64235af9eeba59fcefa67e82fa80cfc0662e1b0aa373b7118a28da124d51d", size = 144961 }, - { url = "https://files.pythonhosted.org/packages/de/b7/f25d6166f92ef23737c1c90416144492a664f0a56510d90f7c6577c2cd14/numexpr-2.10.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6b360eb8d392483410fe6a3d5a7144afa298c9a0aa3e9fe193e89590b47dd477", size = 145055 }, - { url = "https://files.pythonhosted.org/packages/66/64/428361ea6415826332f38ef2dd5c3abf4e7e601f033bfc9be68b680cb765/numexpr-2.10.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d9a42f5c24880350d88933c4efee91b857c378aaea7e8b86221fff569069841e", size = 134743 }, - { url = 
"https://files.pythonhosted.org/packages/3f/fb/639ec91d2ea7b4a5d66e26e8ef8e06b020c8e9b9ebaf3bab7b0a9bee472e/numexpr-2.10.2-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:83fcb11988b57cc25b028a36d285287d706d1f536ebf2662ea30bd990e0de8b9", size = 410397 }, - { url = "https://files.pythonhosted.org/packages/89/5a/0f5c5b8a3a6d34eeecb30d0e2f722d50b9b38c0e175937e7c6268ffab997/numexpr-2.10.2-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4213a92efa9770bc28e3792134e27c7e5c7e97068bdfb8ba395baebbd12f991b", size = 398902 }, - { url = "https://files.pythonhosted.org/packages/a2/d5/ec734e735eba5a753efed5be3707ee7447ebd371772f8081b65a4153fb97/numexpr-2.10.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebdbef5763ca057eea0c2b5698e4439d084a0505d9d6e94f4804f26e8890c45e", size = 1380354 }, - { url = "https://files.pythonhosted.org/packages/30/51/406e572531d817480bd612ee08239a36ee82865fea02fce569f15631f4ee/numexpr-2.10.2-cp311-cp311-win32.whl", hash = "sha256:3bf01ec502d89944e49e9c1b5cc7c7085be8ca2eb9dd46a0eafd218afbdbd5f5", size = 151938 }, - { url = "https://files.pythonhosted.org/packages/04/32/5882ed1dbd96234f327a73316a481add151ff827cfaf2ea24fb4d5ad04db/numexpr-2.10.2-cp311-cp311-win_amd64.whl", hash = "sha256:e2d0ae24b0728e4bc3f1d3f33310340d67321d36d6043f7ce26897f4f1042db0", size = 144961 }, - { url = "https://files.pythonhosted.org/packages/2b/96/d5053dea06d8298ae8052b4b049cbf8ef74998e28d57166cc27b8ae909e2/numexpr-2.10.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5323a46e75832334f1af86da1ef6ff0add00fbacdd266250be872b438bdf2be", size = 145029 }, - { url = "https://files.pythonhosted.org/packages/3e/3c/fcd5a812ed5dda757b2d9ef2764a3e1cca6f6d1f02dbf113dc23a2c7702a/numexpr-2.10.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a42963bd4c62d8afa4f51e7974debfa39a048383f653544ab54f50a2f7ec6c42", size = 134851 }, - { url = 
"https://files.pythonhosted.org/packages/0a/52/0ed3b306d8c9944129bce97fec73a2caff13adbd7e1df148d546d7eb2d4d/numexpr-2.10.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5191ba8f2975cb9703afc04ae845a929e193498c0e8bcd408ecb147b35978470", size = 411837 }, - { url = "https://files.pythonhosted.org/packages/7d/9c/6b671dd3fb67d7e7da93cb76b7c5277743f310a216b7856bb18776bb3371/numexpr-2.10.2-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:97298b14f0105a794bea06fd9fbc5c423bd3ff4d88cbc618860b83eb7a436ad6", size = 400577 }, - { url = "https://files.pythonhosted.org/packages/ea/4d/a167d1a215fe10ce58c45109f2869fd13aa0eef66f7e8c69af68be45d436/numexpr-2.10.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9d7805ccb6be2d3b0f7f6fad3707a09ac537811e8e9964f4074d28cb35543db", size = 1381735 }, - { url = "https://files.pythonhosted.org/packages/c1/d4/17e4434f989e4917d31cbd88a043e1c9c16958149cf43fa622987111392b/numexpr-2.10.2-cp312-cp312-win32.whl", hash = "sha256:cb845b2d4f9f8ef0eb1c9884f2b64780a85d3b5ae4eeb26ae2b0019f489cd35e", size = 152102 }, - { url = "https://files.pythonhosted.org/packages/b8/25/9ae599994076ef2a42d35ff6b0430da002647f212567851336a6c7b132d6/numexpr-2.10.2-cp312-cp312-win_amd64.whl", hash = "sha256:57b59cbb5dcce4edf09cd6ce0b57ff60312479930099ca8d944c2fac896a1ead", size = 145061 }, - { url = "https://files.pythonhosted.org/packages/8c/cb/2ea1848c46e4d75073c038dd75628d1aa442975303264ed230bf90f74f44/numexpr-2.10.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a37d6a51ec328c561b2ca8a2bef07025642eca995b8553a5267d0018c732976d", size = 145035 }, - { url = "https://files.pythonhosted.org/packages/ec/cf/bb2bcd81d6f3243590e19ac3e7795a1a370f3ebcd8ecec1f46dcd5333f37/numexpr-2.10.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:81d1dde7dd6166d8ff5727bb46ab42a6b0048db0e97ceb84a121334a404a800f", size = 134858 }, - { url = 
"https://files.pythonhosted.org/packages/48/9b/c9128ffb453205c2a4c84a3abed35447c7591c2c2812e77e34fd238cb2bb/numexpr-2.10.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5b3f814437d5a10797f8d89d2037cca2c9d9fa578520fc911f894edafed6ea3e", size = 415517 }, - { url = "https://files.pythonhosted.org/packages/7e/b0/64c04c9f8b4a563218d00daa1ec4563364961b79025162c5276ab2c7c407/numexpr-2.10.2-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9309f2e43fe6e4560699ef5c27d7a848b3ff38549b6b57194207cf0e88900527", size = 403846 }, - { url = "https://files.pythonhosted.org/packages/80/35/60e9041fd709fe98dd3109d73a03cdffaeb6ee2089179155f5c3754e9934/numexpr-2.10.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ebb73b93f5c4d6994f357fa5a47a9f7a5485577e633b3c46a603cb01445bbb19", size = 1381659 }, - { url = "https://files.pythonhosted.org/packages/bd/5a/955bf5b5cf8f3de7b044a999e36327e14191fa073ed0e329456ed0f8161d/numexpr-2.10.2-cp313-cp313-win32.whl", hash = "sha256:ec04c9a3c050c175348801e27c18c68d28673b7bfb865ef88ce333be523bbc01", size = 152105 }, - { url = "https://files.pythonhosted.org/packages/be/7a/8ce360a1848bb5bcc30a414493371678f43790ece397f8652d5f65757e57/numexpr-2.10.2-cp313-cp313-win_amd64.whl", hash = "sha256:d7a3fc83c959288544db3adc70612475d8ad53a66c69198105c74036182d10dd", size = 145060 }, - { url = "https://files.pythonhosted.org/packages/41/6a/06a225ac970c5921f41bc069a30438ff64fd79ef5e828f5ec2d4f6658307/numexpr-2.10.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0495f8111c3633e265248709b8b3b521bbfa646ba384909edd10e2b9a588a83a", size = 145100 }, - { url = "https://files.pythonhosted.org/packages/bb/c5/9ecfa0da1d93d57e3f447d10da8cf6d695c93131cec085625e5092b37631/numexpr-2.10.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2aa05ac71bee3b1253e73173c4d7fa96a09a18970c0226f1c2c07a71ffe988dc", size = 134839 }, - { url = 
"https://files.pythonhosted.org/packages/f5/30/f1a48c485183da517bd28e0df6fee337d12bbb0cd2d6bf13f8f5695afd37/numexpr-2.10.2-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c3a23c3002ab330056fbdd2785871937a6f2f2fa85d06c8d0ff74ea8418119d1", size = 408149 }, - { url = "https://files.pythonhosted.org/packages/ed/f2/009d9dd8cd22f253fd6ead4165f81fafbe22489c1cfea612e18aa3dcb0fa/numexpr-2.10.2-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a018a7d81326f4c73d8b5aee61794d7d8514512f43957c0db61eb2a8a86848c7", size = 396740 }, - { url = "https://files.pythonhosted.org/packages/47/90/e3f12670b3cca9bed85096671265e0f65cde6cf4646baadd4ee9c33944a8/numexpr-2.10.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:037859b17a0abe2b489d4c2cfdadd2bf458ec80dd83f338ea5544c7987e06b85", size = 1377883 }, - { url = "https://files.pythonhosted.org/packages/06/1d/068c09a3c013c1178495f73a21ebd6ee25b9f0fc4202cea38b7a826c43a2/numexpr-2.10.2-cp39-cp39-win32.whl", hash = "sha256:eb278ccda6f893a312aa0452701bb17d098b7b14eb7c9381517d509cce0a39a3", size = 151878 }, - { url = "https://files.pythonhosted.org/packages/70/81/affb9ff26e8accb210fe5585b095bd6872f5614d18b76cd53888e955ed9a/numexpr-2.10.2-cp39-cp39-win_amd64.whl", hash = "sha256:734b64c6d6a597601ce9d0ef7b666e678ec015b446f1d1412c23903c021436c3", size = 144960 }, -] - -[[package]] -name = "numexpr" -version = "2.11.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.12'", - "python_full_version == '3.11.*'", - "python_full_version == '3.10.*'", -] -dependencies = [ - { name = "numpy", marker = "python_full_version >= '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/d2/8f/2cc977e91adbfbcdb6b49fdb9147e1d1c7566eb2c0c1e737e9a47020b5ca/numexpr-2.11.0.tar.gz", hash = "sha256:75b2c01a4eda2e7c357bc67a3f5c3dd76506c15b5fd4dc42845ef2e182181bad", size = 108960 } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/aa/3a/99d5c9fb7f1cbb465798b79b9fd6d5df5ab10fee0d499c2b72a76634c80e/numexpr-2.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f471fd055a9e13cf5f4337ee12379b30b4dcda1ae0d85018d4649e841578c02", size = 147492 }, - { url = "https://files.pythonhosted.org/packages/f4/32/914b8bb3d9a40e27ee56bfa915dcdfd60a460a6a9006bab80aa25df91c91/numexpr-2.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6e68a9800a3fa37c438b73a669f507c4973801a456a864ac56b62c3bd63d08af", size = 136741 }, - { url = "https://files.pythonhosted.org/packages/5c/89/177fae13baaa9380a9f714bdf8b88ae941ed2c2f89bd228f2f089a651afa/numexpr-2.11.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ad5cf0ebc3cdb12edb5aa50472108807ffd0a0ce95f87c0366a479fa83a7c346", size = 409327 }, - { url = "https://files.pythonhosted.org/packages/83/03/0718f1ac2d7cc0422096ab0ac16cc04597539a2c69a22616d781a2be4359/numexpr-2.11.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8c9e6b07c136d06495c792f603099039bb1e7c6c29854cc5eb3d7640268df016", size = 399827 }, - { url = "https://files.pythonhosted.org/packages/81/7d/8225d6fcafaa937606543bee6e985966c91d8741d25a8eb6d0143f64ce77/numexpr-2.11.0-cp310-cp310-win32.whl", hash = "sha256:4aba2f640d9d45b986a613ce94fcf008c42cc72eeba2990fefdb575228b1d3d1", size = 153165 }, - { url = "https://files.pythonhosted.org/packages/8d/c8/abd6371906c2690852dbbd4cb8faa3d26c51bc8ce849cb4b16dc24e799c1/numexpr-2.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:7f75797bc75a2e7edf52a1c9e68a1295fa84250161c8f4e41df9e72723332c65", size = 146233 }, - { url = "https://files.pythonhosted.org/packages/d8/d1/1cf8137990b3f3d445556ed63b9bc347aec39bde8c41146b02d3b35c1adc/numexpr-2.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:450eba3c93c3e3e8070566ad8d70590949d6e574b1c960bf68edd789811e7da8", size = 147535 }, - { url = 
"https://files.pythonhosted.org/packages/b6/5e/bac7649d043f47c7c14c797efe60dbd19476468a149399cd706fe2e47f8c/numexpr-2.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f0eb88dbac8a7e61ee433006d0ddfd6eb921f5c6c224d1b50855bc98fb304c44", size = 136710 }, - { url = "https://files.pythonhosted.org/packages/1b/9f/c88fc34d82d23c66ea0b78b00a1fb3b64048e0f7ac7791b2cd0d2a4ce14d/numexpr-2.11.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a194e3684b3553ea199c3f4837f422a521c7e2f0cce13527adc3a6b4049f9e7c", size = 411169 }, - { url = "https://files.pythonhosted.org/packages/e4/8d/4d78dad430b41d836146f9e6f545f5c4f7d1972a6aa427d8570ab232bf16/numexpr-2.11.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f677668ab2bb2452fee955af3702fbb3b71919e61e4520762b1e5f54af59c0d8", size = 401671 }, - { url = "https://files.pythonhosted.org/packages/83/1c/414670eb41a82b78bd09769a4f5fb49a934f9b3990957f02c833637a511e/numexpr-2.11.0-cp311-cp311-win32.whl", hash = "sha256:7d9e76a77c9644fbd60da3984e516ead5b84817748c2da92515cd36f1941a04d", size = 153159 }, - { url = "https://files.pythonhosted.org/packages/0c/97/8d00ca9b36f3ac68a8fd85e930ab0c9448d8c9ca7ce195ee75c188dabd45/numexpr-2.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:7163b488bfdcd13c300a8407c309e4cee195ef95d07facf5ac2678d66c988805", size = 146224 }, - { url = "https://files.pythonhosted.org/packages/38/45/7a0e5a0b800d92e73825494ac695fa05a52c7fc7088d69a336880136b437/numexpr-2.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4229060be866813122385c608bbd3ea48fe0b33e91f2756810d28c1cdbfc98f1", size = 147494 }, - { url = "https://files.pythonhosted.org/packages/74/46/3a26b84e44f4739ec98de0ede4b95b4b8096f721e22d0e97517eeb02017e/numexpr-2.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:097aa8835d32d6ac52f2be543384019b4b134d1fb67998cbfc4271155edfe54a", size = 136832 }, - { url = 
"https://files.pythonhosted.org/packages/75/05/e3076ff25d4a108b47640c169c0a64811748c43b63d9cc052ea56de1631e/numexpr-2.11.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f082321c244ff5d0e252071fb2c4fe02063a45934144a1456a5370ca139bec2", size = 412618 }, - { url = "https://files.pythonhosted.org/packages/70/e8/15e0e077a004db0edd530da96c60c948689c888c464ee5d14b82405ebd86/numexpr-2.11.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d7a19435ca3d7dd502b8d8dce643555eb1b6013989e3f7577857289f6db6be16", size = 403363 }, - { url = "https://files.pythonhosted.org/packages/10/14/f22afb3a7ae41d03ba87f62d00fbcfb76389f9cc91b7a82593c39c509318/numexpr-2.11.0-cp312-cp312-win32.whl", hash = "sha256:f326218262c8d8537887cc4bbd613c8409d62f2cac799835c0360e0d9cefaa5c", size = 153307 }, - { url = "https://files.pythonhosted.org/packages/18/70/abc585269424582b3cd6db261e33b2ec96b5d4971da3edb29fc9b62a8926/numexpr-2.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:0a184e5930c77ab91dd9beee4df403b825cd9dfc4e9ba4670d31c9fcb4e2c08e", size = 146337 }, - { url = "https://files.pythonhosted.org/packages/74/63/dbf4fb6c48006d413a82db138d03c3c007d0ed0684f693c4b77196448660/numexpr-2.11.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:eb766218abad05c7c3ddad5367d0ec702d6152cb4a48d9fd56a6cef6abade70c", size = 147495 }, - { url = "https://files.pythonhosted.org/packages/3a/e4/2fbbf5b9121f54722dc4d4dfc75bc0b4e8ee2675f92ec86ee5697aecc53f/numexpr-2.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2036be213a6a1b5ce49acf60de99b911a0f9d174aab7679dde1fae315134f826", size = 136839 }, - { url = "https://files.pythonhosted.org/packages/a8/3f/aa36415919c90f712a11127eaa7c0c8d045768d62a484a29364e4801c383/numexpr-2.11.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:096ec768bee2ef14ac757b4178e3c5f05e5f1cb6cae83b2eea9b4ba3ec1a86dd", size = 416240 }, - { url = 
"https://files.pythonhosted.org/packages/b9/7d/4911f40d3610fc5557029f0d1f20ef9f571488319567ac4d8ee6d0978ee6/numexpr-2.11.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a1719788a787808c15c9bb98b6ff0c97d64a0e59c1a6ebe36d4ae4d7c5c09b95", size = 406641 }, - { url = "https://files.pythonhosted.org/packages/6f/bc/d00e717e77691c410c6c461d7880b4c498896874316acc0e044d7eafacbf/numexpr-2.11.0-cp313-cp313-win32.whl", hash = "sha256:6b5fdfc86cbf5373ea67d554cc6f08863825ea8e928416bed8d5285e387420c6", size = 153313 }, - { url = "https://files.pythonhosted.org/packages/52/a2/93346789e6d73a76fdb68171904ade25c112f25df363a8f602c6b21bc220/numexpr-2.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:5ff337b36db141a1a0b49f01282783744f49f0d401cc83a512fc5596eb7db5c6", size = 146340 }, - { url = "https://files.pythonhosted.org/packages/0b/20/c0e3aaf3cc4497e5253df2523a55c83b9d316cb5c9d5caaa4a1156cef6e3/numexpr-2.11.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:b9854fa70edbe93242b8bb4840e58d1128c45766d9a70710f05b4f67eb0feb6e", size = 148206 }, - { url = "https://files.pythonhosted.org/packages/de/49/22fd38ac990ba333f25b771305a5ffcd98c771f4d278868661ffb26deac1/numexpr-2.11.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:321736cb98f090ce864b58cc5c37661cb5548e394e0fe24d5f2c7892a89070c3", size = 137573 }, - { url = "https://files.pythonhosted.org/packages/fb/1e/50074e472e9e6bea4fe430869708d9ede333a187d8d0740e70d5a9560aad/numexpr-2.11.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5cc434eb4a4df2fe442bcc50df114e82ff7aa234657baf873b2c9cf3f851e8e", size = 426674 }, - { url = "https://files.pythonhosted.org/packages/8e/6d/7ccbc72b950653df62d29e2531c811ed80cfff93c927a5bfd86a71edb4da/numexpr-2.11.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:238d19465a272ada3967600fada55e4c6900485aefb42122a78dfcaf2efca65f", size = 416037 }, - { url = 
"https://files.pythonhosted.org/packages/31/7c/bbccad2734dd4b251cc6bdff8cf5ded18b5383f5a05aa8de7bf02acbb65b/numexpr-2.11.0-cp313-cp313t-win32.whl", hash = "sha256:0db4c2dcad09f9594b45fce794f4b903345195a8c216e252de2aa92884fd81a8", size = 153967 }, - { url = "https://files.pythonhosted.org/packages/75/d7/41287384e413e8d20457d35e264d9c9754e65eb13a988af51ceb7057f61b/numexpr-2.11.0-cp313-cp313t-win_amd64.whl", hash = "sha256:a69b5c02014448a412012752dc46091902d28932c3be0c6e02e73cecceffb700", size = 147207 }, -] - -[[package]] -name = "numpy" -version = "1.26.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/65/6e/09db70a523a96d25e115e71cc56a6f9031e7b8cd166c1ac8438307c14058/numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010", size = 15786129 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/94/ace0fdea5241a27d13543ee117cbc65868e82213fb31a8eb7fe9ff23f313/numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0", size = 20631468 }, - { url = "https://files.pythonhosted.org/packages/20/f7/b24208eba89f9d1b58c1668bc6c8c4fd472b20c45573cb767f59d49fb0f6/numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a", size = 13966411 }, - { url = "https://files.pythonhosted.org/packages/fc/a5/4beee6488160798683eed5bdb7eead455892c3b4e1f78d79d8d3f3b084ac/numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4", size = 14219016 }, - { url = "https://files.pythonhosted.org/packages/4b/d7/ecf66c1cd12dc28b4040b15ab4d17b773b87fa9d29ca16125de01adb36cd/numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f", size = 18240889 }, - 
{ url = "https://files.pythonhosted.org/packages/24/03/6f229fe3187546435c4f6f89f6d26c129d4f5bed40552899fcf1f0bf9e50/numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a", size = 13876746 }, - { url = "https://files.pythonhosted.org/packages/39/fe/39ada9b094f01f5a35486577c848fe274e374bbf8d8f472e1423a0bbd26d/numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2", size = 18078620 }, - { url = "https://files.pythonhosted.org/packages/d5/ef/6ad11d51197aad206a9ad2286dc1aac6a378059e06e8cf22cd08ed4f20dc/numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07", size = 5972659 }, - { url = "https://files.pythonhosted.org/packages/19/77/538f202862b9183f54108557bfda67e17603fc560c384559e769321c9d92/numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5", size = 15808905 }, - { url = "https://files.pythonhosted.org/packages/11/57/baae43d14fe163fa0e4c47f307b6b2511ab8d7d30177c491960504252053/numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71", size = 20630554 }, - { url = "https://files.pythonhosted.org/packages/1a/2e/151484f49fd03944c4a3ad9c418ed193cfd02724e138ac8a9505d056c582/numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef", size = 13997127 }, - { url = "https://files.pythonhosted.org/packages/79/ae/7e5b85136806f9dadf4878bf73cf223fe5c2636818ba3ab1c585d0403164/numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e", size = 14222994 }, - { url = 
"https://files.pythonhosted.org/packages/3a/d0/edc009c27b406c4f9cbc79274d6e46d634d139075492ad055e3d68445925/numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5", size = 18252005 }, - { url = "https://files.pythonhosted.org/packages/09/bf/2b1aaf8f525f2923ff6cfcf134ae5e750e279ac65ebf386c75a0cf6da06a/numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a", size = 13885297 }, - { url = "https://files.pythonhosted.org/packages/df/a0/4e0f14d847cfc2a633a1c8621d00724f3206cfeddeb66d35698c4e2cf3d2/numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a", size = 18093567 }, - { url = "https://files.pythonhosted.org/packages/d2/b7/a734c733286e10a7f1a8ad1ae8c90f2d33bf604a96548e0a4a3a6739b468/numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20", size = 5968812 }, - { url = "https://files.pythonhosted.org/packages/3f/6b/5610004206cf7f8e7ad91c5a85a8c71b2f2f8051a0c0c4d5916b76d6cbb2/numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2", size = 15811913 }, - { url = "https://files.pythonhosted.org/packages/95/12/8f2020a8e8b8383ac0177dc9570aad031a3beb12e38847f7129bacd96228/numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218", size = 20335901 }, - { url = "https://files.pythonhosted.org/packages/75/5b/ca6c8bd14007e5ca171c7c03102d17b4f4e0ceb53957e8c44343a9546dcc/numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b", size = 13685868 }, - { url = 
"https://files.pythonhosted.org/packages/79/f8/97f10e6755e2a7d027ca783f63044d5b1bc1ae7acb12afe6a9b4286eac17/numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b", size = 13925109 }, - { url = "https://files.pythonhosted.org/packages/0f/50/de23fde84e45f5c4fda2488c759b69990fd4512387a8632860f3ac9cd225/numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed", size = 17950613 }, - { url = "https://files.pythonhosted.org/packages/4c/0c/9c603826b6465e82591e05ca230dfc13376da512b25ccd0894709b054ed0/numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a", size = 13572172 }, - { url = "https://files.pythonhosted.org/packages/76/8c/2ba3902e1a0fc1c74962ea9bb33a534bb05984ad7ff9515bf8d07527cadd/numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0", size = 17786643 }, - { url = "https://files.pythonhosted.org/packages/28/4a/46d9e65106879492374999e76eb85f87b15328e06bd1550668f79f7b18c6/numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110", size = 5677803 }, - { url = "https://files.pythonhosted.org/packages/16/2e/86f24451c2d530c88daf997cb8d6ac622c1d40d19f5a031ed68a4b73a374/numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818", size = 15517754 }, - { url = "https://files.pythonhosted.org/packages/7d/24/ce71dc08f06534269f66e73c04f5709ee024a1afe92a7b6e1d73f158e1f8/numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c", size = 20636301 }, - { url = 
"https://files.pythonhosted.org/packages/ae/8c/ab03a7c25741f9ebc92684a20125fbc9fc1b8e1e700beb9197d750fdff88/numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be", size = 13971216 }, - { url = "https://files.pythonhosted.org/packages/6d/64/c3bcdf822269421d85fe0d64ba972003f9bb4aa9a419da64b86856c9961f/numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764", size = 14226281 }, - { url = "https://files.pythonhosted.org/packages/54/30/c2a907b9443cf42b90c17ad10c1e8fa801975f01cb9764f3f8eb8aea638b/numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3", size = 18249516 }, - { url = "https://files.pythonhosted.org/packages/43/12/01a563fc44c07095996d0129b8899daf89e4742146f7044cdbdb3101c57f/numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd", size = 13882132 }, - { url = "https://files.pythonhosted.org/packages/16/ee/9df80b06680aaa23fc6c31211387e0db349e0e36d6a63ba3bd78c5acdf11/numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c", size = 18084181 }, - { url = "https://files.pythonhosted.org/packages/28/7d/4b92e2fe20b214ffca36107f1a3e75ef4c488430e64de2d9af5db3a4637d/numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6", size = 5976360 }, - { url = "https://files.pythonhosted.org/packages/b5/42/054082bd8220bbf6f297f982f0a8f5479fcbc55c8b511d928df07b965869/numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea", size = 15814633 }, - { url = 
"https://files.pythonhosted.org/packages/3f/72/3df6c1c06fc83d9cfe381cccb4be2532bbd38bf93fbc9fad087b6687f1c0/numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30", size = 20455961 }, - { url = "https://files.pythonhosted.org/packages/8e/02/570545bac308b58ffb21adda0f4e220ba716fb658a63c151daecc3293350/numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c", size = 18061071 }, - { url = "https://files.pythonhosted.org/packages/f4/5f/fafd8c51235f60d49f7a88e2275e13971e90555b67da52dd6416caec32fe/numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0", size = 15709730 }, -] - -[[package]] -name = "openfisca-core" -version = "43.3.8" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "dpath" }, - { name = "numexpr", version = "2.10.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "numexpr", version = "2.11.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "numpy" }, - { name = "pendulum" }, - { name = "psutil" }, - { name = "pytest" }, - { name = "pyyaml" }, - { name = "sortedcontainers" }, - { name = "strenum" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/83/7f/1957ad6b19a5d66401957c69c6cca5e57be319174c8dd5aebf1196bffc9d/openfisca_core-43.3.8.tar.gz", hash = "sha256:fb6628ce121b3d4138bb387e746dcccf2d9cf1c318cbb10d6e4eca0afc89b37b", size = 198970 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/07/2e/e82f8a2162001c5c3e4e5bd94793221ac6a135ce51a5b6ba794c52f04746/openfisca_core-43.3.8-py3-none-any.whl", hash = "sha256:c3f58622779d8ec734f5272c99308010d1ebbc3bc4325d112912fb49d07233fd", size = 259341 }, -] - 
-[package.optional-dependencies] -web-api = [ - { name = "flask" }, - { name = "flask-cors" }, - { name = "gunicorn" }, - { name = "werkzeug" }, -] - -[[package]] -name = "openfisca-country-template" -version = "7.1.9" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "openfisca-core", extra = ["web-api"] }, -] -sdist = { url = "https://files.pythonhosted.org/packages/89/fa/2ffeae21a346f8ae0241b8990b2ff545d82d65f37b3e22a81b959e46f5d1/openfisca_country_template-7.1.9.tar.gz", hash = "sha256:57371d284e7d1f6850e00e8b348f1a396cf480b007afcab0d4b306d4f1a1bb2f", size = 35571 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/6e/79/2c9bd599a8bef47e4bb8686641e37e6029b239b10ff8fe68b5b985296e3c/openfisca_country_template-7.1.9-py3-none-any.whl", hash = "sha256:a414c5b4880cdcc57e570e33dc893bf3503f8eb808971e60437f5c902dea9a72", size = 45480 }, -] - -[[package]] -name = "openfisca-survey-manager" -version = "4.0.0" -source = { editable = "." } -dependencies = [ - { name = "chardet" }, - { name = "configparser" }, - { name = "humanize" }, - { name = "numpy" }, - { name = "openfisca-core" }, - { name = "pandas" }, - { name = "pyarrow" }, - { name = "pyxdg" }, - { name = "pyyaml" }, - { name = "tables", version = "3.9.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "tables", version = "3.10.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, - { name = "tables", version = "3.10.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { name = "tabulate" }, - { name = "weightedcalcs" }, - { name = "wquantiles" }, -] - -[package.optional-dependencies] -casd = [ - { name = "isort", version = "6.1.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "isort", version = "7.0.0", source = { registry = "https://pypi.org/simple" }, marker = 
"python_full_version >= '3.10'" }, - { name = "pytest" }, - { name = "ruff" }, - { name = "scipy", version = "1.13.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, - { name = "scipy", version = "1.16.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, -] -dev = [ - { name = "coveralls" }, - { name = "ipdb" }, - { name = "isort", version = "6.1.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "isort", version = "7.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "openfisca-country-template" }, - { name = "pdbpp" }, - { name = "pre-commit" }, - { name = "pyreadstat" }, - { name = "pytest" }, - { name = "pytest-cov" }, - { name = "pytest-order" }, - { name = "ruff" }, - { name = "scipy", version = "1.13.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, - { name = "scipy", version = "1.16.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, -] -matching = [ - { name = "rpy2" }, -] -sas = [ - { name = "pyreadstat" }, - { name = "sas7bdat" }, -] - -[package.metadata] -requires-dist = [ - { name = "chardet", specifier = ">=5.1.0,<6.0" }, - { name = "configparser", specifier = ">=5.3.0,<8.0" }, - { name = "coveralls", marker = "extra == 'dev'", specifier = ">=3.3.1,<5.0" }, - { name = "humanize", specifier = ">=4.6.0,<5.0" }, - { name = "ipdb", marker = "extra == 'dev'" }, - { name = "isort", marker = "extra == 'casd'" }, - { name = "isort", marker = "extra == 'dev'" }, - { name = "numpy", 
specifier = ">=1.24.2,<2.0" }, - { name = "openfisca-core", specifier = ">=43.0.0,<44.0.0" }, - { name = "openfisca-country-template", marker = "extra == 'dev'", specifier = ">=7.1.5,<8.0.0" }, - { name = "pandas", specifier = ">=2.0.3,<3.0" }, - { name = "pdbpp", marker = "extra == 'dev'" }, - { name = "pre-commit", marker = "extra == 'dev'" }, - { name = "pyarrow", specifier = ">=13.0.0,<19.0.0" }, - { name = "pyreadstat", marker = "extra == 'dev'", specifier = ">=1.2.1,<1.3" }, - { name = "pyreadstat", marker = "extra == 'sas'", specifier = ">=1.2.1,<1.3" }, - { name = "pytest", marker = "extra == 'casd'" }, - { name = "pytest", marker = "extra == 'dev'" }, - { name = "pytest-cov", marker = "extra == 'dev'" }, - { name = "pytest-order", marker = "extra == 'dev'" }, - { name = "pyxdg", specifier = ">=0.28,<0.29" }, - { name = "pyyaml", specifier = ">=6.0,<7.0" }, - { name = "rpy2", marker = "extra == 'matching'", specifier = ">=3.5.10,<4.0" }, - { name = "ruff", marker = "extra == 'casd'" }, - { name = "ruff", marker = "extra == 'dev'" }, - { name = "sas7bdat", marker = "extra == 'sas'", specifier = ">=2.2.3,<3.0" }, - { name = "scipy", marker = "extra == 'casd'", specifier = ">=1.10.1,<2.0" }, - { name = "scipy", marker = "extra == 'dev'", specifier = ">=1.10.1,<2.0" }, - { name = "tables", specifier = ">=3.8.0,<4.0" }, - { name = "tabulate", specifier = ">=0.9.0,<0.10.0" }, - { name = "weightedcalcs", specifier = ">=0.1.2,<0.2.0" }, - { name = "wquantiles", specifier = ">=0.6,<0.7" }, -] -provides-extras = ["matching", "dev", "casd", "sas"] - -[[package]] -name = "packaging" -version = "25.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727 } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469 }, -] - -[[package]] -name = "pandas" -version = "2.3.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "numpy" }, - { name = "python-dateutil" }, - { name = "pytz" }, - { name = "tzdata" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/d1/6f/75aa71f8a14267117adeeed5d21b204770189c0a0025acbdc03c337b28fc/pandas-2.3.1.tar.gz", hash = "sha256:0a95b9ac964fe83ce317827f80304d37388ea77616b1425f0ae41c9d2d0d7bb2", size = 4487493 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c4/ca/aa97b47287221fa37a49634532e520300088e290b20d690b21ce3e448143/pandas-2.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22c2e866f7209ebc3a8f08d75766566aae02bcc91d196935a1d9e59c7b990ac9", size = 11542731 }, - { url = "https://files.pythonhosted.org/packages/80/bf/7938dddc5f01e18e573dcfb0f1b8c9357d9b5fa6ffdee6e605b92efbdff2/pandas-2.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3583d348546201aff730c8c47e49bc159833f971c2899d6097bce68b9112a4f1", size = 10790031 }, - { url = "https://files.pythonhosted.org/packages/ee/2f/9af748366763b2a494fed477f88051dbf06f56053d5c00eba652697e3f94/pandas-2.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f951fbb702dacd390561e0ea45cdd8ecfa7fb56935eb3dd78e306c19104b9b0", size = 11724083 }, - { url = "https://files.pythonhosted.org/packages/2c/95/79ab37aa4c25d1e7df953dde407bb9c3e4ae47d154bc0dd1692f3a6dcf8c/pandas-2.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd05b72ec02ebfb993569b4931b2e16fbb4d6ad6ce80224a3ee838387d83a191", size = 12342360 }, - { url = 
"https://files.pythonhosted.org/packages/75/a7/d65e5d8665c12c3c6ff5edd9709d5836ec9b6f80071b7f4a718c6106e86e/pandas-2.3.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1b916a627919a247d865aed068eb65eb91a344b13f5b57ab9f610b7716c92de1", size = 13202098 }, - { url = "https://files.pythonhosted.org/packages/65/f3/4c1dbd754dbaa79dbf8b537800cb2fa1a6e534764fef50ab1f7533226c5c/pandas-2.3.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fe67dc676818c186d5a3d5425250e40f179c2a89145df477dd82945eaea89e97", size = 13837228 }, - { url = "https://files.pythonhosted.org/packages/3f/d6/d7f5777162aa9b48ec3910bca5a58c9b5927cfd9cfde3aa64322f5ba4b9f/pandas-2.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:2eb789ae0274672acbd3c575b0598d213345660120a257b47b5dafdc618aec83", size = 11336561 }, - { url = "https://files.pythonhosted.org/packages/76/1c/ccf70029e927e473a4476c00e0d5b32e623bff27f0402d0a92b7fc29bb9f/pandas-2.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2b0540963d83431f5ce8870ea02a7430adca100cec8a050f0811f8e31035541b", size = 11566608 }, - { url = "https://files.pythonhosted.org/packages/ec/d3/3c37cb724d76a841f14b8f5fe57e5e3645207cc67370e4f84717e8bb7657/pandas-2.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fe7317f578c6a153912bd2292f02e40c1d8f253e93c599e82620c7f69755c74f", size = 10823181 }, - { url = "https://files.pythonhosted.org/packages/8a/4c/367c98854a1251940edf54a4df0826dcacfb987f9068abf3e3064081a382/pandas-2.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6723a27ad7b244c0c79d8e7007092d7c8f0f11305770e2f4cd778b3ad5f9f85", size = 11793570 }, - { url = "https://files.pythonhosted.org/packages/07/5f/63760ff107bcf5146eee41b38b3985f9055e710a72fdd637b791dea3495c/pandas-2.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3462c3735fe19f2638f2c3a40bd94ec2dc5ba13abbb032dd2fa1f540a075509d", size = 12378887 }, - { url = 
"https://files.pythonhosted.org/packages/15/53/f31a9b4dfe73fe4711c3a609bd8e60238022f48eacedc257cd13ae9327a7/pandas-2.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:98bcc8b5bf7afed22cc753a28bc4d9e26e078e777066bc53fac7904ddef9a678", size = 13230957 }, - { url = "https://files.pythonhosted.org/packages/e0/94/6fce6bf85b5056d065e0a7933cba2616dcb48596f7ba3c6341ec4bcc529d/pandas-2.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4d544806b485ddf29e52d75b1f559142514e60ef58a832f74fb38e48d757b299", size = 13883883 }, - { url = "https://files.pythonhosted.org/packages/c8/7b/bdcb1ed8fccb63d04bdb7635161d0ec26596d92c9d7a6cce964e7876b6c1/pandas-2.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:b3cd4273d3cb3707b6fffd217204c52ed92859533e31dc03b7c5008aa933aaab", size = 11340212 }, - { url = "https://files.pythonhosted.org/packages/46/de/b8445e0f5d217a99fe0eeb2f4988070908979bec3587c0633e5428ab596c/pandas-2.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:689968e841136f9e542020698ee1c4fbe9caa2ed2213ae2388dc7b81721510d3", size = 11588172 }, - { url = "https://files.pythonhosted.org/packages/1e/e0/801cdb3564e65a5ac041ab99ea6f1d802a6c325bb6e58c79c06a3f1cd010/pandas-2.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:025e92411c16cbe5bb2a4abc99732a6b132f439b8aab23a59fa593eb00704232", size = 10717365 }, - { url = "https://files.pythonhosted.org/packages/51/a5/c76a8311833c24ae61a376dbf360eb1b1c9247a5d9c1e8b356563b31b80c/pandas-2.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b7ff55f31c4fcb3e316e8f7fa194566b286d6ac430afec0d461163312c5841e", size = 11280411 }, - { url = "https://files.pythonhosted.org/packages/da/01/e383018feba0a1ead6cf5fe8728e5d767fee02f06a3d800e82c489e5daaf/pandas-2.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7dcb79bf373a47d2a40cf7232928eb7540155abbc460925c2c96d2d30b006eb4", size = 11988013 }, - { url = 
"https://files.pythonhosted.org/packages/5b/14/cec7760d7c9507f11c97d64f29022e12a6cc4fc03ac694535e89f88ad2ec/pandas-2.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:56a342b231e8862c96bdb6ab97170e203ce511f4d0429589c8ede1ee8ece48b8", size = 12767210 }, - { url = "https://files.pythonhosted.org/packages/50/b9/6e2d2c6728ed29fb3d4d4d302504fb66f1a543e37eb2e43f352a86365cdf/pandas-2.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ca7ed14832bce68baef331f4d7f294411bed8efd032f8109d690df45e00c4679", size = 13440571 }, - { url = "https://files.pythonhosted.org/packages/80/a5/3a92893e7399a691bad7664d977cb5e7c81cf666c81f89ea76ba2bff483d/pandas-2.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:ac942bfd0aca577bef61f2bc8da8147c4ef6879965ef883d8e8d5d2dc3e744b8", size = 10987601 }, - { url = "https://files.pythonhosted.org/packages/32/ed/ff0a67a2c5505e1854e6715586ac6693dd860fbf52ef9f81edee200266e7/pandas-2.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9026bd4a80108fac2239294a15ef9003c4ee191a0f64b90f170b40cfb7cf2d22", size = 11531393 }, - { url = "https://files.pythonhosted.org/packages/c7/db/d8f24a7cc9fb0972adab0cc80b6817e8bef888cfd0024eeb5a21c0bb5c4a/pandas-2.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6de8547d4fdb12421e2d047a2c446c623ff4c11f47fddb6b9169eb98ffba485a", size = 10668750 }, - { url = "https://files.pythonhosted.org/packages/0f/b0/80f6ec783313f1e2356b28b4fd8d2148c378370045da918c73145e6aab50/pandas-2.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:782647ddc63c83133b2506912cc6b108140a38a37292102aaa19c81c83db2928", size = 11342004 }, - { url = "https://files.pythonhosted.org/packages/e9/e2/20a317688435470872885e7fc8f95109ae9683dec7c50be29b56911515a5/pandas-2.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ba6aff74075311fc88504b1db890187a3cd0f887a5b10f5525f8e2ef55bfdb9", size = 12050869 }, - { url = 
"https://files.pythonhosted.org/packages/55/79/20d746b0a96c67203a5bee5fb4e00ac49c3e8009a39e1f78de264ecc5729/pandas-2.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e5635178b387bd2ba4ac040f82bc2ef6e6b500483975c4ebacd34bec945fda12", size = 12750218 }, - { url = "https://files.pythonhosted.org/packages/7c/0f/145c8b41e48dbf03dd18fdd7f24f8ba95b8254a97a3379048378f33e7838/pandas-2.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6f3bf5ec947526106399a9e1d26d40ee2b259c66422efdf4de63c848492d91bb", size = 13416763 }, - { url = "https://files.pythonhosted.org/packages/b2/c0/54415af59db5cdd86a3d3bf79863e8cc3fa9ed265f0745254061ac09d5f2/pandas-2.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:1c78cf43c8fde236342a1cb2c34bcff89564a7bfed7e474ed2fffa6aed03a956", size = 10987482 }, - { url = "https://files.pythonhosted.org/packages/48/64/2fd2e400073a1230e13b8cd604c9bc95d9e3b962e5d44088ead2e8f0cfec/pandas-2.3.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8dfc17328e8da77be3cf9f47509e5637ba8f137148ed0e9b5241e1baf526e20a", size = 12029159 }, - { url = "https://files.pythonhosted.org/packages/d8/0a/d84fd79b0293b7ef88c760d7dca69828d867c89b6d9bc52d6a27e4d87316/pandas-2.3.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ec6c851509364c59a5344458ab935e6451b31b818be467eb24b0fe89bd05b6b9", size = 11393287 }, - { url = "https://files.pythonhosted.org/packages/50/ae/ff885d2b6e88f3c7520bb74ba319268b42f05d7e583b5dded9837da2723f/pandas-2.3.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:911580460fc4884d9b05254b38a6bfadddfcc6aaef856fb5859e7ca202e45275", size = 11309381 }, - { url = "https://files.pythonhosted.org/packages/85/86/1fa345fc17caf5d7780d2699985c03dbe186c68fee00b526813939062bb0/pandas-2.3.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f4d6feeba91744872a600e6edbbd5b033005b431d5ae8379abee5bcfa479fab", size = 11883998 }, - { url = 
"https://files.pythonhosted.org/packages/81/aa/e58541a49b5e6310d89474333e994ee57fea97c8aaa8fc7f00b873059bbf/pandas-2.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:fe37e757f462d31a9cd7580236a82f353f5713a80e059a29753cf938c6775d96", size = 12704705 }, - { url = "https://files.pythonhosted.org/packages/d5/f9/07086f5b0f2a19872554abeea7658200824f5835c58a106fa8f2ae96a46c/pandas-2.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5db9637dbc24b631ff3707269ae4559bce4b7fd75c1c4d7e13f40edc42df4444", size = 13189044 }, - { url = "https://files.pythonhosted.org/packages/6e/21/ecf2df680982616459409b09962a8c2065330c7151dc6538069f3b634acf/pandas-2.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4645f770f98d656f11c69e81aeb21c6fca076a44bed3dcbb9396a4311bc7f6d8", size = 11567275 }, - { url = "https://files.pythonhosted.org/packages/1e/1a/dcb50e44b75419e96b276c9fb023b0f147b3c411be1cd517492aa2a184d4/pandas-2.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:342e59589cc454aaff7484d75b816a433350b3d7964d7847327edda4d532a2e3", size = 10811488 }, - { url = "https://files.pythonhosted.org/packages/2d/55/66cd2b679f6a27398380eac7574bc24746128f74626a3c02b978ea00e5ce/pandas-2.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d12f618d80379fde6af007f65f0c25bd3e40251dbd1636480dfffce2cf1e6da", size = 11763000 }, - { url = "https://files.pythonhosted.org/packages/ae/1c/5b9b263c80fd5e231b77df6f78cd7426d1d4ad3a4e858e85b7b3d93d0e9c/pandas-2.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd71c47a911da120d72ef173aeac0bf5241423f9bfea57320110a978457e069e", size = 12361395 }, - { url = "https://files.pythonhosted.org/packages/f7/74/7e817b31413fbb96366ea327d43d1926a9c48c58074e27e094e2839a0e36/pandas-2.3.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:09e3b1587f0f3b0913e21e8b32c3119174551deb4a4eba4a89bc7377947977e7", size = 13225086 }, - { url = 
"https://files.pythonhosted.org/packages/1f/0f/bc0a44b47eba2f22ae4235719a573d552ef7ad76ed3ea39ae62d554e040b/pandas-2.3.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2323294c73ed50f612f67e2bf3ae45aea04dce5690778e08a09391897f35ff88", size = 13871698 }, - { url = "https://files.pythonhosted.org/packages/fa/cb/6c32f8fadefa4314b740fbe8f74f6a02423bd1549e7c930826df35ac3c1b/pandas-2.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:b4b0de34dc8499c2db34000ef8baad684cfa4cbd836ecee05f323ebfba348c7d", size = 11357186 }, -] - -[[package]] -name = "parso" -version = "0.8.5" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d4/de/53e0bcf53d13e005bd8c92e7855142494f41171b34c2536b86187474184d/parso-0.8.5.tar.gz", hash = "sha256:034d7354a9a018bdce352f48b2a8a450f05e9d6ee85db84764e9b6bd96dafe5a", size = 401205 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/16/32/f8e3c85d1d5250232a5d3477a2a28cc291968ff175caeadaf3cc19ce0e4a/parso-0.8.5-py2.py3-none-any.whl", hash = "sha256:646204b5ee239c396d040b90f9e272e9a8017c630092bf59980beb62fd033887", size = 106668 }, -] - -[[package]] -name = "pdbpp" -version = "0.11.7" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "fancycompleter" }, - { name = "pygments" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c6/4c/118ef9534ac0632859b48c305d8c5dc9d6f963564fdfa66bc785c560247c/pdbpp-0.11.7.tar.gz", hash = "sha256:cb6604ac31a35ed0f2a29650a8c022b26284620be3e01cfd41b683b91da1ff14", size = 76026 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/99/e9/704bbc08aace64fee536e4c2c20f63f64f6fdbad72938c5ed46c9723a9f1/pdbpp-0.11.7-py3-none-any.whl", hash = "sha256:51916b63693898cf4881b36b4501c83947758d73f582f1f84893662b163bdb75", size = 30545 }, -] - -[[package]] -name = "pendulum" -version = "3.1.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "python-dateutil" }, - { name = "tzdata" }, -] 
-sdist = { url = "https://files.pythonhosted.org/packages/23/7c/009c12b86c7cc6c403aec80f8a4308598dfc5995e5c523a5491faaa3952e/pendulum-3.1.0.tar.gz", hash = "sha256:66f96303560f41d097bee7d2dc98ffca716fbb3a832c4b3062034c2d45865015", size = 85930 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b4/d8/398cd27903a6899d0ae47b896d88e0b15849fc334931a6732e7ce3be9a45/pendulum-3.1.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:aa545a59e6517cf43597455a6fb44daa4a6e08473d67a7ad34e4fa951efb9620", size = 338637 }, - { url = "https://files.pythonhosted.org/packages/aa/9d/a125554919c6db14e189393254c7781ee98ed5a121b6c05652d353b03c12/pendulum-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:299df2da6c490ede86bb8d58c65e33d7a2a42479d21475a54b467b03ccb88531", size = 326003 }, - { url = "https://files.pythonhosted.org/packages/53/9f/43a5a902f904e06252c259c2f6cf2dceafbb25aef158df08f79c0089dfd7/pendulum-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbaa66e3ab179a2746eec67462f852a5d555bd709c25030aef38477468dd008e", size = 344335 }, - { url = "https://files.pythonhosted.org/packages/ca/24/00fcd6abd1f7623d2bbcca048b45f01aa8bb6b647e0477c3a8ea6094335c/pendulum-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3907ab3744c32e339c358d88ec80cd35fa2d4b25c77a3c67e6b39e99b7090c5", size = 382169 }, - { url = "https://files.pythonhosted.org/packages/32/bc/20a87f24c26c6c4daf3c69311208b28130b4d19c006da16efc0e55715963/pendulum-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8244958c5bc4ed1c47ee84b098ddd95287a3fc59e569ca6e2b664c6396138ec4", size = 436675 }, - { url = "https://files.pythonhosted.org/packages/1d/eb/3b1818a796408a250b8e6cfaa5372b991c0cbec768e02e0f9a226755383d/pendulum-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca5722b3993b85ff7dfced48d86b318f863c359877b6badf1a3601e35199ef8f", size = 353728 }, - { url = 
"https://files.pythonhosted.org/packages/36/23/755ef61f863b2777925171a59509540205b561a9e07ee7de0b5be9226bea/pendulum-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5b77a3dc010eea1a4916ef3771163d808bfc3e02b894c37df311287f18e5b764", size = 524465 }, - { url = "https://files.pythonhosted.org/packages/07/1f/a3e5f08890d13d93eee725778bfeaa233db5c55463e526857dffbc1a47e4/pendulum-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2d6e1eff4a15fdb8fb3867c5469e691c2465eef002a6a541c47b48a390ff4cf4", size = 525690 }, - { url = "https://files.pythonhosted.org/packages/43/c5/bf8ce472b81e8f5f074e8ba39899d288acce417c2c4a9ec7486d56970e28/pendulum-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:73de43ec85b46ac75db848c8e2f3f5d086e90b11cd9c7f029e14c8d748d920e2", size = 260356 }, - { url = "https://files.pythonhosted.org/packages/5e/6e/d28d3c22e6708b819a94c05bd05a3dfaed5c685379e8b6dc4b34b473b942/pendulum-3.1.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:61a03d14f8c64d13b2f7d5859e4b4053c4a7d3b02339f6c71f3e4606bfd67423", size = 338596 }, - { url = "https://files.pythonhosted.org/packages/e1/e6/43324d58021d463c2eeb6146b169d2c935f2f840f9e45ac2d500453d954c/pendulum-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e674ed2d158afa5c361e60f1f67872dc55b492a10cacdaa7fcd7b7da5f158f24", size = 325854 }, - { url = "https://files.pythonhosted.org/packages/b0/a7/d2ae79b960bfdea94dab67e2f118697b08bc9e98eb6bd8d32c4d99240da3/pendulum-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c75377eb16e58bbe7e03ea89eeea49be6fc5de0934a4aef0e263f8b4fa71bc2", size = 344334 }, - { url = "https://files.pythonhosted.org/packages/96/94/941f071212e23c29aae7def891fb636930c648386e059ce09ea0dcd43933/pendulum-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:656b8b0ce070f0f2e5e2668247d3c783c55336534aa1f13bd0969535878955e1", size = 382259 }, - { url = 
"https://files.pythonhosted.org/packages/51/ad/a78a701656aec00d16fee636704445c23ca11617a0bfe7c3848d1caa5157/pendulum-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48962903e6c1afe1f13548cb6252666056086c107d59e3d64795c58c9298bc2e", size = 436361 }, - { url = "https://files.pythonhosted.org/packages/da/93/83f59ccbf4435c29dca8c63a6560fcbe4783079a468a5f91d9f886fd21f0/pendulum-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d364ec3f8e65010fefd4b0aaf7be5eb97e5df761b107a06f5e743b7c3f52c311", size = 353653 }, - { url = "https://files.pythonhosted.org/packages/6f/0f/42d6644ec6339b41066f594e52d286162aecd2e9735aaf994d7e00c9e09d/pendulum-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dd52caffc2afb86612ec43bbeb226f204ea12ebff9f3d12f900a7d3097210fcc", size = 524567 }, - { url = "https://files.pythonhosted.org/packages/de/45/d84d909202755ab9d3379e5481fdf70f53344ebefbd68d6f5803ddde98a6/pendulum-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d439fccaa35c91f686bd59d30604dab01e8b5c1d0dd66e81648c432fd3f8a539", size = 525571 }, - { url = "https://files.pythonhosted.org/packages/0d/e0/4de160773ce3c2f7843c310db19dd919a0cd02cc1c0384866f63b18a6251/pendulum-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:43288773a86d9c5c0ddb645f88f615ff6bd12fd1410b34323662beccb18f3b49", size = 260259 }, - { url = "https://files.pythonhosted.org/packages/c1/7f/ffa278f78112c6c6e5130a702042f52aab5c649ae2edf814df07810bbba5/pendulum-3.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:569ea5072ae0f11d625e03b36d865f8037b76e838a3b621f6967314193896a11", size = 253899 }, - { url = "https://files.pythonhosted.org/packages/7a/d7/b1bfe15a742f2c2713acb1fdc7dc3594ff46ef9418ac6a96fcb12a6ba60b/pendulum-3.1.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:4dfd53e7583ccae138be86d6c0a0b324c7547df2afcec1876943c4d481cf9608", size = 336209 }, - { url = 
"https://files.pythonhosted.org/packages/eb/87/0392da0c603c828b926d9f7097fbdddaafc01388cb8a00888635d04758c3/pendulum-3.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6a6e06a28f3a7d696546347805536f6f38be458cb79de4f80754430696bea9e6", size = 323130 }, - { url = "https://files.pythonhosted.org/packages/c0/61/95f1eec25796be6dddf71440ee16ec1fd0c573fc61a73bd1ef6daacd529a/pendulum-3.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e68d6a51880708084afd8958af42dc8c5e819a70a6c6ae903b1c4bfc61e0f25", size = 341509 }, - { url = "https://files.pythonhosted.org/packages/b5/7b/eb0f5e6aa87d5e1b467a1611009dbdc92f0f72425ebf07669bfadd8885a6/pendulum-3.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e3f1e5da39a7ea7119efda1dd96b529748c1566f8a983412d0908455d606942", size = 378674 }, - { url = "https://files.pythonhosted.org/packages/29/68/5a4c1b5de3e54e16cab21d2ec88f9cd3f18599e96cc90a441c0b0ab6b03f/pendulum-3.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9af1e5eeddb4ebbe1b1c9afb9fd8077d73416ade42dd61264b3f3b87742e0bb", size = 436133 }, - { url = "https://files.pythonhosted.org/packages/87/5d/f7a1d693e5c0f789185117d5c1d5bee104f5b0d9fbf061d715fb61c840a8/pendulum-3.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20f74aa8029a42e327bfc150472e0e4d2358fa5d795f70460160ba81b94b6945", size = 351232 }, - { url = "https://files.pythonhosted.org/packages/30/77/c97617eb31f1d0554edb073201a294019b9e0a9bd2f73c68e6d8d048cd6b/pendulum-3.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:cf6229e5ee70c2660148523f46c472e677654d0097bec010d6730f08312a4931", size = 521562 }, - { url = "https://files.pythonhosted.org/packages/76/22/0d0ef3393303877e757b848ecef8a9a8c7627e17e7590af82d14633b2cd1/pendulum-3.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:350cabb23bf1aec7c7694b915d3030bff53a2ad4aeabc8c8c0d807c8194113d6", size = 523221 }, - { url = 
"https://files.pythonhosted.org/packages/99/f3/aefb579aa3cebd6f2866b205fc7a60d33e9a696e9e629024752107dc3cf5/pendulum-3.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:42959341e843077c41d47420f28c3631de054abd64da83f9b956519b5c7a06a7", size = 260502 }, - { url = "https://files.pythonhosted.org/packages/02/74/4332b5d6e34c63d4df8e8eab2249e74c05513b1477757463f7fdca99e9be/pendulum-3.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:006758e2125da2e624493324dfd5d7d1b02b0c44bc39358e18bf0f66d0767f5f", size = 253089 }, - { url = "https://files.pythonhosted.org/packages/8e/1f/af928ba4aa403dac9569f787adcf024005e7654433d71f7a84e608716837/pendulum-3.1.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:28658b0baf4b30eb31d096a375983cfed033e60c0a7bbe94fa23f06cd779b50b", size = 336209 }, - { url = "https://files.pythonhosted.org/packages/b6/16/b010643007ba964c397da7fa622924423883c1bbff1a53f9d1022cd7f024/pendulum-3.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b114dcb99ce511cb8f5495c7b6f0056b2c3dba444ef1ea6e48030d7371bd531a", size = 323132 }, - { url = "https://files.pythonhosted.org/packages/64/19/c3c47aeecb5d9bceb0e89faafd800d39809b696c5b7bba8ec8370ad5052c/pendulum-3.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2404a6a54c80252ea393291f0b7f35525a61abae3d795407f34e118a8f133a18", size = 341509 }, - { url = "https://files.pythonhosted.org/packages/38/cf/c06921ff6b860ff7e62e70b8e5d4dc70e36f5abb66d168bd64d51760bc4e/pendulum-3.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d06999790d9ee9962a1627e469f98568bf7ad1085553fa3c30ed08b3944a14d7", size = 378674 }, - { url = "https://files.pythonhosted.org/packages/62/0b/a43953b9eba11e82612b033ac5133f716f1b76b6108a65da6f408b3cc016/pendulum-3.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94751c52f6b7c306734d1044c2c6067a474237e1e5afa2f665d1fbcbbbcf24b3", size = 436133 }, - { url = 
"https://files.pythonhosted.org/packages/eb/a0/ec3d70b3b96e23ae1d039f132af35e17704c22a8250d1887aaefea4d78a6/pendulum-3.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5553ac27be05e997ec26d7f004cf72788f4ce11fe60bb80dda604a64055b29d0", size = 351232 }, - { url = "https://files.pythonhosted.org/packages/f4/97/aba23f1716b82f6951ba2b1c9178a2d107d1e66c102762a9bf19988547ea/pendulum-3.1.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:f8dee234ca6142bf0514368d01a72945a44685aaa2fc4c14c98d09da9437b620", size = 521563 }, - { url = "https://files.pythonhosted.org/packages/01/33/2c0d5216cc53d16db0c4b3d510f141ee0a540937f8675948541190fbd48b/pendulum-3.1.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7378084fe54faab4ee481897a00b710876f2e901ded6221671e827a253e643f2", size = 523221 }, - { url = "https://files.pythonhosted.org/packages/51/89/8de955c339c31aeae77fd86d3225509b998c81875e9dba28cb88b8cbf4b3/pendulum-3.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:8539db7ae2c8da430ac2515079e288948c8ebf7eb1edd3e8281b5cdf433040d6", size = 260501 }, - { url = "https://files.pythonhosted.org/packages/15/c3/226a3837363e94f8722461848feec18bfdd7d5172564d53aa3c3397ff01e/pendulum-3.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:1ce26a608e1f7387cd393fba2a129507c4900958d4f47b90757ec17656856571", size = 253087 }, - { url = "https://files.pythonhosted.org/packages/c9/eb/e128af9e1a216c17b932f8c7f1f927f97cd8fd71d6e60148e1c69ad2bcf4/pendulum-3.1.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:2504df1a7ff8e0827781a601ff399bfcad23e7b7943f87ef33db02c11131f5e8", size = 339467 }, - { url = "https://files.pythonhosted.org/packages/2f/c3/a7f515c6dbe89f2d92216ccd9a2e31dd657767d7f64177da1579092a41e3/pendulum-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4041a7156695499b6676ed092f27e17760db2341bf350f6c5ea9137dd2cfd3f6", size = 326898 }, - { url = 
"https://files.pythonhosted.org/packages/15/da/80774350340e9d06789460b6e8def2d497cffe09334a8911be8d226caec5/pendulum-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b277e9177651d6af8500b95f0af1e3c1769064f2353c06f638d3c1e065063e", size = 344866 }, - { url = "https://files.pythonhosted.org/packages/55/cf/771d1fee8d14abefbccbed99622c6f26e33b839b8964a18188b4f9159bf1/pendulum-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:784cf82b676118816fb81ea6bcbdf8f3b0c49aa74fcb895647ef7f8046093471", size = 382902 }, - { url = "https://files.pythonhosted.org/packages/bd/95/0660ae5dbe9212c99fbccb27ca64361018600aa954fab95641653a39ce36/pendulum-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9e44277a391fa5ad2e9ce02b1b24fd9489cb2a371ae2459eddb238301d31204d", size = 437782 }, - { url = "https://files.pythonhosted.org/packages/04/1f/007dff40bb0325ae91eba3d4d6ce911945d808f87d3cebaa4556ca07f35d/pendulum-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a7d0bca8cca92d60734b64fa4fa58b17b8ec1f55112bf77d00ee65248d19177", size = 354462 }, - { url = "https://files.pythonhosted.org/packages/e6/cc/8d2ed88beab1622623e321bd1f754eee174bb97e6ffcd34ceb9cce87a4ea/pendulum-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bfac5e02faee02c180444e722c298690688ec1c3dfa1aab65fb4e0e3825d84ed", size = 525009 }, - { url = "https://files.pythonhosted.org/packages/44/c0/a503df53796b0dc1dae7d50573d936f86f8f482cf4acd9adbbd2a30ef817/pendulum-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e0da70941b062220e734c2c510ad30daa60aca1a37e893f1baa0da065ffa4c72", size = 526351 }, - { url = "https://files.pythonhosted.org/packages/c6/bc/1fbc57b2e482a0ee3b5a0759c4b3b5127f0401cdce4afef111a3b6179d8d/pendulum-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:300a237fb81028edb9604d4d1bb205b80515fd22ab9c1a4c55014d07869122f8", size = 260813 }, - { url = 
"https://files.pythonhosted.org/packages/66/10/3258c084653606d2be2c7168998eda4a57cf1559cecb43cf1100000fda5f/pendulum-3.1.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d2cac744940299d8da41a3ed941aa1e02b5abbc9ae2c525f3aa2ae30c28a86b5", size = 339442 }, - { url = "https://files.pythonhosted.org/packages/98/d5/98a1a10cd1cfb3390fbf070864e9a10de8e70a9d4509832132f4d900d655/pendulum-3.1.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:ffb39c3f3906a9c9a108fa98e5556f18b52d2c6451984bbfe2f14436ec4fc9d4", size = 326609 }, - { url = "https://files.pythonhosted.org/packages/0a/2e/448abdebc11b9c54e190d273cb084162643199fc184cb1bb6bff7900e67f/pendulum-3.1.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebe18b1c2eb364064cc4a68a65900f1465cac47d0891dab82341766bcc05b40c", size = 344777 }, - { url = "https://files.pythonhosted.org/packages/ed/91/ee857bbd51168bf08b89c3a4705c920725eee0f830ccc513b8370f6ce71d/pendulum-3.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9e9b28a35cec9fcd90f224b4878456129a057dbd694fc8266a9393834804995", size = 354404 }, - { url = "https://files.pythonhosted.org/packages/bc/d4/e63a57df65e2b2d10f3aa917a4069be9abf5ac7d56d11336e0510742d8a6/pendulum-3.1.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:a3be19b73a9c6a866724419295482f817727e635ccc82f07ae6f818943a1ee96", size = 524948 }, - { url = "https://files.pythonhosted.org/packages/93/87/04e74600c5a5674e5f341b8888b530a9de9b84b31889f80fac3bee3e9e87/pendulum-3.1.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:24a53b523819bda4c70245687a589b5ea88711f7caac4be5f276d843fe63076b", size = 526340 }, - { url = "https://files.pythonhosted.org/packages/48/27/d3577a5f6f7d1fbf1138d87ce21ebab363c78642513b991d1c424d658d09/pendulum-3.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bd701789414fbd0be3c75f46803f31e91140c23821e4bcb0fa2bddcdd051c425", size = 261089 }, - { url = 
"https://files.pythonhosted.org/packages/53/8f/6620b0df6acdd8c020ec4f5907e3deb8c6d46970568299b66eed9b5f53b4/pendulum-3.1.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0803639fc98e03f74d0b83955a2800bcee1c99b0700638aae9ab7ceb1a7dcca3", size = 340056 }, - { url = "https://files.pythonhosted.org/packages/dd/90/957491643cff9d97764e443bd0ef7c5c549e733e306159d6ca8ab4034fb5/pendulum-3.1.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:4cceff50503ef9cb021e53a238f867c9843b4dd55859582d682f3c9e52460699", size = 327095 }, - { url = "https://files.pythonhosted.org/packages/fe/f2/550855e0285671278f763963b2470f437d340759aaef927fedb204e58fb0/pendulum-3.1.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2cf8adcf3030eef78c3cd82afd9948cd1a4ae1a9450e9ac128b9e744c42825f", size = 345131 }, - { url = "https://files.pythonhosted.org/packages/b4/8f/938b83fe3e1450f4b04d1f96e8b2c288e07ad6b942260fef24cfd98cc3d0/pendulum-3.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5bce0f71c10e983e1c39e1eb37b9a5f5c2aa0c15a36edaaa0a844fb1fbc7bbb", size = 354886 }, - { url = "https://files.pythonhosted.org/packages/d9/aa/0e3c231a7e35b362226204d7276a47c0e225aa59b30c7c9cd2a8e2660967/pendulum-3.1.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c1354be2df38f031ac6a985949b6541be7d39dd7e44c8804f4bc9a39dea9f3bb", size = 525344 }, - { url = "https://files.pythonhosted.org/packages/0b/c7/d3654a790129684d0e8dc04707cb6d75633d7b102a962c6dc0f862c64c25/pendulum-3.1.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e4cbd933a40c915ed5c41b083115cca15c7afa8179363b2a61db167c64fa0670", size = 526685 }, - { url = "https://files.pythonhosted.org/packages/50/d9/4a166256386b7973e36ff44135e8d009f4afb25d6c72df5380ccfd6fbb89/pendulum-3.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3363a470b5d67dbf8d9fd1bf77dcdbf720788bc3be4a10bdcd28ae5d7dbd26c4", size = 261170 }, - { url = 
"https://files.pythonhosted.org/packages/6e/23/e98758924d1b3aac11a626268eabf7f3cf177e7837c28d47bf84c64532d0/pendulum-3.1.0-py3-none-any.whl", hash = "sha256:f9178c2a8e291758ade1e8dd6371b1d26d08371b4c7730a6e9a3ef8b16ebae0f", size = 111799 }, -] - -[[package]] -name = "pexpect" -version = "4.9.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "ptyprocess" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", size = 63772 }, -] - -[[package]] -name = "platformdirs" -version = "4.3.8" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fe/8b/3c73abc9c759ecd3f1f7ceff6685840859e8070c4d947c93fae71f6a0bf2/platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc", size = 21362 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fe/39/979e8e21520d4e47a0bbe349e2713c0aac6f3d853d0e5b34d76206c439aa/platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4", size = 18567 }, -] - -[[package]] -name = "pluggy" -version = "1.6.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412 } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538 }, -] - -[[package]] -name = "pre-commit" -version = "4.3.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "cfgv" }, - { name = "identify" }, - { name = "nodeenv" }, - { name = "pyyaml" }, - { name = "virtualenv" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ff/29/7cf5bbc236333876e4b41f56e06857a87937ce4bf91e117a6991a2dbb02a/pre_commit-4.3.0.tar.gz", hash = "sha256:499fe450cc9d42e9d58e606262795ecb64dd05438943c62b66f6a8673da30b16", size = 193792 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5b/a5/987a405322d78a73b66e39e4a90e4ef156fd7141bf71df987e50717c321b/pre_commit-4.3.0-py2.py3-none-any.whl", hash = "sha256:2b0747ad7e6e967169136edffee14c16e148a778a54e4f967921aa1ebf2308d8", size = 220965 }, -] - -[[package]] -name = "prompt-toolkit" -version = "3.0.52" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "wcwidth" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/a1/96/06e01a7b38dce6fe1db213e061a4602dd6032a8a97ef6c1a862537732421/prompt_toolkit-3.0.52.tar.gz", hash = "sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855", size = 434198 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/84/03/0d3ce49e2505ae70cf43bc5bb3033955d2fc9f932163e84dc0779cc47f48/prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955", size = 391431 }, -] - -[[package]] -name = "psutil" -version = "5.9.8" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/90/c7/6dc0a455d111f68ee43f27793971cf03fe29b6ef972042549db29eec39a2/psutil-5.9.8.tar.gz", hash = 
"sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c", size = 503247 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/e3/07ae864a636d70a8a6f58da27cb1179192f1140d5d1da10886ade9405797/psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81", size = 248702 }, - { url = "https://files.pythonhosted.org/packages/b3/bd/28c5f553667116b2598b9cc55908ec435cb7f77a34f2bff3e3ca765b0f78/psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421", size = 285242 }, - { url = "https://files.pythonhosted.org/packages/c5/4f/0e22aaa246f96d6ac87fe5ebb9c5a693fbe8877f537a1022527c47ca43c5/psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4", size = 288191 }, - { url = "https://files.pythonhosted.org/packages/6e/f5/2aa3a4acdc1e5940b59d421742356f133185667dd190b166dbcfcf5d7b43/psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0", size = 251252 }, - { url = "https://files.pythonhosted.org/packages/93/52/3e39d26feae7df0aa0fd510b14012c3678b36ed068f7d78b8d8784d61f0e/psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf", size = 255090 }, - { url = "https://files.pythonhosted.org/packages/05/33/2d74d588408caedd065c2497bdb5ef83ce6082db01289a1e1147f6639802/psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8", size = 249898 }, -] - -[[package]] -name = "ptyprocess" -version = "0.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/20/e5/16ff212c1e452235a90aeb09066144d0c5a6a8c0834397e03f5224495c4e/ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220", size = 70762 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", size = 13993 }, -] - -[[package]] -name = "pure-eval" -version = "0.2.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cd/05/0a34433a064256a578f1783a10da6df098ceaa4a57bbeaa96a6c0352786b/pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42", size = 19752 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0", size = 11842 }, -] - -[[package]] -name = "py-cpuinfo" -version = "9.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/37/a8/d832f7293ebb21690860d2e01d8115e5ff6f2ae8bbdc953f0eb0fa4bd2c7/py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690", size = 104716 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e0/a9/023730ba63db1e494a271cb018dcd361bd2c917ba7004c3e49d5daf795a2/py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5", size = 22335 }, -] - -[[package]] -name = "pyarrow" -version = "18.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7f/7b/640785a9062bb00314caa8a387abce547d2a420cf09bd6c715fe659ccffb/pyarrow-18.1.0.tar.gz", hash 
= "sha256:9386d3ca9c145b5539a1cfc75df07757dff870168c959b473a0bccbc3abc8c73", size = 1118671 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1a/bb/8d4a1573f66e0684f190dd2b55fd0b97a7214de8882d58a3867e777bf640/pyarrow-18.1.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:e21488d5cfd3d8b500b3238a6c4b075efabc18f0f6d80b29239737ebd69caa6c", size = 29531620 }, - { url = "https://files.pythonhosted.org/packages/30/90/893acfad917533b624a97b9e498c0e8393908508a0a72d624fe935e632bf/pyarrow-18.1.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:b516dad76f258a702f7ca0250885fc93d1fa5ac13ad51258e39d402bd9e2e1e4", size = 30836521 }, - { url = "https://files.pythonhosted.org/packages/a3/2a/526545a7464b5fb2fa6e2c4bad16ca90e59e1843025c534fd907b7f73e5a/pyarrow-18.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f443122c8e31f4c9199cb23dca29ab9427cef990f283f80fe15b8e124bcc49b", size = 39213905 }, - { url = "https://files.pythonhosted.org/packages/8a/77/4b3fab91a30e19e233e738d0c5eca5a8f6dd05758bc349a2ca262c65de79/pyarrow-18.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0a03da7f2758645d17b7b4f83c8bffeae5bbb7f974523fe901f36288d2eab71", size = 40128881 }, - { url = "https://files.pythonhosted.org/packages/aa/e2/a88e16c5e45e562449c52305bd3bc2f9d704295322d3434656e7ccac1444/pyarrow-18.1.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ba17845efe3aa358ec266cf9cc2800fa73038211fb27968bfa88acd09261a470", size = 38627517 }, - { url = "https://files.pythonhosted.org/packages/6d/84/8037c20005ccc7b869726465be0957bd9c29cfc88612962030f08292ad06/pyarrow-18.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:3c35813c11a059056a22a3bef520461310f2f7eea5c8a11ef9de7062a23f8d56", size = 40060187 }, - { url = "https://files.pythonhosted.org/packages/2a/38/d6435c723ff73df8ae74626ea778262fbcc2b9b0d1a4f3db915b61711b05/pyarrow-18.1.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:9736ba3c85129d72aefa21b4f3bd715bc4190fe4426715abfff90481e7d00812", size = 25118314 }, - { url = "https://files.pythonhosted.org/packages/9e/4d/a4988e7d82f4fbc797715db4185939a658eeffb07a25bab7262bed1ea076/pyarrow-18.1.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:eaeabf638408de2772ce3d7793b2668d4bb93807deed1725413b70e3156a7854", size = 29554860 }, - { url = "https://files.pythonhosted.org/packages/59/03/3a42c5c1e4bd4c900ab62aa1ff6b472bdb159ba8f1c3e5deadab7222244f/pyarrow-18.1.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:3b2e2239339c538f3464308fd345113f886ad031ef8266c6f004d49769bb074c", size = 30867076 }, - { url = "https://files.pythonhosted.org/packages/75/7e/332055ac913373e89256dce9d14b7708f55f7bd5be631456c897f0237738/pyarrow-18.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f39a2e0ed32a0970e4e46c262753417a60c43a3246972cfc2d3eb85aedd01b21", size = 39212135 }, - { url = "https://files.pythonhosted.org/packages/8c/64/5099cdb325828722ef7ffeba9a4696f238eb0cdeae227f831c2d77fcf1bd/pyarrow-18.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31e9417ba9c42627574bdbfeada7217ad8a4cbbe45b9d6bdd4b62abbca4c6f6", size = 40125195 }, - { url = "https://files.pythonhosted.org/packages/83/88/1938d783727db1b178ff71bc6a6143d7939e406db83a9ec23cad3dad325c/pyarrow-18.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:01c034b576ce0eef554f7c3d8c341714954be9b3f5d5bc7117006b85fcf302fe", size = 38641884 }, - { url = "https://files.pythonhosted.org/packages/5e/b5/9e14e9f7590e0eaa435ecea84dabb137284a4dbba7b3c337b58b65b76d95/pyarrow-18.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:f266a2c0fc31995a06ebd30bcfdb7f615d7278035ec5b1cd71c48d56daaf30b0", size = 40076877 }, - { url = "https://files.pythonhosted.org/packages/4d/a3/817ac7fe0891a2d66e247e223080f3a6a262d8aefd77e11e8c27e6acf4e1/pyarrow-18.1.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:d4f13eee18433f99adefaeb7e01d83b59f73360c231d4782d9ddfaf1c3fbde0a", size = 25119811 }, - { url = "https://files.pythonhosted.org/packages/6a/50/12829e7111b932581e51dda51d5cb39207a056c30fe31ef43f14c63c4d7e/pyarrow-18.1.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:9f3a76670b263dc41d0ae877f09124ab96ce10e4e48f3e3e4257273cee61ad0d", size = 29514620 }, - { url = "https://files.pythonhosted.org/packages/d1/41/468c944eab157702e96abab3d07b48b8424927d4933541ab43788bb6964d/pyarrow-18.1.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:da31fbca07c435be88a0c321402c4e31a2ba61593ec7473630769de8346b54ee", size = 30856494 }, - { url = "https://files.pythonhosted.org/packages/68/f9/29fb659b390312a7345aeb858a9d9c157552a8852522f2c8bad437c29c0a/pyarrow-18.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:543ad8459bc438efc46d29a759e1079436290bd583141384c6f7a1068ed6f992", size = 39203624 }, - { url = "https://files.pythonhosted.org/packages/6e/f6/19360dae44200e35753c5c2889dc478154cd78e61b1f738514c9f131734d/pyarrow-18.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0743e503c55be0fdb5c08e7d44853da27f19dc854531c0570f9f394ec9671d54", size = 40139341 }, - { url = "https://files.pythonhosted.org/packages/bb/e6/9b3afbbcf10cc724312e824af94a2e993d8ace22994d823f5c35324cebf5/pyarrow-18.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:d4b3d2a34780645bed6414e22dda55a92e0fcd1b8a637fba86800ad737057e33", size = 38618629 }, - { url = "https://files.pythonhosted.org/packages/3a/2e/3b99f8a3d9e0ccae0e961978a0d0089b25fb46ebbcfb5ebae3cca179a5b3/pyarrow-18.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:c52f81aa6f6575058d8e2c782bf79d4f9fdc89887f16825ec3a66607a5dd8e30", size = 40078661 }, - { url = "https://files.pythonhosted.org/packages/76/52/f8da04195000099d394012b8d42c503d7041b79f778d854f410e5f05049a/pyarrow-18.1.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:0ad4892617e1a6c7a551cfc827e072a633eaff758fa09f21c4ee548c30bcaf99", size = 25092330 }, - { url = "https://files.pythonhosted.org/packages/cb/87/aa4d249732edef6ad88899399047d7e49311a55749d3c373007d034ee471/pyarrow-18.1.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:84e314d22231357d473eabec709d0ba285fa706a72377f9cc8e1cb3c8013813b", size = 29497406 }, - { url = "https://files.pythonhosted.org/packages/3c/c7/ed6adb46d93a3177540e228b5ca30d99fc8ea3b13bdb88b6f8b6467e2cb7/pyarrow-18.1.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:f591704ac05dfd0477bb8f8e0bd4b5dc52c1cadf50503858dce3a15db6e46ff2", size = 30835095 }, - { url = "https://files.pythonhosted.org/packages/41/d7/ed85001edfb96200ff606943cff71d64f91926ab42828676c0fc0db98963/pyarrow-18.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:acb7564204d3c40babf93a05624fc6a8ec1ab1def295c363afc40b0c9e66c191", size = 39194527 }, - { url = "https://files.pythonhosted.org/packages/59/16/35e28eab126342fa391593415d79477e89582de411bb95232f28b131a769/pyarrow-18.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74de649d1d2ccb778f7c3afff6085bd5092aed4c23df9feeb45dd6b16f3811aa", size = 40131443 }, - { url = "https://files.pythonhosted.org/packages/0c/95/e855880614c8da20f4cd74fa85d7268c725cf0013dc754048593a38896a0/pyarrow-18.1.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:f96bd502cb11abb08efea6dab09c003305161cb6c9eafd432e35e76e7fa9b90c", size = 38608750 }, - { url = "https://files.pythonhosted.org/packages/54/9d/f253554b1457d4fdb3831b7bd5f8f00f1795585a606eabf6fec0a58a9c38/pyarrow-18.1.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:36ac22d7782554754a3b50201b607d553a8d71b78cdf03b33c1125be4b52397c", size = 40066690 }, - { url = "https://files.pythonhosted.org/packages/2f/58/8912a2563e6b8273e8aa7b605a345bba5a06204549826f6493065575ebc0/pyarrow-18.1.0-cp313-cp313-win_amd64.whl", hash = 
"sha256:25dbacab8c5952df0ca6ca0af28f50d45bd31c1ff6fcf79e2d120b4a65ee7181", size = 25081054 }, - { url = "https://files.pythonhosted.org/packages/82/f9/d06ddc06cab1ada0c2f2fd205ac8c25c2701182de1b9c4bf7a0a44844431/pyarrow-18.1.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:6a276190309aba7bc9d5bd2933230458b3521a4317acfefe69a354f2fe59f2bc", size = 29525542 }, - { url = "https://files.pythonhosted.org/packages/ab/94/8917e3b961810587ecbdaa417f8ebac0abb25105ae667b7aa11c05876976/pyarrow-18.1.0-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:ad514dbfcffe30124ce655d72771ae070f30bf850b48bc4d9d3b25993ee0e386", size = 30829412 }, - { url = "https://files.pythonhosted.org/packages/5e/e3/3b16c3190f3d71d3b10f6758d2d5f7779ef008c4fd367cedab3ed178a9f7/pyarrow-18.1.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aebc13a11ed3032d8dd6e7171eb6e86d40d67a5639d96c35142bd568b9299324", size = 39119106 }, - { url = "https://files.pythonhosted.org/packages/1d/d6/5d704b0d25c3c79532f8c0639f253ec2803b897100f64bcb3f53ced236e5/pyarrow-18.1.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6cf5c05f3cee251d80e98726b5c7cc9f21bab9e9783673bac58e6dfab57ecc8", size = 40090940 }, - { url = "https://files.pythonhosted.org/packages/37/29/366bc7e588220d74ec00e497ac6710c2833c9176f0372fe0286929b2d64c/pyarrow-18.1.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:11b676cd410cf162d3f6a70b43fb9e1e40affbc542a1e9ed3681895f2962d3d9", size = 38548177 }, - { url = "https://files.pythonhosted.org/packages/c8/11/fabf6ecabb1fe5b7d96889228ca2a9158c4c3bb732e3b8ee3f7f6d40b703/pyarrow-18.1.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:b76130d835261b38f14fc41fdfb39ad8d672afb84c447126b84d5472244cfaba", size = 40043567 }, - { url = "https://files.pythonhosted.org/packages/fd/9b/60516e3876ec6f25b0909afa70f90a15de83b48c7c0d8042fac4e64c4411/pyarrow-18.1.0-cp39-cp39-macosx_12_0_arm64.whl", hash = 
"sha256:0b331e477e40f07238adc7ba7469c36b908f07c89b95dd4bd3a0ec84a3d1e21e", size = 29543752 }, - { url = "https://files.pythonhosted.org/packages/14/a7/bd08b6f1a2bd2e71dc6bb0451fc1872607e44c83daf1ee63c82764a2d233/pyarrow-18.1.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:2c4dd0c9010a25ba03e198fe743b1cc03cd33c08190afff371749c52ccbbaf76", size = 30850753 }, - { url = "https://files.pythonhosted.org/packages/84/c9/62ef9c6281c0e5b4ee1afa9d7bd556e72e06da6706b7906c32c15e69b3d6/pyarrow-18.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f97b31b4c4e21ff58c6f330235ff893cc81e23da081b1a4b1c982075e0ed4e9", size = 39226870 }, - { url = "https://files.pythonhosted.org/packages/b2/99/a6e89e71655a38475e76b060777c8bf69c078b772bec3b7daf7361440f05/pyarrow-18.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a4813cb8ecf1809871fd2d64a8eff740a1bd3691bbe55f01a3cf6c5ec869754", size = 40139114 }, - { url = "https://files.pythonhosted.org/packages/64/a9/06d79923890682e4fe7a16524abee307407008a413115354aaf3226b8410/pyarrow-18.1.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:05a5636ec3eb5cc2a36c6edb534a38ef57b2ab127292a716d00eabb887835f1e", size = 38639231 }, - { url = "https://files.pythonhosted.org/packages/3b/8c/4c3ed19026a00740b81fe1c87f3ff235b2763a0a1ddf5711a9d026b775ce/pyarrow-18.1.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:73eeed32e724ea3568bb06161cad5fa7751e45bc2228e33dcb10c614044165c7", size = 40070949 }, - { url = "https://files.pythonhosted.org/packages/87/d8/94161a7ca5c55199484e926165e9e33f318ea1d1b0d7cdbcbc3652b933ec/pyarrow-18.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:a1880dd6772b685e803011a6b43a230c23b566859a6e0c9a276c1e0faf4f4052", size = 25301373 }, -] - -[[package]] -name = "pycparser" -version = "2.22" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552 }, -] - -[[package]] -name = "pygments" -version = "2.19.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217 }, -] - -[[package]] -name = "pyreadline3" -version = "3.5.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0f/49/4cea918a08f02817aabae639e3d0ac046fef9f9180518a3ad394e22da148/pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7", size = 99839 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/dc/491b7661614ab97483abf2056be1deee4dc2490ecbf7bff9ab5cdbac86e1/pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6", size = 83178 }, -] - -[[package]] -name = "pyreadstat" -version = "1.2.9" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pandas" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/b1/7e/25f1e328accc41726ada3bb88bd701e17a14ddbf7ab124d181d4e4075783/pyreadstat-1.2.9.tar.gz", hash = "sha256:bc4ff137b70013869727926867cf3c175ac56fd4e4a1d0e4317b559d8487ee4f", size = 565303 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a3/c5/32a6919fe00e8d4eaf9293414da0db59c52a5d3a2a2c7c7aa86fcb761dda/pyreadstat-1.2.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:78e4166126473c4429f17264c165d5cd0b6ea700d3fcbcaa76859826e7da61c2", size = 591701 }, - { url = "https://files.pythonhosted.org/packages/ae/6b/bb342640be14062a37565679c944acec5aa441c146714a458ecc99e8cd81/pyreadstat-1.2.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:319ea675ba47b49a64e9dc28944d4366adb063c3f77a46c6961a720361ddd8a1", size = 586448 }, - { url = "https://files.pythonhosted.org/packages/f6/3f/3e8c6932446a658258c7183059e8fa7f0f9a2f13cc1f5b6240af8f8829bb/pyreadstat-1.2.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bda5a38f76e14c551333cb39488ece4f3e1e26161b16d0662f34c75e1505f2af", size = 591007 }, - { url = "https://files.pythonhosted.org/packages/83/a6/b23ac91588eb664c802f1fb8ee422e562232b5de37b705cea6acfe764b4e/pyreadstat-1.2.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5bd3dad326ac798a3d9a1c44935362e5e751316b7a965caa769a55673501302", size = 626271 }, - { url = "https://files.pythonhosted.org/packages/90/10/2f4a052da83d62be9e1ffce592edcf340bb78016cf405f7def62e41a2a45/pyreadstat-1.2.9-cp310-cp310-win_amd64.whl", hash = "sha256:4c304316748035fdf3b759a45e385d629b4b0f28c56a84fb66e4e68d673d729b", size = 2396727 }, - { url = "https://files.pythonhosted.org/packages/5d/83/696ddbb71c180bb5c3a08d6919b0e547ec0986292572f528c25eeaf03c53/pyreadstat-1.2.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d64f75eb110e5a2d47b3a800088a986e52dede4ba235e61296d49771c823fe25", size = 580631 }, - { url = 
"https://files.pythonhosted.org/packages/6f/db/0b35ae8e6f43184806bf70037670c0469225718e0ee6778e8cf7731c3731/pyreadstat-1.2.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:eb465a04996749650bbc471ee2bdc8405f1440ffc54b50108c50eedb1e4300ca", size = 578733 }, - { url = "https://files.pythonhosted.org/packages/bf/c1/efb1f05570c1c0f715cfbf76b719153f52f0cf7950bd71aedbf52f48fe91/pyreadstat-1.2.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20eb85cb162612aa30ab2ffafda90fd6bc243aedec66101e4639644ab843efdd", size = 586248 }, - { url = "https://files.pythonhosted.org/packages/4f/14/fd48259edfb01e33a37635250a04d75a191e3714c56093089c6a8bbd6ad5/pyreadstat-1.2.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b17458e0ea63b399c0bd08645d8859f7c53b191c5463dc891b2dbad167b8cbb", size = 617677 }, - { url = "https://files.pythonhosted.org/packages/c5/27/a919731092492263da67161466c4971b3a22bbb38bec1a714849d71c90af/pyreadstat-1.2.9-cp311-cp311-win_amd64.whl", hash = "sha256:aac614c2a6b67d81c585230dbe2493acb21e0a796dd8a8171362a6eba06bab6d", size = 2399296 }, - { url = "https://files.pythonhosted.org/packages/e9/dd/e8a18d1c46f369d73751da0a4fc023751eebca364543970a94562fe2011e/pyreadstat-1.2.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5a6ddca6f163b8085ff3ca451a1282ea7cf8aca41361b2b3ad17512d533baa4c", size = 612864 }, - { url = "https://files.pythonhosted.org/packages/1f/a6/6a0440d74b403e956e3644ceb24fe485540fbefeed7a7d9e9127ee2c8219/pyreadstat-1.2.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:37778a5011b89aa5394f9ab62907ce7a31458628a86e1ec7e2ef918aa982923b", size = 612105 }, - { url = "https://files.pythonhosted.org/packages/92/f3/cc57167643a8d2ce283ae04d11742b317f990a565223a629e1e0010ea9b3/pyreadstat-1.2.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12ba3e801c5c96a26fa2d640c6a506d8a0824bb7fb40d04fe10cb88a23b7a121", size = 627581 }, - { url = 
"https://files.pythonhosted.org/packages/d3/ed/1e29bbe1d74fb0a226101fe9eae672e99f2eedf39f7f4788e7a4dfa5cada/pyreadstat-1.2.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a68650f381c3d28e50234b6a5487f86bd4ae4aeed035f881acff2f5567a79df6", size = 659123 }, - { url = "https://files.pythonhosted.org/packages/bc/11/1a7982ec191cb8e591978fb360c7ced3077ebcddcea1ade144d28044f498/pyreadstat-1.2.9-cp312-cp312-win_amd64.whl", hash = "sha256:2ac0c92aec8fff4e05767ac9b9713b87dc5349244b9b2007095ac7a21fbee08b", size = 2387510 }, - { url = "https://files.pythonhosted.org/packages/a9/f5/cea5d3eb7e599ebb61c17e7f3baa756b884c5646c1314d46b0a6254bdf9c/pyreadstat-1.2.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6f79035db902c7551c11fd1d96fac010a34a7f03e7acc4c2c3c98e297d3e9c30", size = 612319 }, - { url = "https://files.pythonhosted.org/packages/e3/8f/c9a123afb4dddd65a513386b5ce170a38ffa89678c5d68c465c445c2273a/pyreadstat-1.2.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:32824454b92f33b70a02f92995c0bbafe4d9f25afaf592db242ee33fea74d386", size = 611790 }, - { url = "https://files.pythonhosted.org/packages/4c/51/467031fc1c074e313c0946702b40581bc772d25585525b97a4ecf1b19a84/pyreadstat-1.2.9-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35ea2c6fd421c49e4d65e1956d24a2a66620e4fc54dcc2bbb4a91e91cf73ff70", size = 627207 }, - { url = "https://files.pythonhosted.org/packages/b3/8b/635a4f7e5cfa75f450db76f94cf3825b38d85a00745638288af56f8328bf/pyreadstat-1.2.9-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8037fa2cf2b210d713f7e6d94f4bd0050b269cb989386283e703c26f75eae8b", size = 658571 }, - { url = "https://files.pythonhosted.org/packages/13/c0/be18a495f8db2b92f57ecfc176b61b806c03bbc8b733cdfd0cedbdb1f120/pyreadstat-1.2.9-cp313-cp313-win_amd64.whl", hash = "sha256:45697a370502fc2a2d102b4643c1211966e410bdff98e0f287eac26943653bc9", size = 2391467 }, -] - -[[package]] -name = "pyrepl" -version = "0.11.4" 
-source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/08/4f/7088417e5465c53a30b918d30542aad89352ea0d635a5d077717c69a7d2b/pyrepl-0.11.4.tar.gz", hash = "sha256:efe988b4a6e5eed587e9769dc2269aeec2b6feec2f5d77995ee85b9ad7cf7063", size = 51089 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/bd/a5/ce97a778f096aaa27cfcb7ad09f1198cf73277dcab6c68a4b8f332d91e48/pyrepl-0.11.4-py3-none-any.whl", hash = "sha256:ac30d6340267a21c39e1b1934f92bca6b8735017d14b17e40f903b2d1563541d", size = 55596 }, -] - -[[package]] -name = "pytest" -version = "8.4.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, - { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, - { name = "iniconfig" }, - { name = "packaging" }, - { name = "pluggy" }, - { name = "pygments" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/08/ba/45911d754e8eba3d5a841a5ce61a65a685ff1798421ac054f85aa8747dfb/pytest-8.4.1.tar.gz", hash = "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c", size = 1517714 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/29/16/c8a903f4c4dffe7a12843191437d7cd8e32751d5de349d45d3fe69544e87/pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7", size = 365474 }, -] - -[[package]] -name = "pytest-cov" -version = "6.2.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "coverage", extra = ["toml"] }, - { name = "pluggy" }, - { name = "pytest" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/18/99/668cade231f434aaa59bbfbf49469068d2ddd945000621d3d165d2e7dd7b/pytest_cov-6.2.1.tar.gz", hash = "sha256:25cc6cc0a5358204b8108ecedc51a9b57b34cc6b8c967cc2c01a4e00d8a67da2", size = 69432 } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/bc/16/4ea354101abb1287856baa4af2732be351c7bee728065aed451b678153fd/pytest_cov-6.2.1-py3-none-any.whl", hash = "sha256:f5bc4c23f42f1cdd23c70b1dab1bbaef4fc505ba950d53e0081d0730dd7e86d5", size = 24644 }, -] - -[[package]] -name = "pytest-order" -version = "1.3.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pytest" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/1d/66/02ae17461b14a52ce5a29ae2900156b9110d1de34721ccc16ccd79419876/pytest_order-1.3.0.tar.gz", hash = "sha256:51608fec3d3ee9c0adaea94daa124a5c4c1d2bb99b00269f098f414307f23dde", size = 47544 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1b/73/59b038d1aafca89f8e9936eaa8ffa6bb6138d00459d13a32ce070be4f280/pytest_order-1.3.0-py3-none-any.whl", hash = "sha256:2cd562a21380345dd8d5774aa5fd38b7849b6ee7397ca5f6999bbe6e89f07f6e", size = 14609 }, -] - -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "six" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 }, -] - -[[package]] -name = "pytz" -version = "2025.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884 } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225 }, -] - -[[package]] -name = "pyxdg" -version = "0.28" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b0/25/7998cd2dec731acbd438fbf91bc619603fc5188de0a9a17699a781840452/pyxdg-0.28.tar.gz", hash = "sha256:3267bb3074e934df202af2ee0868575484108581e6f3cb006af1da35395e88b4", size = 77776 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/8d/cf41b66a8110670e3ad03dab9b759704eeed07fa96e90fdc0357b2ba70e2/pyxdg-0.28-py2.py3-none-any.whl", hash = "sha256:bdaf595999a0178ecea4052b7f4195569c1ff4d344567bccdc12dfdf02d545ab", size = 49520 }, -] - -[[package]] -name = "pyyaml" -version = "6.0.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", size = 184199 }, - { url = "https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", size = 171758 }, - { url = "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", size = 718463 }, - { url = "https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", size = 719280 }, - { url = "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", size = 751239 }, - { url = "https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", size = 695802 }, - { url = "https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", size = 720527 }, - { url = "https://files.pythonhosted.org/packages/be/aa/5afe99233fb360d0ff37377145a949ae258aaab831bde4792b32650a4378/PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99", size = 144052 }, - { url = "https://files.pythonhosted.org/packages/b5/84/0fa4b06f6d6c958d207620fc60005e241ecedceee58931bb20138e1e5776/PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e", size = 161774 }, - { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612 }, - { url = 
"https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040 }, - { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829 }, - { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167 }, - { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952 }, - { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301 }, - { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638 }, - { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850 }, - { url = 
"https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980 }, - { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873 }, - { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302 }, - { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154 }, - { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223 }, - { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542 }, - { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164 }, - { url = 
"https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611 }, - { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591 }, - { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338 }, - { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309 }, - { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679 }, - { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428 }, - { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361 }, - { url = 
"https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523 }, - { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660 }, - { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597 }, - { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527 }, - { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446 }, - { url = "https://files.pythonhosted.org/packages/65/d8/b7a1db13636d7fb7d4ff431593c510c8b8fca920ade06ca8ef20015493c5/PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d", size = 184777 }, - { url = "https://files.pythonhosted.org/packages/0a/02/6ec546cd45143fdf9840b2c6be8d875116a64076218b61d68e12548e5839/PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f", size = 172318 }, - { url = 
"https://files.pythonhosted.org/packages/0e/9a/8cc68be846c972bda34f6c2a93abb644fb2476f4dcc924d52175786932c9/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290", size = 720891 }, - { url = "https://files.pythonhosted.org/packages/e9/6c/6e1b7f40181bc4805e2e07f4abc10a88ce4648e7e95ff1abe4ae4014a9b2/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12", size = 722614 }, - { url = "https://files.pythonhosted.org/packages/3d/32/e7bd8535d22ea2874cef6a81021ba019474ace0d13a4819c2a4bce79bd6a/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19", size = 737360 }, - { url = "https://files.pythonhosted.org/packages/d7/12/7322c1e30b9be969670b672573d45479edef72c9a0deac3bb2868f5d7469/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e", size = 699006 }, - { url = "https://files.pythonhosted.org/packages/82/72/04fcad41ca56491995076630c3ec1e834be241664c0c09a64c9a2589b507/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725", size = 723577 }, - { url = "https://files.pythonhosted.org/packages/ed/5e/46168b1f2757f1fcd442bc3029cd8767d88a98c9c05770d8b420948743bb/PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631", size = 144593 }, - { url = "https://files.pythonhosted.org/packages/19/87/5124b1c1f2412bb95c59ec481eaf936cd32f0fe2a7b16b97b81c4c017a6a/PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8", size = 162312 }, -] - -[[package]] -name = "requests" -version = "2.32.4" -source = { registry = 
"https://pypi.org/simple" } -dependencies = [ - { name = "certifi" }, - { name = "charset-normalizer" }, - { name = "idna" }, - { name = "urllib3" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/e1/0a/929373653770d8a0d7ea76c37de6e41f11eb07559b103b1c02cafb3f7cf8/requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422", size = 135258 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7c/e4/56027c4a6b4ae70ca9de302488c5ca95ad4a39e190093d6c1a8ace08341b/requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c", size = 64847 }, -] - -[[package]] -name = "rpy2" -version = "3.6.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "packaging", marker = "sys_platform == 'win32'" }, - { name = "rpy2-rinterface" }, - { name = "rpy2-robjects" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/6a/ca/189761c689faf6f4168703f0d4cf340c09eeee4d70c07c4c493724236b7c/rpy2-3.6.1.tar.gz", hash = "sha256:9f7409f254c359618839bc937859a5078d9d94ebceb48dfd1192d442aeffb350", size = 52749 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e1/be/77c01409c3d4c275c283a69e64bca3ce8e2a7abcb6fa0edd41f991048034/rpy2-3.6.1-py3-none-any.whl", hash = "sha256:f8f2a4979330c0daf6227f0c4de434234f13976f20dfa9e6f40020b98f4567a3", size = 9895 }, -] - -[[package]] -name = "rpy2-rinterface" -version = "3.6.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "cffi" }, - { name = "packaging", marker = "sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b1/63/912a5e2939faed541adb6575b91b536d521ef8c4b20860e61f7150393bfd/rpy2_rinterface-3.6.1.tar.gz", hash = "sha256:9d0fbbc5852081229b6a6cb223d6b6f0e1f07d50ea98ec308ba7d32e71099471", size = 78847 } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/f2/14/cb78d9e41419017213b03d5b4f3377077d652fd1c0f9656e5850db645198/rpy2_rinterface-3.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:230dc380b2b8407633313dcf7c057257ce2e1bb1bba5aa7f1199f079d47cf2c4", size = 172550 }, - { url = "https://files.pythonhosted.org/packages/85/8f/aa7f2328956932c51724fdba7f9be5788f5913e0a4e631b13ed6c58d468f/rpy2_rinterface-3.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:9ef7f453eab4f0b5e82adf19ba36c6209e2210c0b1af7a352e2f56a7d509d026", size = 173694 }, - { url = "https://files.pythonhosted.org/packages/c1/22/4d8fd346ddffee4b37d192da685800fbf553bc96bbddb924f7e37aae043a/rpy2_rinterface-3.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:833a7949cbb315aa5e2a28953e6c7fdca507139346bab12421a6355fb0883491", size = 172541 }, - { url = "https://files.pythonhosted.org/packages/1a/3f/a2cb8521cf2974f8485dccb72554802e33f8b7aad1e8742ac842a2704d22/rpy2_rinterface-3.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:e859959d3f8626af01b0ffa065053bff10518a6930db5ece46cc4052e903a0d4", size = 173690 }, - { url = "https://files.pythonhosted.org/packages/97/4a/bafe1c757d273aa79fca82572377808f32d6daceedf5a130af4997943fa7/rpy2_rinterface-3.6.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f1545e52aa0cd3b9e728c45e7c576a745041cd991b82cd7bcb53aebb7be8c5b8", size = 172660 }, - { url = "https://files.pythonhosted.org/packages/d5/2c/03dee13b1bbcb9f4c830464ad69b84c29e6c8bfd708afdab3c38dfbcf7b0/rpy2_rinterface-3.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:90ce63127c84b6532f97ab4251242ac69284d536eb30298b8541588f9726740b", size = 174034 }, - { url = "https://files.pythonhosted.org/packages/aa/16/656131ffe407677fa1168a273fb8fa44377bd57f922309efe5ac8bd21306/rpy2_rinterface-3.6.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6ddb0c263cb5a13dce7e28e07ffa72f76c2fecc75cdb5b4c8a3872e3804343ed", size = 172664 }, - { url = 
"https://files.pythonhosted.org/packages/1a/36/e3d0e013af09a422c51e004e748e198335252472ce8b7535d585ab162b92/rpy2_rinterface-3.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:1b011a6695bf1efe36d6e1f983d1faafa87aa3a9a09689891a9609c2682b98c7", size = 174036 }, - { url = "https://files.pythonhosted.org/packages/02/c0/8496fd4fed78b47ca883090a0d9799876cfeb615b34a06eea9418c01bdae/rpy2_rinterface-3.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a3fc6971fd96ac7d8f65e9b5c8b9971cfa4add7af3a213eb1c68e0abcd4c677b", size = 172537 }, - { url = "https://files.pythonhosted.org/packages/36/69/a745227e9059722adde236992839676eedc6b258686a3ab1b62b4e744bbf/rpy2_rinterface-3.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:a01da5a3c055ba21cc267153fe021e3c4933dd7acc9b32b12e915c524fefb144", size = 173686 }, -] - -[[package]] -name = "rpy2-robjects" -version = "3.6.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "jinja2" }, - { name = "packaging", marker = "sys_platform == 'win32'" }, - { name = "rpy2-rinterface" }, - { name = "tzlocal" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c2/a5/3f0187417b2bde373258e2ef4e0a9296ef7e049278a2164c363797adc585/rpy2_robjects-3.6.1.tar.gz", hash = "sha256:934a7b58873531eeae4b44557f9fadb6680a0ac85d6f042afd30296b04b6c47a", size = 105374 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ac/3b/9bc0f0281b29f943f9d9fa7f2db07fb6cf4a6e35e39a8b8f0a612cfc3f5c/rpy2_robjects-3.6.1-py3-none-any.whl", hash = "sha256:8699e4554391bcaac5fb32d39828f90d9fb5c0b00236da74bb874da587e92bc7", size = 125387 }, -] - -[[package]] -name = "ruff" -version = "0.14.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9e/58/6ca66896635352812de66f71cdf9ff86b3a4f79071ca5730088c0cd0fc8d/ruff-0.14.1.tar.gz", hash = "sha256:1dd86253060c4772867c61791588627320abcb6ed1577a90ef432ee319729b69", size = 5513429 } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/8d/39/9cc5ab181478d7a18adc1c1e051a84ee02bec94eb9bdfd35643d7c74ca31/ruff-0.14.1-py3-none-linux_armv6l.whl", hash = "sha256:083bfc1f30f4a391ae09c6f4f99d83074416b471775b59288956f5bc18e82f8b", size = 12445415 }, - { url = "https://files.pythonhosted.org/packages/ef/2e/1226961855ccd697255988f5a2474890ac7c5863b080b15bd038df820818/ruff-0.14.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:f6fa757cd717f791009f7669fefb09121cc5f7d9bd0ef211371fad68c2b8b224", size = 12784267 }, - { url = "https://files.pythonhosted.org/packages/c1/ea/fd9e95863124ed159cd0667ec98449ae461de94acda7101f1acb6066da00/ruff-0.14.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d6191903d39ac156921398e9c86b7354d15e3c93772e7dbf26c9fcae59ceccd5", size = 11781872 }, - { url = "https://files.pythonhosted.org/packages/1e/5a/e890f7338ff537dba4589a5e02c51baa63020acfb7c8cbbaea4831562c96/ruff-0.14.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed04f0e04f7a4587244e5c9d7df50e6b5bf2705d75059f409a6421c593a35896", size = 12226558 }, - { url = "https://files.pythonhosted.org/packages/a6/7a/8ab5c3377f5bf31e167b73651841217542bcc7aa1c19e83030835cc25204/ruff-0.14.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5c9e6cf6cd4acae0febbce29497accd3632fe2025c0c583c8b87e8dbdeae5f61", size = 12187898 }, - { url = "https://files.pythonhosted.org/packages/48/8d/ba7c33aa55406955fc124e62c8259791c3d42e3075a71710fdff9375134f/ruff-0.14.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6fa2458527794ecdfbe45f654e42c61f2503a230545a91af839653a0a93dbc6", size = 12939168 }, - { url = "https://files.pythonhosted.org/packages/b4/c2/70783f612b50f66d083380e68cbd1696739d88e9b4f6164230375532c637/ruff-0.14.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:39f1c392244e338b21d42ab29b8a6392a722c5090032eb49bb4d6defcdb34345", size = 14386942 }, - { url = 
"https://files.pythonhosted.org/packages/48/44/cd7abb9c776b66d332119d67f96acf15830d120f5b884598a36d9d3f4d83/ruff-0.14.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7382fa12a26cce1f95070ce450946bec357727aaa428983036362579eadcc5cf", size = 13990622 }, - { url = "https://files.pythonhosted.org/packages/eb/56/4259b696db12ac152fe472764b4f78bbdd9b477afd9bc3a6d53c01300b37/ruff-0.14.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd0bf2be3ae8521e1093a487c4aa3b455882f139787770698530d28ed3fbb37c", size = 13431143 }, - { url = "https://files.pythonhosted.org/packages/e0/35/266a80d0eb97bd224b3265b9437bd89dde0dcf4faf299db1212e81824e7e/ruff-0.14.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cabcaa9ccf8089fb4fdb78d17cc0e28241520f50f4c2e88cb6261ed083d85151", size = 13132844 }, - { url = "https://files.pythonhosted.org/packages/65/6e/d31ce218acc11a8d91ef208e002a31acf315061a85132f94f3df7a252b18/ruff-0.14.1-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:747d583400f6125ec11a4c14d1c8474bf75d8b419ad22a111a537ec1a952d192", size = 13401241 }, - { url = "https://files.pythonhosted.org/packages/9f/b5/dbc4221bf0b03774b3b2f0d47f39e848d30664157c15b965a14d890637d2/ruff-0.14.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5a6e74c0efd78515a1d13acbfe6c90f0f5bd822aa56b4a6d43a9ffb2ae6e56cd", size = 12132476 }, - { url = "https://files.pythonhosted.org/packages/98/4b/ac99194e790ccd092d6a8b5f341f34b6e597d698e3077c032c502d75ea84/ruff-0.14.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:0ea6a864d2fb41a4b6d5b456ed164302a0d96f4daac630aeba829abfb059d020", size = 12139749 }, - { url = "https://files.pythonhosted.org/packages/47/26/7df917462c3bb5004e6fdfcc505a49e90bcd8a34c54a051953118c00b53a/ruff-0.14.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:0826b8764f94229604fa255918d1cc45e583e38c21c203248b0bfc9a0e930be5", size = 12544758 }, - { url = 
"https://files.pythonhosted.org/packages/64/d0/81e7f0648e9764ad9b51dd4be5e5dac3fcfff9602428ccbae288a39c2c22/ruff-0.14.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cbc52160465913a1a3f424c81c62ac8096b6a491468e7d872cb9444a860bc33d", size = 13221811 }, - { url = "https://files.pythonhosted.org/packages/c3/07/3c45562c67933cc35f6d5df4ca77dabbcd88fddaca0d6b8371693d29fd56/ruff-0.14.1-py3-none-win32.whl", hash = "sha256:e037ea374aaaff4103240ae79168c0945ae3d5ae8db190603de3b4012bd1def6", size = 12319467 }, - { url = "https://files.pythonhosted.org/packages/02/88/0ee4ca507d4aa05f67e292d2e5eb0b3e358fbcfe527554a2eda9ac422d6b/ruff-0.14.1-py3-none-win_amd64.whl", hash = "sha256:59d599cdff9c7f925a017f6f2c256c908b094e55967f93f2821b1439928746a1", size = 13401123 }, - { url = "https://files.pythonhosted.org/packages/b8/81/4b6387be7014858d924b843530e1b2a8e531846807516e9bea2ee0936bf7/ruff-0.14.1-py3-none-win_arm64.whl", hash = "sha256:e3b443c4c9f16ae850906b8d0a707b2a4c16f8d2f0a7fe65c475c5886665ce44", size = 12436636 }, -] - -[[package]] -name = "sas7bdat" -version = "2.2.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "six" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/4d/7b/d41d280b12c776e67c840bace0e5f02acda6adb13cd1fbb04293ece0bc3c/sas7bdat-2.2.3.tar.gz", hash = "sha256:484c609d962442203c15bc719a638de992a23cd13bc1971a5af6dfb0daf9f797", size = 16053 } - -[[package]] -name = "scipy" -version = "1.13.1" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -dependencies = [ - { name = "numpy", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ae/00/48c2f661e2816ccf2ecd77982f6605b2950afe60f60a52b4cbbc2504aa8f/scipy-1.13.1.tar.gz", hash = "sha256:095a87a0312b08dfd6a6155cbbd310a8c51800fc931b8c0b84003014b874ed3c", size = 57210720 } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/33/59/41b2529908c002ade869623b87eecff3e11e3ce62e996d0bdcb536984187/scipy-1.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:20335853b85e9a49ff7572ab453794298bcf0354d8068c5f6775a0eabf350aca", size = 39328076 }, - { url = "https://files.pythonhosted.org/packages/d5/33/f1307601f492f764062ce7dd471a14750f3360e33cd0f8c614dae208492c/scipy-1.13.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:d605e9c23906d1994f55ace80e0125c587f96c020037ea6aa98d01b4bd2e222f", size = 30306232 }, - { url = "https://files.pythonhosted.org/packages/c0/66/9cd4f501dd5ea03e4a4572ecd874936d0da296bd04d1c45ae1a4a75d9c3a/scipy-1.13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfa31f1def5c819b19ecc3a8b52d28ffdcc7ed52bb20c9a7589669dd3c250989", size = 33743202 }, - { url = "https://files.pythonhosted.org/packages/a3/ba/7255e5dc82a65adbe83771c72f384d99c43063648456796436c9a5585ec3/scipy-1.13.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26264b282b9da0952a024ae34710c2aff7d27480ee91a2e82b7b7073c24722f", size = 38577335 }, - { url = "https://files.pythonhosted.org/packages/49/a5/bb9ded8326e9f0cdfdc412eeda1054b914dfea952bda2097d174f8832cc0/scipy-1.13.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:eccfa1906eacc02de42d70ef4aecea45415f5be17e72b61bafcfd329bdc52e94", size = 38820728 }, - { url = "https://files.pythonhosted.org/packages/12/30/df7a8fcc08f9b4a83f5f27cfaaa7d43f9a2d2ad0b6562cced433e5b04e31/scipy-1.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:2831f0dc9c5ea9edd6e51e6e769b655f08ec6db6e2e10f86ef39bd32eb11da54", size = 46210588 }, - { url = "https://files.pythonhosted.org/packages/b4/15/4a4bb1b15bbd2cd2786c4f46e76b871b28799b67891f23f455323a0cdcfb/scipy-1.13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:27e52b09c0d3a1d5b63e1105f24177e544a222b43611aaf5bc44d4a0979e32f9", size = 39333805 }, - { url = 
"https://files.pythonhosted.org/packages/ba/92/42476de1af309c27710004f5cdebc27bec62c204db42e05b23a302cb0c9a/scipy-1.13.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:54f430b00f0133e2224c3ba42b805bfd0086fe488835effa33fa291561932326", size = 30317687 }, - { url = "https://files.pythonhosted.org/packages/80/ba/8be64fe225360a4beb6840f3cbee494c107c0887f33350d0a47d55400b01/scipy-1.13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e89369d27f9e7b0884ae559a3a956e77c02114cc60a6058b4e5011572eea9299", size = 33694638 }, - { url = "https://files.pythonhosted.org/packages/36/07/035d22ff9795129c5a847c64cb43c1fa9188826b59344fee28a3ab02e283/scipy-1.13.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a78b4b3345f1b6f68a763c6e25c0c9a23a9fd0f39f5f3d200efe8feda560a5fa", size = 38569931 }, - { url = "https://files.pythonhosted.org/packages/d9/10/f9b43de37e5ed91facc0cfff31d45ed0104f359e4f9a68416cbf4e790241/scipy-1.13.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45484bee6d65633752c490404513b9ef02475b4284c4cfab0ef946def50b3f59", size = 38838145 }, - { url = "https://files.pythonhosted.org/packages/4a/48/4513a1a5623a23e95f94abd675ed91cfb19989c58e9f6f7d03990f6caf3d/scipy-1.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:5713f62f781eebd8d597eb3f88b8bf9274e79eeabf63afb4a737abc6c84ad37b", size = 46196227 }, - { url = "https://files.pythonhosted.org/packages/f2/7b/fb6b46fbee30fc7051913068758414f2721003a89dd9a707ad49174e3843/scipy-1.13.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5d72782f39716b2b3509cd7c33cdc08c96f2f4d2b06d51e52fb45a19ca0c86a1", size = 39357301 }, - { url = "https://files.pythonhosted.org/packages/dc/5a/2043a3bde1443d94014aaa41e0b50c39d046dda8360abd3b2a1d3f79907d/scipy-1.13.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:017367484ce5498445aade74b1d5ab377acdc65e27095155e448c88497755a5d", size = 30363348 }, - { url = 
"https://files.pythonhosted.org/packages/e7/cb/26e4a47364bbfdb3b7fb3363be6d8a1c543bcd70a7753ab397350f5f189a/scipy-1.13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:949ae67db5fa78a86e8fa644b9a6b07252f449dcf74247108c50e1d20d2b4627", size = 33406062 }, - { url = "https://files.pythonhosted.org/packages/88/ab/6ecdc526d509d33814835447bbbeedbebdec7cca46ef495a61b00a35b4bf/scipy-1.13.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de3ade0e53bc1f21358aa74ff4830235d716211d7d077e340c7349bc3542e884", size = 38218311 }, - { url = "https://files.pythonhosted.org/packages/0b/00/9f54554f0f8318100a71515122d8f4f503b1a2c4b4cfab3b4b68c0eb08fa/scipy-1.13.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2ac65fb503dad64218c228e2dc2d0a0193f7904747db43014645ae139c8fad16", size = 38442493 }, - { url = "https://files.pythonhosted.org/packages/3e/df/963384e90733e08eac978cd103c34df181d1fec424de383cdc443f418dd4/scipy-1.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:cdd7dacfb95fea358916410ec61bbc20440f7860333aee6d882bb8046264e949", size = 45910955 }, - { url = "https://files.pythonhosted.org/packages/7f/29/c2ea58c9731b9ecb30b6738113a95d147e83922986b34c685b8f6eefde21/scipy-1.13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:436bbb42a94a8aeef855d755ce5a465479c721e9d684de76bf61a62e7c2b81d5", size = 39352927 }, - { url = "https://files.pythonhosted.org/packages/5c/c0/e71b94b20ccf9effb38d7147c0064c08c622309fd487b1b677771a97d18c/scipy-1.13.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:8335549ebbca860c52bf3d02f80784e91a004b71b059e3eea9678ba994796a24", size = 30324538 }, - { url = "https://files.pythonhosted.org/packages/6d/0f/aaa55b06d474817cea311e7b10aab2ea1fd5d43bc6a2861ccc9caec9f418/scipy-1.13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d533654b7d221a6a97304ab63c41c96473ff04459e404b83275b60aa8f4b7004", size = 33732190 }, - { url = 
"https://files.pythonhosted.org/packages/35/f5/d0ad1a96f80962ba65e2ce1de6a1e59edecd1f0a7b55990ed208848012e0/scipy-1.13.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:637e98dcf185ba7f8e663e122ebf908c4702420477ae52a04f9908707456ba4d", size = 38612244 }, - { url = "https://files.pythonhosted.org/packages/8d/02/1165905f14962174e6569076bcc3315809ae1291ed14de6448cc151eedfd/scipy-1.13.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a014c2b3697bde71724244f63de2476925596c24285c7a637364761f8710891c", size = 38845637 }, - { url = "https://files.pythonhosted.org/packages/3e/77/dab54fe647a08ee4253963bcd8f9cf17509c8ca64d6335141422fe2e2114/scipy-1.13.1-cp39-cp39-win_amd64.whl", hash = "sha256:392e4ec766654852c25ebad4f64e4e584cf19820b980bc04960bca0b0cd6eaa2", size = 46227440 }, -] - -[[package]] -name = "scipy" -version = "1.15.3" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version == '3.10.*'", -] -dependencies = [ - { name = "numpy", marker = "python_full_version == '3.10.*'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/0f/37/6964b830433e654ec7485e45a00fc9a27cf868d622838f6b6d9c5ec0d532/scipy-1.15.3.tar.gz", hash = "sha256:eae3cf522bc7df64b42cad3925c876e1b0b6c35c1337c93e12c0f366f55b0eaf", size = 59419214 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/78/2f/4966032c5f8cc7e6a60f1b2e0ad686293b9474b65246b0c642e3ef3badd0/scipy-1.15.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:a345928c86d535060c9c2b25e71e87c39ab2f22fc96e9636bd74d1dbf9de448c", size = 38702770 }, - { url = "https://files.pythonhosted.org/packages/a0/6e/0c3bf90fae0e910c274db43304ebe25a6b391327f3f10b5dcc638c090795/scipy-1.15.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:ad3432cb0f9ed87477a8d97f03b763fd1d57709f1bbde3c9369b1dff5503b253", size = 30094511 }, - { url = 
"https://files.pythonhosted.org/packages/ea/b1/4deb37252311c1acff7f101f6453f0440794f51b6eacb1aad4459a134081/scipy-1.15.3-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:aef683a9ae6eb00728a542b796f52a5477b78252edede72b8327a886ab63293f", size = 22368151 }, - { url = "https://files.pythonhosted.org/packages/38/7d/f457626e3cd3c29b3a49ca115a304cebb8cc6f31b04678f03b216899d3c6/scipy-1.15.3-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:1c832e1bd78dea67d5c16f786681b28dd695a8cb1fb90af2e27580d3d0967e92", size = 25121732 }, - { url = "https://files.pythonhosted.org/packages/db/0a/92b1de4a7adc7a15dcf5bddc6e191f6f29ee663b30511ce20467ef9b82e4/scipy-1.15.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:263961f658ce2165bbd7b99fa5135195c3a12d9bef045345016b8b50c315cb82", size = 35547617 }, - { url = "https://files.pythonhosted.org/packages/8e/6d/41991e503e51fc1134502694c5fa7a1671501a17ffa12716a4a9151af3df/scipy-1.15.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e2abc762b0811e09a0d3258abee2d98e0c703eee49464ce0069590846f31d40", size = 37662964 }, - { url = "https://files.pythonhosted.org/packages/25/e1/3df8f83cb15f3500478c889be8fb18700813b95e9e087328230b98d547ff/scipy-1.15.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ed7284b21a7a0c8f1b6e5977ac05396c0d008b89e05498c8b7e8f4a1423bba0e", size = 37238749 }, - { url = "https://files.pythonhosted.org/packages/93/3e/b3257cf446f2a3533ed7809757039016b74cd6f38271de91682aa844cfc5/scipy-1.15.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5380741e53df2c566f4d234b100a484b420af85deb39ea35a1cc1be84ff53a5c", size = 40022383 }, - { url = "https://files.pythonhosted.org/packages/d1/84/55bc4881973d3f79b479a5a2e2df61c8c9a04fcb986a213ac9c02cfb659b/scipy-1.15.3-cp310-cp310-win_amd64.whl", hash = "sha256:9d61e97b186a57350f6d6fd72640f9e99d5a4a2b8fbf4b9ee9a841eab327dc13", size = 41259201 }, - { url = 
"https://files.pythonhosted.org/packages/96/ab/5cc9f80f28f6a7dff646c5756e559823614a42b1939d86dd0ed550470210/scipy-1.15.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:993439ce220d25e3696d1b23b233dd010169b62f6456488567e830654ee37a6b", size = 38714255 }, - { url = "https://files.pythonhosted.org/packages/4a/4a/66ba30abe5ad1a3ad15bfb0b59d22174012e8056ff448cb1644deccbfed2/scipy-1.15.3-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:34716e281f181a02341ddeaad584205bd2fd3c242063bd3423d61ac259ca7eba", size = 30111035 }, - { url = "https://files.pythonhosted.org/packages/4b/fa/a7e5b95afd80d24313307f03624acc65801846fa75599034f8ceb9e2cbf6/scipy-1.15.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:3b0334816afb8b91dab859281b1b9786934392aa3d527cd847e41bb6f45bee65", size = 22384499 }, - { url = "https://files.pythonhosted.org/packages/17/99/f3aaddccf3588bb4aea70ba35328c204cadd89517a1612ecfda5b2dd9d7a/scipy-1.15.3-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:6db907c7368e3092e24919b5e31c76998b0ce1684d51a90943cb0ed1b4ffd6c1", size = 25152602 }, - { url = "https://files.pythonhosted.org/packages/56/c5/1032cdb565f146109212153339f9cb8b993701e9fe56b1c97699eee12586/scipy-1.15.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:721d6b4ef5dc82ca8968c25b111e307083d7ca9091bc38163fb89243e85e3889", size = 35503415 }, - { url = "https://files.pythonhosted.org/packages/bd/37/89f19c8c05505d0601ed5650156e50eb881ae3918786c8fd7262b4ee66d3/scipy-1.15.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39cb9c62e471b1bb3750066ecc3a3f3052b37751c7c3dfd0fd7e48900ed52982", size = 37652622 }, - { url = "https://files.pythonhosted.org/packages/7e/31/be59513aa9695519b18e1851bb9e487de66f2d31f835201f1b42f5d4d475/scipy-1.15.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:795c46999bae845966368a3c013e0e00947932d68e235702b5c3f6ea799aa8c9", size = 37244796 }, - { url = 
"https://files.pythonhosted.org/packages/10/c0/4f5f3eeccc235632aab79b27a74a9130c6c35df358129f7ac8b29f562ac7/scipy-1.15.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:18aaacb735ab38b38db42cb01f6b92a2d0d4b6aabefeb07f02849e47f8fb3594", size = 40047684 }, - { url = "https://files.pythonhosted.org/packages/ab/a7/0ddaf514ce8a8714f6ed243a2b391b41dbb65251affe21ee3077ec45ea9a/scipy-1.15.3-cp311-cp311-win_amd64.whl", hash = "sha256:ae48a786a28412d744c62fd7816a4118ef97e5be0bee968ce8f0a2fba7acf3bb", size = 41246504 }, - { url = "https://files.pythonhosted.org/packages/37/4b/683aa044c4162e10ed7a7ea30527f2cbd92e6999c10a8ed8edb253836e9c/scipy-1.15.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6ac6310fdbfb7aa6612408bd2f07295bcbd3fda00d2d702178434751fe48e019", size = 38766735 }, - { url = "https://files.pythonhosted.org/packages/7b/7e/f30be3d03de07f25dc0ec926d1681fed5c732d759ac8f51079708c79e680/scipy-1.15.3-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:185cd3d6d05ca4b44a8f1595af87f9c372bb6acf9c808e99aa3e9aa03bd98cf6", size = 30173284 }, - { url = "https://files.pythonhosted.org/packages/07/9c/0ddb0d0abdabe0d181c1793db51f02cd59e4901da6f9f7848e1f96759f0d/scipy-1.15.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:05dc6abcd105e1a29f95eada46d4a3f251743cfd7d3ae8ddb4088047f24ea477", size = 22446958 }, - { url = "https://files.pythonhosted.org/packages/af/43/0bce905a965f36c58ff80d8bea33f1f9351b05fad4beaad4eae34699b7a1/scipy-1.15.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:06efcba926324df1696931a57a176c80848ccd67ce6ad020c810736bfd58eb1c", size = 25242454 }, - { url = "https://files.pythonhosted.org/packages/56/30/a6f08f84ee5b7b28b4c597aca4cbe545535c39fe911845a96414700b64ba/scipy-1.15.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c05045d8b9bfd807ee1b9f38761993297b10b245f012b11b13b91ba8945f7e45", size = 35210199 }, - { url = 
"https://files.pythonhosted.org/packages/0b/1f/03f52c282437a168ee2c7c14a1a0d0781a9a4a8962d84ac05c06b4c5b555/scipy-1.15.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:271e3713e645149ea5ea3e97b57fdab61ce61333f97cfae392c28ba786f9bb49", size = 37309455 }, - { url = "https://files.pythonhosted.org/packages/89/b1/fbb53137f42c4bf630b1ffdfc2151a62d1d1b903b249f030d2b1c0280af8/scipy-1.15.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6cfd56fc1a8e53f6e89ba3a7a7251f7396412d655bca2aa5611c8ec9a6784a1e", size = 36885140 }, - { url = "https://files.pythonhosted.org/packages/2e/2e/025e39e339f5090df1ff266d021892694dbb7e63568edcfe43f892fa381d/scipy-1.15.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0ff17c0bb1cb32952c09217d8d1eed9b53d1463e5f1dd6052c7857f83127d539", size = 39710549 }, - { url = "https://files.pythonhosted.org/packages/e6/eb/3bf6ea8ab7f1503dca3a10df2e4b9c3f6b3316df07f6c0ded94b281c7101/scipy-1.15.3-cp312-cp312-win_amd64.whl", hash = "sha256:52092bc0472cfd17df49ff17e70624345efece4e1a12b23783a1ac59a1b728ed", size = 40966184 }, - { url = "https://files.pythonhosted.org/packages/73/18/ec27848c9baae6e0d6573eda6e01a602e5649ee72c27c3a8aad673ebecfd/scipy-1.15.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2c620736bcc334782e24d173c0fdbb7590a0a436d2fdf39310a8902505008759", size = 38728256 }, - { url = "https://files.pythonhosted.org/packages/74/cd/1aef2184948728b4b6e21267d53b3339762c285a46a274ebb7863c9e4742/scipy-1.15.3-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:7e11270a000969409d37ed399585ee530b9ef6aa99d50c019de4cb01e8e54e62", size = 30109540 }, - { url = "https://files.pythonhosted.org/packages/5b/d8/59e452c0a255ec352bd0a833537a3bc1bfb679944c4938ab375b0a6b3a3e/scipy-1.15.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:8c9ed3ba2c8a2ce098163a9bdb26f891746d02136995df25227a20e71c396ebb", size = 22383115 }, - { url = 
"https://files.pythonhosted.org/packages/08/f5/456f56bbbfccf696263b47095291040655e3cbaf05d063bdc7c7517f32ac/scipy-1.15.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:0bdd905264c0c9cfa74a4772cdb2070171790381a5c4d312c973382fc6eaf730", size = 25163884 }, - { url = "https://files.pythonhosted.org/packages/a2/66/a9618b6a435a0f0c0b8a6d0a2efb32d4ec5a85f023c2b79d39512040355b/scipy-1.15.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79167bba085c31f38603e11a267d862957cbb3ce018d8b38f79ac043bc92d825", size = 35174018 }, - { url = "https://files.pythonhosted.org/packages/b5/09/c5b6734a50ad4882432b6bb7c02baf757f5b2f256041da5df242e2d7e6b6/scipy-1.15.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9deabd6d547aee2c9a81dee6cc96c6d7e9a9b1953f74850c179f91fdc729cb7", size = 37269716 }, - { url = "https://files.pythonhosted.org/packages/77/0a/eac00ff741f23bcabd352731ed9b8995a0a60ef57f5fd788d611d43d69a1/scipy-1.15.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:dde4fc32993071ac0c7dd2d82569e544f0bdaff66269cb475e0f369adad13f11", size = 36872342 }, - { url = "https://files.pythonhosted.org/packages/fe/54/4379be86dd74b6ad81551689107360d9a3e18f24d20767a2d5b9253a3f0a/scipy-1.15.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f77f853d584e72e874d87357ad70f44b437331507d1c311457bed8ed2b956126", size = 39670869 }, - { url = "https://files.pythonhosted.org/packages/87/2e/892ad2862ba54f084ffe8cc4a22667eaf9c2bcec6d2bff1d15713c6c0703/scipy-1.15.3-cp313-cp313-win_amd64.whl", hash = "sha256:b90ab29d0c37ec9bf55424c064312930ca5f4bde15ee8619ee44e69319aab163", size = 40988851 }, - { url = "https://files.pythonhosted.org/packages/1b/e9/7a879c137f7e55b30d75d90ce3eb468197646bc7b443ac036ae3fe109055/scipy-1.15.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3ac07623267feb3ae308487c260ac684b32ea35fd81e12845039952f558047b8", size = 38863011 }, - { url = 
"https://files.pythonhosted.org/packages/51/d1/226a806bbd69f62ce5ef5f3ffadc35286e9fbc802f606a07eb83bf2359de/scipy-1.15.3-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:6487aa99c2a3d509a5227d9a5e889ff05830a06b2ce08ec30df6d79db5fcd5c5", size = 30266407 }, - { url = "https://files.pythonhosted.org/packages/e5/9b/f32d1d6093ab9eeabbd839b0f7619c62e46cc4b7b6dbf05b6e615bbd4400/scipy-1.15.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:50f9e62461c95d933d5c5ef4a1f2ebf9a2b4e83b0db374cb3f1de104d935922e", size = 22540030 }, - { url = "https://files.pythonhosted.org/packages/e7/29/c278f699b095c1a884f29fda126340fcc201461ee8bfea5c8bdb1c7c958b/scipy-1.15.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:14ed70039d182f411ffc74789a16df3835e05dc469b898233a245cdfd7f162cb", size = 25218709 }, - { url = "https://files.pythonhosted.org/packages/24/18/9e5374b617aba742a990581373cd6b68a2945d65cc588482749ef2e64467/scipy-1.15.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a769105537aa07a69468a0eefcd121be52006db61cdd8cac8a0e68980bbb723", size = 34809045 }, - { url = "https://files.pythonhosted.org/packages/e1/fe/9c4361e7ba2927074360856db6135ef4904d505e9b3afbbcb073c4008328/scipy-1.15.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9db984639887e3dffb3928d118145ffe40eff2fa40cb241a306ec57c219ebbbb", size = 36703062 }, - { url = "https://files.pythonhosted.org/packages/b7/8e/038ccfe29d272b30086b25a4960f757f97122cb2ec42e62b460d02fe98e9/scipy-1.15.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:40e54d5c7e7ebf1aa596c374c49fa3135f04648a0caabcb66c52884b943f02b4", size = 36393132 }, - { url = "https://files.pythonhosted.org/packages/10/7e/5c12285452970be5bdbe8352c619250b97ebf7917d7a9a9e96b8a8140f17/scipy-1.15.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5e721fed53187e71d0ccf382b6bf977644c533e506c4d33c3fb24de89f5c3ed5", size = 38979503 }, - { url = 
"https://files.pythonhosted.org/packages/81/06/0a5e5349474e1cbc5757975b21bd4fad0e72ebf138c5592f191646154e06/scipy-1.15.3-cp313-cp313t-win_amd64.whl", hash = "sha256:76ad1fb5f8752eabf0fa02e4cc0336b4e8f021e2d5f061ed37d6d264db35e3ca", size = 40308097 }, -] - -[[package]] -name = "scipy" -version = "1.16.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.12'", - "python_full_version == '3.11.*'", -] -dependencies = [ - { name = "numpy", marker = "python_full_version >= '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/81/18/b06a83f0c5ee8cddbde5e3f3d0bb9b702abfa5136ef6d4620ff67df7eee5/scipy-1.16.0.tar.gz", hash = "sha256:b5ef54021e832869c8cfb03bc3bf20366cbcd426e02a58e8a58d7584dfbb8f62", size = 30581216 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d9/f8/53fc4884df6b88afd5f5f00240bdc49fee2999c7eff3acf5953eb15bc6f8/scipy-1.16.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:deec06d831b8f6b5fb0b652433be6a09db29e996368ce5911faf673e78d20085", size = 36447362 }, - { url = "https://files.pythonhosted.org/packages/c9/25/fad8aa228fa828705142a275fc593d701b1817c98361a2d6b526167d07bc/scipy-1.16.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:d30c0fe579bb901c61ab4bb7f3eeb7281f0d4c4a7b52dbf563c89da4fd2949be", size = 28547120 }, - { url = "https://files.pythonhosted.org/packages/8d/be/d324ddf6b89fd1c32fecc307f04d095ce84abb52d2e88fab29d0cd8dc7a8/scipy-1.16.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:b2243561b45257f7391d0f49972fca90d46b79b8dbcb9b2cb0f9df928d370ad4", size = 20818922 }, - { url = "https://files.pythonhosted.org/packages/cd/e0/cf3f39e399ac83fd0f3ba81ccc5438baba7cfe02176be0da55ff3396f126/scipy-1.16.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:e6d7dfc148135e9712d87c5f7e4f2ddc1304d1582cb3a7d698bbadedb61c7afd", size = 23409695 }, - { url = 
"https://files.pythonhosted.org/packages/5b/61/d92714489c511d3ffd6830ac0eb7f74f243679119eed8b9048e56b9525a1/scipy-1.16.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:90452f6a9f3fe5a2cf3748e7be14f9cc7d9b124dce19667b54f5b429d680d539", size = 33444586 }, - { url = "https://files.pythonhosted.org/packages/af/2c/40108915fd340c830aee332bb85a9160f99e90893e58008b659b9f3dddc0/scipy-1.16.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a2f0bf2f58031c8701a8b601df41701d2a7be17c7ffac0a4816aeba89c4cdac8", size = 35284126 }, - { url = "https://files.pythonhosted.org/packages/d3/30/e9eb0ad3d0858df35d6c703cba0a7e16a18a56a9e6b211d861fc6f261c5f/scipy-1.16.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6c4abb4c11fc0b857474241b812ce69ffa6464b4bd8f4ecb786cf240367a36a7", size = 35608257 }, - { url = "https://files.pythonhosted.org/packages/c8/ff/950ee3e0d612b375110d8cda211c1f787764b4c75e418a4b71f4a5b1e07f/scipy-1.16.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b370f8f6ac6ef99815b0d5c9f02e7ade77b33007d74802efc8316c8db98fd11e", size = 38040541 }, - { url = "https://files.pythonhosted.org/packages/8b/c9/750d34788288d64ffbc94fdb4562f40f609d3f5ef27ab4f3a4ad00c9033e/scipy-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:a16ba90847249bedce8aa404a83fb8334b825ec4a8e742ce6012a7a5e639f95c", size = 38570814 }, - { url = "https://files.pythonhosted.org/packages/01/c0/c943bc8d2bbd28123ad0f4f1eef62525fa1723e84d136b32965dcb6bad3a/scipy-1.16.0-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:7eb6bd33cef4afb9fa5f1fb25df8feeb1e52d94f21a44f1d17805b41b1da3180", size = 36459071 }, - { url = "https://files.pythonhosted.org/packages/99/0d/270e2e9f1a4db6ffbf84c9a0b648499842046e4e0d9b2275d150711b3aba/scipy-1.16.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:1dbc8fdba23e4d80394ddfab7a56808e3e6489176d559c6c71935b11a2d59db1", size = 28490500 }, - { url = 
"https://files.pythonhosted.org/packages/1c/22/01d7ddb07cff937d4326198ec8d10831367a708c3da72dfd9b7ceaf13028/scipy-1.16.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:7dcf42c380e1e3737b343dec21095c9a9ad3f9cbe06f9c05830b44b1786c9e90", size = 20762345 }, - { url = "https://files.pythonhosted.org/packages/34/7f/87fd69856569ccdd2a5873fe5d7b5bbf2ad9289d7311d6a3605ebde3a94b/scipy-1.16.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:26ec28675f4a9d41587266084c626b02899db373717d9312fa96ab17ca1ae94d", size = 23418563 }, - { url = "https://files.pythonhosted.org/packages/f6/f1/e4f4324fef7f54160ab749efbab6a4bf43678a9eb2e9817ed71a0a2fd8de/scipy-1.16.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:952358b7e58bd3197cfbd2f2f2ba829f258404bdf5db59514b515a8fe7a36c52", size = 33203951 }, - { url = "https://files.pythonhosted.org/packages/6d/f0/b6ac354a956384fd8abee2debbb624648125b298f2c4a7b4f0d6248048a5/scipy-1.16.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:03931b4e870c6fef5b5c0970d52c9f6ddd8c8d3e934a98f09308377eba6f3824", size = 35070225 }, - { url = "https://files.pythonhosted.org/packages/e5/73/5cbe4a3fd4bc3e2d67ffad02c88b83edc88f381b73ab982f48f3df1a7790/scipy-1.16.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:512c4f4f85912767c351a0306824ccca6fd91307a9f4318efe8fdbd9d30562ef", size = 35389070 }, - { url = "https://files.pythonhosted.org/packages/86/e8/a60da80ab9ed68b31ea5a9c6dfd3c2f199347429f229bf7f939a90d96383/scipy-1.16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e69f798847e9add03d512eaf5081a9a5c9a98757d12e52e6186ed9681247a1ac", size = 37825287 }, - { url = "https://files.pythonhosted.org/packages/ea/b5/29fece1a74c6a94247f8a6fb93f5b28b533338e9c34fdcc9cfe7a939a767/scipy-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:adf9b1999323ba335adc5d1dc7add4781cb5a4b0ef1e98b79768c05c796c4e49", size = 38431929 }, - { url = 
"https://files.pythonhosted.org/packages/46/95/0746417bc24be0c2a7b7563946d61f670a3b491b76adede420e9d173841f/scipy-1.16.0-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:e9f414cbe9ca289a73e0cc92e33a6a791469b6619c240aa32ee18abdce8ab451", size = 36418162 }, - { url = "https://files.pythonhosted.org/packages/19/5a/914355a74481b8e4bbccf67259bbde171348a3f160b67b4945fbc5f5c1e5/scipy-1.16.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:bbba55fb97ba3cdef9b1ee973f06b09d518c0c7c66a009c729c7d1592be1935e", size = 28465985 }, - { url = "https://files.pythonhosted.org/packages/58/46/63477fc1246063855969cbefdcee8c648ba4b17f67370bd542ba56368d0b/scipy-1.16.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:58e0d4354eacb6004e7aa1cd350e5514bd0270acaa8d5b36c0627bb3bb486974", size = 20737961 }, - { url = "https://files.pythonhosted.org/packages/93/86/0fbb5588b73555e40f9d3d6dde24ee6fac7d8e301a27f6f0cab9d8f66ff2/scipy-1.16.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:75b2094ec975c80efc273567436e16bb794660509c12c6a31eb5c195cbf4b6dc", size = 23377941 }, - { url = "https://files.pythonhosted.org/packages/ca/80/a561f2bf4c2da89fa631b3cbf31d120e21ea95db71fd9ec00cb0247c7a93/scipy-1.16.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6b65d232157a380fdd11a560e7e21cde34fdb69d65c09cb87f6cc024ee376351", size = 33196703 }, - { url = "https://files.pythonhosted.org/packages/11/6b/3443abcd0707d52e48eb315e33cc669a95e29fc102229919646f5a501171/scipy-1.16.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1d8747f7736accd39289943f7fe53a8333be7f15a82eea08e4afe47d79568c32", size = 35083410 }, - { url = "https://files.pythonhosted.org/packages/20/ab/eb0fc00e1e48961f1bd69b7ad7e7266896fe5bad4ead91b5fc6b3561bba4/scipy-1.16.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:eb9f147a1b8529bb7fec2a85cf4cf42bdfadf9e83535c309a11fdae598c88e8b", size = 35387829 }, - { url = 
"https://files.pythonhosted.org/packages/57/9e/d6fc64e41fad5d481c029ee5a49eefc17f0b8071d636a02ceee44d4a0de2/scipy-1.16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d2b83c37edbfa837a8923d19c749c1935ad3d41cf196006a24ed44dba2ec4358", size = 37841356 }, - { url = "https://files.pythonhosted.org/packages/7c/a7/4c94bbe91f12126b8bf6709b2471900577b7373a4fd1f431f28ba6f81115/scipy-1.16.0-cp313-cp313-win_amd64.whl", hash = "sha256:79a3c13d43c95aa80b87328a46031cf52508cf5f4df2767602c984ed1d3c6bbe", size = 38403710 }, - { url = "https://files.pythonhosted.org/packages/47/20/965da8497f6226e8fa90ad3447b82ed0e28d942532e92dd8b91b43f100d4/scipy-1.16.0-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:f91b87e1689f0370690e8470916fe1b2308e5b2061317ff76977c8f836452a47", size = 36813833 }, - { url = "https://files.pythonhosted.org/packages/28/f4/197580c3dac2d234e948806e164601c2df6f0078ed9f5ad4a62685b7c331/scipy-1.16.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:88a6ca658fb94640079e7a50b2ad3b67e33ef0f40e70bdb7dc22017dae73ac08", size = 28974431 }, - { url = "https://files.pythonhosted.org/packages/8a/fc/e18b8550048d9224426e76906694c60028dbdb65d28b1372b5503914b89d/scipy-1.16.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:ae902626972f1bd7e4e86f58fd72322d7f4ec7b0cfc17b15d4b7006efc385176", size = 21246454 }, - { url = "https://files.pythonhosted.org/packages/8c/48/07b97d167e0d6a324bfd7484cd0c209cc27338b67e5deadae578cf48e809/scipy-1.16.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:8cb824c1fc75ef29893bc32b3ddd7b11cf9ab13c1127fe26413a05953b8c32ed", size = 23772979 }, - { url = "https://files.pythonhosted.org/packages/4c/4f/9efbd3f70baf9582edf271db3002b7882c875ddd37dc97f0f675ad68679f/scipy-1.16.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:de2db7250ff6514366a9709c2cba35cb6d08498e961cba20d7cff98a7ee88938", size = 33341972 }, - { url = 
"https://files.pythonhosted.org/packages/3f/dc/9e496a3c5dbe24e76ee24525155ab7f659c20180bab058ef2c5fa7d9119c/scipy-1.16.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e85800274edf4db8dd2e4e93034f92d1b05c9421220e7ded9988b16976f849c1", size = 35185476 }, - { url = "https://files.pythonhosted.org/packages/ce/b3/21001cff985a122ba434c33f2c9d7d1dc3b669827e94f4fc4e1fe8b9dfd8/scipy-1.16.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4f720300a3024c237ace1cb11f9a84c38beb19616ba7c4cdcd771047a10a1706", size = 35570990 }, - { url = "https://files.pythonhosted.org/packages/e5/d3/7ba42647d6709251cdf97043d0c107e0317e152fa2f76873b656b509ff55/scipy-1.16.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:aad603e9339ddb676409b104c48a027e9916ce0d2838830691f39552b38a352e", size = 37950262 }, - { url = "https://files.pythonhosted.org/packages/eb/c4/231cac7a8385394ebbbb4f1ca662203e9d8c332825ab4f36ffc3ead09a42/scipy-1.16.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f56296fefca67ba605fd74d12f7bd23636267731a72cb3947963e76b8c0a25db", size = 38515076 }, -] - -[[package]] -name = "six" -version = "1.17.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050 }, -] - -[[package]] -name = "sortedcontainers" -version = "2.4.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = 
"sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575 }, -] - -[[package]] -name = "stack-data" -version = "0.6.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "asttokens" }, - { name = "executing" }, - { name = "pure-eval" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/28/e3/55dcc2cfbc3ca9c29519eb6884dd1415ecb53b0e934862d3559ddcb7e20b/stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9", size = 44707 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f1/7b/ce1eafaf1a76852e2ec9b22edecf1daa58175c090266e9f6c64afcd81d91/stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695", size = 24521 }, -] - -[[package]] -name = "strenum" -version = "0.4.15" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/85/ad/430fb60d90e1d112a62ff57bdd1f286ec73a2a0331272febfddd21f330e1/StrEnum-0.4.15.tar.gz", hash = "sha256:878fb5ab705442070e4dd1929bb5e2249511c0bcf2b0eeacf3bcd80875c82eff", size = 23384 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/81/69/297302c5f5f59c862faa31e6cb9a4cd74721cd1e052b38e464c5b402df8b/StrEnum-0.4.15-py3-none-any.whl", hash = "sha256:a30cda4af7cc6b5bf52c8055bc4bf4b2b6b14a93b574626da33df53cf7740659", size = 8851 }, -] - -[[package]] -name = "tables" -version = "3.9.2" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -dependencies = [ - { name = "blosc2", version = "2.5.1", source = { registry = "https://pypi.org/simple" }, marker = 
"python_full_version < '3.10'" }, - { name = "numexpr", version = "2.10.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "numpy", marker = "python_full_version < '3.10'" }, - { name = "packaging", marker = "python_full_version < '3.10'" }, - { name = "py-cpuinfo", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/31/83/8a13be8338219c3fe0aa7357d1ec4edb27bc346e0f224df7212892b243b5/tables-3.9.2.tar.gz", hash = "sha256:d470263c2e50c4b7c8635a0d99ac1ff2f9e704c24d71e5fa33c4529e7d0ad9c3", size = 4683437 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/17/95/dea498a083e1fbe60bf0ea999da38cb8e33b5506ba99237b5416267a9bc0/tables-3.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8a4e71fc9d2a3a0cacce4994afd47cd5f4797093ff9cee2cc7dc87e51f308107", size = 7392824 }, - { url = "https://files.pythonhosted.org/packages/49/b0/e424d4d409925cdf38689bb85f33e2a7c7bca310fb188de65af031c933b7/tables-3.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fbea426ce9bdd60cda435a265823b31d18f2b36e9045fb2d565679825a7aa46", size = 7113863 }, - { url = "https://files.pythonhosted.org/packages/22/86/5a474f002126e37d9ee1062e71dc99223d1618ffcfbd5683a7662766c4af/tables-3.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e756c272bb111d016fec1d03a60095403a8fb42a5fbaf5f317dcf6e3b9d8e92e", size = 7481194 }, - { url = "https://files.pythonhosted.org/packages/90/eb/8b998ae92009d6e9a696d8ec43a5b5ab9200a715f8da86d9f952a1a1bde5/tables-3.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:eea41cb32dd22b30d6f3dd4e113f6d693384d301c89f3c4b4712f90c9c955875", size = 4391242 }, - { url = "https://files.pythonhosted.org/packages/14/41/ea4928ece6c79614d1b61215a52760d88a134e55eca42a3a09815468afa9/tables-3.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d71913fb8147dc6132595b94fc82f88f6c2436a3b5c57aadfe26c680f96aa387", size = 
7323512 }, - { url = "https://files.pythonhosted.org/packages/d2/de/a3f2a4c6ac32b81e4e2789d7364db0f8fff0657f1b767004c3e63b353142/tables-3.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d6bbc477d038a17c5062ab6ccd94c8b1fa365cf017b9a2ad6c2dff1a07abb2b", size = 7112660 }, - { url = "https://files.pythonhosted.org/packages/03/76/53d432936c6955de1346b629b9f92213b11ee9e8aeab8831027fac9e053d/tables-3.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e67c71070b871fade3694a4c764504e03836bb1843321766cf2e40b7d280e84", size = 7478756 }, - { url = "https://files.pythonhosted.org/packages/64/09/1071a5cef60108dc3fd8d44596371d223834171800f43a652a9bd523588f/tables-3.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:ab9291ff4d243e7966b6706a2675b83138bd9bbe82721d695b78971660d59632", size = 4391764 }, - { url = "https://files.pythonhosted.org/packages/01/4f/cb9e44384982889ff720b8fabddb24aa4b0deb69afb1a5f09dd17a93b4ca/tables-3.9.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c14dc879b041cf53be1afe9e5ed581e1aeacdcee9e2e1ee79110dc96a4c8d97c", size = 7351431 }, - { url = "https://files.pythonhosted.org/packages/9a/e0/070c3ebc11614f1c3fc413f2327ea9d1818444eab7ae4975a2fea7de03cb/tables-3.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2848fb3dce30a7b83fa099d026a91d7b10ad48afae04fa10f974f1da3f1e2bbf", size = 7077135 }, - { url = "https://files.pythonhosted.org/packages/dd/50/8efbd7339a1fd69658f623bcee5e4c0318d823cc39bb23dbc6b0ff1a7491/tables-3.9.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b131c9b4e003816a45e2efe5c5c797d01d8308cac4aee72597a15837cedb605c", size = 7442059 }, - { url = "https://files.pythonhosted.org/packages/8b/84/93dd082a991c36f3e55e170cb71c4011256e45549c0a3b0d3693cf9053e3/tables-3.9.2-cp312-cp312-win_amd64.whl", hash = "sha256:c6304d321452fd56865e5c309e38373011b0f0f6c714786c5660613ceb623acb", size = 4374051 }, - { url = 
"https://files.pythonhosted.org/packages/64/5d/eb0c1b4401cbd9006589cb1f2a4288e70faa246538c7115ee7e54805c2d7/tables-3.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c52087ed8b90a5f6ba87f0adcd1c433e5f5db7c7ca5984b08ff45f2247635f7d", size = 7396210 }, - { url = "https://files.pythonhosted.org/packages/a9/9c/fb98d7a5b9e1c7f588bf71ea75c87711212c964f0c1c8e0cb1246bda0a35/tables-3.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:164b945d0cb731c7232775fd3657f150bcf05413928b86033b023a1dc8dbeb05", size = 7116751 }, - { url = "https://files.pythonhosted.org/packages/62/cf/897ad10477897bd55bcb3431833b274adda1c01bc8f856b8de57f6de0b83/tables-3.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a068d4ad08d5a6b2ad457f60ac6676efdab9e29459e776e433d5537a46e62e41", size = 7483600 }, - { url = "https://files.pythonhosted.org/packages/28/67/f710befc6a57a339ac0aa7272c7df58014fff4f8863c16f683894b8c7893/tables-3.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:bca5a6bf162a84a6ef74ca4017b28c59c1526cffdbd93ce94c98ff8f9593f1d5", size = 4394103 }, -] - -[[package]] -name = "tables" -version = "3.10.1" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version == '3.10.*'", -] -dependencies = [ - { name = "blosc2", version = "3.5.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, - { name = "numexpr", version = "2.11.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, - { name = "numpy", marker = "python_full_version == '3.10.*'" }, - { name = "packaging", marker = "python_full_version == '3.10.*'" }, - { name = "py-cpuinfo", marker = "python_full_version == '3.10.*'" }, - { name = "typing-extensions", marker = "python_full_version == '3.10.*'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/0d/5d/96708a84e9fcd29d1f684d56d4c38a23d29b1c934599a072a49f27ccfa71/tables-3.10.1.tar.gz", hash 
= "sha256:4aa07ac734b9c037baeaf44aec64ec902ad247f57811b59f30c4e31d31f126cf", size = 4762413 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ff/69/a768ec8104ada032c9be09f521f548766ddd0351bc941c9d42fa5db001de/tables-3.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bca9d11a570ca1bc57f0845e54e55c3093d5a1ace376faee639e09503a73745b", size = 6823691 }, - { url = "https://files.pythonhosted.org/packages/e4/2d/074bc14b39de9b552eec02ee583eff2997d903da1355f4450506335a6055/tables-3.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b62881cb682438d1e92b9178db42b160638aef3ca23341f7d98e9b27821b1eb4", size = 5471221 }, - { url = "https://files.pythonhosted.org/packages/4a/30/29411ab804b5ac4bee25c82ba38f4e7a8c0b52c6a1cdbeea7d1db33a53fe/tables-3.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9cf1bfd8b0e0195196205fc8a134628219cff85d20da537facd67a291e6b347", size = 7170201 }, - { url = "https://files.pythonhosted.org/packages/0a/7d/3165c7538b8e89b22fa17ad68e04106cca7023cf68e94011ae7b3b6d2a78/tables-3.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77f0e6dd45b91d99bf3976c8655c48fe3816baf390b9098e4fb2f0fdf9da7078", size = 7571035 }, - { url = "https://files.pythonhosted.org/packages/46/b3/985a23d2cf27aad383301a5e99e1851228a1941b868515612b5357bded5f/tables-3.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:d90542ec172d1d60df0b796c48ad446f2b69a5d5cd3077bd6450891b854d1ffb", size = 6311650 }, - { url = "https://files.pythonhosted.org/packages/dc/04/957264eb35e60251830a965e2d02332eb36ed14fbd8345df06981bbf3ece/tables-3.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f8917262a2bb3cd79d37e108557e34ec4b365fdcc806e01dd10765a84c65dab6", size = 6790492 }, - { url = "https://files.pythonhosted.org/packages/b2/19/eb7af9d92aaf6766f5fedfce11a97ab03cf39856561c5f562dc0c769a682/tables-3.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:f93f6db623b484bb6606537c2a71e95ee34fae19b0d891867642dd8c7be05af6", size = 5506835 }, - { url = "https://files.pythonhosted.org/packages/b0/8f/897324e1ad543ca439b2c91f04c406f3eeda6e7ff2f43b4cd939f05043e4/tables-3.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01ca51624bca1a87e703d6d6b796368bc3460ff007ea8b1341be03bedd863833", size = 7166960 }, - { url = "https://files.pythonhosted.org/packages/4e/5c/3f21d1135bf60af99ac79a17bbffd333d69763df2197ba04f47dd30bbd4e/tables-3.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9372516c76be3a05a573df63a69ce38315d03b5816d2a1e89c48129ec8b161b0", size = 7568724 }, - { url = "https://files.pythonhosted.org/packages/1f/e3/3ee6b66263902eccadc4e0e23bca7fb480fd190904b7ce0bea4777b5b799/tables-3.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:09190fb504888aeacafb7739c13d5c5a3e87af3d261f4d2f832b1f8407be133a", size = 6312200 }, - { url = "https://files.pythonhosted.org/packages/95/ec/ea6c476e33602c172c797fe8f8ab96d007d964137068276d142b142a28e5/tables-3.10.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a7090af37909e3bf229d5599fa442633e5a93b6082960b01038dc0106e07a8da", size = 6791597 }, - { url = "https://files.pythonhosted.org/packages/74/02/a967a506e9204e3328a8c03f67e6f3c919defc8df11aba83ae5b2abf7b0f/tables-3.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:203ed50c0c5f30f007df7633089b2a567b99856cd25d68f19d91624a8db2e7ad", size = 5474779 }, - { url = "https://files.pythonhosted.org/packages/c3/26/925793f753664ec698b2c6315c818269313db143da38150897cf260405c2/tables-3.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e36ce9f10471c69c1f0b06c6966de762558a35d62592c55df7994a8019adaf0c", size = 7130683 }, - { url = "https://files.pythonhosted.org/packages/d8/79/2b34f22284459e940a84e71dba19b2a34c7cc0ce3cdf685923c50d5b9611/tables-3.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f233e78cc9fa4157ec4c3ef2abf01a731fe7969bc6ed73539e5f4cd3b94c98b2", size = 7531367 }, - { url = "https://files.pythonhosted.org/packages/3d/27/5a23830f611e26dd7ee104096c6bb82e481b16f3f17ccaed3075f8d48312/tables-3.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:34357d2f2f75843a44e6fe54d1f11fc2e35a8fd3cb134df3d3362cff78010adb", size = 6295046 }, - { url = "https://files.pythonhosted.org/packages/d3/d4/e7c25df877e054b05f146d6ccb920bcdbe8d39b35a0962868b80547532c7/tables-3.10.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6fc5b46a4f359249c3ab9a0a0a2448d7e680e68cffd63fdf3fb7171781edd46e", size = 6824253 }, - { url = "https://files.pythonhosted.org/packages/c6/49/091865d75090a24493bd1b66e52d72f4d9627ff42983a13d4dcd89455d02/tables-3.10.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2ecabd7f459d40b7f9f5256850dd5f43773fda7b789f827de92c3d26df1e320f", size = 5499587 }, - { url = "https://files.pythonhosted.org/packages/23/83/9dac8af333149fa01add439f710d4a312b70faf81c2f59a16b8bfaebb75e/tables-3.10.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40a4ee18f3c9339d9dd8fd3777c75cda5768f2ff347064a2796f59161a190af8", size = 7128236 }, - { url = "https://files.pythonhosted.org/packages/89/fd/62f31643596f6ab71fc6d2a87acdee0bc01a03fbe1a7f3f6dc0c91e2546d/tables-3.10.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:757c6ea257c174af8036cf8f273ede756bbcd6db5ac7e2a4d64e788b0f371152", size = 7527953 }, -] - -[[package]] -name = "tables" -version = "3.10.2" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.12'", - "python_full_version == '3.11.*'", -] -dependencies = [ - { name = "blosc2", version = "3.5.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { name = "numexpr", version = "2.11.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { name = "numpy", 
marker = "python_full_version >= '3.11'" }, - { name = "packaging", marker = "python_full_version >= '3.11'" }, - { name = "py-cpuinfo", marker = "python_full_version >= '3.11'" }, - { name = "typing-extensions", marker = "python_full_version >= '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/15/50/23ead25f60bb1babe7f2f061d8a2f8c2f6804c1a20b3058677beb9085b56/tables-3.10.2.tar.gz", hash = "sha256:2544812a7186fadba831d6dd34eb49ccd788d6a83f4e4c2b431b835b6796c910", size = 4779722 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/96/f6/ef0c376c1fa01b916d5db0c2681be063f6289ee99faf7bb6610e0b55b773/tables-3.10.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:63f8adec3c4421a011c5c6a245c0c1fccf16dba7aaa67d9915d2821cf365ed4a", size = 6767194 }, - { url = "https://files.pythonhosted.org/packages/d9/d0/accd41382fa9da45bf816c56f85bda64223a3b8d0006d3496b67e0781a6e/tables-3.10.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34c120bff666d33d3bdfb9e33173a4869d5f34e6c87824f2c7ec6a72c8dfab82", size = 5482665 }, - { url = "https://files.pythonhosted.org/packages/59/2f/c95e94423c463177b8a7d55a1dbbd524840fe6a684844ff728f238e71f68/tables-3.10.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e71f63ac67c583ac42943c99c2d33bcc9e361e94d1ab1a763dc0698bdd9ff815", size = 7117696 }, - { url = "https://files.pythonhosted.org/packages/88/d5/71665919aa2a5a3d2a20eeef3c71dc7c2ebbd9f26d114a7808514aba24d6/tables-3.10.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:154773f97763ccc91a29bcead6ab7b5ef164c2ed8c409cd79a2115aa9b4184c9", size = 7520921 }, - { url = "https://files.pythonhosted.org/packages/46/96/b5023c1f7b9d560cac3e2c0daceebaeb88dd24c70c75db2d291abfa563e5/tables-3.10.2-cp311-cp311-win_amd64.whl", hash = "sha256:96b5e945d275415e79ddb0578657ecc6ac77030dcc0632ab2c39f89390bb239d", size = 6407137 }, - { url = 
"https://files.pythonhosted.org/packages/ab/c4/1efbcc699db863d88874f3d111e5bb6dd2e0fbaca38f91c992e696324730/tables-3.10.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c6ba58205d1f6a4e0e2212bc221e76cf104f22190f90c3f1683f3c1ab138f28f", size = 6734990 }, - { url = "https://files.pythonhosted.org/packages/4a/db/4c7facfc805ab764f2ee256011d20f96791d2426afa3389ca7ff2a8a4ea8/tables-3.10.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cdb5c040aa43e5e96259d6f6bb9df5b66fef2b071a6eb035c21bf6508e865d40", size = 5483377 }, - { url = "https://files.pythonhosted.org/packages/93/0a/53815b516a2465b329e5dc2079c99a8b6b1a23f6b9ce5da8a7ebc7892bf4/tables-3.10.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e694123fa886d9be57f55fc7e1dcacac49f0b4ed4a931c795bd8f82f7111b5a8", size = 7081356 }, - { url = "https://files.pythonhosted.org/packages/d3/e1/3f4adfc83eb7390abb964682a7d1df0dbe451dd2cee99750b1c7ca8e2c9d/tables-3.10.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6c12d0d04de89297763923ebeaddfd7e0b51f29041895db284fd4913e7448b7", size = 7483570 }, - { url = "https://files.pythonhosted.org/packages/9a/d4/0b9ba57a5a8d2d05d1108055a8d70a4b066db4ebed61921de34043a31bdb/tables-3.10.2-cp312-cp312-win_amd64.whl", hash = "sha256:a406d5dbbcb6604bd1ca129af337e0790d4e02d29d06159ddb9f74e38d756d32", size = 6388443 }, - { url = "https://files.pythonhosted.org/packages/ab/02/8c7aeaa6c8aac8e0298d40dc5fc55477fddc30cb31e4dc7e5e473be4b464/tables-3.10.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7b8bc07c715bad3d447ed8f834388ef2e10265e2c4af6b1297fc61adb645948f", size = 6725764 }, - { url = "https://files.pythonhosted.org/packages/91/f4/8683395d294b9e4576fd7d888aa6cf5583c013c2c0a2e47f862c2842407f/tables-3.10.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:28677ed8e1a371471495599078f48da0850f82457d6c852ca77959c974371140", size = 5442663 }, - { url = 
"https://files.pythonhosted.org/packages/72/9b/ea43159eed8f81bfa1ead8fa8201a3c352e84c7220e046bb548736833951/tables-3.10.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaaea478dcf27dd54679ef2643c26d3b8b15676ad81e4d80a88fd1682d23deb1", size = 7078747 }, - { url = "https://files.pythonhosted.org/packages/04/95/b3e88edc674e35d9011b168df0d7a9b1c3ab98733fa26e740ac7964edc2f/tables-3.10.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5e67a9f901842f9a4b1f3d2307f4bdd94047514fe0d0c558ed19c11f53c402a", size = 7479985 }, - { url = "https://files.pythonhosted.org/packages/63/ca/eaa029a43d269bdda6985931d6cfd479e876cd8cf7c887d818bef05ef03b/tables-3.10.2-cp313-cp313-win_amd64.whl", hash = "sha256:5637fdcded5ba5426aa24e0e42d6f990926a4da7f193830df131dfcb7e842900", size = 6385562 }, -] - -[[package]] -name = "tabulate" -version = "0.9.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ec/fe/802052aecb21e3797b8f7902564ab6ea0d60ff8ca23952079064155d1ae1/tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c", size = 81090 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f", size = 35252 }, -] - -[[package]] -name = "tomli" -version = "2.2.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077 }, - { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429 }, - { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067 }, - { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030 }, - { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898 }, - { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894 }, - { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319 }, - { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273 }, - { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310 }, - { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309 }, - { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762 }, - { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453 }, - { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486 }, - { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349 }, - { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159 }, - { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243 }, - { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645 }, - { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584 }, - { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875 }, - { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418 }, - { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708 }, - { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582 }, - { url = 
"https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543 }, - { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691 }, - { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170 }, - { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530 }, - { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666 }, - { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954 }, - { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724 }, - { url = 
"https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383 }, - { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257 }, -] - -[[package]] -name = "traitlets" -version = "5.14.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/eb/79/72064e6a701c2183016abbbfedaba506d81e30e232a68c9f0d6f6fcd1574/traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7", size = 161621 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/00/c0/8f5d070730d7836adc9c9b6408dec68c6ced86b304a9b26a14df072a6e8c/traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f", size = 85359 }, -] - -[[package]] -name = "typing-extensions" -version = "4.14.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/98/5a/da40306b885cc8c09109dc2e1abd358d5684b1425678151cdaed4731c822/typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36", size = 107673 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", size = 43906 }, -] - -[[package]] -name = "tzdata" -version = "2025.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839 }, -] - -[[package]] -name = "tzlocal" -version = "5.3.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "tzdata", marker = "sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/8b/2e/c14812d3d4d9cd1773c6be938f89e5735a1f11a9f184ac3639b93cef35d5/tzlocal-5.3.1.tar.gz", hash = "sha256:cceffc7edecefea1f595541dbd6e990cb1ea3d19bf01b2809f362a03dd7921fd", size = 30761 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/14/e2a54fabd4f08cd7af1c07030603c3356b74da07f7cc056e600436edfa17/tzlocal-5.3.1-py3-none-any.whl", hash = "sha256:eb1a66c3ef5847adf7a834f1be0800581b683b5608e74f86ecbcef8ab91bb85d", size = 18026 }, -] - -[[package]] -name = "urllib3" -version = "2.5.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795 }, -] - -[[package]] -name = "virtualenv" -version = "20.35.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "distlib" }, - { name = "filelock", version = "3.19.1", 
source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "filelock", version = "3.20.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "platformdirs" }, - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/a4/d5/b0ccd381d55c8f45d46f77df6ae59fbc23d19e901e2d523395598e5f4c93/virtualenv-20.35.3.tar.gz", hash = "sha256:4f1a845d131133bdff10590489610c98c168ff99dc75d6c96853801f7f67af44", size = 6002907 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/27/73/d9a94da0e9d470a543c1b9d3ccbceb0f59455983088e727b8a1824ed90fb/virtualenv-20.35.3-py3-none-any.whl", hash = "sha256:63d106565078d8c8d0b206d48080f938a8b25361e19432d2c9db40d2899c810a", size = 5981061 }, -] - -[[package]] -name = "wcwidth" -version = "0.2.14" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/24/30/6b0809f4510673dc723187aeaf24c7f5459922d01e2f794277a3dfb90345/wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605", size = 102293 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/af/b5/123f13c975e9f27ab9c0770f514345bd406d0e8d3b7a0723af9d43f710af/wcwidth-0.2.14-py2.py3-none-any.whl", hash = "sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1", size = 37286 }, -] - -[[package]] -name = "weightedcalcs" -version = "0.1.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pandas" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/71/09/1bef813e2cb52123cee058aa543918e82a78deb97eaed27eae0af2a24f03/weightedcalcs-0.1.3.tar.gz", hash = "sha256:78853f53409991671a34c2f06153d380083a4d1b242829897f15a9e5013036d5", size = 5264 } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/3f/24/ad3dcc7f6cf747e804d02a176697509a32b2eb82799377c943ae98ff3c39/weightedcalcs-0.1.3-py3-none-any.whl", hash = "sha256:afb18abaafd34cf6fa3550f8a851f5353c473d9bd80d9bd26acc860a23d6ddc6", size = 3715 }, -] - -[[package]] -name = "werkzeug" -version = "2.3.8" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "markupsafe" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/3d/4b/d746f1000782c89d6c97df9df43ba8f4d126038608843d3560ae88d201b5/werkzeug-2.3.8.tar.gz", hash = "sha256:554b257c74bbeb7a0d254160a4f8ffe185243f52a52035060b761ca62d977f03", size = 819747 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fd/21/0a674dfe66e9df9072c46269c882e9f901d36d987d8ea50ead033a9c1e01/werkzeug-2.3.8-py3-none-any.whl", hash = "sha256:bba1f19f8ec89d4d607a3bd62f1904bd2e609472d93cd85e9d4e178f472c3748", size = 242332 }, -] - -[[package]] -name = "wquantiles" -version = "0.6" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "numpy" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/eb/24/ae0216512052aee6cf68cba89f83ce011e06feff7a4cae5b03d78d078b24/wquantiles-0.6.tar.gz", hash = "sha256:a9e5b61277c8bf414394131bba4af0fc565de379ca73d6f7a336ba60184fa5c4", size = 2948 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f7/75/3cce30508bf46121b7cabce57b9cacbf8d935fa555cb3c5fca43f8dd0414/wquantiles-0.6-py3-none-any.whl", hash = "sha256:1b90d68fa05251bb96f8806a346e8d7dec9a9bb99f381ad5094707b46ab85218", size = 3291 }, -] - -[[package]] -name = "zipp" -version = "3.23.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547 } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276 }, -]