diff --git a/.github/workflows/_integration_test_shared.yml b/.github/workflows/_integration_test_shared.yml
index 19ba72290..624e459be 100644
--- a/.github/workflows/_integration_test_shared.yml
+++ b/.github/workflows/_integration_test_shared.yml
@@ -13,7 +13,7 @@ jobs:
     steps:
       - uses: actions/checkout@v4
       - uses: ./.github/actions/setup-python/
-      - run: poetry install
+      - run: poetry install --with=dev,dev-server --extras=all
       - run: |
           poetry run pytest truss/tests \
             --durations=0 -m 'integration' \
@@ -34,7 +34,7 @@ jobs:
     steps:
       - uses: actions/checkout@v4
       - uses: ./.github/actions/setup-python/
-      - run: poetry install
+      - run: poetry install --with=dev,dev-server --extras=all
       - run: |
           poetry run pytest truss-chains/tests \
             --durations=0 -m 'integration' \
diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml
index cb29cf6eb..56bfc91c9 100644
--- a/.github/workflows/integration-tests.yml
+++ b/.github/workflows/integration-tests.yml
@@ -25,6 +25,7 @@ jobs:
           fetch-depth: 2
       - uses: ./.github/actions/detect-versions/
         id: versions
+
   build-and-push-truss-base-images-if-needed:
     needs: [detect-version-changed]
     if: needs.detect-version-changed.outputs.build_base_images == 'true'
@@ -46,7 +47,7 @@
       - uses: actions/checkout@v4
       - uses: ./.github/actions/setup-python/
-      - run: poetry install
+      - run: poetry install --with=dev,dev-server --extras=all
       - shell: bash
         run: |
           poetry run bin/generate_base_images.py \
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index a5493a7e4..ad3e3cdcc 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -28,6 +28,7 @@ jobs:
           fetch-depth: 2
       - uses: ./.github/actions/detect-versions/
         id: versions
+
   build-and-push-truss-base-images-if-needed:
     needs: [detect-version-changed]
     if: needs.detect-version-changed.outputs.build_base_images == 'true'
@@ -49,7 +50,7 @@
       - uses: actions/checkout@v4
       - uses: ./.github/actions/setup-python/
-      - run: poetry install
+      - run: poetry install --with=dev,dev-server --extras=all
       - shell: bash
         run: |
           poetry run bin/generate_base_images.py \
@@ -64,7 +65,7 @@
     if: ${{ !failure() && !cancelled() && (needs.build-and-push-truss-base-images-if-needed.result == 'success' || needs.build-and-push-truss-base-images-if-needed.result == 'skipped') }}
     uses: ./.github/workflows/_integration_test_shared.yml
 
-  publish-to-pypi:
+  publish-rc-to-pypi:
     needs: [detect-version-changed]
     if: ${{ !failure() && !cancelled() && needs.detect-version-changed.outputs.release_version == 'true' && needs.detect-version-changed.outputs.is_prerelease_version == 'true' }}
     runs-on: ubuntu-20.04
@@ -84,7 +85,7 @@
       - uses: ./.github/actions/setup-python/
 
       - name: Install poetry packages
-        run: poetry install --no-dev
+        run: poetry install --extras=all
 
       - name: Build
         run: poetry build
diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yml
index 5d9a16d7a..b2464fed3 100644
--- a/.github/workflows/pr.yml
+++ b/.github/workflows/pr.yml
@@ -15,7 +15,7 @@ jobs:
         with:
           lfs: true
       - uses: ./.github/actions/setup-python/
-      - run: poetry install
+      - run: poetry install --with=dev,dev-server --extras=all
      - run: poetry run pre-commit run --all-files
         env:
           SKIP: ruff
@@ -31,7 +31,7 @@
         with:
           lfs: true
       - uses: ./.github/actions/setup-python/
-      - run: poetry install
+      - run: poetry install --with=dev,dev-server --extras=all
       - name: run tests
         run: poetry run pytest --durations=0 -m 'not integration' --junitxml=report.xml
       - name: Publish Test Report # Not sure how to display this in the UI for non PRs.
@@ -50,7 +50,6 @@
           use-verbose-mode: "yes"
           folder-path: "docs"
-
   enforce-chains-example-docs-sync:
     runs-on: ubuntu-20.04
     steps:
diff --git a/.github/workflows/release-truss-utils.yml b/.github/workflows/release-truss-utils.yml
index e3604f098..87a4e853a 100644
--- a/.github/workflows/release-truss-utils.yml
+++ b/.github/workflows/release-truss-utils.yml
@@ -40,7 +40,7 @@
 
       - name: Install poetry packages
         working-directory: truss-utils
-        run: poetry install --no-dev
+        run: poetry install
 
       - name: Build
         working-directory: truss-utils
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 5744184a5..e0ce4f77b 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -60,7 +60,7 @@
           tags: baseten/truss-context-builder:v${{ needs.detect-version-changed.outputs.new_version }}
           labels: ${{ steps.meta.outputs.labels }}
 
-  publish-to-pypi:
+  publish-release-to-pypi:
     needs: [detect-version-changed]
     if: ${{ !failure() && !cancelled() && needs.detect-version-changed.outputs.release_version == 'true' }}
     runs-on: ubuntu-20.04
@@ -80,7 +80,7 @@
       - uses: ./.github/actions/setup-python/
 
       - name: Install poetry packages
-        run: poetry install --no-dev
+        run: poetry install --extras=all
 
       - name: Build
         run: poetry build
diff --git a/.gitignore b/.gitignore
index 399d9cc04..bdf47a921 100644
--- a/.gitignore
+++ b/.gitignore
@@ -49,7 +49,7 @@ __diff_output__
 /env/
 /output/
 /cache/
-/dist/
+dist/
 
 # Spritesmith
 spritesmith-generated/
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 19633396f..cad82a0a2 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -22,6 +22,11 @@ repos:
       - id: ruff-format
   - repo: local
     hooks:
+      - id: pyproject.toml
+        name: pyproject.toml
+        entry: poetry run python bin/pyproject_toml_linter.py
+        language: python
+        additional_dependencies: [tomlkit]
       - id: mypy
         name: mypy-local
         entry: poetry run mypy
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index ccbf71059..5891038dc 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -4,8 +4,8 @@ Truss was first created at [Baseten](https://baseten.co), but as an open and liv
 
 We use GitHub features for project management on Truss:
 
-* For bugs and feature requests, file an issue.
-* For changes and updates, create a pull request.
+* For bugs and feature requests, [file an issue](https://github.com/basetenlabs/truss/issues).
+* For changes and updates, create a [pull request](https://github.com/basetenlabs/truss/pulls).
 * To view and comment on the roadmap, [check the projects tab](https://github.com/orgs/basetenlabs/projects/3).
 
 ## Local development
@@ -30,7 +30,7 @@ asdf plugin add poetry
 asdf install
 
 # Install poetry dependencies
-poetry install
+poetry install --with=dev,dev-server --extras=all
 
 # And finally precommit
 poetry run pre-commit install
diff --git a/Dockerfile b/Dockerfile
deleted file mode 100644
index e6b9cad0d..000000000
--- a/Dockerfile
+++ /dev/null
@@ -1,8 +0,0 @@
-FROM python:3.9
-
-RUN curl -sSL https://install.python-poetry.org | python -
-
-ENV PATH="/root/.local/bin:${PATH}"
-COPY . .
-
-RUN poetry install --only main
diff --git a/bin/codespace_post_create.sh b/bin/codespace_post_create.sh
index 76176509f..dc3a5a381 100755
--- a/bin/codespace_post_create.sh
+++ b/bin/codespace_post_create.sh
@@ -1,4 +1,4 @@
 #! /bin/bash
-poetry install
+poetry install --with=dev,dev-server --extras=all
 poetry run pre-commit install
 git lfs install
diff --git a/bin/generate_base_images.py b/bin/generate_base_images.py
index 432912fbe..bb93c93dd 100755
--- a/bin/generate_base_images.py
+++ b/bin/generate_base_images.py
@@ -9,7 +9,7 @@ from typing import List, Optional, Set
 
 from jinja2 import Environment, FileSystemLoader
 
-from truss.constants import SUPPORTED_PYTHON_VERSIONS
+from truss.base.constants import SUPPORTED_PYTHON_VERSIONS
 from truss.contexts.image_builder.util import (
     truss_base_image_name,
     truss_base_image_tag,
diff --git a/bin/pyproject_toml_linter.py b/bin/pyproject_toml_linter.py
new file mode 100644
index 000000000..225b1af32
--- /dev/null
+++ b/bin/pyproject_toml_linter.py
@@ -0,0 +1,77 @@
+# type: ignore  # tomlkit APIs are messy.
+import collections
+import pathlib
+from typing import DefaultDict, Set
+
+import tomlkit
+
+
+def _populate_extras(pyproject_path: pathlib.Path) -> None:
+    with pyproject_path.open("r", encoding="utf-8") as file:
+        original_content = file.read()
+    content = tomlkit.parse(original_content)
+
+    dependencies = content["tool"]["poetry"]["dependencies"]
+    dependency_metadata = content["tool"]["dependency_metadata"]
+
+    extra_sections: DefaultDict[str, Set[str]] = collections.defaultdict(set)
+    all_deps: Set[str] = set()
+
+    for key, value in dependencies.items():
+        if isinstance(value, dict):
+            is_optional = value.get("optional", False)
+        else:
+            is_optional = False  # Base dependencies.
+
+        if not is_optional:
+            continue
+
+        if key not in dependency_metadata:
+            raise ValueError(
+                f"`{key}` is missing in `[tool.dependency_metadata]`. "
+                f"(file: {pyproject_path}). Please add metadata."
+            )
+        metadata = dependency_metadata[key]
+        components = metadata["components"].split(",")
+        for component in components:
+            if component == "base":
+                continue
+            extra_sections[component].add(key)
+            all_deps.add(key)
+
+    for key in dependency_metadata.keys():
+        if key not in dependencies:
+            raise ValueError(
+                f"`{key}` in `[tool.dependency_metadata]` is not in "
+                "`[tool.poetry.dependencies]`. "
+                f"(file: {pyproject_path}). Please remove or sync."
+            )
+
+    extras_section = tomlkit.table()
+    for extra_section, deps in extra_sections.items():
+        extras_section[extra_section] = tomlkit.array()
+        extras_section[extra_section].extend(sorted(deps))
+
+    extras_section["all"] = tomlkit.array()
+    extras_section["all"].extend(sorted(all_deps))
+
+    if "extras" not in content["tool"]["poetry"]:
+        raise ValueError("Expected section [tool.poetry.extras] to be present.")
+
+    content["tool"]["poetry"]["extras"] = extras_section
+
+    updated_content = tomlkit.dumps(content)
+
+    # Compare the content before and after; if changes were made, fail the check
+    if original_content != updated_content:
+        with pyproject_path.open("w", encoding="utf-8") as file:
+            file.write(updated_content)
+        print(f"File '{pyproject_path}' was updated. Please re-stage the changes.")
+        exit(1)
+
+    print("No changes detected.")
+
+
+if __name__ == "__main__":
+    pyproject_file = pathlib.Path(__file__).parent.parent.resolve() / "pyproject.toml"
+    _populate_extras(pyproject_file)
diff --git a/context_builder.Dockerfile b/context_builder.Dockerfile
index 39d7b50b3..70d4c0a56 100644
--- a/context_builder.Dockerfile
+++ b/context_builder.Dockerfile
@@ -24,5 +24,4 @@ COPY ./README.md ./README.md
 
 # https://python-poetry.org/docs/configuration/#virtualenvsin-project
 # to write to project root .venv file to be used for context builder test
-RUN poetry config virtualenvs.in-project true \
-    && poetry install --only builder
+RUN poetry config virtualenvs.in-project true && poetry install --extras=all
diff --git a/docs/chains/doc_gen/API-reference.mdx b/docs/chains/doc_gen/API-reference.mdx
index d45c26dec..6ff27670b 100644
--- a/docs/chains/doc_gen/API-reference.mdx
+++ b/docs/chains/doc_gen/API-reference.mdx
@@ -81,10 +81,8 @@ an access token for downloading model weights).
 | Name | Type | Description |
 |-----------------------|---------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
 | `data_dir` | *Path\|None* | The directory where the chainlet can store and access data, e.g. for downloading model weights. |
-| `user_config` | *UserConfigT* | User-defined configuration for the chainlet. |
 | `chainlet_to_service` | *Mapping[str,[ServiceDescriptor](#class-truss-chains-servicedescriptor)]* | A mapping from chainlet names to service descriptors. This is used create RPCs sessions to dependency chainlets. It contains only the chainlet services that are dependencies of the current chainlet. |
 | `secrets` | *Mapping[str,str]* | A mapping from secret names to secret values. It contains only the secrets that are listed in `remote_config.assets.secret_keys` of the current chainlet. |
-| `user_env` | *Mapping[str,str]* | These values can be provided to the deploy command and customize the behavior of deployed chainlets. E.g. for differentiating between prod and dev version of the same chain. |
 | `environment` | *[Environment](#class-truss-chains-definitions-environment)\|None* | The environment that the chainlet is deployed in. None if the chainlet is not associated with an environment. |
 
 #### get_baseten_api_key()
@@ -216,14 +214,14 @@ modules and keep their requirement files right next their python source files.
 
 **Parameters:**
 
-| Name | Type | Description |
-|-------------------------|----------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| `base_image` | *[BasetenImage](#truss-chains-basetenimage)\|[CustomImage](#truss-chains-customimage)* | The base image used by the chainlet. Other dependencies and assets are included as additional layers on top of that image. You can choose a baseten default image for a supported python version (e.g. `BasetenImage.PY311`), this will also include GPU drivers if needed, or provide a custom image (e.g. `CustomImage(image="python:3.11-slim")`). Specification by string is deprecated. |
-| `pip_requirements_file` | *AbsPath\|None* | Path to a file containing pip requirements. The file content is naively concatenated with `pip_requirements`. |
-| `pip_requirements` | *list[str]* | A list of pip requirements to install. The items are naively concatenated with the content of the `pip_requirements_file`. |
-| `apt_requirements` | *list[str]* | A list of apt requirements to install. |
-| `data_dir` | *AbsPath\|None* | Data from this directory is copied into the docker image and accessible to the remote chainlet at runtime. |
-| `external_package_dirs` | *list[AbsPath]\|None* | A list of directories containing additional python packages outside the chain’s workspace dir, e.g. a shared library. This code is copied into the docker image and importable at runtime. |
+| Name | Type | Description |
+|-------------------------|----------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| `base_image` | *[BasetenImage](#truss-chains-basetenimage)\|[CustomImage](#truss-chains-customimage)* | The base image used by the chainlet. Other dependencies and assets are included as additional layers on top of that image. You can choose a baseten default image for a supported python version (e.g. `BasetenImage.PY311`), this will also include GPU drivers if needed, or provide a custom image (e.g. `CustomImage(image="python:3.11-slim")`). |
+| `pip_requirements_file` | *AbsPath\|None* | Path to a file containing pip requirements. The file content is naively concatenated with `pip_requirements`. |
+| `pip_requirements` | *list[str]* | A list of pip requirements to install. The items are naively concatenated with the content of the `pip_requirements_file`. |
+| `apt_requirements` | *list[str]* | A list of apt requirements to install. |
+| `data_dir` | *AbsPath\|None* | Data from this directory is copied into the docker image and accessible to the remote chainlet at runtime. |
+| `external_package_dirs` | *list[AbsPath]\|None* | A list of directories containing additional python packages outside the chain’s workspace dir, e.g. a shared library. This code is copied into the docker image and importable at runtime. |
 
 ### *class* `truss_chains.BasetenImage`
 
@@ -294,12 +292,12 @@ For example, model weight caching can be used like this:
 
 ```python
 import truss_chains as chains
-from truss import truss_config
+from truss.base import truss_config
 
 mistral_cache = truss_config.ModelRepo(
     repo_id="mistralai/Mistral-7B-Instruct-v0.2",
     allow_patterns=["*.json", "*.safetensors", ".model"]
-    )
+)
 
 chains.Assets(cached=[mistral_cache], ...)
 ```
@@ -331,7 +329,6 @@ Deploys a chain remotely (with all dependent chainlets).
 | `chain_name` | *str* | The name of the chain. |
 | `publish` | *bool* | Whether to publish the chain as a published deployment (it is a draft deployment otherwise) |
 | `promote` | *bool* | Whether to promote the chain to be the production deployment (this implies publishing as well). |
-| `user_env` | *Mapping[str,str]\|None* | These values can be provided to the push command and customize the behavior of deployed chainlets. E.g. for differentiating between prod and dev version of the same chain. |
 | `only_generate_trusses` | *bool* | Used for debugging purposes. If set to True, only the the underlying truss models for the chainlets are generated in `/tmp/.chains_generated`. |
 | `remote` | *str\|None* | name of a remote config in .trussrc. If not provided, it will be inquired. |
 | `environment` | *str\|None* | The name of an environment to promote deployment into. |
@@ -465,7 +462,6 @@ corresponding fields of
 | `secrets` | *Mapping[str,str]\|None* | A dict of secrets keys and values to provide to the chainlets. |
 | `data_dir` | *Path\|str\|None* | Path to a directory with data files. |
 | `chainlet_to_service` | *Mapping[str,[ServiceDescriptor](#class-truss-chains-servicedescriptor)* | A dict of chainlet names to service descriptors. |
-| `user_env` | *Mapping[str,str]\|None* | see [`push`](#truss-chains-push). |
 
 * **Return type:**
   *ContextManager*[None]
diff --git a/docs/chains/doc_gen/README.md b/docs/chains/doc_gen/README.md
index 3c1934787..6b44de55e 100644
--- a/docs/chains/doc_gen/README.md
+++ b/docs/chains/doc_gen/README.md
@@ -9,9 +9,9 @@ Extra deps required:
 The general process is:
 1. Document as much as possible in the code, including usage examples, links
    etc.
-2. Auto-generate `generated-API-reference.mdx` with `poetry run python
-   docs/chains/doc_gen/generate_reference.py`. This applies the patch file and
-   launches meld to resolve conflicts.
+2. Auto-generate `generated-API-reference.mdx` with
+   `poetry run python docs/chains/doc_gen/generate_reference.py`.
+   This applies the patch file and launches meld to resolve conflicts.
 4. Proofread `docs/chains/doc_gen/API-reference.mdx`.
 5. If proofreading leads to edits or the upstream docstrings changed lot,
    update the patch file: `diff -u \
diff --git a/docs/chains/doc_gen/generate_reference.py b/docs/chains/doc_gen/generate_reference.py
index 2f09fc5c0..e8b4323d2 100644
--- a/docs/chains/doc_gen/generate_reference.py
+++ b/docs/chains/doc_gen/generate_reference.py
@@ -69,7 +69,6 @@
     "General framework and helper functions.",
     [
         "truss_chains.push",
-        "truss_chains.deploy_remotely",
         "truss_chains.remote.ChainService",
         "truss_chains.make_abs_path_here",
         "truss_chains.run_local",
diff --git a/docs/chains/doc_gen/generated-reference.mdx b/docs/chains/doc_gen/generated-reference.mdx
index 911457158..d0a3bb48b 100644
--- a/docs/chains/doc_gen/generated-reference.mdx
+++ b/docs/chains/doc_gen/generated-reference.mdx
@@ -71,7 +71,7 @@ context instance is provided.
 
 ### *class* `truss_chains.DeploymentContext`
 
-Bases: `pydantic.BaseModel`, `Generic`[`UserConfigT`]
+Bases: `pydantic.BaseModel`
 
 Bundles config values and resources needed to instantiate Chainlets.
 
@@ -85,10 +85,9 @@ an access token for downloading model weights).
 | Name | Type | Description |
 |------|------|-------------|
 | `data_dir` | *Path\|None* | The directory where the chainlet can store and access data, e.g. for downloading model weights. |
-| `user_config` | *UserConfigT* | User-defined configuration for the chainlet. |
+| `user_config` | ** | User-defined configuration for the chainlet. |
 | `chainlet_to_service` | *Mapping[str,[ServiceDescriptor](#truss_chains.ServiceDescriptor* | A mapping from chainlet names to service descriptors. This is used create RPCs sessions to dependency chainlets. It contains only the chainlet services that are dependencies of the current chainlet. |
 | `secrets` | *MappingNoIter[str,str]* | A mapping from secret names to secret values. It contains only the secrets that are listed in `remote_config.assets.secret_keys` of the current chainlet. |
-| `user_env` | *Mapping[str,str]* | These values can be provided to the deploy command and customize the behavior of deployed chainlets. E.g. for differentiating between prod and dev version of the same chain. |
 | `environment` | *[Environment](#truss_chains.definitions.Environment* | The environment that the chainlet is deployed in. None if the chainlet is not associated with an environment. |
 
 #### chainlet_to_service *: Mapping[str, [ServiceDescriptor](#truss_chains.ServiceDescriptor)]*
@@ -111,10 +110,6 @@ an access token for downloading model weights).
 
 #### secrets *: MappingNoIter[str, str]*
 
-#### user_config *: UserConfigT*
-
-#### user_env *: Mapping[str, str]*
-
 ### *class* `truss_chains.definitions.Environment`
 
 Bases: `pydantic.BaseModel`
@@ -256,7 +251,7 @@ modules and keep their requirement files right next their python source files.
 
 | Name | Type | Description |
 |------|------|-------------|
-| `base_image` | *[BasetenImage](#truss_chains.BasetenImage* | The base image used by the chainlet. Other dependencies and assets are included as additional layers on top of that image. You can choose a Baseten default image for a supported python version (e.g. `BasetenImage.PY311`), this will also include GPU drivers if needed, or provide a custom image (e.g. `CustomImage(image="python:3.11-slim")`). Specification by string is deprecated. |
+| `base_image` | *[BasetenImage](#truss_chains.BasetenImage* | The base image used by the chainlet. Other dependencies and assets are included as additional layers on top of that image. You can choose a Baseten default image for a supported python version (e.g. `BasetenImage.PY311`), this will also include GPU drivers if needed, or provide a custom image (e.g. `CustomImage(image="python:3.11-slim")`). |
 | `pip_requirements_file` | *AbsPath\|None* | Path to a file containing pip requirements. The file content is naively concatenated with `pip_requirements`. |
 | `pip_requirements` | *list[str]* | A list of pip requirements to install. The items are naively concatenated with the content of the `pip_requirements_file`. |
 | `apt_requirements` | *list[str]* | A list of apt requirements to install. |
@@ -265,7 +260,7 @@ modules and keep their requirement files right next their python source files.
 
 #### apt_requirements *: list[str]*
 
-#### base_image *: [BasetenImage](#truss_chains.BasetenImage) | [CustomImage](#truss_chains.CustomImage) | str*
+#### base_image *: [BasetenImage](#truss_chains.BasetenImage) | [CustomImage](#truss_chains.CustomImage)*
 
 #### data_dir *: AbsPath | None*
 
@@ -355,7 +350,7 @@ For example, model weight caching can be used like this:
 
 ```default
 import truss_chains as chains
-from truss import truss_config
+from truss.base import truss_config
 
 mistral_cache = truss_config.ModelRepo(
@@ -406,7 +401,6 @@ Deploys a chain remotely (with all dependent chainlets).
 | `chain_name` | *str* | The name of the chain. |
 | `publish` | *bool* | Whether to publish the chain as a published deployment (it is a draft deployment otherwise) |
 | `promote` | *bool* | Whether to promote the chain to be the production deployment (this implies publishing as well). |
-| `user_env` | *Mapping[str,str]\|None* | These values can be provided to the push command and customize the behavior of deployed chainlets. E.g. for differentiating between prod and dev version of the same chain. |
 | `only_generate_trusses` | *bool* | Used for debugging purposes. If set to True, only the the underlying truss models for the chainlets are generated in `/tmp/.chains_generated`. |
 | `remote` | *str\|None* | name of a remote config in .trussrc. If not provided, it will be inquired. |
 | `environment` | *str\|None* | The name of an environment to promote deployment into. |
@@ -416,26 +410,6 @@ Deploys a chain remotely (with all dependent chainlets).
 * **Return type:**
   *BasetenChainService*
 
-### `truss_chains.deploy_remotely`
-
-Deprecated, use `push` instead.
-
-
-**Parameters:**
-
-| Name | Type | Description |
-|------|------|-------------|
-| `entrypoint` | *Type[ABCChainlet]* | |
-| `chain_name` | *str* | |
-| `publish` | *bool* | |
-| `promote` | *bool* | |
-| `user_env` | *Mapping[str,str]\|None* | |
-| `only_generate_trusses` | *bool* | |
-| `remote` | *str\|None* | |
-
-* **Return type:**
-  *BasetenChainService*
-
 ### *class* `truss_chains.remote.ChainService`
 
 Bases: `ABC`
 
 Handle for a deployed chain.
 
 A `ChainService` is created and returned when using `push`. It
 bundles the individual services for each chainlet in the chain, and provides
 utilities to query their status, invoke the entrypoint etc.
@@ -562,7 +536,6 @@ corresponding fields of `DeploymentContext`.
 | `secrets` | *Mapping[str,str]\|None* | A dict of secrets keys and values to provide to the chainlets. |
 | `data_dir` | *Path\|str\|None* | Path to a directory with data files. |
 | `chainlet_to_service` | *Mapping[str,[ServiceDescriptor](#truss_chains.ServiceDescriptor* | A dict of chainlet names to service descriptors. |
-| `user_env` | *Mapping[str,str]\|None* | see `deploy_remotely`. |
 
 * **Return type:**
   *ContextManager*[None]
diff --git a/docs/chains/doc_gen/reference.patch b/docs/chains/doc_gen/reference.patch
index b31761583..ecf741a4f 100644
--- a/docs/chains/doc_gen/reference.patch
+++ b/docs/chains/doc_gen/reference.patch
@@ -1,5 +1,5 @@
---- docs/chains/doc_gen/generated-reference.mdx 2024-11-04 11:41:28.619593764 -0800
-+++ docs/chains/doc_gen/API-reference.mdx 2024-11-04 11:53:15.657920649 -0800
+--- docs/chains/doc_gen/generated-reference.mdx 2024-11-07 16:51:32.687418306 -0800
++++ docs/chains/doc_gen/API-reference.mdx 2024-11-07 16:58:24.661786055 -0800
 @@ -24,31 +24,28 @@
  dependency of another chainlet. The return value of `depends` is intended to be
  used as a default argument in a chainlet’s `__init__`-method.
@@ -40,7 +40,7 @@
 
 ### `truss_chains.depends_context`
 
-@@ -58,20 +55,19 @@
+@@ -58,16 +55,15 @@
  [example chainlet](https://github.com/basetenlabs/truss/blob/main/truss-chains/truss_chains/example_chainlet.py)
  for more guidance on the `__init__`-signature of chainlets.
 
 ### *class* `truss_chains.DeploymentContext`
 
--Bases: `pydantic.BaseModel`, `Generic`[`UserConfigT`]
-+Bases: `pydantic.BaseModel`
-
- Bundles config values and resources needed to instantiate Chainlets.
-
-@@ -82,20 +78,14 @@
+@@ -82,19 +78,12 @@
 
 **Parameters:**
 
-| Name | Type | Description |
-|------|------|-------------|
-| `data_dir` | *Path\|None* | The directory where the chainlet can store and access data, e.g. for downloading model weights. |
--| `user_config` | ** | User-defined configuration for the chainlet. |
-| `chainlet_to_service` | *Mapping[str,[ServiceDescriptor](#truss_chains.ServiceDescriptor* | A mapping from chainlet names to service descriptors. This is used create RPCs sessions to dependency chainlets. It contains only the chainlet services that are dependencies of the current chainlet. |
-| `secrets` | *MappingNoIter[str,str]* | A mapping from secret names to secret values. It contains only the secrets that are listed in `remote_config.assets.secret_keys` of the current chainlet. |
--| `user_env` | *Mapping[str,str]* | These values can be provided to the deploy command and customize the behavior of deployed chainlets. E.g. for differentiating between prod and dev version of the same chain. |
-| `environment` | *[Environment](#truss_chains.definitions.Environment* | The environment that the chainlet is deployed in. None if the chainlet is not associated with an environment. |
-
-#### chainlet_to_service *: Mapping[str, [ServiceDescriptor](#truss_chains.ServiceDescriptor)]*
@@ -85,15 +79,13 @@
+| Name | Type | Description |
+|-----------------------|---------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| `data_dir` | *Path\|None* | The directory where the chainlet can store and access data, e.g. for downloading model weights. |
-+| `user_config` | *UserConfigT* | User-defined configuration for the chainlet. |
+| `chainlet_to_service` | *Mapping[str,[ServiceDescriptor](#class-truss-chains-servicedescriptor)]* | A mapping from chainlet names to service descriptors. This is used create RPCs sessions to dependency chainlets. It contains only the chainlet services that are dependencies of the current chainlet. |
+| `secrets` | *Mapping[str,str]* | A mapping from secret names to secret values. It contains only the secrets that are listed in `remote_config.assets.secret_keys` of the current chainlet. |
-+| `user_env` | *Mapping[str,str]* | These values can be provided to the deploy command and customize the behavior of deployed chainlets. E.g. for differentiating between prod and dev version of the same chain. |
+| `environment` | *[Environment](#class-truss-chains-definitions-environment)\|None* | The environment that the chainlet is deployed in. None if the chainlet is not associated with an environment. |
 
 #### get_baseten_api_key()
-@@ -104,16 +94,14 @@
+@@ -103,12 +92,14 @@
 
 #### get_service_descriptor(chainlet_name)
 
-* **Parameters:**
-  **chainlet_name** (*str*)
-* **Return type:**
-  [*ServiceDescriptor*](#truss_chains.ServiceDescriptor)
-
+**Parameters:**
 
--#### secrets *: MappingNoIter[str, str]*
+| Name | Type | Description |
+|-----------------|-------|---------------------------|
+| `chainlet_name` | *str* | The name of the chainlet. |
--#### user_config *: UserConfigT*
-
--#### user_env *: Mapping[str, str]*
-
++
+* **Return type:**
+  [*ServiceDescriptor*](#class-truss-chains-servicedescriptor)
 
 ### *class* `truss_chains.definitions.Environment`
-@@ -123,7 +111,6 @@
+@@ -118,7 +109,6 @@
 
 * **Parameters:**
   **name** (*str*) – The name of the environment.
-
 
 ### *class* `truss_chains.ChainletOptions`
 
-@@ -132,14 +119,10 @@
+@@ -127,14 +117,10 @@
 
 **Parameters:**
 
-| Name | Type | Description |
-|------|------|-------------|
+| Name | Type | Description |
+|------|------|-------------|
 
 ### *class* `truss_chains.RPCOptions`
 
-@@ -149,15 +132,10 @@
+@@ -144,15 +130,10 @@
 
 **Parameters:**
 
 ### `truss_chains.mark_entrypoint`
 
-@@ -169,18 +147,23 @@
+@@ -164,18 +145,23 @@
 
 Example usage:
 
 # Remote Configuration
 
-@@ -194,7 +177,7 @@
+@@ -189,7 +175,7 @@
 
 This is specified as a class variable for each chainlet class, e.g.:
 
 import truss_chains as chains
 
-@@ -210,34 +193,13 @@
+@@ -205,34 +191,13 @@
 
 **Parameters:**
 
 ### *class* `truss_chains.DockerImage`
 
-@@ -245,35 +207,23 @@
+@@ -240,35 +205,23 @@
 
 Configures the docker image in which a remoted chainlet is deployed.
 
-| Name | Type | Description |
-|------|------|-------------|
--| `base_image` | *[BasetenImage](#truss_chains.BasetenImage* | The base image used by the chainlet. Other dependencies and assets are included as additional layers on top of that image. You can choose a Baseten default image for a supported python version (e.g. `BasetenImage.PY311`), this will also include GPU drivers if needed, or provide a custom image (e.g. `CustomImage(image="python:3.11-slim")`). Specification by string is deprecated. |
+-| `base_image` | *[BasetenImage](#truss_chains.BasetenImage* | The base image used by the chainlet. Other dependencies and assets are included as additional layers on top of that image. You can choose a Baseten default image for a supported python version (e.g. `BasetenImage.PY311`), this will also include GPU drivers if needed, or provide a custom image (e.g. `CustomImage(image="python:3.11-slim")`). |
-| `pip_requirements_file` | *AbsPath\|None* | Path to a file containing pip requirements. The file content is naively concatenated with `pip_requirements`. |
-| `pip_requirements` | *list[str]* | A list of pip requirements to install. The items are naively concatenated with the content of the `pip_requirements_file`. |
-| `apt_requirements` | *list[str]* | A list of apt requirements to install. |
-| `data_dir` | *AbsPath\|None* | Data from this directory is copied into the docker image and accessible to the remote chainlet at runtime. |
-| `external_package_dirs` | *list[AbsPath]\|None* | A list of directories containing additional python packages outside the chain’s workspace dir, e.g. a shared library. This code is copied into the docker image and importable at runtime. |
-
-#### apt_requirements *: list[str]*
-
--#### base_image *: [BasetenImage](#truss_chains.BasetenImage) | [CustomImage](#truss_chains.CustomImage)*
-
-#### data_dir *: AbsPath | None*
-
-#### pip_requirements *: list[str]*
-
-#### pip_requirements_file *: AbsPath | None*
+| Name | Type | Description |
+|-------------------------|----------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| `base_image` | *[BasetenImage](#truss-chains-basetenimage)\|[CustomImage](#truss-chains-customimage)* | The base image used by the chainlet. Other dependencies and assets are included as additional layers on top of that image. You can choose a baseten default image for a supported python version (e.g. `BasetenImage.PY311`), this will also include GPU drivers if needed, or provide a custom image (e.g. `CustomImage(image="python:3.11-slim")`). |
+| `pip_requirements_file` | *AbsPath\|None* | Path to a file containing pip requirements. The file content is naively concatenated with `pip_requirements`. |
+| `pip_requirements` | *list[str]* | A list of pip requirements to install. The items are naively concatenated with the content of the `pip_requirements_file`. |
+| `apt_requirements` | *list[str]* | A list of apt requirements to install. |
+| `data_dir` | *AbsPath\|None* | Data from this directory is copied into the docker image and accessible to the remote chainlet at runtime. |
+| `external_package_dirs` | *list[AbsPath]\|None* | A list of directories containing additional python packages outside the chain’s workspace dir, e.g. a shared library. This code is copied into the docker image and importable at runtime. |
 
 ### *class* `truss_chains.BasetenImage`
-@@ -282,11 +232,12 @@
+@@ -277,11 +230,12 @@
 
 Default images, curated by baseten, for different python versions. If a
 Chainlet uses GPUs, drivers will be included in the image.
 
-#### PY310 *= 'py310'*
-
-#### PY311 *= 'py311'*
+| Enum Member | Value |
+|-------------|---------|
+| `PY310` | *py310* |
+| `PY311 ` | *py311* |
+| `PY39` | *py39* |
 
-#### PY39 *= 'py39'*
 
 ### *class* `truss_chains.CustomImage`
-@@ -296,42 +247,35 @@
+@@ -291,42 +245,35 @@
 
 **Parameters:**
 
 It is important to understand the difference between predict_concurrency and
 the concurrency target (used for autoscaling, i.e. adding or removing replicas).
 Furthermore, the `predict_concurrency` of a single instance is implemented in
-@@ -342,18 +286,13 @@
+@@ -337,52 +284,33 @@
 
  - With a threadpool if it’s a synchronous function.
   This requires that the threads don’t have significant CPU load (due to the GIL).
 
-```default
+```python
 import truss_chains as chains
 from truss.base import truss_config
 
 mistral_cache = truss_config.ModelRepo(
     repo_id="mistralai/Mistral-7B-Instruct-v0.2",
     allow_patterns=["*.json", "*.safetensors", ".model"]
-    )
+)
 
 chains.Assets(cached=[mistral_cache], ...)
 ```
 
 See [truss caching guide](https://docs.baseten.co/deploy/guides/model-cache#enabling-caching-for-a-model)
 for more details on caching.
 
 # Core
 
-@@ -400,75 +325,41 @@
+@@ -395,24 +323,26 @@
 
 **Parameters:**
 
-| Name | Type | Description |
-|------|------|-------------|
-| `chain_name` | *str* | The name of the chain. |
-| `publish` | *bool* | Whether to publish the chain as a published deployment (it is a draft deployment otherwise) |
-| `promote` | *bool* | Whether to promote the chain to be the production deployment (this implies publishing as well). |
-| `only_generate_trusses` | *bool* | Used for debugging purposes. If set to True, only the the underlying truss models for the chainlets are generated in `/tmp/.chains_generated`. |
-| `remote` | *str\|None* | name of a remote config in .trussrc. If not provided, it will be inquired. |
-| `environment` | *str\|None* | The name of an environment to promote deployment into. |
+| Name | Type | Description |
+|------|------|-------------|
+| `chain_name` | *str* | The name of the chain. |
+| `publish` | *bool* | Whether to publish the chain as a published deployment (it is a draft deployment otherwise) |
+| `promote` | *bool* | Whether to promote the chain to be the production deployment (this implies publishing as well). |
+| `only_generate_trusses` | *bool* | Used for debugging purposes. If set to True, only the the underlying truss models for the chainlets are generated in `/tmp/.chains_generated`. |
+| `remote` | *str\|None* | name of a remote config in .trussrc. If not provided, it will be inquired. |
+| `environment` | *str\|None* | The name of an environment to promote deployment into. |
 
 * **Return type:**
-  *BasetenChainService*
+  [*ChainService*](#class-truss-chains-remote-chainservice)
 
-### *class* `truss_chains.remote.ChainService`
++### `truss_chains.deploy_remotely`
++
+Deprecated, use [`push`](#truss-chains-push) instead.
 
-Bases: `ABC`
-
++### *class* `truss_chains.remote.ChainService`
+
 Handle for a deployed chain.
 
 A `ChainService` is created and returned when using `push`. It
@@ -420,29 +350,13 @@
 bundles the individual services for each chainlet in the chain, and provides
 utilities to query their status, invoke the entrypoint etc.
 
 * **Return type:**
   list[*DeployedChainlet*]
 
-@@ -478,18 +369,23 @@
+@@ -452,18 +366,23 @@
 
 Invokes the entrypoint with JSON data.
 
 Link to status page on Baseten.
 
-@@ -511,12 +407,12 @@
+@@ -485,12 +404,12 @@
 
 You can now in `root/sub_package/chainlet.py` point to the requirements
 file like this:
 
 This helper uses the directory of the immediately calling module as an
 absolute reference point for resolving the file location. Therefore,
 you MUST NOT wrap the instantiation of `make_abs_path_here` into a
-@@ -524,7 +420,7 @@
+@@ -498,7 +417,7 @@
 
 Ok:
 
 def foo(path: AbsPath):
     abs_path = path.abs_path
 
-@@ -534,7 +430,7 @@
+@@ -508,7 +427,7 @@
 
 Not Ok:
 
 def foo(path: str):
     dangerous_value = make_abs_path_here(path).abs_path
 
-@@ -542,8 +438,15 @@
+@@ -516,8 +435,15 @@
 
 foo("./somewhere")
 ```
 
 * **Return type:**
   *AbsPath*
 
-@@ -552,24 +455,24 @@
+@@ -526,23 +452,23 @@
 
 Context manager local debug execution of a chain.
 
 The arguments only need to be provided if the chainlets explicitly access any the
 
-| Name | Type | Description |
-|------|------|-------------|
-| `secrets` | *Mapping[str,str]\|None* | A dict of secrets keys and values to provide to the chainlets. |
-| `data_dir` | *Path\|str\|None* | Path to a directory with data files. |
-| `chainlet_to_service` | *Mapping[str,[ServiceDescriptor](#truss_chains.ServiceDescriptor* | A dict of chainlet names to service descriptors. |
--| `user_env` | *Mapping[str,str]\|None* | see `deploy_remotely`. |
+| Name | Type | Description |
+|-----------------------|--------------------------------------------------------------------------|----------------------------------------------------------------|
+| `secrets` | *Mapping[str,str]\|None* | A dict of secrets keys and values to provide to the chainlets. |
+| `data_dir` | *Path\|str\|None* | Path to a directory with data files. |
+| `chainlet_to_service` | *Mapping[str,[ServiceDescriptor](#class-truss-chains-servicedescriptor)* | A dict of chainlet names to service descriptors. |
 
 * **Return type:**
   *ContextManager*[None]
 
 import os
 import truss_chains as chains
 
-@@ -595,7 +498,8 @@
+@@ -568,7 +494,8 @@
 
     print(result)
 ```
 
 for more details.
 
 ### *class* `truss_chains.ServiceDescriptor`
-@@ -607,22 +511,13 @@
+@@ -580,22 +507,13 @@
 
 **Parameters:**
 
 Base class for stubs that invoke remote chainlets.
 
-@@ -630,17 +525,18 @@
+@@ -603,17 +521,18 @@
 
 in user-code for wrapping a deployed truss model into the chains framework, e.g.
 like that:
 
-```default
+```python
 
     resp = await self._remote.predict_async(
         json_payload={"audio": audio_b64})
     return WhisperOutput(text=resp["text"], language=resp["language"])
-@@ -657,28 +553,24 @@
+@@ -630,28 +549,24 @@
 
     )
 ```
 
 ### *class* `truss_chains.RemoteErrorDetail`
-@@ -690,20 +582,13 @@
+@@ -663,20 +578,13 @@
 
 **Parameters:**
 
 #### format()
-@@ -712,7 +597,3 @@
+@@ -685,7 +593,3 @@
 
 * **Return type:**
   str
diff --git a/docs/contribute/contributing.md b/docs/contribute/contributing.md
deleted file mode 100644
index df156a152..000000000
--- a/docs/contribute/contributing.md
+++ /dev/null
@@ -1,9 +0,0 @@
-# Contributing to Truss
-
-Truss was first created at [Baseten](https://baseten.co), but as an open and living project eagerly accepts contributions of all kinds from the broader developer community. Please note that all participation with Truss falls under our [code of conduct](https://github.com/basetenlabs/truss/blob/main/CODE_OF_CONDUCT.md).
-
-We use GitHub features for project management on Truss:
-
-* For bugs and feature requests, [file an issue](https://github.com/basetenlabs/truss/issues).
-* For changes and updates, create a [pull request](https://github.com/basetenlabs/truss/pulls).
-* To view and comment on the roadmap, [check the projects tab](https://github.com/orgs/basetenlabs/projects/3).
diff --git a/docs/contribute/setup.md b/docs/contribute/setup.md
deleted file mode 100644
index 3cf7fd464..000000000
--- a/docs/contribute/setup.md
+++ /dev/null
@@ -1,38 +0,0 @@
-# Setting up local development (contributor)
-
-To get started contributing to Truss, first fork the repository.
-
-## Truss setup
-
-**PLEASE NOTE:** the ML ecosystem in general is still not well supported on M1 Macs, and as such, we do not recommend or support local development on M1 for Truss. Truss is well-optimized for use with GitHub Codespaces and other container-based development environments.
-
-We use `asdf` to manage Python binaries and `poetry` to manage Python dependencies.
-
-For development in a macOS environment, we use `brew` to manage system packages.
-
-```
-# Install asdf (or use another method https://asdf-vm.com/)
-brew install asdf
-
-# Install `asdf` managed python and poetry
-asdf plugin add python
-asdf plugin add poetry
-
-# Install poetry dependencies
-poetry install
-
-# And finally precommit
-poetry run pre-commit install
-```
-
-Then to run the entire test suite
-
-```
-poetry run pytest truss/tests
-```
-
-## Docs setup
-
-Contributions to documentation are very welcome! Simply edit the appropriate markdown files in the `docs/` folder and make a pull request. For larger changes, tutorials, or any questions please contact [team@trussml.com](mailto:team@trussml.com).
-
-Baseten docs are built using Mintlify. To run the docs site locally, use Mintlify's [getting started guide](https://mintlify.com/docs/development).
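A note on the new `bin/pyproject_toml_linter.py` pre-commit hook introduced above: the sketch below illustrates the extras derivation it performs, run with tomlkit against an in-memory document rather than the repository's `pyproject.toml`. The `[tool.dependency_metadata]` entries (`httpx`, `server`, `cli`) are hypothetical examples, not values taken from this PR.

```python
# Minimal sketch (hypothetical input): derive [tool.poetry.extras] groups from
# [tool.dependency_metadata], mirroring the logic in bin/pyproject_toml_linter.py.
import collections

import tomlkit

SAMPLE = """
[tool.poetry.dependencies]
python = ">=3.9,<3.13"
httpx = { version = ">=0.24", optional = true }

[tool.dependency_metadata]
httpx = { components = "server,cli" }
"""

content = tomlkit.parse(SAMPLE)
dependencies = content["tool"]["poetry"]["dependencies"]
metadata = content["tool"]["dependency_metadata"]

extras = collections.defaultdict(set)
for name, spec in dependencies.items():
    # Only optional dependencies are mapped into extras; plain version strings
    # (like `python` above) are base dependencies.
    if isinstance(spec, dict) and spec.get("optional", False):
        for component in metadata[name]["components"].split(","):
            if component != "base":
                extras[component].add(name)

# Prints {'server': ['httpx'], 'cli': ['httpx']}; the real hook also emits an
# "all" extra with every optional dependency and exits non-zero if the file
# changed, prompting a re-stage.
print({section: sorted(deps) for section, deps in extras.items()})
```

This is why CI jobs and the contributor setup in this diff install with `poetry install --with=dev,dev-server --extras=all`.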
diff --git a/poetry.lock b/poetry.lock
index 8ae6c9381..7f98610e1 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -327,17 +327,17 @@ files = [
 
 [[package]]
 name = "boto3"
-version = "1.35.52"
+version = "1.35.56"
 description = "The AWS SDK for Python"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "boto3-1.35.52-py3-none-any.whl", hash = "sha256:ec0e797441db56af63b1150bba49f114b0f885f5d76c3b6dc18075f73030d2bb"},
-    {file = "boto3-1.35.52.tar.gz", hash = "sha256:68299da8ab2bb37cc843d61b9f4c1c9367438406cfd65a8f593afc7b3bfe226d"},
+    {file = "boto3-1.35.56-py3-none-any.whl", hash = "sha256:d04608cf40f429025eb66b52b835bdc333436022918788853ed0bbbba6dd2f09"},
+    {file = "boto3-1.35.56.tar.gz", hash = "sha256:6fcc510a4e747e85f84046b0ba0e5b178e89ba0f8ac9e2b6ebb4cc925c68c23b"},
 ]
 
 [package.dependencies]
-botocore = ">=1.35.52,<1.36.0"
+botocore = ">=1.35.56,<1.36.0"
 jmespath = ">=0.7.1,<2.0.0"
 s3transfer = ">=0.10.0,<0.11.0"
@@ -346,13 +346,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
 
 [[package]]
 name = "botocore"
-version = "1.35.52"
+version = "1.35.56"
 description = "Low-level, data-driven core of boto 3."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "botocore-1.35.52-py3-none-any.whl", hash = "sha256:cdbb5e43c9c3a977763e2a10d3b8b9c405d51279f9fcfd4ca4800763b22acba5"},
-    {file = "botocore-1.35.52.tar.gz", hash = "sha256:1fe7485ea13d638b089103addd818c12984ff1e4d208de15f180b1e25ad944c5"},
+    {file = "botocore-1.35.56-py3-none-any.whl", hash = "sha256:4be97f7bc1fbf33ad71ee1b678cea0ecf9638e61d5f566a46f261cde969dd690"},
+    {file = "botocore-1.35.56.tar.gz", hash = "sha256:8a9e752c8e87a423575ac528340a35d4318b8576ae4c6e0acfe5a3867f6bbccf"},
 ]
 
 [package.dependencies]
@@ -1051,13 +1051,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"]
 
 [[package]]
 name = "google-auth"
-version = "2.35.0"
+version = "2.36.0"
 description = "Google Authentication Library"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "google_auth-2.35.0-py2.py3-none-any.whl", hash = "sha256:25df55f327ef021de8be50bad0dfd4a916ad0de96da86cd05661c9297723ad3f"},
-    {file = "google_auth-2.35.0.tar.gz", hash = "sha256:f4c64ed4e01e8e8b646ef34c018f8bf3338df0c8e37d8b3bba40e7f574a3278a"},
+    {file = "google_auth-2.36.0-py2.py3-none-any.whl", hash = "sha256:51a15d47028b66fd36e5c64a82d2d57480075bccc7da37cde257fc94177a61fb"},
+    {file = "google_auth-2.36.0.tar.gz", hash = "sha256:545e9618f2df0bcbb7dcbc45a546485b1212624716975a1ea5ae8149ce769ab1"},
 ]
 
 [package.dependencies]
@@ -1417,22 +1417,26 @@ all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2
 
 [[package]]
 name = "importlib-metadata"
-version = "8.4.0"
+version = "8.5.0"
 description = "Read metadata from Python packages"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "importlib_metadata-8.4.0-py3-none-any.whl", hash = "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1"},
-    {file = "importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5"},
+    {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"},
+    {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"},
 ]
 
 [package.dependencies]
-zipp = ">=0.5"
+zipp = ">=3.20"
 
 [package.extras]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"]
+cover = ["pytest-cov"]
 doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+enabler = ["pytest-enabler (>=2.2)"]
 perf = ["ipython"]
-test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"]
+test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"]
+type = ["pytest-mypy"]
 
 [[package]]
 name = "importlib-resources"
@@ -2313,132 +2317,132 @@ files = [
 
 [[package]]
 name = "opentelemetry-api"
-version = "1.27.0"
+version = "1.28.0"
 description = "OpenTelemetry Python API"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "opentelemetry_api-1.27.0-py3-none-any.whl", hash = "sha256:953d5871815e7c30c81b56d910c707588000fff7a3ca1c73e6531911d53065e7"},
-    {file = "opentelemetry_api-1.27.0.tar.gz", hash = "sha256:ed673583eaa5f81b5ce5e86ef7cdaf622f88ef65f0b9aab40b843dcae5bef342"},
+    {file = "opentelemetry_api-1.28.0-py3-none-any.whl", hash = "sha256:8457cd2c59ea1bd0988560f021656cecd254ad7ef6be4ba09dbefeca2409ce52"},
+    {file = "opentelemetry_api-1.28.0.tar.gz", hash = "sha256:578610bcb8aa5cdcb11169d136cc752958548fb6ccffb0969c1036b0ee9e5353"},
 ]
 
 [package.dependencies]
 deprecated = ">=1.2.6"
-importlib-metadata = ">=6.0,<=8.4.0"
+importlib-metadata = ">=6.0,<=8.5.0"
 
 [[package]]
 name = "opentelemetry-exporter-otlp"
-version = "1.27.0"
+version = "1.28.0"
 description = "OpenTelemetry Collector Exporters"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "opentelemetry_exporter_otlp-1.27.0-py3-none-any.whl", hash = "sha256:7688791cbdd951d71eb6445951d1cfbb7b6b2d7ee5948fac805d404802931145"},
-    {file = "opentelemetry_exporter_otlp-1.27.0.tar.gz", hash = "sha256:4a599459e623868cc95d933c301199c2367e530f089750e115599fccd67cb2a1"},
+    {file = "opentelemetry_exporter_otlp-1.28.0-py3-none-any.whl", hash = "sha256:1fd02d70f2c1b7ac5579c81e78de4594b188d3317c8ceb69e8b53900fb7b40fd"},
+    {file = "opentelemetry_exporter_otlp-1.28.0.tar.gz", hash = "sha256:31ae7495831681dd3da34ac457f6970f147465ae4b9aae3a888d7a581c7cd868"},
 ]
 
 [package.dependencies]
-opentelemetry-exporter-otlp-proto-grpc = "1.27.0"
-opentelemetry-exporter-otlp-proto-http = "1.27.0"
+opentelemetry-exporter-otlp-proto-grpc = "1.28.0"
+opentelemetry-exporter-otlp-proto-http = "1.28.0"
 
 [[package]]
 name = "opentelemetry-exporter-otlp-proto-common"
-version = "1.27.0"
+version = "1.28.0"
 description = "OpenTelemetry Protobuf encoding"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "opentelemetry_exporter_otlp_proto_common-1.27.0-py3-none-any.whl", hash = "sha256:675db7fffcb60946f3a5c43e17d1168a3307a94a930ecf8d2ea1f286f3d4f79a"},
-    {file = "opentelemetry_exporter_otlp_proto_common-1.27.0.tar.gz", hash = "sha256:159d27cf49f359e3798c4c3eb8da6ef4020e292571bd8c5604a2a573231dd5c8"},
+    {file = "opentelemetry_exporter_otlp_proto_common-1.28.0-py3-none-any.whl", hash = "sha256:467e6437d24e020156dffecece8c0a4471a8a60f6a34afeda7386df31a092410"},
+    {file = "opentelemetry_exporter_otlp_proto_common-1.28.0.tar.gz", hash = "sha256:5fa0419b0c8e291180b0fc8430a20dd44a3f3236f8e0827992145914f273ec4f"},
 ]
 
 [package.dependencies]
-opentelemetry-proto = "1.27.0"
+opentelemetry-proto = "1.28.0"
 
 [[package]]
 name = "opentelemetry-exporter-otlp-proto-grpc"
-version = "1.27.0"
+version = "1.28.0"
 description = "OpenTelemetry Collector Protobuf over gRPC Exporter"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "opentelemetry_exporter_otlp_proto_grpc-1.27.0-py3-none-any.whl", hash = "sha256:56b5bbd5d61aab05e300d9d62a6b3c134827bbd28d0b12f2649c2da368006c9e"},
-    {file = "opentelemetry_exporter_otlp_proto_grpc-1.27.0.tar.gz", hash = "sha256:af6f72f76bcf425dfb5ad11c1a6d6eca2863b91e63575f89bb7b4b55099d968f"},
+    {file = "opentelemetry_exporter_otlp_proto_grpc-1.28.0-py3-none-any.whl", hash = "sha256:edbdc53e7783f88d4535db5807cb91bd7b1ec9e9b9cdbfee14cd378f29a3b328"},
+    {file = "opentelemetry_exporter_otlp_proto_grpc-1.28.0.tar.gz", hash = "sha256:47a11c19dc7f4289e220108e113b7de90d59791cb4c37fc29f69a6a56f2c3735"},
 ]
 
 [package.dependencies]
 deprecated = ">=1.2.6"
 googleapis-common-protos = ">=1.52,<2.0"
-grpcio = ">=1.0.0,<2.0.0"
+grpcio = ">=1.63.2,<2.0.0"
 opentelemetry-api = ">=1.15,<2.0"
-opentelemetry-exporter-otlp-proto-common = "1.27.0"
-opentelemetry-proto = "1.27.0"
-opentelemetry-sdk = ">=1.27.0,<1.28.0"
+opentelemetry-exporter-otlp-proto-common = "1.28.0"
+opentelemetry-proto = "1.28.0"
+opentelemetry-sdk = ">=1.28.0,<1.29.0"
 
 [[package]]
 name = "opentelemetry-exporter-otlp-proto-http"
-version = "1.27.0"
+version = "1.28.0"
 description = "OpenTelemetry Collector Protobuf over HTTP Exporter"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "opentelemetry_exporter_otlp_proto_http-1.27.0-py3-none-any.whl", hash = "sha256:688027575c9da42e179a69fe17e2d1eba9b14d81de8d13553a21d3114f3b4d75"},
-    {file = "opentelemetry_exporter_otlp_proto_http-1.27.0.tar.gz", hash = "sha256:2103479092d8eb18f61f3fbff084f67cc7f2d4a7d37e75304b8b56c1d09ebef5"},
+    {file = "opentelemetry_exporter_otlp_proto_http-1.28.0-py3-none-any.whl", hash = "sha256:e8f3f7961b747edb6b44d51de4901a61e9c01d50debd747b120a08c4996c7e7b"},
+    {file = "opentelemetry_exporter_otlp_proto_http-1.28.0.tar.gz", hash = "sha256:d83a9a03a8367ead577f02a64127d827c79567de91560029688dd5cfd0152a8e"},
 ]
 
 [package.dependencies]
 deprecated = ">=1.2.6"
 googleapis-common-protos = ">=1.52,<2.0"
 opentelemetry-api = ">=1.15,<2.0"
-opentelemetry-exporter-otlp-proto-common = "1.27.0"
-opentelemetry-proto = "1.27.0"
-opentelemetry-sdk = ">=1.27.0,<1.28.0"
+opentelemetry-exporter-otlp-proto-common = "1.28.0"
+opentelemetry-proto = "1.28.0"
+opentelemetry-sdk = ">=1.28.0,<1.29.0"
 requests = ">=2.7,<3.0"
 
 [[package]]
 name = "opentelemetry-proto"
-version = "1.27.0"
+version = "1.28.0"
 description = "OpenTelemetry Python Proto"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "opentelemetry_proto-1.27.0-py3-none-any.whl", hash = "sha256:b133873de5581a50063e1e4b29cdcf0c5e253a8c2d8dc1229add20a4c3830ace"},
-    {file = "opentelemetry_proto-1.27.0.tar.gz", hash = "sha256:33c9345d91dafd8a74fc3d7576c5a38f18b7fdf8d02983ac67485386132aedd6"},
+    {file = "opentelemetry_proto-1.28.0-py3-none-any.whl", hash = "sha256:d5ad31b997846543b8e15504657d9a8cf1ad3c71dcbbb6c4799b1ab29e38f7f9"},
+    {file = "opentelemetry_proto-1.28.0.tar.gz", hash = "sha256:4a45728dfefa33f7908b828b9b7c9f2c6de42a05d5ec7b285662ddae71c4c870"},
 ]
 
 [package.dependencies]
-protobuf = ">=3.19,<5.0"
+protobuf = ">=5.0,<6.0"
 
 [[package]]
 name = "opentelemetry-sdk"
-version = "1.27.0"
+version = "1.28.0"
 description = "OpenTelemetry Python SDK"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "opentelemetry_sdk-1.27.0-py3-none-any.whl", hash = "sha256:365f5e32f920faf0fd9e14fdfd92c086e317eaa5f860edba9cdc17a380d9197d"},
-    {file = "opentelemetry_sdk-1.27.0.tar.gz", hash = "sha256:d525017dea0ccce9ba4e0245100ec46ecdc043f2d7b8315d56b19aff0904fa6f"},
+    {file = "opentelemetry_sdk-1.28.0-py3-none-any.whl", hash = "sha256:4b37da81d7fad67f6683c4420288c97f4ed0d988845d5886435f428ec4b8429a"},
+    {file = "opentelemetry_sdk-1.28.0.tar.gz", hash = "sha256:41d5420b2e3fb7716ff4981b510d551eff1fc60eb5a95cf7335b31166812a893"},
 ]
 
 [package.dependencies]
-opentelemetry-api = "1.27.0"
-opentelemetry-semantic-conventions = "0.48b0"
+opentelemetry-api = "1.28.0"
+opentelemetry-semantic-conventions = "0.49b0"
 typing-extensions = ">=3.7.4"
 
 [[package]]
 name = "opentelemetry-semantic-conventions"
-version = "0.48b0"
+version = "0.49b0"
 description = "OpenTelemetry Semantic Conventions"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "opentelemetry_semantic_conventions-0.48b0-py3-none-any.whl", hash = "sha256:a0de9f45c413a8669788a38569c7e0a11ce6ce97861a628cca785deecdc32a1f"},
-    {file = "opentelemetry_semantic_conventions-0.48b0.tar.gz", hash = "sha256:12d74983783b6878162208be57c9effcb89dc88691c64992d70bb89dc00daa1a"},
+    {file = "opentelemetry_semantic_conventions-0.49b0-py3-none-any.whl", hash = "sha256:0458117f6ead0b12e3221813e3e511d85698c31901cac84682052adb9c17c7cd"},
+    {file = "opentelemetry_semantic_conventions-0.49b0.tar.gz", hash = "sha256:dbc7b28339e5390b6b28e022835f9bac4e134a80ebf640848306d3c5192557e8"},
 ]
 
 [package.dependencies]
 deprecated = ">=1.2.6"
-opentelemetry-api = "1.27.0"
+opentelemetry-api = "1.28.0"
 
 [[package]]
 name = "packaging"
@@ -2727,22 +2731,22 @@ testing = ["google-api-core (>=1.31.5)"]
 
 [[package]]
 name = "protobuf"
-version = "4.25.5"
+version = "5.28.3"
 description = ""
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "protobuf-4.25.5-cp310-abi3-win32.whl", hash = "sha256:5e61fd921603f58d2f5acb2806a929b4675f8874ff5f330b7d6f7e2e784bbcd8"},
-    {file = "protobuf-4.25.5-cp310-abi3-win_amd64.whl", hash = "sha256:4be0571adcbe712b282a330c6e89eae24281344429ae95c6d85e79e84780f5ea"},
-    {file = "protobuf-4.25.5-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:b2fde3d805354df675ea4c7c6338c1aecd254dfc9925e88c6d31a2bcb97eb173"},
-    {file = "protobuf-4.25.5-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:919ad92d9b0310070f8356c24b855c98df2b8bd207ebc1c0c6fcc9ab1e007f3d"},
-    {file = "protobuf-4.25.5-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:fe14e16c22be926d3abfcb500e60cab068baf10b542b8c858fa27e098123e331"},
-    {file = "protobuf-4.25.5-cp38-cp38-win32.whl", hash = "sha256:98d8d8aa50de6a2747efd9cceba361c9034050ecce3e09136f90de37ddba66e1"},
-    {file = "protobuf-4.25.5-cp38-cp38-win_amd64.whl", hash = "sha256:b0234dd5a03049e4ddd94b93400b67803c823cfc405689688f59b34e0742381a"},
-    {file = "protobuf-4.25.5-cp39-cp39-win32.whl", hash = "sha256:abe32aad8561aa7cc94fc7ba4fdef646e576983edb94a73381b03c53728a626f"},
-    {file = "protobuf-4.25.5-cp39-cp39-win_amd64.whl", hash = "sha256:7a183f592dc80aa7c8da7ad9e55091c4ffc9497b3054452d629bb85fa27c2a45"},
-    {file = "protobuf-4.25.5-py3-none-any.whl", hash = "sha256:0aebecb809cae990f8129ada5ca273d9d670b76d9bfc9b1809f0a9c02b7dbf41"},
-    {file = "protobuf-4.25.5.tar.gz", hash = "sha256:7f8249476b4a9473645db7f8ab42b02fe1488cbe5fb72fddd445e0665afd8584"},
+    {file = "protobuf-5.28.3-cp310-abi3-win32.whl", hash = "sha256:0c4eec6f987338617072592b97943fdbe30d019c56126493111cf24344c1cc24"},
+    {file = "protobuf-5.28.3-cp310-abi3-win_amd64.whl", hash = "sha256:91fba8f445723fcf400fdbe9ca796b19d3b1242cd873907979b9ed71e4afe868"},
+    {file = "protobuf-5.28.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a3f6857551e53ce35e60b403b8a27b0295f7d6eb63d10484f12bc6879c715687"},
+    {file = "protobuf-5.28.3-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:3fa2de6b8b29d12c61911505d893afe7320ce7ccba4df913e2971461fa36d584"},
+    {file = "protobuf-5.28.3-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:712319fbdddb46f21abb66cd33cb9e491a5763b2febd8f228251add221981135"},
+    {file = "protobuf-5.28.3-cp38-cp38-win32.whl", hash = "sha256:3e6101d095dfd119513cde7259aa703d16c6bbdfae2554dfe5cfdbe94e32d548"},
+    {file = "protobuf-5.28.3-cp38-cp38-win_amd64.whl", hash = "sha256:27b246b3723692bf1068d5734ddaf2fccc2cdd6e0c9b47fe099244d80200593b"},
+    {file = "protobuf-5.28.3-cp39-cp39-win32.whl", hash = "sha256:135658402f71bbd49500322c0f736145731b16fc79dc8f367ab544a17eab4535"},
+    {file = "protobuf-5.28.3-cp39-cp39-win_amd64.whl", hash = "sha256:70585a70fc2dd4818c51287ceef5bdba6387f88a578c86d47bb34669b5552c36"},
+    {file = "protobuf-5.28.3-py3-none-any.whl", hash = "sha256:cee1757663fa32a1ee673434fcf3bf24dd54763c79690201208bafec62f19eed"},
+    {file = "protobuf-5.28.3.tar.gz", hash = "sha256:64badbc49180a5e401f373f9ce7ab1d18b63f7dd4a9cdc43c92b9f0b481cef7b"},
 ]
 
 [[package]]
@@ -3341,13 +3345,13 @@ fixture = ["fixtures"]
 
 [[package]]
 name = "rich"
-version = "13.9.3"
+version = "13.9.4"
 description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
 optional = false
 python-versions = ">=3.8.0"
 files = [
-    {file = "rich-13.9.3-py3-none-any.whl", hash = "sha256:9836f5096eb2172c9e77df411c1b009bace4193d6a481d534fea75ebba758283"},
-    {file = "rich-13.9.3.tar.gz", hash = "sha256:bc1e01b899537598cf02579d2b9f4a415104d3fc439313a7a2c165d76557a08e"},
+    {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"},
+    {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"},
 ]
 
 [package.dependencies]
@@ -3380,114 +3384,114 @@ docs = ["markdown-include", "mkdocs", "mkdocs-glightbox", "mkdocs-material-exten
 
 [[package]]
 name = "rpds-py"
-version = "0.20.0"
+version = "0.20.1"
 description = "Python bindings to Rust's persistent data structures (rpds)"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "rpds_py-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3ad0fda1635f8439cde85c700f964b23ed5fc2d28016b32b9ee5fe30da5c84e2"},
-    {file = "rpds_py-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9bb4a0d90fdb03437c109a17eade42dfbf6190408f29b2744114d11586611d6f"},
-    {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6377e647bbfd0a0b159fe557f2c6c602c159fc752fa316572f012fc0bf67150"},
-    {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb851b7df9dda52dc1415ebee12362047ce771fc36914586b2e9fcbd7d293b3e"},
-    {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e0f80b739e5a8f54837be5d5c924483996b603d5502bfff79bf33da06164ee2"},
-    {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a8c94dad2e45324fc74dce25e1645d4d14df9a4e54a30fa0ae8bad9a63928e3"},
-    {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e604fe73ba048c06085beaf51147eaec7df856824bfe7b98657cf436623daf"},
-    {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:df3de6b7726b52966edf29663e57306b23ef775faf0ac01a3e9f4012a24a4140"},
-    {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:cf258ede5bc22a45c8e726b29835b9303c285ab46fc7c3a4cc770736b5304c9f"}, - {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:55fea87029cded5df854ca7e192ec7bdb7ecd1d9a3f63d5c4eb09148acf4a7ce"}, - {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ae94bd0b2f02c28e199e9bc51485d0c5601f58780636185660f86bf80c89af94"}, - {file = "rpds_py-0.20.0-cp310-none-win32.whl", hash = "sha256:28527c685f237c05445efec62426d285e47a58fb05ba0090a4340b73ecda6dee"}, - {file = "rpds_py-0.20.0-cp310-none-win_amd64.whl", hash = "sha256:238a2d5b1cad28cdc6ed15faf93a998336eb041c4e440dd7f902528b8891b399"}, - {file = "rpds_py-0.20.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac2f4f7a98934c2ed6505aead07b979e6f999389f16b714448fb39bbaa86a489"}, - {file = "rpds_py-0.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:220002c1b846db9afd83371d08d239fdc865e8f8c5795bbaec20916a76db3318"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d7919548df3f25374a1f5d01fbcd38dacab338ef5f33e044744b5c36729c8db"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:758406267907b3781beee0f0edfe4a179fbd97c0be2e9b1154d7f0a1279cf8e5"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d61339e9f84a3f0767b1995adfb171a0d00a1185192718a17af6e124728e0f5"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1259c7b3705ac0a0bd38197565a5d603218591d3f6cee6e614e380b6ba61c6f6"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c1dc0f53856b9cc9a0ccca0a7cc61d3d20a7088201c0937f3f4048c1718a209"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7e60cb630f674a31f0368ed32b2a6b4331b8350d67de53c0359992444b116dd3"}, - {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dbe982f38565bb50cb7fb061ebf762c2f254ca3d8c20d4006878766e84266272"}, - {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:514b3293b64187172bc77c8fb0cdae26981618021053b30d8371c3a902d4d5ad"}, - {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d0a26ffe9d4dd35e4dfdd1e71f46401cff0181c75ac174711ccff0459135fa58"}, - {file = "rpds_py-0.20.0-cp311-none-win32.whl", hash = "sha256:89c19a494bf3ad08c1da49445cc5d13d8fefc265f48ee7e7556839acdacf69d0"}, - {file = "rpds_py-0.20.0-cp311-none-win_amd64.whl", hash = "sha256:c638144ce971df84650d3ed0096e2ae7af8e62ecbbb7b201c8935c370df00a2c"}, - {file = "rpds_py-0.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a84ab91cbe7aab97f7446652d0ed37d35b68a465aeef8fc41932a9d7eee2c1a6"}, - {file = "rpds_py-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:56e27147a5a4c2c21633ff8475d185734c0e4befd1c989b5b95a5d0db699b21b"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2580b0c34583b85efec8c5c5ec9edf2dfe817330cc882ee972ae650e7b5ef739"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b80d4a7900cf6b66bb9cee5c352b2d708e29e5a37fe9bf784fa97fc11504bf6c"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50eccbf054e62a7b2209b28dc7a22d6254860209d6753e6b78cfaeb0075d7bee"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:49a8063ea4296b3a7e81a5dfb8f7b2d73f0b1c20c2af401fb0cdf22e14711a96"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea438162a9fcbee3ecf36c23e6c68237479f89f962f82dae83dc15feeceb37e4"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18d7585c463087bddcfa74c2ba267339f14f2515158ac4db30b1f9cbdb62c8ef"}, - {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d4c7d1a051eeb39f5c9547e82ea27cbcc28338482242e3e0b7768033cb083821"}, - {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4df1e3b3bec320790f699890d41c59d250f6beda159ea3c44c3f5bac1976940"}, - {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2cf126d33a91ee6eedc7f3197b53e87a2acdac63602c0f03a02dd69e4b138174"}, - {file = "rpds_py-0.20.0-cp312-none-win32.whl", hash = "sha256:8bc7690f7caee50b04a79bf017a8d020c1f48c2a1077ffe172abec59870f1139"}, - {file = "rpds_py-0.20.0-cp312-none-win_amd64.whl", hash = "sha256:0e13e6952ef264c40587d510ad676a988df19adea20444c2b295e536457bc585"}, - {file = "rpds_py-0.20.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:aa9a0521aeca7d4941499a73ad7d4f8ffa3d1affc50b9ea11d992cd7eff18a29"}, - {file = "rpds_py-0.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1f1d51eccb7e6c32ae89243cb352389228ea62f89cd80823ea7dd1b98e0b91"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a86a9b96070674fc88b6f9f71a97d2c1d3e5165574615d1f9168ecba4cecb24"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c8ef2ebf76df43f5750b46851ed1cdf8f109d7787ca40035fe19fbdc1acc5a7"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b25f024b421d5859d156750ea9a65651793d51b76a2e9238c05c9d5f203a9"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57eb94a8c16ab08fef6404301c38318e2c5a32216bf5de453e2714c964c125c8"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1940dae14e715e2e02dfd5b0f64a52e8374a517a1e531ad9412319dc3ac7879"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d20277fd62e1b992a50c43f13fbe13277a31f8c9f70d59759c88f644d66c619f"}, - {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:06db23d43f26478303e954c34c75182356ca9aa7797d22c5345b16871ab9c45c"}, - {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b2a5db5397d82fa847e4c624b0c98fe59d2d9b7cf0ce6de09e4d2e80f8f5b3f2"}, - {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a35df9f5548fd79cb2f52d27182108c3e6641a4feb0f39067911bf2adaa3e57"}, - {file = "rpds_py-0.20.0-cp313-none-win32.whl", hash = "sha256:fd2d84f40633bc475ef2d5490b9c19543fbf18596dcb1b291e3a12ea5d722f7a"}, - {file = "rpds_py-0.20.0-cp313-none-win_amd64.whl", hash = "sha256:9bc2d153989e3216b0559251b0c260cfd168ec78b1fac33dd485750a228db5a2"}, - {file = "rpds_py-0.20.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:f2fbf7db2012d4876fb0d66b5b9ba6591197b0f165db8d99371d976546472a24"}, - {file = "rpds_py-0.20.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1e5f3cd7397c8f86c8cc72d5a791071431c108edd79872cdd96e00abd8497d29"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ce9845054c13696f7af7f2b353e6b4f676dab1b4b215d7fe5e05c6f8bb06f965"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c3e130fd0ec56cb76eb49ef52faead8ff09d13f4527e9b0c400307ff72b408e1"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b16aa0107ecb512b568244ef461f27697164d9a68d8b35090e9b0c1c8b27752"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7f429242aae2947246587d2964fad750b79e8c233a2367f71b554e9447949c"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af0fc424a5842a11e28956e69395fbbeab2c97c42253169d87e90aac2886d751"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b8c00a3b1e70c1d3891f0db1b05292747f0dbcfb49c43f9244d04c70fbc40eb8"}, - {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:40ce74fc86ee4645d0a225498d091d8bc61f39b709ebef8204cb8b5a464d3c0e"}, - {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:4fe84294c7019456e56d93e8ababdad5a329cd25975be749c3f5f558abb48253"}, - {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:338ca4539aad4ce70a656e5187a3a31c5204f261aef9f6ab50e50bcdffaf050a"}, - {file = "rpds_py-0.20.0-cp38-none-win32.whl", hash = "sha256:54b43a2b07db18314669092bb2de584524d1ef414588780261e31e85846c26a5"}, - {file = "rpds_py-0.20.0-cp38-none-win_amd64.whl", hash = "sha256:a1862d2d7ce1674cffa6d186d53ca95c6e17ed2b06b3f4c476173565c862d232"}, - {file = "rpds_py-0.20.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3fde368e9140312b6e8b6c09fb9f8c8c2f00999d1823403ae90cc00480221b22"}, - {file = "rpds_py-0.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9824fb430c9cf9af743cf7aaf6707bf14323fb51ee74425c380f4c846ea70789"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11ef6ce74616342888b69878d45e9f779b95d4bd48b382a229fe624a409b72c5"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c52d3f2f82b763a24ef52f5d24358553e8403ce05f893b5347098014f2d9eff2"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d35cef91e59ebbeaa45214861874bc6f19eb35de96db73e467a8358d701a96c"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d72278a30111e5b5525c1dd96120d9e958464316f55adb030433ea905866f4de"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4c29cbbba378759ac5786730d1c3cb4ec6f8ababf5c42a9ce303dc4b3d08cda"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6632f2d04f15d1bd6fe0eedd3b86d9061b836ddca4c03d5cf5c7e9e6b7c14580"}, - {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d0b67d87bb45ed1cd020e8fbf2307d449b68abc45402fe1a4ac9e46c3c8b192b"}, - {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ec31a99ca63bf3cd7f1a5ac9fe95c5e2d060d3c768a09bc1d16e235840861420"}, - {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22e6c9976e38f4d8c4a63bd8a8edac5307dffd3ee7e6026d97f3cc3a2dc02a0b"}, - {file = "rpds_py-0.20.0-cp39-none-win32.whl", hash = "sha256:569b3ea770c2717b730b61998b6c54996adee3cef69fc28d444f3e7920313cf7"}, - {file = "rpds_py-0.20.0-cp39-none-win_amd64.whl", hash = 
"sha256:e6900ecdd50ce0facf703f7a00df12374b74bbc8ad9fe0f6559947fb20f82364"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:617c7357272c67696fd052811e352ac54ed1d9b49ab370261a80d3b6ce385045"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9426133526f69fcaba6e42146b4e12d6bc6c839b8b555097020e2b78ce908dcc"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deb62214c42a261cb3eb04d474f7155279c1a8a8c30ac89b7dcb1721d92c3c02"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcaeb7b57f1a1e071ebd748984359fef83ecb026325b9d4ca847c95bc7311c92"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d454b8749b4bd70dd0a79f428731ee263fa6995f83ccb8bada706e8d1d3ff89d"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d807dc2051abe041b6649681dce568f8e10668e3c1c6543ebae58f2d7e617855"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c20f0ddeb6e29126d45f89206b8291352b8c5b44384e78a6499d68b52ae511"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7f19250ceef892adf27f0399b9e5afad019288e9be756d6919cb58892129f51"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4f1ed4749a08379555cebf4650453f14452eaa9c43d0a95c49db50c18b7da075"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:dcedf0b42bcb4cfff4101d7771a10532415a6106062f005ab97d1d0ab5681c60"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:39ed0d010457a78f54090fafb5d108501b5aa5604cc22408fc1c0c77eac14344"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bb273176be34a746bdac0b0d7e4e2c467323d13640b736c4c477881a3220a989"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f918a1a130a6dfe1d7fe0f105064141342e7dd1611f2e6a21cd2f5c8cb1cfb3e"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f60012a73aa396be721558caa3a6fd49b3dd0033d1675c6d59c4502e870fcf0c"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d2b1ad682a3dfda2a4e8ad8572f3100f95fad98cb99faf37ff0ddfe9cbf9d03"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:614fdafe9f5f19c63ea02817fa4861c606a59a604a77c8cdef5aa01d28b97921"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa518bcd7600c584bf42e6617ee8132869e877db2f76bcdc281ec6a4113a53ab"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0475242f447cc6cb8a9dd486d68b2ef7fbee84427124c232bff5f63b1fe11e5"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90a4cd061914a60bd51c68bcb4357086991bd0bb93d8aa66a6da7701370708f"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:def7400461c3a3f26e49078302e1c1b38f6752342c77e3cf72ce91ca69fb1bc1"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:65794e4048ee837494aea3c21a28ad5fc080994dfba5b036cf84de37f7ad5074"}, - {file 
= "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:faefcc78f53a88f3076b7f8be0a8f8d35133a3ecf7f3770895c25f8813460f08"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:5b4f105deeffa28bbcdff6c49b34e74903139afa690e35d2d9e3c2c2fba18cec"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fdfc3a892927458d98f3d55428ae46b921d1f7543b89382fdb483f5640daaec8"}, - {file = "rpds_py-0.20.0.tar.gz", hash = "sha256:d72a210824facfdaf8768cf2d7ca25a042c30320b3020de2fa04640920d4e121"}, + {file = "rpds_py-0.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:a649dfd735fff086e8a9d0503a9f0c7d01b7912a333c7ae77e1515c08c146dad"}, + {file = "rpds_py-0.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f16bc1334853e91ddaaa1217045dd7be166170beec337576818461268a3de67f"}, + {file = "rpds_py-0.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14511a539afee6f9ab492b543060c7491c99924314977a55c98bfa2ee29ce78c"}, + {file = "rpds_py-0.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3ccb8ac2d3c71cda472b75af42818981bdacf48d2e21c36331b50b4f16930163"}, + {file = "rpds_py-0.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c142b88039b92e7e0cb2552e8967077e3179b22359e945574f5e2764c3953dcf"}, + {file = "rpds_py-0.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f19169781dddae7478a32301b499b2858bc52fc45a112955e798ee307e294977"}, + {file = "rpds_py-0.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13c56de6518e14b9bf6edde23c4c39dac5b48dcf04160ea7bce8fca8397cdf86"}, + {file = "rpds_py-0.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:925d176a549f4832c6f69fa6026071294ab5910e82a0fe6c6228fce17b0706bd"}, + {file = "rpds_py-0.20.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:78f0b6877bfce7a3d1ff150391354a410c55d3cdce386f862926a4958ad5ab7e"}, + {file = "rpds_py-0.20.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3dd645e2b0dcb0fd05bf58e2e54c13875847687d0b71941ad2e757e5d89d4356"}, + {file = "rpds_py-0.20.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4f676e21db2f8c72ff0936f895271e7a700aa1f8d31b40e4e43442ba94973899"}, + {file = "rpds_py-0.20.1-cp310-none-win32.whl", hash = "sha256:648386ddd1e19b4a6abab69139b002bc49ebf065b596119f8f37c38e9ecee8ff"}, + {file = "rpds_py-0.20.1-cp310-none-win_amd64.whl", hash = "sha256:d9ecb51120de61e4604650666d1f2b68444d46ae18fd492245a08f53ad2b7711"}, + {file = "rpds_py-0.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:762703bdd2b30983c1d9e62b4c88664df4a8a4d5ec0e9253b0231171f18f6d75"}, + {file = "rpds_py-0.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0b581f47257a9fce535c4567782a8976002d6b8afa2c39ff616edf87cbeff712"}, + {file = "rpds_py-0.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:842c19a6ce894493563c3bd00d81d5100e8e57d70209e84d5491940fdb8b9e3a"}, + {file = "rpds_py-0.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42cbde7789f5c0bcd6816cb29808e36c01b960fb5d29f11e052215aa85497c93"}, + {file = "rpds_py-0.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c8e9340ce5a52f95fa7d3b552b35c7e8f3874d74a03a8a69279fd5fca5dc751"}, + {file = "rpds_py-0.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ba6f89cac95c0900d932c9efb7f0fb6ca47f6687feec41abcb1bd5e2bd45535"}, + {file 
= "rpds_py-0.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a916087371afd9648e1962e67403c53f9c49ca47b9680adbeef79da3a7811b0"}, + {file = "rpds_py-0.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:200a23239781f46149e6a415f1e870c5ef1e712939fe8fa63035cd053ac2638e"}, + {file = "rpds_py-0.20.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:58b1d5dd591973d426cbb2da5e27ba0339209832b2f3315928c9790e13f159e8"}, + {file = "rpds_py-0.20.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6b73c67850ca7cae0f6c56f71e356d7e9fa25958d3e18a64927c2d930859b8e4"}, + {file = "rpds_py-0.20.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d8761c3c891cc51e90bc9926d6d2f59b27beaf86c74622c8979380a29cc23ac3"}, + {file = "rpds_py-0.20.1-cp311-none-win32.whl", hash = "sha256:cd945871335a639275eee904caef90041568ce3b42f402c6959b460d25ae8732"}, + {file = "rpds_py-0.20.1-cp311-none-win_amd64.whl", hash = "sha256:7e21b7031e17c6b0e445f42ccc77f79a97e2687023c5746bfb7a9e45e0921b84"}, + {file = "rpds_py-0.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:36785be22066966a27348444b40389f8444671630063edfb1a2eb04318721e17"}, + {file = "rpds_py-0.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:142c0a5124d9bd0e2976089484af5c74f47bd3298f2ed651ef54ea728d2ea42c"}, + {file = "rpds_py-0.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbddc10776ca7ebf2a299c41a4dde8ea0d8e3547bfd731cb87af2e8f5bf8962d"}, + {file = "rpds_py-0.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:15a842bb369e00295392e7ce192de9dcbf136954614124a667f9f9f17d6a216f"}, + {file = "rpds_py-0.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be5ef2f1fc586a7372bfc355986226484e06d1dc4f9402539872c8bb99e34b01"}, + {file = "rpds_py-0.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbcf360c9e3399b056a238523146ea77eeb2a596ce263b8814c900263e46031a"}, + {file = "rpds_py-0.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecd27a66740ffd621d20b9a2f2b5ee4129a56e27bfb9458a3bcc2e45794c96cb"}, + {file = "rpds_py-0.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0b937b2a1988f184a3e9e577adaa8aede21ec0b38320d6009e02bd026db04fa"}, + {file = "rpds_py-0.20.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6889469bfdc1eddf489729b471303739bf04555bb151fe8875931f8564309afc"}, + {file = "rpds_py-0.20.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:19b73643c802f4eaf13d97f7855d0fb527fbc92ab7013c4ad0e13a6ae0ed23bd"}, + {file = "rpds_py-0.20.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3c6afcf2338e7f374e8edc765c79fbcb4061d02b15dd5f8f314a4af2bdc7feb5"}, + {file = "rpds_py-0.20.1-cp312-none-win32.whl", hash = "sha256:dc73505153798c6f74854aba69cc75953888cf9866465196889c7cdd351e720c"}, + {file = "rpds_py-0.20.1-cp312-none-win_amd64.whl", hash = "sha256:8bbe951244a838a51289ee53a6bae3a07f26d4e179b96fc7ddd3301caf0518eb"}, + {file = "rpds_py-0.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:6ca91093a4a8da4afae7fe6a222c3b53ee4eef433ebfee4d54978a103435159e"}, + {file = "rpds_py-0.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b9c2fe36d1f758b28121bef29ed1dee9b7a2453e997528e7d1ac99b94892527c"}, + {file = "rpds_py-0.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f009c69bc8c53db5dfab72ac760895dc1f2bc1b62ab7408b253c8d1ec52459fc"}, + {file = 
"rpds_py-0.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6740a3e8d43a32629bb9b009017ea5b9e713b7210ba48ac8d4cb6d99d86c8ee8"}, + {file = "rpds_py-0.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:32b922e13d4c0080d03e7b62991ad7f5007d9cd74e239c4b16bc85ae8b70252d"}, + {file = "rpds_py-0.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe00a9057d100e69b4ae4a094203a708d65b0f345ed546fdef86498bf5390982"}, + {file = "rpds_py-0.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49fe9b04b6fa685bd39237d45fad89ba19e9163a1ccaa16611a812e682913496"}, + {file = "rpds_py-0.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aa7ac11e294304e615b43f8c441fee5d40094275ed7311f3420d805fde9b07b4"}, + {file = "rpds_py-0.20.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aa97af1558a9bef4025f8f5d8c60d712e0a3b13a2fe875511defc6ee77a1ab7"}, + {file = "rpds_py-0.20.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:483b29f6f7ffa6af845107d4efe2e3fa8fb2693de8657bc1849f674296ff6a5a"}, + {file = "rpds_py-0.20.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:37fe0f12aebb6a0e3e17bb4cd356b1286d2d18d2e93b2d39fe647138458b4bcb"}, + {file = "rpds_py-0.20.1-cp313-none-win32.whl", hash = "sha256:a624cc00ef2158e04188df5e3016385b9353638139a06fb77057b3498f794782"}, + {file = "rpds_py-0.20.1-cp313-none-win_amd64.whl", hash = "sha256:b71b8666eeea69d6363248822078c075bac6ed135faa9216aa85f295ff009b1e"}, + {file = "rpds_py-0.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:5b48e790e0355865197ad0aca8cde3d8ede347831e1959e158369eb3493d2191"}, + {file = "rpds_py-0.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3e310838a5801795207c66c73ea903deda321e6146d6f282e85fa7e3e4854804"}, + {file = "rpds_py-0.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2249280b870e6a42c0d972339e9cc22ee98730a99cd7f2f727549af80dd5a963"}, + {file = "rpds_py-0.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e79059d67bea28b53d255c1437b25391653263f0e69cd7dec170d778fdbca95e"}, + {file = "rpds_py-0.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b431c777c9653e569986ecf69ff4a5dba281cded16043d348bf9ba505486f36"}, + {file = "rpds_py-0.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da584ff96ec95e97925174eb8237e32f626e7a1a97888cdd27ee2f1f24dd0ad8"}, + {file = "rpds_py-0.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02a0629ec053fc013808a85178524e3cb63a61dbc35b22499870194a63578fb9"}, + {file = "rpds_py-0.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fbf15aff64a163db29a91ed0868af181d6f68ec1a3a7d5afcfe4501252840bad"}, + {file = "rpds_py-0.20.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:07924c1b938798797d60c6308fa8ad3b3f0201802f82e4a2c41bb3fafb44cc28"}, + {file = "rpds_py-0.20.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:4a5a844f68776a7715ecb30843b453f07ac89bad393431efbf7accca3ef599c1"}, + {file = "rpds_py-0.20.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:518d2ca43c358929bf08f9079b617f1c2ca6e8848f83c1225c88caeac46e6cbc"}, + {file = "rpds_py-0.20.1-cp38-none-win32.whl", hash = "sha256:3aea7eed3e55119635a74bbeb80b35e776bafccb70d97e8ff838816c124539f1"}, + {file = "rpds_py-0.20.1-cp38-none-win_amd64.whl", hash = "sha256:7dca7081e9a0c3b6490a145593f6fe3173a94197f2cb9891183ef75e9d64c425"}, + {file = 
"rpds_py-0.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b41b6321805c472f66990c2849e152aff7bc359eb92f781e3f606609eac877ad"}, + {file = "rpds_py-0.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a90c373ea2975519b58dece25853dbcb9779b05cc46b4819cb1917e3b3215b6"}, + {file = "rpds_py-0.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16d4477bcb9fbbd7b5b0e4a5d9b493e42026c0bf1f06f723a9353f5153e75d30"}, + {file = "rpds_py-0.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:84b8382a90539910b53a6307f7c35697bc7e6ffb25d9c1d4e998a13e842a5e83"}, + {file = "rpds_py-0.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4888e117dd41b9d34194d9e31631af70d3d526efc363085e3089ab1a62c32ed1"}, + {file = "rpds_py-0.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5265505b3d61a0f56618c9b941dc54dc334dc6e660f1592d112cd103d914a6db"}, + {file = "rpds_py-0.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e75ba609dba23f2c95b776efb9dd3f0b78a76a151e96f96cc5b6b1b0004de66f"}, + {file = "rpds_py-0.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1791ff70bc975b098fe6ecf04356a10e9e2bd7dc21fa7351c1742fdeb9b4966f"}, + {file = "rpds_py-0.20.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d126b52e4a473d40232ec2052a8b232270ed1f8c9571aaf33f73a14cc298c24f"}, + {file = "rpds_py-0.20.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:c14937af98c4cc362a1d4374806204dd51b1e12dded1ae30645c298e5a5c4cb1"}, + {file = "rpds_py-0.20.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3d089d0b88996df627693639d123c8158cff41c0651f646cd8fd292c7da90eaf"}, + {file = "rpds_py-0.20.1-cp39-none-win32.whl", hash = "sha256:653647b8838cf83b2e7e6a0364f49af96deec64d2a6578324db58380cff82aca"}, + {file = "rpds_py-0.20.1-cp39-none-win_amd64.whl", hash = "sha256:fa41a64ac5b08b292906e248549ab48b69c5428f3987b09689ab2441f267d04d"}, + {file = "rpds_py-0.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a07ced2b22f0cf0b55a6a510078174c31b6d8544f3bc00c2bcee52b3d613f74"}, + {file = "rpds_py-0.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:68cb0a499f2c4a088fd2f521453e22ed3527154136a855c62e148b7883b99f9a"}, + {file = "rpds_py-0.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa3060d885657abc549b2a0f8e1b79699290e5d83845141717c6c90c2df38311"}, + {file = "rpds_py-0.20.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:95f3b65d2392e1c5cec27cff08fdc0080270d5a1a4b2ea1d51d5f4a2620ff08d"}, + {file = "rpds_py-0.20.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2cc3712a4b0b76a1d45a9302dd2f53ff339614b1c29603a911318f2357b04dd2"}, + {file = "rpds_py-0.20.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d4eea0761e37485c9b81400437adb11c40e13ef513375bbd6973e34100aeb06"}, + {file = "rpds_py-0.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f5179583d7a6cdb981151dd349786cbc318bab54963a192692d945dd3f6435d"}, + {file = "rpds_py-0.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2fbb0ffc754490aff6dabbf28064be47f0f9ca0b9755976f945214965b3ace7e"}, + {file = "rpds_py-0.20.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:a94e52537a0e0a85429eda9e49f272ada715506d3b2431f64b8a3e34eb5f3e75"}, + {file = 
"rpds_py-0.20.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:92b68b79c0da2a980b1c4197e56ac3dd0c8a149b4603747c4378914a68706979"}, + {file = "rpds_py-0.20.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:93da1d3db08a827eda74356f9f58884adb254e59b6664f64cc04cdff2cc19b0d"}, + {file = "rpds_py-0.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:754bbed1a4ca48479e9d4182a561d001bbf81543876cdded6f695ec3d465846b"}, + {file = "rpds_py-0.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ca449520e7484534a2a44faf629362cae62b660601432d04c482283c47eaebab"}, + {file = "rpds_py-0.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:9c4cb04a16b0f199a8c9bf807269b2f63b7b5b11425e4a6bd44bd6961d28282c"}, + {file = "rpds_py-0.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb63804105143c7e24cee7db89e37cb3f3941f8e80c4379a0b355c52a52b6780"}, + {file = "rpds_py-0.20.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:55cd1fa4ecfa6d9f14fbd97ac24803e6f73e897c738f771a9fe038f2f11ff07c"}, + {file = "rpds_py-0.20.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f8f741b6292c86059ed175d80eefa80997125b7c478fb8769fd9ac8943a16c0"}, + {file = "rpds_py-0.20.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fc212779bf8411667234b3cdd34d53de6c2b8b8b958e1e12cb473a5f367c338"}, + {file = "rpds_py-0.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ad56edabcdb428c2e33bbf24f255fe2b43253b7d13a2cdbf05de955217313e6"}, + {file = "rpds_py-0.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0a3a1e9ee9728b2c1734f65d6a1d376c6f2f6fdcc13bb007a08cc4b1ff576dc5"}, + {file = "rpds_py-0.20.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:e13de156137b7095442b288e72f33503a469aa1980ed856b43c353ac86390519"}, + {file = "rpds_py-0.20.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:07f59760ef99f31422c49038964b31c4dfcfeb5d2384ebfc71058a7c9adae2d2"}, + {file = "rpds_py-0.20.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:59240685e7da61fb78f65a9f07f8108e36a83317c53f7b276b4175dc44151684"}, + {file = "rpds_py-0.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:83cba698cfb3c2c5a7c3c6bac12fe6c6a51aae69513726be6411076185a8b24a"}, + {file = "rpds_py-0.20.1.tar.gz", hash = "sha256:e1791c4aabd117653530dccd24108fa03cc6baf21f58b950d0a73c3b3b29a350"}, ] [[package]] @@ -3684,6 +3688,17 @@ files = [ {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"}, ] +[[package]] +name = "tomlkit" +version = "0.13.2" +description = "Style preserving TOML library" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, + {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, +] + [[package]] name = "tornado" version = "6.4.1" @@ -3706,13 +3721,13 @@ files = [ [[package]] name = "tqdm" -version = "4.66.6" +version = "4.67.0" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" files = [ - {file = "tqdm-4.66.6-py3-none-any.whl", hash = "sha256:223e8b5359c2efc4b30555531f09e9f2f3589bcd7fdd389271191031b49b7a63"}, - {file = "tqdm-4.66.6.tar.gz", hash = 
"sha256:4bdd694238bef1485ce839d67967ab50af8f9272aab687c0d7702a01da0be090"}, + {file = "tqdm-4.67.0-py3-none-any.whl", hash = "sha256:0cd8af9d56911acab92182e88d763100d4788bdf421d251616040cc4d44863be"}, + {file = "tqdm-4.67.0.tar.gz", hash = "sha256:fe5a6f95e6fe0b9755e9469b77b9c3cf850048224ecaa8293d7d2d31f97d869a"}, ] [package.dependencies] @@ -3720,6 +3735,7 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} [package.extras] dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +discord = ["requests"] notebook = ["ipywidgets (>=6)"] slack = ["slack-sdk"] telegram = ["requests"] @@ -3875,13 +3891,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "uvicorn" -version = "0.24.0.post1" +version = "0.32.0" description = "The lightning-fast ASGI server." optional = false python-versions = ">=3.8" files = [ - {file = "uvicorn-0.24.0.post1-py3-none-any.whl", hash = "sha256:7c84fea70c619d4a710153482c0d230929af7bcf76c7bfa6de151f0a3a80121e"}, - {file = "uvicorn-0.24.0.post1.tar.gz", hash = "sha256:09c8e5a79dc466bdf28dead50093957db184de356fcdc48697bad3bde4c2588e"}, + {file = "uvicorn-0.32.0-py3-none-any.whl", hash = "sha256:60b8f3a5ac027dcd31448f411ced12b5ef452c646f76f02f8cc3f25d8d26fd82"}, + {file = "uvicorn-0.32.0.tar.gz", hash = "sha256:f78b36b143c16f54ccdb8190d0a26b5f1901fe5a3c777e1ab29f26391af8551e"}, ] [package.dependencies] @@ -4260,7 +4276,10 @@ enabler = ["pytest-enabler (>=2.2)"] test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] +[extras] +all = [] + [metadata] lock-version = "2.0" python-versions = ">=3.8,<3.13" -content-hash = "65169e4cb14ce8ff94469b8dde2312d4d739b0ee5e54760acbdaca8a8838acfb" +content-hash = "61ac41461bea2c5ace3446c6748b8589d2df5624999cb39fce49d992737b343d" diff --git a/pyproject.toml b/pyproject.toml index cb621a181..6e8a6d56a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "truss" -version = "0.9.49" +version = "0.9.50" description = "A seamless bridge from model development to model delivery" license = "MIT" readme = "README.md" @@ -14,11 +14,14 @@ keywords = [ "Model Deployment", "Machine Learning", ] + packages = [ { include = "truss", from = "." }, { include = "truss_chains", from = "./truss-chains" }, ] +[tool.poetry.scripts] +truss = "truss.cli:truss_cli" [tool.poetry.urls] "Homepage" = "https://truss.baseten.co" @@ -26,90 +29,138 @@ packages = [ "Documentation" = "https://truss.baseten.co" "Baseten" = "https://baseten.co" +# Note: *why* are dependencies and defined like this? +# The goal is to factorize the overall truss package into a light-weight `base` part that includes +# e.g. the Truss config and has no heavy dependencies. Other functionalities are organzied into +# components (or "extras") sub-packages, that can be selectively installed (and heavy dependencies +# are only installed as needed). +# +# These sub-packages should have clear separation of concerns, and it should be carefully designed +# how they depend on and import each other (e.g. `base` must not depend on anything else, the +# server does not need local CLI tools). +# +# We want components to be selectable via pip installs (e.g. `pip install truss[server]`). +# Unfortunately poetry dependency groups don't integrate natively with the "extras" concept: +# Specifically, dependencies listed in groups (other than the implicit main group) cannot be used +# for extras. 
+# +# This leaves us with the following process: +# +# * Use poetry groups only for dev dependencies. These are never included in pip. For dev envs +# use the following installation command `poetry install --with=dev,dev-server --extras=all`. +# * All other dependencies are in the main group `tool.poetry.dependencies`. Base dependencies are +# at the top and non-optional. +# * Dependencies from other components are listed after, and marked with `optional = false`. If a +# dependency is needed by multiple extras, only add it once, but see next step. This also ensures +# that poetry resolves *all* dependencies from all extras to be globally consistent. +# * Since poetry groups don't work with extras, we need to make the association between a dependency +# and the component(s) in which it is used in a different way. Because it's cumbersome to fill +# in `tool.poetry.extras` manually, we automate this process and only define +# `tool.dependency_metadata`, where we map each extra dependency to one or multiple components +# that need it. +# * As a pre-commit step `pyproject_toml_linter.py` populates the `tool.poetry.extras` groups and also +# creates an "all" extra. +# +# TODO: The full factorization is WIP; so far only `base` has been cleanly factored out. +# All other dependencies are lumped together in "other". Customers should install truss +# as `pip install truss[local]`, so we temporarily fill local with all deps, until it is properly +# isolated. [tool.poetry.dependencies] -aiohttp = "^3.10.10" -aiofiles = "^24.1.0" -blake3 = "^0.3.3" -boto3 = "^1.34.85" -fastapi = ">=0.109.1" -google-cloud-storage = "2.10.0" -httpcore = ">=1.0.5" # Need min version because of https://github.com/encode/httpx/issues/1171. -httpx = ">=0.24.1" -huggingface_hub = ">=0.25.0" -inquirerpy = "^0.3.4" -Jinja2 = "^3.1.2" -libcst = "<1.2.0" -loguru = ">=0.7.2" -msgpack = ">=1.0.2" -msgpack-numpy = ">=0.4.8" -numpy = ">=1.23.5" -opentelemetry-api = ">=1.25.0" -opentelemetry-sdk = ">=1.25.0" -opentelemetry-exporter-otlp = ">=1.25.0" -packaging = ">=20.9" -pathspec = ">=0.9.0" -psutil = ">=5.9.4" -pydantic = ">=1.10.0" -pytest-asyncio = "^0.23.6" -# Note: when using chains, 3.9 will be required at runtime, but other truss functionality works with 3.8. +# "base" dependencies. +# When using chains, 3.9 will be required at runtime, but other truss functionality works with 3.8. python = ">=3.8,<3.13" -python-json-logger = ">=2.0.2" -python-on-whales = "^0.68.0" -PyYAML = ">=6.0" -rich = "^13.4.2" -rich-click = "^1.6.1" -ruff = "^0.4.8" # Not a dev dep, needed for chains code gen. -single-source = "^0.3.0" -tenacity = "^8.0.1" -watchfiles = "^0.19.0" - -[tool.poetry.group.builder.dependencies] -blake3 = "^0.3.3" -boto3 = "^1.26.157" -click = "^8.0.3" -fastapi = ">=0.109.1" -google-cloud-storage = "2.10.0" -httpx = ">=0.24.1" huggingface_hub = ">=0.25.0" -Jinja2 = "^3.1.2" -loguru = ">=0.7.2" -packaging = ">=20.9" -pathspec = ">=0.9.0" -psutil = ">=5.9.4" -python = ">=3.8,<3.12" -python-json-logger = ">=2.0.2" +pydantic = ">=1.10.0" # We cannot upgrade to v2, due to customer needs. PyYAML = ">=6.0" -requests = ">=2.31" -rich = "^13.4.2" single-source = "^0.3.0" -tenacity = "^8.0.1" -uvicorn = "^0.24.0" -watchfiles = "^0.19.0" +# "non-base" dependencies. +# TODO: until we have resolved the question of how users can install the local tools frictionlessly +# (extras cannot be marked to be included by default), all packages below are non-optional.
+# This also means that so far extras defined in `[tool.poetry.extras]` don't have any meaning, +# since everything is globally included anyway. +Jinja2 = { version = "^3.1.2", optional = false } +aiofiles = { version = "^24.1.0", optional = false } +aiohttp = { version = "^3.10.10", optional = false } +blake3 = { version = "^0.3.3", optional = false } +boto3 = { version = "^1.34.85", optional = false } +click = { version = "^8.0.3", optional = false } +fastapi = { version = ">=0.109.1", optional = false } +google-cloud-storage = { version = "2.10.0", optional = false } +httpx = { version = ">=0.24.1", optional = false } +inquirerpy = { version = "^0.3.4", optional = false } +libcst = { version = "<1.2.0", optional = false } +loguru = { version = ">=0.7.2", optional = false } +packaging = { version = ">=20.9", optional = false } +pathspec = { version = ">=0.9.0", optional = false } +psutil = { version = ">=5.9.4", optional = false } +python-json-logger = { version = ">=2.0.2", optional = false } +python-on-whales = { version = "^0.68.0", optional = false } +requests = { version = ">=2.31", optional = false } +rich = { version = "^13.4.2", optional = false } +rich-click = { version = "^1.6.1", optional = false } +ruff = { version = "^0.4.8", optional = false } # Not a dev dep, needed for chains code gen. +tenacity = { version = "^8.0.1", optional = false } +watchfiles = { version = "^0.19.0", optional = false } + -[tool.poetry.dev-dependencies] +[tool.dependency_metadata] +# `base` / `main` deps, which are non-optional, are always included and don't need to be added here. +Jinja2 = { components = "other" } +aiofiles = { components = "other" } +aiohttp = { components = "other" } +blake3 = { components = "other" } +boto3 = { components = "other" } +click = { components = "other" } +fastapi = { components = "other" } +google-cloud-storage = { components = "other" } +httpx = { components = "other" } +inquirerpy = { components = "other" } +libcst = { components = "other" } +loguru = { components = "other" } +packaging = { components = "other" } +pathspec = { components = "other" } +psutil = { components = "other" } +python-json-logger = { components = "other" } +python-on-whales = { components = "other" } +requests = { components = "other" } +rich = { components = "other" } +rich-click = { components = "other" } +ruff = { components = "other" } +tenacity = { components = "other" } +watchfiles = { components = "other" } + +[tool.poetry.group.dev.dependencies] +# These packages are needed for the dev/testing tooling. coverage = "^6.4.1" -dockerfile = "^3.2.0" +httpx = { extras = ["cli"], version = "*" } ipdb = "^0.13.9" ipykernel = "^6.16.0" ipython = "^7.16" +mypy = "^1.0.0" nbconvert = "^7.2.1" pre-commit = "^2.18.1" pytest = "7.2.0" +pytest-asyncio = "^0.23.6" pytest-cov = "^3.0.0" +pytest-split = "^0.8.1" +requests-mock = ">=1.11.0" +tomlkit = ">=0.12" types-PyYAML = "^6.0.12.12" +types-aiofiles = ">=24.1.0" +types-requests = "==2.31.0.2" types-setuptools = "^69.0.0.0" -types-aiofiles = "^24.1.0.20240626" -[tool.poetry.scripts] -truss = 'truss.cli:truss_cli' - -[tool.poetry.group.dev.dependencies] +[tool.poetry.group.dev-server.dependencies] +# These packages are needed to run local tests of server components. Note that the actual +# server deps for building the docker image are (so far) defined in `requirements.txt` files.
+dockerfile = "^3.2.0" flask = "^2.3.3" -httpx = { extras = ["cli"], version = "*" } -mypy = "^1.0.0" -pytest-split = "^0.8.1" -requests-mock = ">=1.11.0" -types-requests = "==2.31.0.2" +msgpack = ">=1.0.2" +msgpack-numpy = ">=0.4.8" +numpy = ">=1.23.5" +opentelemetry-api = ">=1.25.0" +opentelemetry-exporter-otlp = ">=1.25.0" +opentelemetry-sdk = ">=1.25.0" uvicorn = ">=0.24.0" uvloop = ">=0.17.0" @@ -160,5 +211,10 @@ section-order = [ ] [tool.ruff.lint.pycodestyle] -# The formatter can go sometimes go over the 88 character limit, so we want to provide some buffer. +# The formatter can go sometimes go over the 88-character limit, so we want to provide some buffer. max-line-length = 120 + + +# Note: `tool.poetry.extras` was autogenerated by `pyproject_toml_linter.py`, do not edit manually. +[tool.poetry.extras] +all = [] diff --git a/truss-chains/examples/audio-transcription/whisper_chainlet.py b/truss-chains/examples/audio-transcription/whisper_chainlet.py index 54f01256d..65823190d 100644 --- a/truss-chains/examples/audio-transcription/whisper_chainlet.py +++ b/truss-chains/examples/audio-transcription/whisper_chainlet.py @@ -2,7 +2,7 @@ import tempfile import data_types -from truss import truss_config +from truss.base import truss_config import truss_chains as chains @@ -18,7 +18,9 @@ def base64_to_wav(base64_string, output_file_path): class WhisperModel(chains.ChainletBase): remote_config = chains.RemoteConfig( docker_image=chains.DockerImage( - base_image="baseten/truss-server-base:3.10-gpu-v0.9.0", + base_image=chains.CustomImage( + image="baseten/truss-server-base:3.10-gpu-v0.9.0" + ), apt_requirements=[ "ffmpeg", ], diff --git a/truss-chains/examples/mistral/mistral_chainlet.py b/truss-chains/examples/mistral/mistral_chainlet.py index b2ed8168b..d71ea96cd 100644 --- a/truss-chains/examples/mistral/mistral_chainlet.py +++ b/truss-chains/examples/mistral/mistral_chainlet.py @@ -1,6 +1,6 @@ from typing import Protocol -from truss import truss_config +from truss.base import truss_config import truss_chains as chains diff --git a/truss-chains/tests/chains_e2e_test.py b/truss-chains/tests/chains_e2e_test.py index 9e403a91b..f10d90cbe 100644 --- a/truss-chains/tests/chains_e2e_test.py +++ b/truss-chains/tests/chains_e2e_test.py @@ -16,10 +16,7 @@ def test_chain(): root = Path(__file__).parent.resolve() chain_root = root / "itest_chain" / "itest_chain.py" with framework.import_target(chain_root, "ItestChain") as entrypoint: - options = definitions.PushOptionsLocalDocker( - chain_name="integration-test", - user_env={"test_env_key": "test_env_value"}, - ) + options = definitions.PushOptionsLocalDocker(chain_name="integration-test") service = remote.push(entrypoint, options) url = service.run_remote_url.replace("host.docker.internal", "localhost") @@ -41,7 +38,6 @@ def test_chain(): "part_lens": [10], }, ["a", "b"], - "test_env_value", ] # Call with values for default arguments. response = requests.post( @@ -67,7 +63,6 @@ def test_chain(): "part_lens": [3], }, ["bola"], - "test_env_value", ] # Test with errors. @@ -87,7 +82,7 @@ async def test_chain_local(): root = Path(__file__).parent.resolve() chain_root = root / "itest_chain" / "itest_chain.py" with framework.import_target(chain_root, "ItestChain") as entrypoint: - with public_api.run_local(user_env={"test_env_key": "test_env_value"}): + with public_api.run_local(): with pytest.raises(ValueError): # First time `SplitTextFailOnce` raises an error and # currently local mode does not have retries. 
diff --git a/truss-chains/examples/audio-transcription/whisper_chainlet.py b/truss-chains/examples/audio-transcription/whisper_chainlet.py index 54f01256d..65823190d 100644 --- a/truss-chains/examples/audio-transcription/whisper_chainlet.py +++ b/truss-chains/examples/audio-transcription/whisper_chainlet.py @@ -2,7 +2,7 @@ import tempfile import data_types -from truss import truss_config +from truss.base import truss_config import truss_chains as chains @@ -18,7 +18,9 @@ def base64_to_wav(base64_string, output_file_path): class WhisperModel(chains.ChainletBase): remote_config = chains.RemoteConfig( docker_image=chains.DockerImage( - base_image="baseten/truss-server-base:3.10-gpu-v0.9.0", + base_image=chains.CustomImage( + image="baseten/truss-server-base:3.10-gpu-v0.9.0" + ), apt_requirements=[ "ffmpeg", ], diff --git a/truss-chains/examples/mistral/mistral_chainlet.py b/truss-chains/examples/mistral/mistral_chainlet.py index b2ed8168b..d71ea96cd 100644 --- a/truss-chains/examples/mistral/mistral_chainlet.py +++ b/truss-chains/examples/mistral/mistral_chainlet.py @@ -1,6 +1,6 @@ from typing import Protocol -from truss import truss_config +from truss.base import truss_config import truss_chains as chains diff --git a/truss-chains/tests/chains_e2e_test.py b/truss-chains/tests/chains_e2e_test.py index 9e403a91b..f10d90cbe 100644 --- a/truss-chains/tests/chains_e2e_test.py +++ b/truss-chains/tests/chains_e2e_test.py @@ -16,10 +16,7 @@ def test_chain(): root = Path(__file__).parent.resolve() chain_root = root / "itest_chain" / "itest_chain.py" with framework.import_target(chain_root, "ItestChain") as entrypoint: - options = definitions.PushOptionsLocalDocker( - chain_name="integration-test", - user_env={"test_env_key": "test_env_value"}, - ) + options = definitions.PushOptionsLocalDocker(chain_name="integration-test") service = remote.push(entrypoint, options) url = service.run_remote_url.replace("host.docker.internal", "localhost") @@ -41,7 +38,6 @@ def test_chain(): "part_lens": [10], }, ["a", "b"], - "test_env_value", ] # Call with values for default arguments. response = requests.post( @@ -67,7 +63,6 @@ def test_chain(): "part_lens": [3], }, ["bola"], - "test_env_value", ] # Test with errors. @@ -87,7 +82,7 @@ async def test_chain_local(): root = Path(__file__).parent.resolve() chain_root = root / "itest_chain" / "itest_chain.py" with framework.import_target(chain_root, "ItestChain") as entrypoint: - with public_api.run_local(user_env={"test_env_key": "test_env_value"}): + with public_api.run_local(): with pytest.raises(ValueError): # First time `SplitTextFailOnce` raises an error and # currently local mode does not have retries. @@ -104,7 +99,6 @@ async def test_chain_local(): "part_lens": [10], }, ["a", "b"], - "test_env_value", ) # Convert the pydantic model to a dict for comparison @@ -114,7 +108,6 @@ async def test_chain_local(): result[2], result[3].dict(), result[4], - result[5], ) assert result_dict == expected diff --git a/truss-chains/tests/itest_chain/itest_chain.py b/truss-chains/tests/itest_chain/itest_chain.py index b0fc20b2b..5881ad4f3 100644 --- a/truss-chains/tests/itest_chain/itest_chain.py +++ b/truss-chains/tests/itest_chain/itest_chain.py @@ -1,6 +1,5 @@ import math -import pydantic from user_package import shared_chainlet from user_package.nested_package import io_types @@ -20,11 +19,6 @@ pip_requirements_file=chains.make_abs_path_here("requirements.txt"), ) -IMAGE_STR = chains.DockerImage( - base_image="python:3.11-slim", - pip_requirements_file=chains.make_abs_path_here("requirements.txt"), -) - class GenerateData(chains.ChainletBase): remote_config = chains.RemoteConfig( @@ -37,41 +31,43 @@ def run_remote(self, length: int) -> str: return (template * repetitions)[:length] -class DummyUserConfig(pydantic.BaseModel): - multiplier: int - - class TextReplicator(chains.ChainletBase): remote_config = chains.RemoteConfig(docker_image=IMAGE_CUSTOM) - default_user_config = DummyUserConfig(multiplier=2) def __init__(self, context=chains.depends_context()): - self.user_config = context.user_config + self.multiplier = 2 def run_remote(self, data: str) -> str: if len(data) > 30: raise ValueError(f"This input is too long: {len(data)}.") - return data * self.user_config.multiplier + return data * self.multiplier + +class SideEffectBase(chains.ChainletBase): + def __init__(self, context=chains.depends_context()): + self.ctx = context -class SideEffectOnly(chains.ChainletBase): + def run_remote(self) -> None: + print("I have no inputs and no outputs, I just print.") + + +class SideEffectOnlySubclass(SideEffectBase): remote_config = chains.RemoteConfig(docker_image=IMAGE_CUSTOM) - default_user_config = DummyUserConfig(multiplier=2) def __init__(self, context=chains.depends_context()): - self.user_config = context.user_config + super().__init__(context=context) def run_remote(self) -> None: - print("I'm have no input and no outputs, I just print.") + return super().run_remote() class TextToNum(chains.ChainletBase): - remote_config = chains.RemoteConfig(docker_image=IMAGE_STR) + remote_config = chains.RemoteConfig(docker_image=IMAGE_BASETEN) def __init__( self, replicator: TextReplicator = chains.depends(TextReplicator), - side_effect=chains.depends(SideEffectOnly), + side_effect=chains.depends(SideEffectOnlySubclass), ) -> None: self._replicator = replicator self._side_effect = side_effect @@ -110,7 +106,7 @@ async def run_remote( parts=[], part_lens=[10] ), simple_default_arg: list[str] = ["a", "b"], - ) -> tuple[int, str, int, shared_chainlet.SplitTextOutput, list[str], str]: + ) -> tuple[int, str, int, shared_chainlet.SplitTextOutput, list[str]]: data = self._data_generator.run_remote(length) text_parts, number = await self._data_splitter.run_remote( io_types.SplitTextInput( @@ -130,5 +126,4 @@ number, pydantic_default_arg, simple_default_arg, - self._context.user_env["test_env_key"], ) diff --git a/truss-chains/tests/test_framework.py b/truss-chains/tests/test_framework.py index 45ef50c73..5f33a3c00 100644 --- a/truss-chains/tests/test_framework.py +++ b/truss-chains/tests/test_framework.py @@ -1,3 +1,4 @@ +import asyncio import contextlib import logging import re @@ -92,7 +93,94 @@
def run_remote(self) -> str: chain.run_remote() -# Assert that Chain(let) definitions are validated ################################# +# The problem with supporting helper functions in `run_local` is that the stack trace +# looks similar to the forbidden one in `InitInRun`. +@pytest.mark.skip(reason="Helper functions not supported yet.") +def test_ok_with_subclass_and_helper_fn(): + def build(): + return Chainlet1() + + with chains.run_local(): + chain = build() + print(chain.run_remote()) + + +# Test sub-classing (incl. detection of naive chainlet instantiation). ################# + + +class BaseChainlet(chains.ChainletBase): + def __init__(self): + self.base_value = "base_value" + logging.info("########## Init Base") + + async def run_remote(self) -> str: + return self.__class__.name + + +class IntermediateChainlet(BaseChainlet): + def __init__(self): + logging.info("########## Start init Intermediate") + super().__init__() + self.added_value = "added_value" + logging.info("########## Finish init Intermediate") + + async def run_remote(self) -> str: + return self.__class__.name + + +class DerivedChainlet(IntermediateChainlet): + def __init__(self): + logging.info("########## Start init Derived") + super().__init__() + self.base_value = "overridden_base_value" + logging.info("########## Finish init Derived") + + async def run_remote(self) -> str: + return self.__class__.name + + +class InitInInitSub(chains.ChainletBase): + def __init__(self, a=chains.depends(BaseChainlet)): + self.b = DerivedChainlet() + self.a = a + + async def run_remote(self) -> str: + return await self.b.run_remote() + + +class CorrectChain(chains.ChainletBase): + def __init__( + self, a=chains.depends(BaseChainlet), b=chains.depends(DerivedChainlet) + ): + self.a = a + self.b = b + + async def run_remote(self) -> str: + return await self.a.run_remote() + " " + await self.b.run_remote() + + +# Make sure there are no other validation errors from the above definitions. +framework.raise_validation_errors() + + +def test_raises_init_in_init_subclass(): + match = "Chainlets cannot be naively instantiated" + with pytest.raises(definitions.ChainsRuntimeError, match=match): + with chains.run_local(): + InitInInitSub() + + +def test_ok_with_subclass(): + with chains.run_local(): + chain = CorrectChain() + assert chain.a.base_value == "base_value" + assert chain.b.base_value == "overridden_base_value" + assert chain.b.added_value == "added_value" + result = asyncio.run(chain.run_remote()) + assert result == "BaseChainlet DerivedChainlet" + + +# Assert that Chain(let) definitions are validated ##################################### @contextlib.contextmanager diff --git a/truss-chains/truss_chains/__init__.py b/truss-chains/truss_chains/__init__.py index 69c1493c8..a9dd61551 100644 --- a/truss-chains/truss_chains/__init__.py +++ b/truss-chains/truss_chains/__init__.py @@ -36,7 +36,6 @@ ChainletBase, depends, depends_context, - deploy_remotely, # Alias for backwards compat.
mark_entrypoint, push, run_local, @@ -60,7 +59,6 @@ "StubBase", "depends", "depends_context", - "deploy_remotely", "make_abs_path_here", "mark_entrypoint", "push", diff --git a/truss-chains/truss_chains/code_gen.py b/truss-chains/truss_chains/code_gen.py index 01cacd22a..8107a2573 100644 --- a/truss-chains/truss_chains/code_gen.py +++ b/truss-chains/truss_chains/code_gen.py @@ -36,7 +36,7 @@ import libcst import truss -from truss import truss_config +from truss.base import truss_config from truss.contexts.image_builder import serving_image_builder from truss.util import path as truss_path @@ -476,14 +476,7 @@ def _gen_truss_chainlet_model( _SpecifyChainletTypeAnnotation(user_chainlet_ref.src) ) model_class_src = libcst.Module(body=[class_definition]).code - - if utils.issubclass_safe(chainlet_descriptor.user_config_type.raw, type(None)): - userconfig_pin = "UserConfigT = type(None)" - else: - user_config_ref = _gen_type_import_and_ref(chainlet_descriptor.user_config_type) - imports.update(user_config_ref.imports) - userconfig_pin = f"UserConfigT = {user_config_ref.src}" - return _Source(src=f"{userconfig_pin}\n\n{model_class_src}", imports=imports) + return _Source(src=model_class_src, imports=imports) def _gen_truss_chainlet_file( @@ -559,18 +552,17 @@ def _inplace_fill_base_image( mutable_truss_config.base_image.python_executable_path = ( image.base_image.python_executable_path ) - elif isinstance(image.base_image, str): # This options is deprecated. - mutable_truss_config.base_image = truss_config.BaseImage(image=image.base_image) + raise NotImplementedError( + "Specifying docker base image as string is deprecated" + ) def _make_truss_config( chainlet_dir: pathlib.Path, chains_config: definitions.RemoteConfig, - user_config: definitions.UserConfigT, chainlet_to_service: Mapping[str, definitions.ServiceDescriptor], model_name: str, - user_env: Mapping[str, str], ) -> truss_config.TrussConfig: """Generate a truss config for a Chainlet.""" config = truss_config.TrussConfig() @@ -614,9 +606,7 @@ def _make_truss_config( config.external_data = truss_config.ExternalData(items=assets.external_data) # Metadata. chains_metadata: definitions.TrussMetadata = definitions.TrussMetadata( - user_config=user_config, - chainlet_to_service=chainlet_to_service, - user_env=user_env, + chainlet_to_service=chainlet_to_service ) config.model_metadata[definitions.TRUSS_CONFIG_CHAINS_KEY] = ( chains_metadata.model_dump() @@ -634,7 +624,6 @@ def gen_truss_chainlet( chainlet_descriptor: definitions.ChainletAPIDescriptor, model_name: str, chainlet_display_name_to_url: Mapping[str, str], - user_env: Mapping[str, str], ) -> pathlib.Path: # Filter needed services and customize options. dep_services = {} @@ -650,10 +639,8 @@ def gen_truss_chainlet( _make_truss_config( chainlet_dir, chainlet_descriptor.chainlet_cls.remote_config, - chainlet_descriptor.chainlet_cls.default_user_config, dep_services, model_name, - user_env, ) # TODO This assumes all imports are absolute w.r.t chain root (or site-packages). 
truss_path.copy_tree_path(
diff --git a/truss-chains/truss_chains/definitions.py b/truss-chains/truss_chains/definitions.py
index b8d7034d5..04d08ed86 100644
--- a/truss-chains/truss_chains/definitions.py
+++ b/truss-chains/truss_chains/definitions.py
@@ -23,13 +23,11 @@
 )
 import pydantic
-from truss import truss_config
-from truss.constants import PRODUCTION_ENVIRONMENT_NAME
+from truss.base import truss_config
+from truss.base.constants import PRODUCTION_ENVIRONMENT_NAME
 from truss.remote import baseten as baseten_remote
 from truss.remote import remote_cli, remote_factory
-UserConfigT = TypeVar("UserConfigT", bound=Union[pydantic.BaseModel, None])
-
 BASETEN_API_SECRET_NAME = "baseten_chain_api_key"
 SECRET_DUMMY = "***"
 TRUSS_CONFIG_CHAINS_KEY = "chains_metadata"
@@ -173,8 +171,7 @@ class DockerImage(SafeModelNonSerializable):
 assets are included as additional layers on top of that image. You can
 choose a Baseten default image for a supported python version (e.g.
 ``BasetenImage.PY311``), this will also include GPU drivers if needed, or
- provide a custom image (e.g. ``CustomImage(image="python:3.11-slim")``).
- Specification by string is deprecated.
+ provide a custom image (e.g. ``CustomImage(image="python:3.11-slim")``).
 pip_requirements_file: Path to a file containing pip requirements. The file
 content is naively concatenated with ``pip_requirements``.
 pip_requirements: A list of pip requirements to install. The items are
@@ -188,8 +185,7 @@
 """
 # TODO: this is not stable yet and might change or refer back to truss.
- # Image as str is deprecated.
- base_image: Union[BasetenImage, CustomImage, str] = BasetenImage.PY311
+ base_image: Union[BasetenImage, CustomImage] = BasetenImage.PY311
 pip_requirements_file: Optional[AbsPath] = None
 pip_requirements: list[str] = []
 apt_requirements: list[str] = []
@@ -296,12 +292,12 @@ class Assets:
 For example, model weight caching can be used like this::
 import truss_chains as chains
- from truss import truss_config
+ from truss.base import truss_config
 mistral_cache = truss_config.ModelRepo(
 repo_id="mistralai/Mistral-7B-Instruct-v0.2",
 allow_patterns=["*.json", "*.safetensors", ".model"]
- )
+ )
 chains.Assets(cached=[mistral_cache], ...)
 See `truss caching guide `_
@@ -416,7 +412,7 @@ class Environment(SafeModel):
 # can add more fields here as we add them to dynamic_config configmap
-class DeploymentContext(SafeModelNonSerializable, Generic[UserConfigT]):
+class DeploymentContext(SafeModelNonSerializable):
 """Bundles config values and resources needed to instantiate Chainlets.
 The context can optionally be added as a trailing argument in a Chainlet's
@@ -433,18 +429,13 @@
 secrets: A mapping from secret names to secret values. It contains only the
 secrets that are listed in ``remote_config.assets.secret_keys`` of the
 current chainlet.
- user_env: These values can be provided to
- the deploy command and customize the behavior of deployed chainlets. E.g.
- for differentiating between prod and dev version of the same chain.
 environment: The environment that the chainlet is deployed in. None if the
 chainlet is not associated with an environment.
""" data_dir: Optional[pathlib.Path] = None - user_config: UserConfigT chainlet_to_service: Mapping[str, ServiceDescriptor] secrets: MappingNoIter[str, str] - user_env: Mapping[str, str] environment: Optional[Environment] = None def get_service_descriptor(self, chainlet_name: str) -> ServiceDescriptor: @@ -474,17 +465,14 @@ def get_baseten_api_key(self) -> str: return api_key -class TrussMetadata(SafeModel, Generic[UserConfigT]): +class TrussMetadata(SafeModel): """Plugin for the truss config (in config["model_metadata"]["chains_metadata"]).""" - user_config: UserConfigT chainlet_to_service: Mapping[str, ServiceDescriptor] - user_env: Mapping[str, str] class ABCChainlet(abc.ABC): remote_config: ClassVar[RemoteConfig] = RemoteConfig(docker_image=DockerImage()) - default_user_config: ClassVar[Optional[pydantic.BaseModel]] = None _init_is_patched: ClassVar[bool] = False @classmethod @@ -555,7 +543,6 @@ class ChainletAPIDescriptor(SafeModelNonSerializable): has_context: bool dependencies: Mapping[str, DependencyDescriptor] endpoint: EndpointAPIDescriptor - user_config_type: TypeDescriptor def __hash__(self) -> int: return hash(self.chainlet_cls) @@ -632,7 +619,6 @@ class GenericRemoteException(Exception): ... class PushOptions(SafeModelNonSerializable): chain_name: str - user_env: Mapping[str, str] only_generate_trusses: bool = False @@ -648,7 +634,6 @@ def create( publish: bool, promote: Optional[bool], only_generate_trusses: bool, - user_env: Mapping[str, str], remote: Optional[str] = None, environment: Optional[str] = None, ) -> "PushOptionsBaseten": @@ -669,7 +654,6 @@ def create( chain_name=chain_name, publish=publish, only_generate_trusses=only_generate_trusses, - user_env=user_env, environment=environment, ) diff --git a/truss-chains/truss_chains/framework.py b/truss-chains/truss_chains/framework.py index a65028a63..9e24366ae 100644 --- a/truss-chains/truss_chains/framework.py +++ b/truss-chains/truss_chains/framework.py @@ -37,8 +37,10 @@ _SIMPLE_TYPES = {int, float, complex, bool, str, bytes, None} _SIMPLE_CONTAINERS = {list, dict} -_DOCS_URL_CHAINING = "https://docs.baseten.co/chains/chaining-chainlets" -_DOCS_URL_LOCAL = "https://docs.baseten.co/chains/gettin-started" +_DOCS_URL_CHAINING = ( + "https://docs.baseten.co/chains/concepts#depends-call-other-chainlets" +) +_DOCS_URL_LOCAL = "https://docs.baseten.co/chains/guide#local-development" _ENTRYPOINT_ATTR_NAME = "_chains_entrypoint" @@ -114,6 +116,7 @@ def format_errors(self) -> str: def maybe_display_errors(self) -> None: if self.has_errors: sys.stderr.write(self.format_errors()) + sys.stderr.write("\n") _global_error_collector = _ErrorCollector() @@ -229,14 +232,17 @@ def _example_chainlet_code() -> str: return class_code -def _instantiation_error_msg(cls_name: str): +def _instantiation_error_msg(cls_name: str, location: Optional[str] = None) -> str: + location_format = f"{location}\n" if location else "" return ( - f"Error when instantiating Chainlet `{cls_name}`. " + f"Error when instantiating Chainlet `{cls_name}`.\n" + f"{location_format}" "Chainlets cannot be naively instantiated. Possible fixes:\n" - "1. To use Chainlets as dependencies in other Chainlets 'chaining'), " + "1. To use Chainlets as dependencies in other Chainlets ('chaining'), " f"add them as init argument. See {_DOCS_URL_CHAINING}.\n" f"2. For local / debug execution, use the `{run_local.__name__}`-" - f"context. See {_DOCS_URL_LOCAL}.\n" + f"context. See {_DOCS_URL_LOCAL}. You cannot use helper functions to " + "instantiate the Chain in this case.\n" "3. 
Push the chain and call the remote endpoint.\n"
 "Example of correct `__init__` with dependencies:\n"
 f"{_example_chainlet_code()}"
 )
@@ -420,11 +426,11 @@ def _validate_and_describe_endpoint(
 if not is_async:
 warnings.warn(
 "`run_remote` must be an async (coroutine) function in future releases. "
- "Replace `def run_remote(...` with `async def run_remote(...`. "
+ "Replace `def run_remote(...)` with `async def run_remote(...)`. "
 "Local testing and execution can be done with "
 "`asyncio.run(my_chainlet.run_remote(...))`.\n"
 "Note on concurrency: previously sync functions were run in threads by the "
- "Truss server.\bn"
+ "Truss server.\n"
 "For some frameworks this was **unsafe** (e.g. in torch the CUDA context "
 "is not thread-safe).\n"
 "Additionally, python threads hold the GIL and therefore might not give "
@@ -516,12 +522,11 @@ def __init__(
 [dep_1: dep_1_type = truss_chains.depends(dep_1_class),]
 ...
 [dep_N: dep_N_type = truss_chains.provides(dep_N_class),]
- [context: truss_chains.Context[UserConfig] = truss_chains.provide_context()]
+ [context: truss_chains.Context = truss_chains.depends_context()]
 ) -> None:
 ```
 * The context argument is optionally trailing and must have a default constructed
- with the `provide_context` directive. The type can be templated by a user
- defined config e.g. `truss_chains.Context[UserConfig]`.
+ with the `depends_context` directive.
 * The names and number of Chainlet "dependency" arguments are arbitrary.
 * Default values for dependencies must be constructed with the `depends` directive
 to make the dependency injection work. The argument to `depends` must be a
@@ -689,7 +694,6 @@ def validate_and_register_class(cls: Type[definitions.ABCChainlet]) -> None:
 has_context=init_validator.has_context,
 endpoint=_validate_and_describe_endpoint(cls, location),
 src_path=src_path,
- user_config_type=definitions.TypeDescriptor(raw=type(cls.default_user_config)),
 )
 logging.debug(
 f"Descriptor for {cls}:\n{pprint.pformat(chainlet_descriptor, indent=4)}\n"
 )
@@ -825,6 +829,9 @@ def ensure_args_are_injected(cls, original_init: Callable, kwargs) -> None:
 "run_local_stack_depth"
 )
+_INIT_LOCAL_NAME = "__init_local__"
+_INIT_NAME = "__init__"
+
 def _create_modified_init_for_local(
 chainlet_descriptor: definitions.ChainletAPIDescriptor,
@@ -834,7 +841,6 @@
 secrets: Mapping[str, str],
 data_dir: Optional[pathlib.Path],
 chainlet_to_service: Mapping[str, definitions.ServiceDescriptor],
- user_env: Mapping[str, str],
 ):
 """Replaces the default argument values with local Chainlet instantiations.
@@ -842,63 +848,145 @@
 any init args (because the patched defaults are sufficient).
 """
- def _verify_stack(stack: list[inspect.FrameInfo], levels_below_run_local: int):
- # TODO: this checks is incompatible with sub-classing chainlets.
- for frame in stack[:levels_below_run_local]:
- # This is a robust way to compare for function identity, since `wraps`
- # actually changes the name.
- if frame.frame.f_code != __init_local__.__code__: # type: ignore[attr-defined]
- assert frame.code_context is not None
- logging.error(
- f"Chainlet init called outside {__init_local__.__name__}, "
- f'occurred in:\n File "{frame.filename}", line {frame.lineno}, in '
- f"{frame.function}\n {frame.code_context[0].strip()}."
- )
- raise definitions.ChainsRuntimeError(
- _instantiation_error_msg(chainlet_descriptor.name)
- )
+ def _detect_naive_instantiations(
+ stack: list[inspect.FrameInfo], levels_below_run_local: int
+ ) -> None:
+ # The goal is to find cases where a chainlet is directly instantiated
+ # in a place that is not immediately inside the `run_local`-contextmanager.
+ # In particular, chainlets being instantiated in the `__init__` or `run_remote`
+ # methods of other chainlets (instead of being passed as dependencies with
+ # `chains.depends()`).
+ #
+ # We look into the call stack of any (wrapped) invocation of an
+ # ABCChainlet-subclass's `__init__`.
+ # We also cut off the "above" call stack, such that `run_local` (and anything
+ # above that) is ignored, so it is possible to use `run_local` in nested code.
+ #
+ # A valid stack looks like this:
+ # * `__init_local__` as deepest frame (which would then call
+ # `__init_with_arg_check__` -> `__init__` if validation passes).
+ # * If a chainlet has no base classes, this can *only* be called from
+ # `__init_local__` - the part when the chainlet needs to be instantiated and
+ # added to `cls_to_instance`.
+ # * If a chainlet has other chainlets as base classes, they may call a chain
+ # of `super().__init__()`. Each will add a triple of
+ # (__init__, __init_with_arg_check__, __init_local__) to the stack. While
+ # these 3 init layers belong to different base classes, the type of the
+ # `self` arg is fixed.
+ #
+ # To detect invalid stacks we can rephrase this: `__init_local__` can only be
+ # called under either of these conditions:
+ # * From `__init_local__` when needing to populate `cls_to_instance`.
+ # * From a subclass's `__init__` using `super().__init__()`. This means the
+ # type (and instance) of the `self` arg in the calling `__init_local__` and
+ # the invoked `__init__` must be identical. In the forbidden situation where,
+ # for example, Chainlet `A` tries to create an instance of `B` inside its
+ # `__init__`, the `self` args are two different instances.
+ substack = stack[:levels_below_run_local]
+ parts = ["-------- Chainlet Instantiation Stack --------"]
+ # Track the owner classes encountered in the stack to detect invalid scenarios.
+ transformed_stack = []
+ for frame in substack:
+ func_name = frame.function
+ line_number = frame.lineno
+ local_vars = frame.frame.f_locals
+ init_owner_class = None
+ self_value = None
+ # Determine if "self" exists and extract the owner class.
+ if "self" in local_vars:
+ self_value = local_vars["self"]
+ if func_name == _INIT_NAME:
+ try:
+ name_parts = frame.frame.f_code.co_qualname.split(".") # type: ignore[attr-defined]
+ except AttributeError: # `co_qualname` only in Python 3.11+.
+ name_parts = []
+ if len(name_parts) > 1:
+ init_owner_class = name_parts[-2]
+ elif func_name == _INIT_LOCAL_NAME:
+ assert (
+ "init_owner_class" in local_vars
+ ), f"`{_INIT_LOCAL_NAME}` must capture `init_owner_class`"
+ init_owner_class = local_vars["init_owner_class"].__name__
+
+ if init_owner_class:
+ parts.append(
+ f"{func_name}:l{line_number} | type(self)=<"
+ f"{self_value.__class__.__name__}> method of <"
+ f"{init_owner_class}>"
+ )
+ else:
+ parts.append(
+ f"{func_name}:l{line_number} | type(self)=<"
+ f"{self_value.__class__.__name__}>"
+ )
+ else:
+ parts.append(f"{func_name}:l{line_number}")
+
+ transformed_stack.append((func_name, self_value, frame))
+
+ if len(parts) > 1:
+ logging.debug("\n".join(parts))
+
+ # Analyze the stack after preparing relevant information.
+ for i in range(len(transformed_stack) - 1):
+ func_name, self_value, _ = transformed_stack[i]
+ up_func_name, up_self_value, up_frame = transformed_stack[i + 1]
+ if func_name != _INIT_LOCAL_NAME:
+ continue # OK, we only validate `__init_local__` invocations.
+ # We are in `__init_local__`. Now check who called it and how.
+ if up_func_name == _INIT_LOCAL_NAME:
+ # Note: in this case `self` in the current frame is different than
+ # `self` in the parent frame, since a new instance is created.
+ continue # Ok, populating `cls_to_instance`.
+ if up_func_name == _INIT_NAME and self_value == up_self_value:
+ continue # OK, call to `super().__init__()`.
+
+ # Everything else is invalid.
+ location = (
+ f"{up_frame.filename}:{up_frame.lineno} ({up_frame.function})\n"
+ f" {up_frame.code_context[0].strip()}" # type: ignore[index]
+ )
+ raise definitions.ChainsRuntimeError(
+ _instantiation_error_msg(chainlet_descriptor.name, location)
+ )
- original_init = chainlet_descriptor.chainlet_cls.__init__
+ __original_init__ = chainlet_descriptor.chainlet_cls.__init__
- @functools.wraps(original_init)
+ @functools.wraps(__original_init__)
 def __init_local__(self: definitions.ABCChainlet, **kwargs) -> None:
 logging.debug(f"Patched `__init__` of `{chainlet_descriptor.name}`.")
 stack_depth = run_local_stack_depth.get(None)
- assert stack_depth is not None, "The patched init is only called in context."
+ assert stack_depth is not None, "__init_local__ is only called in context."
 stack = inspect.stack()
 current_stack_depth = len(stack)
 levels_below_run_local = current_stack_depth - stack_depth
- _verify_stack(stack, levels_below_run_local)
+ # Capture `init_owner_class` in locals, because we check it in
+ # `_detect_naive_instantiations`.
+ init_owner_class = chainlet_descriptor.chainlet_cls # noqa: F841
+ _detect_naive_instantiations(stack, levels_below_run_local)
+
 kwargs_mod = dict(kwargs)
 if (
 chainlet_descriptor.has_context
 and definitions.CONTEXT_ARG_NAME not in kwargs_mod
 ):
 kwargs_mod[definitions.CONTEXT_ARG_NAME] = definitions.DeploymentContext(
- user_config=chainlet_descriptor.chainlet_cls.default_user_config,
 secrets=secrets,
 data_dir=data_dir,
 chainlet_to_service=chainlet_to_service,
- user_env=user_env,
- )
- else:
- logging.debug(
- f"Use explicitly given context for `{self.__class__.__name__}`."
 )
 for arg_name, dep in chainlet_descriptor.dependencies.items():
 chainlet_cls = dep.chainlet_cls
 if arg_name in kwargs_mod:
 logging.debug(
- f"Use explicitly given instance for `{arg_name}` "
- f"of type `{dep.name}`."
+ f"Use given instance for `{arg_name}` of type `{dep.name}`."
 )
 continue
 if chainlet_cls in cls_to_instance:
 logging.debug(
- f"Use previously created instance for `{arg_name}` "
- f"of type `{dep.name}`."
+ f"Use previously created `{arg_name}` of type `{dep.name}`."
 )
- instance = cls_to_instance[chainlet_cls]
+ kwargs_mod[arg_name] = cls_to_instance[chainlet_cls]
 else:
 logging.debug(
 f"Create new instance for `{arg_name}` of type `{dep.name}`. "
@@ -907,13 +995,13 @@ def __init_local__(self: definitions.ABCChainlet, **kwargs) -> None:
 assert chainlet_cls._init_is_patched
 # Dependency chainlets are instantiated here, using their __init__
 # that is patched for local.
+ logging.debug(f"Creating first instance of `{dep.name}`.")
 instance = chainlet_cls() # type: ignore # Here init args are patched.
cls_to_instance[chainlet_cls] = instance - - kwargs_mod[arg_name] = instance + kwargs_mod[arg_name] = instance logging.debug(f"Calling original __init__ of {chainlet_descriptor.name}.") - original_init(self, **kwargs_mod) + __original_init__(self, **kwargs_mod) return __init_local__ @@ -924,7 +1012,6 @@ def run_local( secrets: Mapping[str, str], data_dir: Optional[pathlib.Path], chainlet_to_service: Mapping[str, definitions.ServiceDescriptor], - user_env: Mapping[str, str], ) -> Any: """Context to run Chainlets with dependency injection from local instances.""" # TODO: support retries in local mode. @@ -933,9 +1020,10 @@ def run_local( ] = {} original_inits: MutableMapping[Type[definitions.ABCChainlet], Callable] = {} - # Capture the stack depth when entering the context manager + # Capture the stack depth when entering the context manager. The stack is used + # to check that chainlets' `__init__` methods are only called within this context + # manager, to flag naive instantiations. stack_depth = len(inspect.stack()) - token = None for chainlet_descriptor in _global_chainlet_registry.chainlet_descriptors: original_inits[chainlet_descriptor.chainlet_cls] = ( chainlet_descriptor.chainlet_cls.__init__ @@ -946,21 +1034,20 @@ def run_local( secrets, data_dir, chainlet_to_service, - user_env, ) chainlet_descriptor.chainlet_cls.__init__ = init_for_local # type: ignore[method-assign] chainlet_descriptor.chainlet_cls._init_is_patched = True + # Subtract 2 levels: `run_local` (this) and `__enter__` (from @contextmanager). + token = run_local_stack_depth.set(stack_depth - 2) try: - # Subtract 2 levels: `run_local` (this) and `__enter__` (from @contextmanager). - token = run_local_stack_depth.set(stack_depth - 2) yield finally: # Restore original classes to unpatched state. for chainlet_cls, original_init in original_inits.items(): chainlet_cls.__init__ = original_init # type: ignore[method-assign] chainlet_cls._init_is_patched = False - if token is not None: - run_local_stack_depth.reset(token) + + run_local_stack_depth.reset(token) ######################################################################################## @@ -1045,7 +1132,8 @@ def import_target( target_cls = getattr(module, target_name, None) if not target_cls: raise AttributeError( - f"Target Chainlet class `{target_name}` not found in `{module_path}`." + f"Target Chainlet class `{target_name}` not found " + f"in `{module_path}`." ) if not utils.issubclass_safe(target_cls, definitions.ABCChainlet): raise TypeError( diff --git a/truss-chains/truss_chains/model_skeleton.py b/truss-chains/truss_chains/model_skeleton.py index cd97ba161..2173b5fed 100644 --- a/truss-chains/truss_chains/model_skeleton.py +++ b/truss-chains/truss_chains/model_skeleton.py @@ -1,18 +1,14 @@ import pathlib from typing import Optional -import pydantic from truss.templates.shared import secrets_resolver from truss_chains import definitions from truss_chains.utils import override_chainlet_to_service_metadata -# Better: in >=3.10 use `TypeAlias`. -UserConfigT = pydantic.BaseModel - class TrussChainletModel: - _context: definitions.DeploymentContext[UserConfigT] + _context: definitions.DeploymentContext _chainlet: definitions.ABCChainlet def __init__( @@ -24,10 +20,8 @@ def __init__( dict ] = None, # TODO: Remove the default value once all truss versions are synced up. 
) -> None:
- truss_metadata: definitions.TrussMetadata[UserConfigT] = (
- definitions.TrussMetadata[
- UserConfigT
- ].model_validate(
+ truss_metadata: definitions.TrussMetadata = (
+ definitions.TrussMetadata.model_validate(
 config["model_metadata"][definitions.TRUSS_CONFIG_CHAINS_KEY]
 )
 )
@@ -36,12 +30,10 @@
 )
 override_chainlet_to_service_metadata(truss_metadata.chainlet_to_service)
- self._context = definitions.DeploymentContext[UserConfigT](
- user_config=truss_metadata.user_config,
+ self._context = definitions.DeploymentContext(
 chainlet_to_service=truss_metadata.chainlet_to_service,
 secrets=secrets,
 data_dir=data_dir,
- user_env=truss_metadata.user_env,
 environment=deployment_environment,
 )
diff --git a/truss-chains/truss_chains/public_api.py b/truss-chains/truss_chains/public_api.py
index d8817decb..aab38f798 100644
--- a/truss-chains/truss_chains/public_api.py
+++ b/truss-chains/truss_chains/public_api.py
@@ -1,6 +1,5 @@
 import functools
 import pathlib
-import warnings
 from typing import ContextManager, Mapping, Optional, Type, Union
 from truss_chains import definitions, framework
@@ -122,7 +121,6 @@ def push(
 chain_name: str,
 publish: bool = True,
 promote: bool = True,
- user_env: Optional[Mapping[str, str]] = None,
 only_generate_trusses: bool = False,
 remote: Optional[str] = None,
 environment: Optional[str] = None,
@@ -137,9 +135,6 @@
 draft deployment otherwise)
 promote: Whether to promote the chain to be the production deployment (this
 implies publishing as well).
- user_env: These values can be provided to
- the push command and customize the behavior of deployed chainlets. E.g.
- for differentiating between prod and dev version of the same chain.
 only_generate_trusses: Used for debugging purposes. If set to True, only the
 underlying truss models for the chainlets are generated in
 ``/tmp/.chains_generated``.
@@ -155,7 +150,6 @@
 chain_name=chain_name,
 publish=publish,
 promote=promote,
- user_env=user_env or {},
 only_generate_trusses=only_generate_trusses,
 remote=remote,
 environment=environment,
@@ -165,38 +159,10 @@
 return service
-def deploy_remotely(
- entrypoint: Type[definitions.ABCChainlet],
- chain_name: str,
- publish: bool = True,
- promote: bool = True,
- user_env: Optional[Mapping[str, str]] = None,
- only_generate_trusses: bool = False,
- remote: Optional[str] = None,
-) -> chains_remote.BasetenChainService:
- """Deprecated, use ``push`` instead."""
- warnings.warn(
- "Chains `deploy_remotely()` is deprecated and will be removed in a "
- "future version. Please use `push()` instead.",
- DeprecationWarning,
- stacklevel=2,
- )
- return push(
- entrypoint,
- chain_name,
- publish,
- promote,
- user_env,
- only_generate_trusses,
- remote,
- )
-
-
 def run_local(
 secrets: Optional[Mapping[str, str]] = None,
 data_dir: Optional[Union[pathlib.Path, str]] = None,
 chainlet_to_service: Optional[Mapping[str, definitions.ServiceDescriptor]] = None,
- user_env: Optional[Mapping[str, str]] = None,
 ) -> ContextManager[None]:
 """Context manager for local debug execution of a chain.
@@ -207,7 +173,6 @@
 secrets: A dict of secrets keys and values to provide to the chainlets.
 data_dir: Path to a directory with data files.
 chainlet_to_service: A dict of chainlet names to service descriptors.
- user_env: see ``deploy_remotely``.
 Example usage (as trailing main section in a chain file)::
@@ -240,6 +205,4 @@ class HelloWorld(chains.ChainletBase):
 for more details.
""" data_dir = pathlib.Path(data_dir) if data_dir else None - return framework.run_local( - secrets or {}, data_dir, chainlet_to_service or {}, user_env or {} - ) + return framework.run_local(secrets or {}, data_dir, chainlet_to_service or {}) diff --git a/truss-chains/truss_chains/remote.py b/truss-chains/truss_chains/remote.py index 1ccb7f667..564376f4e 100644 --- a/truss-chains/truss_chains/remote.py +++ b/truss-chains/truss_chains/remote.py @@ -24,7 +24,6 @@ ) import tenacity -import truss import watchfiles if TYPE_CHECKING: @@ -34,6 +33,7 @@ from truss.remote.baseten import custom_types as b10_types from truss.remote.baseten import remote as b10_remote from truss.remote.baseten import service as b10_service +from truss.truss_handle import build as truss_build from truss.util import log_utils from truss.util import path as truss_path @@ -45,7 +45,7 @@ def _push_to_baseten( truss_dir: pathlib.Path, options: definitions.PushOptionsBaseten, chainlet_name: str ) -> b10_service.BasetenService: - truss_handle = truss.load(str(truss_dir)) + truss_handle = truss_build.load(str(truss_dir)) model_name = truss_handle.spec.config.model_name assert model_name is not None assert bool(_MODEL_NAME_RE.match(model_name)) @@ -111,7 +111,7 @@ def _push_service( f"Running in docker container `{chainlet_descriptor.display_name}` " ) port = utils.get_free_port() - truss_handle = truss.load(str(truss_dir)) + truss_handle = truss_build.load(str(truss_dir)) truss_handle.add_secret( definitions.BASETEN_API_SECRET_NAME, options.baseten_chain_api_key ) @@ -392,7 +392,6 @@ def push( chainlet_descriptor, model_name, chainlet_display_name_to_url, - self._options.user_env, ) if self._options.only_generate_trusses: chainlet_display_name_to_url[chainlet_descriptor.display_name] = ( @@ -547,7 +546,7 @@ def _assert_chainlet_names_same(self, new_names: set[str]) -> None: raise definitions.ChainsDeploymentError("\n".join(msg_parts)) def _code_gen_and_patch_thread( - self, descr: definitions.ChainletAPIDescriptor, user_env: Mapping[str, str] + self, descr: definitions.ChainletAPIDescriptor ) -> tuple[b10_remote.PatchResult, list[str]]: with log_utils.LogInterceptor() as log_interceptor: # TODO: Maybe try-except code_gen errors explicitly. @@ -558,7 +557,6 @@ def _code_gen_and_patch_thread( descr, self._chainlet_data[descr.display_name].oracle_name, self._chainlet_display_name_to_url, - user_env, ) patch_result = self._remote_provider.patch_for_chainlet( chainlet_dir, self._ignore_patterns @@ -566,11 +564,7 @@ def _code_gen_and_patch_thread( logs = log_interceptor.get_logs() return patch_result, logs - def _patch( - self, - executor: concurrent.futures.Executor, - user_env: Optional[Mapping[str, str]], - ) -> None: + def _patch(self, executor: concurrent.futures.Executor) -> None: exception_raised = None stack_trace = "" with log_utils.LogInterceptor() as log_interceptor, self._console.status( @@ -593,7 +587,6 @@ def _patch( future = executor.submit( self._code_gen_and_patch_thread, chainlet_descr, - user_env or {}, ) future_to_display_name[future] = chainlet_descr.display_name # Threads need to finish while inside the `import_target`-context. @@ -678,15 +671,15 @@ def _check_patch_results( ) self._error_console.print(msg) - def watch(self, user_env: Optional[Mapping[str, str]]) -> None: + def watch(self) -> None: with concurrent.futures.ThreadPoolExecutor() as executor: # Perform one initial patch at startup. 
- self._patch(executor, user_env)
+ self._patch(executor)
 self._console.print("👀 Watching for new changes.", style="blue")
 for _ in watchfiles.watch(
 self._chain_root, watch_filter=self._watch_filter, raise_interrupt=False
 ):
- self._patch(executor, user_env)
+ self._patch(executor)
 self._console.print("👀 Watching for new changes.", style="blue")
@@ -696,7 +689,6 @@ def watch(
 entrypoint: Optional[str],
 name: Optional[str],
 remote: Optional[str],
- user_env: Optional[Mapping[str, str]],
 console: "rich_console.Console",
 error_console: "rich_console.Console",
 show_stack_trace: bool,
@@ -711,4 +703,4 @@ def watch(
 patcher = _Watcher(
 source, entrypoint, name, remote, console, error_console, show_stack_trace
 )
- patcher.watch(user_env)
+ patcher.watch()
diff --git a/truss/__init__.py b/truss/__init__.py
index e381fa15d..011b91f40 100644
--- a/truss/__init__.py
+++ b/truss/__init__.py
@@ -7,7 +7,6 @@
 # Suppress Pydantic V1 warnings, because we have to use it for backwards compat.
 warnings.filterwarnings("ignore", category=PydanticDeprecatedSince20)
-
 __version__ = get_version(__name__, Path(__file__).parent.parent)
@@ -15,7 +14,8 @@ def version():
 return __version__
-from truss.api import login, push
-from truss.build import from_directory, init, kill_all, load
+from truss.api import login, push, whoami
+from truss.base import truss_config
+from truss.truss_handle.build import load # TODO: Refactor all usages and remove.
-__all__ = ["from_directory", "init", "kill_all", "load", "push", "login"]
+__all__ = ["push", "login", "load", "whoami", "truss_config"]
diff --git a/truss/api/__init__.py b/truss/api/__init__.py
index e0248452b..306a13b14 100644
--- a/truss/api/__init__.py
+++ b/truss/api/__init__.py
@@ -1,10 +1,10 @@
 from typing import Optional, cast
-import truss
 from truss.api import definitions
 from truss.remote.baseten.service import BasetenService
 from truss.remote.remote_factory import RemoteFactory
 from truss.remote.truss_remote import RemoteConfig
+from truss.truss_handle.build import load
 def login(api_key: str):
@@ -26,6 +26,27 @@
 RemoteFactory.update_remote_config(remote_config)
+def whoami(remote: Optional[str] = None):
+ """
+ Returns account information for the current user.
+ """
+ if not remote:
+ available_remotes = RemoteFactory.get_available_config_names()
+ if len(available_remotes) == 1:
+ remote = available_remotes[0]
+ elif len(available_remotes) == 0:
+ raise ValueError(
+ "No remotes configured. Please authenticate via `truss.login` first."
+ )
+ else:
+ raise ValueError(
+ "Multiple remotes found. Please pass the remote as an argument."
+ ) + + remote_provider = RemoteFactory.create(remote=remote) + return remote_provider.whoami() + + def push( target_directory: str, remote: Optional[str] = None, @@ -74,7 +95,7 @@ def push( ) remote_provider = RemoteFactory.create(remote=remote) - tr = truss.load(target_directory) + tr = load(target_directory) model_name = model_name or tr.spec.config.model_name if not model_name: raise ValueError( diff --git a/truss/test_data/annotated_types_truss/model/__init__.py b/truss/base/__init__.py similarity index 100% rename from truss/test_data/annotated_types_truss/model/__init__.py rename to truss/base/__init__.py diff --git a/truss/constants.py b/truss/base/constants.py similarity index 94% rename from truss/constants.py rename to truss/base/constants.py index 52f0038fe..94dce03a8 100644 --- a/truss/constants.py +++ b/truss/base/constants.py @@ -1,4 +1,3 @@ -import os import pathlib from typing import Set @@ -11,11 +10,10 @@ HUGGINGFACE_TRANSFORMER = "huggingface_transformer" LIGHTGBM = "lightgbm" -BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) -CODE_DIR = pathlib.Path(BASE_DIR, "truss") +_TRUSS_ROOT = pathlib.Path(__file__).parent.parent.resolve() -TEMPLATES_DIR = pathlib.Path(CODE_DIR, "templates") -DOCKER_SERVER_TEMPLATES_DIR = pathlib.Path(CODE_DIR, "templates", "docker_server") +TEMPLATES_DIR = _TRUSS_ROOT / "templates" +DOCKER_SERVER_TEMPLATES_DIR = TEMPLATES_DIR / "docker_server" SERVER_CODE_DIR: pathlib.Path = TEMPLATES_DIR / "server" TRITON_SERVER_CODE_DIR: pathlib.Path = TEMPLATES_DIR / "triton" AUDIO_MODEL_TRTLLM_TRUSS_DIR: pathlib.Path = TEMPLATES_DIR / "trtllm-audio" diff --git a/truss/base/custom_types.py b/truss/base/custom_types.py new file mode 100644 index 000000000..1a35654e0 --- /dev/null +++ b/truss/base/custom_types.py @@ -0,0 +1,35 @@ +from dataclasses import dataclass +from enum import Enum +from typing import Any + + +# TODO(marius/TaT): kill this. +class ModelFrameworkType(Enum): + SKLEARN = "sklearn" + TENSORFLOW = "tensorflow" + KERAS = "keras" + PYTORCH = "pytorch" + HUGGINGFACE_TRANSFORMER = "huggingface_transformer" + XGBOOST = "xgboost" + LIGHTGBM = "lightgbm" + MLFLOW = "mlflow" + CUSTOM = "custom" + + +@dataclass +class Example: + name: str + input: Any + + @staticmethod + def from_dict(example_dict): + return Example( + name=example_dict["name"], + input=example_dict["input"], + ) + + def to_dict(self) -> dict: + return { + "name": self.name, + "input": self.input, + } diff --git a/truss/errors.py b/truss/base/errors.py similarity index 93% rename from truss/errors.py rename to truss/base/errors.py index d3fb49a28..f096d5732 100644 --- a/truss/errors.py +++ b/truss/base/errors.py @@ -38,3 +38,7 @@ class ContainerNotFoundError(Error): class ContainerAPINoResponseError(Error): pass + + +class RemoteNetworkError(Exception): + pass diff --git a/truss/config/trt_llm.py b/truss/base/trt_llm_config.py similarity index 97% rename from truss/config/trt_llm.py rename to truss/base/trt_llm_config.py index 949439c0e..315165402 100644 --- a/truss/config/trt_llm.py +++ b/truss/base/trt_llm_config.py @@ -1,5 +1,4 @@ import json -import logging import warnings from enum import Enum from typing import Optional @@ -7,16 +6,10 @@ from huggingface_hub.errors import HFValidationError from huggingface_hub.utils import validate_repo_id from pydantic import BaseModel, PydanticDeprecatedSince20, validator -from rich.console import Console # Suppress Pydantic V1 warnings, because we have to use it for backwards compat. 
warnings.filterwarnings("ignore", category=PydanticDeprecatedSince20) -logging.basicConfig(level=logging.INFO) -logger = logging.getLogger(__name__) - -console = Console() - class TrussTRTLLMModel(str, Enum): LLAMA = "llama" diff --git a/truss/truss_config.py b/truss/base/truss_config.py similarity index 92% rename from truss/truss_config.py rename to truss/base/truss_config.py index f28eb1cc5..da0af2645 100644 --- a/truss/truss_config.py +++ b/truss/base/truss_config.py @@ -1,23 +1,24 @@ import logging +import sys from dataclasses import _MISSING_TYPE, dataclass, field, fields from enum import Enum from pathlib import Path -from typing import Any, Dict, List, Optional +from typing import Any, Callable, Dict, List, Optional, TypeVar import yaml -from truss.config.trt_llm import TRTLLMConfiguration, TrussTRTLLMQuantizationType -from truss.constants import HTTP_PUBLIC_BLOB_BACKEND -from truss.custom_types import ModelFrameworkType -from truss.errors import ValidationError -from truss.util.data_structures import transform_optional -from truss.validation import ( +from truss.base.constants import HTTP_PUBLIC_BLOB_BACKEND +from truss.base.custom_types import ModelFrameworkType +from truss.base.errors import ValidationError +from truss.base.trt_llm_config import TRTLLMConfiguration, TrussTRTLLMQuantizationType +from truss.base.validation import ( validate_cpu_spec, validate_memory_spec, validate_python_executable_path, validate_secret_name, validate_secret_to_path_mapping, ) +from truss.util.requirements import parse_requirement_string DEFAULT_MODEL_FRAMEWORK_TYPE = ModelFrameworkType.CUSTOM DEFAULT_MODEL_TYPE = "Model" @@ -43,8 +44,17 @@ VALID_PYTHON_VERSIONS = ["py38", "py39", "py310", "py311"] -# Set up logging -logging.basicConfig(level=logging.INFO) +X = TypeVar("X") +Y = TypeVar("Y") + + +def transform_optional(x: Optional[X], fn: Callable[[X], Optional[Y]]) -> Optional[Y]: + if x is None: + return None + + return fn(x) + + logger = logging.getLogger(__name__) @@ -617,8 +627,13 @@ def load_requirements_from_file(self, truss_dir: Path) -> List[str]: if self.requirements_file: requirements_path = truss_dir / self.requirements_file try: + requirements = [] with open(requirements_path) as f: - return [x for x in f.read().split("\n") if x] + for line in f.readlines(): + parsed_line = parse_requirement_string(line) + if parsed_line: + requirements.append(parsed_line) + return requirements except Exception as e: logger.exception( f"failed to read requirements file: {self.requirements_file}" @@ -795,3 +810,45 @@ def obj_to_dict(obj, verbose: bool = False): d[field_name] = field_curr_value return d + + +# TODO(marius): consolidate this with config/validation: +def _infer_python_version() -> str: + return f"py{sys.version_info.major}{sys.version_info.minor}" + + +def map_local_to_supported_python_version() -> str: + return map_to_supported_python_version(_infer_python_version()) + + +def map_to_supported_python_version(python_version: str) -> str: + """Map python version to truss supported python version. + + Currently, it maps any versions greater than 3.11 to 3.11. + + Args: + python_version: in the form py[major_version][minor_version] e.g. py39, + py310. 
+ """ + python_major_version = int(python_version[2:3]) + python_minor_version = int(python_version[3:]) + + if python_major_version != 3: + raise NotImplementedError("Only python version 3 is supported") + + if python_minor_version > 11: + logger.info( + f"Mapping python version {python_major_version}.{python_minor_version}" + " to 3.11, the highest version that Truss currently supports." + ) + return "py311" + + if python_minor_version < 8: + # TODO: consider raising an error instead - it doesn't' seem safe. + logger.info( + f"Mapping python version {python_major_version}.{python_minor_version}" + " to 3.8, the lowest version that Truss currently supports." + ) + return "py38" + + return python_version diff --git a/truss/truss_spec.py b/truss/base/truss_spec.py similarity index 95% rename from truss/truss_spec.py rename to truss/base/truss_spec.py index 85af0b2c1..340b27b4e 100644 --- a/truss/truss_spec.py +++ b/truss/base/truss_spec.py @@ -4,11 +4,11 @@ import yaml -from truss.constants import CONFIG_FILE -from truss.custom_types import Example, ModelFrameworkType -from truss.errors import ValidationError -from truss.truss_config import ExternalData, ModelServer, TrussConfig -from truss.validation import validate_memory_spec +from truss.base.constants import CONFIG_FILE +from truss.base.custom_types import Example, ModelFrameworkType +from truss.base.errors import ValidationError +from truss.base.truss_config import ExternalData, ModelServer, TrussConfig +from truss.base.validation import validate_memory_spec class TrussSpec: diff --git a/truss/validation.py b/truss/base/validation.py similarity index 97% rename from truss/validation.py rename to truss/base/validation.py index c4cfeba58..f071ab8d1 100644 --- a/truss/validation.py +++ b/truss/base/validation.py @@ -3,8 +3,8 @@ from pathlib import PosixPath from typing import Dict, Pattern -from truss.constants import REGISTRY_BUILD_SECRET_PREFIX -from truss.errors import ValidationError +from truss.base.constants import REGISTRY_BUILD_SECRET_PREFIX +from truss.base.errors import ValidationError SECRET_NAME_MATCH_REGEX: Pattern[str] = re.compile(r"^[-._a-zA-Z0-9]+$") MILLI_CPU_REGEX: Pattern[str] = re.compile(r"^[0-9.]*m$") diff --git a/truss/blob/blob_backend.py b/truss/blob/blob_backend.py deleted file mode 100644 index f93d1c67f..000000000 --- a/truss/blob/blob_backend.py +++ /dev/null @@ -1,10 +0,0 @@ -from abc import ABC, abstractmethod -from pathlib import Path - - -class BlobBackend(ABC): - """A blob backend downloads large remote files.""" - - @abstractmethod - def download(self, url: str, download_to: Path): - raise NotImplementedError() diff --git a/truss/blob/blob_backend_registry.py b/truss/blob/blob_backend_registry.py deleted file mode 100644 index a624a0997..000000000 --- a/truss/blob/blob_backend_registry.py +++ /dev/null @@ -1,23 +0,0 @@ -from typing import Dict - -from truss.blob.blob_backend import BlobBackend -from truss.blob.http_public_blob_backend import HttpPublic -from truss.constants import HTTP_PUBLIC_BLOB_BACKEND - - -class _BlobBackendRegistry: - def __init__(self) -> None: - self._backends: Dict[str, BlobBackend] = {} - # Register default backend - self._backends[HTTP_PUBLIC_BLOB_BACKEND] = HttpPublic() - - def register_backend(self, name: str, backend: BlobBackend): - self._backends[name] = backend - - def get_backend(self, name: str): - if name not in self._backends: - raise ValueError(f"Backend {name} is not registered.") - return self._backends[name] - - -BLOB_BACKEND_REGISTRY = _BlobBackendRegistry() 
diff --git a/truss/blob/http_public_blob_backend.py b/truss/blob/http_public_blob_backend.py
deleted file mode 100644
index 55910551e..000000000
--- a/truss/blob/http_public_blob_backend.py
+++ /dev/null
@@ -1,23 +0,0 @@
-import shutil
-from pathlib import Path
-
-import requests
-from truss.blob.blob_backend import BlobBackend
-
-BLOB_DOWNLOAD_TIMEOUT_SECS = 600 # 10 minutes
-
-
-class HttpPublic(BlobBackend):
- """Downloads without auth, files must be publicly available."""
-
- def download(self, URL: str, download_to: Path):
- # Streaming download to keep memory usage low
- resp = requests.get(
- URL,
- allow_redirects=True,
- stream=True,
- timeout=BLOB_DOWNLOAD_TIMEOUT_SECS,
- )
- resp.raise_for_status()
- with download_to.open("wb") as file:
- shutil.copyfileobj(resp.raw, file)
diff --git a/truss/cli/cli.py b/truss/cli/cli.py
index b35aa09ac..2bc1cb156 100644
--- a/truss/cli/cli.py
+++ b/truss/cli/cli.py
@@ -7,7 +7,7 @@
 import warnings
 from functools import wraps
 from pathlib import Path
-from typing import Any, Callable, List, Optional, Tuple, Union
+from typing import Callable, List, Optional, Tuple, Union
 import rich
 import rich.live
@@ -20,8 +20,13 @@
 from rich.console import Console
 import truss
-from truss.config.trt_llm import TrussTRTLLMQuantizationType
-from truss.constants import PRODUCTION_ENVIRONMENT_NAME, TRTLLM_MIN_MEMORY_REQUEST_GI
+from truss.base.constants import (
+ PRODUCTION_ENVIRONMENT_NAME,
+ TRTLLM_MIN_MEMORY_REQUEST_GI,
+)
+from truss.base.errors import RemoteNetworkError
+from truss.base.trt_llm_config import TrussTRTLLMQuantizationType
+from truss.base.truss_config import Build, ModelServer
 from truss.remote.baseten.core import (
 ACTIVE_STATUS,
 DEPLOYING_STATUSES,
@@ -38,13 +43,15 @@
 inquire_remote_name,
 )
 from truss.remote.remote_factory import USER_TRUSSRC_PATH, RemoteFactory
-from truss.truss_config import Build, ModelServer
-from truss.util.config_checks import (
+from truss.trt_llm.config_checks import (
 check_and_update_memory_for_trt_llm_builder,
 check_secrets_for_trt_llm_builder,
 uses_trt_llm_builder,
 )
-from truss.util.errors import RemoteNetworkError
+from truss.truss_handle.build import cleanup as _cleanup
+from truss.truss_handle.build import init as _init
+from truss.truss_handle.build import load
+from truss.util import docker
 from truss.util.log_utils import LogInterceptor
 rich.spinner.SPINNERS["deploying"] = {"interval": 500, "frames": ["👾 ", " 👾"]}
@@ -216,7 +223,7 @@ def init(target_directory, backend, name) -> None:
 model_name = name
 else:
 model_name = inquire_model_name()
- truss.init(
+ _init(
 target_directory=target_directory,
 build_config=build_config,
 model_name=model_name,
@@ -334,6 +341,28 @@ def login(api_key: Optional[str]):
 login(api_key)
+@truss_cli.command()
+@click.option(
+ "--remote",
+ type=str,
+ required=False,
+ help="Name of the remote in .trussrc to use.",
+)
+@error_handling
+def whoami(remote: Optional[str]):
+ """
+ Show user information and exit.
+ """
+ from truss.api import whoami
+
+ if not remote:
+ remote = inquire_remote_name(RemoteFactory.get_available_config_names())
+
+ user = whoami(remote)
+
+ console.print(f"{user.workspace_name}\\{user.user_email}")
+
+
@truss_cli.command()
@click.argument("target_directory", required=False, default=os.getcwd())
@click.option(
@@ -380,35 +409,7 @@ def watch(
 # Chains Stuff #########################################################################
-class ChainsGroup(click.Group):
- _ALIASES = {"deploy": "push"} # Alias `deploy` to push for backwards compat.
-
- def get_command(self, ctx: click.Context, cmd_name: str) -> Optional[click.Command]:
- if cmd_name in self._ALIASES:
- if cmd_name == "deploy":
- warnings.warn(
- "`truss chains deploy` is deprecated and will be removed in a "
- "future version. Please use `truss chains push` instead.",
- DeprecationWarning,
- stacklevel=1,
- )
- cmd_name = self._ALIASES[cmd_name]
-
- return super().get_command(ctx, cmd_name)
-
- def list_commands(self, ctx: click.Context) -> List[str]:
- commands = super().list_commands(ctx)
- return commands + list(self._ALIASES.keys())
-
- def invoke(self, ctx: click.Context) -> Any:
- # This import raises error messages if pydantic v2 or python older than 3.9
- # are installed.
- import truss_chains # noqa: F401
-
- return super().invoke(ctx)
-
-
-@click.group(cls=ChainsGroup)
+@click.group()
def chains():
 """Subcommands for truss chains"""
@@ -557,10 +558,8 @@ def _create_chains_table(service) -> Tuple[rich.table.Table, List[str]]:
 "--user_env",
 required=False,
 type=str,
- help=(
- "Key-value-pairs (as JSON str) that can be used to control "
- "deployment-specific chainlet behavior."
- ),
+ help="[DEPRECATED] Use `environment` instead.",
+ hidden=True,
)
@log_level_option
@error_handling
@@ -604,17 +603,7 @@ def push_chain(
 wait = True
 if user_env:
- try:
- user_env_parsed = json.loads(user_env)
- except json.JSONDecodeError as e:
- raise ValueError(
- f"Invalid JSON string for user_env: `{user_env}`.\n"
- f"user_env must be a JSON dict with string values and string keys.\n"
- 'Example: --user_env \'{"key1": "value1", "key2": "value2"}\'.\n'
- f"Error: {e}"
- )
- else:
- user_env_parsed = {}
+ raise ValueError("`user_env` is deprecated, use `environment` instead.")
 if promote and environment:
 promote_warning = "`promote` flag and `environment` flag were both specified. Ignoring the value of `promote`"
@@ -627,7 +616,6 @@ def push_chain(
 promote=promote,
 publish=publish,
 only_generate_trusses=dryrun,
- user_env=user_env_parsed,
 remote=remote,
 environment=environment,
 )
@@ -684,7 +672,6 @@ def push_chain(
 entrypoint,
 name,
 remote,
- user_env_parsed,
 console,
 error_console,
 show_stack_trace=not is_humanfriendly_log_level,
@@ -718,10 +705,8 @@ def push_chain(
 "--user_env",
 required=False,
 type=str,
- help=(
- "Key-value-pairs (as JSON str) that can be used to control "
- "deployment-specific chainlet behavior."
- ),
+ help="[DEPRECATED] Use `environment` instead.",
+ hidden=True,
)
@log_level_option
@error_handling
@@ -748,24 +733,13 @@ def watch_chains(
 console.print("") # Print a newline.
if user_env: - try: - user_env_parsed = json.loads(user_env) - except json.JSONDecodeError as e: - raise ValueError( - f"Invalid JSON string for user_env: `{user_env}`.\n" - f"user_env must be a JSON dict with string values and string keys.\n" - 'Example: --user_env \'{"key1": "value1", "key2": "value2"}\'.\n' - f"Error: {e}" - ) - else: - user_env_parsed = {} + raise ValueError("`user_env` is deprecated, use `environment` instead.") chains_remote.watch( source, entrypoint, name, remote, - user_env_parsed, console, error_console, show_stack_trace=not is_humanfriendly_log_level, @@ -1073,6 +1047,14 @@ def run_python(script, target_directory): default=False, help="Trust truss with hosted secrets.", ) +@click.option( + "--disable-truss-download", + type=bool, + is_flag=True, + required=False, + default=False, + help="Disable downloading the truss directory from the UI.", +) @click.option( "--deployment-name", type=str, @@ -1107,6 +1089,7 @@ def push( model_name: str, publish: bool = False, trusted: bool = False, + disable_truss_download: bool = False, promote: bool = False, preserve_previous_production_deployment: bool = False, deployment_name: Optional[str] = None, @@ -1195,6 +1178,7 @@ def push( preserve_previous_prod_deployment=preserve_previous_production_deployment, deployment_name=deployment_name, environment=environment, + disable_truss_download=disable_truss_download, ) # type: ignore click.echo(f"✨ Model {model_name} was successfully pushed ✨") @@ -1306,7 +1290,7 @@ def kill(target_directory: str) -> None: @container.command() # type: ignore def kill_all() -> None: """Kills all truss containers that are not manually persisted.""" - truss.kill_all() + docker.kill_all() @truss_cli.command() @@ -1319,14 +1303,14 @@ def cleanup() -> None: such as for building docker images. This command clears that data to free up disk space. """ - truss.build.cleanup() + _cleanup() def _get_truss_from_directory(target_directory: Optional[str] = None): """Gets Truss from directory. 
If none, use the current directory""" if target_directory is None: target_directory = os.getcwd() - return truss.load(target_directory) + return load(target_directory) truss_cli.add_command(container) diff --git a/truss/contexts/image_builder/image_builder.py b/truss/contexts/image_builder/image_builder.py index 81d60cb25..e8f3fb539 100644 --- a/truss/contexts/image_builder/image_builder.py +++ b/truss/contexts/image_builder/image_builder.py @@ -2,7 +2,7 @@ from pathlib import Path from typing import Optional -from truss.docker import Docker +from truss.util.docker import Docker from truss.util.path import given_or_temporary_dir diff --git a/truss/contexts/image_builder/serving_image_builder.py b/truss/contexts/image_builder/serving_image_builder.py index b0abe2a48..698180337 100644 --- a/truss/contexts/image_builder/serving_image_builder.py +++ b/truss/contexts/image_builder/serving_image_builder.py @@ -12,9 +12,8 @@ from google.cloud import storage from huggingface_hub import get_hf_file_metadata, hf_hub_url, list_repo_files from huggingface_hub.utils import filter_repo_objects -from truss import constants -from truss.config.trt_llm import TrussTRTLLMModel -from truss.constants import ( +from truss.base import constants +from truss.base.constants import ( AUDIO_MODEL_TRTLLM_REQUIREMENTS, AUDIO_MODEL_TRTLLM_SYSTEM_PACKAGES, AUDIO_MODEL_TRTLLM_TRUSS_DIR, @@ -41,6 +40,9 @@ TRUSSLESS_MAX_PAYLOAD_SIZE, USER_SUPPLIED_REQUIREMENTS_TXT_FILENAME, ) +from truss.base.trt_llm_config import TrussTRTLLMModel +from truss.base.truss_config import DEFAULT_BUNDLED_PACKAGES_DIR, BaseImage, TrussConfig +from truss.base.truss_spec import TrussSpec from truss.contexts.image_builder.cache_warmer import ( AWSCredentials, parse_s3_credentials_file, @@ -54,9 +56,7 @@ truss_base_image_tag, ) from truss.contexts.truss_context import TrussContext -from truss.patch.hash import directory_content_hash -from truss.truss_config import DEFAULT_BUNDLED_PACKAGES_DIR, BaseImage, TrussConfig -from truss.truss_spec import TrussSpec +from truss.truss_handle.patch.hash import directory_content_hash from truss.util.jinja import read_template_from_fs from truss.util.path import ( build_truss_target_directory, @@ -151,7 +151,7 @@ def prepare_for_cache(self, filenames): class S3Cache(RemoteCache): - def list_files(self, revision=None): + def list_files(self, revision=None) -> List[str]: s3_credentials_file = self.data_dir / S3_CREDENTIALS if s3_credentials_file.exists(): diff --git a/truss/contexts/local_loader/load_model_local.py b/truss/contexts/local_loader/load_model_local.py index dd621cf70..b6acbbb40 100644 --- a/truss/contexts/local_loader/load_model_local.py +++ b/truss/contexts/local_loader/load_model_local.py @@ -1,6 +1,7 @@ import inspect from pathlib import Path +from truss.base.truss_spec import TrussSpec from truss.contexts.local_loader.truss_module_loader import truss_module_loaded from truss.contexts.local_loader.utils import ( prepare_secrets, @@ -8,7 +9,6 @@ ) from truss.contexts.truss_context import TrussContext from truss.templates.server.common.patches import apply_patches -from truss.truss_spec import TrussSpec class LoadModelLocal(TrussContext): diff --git a/truss/contexts/local_loader/utils.py b/truss/contexts/local_loader/utils.py index 0eb869310..15a1efded 100644 --- a/truss/contexts/local_loader/utils.py +++ b/truss/contexts/local_loader/utils.py @@ -2,8 +2,8 @@ import inspect from typing import Dict +from truss.base.truss_spec import TrussSpec from truss.local.local_config_handler import 
LocalConfigHandler -from truss.truss_spec import TrussSpec def prepare_secrets(spec: TrussSpec) -> Dict[str, str]: diff --git a/truss/local/local_config_handler.py b/truss/local/local_config_handler.py index 6ec2a6151..a9ae4a2ee 100644 --- a/truss/local/local_config_handler.py +++ b/truss/local/local_config_handler.py @@ -3,8 +3,8 @@ from pathlib import Path from typing import Optional +from truss.base.validation import validate_secret_name from truss.local.local_config import LocalConfig -from truss.validation import validate_secret_name class LocalConfigHandler: diff --git a/truss/model_inference.py b/truss/model_inference.py deleted file mode 100644 index 7229be9f0..000000000 --- a/truss/model_inference.py +++ /dev/null @@ -1,124 +0,0 @@ -import inspect -import logging -import sys -from ast import ClassDef, FunctionDef -from dataclasses import dataclass -from typing import Any, Dict, List, Tuple - -logger: logging.Logger = logging.getLogger(__name__) - - -@dataclass -class ModelBuildStageOne: - # the Python Class of the model - model_type: str - # the framework that the model is built in - model_framework: str - - -def _model_class(model: Any): - return model.__class__ - - -def infer_python_version() -> str: - return f"py{sys.version_info.major}{sys.version_info.minor}" - - -def map_to_supported_python_version(python_version: str) -> str: - """Map python version to truss supported python version. - - Currently, it maps any versions greater than 3.11 to 3.11. - - Args: - python_version: in the form py[major_version][minor_version] e.g. py39, - py310 - """ - python_major_version = int(python_version[2:3]) - python_minor_version = int(python_version[3:]) - - if python_major_version != 3: - raise NotImplementedError("Only python version 3 is supported") - - if python_minor_version > 11: - logger.info( - f"Mapping python version {python_major_version}.{python_minor_version}" - " to 3.11, the highest version that Truss currently supports." - ) - return "py311" - - if python_minor_version < 8: - # TODO: consider raising an error instead - it doesn't' seem safe. - logger.info( - f"Mapping python version {python_major_version}.{python_minor_version}" - " to 3.8, the lowest version that Truss currently supports." 
- ) - return "py38" - - return python_version - - -def _infer_model_init_parameters(model_class: Any) -> Tuple[List, List]: - full_arg_spec = inspect.getfullargspec(model_class.__init__) - named_args = full_arg_spec.args[1:] - number_of_kwargs = full_arg_spec.defaults and len(full_arg_spec.defaults) or 0 - required_args = full_arg_spec.args[1:-number_of_kwargs] - return named_args, required_args - - -def _infer_model_init_parameters_ast(model_class_def: ClassDef) -> Tuple[List, List]: - named_args: List[str] = [] - required_args: List[str] = [] - init_model_functions = [ - node - for node in model_class_def.body - if isinstance(node, FunctionDef) and node.name == "__init__" - ] - - if not init_model_functions: - return named_args, required_args - - assert ( - len(init_model_functions) == 1 - ), "There should only be one __init__ function in the model class" - init_model_function = init_model_functions[0] - named_args = [arg.arg for arg in init_model_function.args.args][1:] - number_of_defaults = len(init_model_function.args.defaults) - required_args = named_args[:-number_of_defaults] - return named_args, required_args - - -def validate_provided_parameters_with_model( - model_class: Any, provided_parameters: Dict[str, Any] -) -> None: - """ - Validates that all provided parameters match the signature of the model. - - Args: - model_class: The model class to validate against - provided_parameters: The parameters to validate - """ - if type(model_class) == ClassDef: - named_args, required_args = _infer_model_init_parameters_ast(model_class) - else: - named_args, required_args = _infer_model_init_parameters(model_class) - - # Check that there are no extra parameters - if not named_args: - return - - if provided_parameters and not isinstance(provided_parameters, dict): - raise TypeError( - f"Provided parameters must be a dict, not {type(provided_parameters)}" - ) - - for arg in provided_parameters: - if arg not in named_args: - raise ValueError( - f"Provided parameter {arg} is not a valid init parameter for the model." - ) - - for arg in required_args: - if arg not in provided_parameters: - raise ValueError( - f"Required init parameter {arg} was not provided for this model." - ) diff --git a/truss/patch/__init__.py b/truss/patch/__init__.py new file mode 100644 index 000000000..c9359903d --- /dev/null +++ b/truss/patch/__init__.py @@ -0,0 +1 @@ +# TODO(marius/TaT): This is a backwards compatibility shim until the baseten context builder is updated. diff --git a/truss/patch/custom_types.py b/truss/patch/custom_types.py deleted file mode 100644 index ab81ad275..000000000 --- a/truss/patch/custom_types.py +++ /dev/null @@ -1,36 +0,0 @@ -from dataclasses import dataclass, field -from typing import Dict, List - - -@dataclass -class TrussSignature: - """Truss signature stores information for calculating patches for future - changes to Truss. - - Currently, it stores hashes of all of the paths in the truss directory excluding the data dir, - and the truss config contents. Path hashes allow calculating added/updated/removes - paths in future trusses compared to this. Config contents allow calculating - config changes, such as add/update/remove of python requirements etc. 
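For orientation while `TrussSignature` is relocated (its field list continues in the hunk below): a tiny round-trip sketch of the payload it describes, using its new home under `truss.truss_handle` (import path as used by test_types.py later in this diff) and hypothetical values:

```python
from truss.truss_handle.patch.custom_types import TrussSignature  # new location

# Hypothetical signature; demonstrates the to_dict/from_dict round-trip.
sig = TrussSignature(
    content_hashes_by_path={"model/model.py": "blake3-hex-digest"},
    config="model_name: my-model\n",
    requirements_file_requirements=["torch==2.3.1"],
)
assert TrussSignature.from_dict(sig.to_dict()) == sig
```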
- """ - - content_hashes_by_path: Dict[str, str] - config: str - requirements_file_requirements: List[str] = field(default_factory=list) - - def to_dict(self) -> dict: - return { - "content_hashes_by_path": self.content_hashes_by_path, - "config": self.config, - "requirements_file_requirements": self.requirements_file_requirements, - } - - @staticmethod - def from_dict(d) -> "TrussSignature": - return TrussSignature( - content_hashes_by_path=d["content_hashes_by_path"], - config=d["config"], - requirements_file_requirements=d.get("requirements_file_requirements", []), - ) - - -ChangedPaths = Dict[str, List[str]] diff --git a/truss/patch/hash.py b/truss/patch/hash.py index bdd25e054..9aa86f691 100644 --- a/truss/patch/hash.py +++ b/truss/patch/hash.py @@ -1,71 +1,5 @@ -from pathlib import Path -from typing import Any, List, Optional +from truss.truss_handle.patch.hash import ( + directory_content_hash, # TODO(marius/TaT): Remove once backend is updated.import +) -from blake3 import blake3 -from truss.util.path import get_unignored_relative_paths_from_root - - -def directory_content_hash( - root: Path, - ignore_patterns: Optional[List[str]] = None, -) -> str: - """Calculate content based hash of a filesystem directory. - - Rough algo: Sort all files by path, then take hash of a content stream, where - we write path hash to the stream followed by hash of content if path is a file. - Note the hash of hash aspect. - - Also, note that name of the root directory is not taken into account, only the contents - underneath. The (root) Directory will have the same hash, even if renamed. - """ - hasher = blake3() - paths = list(get_unignored_relative_paths_from_root(root, ignore_patterns)) - paths.sort() - for path in paths: - hasher.update(str_hash(str(path))) - absolute_path = root / path - if absolute_path.is_file(): - hasher.update(file_content_hash(absolute_path)) - return hasher.hexdigest() - - -def file_content_hash(file: Path) -> bytes: - """Calculate blake3 hash of file content. - Returns: binary hash of content - """ - return _file_content_hash_loaded_hasher(file).digest() - - -def file_content_hash_str(file: Path) -> str: - """Calculate blake3 hash of file content. 
- - Returns: string hash of content - """ - return _file_content_hash_loaded_hasher(file).hexdigest() - - -def _file_content_hash_loaded_hasher(file: Path) -> Any: - hasher = blake3() - buffer = bytearray(128 * 1024) - mem_view = memoryview(buffer) - with file.open("rb") as f: - done = False - while not done: - n = f.readinto(mem_view) - if n > 0: - hasher.update(mem_view[:n]) - else: - done = True - return hasher - - -def str_hash(content: str) -> bytes: - hasher = blake3() - hasher.update(content.encode("utf-8")) - return hasher.digest() - - -def str_hash_str(content: str) -> str: - hasher = blake3() - hasher.update(content.encode("utf-8")) - return hasher.hexdigest() +__all__ = ["directory_content_hash"] diff --git a/truss/patch/signature.py b/truss/patch/signature.py index 21cbac48f..7fdfbf372 100644 --- a/truss/patch/signature.py +++ b/truss/patch/signature.py @@ -1,22 +1,5 @@ -from pathlib import Path -from typing import List, Optional +from truss.truss_handle.patch.signature import ( + calc_truss_signature, # TODO(marius/TaT): Remove once backend is updated.import +) -from truss.constants import CONFIG_FILE -from truss.patch.custom_types import TrussSignature -from truss.patch.dir_signature import directory_content_signature -from truss.truss_config import TrussConfig - - -def calc_truss_signature( - truss_dir: Path, ignore_patterns: Optional[List[str]] = None -) -> TrussSignature: - content_signature = directory_content_signature(truss_dir, ignore_patterns) - config_path = truss_dir / CONFIG_FILE - with (config_path).open("r") as config_file: - config = config_file.read() - requirements = TrussConfig.load_requirements_file_from_filepath(config_path) - return TrussSignature( - content_hashes_by_path=content_signature, - config=config, - requirements_file_requirements=requirements, - ) +__all__ = ["calc_truss_signature"] diff --git a/truss/patch/truss_dir_patch_applier.py b/truss/patch/truss_dir_patch_applier.py index da08b6e83..499496de3 100644 --- a/truss/patch/truss_dir_patch_applier.py +++ b/truss/patch/truss_dir_patch_applier.py @@ -1,87 +1,5 @@ -import logging -from pathlib import Path -from typing import List - -from truss.templates.control.control.helpers.custom_types import ( - Action, - ConfigPatch, - EnvVarPatch, - ExternalDataPatch, - ModelCodePatch, - Patch, - PythonRequirementPatch, - SystemPackagePatch, -) -from truss.templates.control.control.helpers.errors import UnsupportedPatch -from truss.templates.control.control.helpers.truss_patch.model_code_patch_applier import ( - apply_code_patch, -) -from truss.templates.control.control.helpers.truss_patch.requirement_name_identifier import ( - identify_requirement_name, - reqs_by_name, +from truss.truss_handle.patch.truss_dir_patch_applier import ( + TrussDirPatchApplier, # TODO(marius/TaT): Remove once backend is updated.import ) -from truss.templates.control.control.helpers.truss_patch.system_packages import ( - system_packages_set, -) -from truss.truss_config import TrussConfig - - -class TrussDirPatchApplier: - """Applies patches to a truss directory. - This should be compatible with ModelContainerPatchApplier. 
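The three truss/patch modules above are now thin re-export shims, so legacy import paths keep resolving until the backend migrates. A minimal illustration of what the shim guarantees (hypothetical usage, not part of the diff):

```python
# Both spellings resolve to the very same function object while the
# shim modules above are in place.
from truss.patch.hash import directory_content_hash as legacy_hash
from truss.truss_handle.patch.hash import directory_content_hash as new_hash

assert legacy_hash is new_hash
```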
- - Note: This class imported via old_build_setup.sh.jinja in the baseten - repository - """ - - def __init__(self, truss_dir: Path, logger: logging.Logger) -> None: - self._truss_dir = truss_dir - self._truss_config_path = self._truss_dir / "config.yaml" - self._truss_config = TrussConfig.from_yaml(self._truss_config_path) - self._logger = logger - - def __call__(self, patches: List[Patch]): - # Apply model code patches immediately - # Aggregate config patches and apply at end - reqs = reqs_by_name(self._truss_config.requirements) - pkgs = system_packages_set(self._truss_config.system_packages) - new_config = self._truss_config - for patch in patches: - self._logger.debug(f"Applying patch {patch.to_dict()}") - action = patch.body.action - if isinstance(patch.body, ModelCodePatch): - model_code_patch: ModelCodePatch = patch.body - model_module_dir = self._truss_dir / self._truss_config.model_module_dir - apply_code_patch(model_module_dir, model_code_patch, self._logger) - continue - if isinstance(patch.body, PythonRequirementPatch): - py_req_patch: PythonRequirementPatch = patch.body - req = py_req_patch.requirement - req_name = identify_requirement_name(req) - if action == Action.REMOVE: - del reqs[req_name] - continue - if action == Action.ADD or Action.UPDATE: - reqs[req_name] = req - continue - if isinstance(patch.body, SystemPackagePatch): - sys_pkg_patch: SystemPackagePatch = patch.body - pkg = sys_pkg_patch.package - if action == Action.REMOVE: - pkgs.remove(pkg) - continue - if action == Action.ADD or Action.UPDATE: - pkgs.add(pkg) - continue - # Each of EnvVarPatch and ExternalDataPatch can be expressed through an overwrite of the config, - # handled below - if isinstance(patch.body, EnvVarPatch): - continue - if isinstance(patch.body, ExternalDataPatch): - continue - if isinstance(patch.body, ConfigPatch): - new_config = TrussConfig.from_dict(patch.body.config) - continue - raise UnsupportedPatch(f"Unknown patch type {patch.type}") - new_config.write_to_yaml_file(self._truss_config_path) +__all__ = ["TrussDirPatchApplier"] diff --git a/truss/remote/baseten/api.py b/truss/remote/baseten/api.py index bf7715c7d..85d903b63 100644 --- a/truss/remote/baseten/api.py +++ b/truss/remote/baseten/api.py @@ -107,6 +107,7 @@ def create_model_from_truss( semver_bump: str, client_version: str, is_trusted: bool, + allow_truss_download: bool = True, deployment_name: Optional[str] = None, origin: Optional[b10_types.ModelOrigin] = None, chain_environment: Optional[str] = None, @@ -122,6 +123,7 @@ def create_model_from_truss( semver_bump: "{semver_bump}", client_version: "{client_version}", is_trusted: {'true' if is_trusted else 'false'}, + allow_truss_download: {'true' if allow_truss_download else 'false'}, {f'version_name: "{deployment_name}"' if deployment_name else ""} {f'model_origin: {origin.value}' if origin else ""} {f'chain_environment: "{chain_environment}"' if chain_environment else ""} @@ -178,6 +180,7 @@ def create_development_model_from_truss( config, client_version, is_trusted=False, + allow_truss_download=True, origin: Optional[b10_types.ModelOrigin] = None, ): query_string = f""" @@ -187,6 +190,7 @@ def create_development_model_from_truss( config: "{config}", client_version: "{client_version}", is_trusted: {'true' if is_trusted else 'false'}, + allow_truss_download: {'true' if allow_truss_download else 'false'}, {f'model_origin: {origin.value}' if origin else ""} ) {{ id, @@ -344,6 +348,25 @@ def models(self): resp = self._post_graphql_query(query_string) return resp["data"] + def 
get_truss_watch_state(self, model_name: str): + query_string = f""" + {{ + truss_watch_state(name: "{model_name}") {{ + is_container_built_from_push + django_patch_state {{ + current_hash + current_signature + }} + container_patch_state {{ + current_hash + current_signature + }} + }} + }} + """ + resp = self._post_graphql_query(query_string) + return resp["data"] + def get_model(self, model_name): query_string = f""" {{ @@ -407,6 +430,53 @@ def get_model_version_by_id(self, model_version_id: str): resp = self._post_graphql_query(query_string) return resp["data"] + def patch_draft_truss_two_step(self, model_name, patch_request): + patch = base64_encoded_json_str(patch_request.to_dict()) + query_string = f""" + mutation {{ + stage_patch_for_draft_truss(name: "{model_name}", + client_version: "TRUSS", + patch: "{patch}", + ) {{ + id, + name, + version_id + succeeded + needs_full_deploy + error + }} + }} + """ + resp = self._post_graphql_query(query_string) + result = resp["data"]["stage_patch_for_draft_truss"] + if not result["succeeded"]: + logging.debug(f"Failed to stage patch: {result}") + return result + logging.debug("Successfully staged patch. Syncing patch to truss...") + + return self.sync_draft_truss(model_name) + + def sync_draft_truss(self, model_name): + query_string = f""" + mutation {{ + sync_draft_truss(name: "{model_name}", + client_version: "TRUSS", + ) {{ + id, + name, + version_id + succeeded + needs_full_deploy + error + }} + }} + """ + resp = self._post_graphql_query(query_string) + result = resp["data"]["sync_draft_truss"] + if not result["succeeded"]: + logging.debug(f"Failed to sync patch: {result}") + return result + def patch_draft_truss(self, model_name, patch_request): patch = base64_encoded_json_str(patch_request.to_dict()) query_string = f""" diff --git a/truss/remote/baseten/core.py b/truss/remote/baseten/core.py index e2ac96570..d3b603504 100644 --- a/truss/remote/baseten/core.py +++ b/truss/remote/baseten/core.py @@ -4,13 +4,13 @@ from typing import IO, List, Optional, Tuple import truss -from truss.constants import PRODUCTION_ENVIRONMENT_NAME +from truss.base.constants import PRODUCTION_ENVIRONMENT_NAME from truss.remote.baseten import custom_types as b10_types from truss.remote.baseten.api import BasetenApi from truss.remote.baseten.error import ApiError from truss.remote.baseten.utils.tar import create_tar_with_progress_bar from truss.remote.baseten.utils.transfer import multipart_upload_boto3 -from truss.truss_handle import TrussHandle +from truss.truss_handle.truss_handle import TrussHandle from truss.util.path import load_trussignore_patterns_from_truss_dir logger = logging.getLogger(__name__) @@ -48,6 +48,21 @@ class ChainDeploymentHandle(typing.NamedTuple): is_draft: bool +class PatchState(typing.NamedTuple): + current_hash: str + current_signature: str + + +class TrussPatches(typing.NamedTuple): + django_patch_state: PatchState + container_patch_state: PatchState + + +class TrussWatchState(typing.NamedTuple): + is_container_built_from_push: bool + patches: Optional[TrussPatches] + + def get_chain_id_by_name(api: BasetenApi, chain_name: str) -> Optional[str]: """ Check if a chain with the given name exists in the Baseten remote.
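The new API methods split draft patching into a stage step plus a sync step. A hedged sketch of the client-side flow (result keys from the mutations above; error handling elided):

```python
# `api` is a BasetenApi instance; `patch_request` as used elsewhere in api.py.
result = api.patch_draft_truss_two_step("my-model", patch_request)
if not result["succeeded"]:
    if result.get("needs_full_deploy"):
        print("Cannot patch this deployment; fall back to `truss push`.")
    else:
        print(f"Patch failed: {result.get('error')}")
```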
@@ -178,6 +193,36 @@ def get_dev_version(api: BasetenApi, model_name: str) -> Optional[dict]: return get_dev_version_from_versions(versions) +def get_truss_watch_state(api: BasetenApi, model_name: str) -> TrussWatchState: + response = api.get_truss_watch_state(model_name)["truss_watch_state"] + django_patch_state = ( + None + if response["django_patch_state"] is None + else PatchState( + current_hash=response["django_patch_state"]["current_hash"], + current_signature=response["django_patch_state"]["current_signature"], + ) + ) + container_patch_state = ( + None + if response["container_patch_state"] is None + else PatchState( + current_hash=response["container_patch_state"]["current_hash"], + current_signature=response["container_patch_state"]["current_signature"], + ) + ) + patches = None + if django_patch_state and container_patch_state: + patches = TrussPatches( + django_patch_state=django_patch_state, + container_patch_state=container_patch_state, + ) + return TrussWatchState( + is_container_built_from_push=response["is_container_built_from_push"], + patches=patches, + ) + + def get_prod_version_from_versions(versions: List[dict]) -> Optional[dict]: # Loop over versions instead of using the primary_version field because # primary_version is set to the development version ID if no published @@ -242,6 +287,7 @@ def create_truss_service( semver_bump: str = "MINOR", is_trusted: bool = False, preserve_previous_prod_deployment: bool = False, + allow_truss_download: bool = False, is_draft: Optional[bool] = False, model_id: Optional[str] = None, deployment_name: Optional[str] = None, @@ -276,7 +322,8 @@ def create_truss_service( s3_key, config, f"truss=={truss.version()}", - is_trusted, + is_trusted=is_trusted, + allow_truss_download=allow_truss_download, origin=origin, ) @@ -292,6 +339,7 @@ def create_truss_service( semver_bump=semver_bump, client_version=f"truss=={truss.version()}", is_trusted=is_trusted, + allow_truss_download=allow_truss_download, deployment_name=deployment_name, origin=origin, chain_environment=chain_environment, diff --git a/truss/remote/baseten/remote.py b/truss/remote/baseten/remote.py index a9619a227..2b5596760 100644 --- a/truss/remote/baseten/remote.py +++ b/truss/remote/baseten/remote.py @@ -6,10 +6,11 @@ import yaml from requests import ReadTimeout -from truss.constants import PRODUCTION_ENVIRONMENT_NAME +from truss.base.constants import PRODUCTION_ENVIRONMENT_NAME if TYPE_CHECKING: from rich import console as rich_console +from truss.base.truss_config import ModelServer from truss.local.local_config_handler import LocalConfigHandler from truss.remote.baseten import custom_types from truss.remote.baseten.api import BasetenApi @@ -29,14 +30,14 @@ get_dev_version_from_versions, get_model_versions, get_prod_version_from_versions, + get_truss_watch_state, upload_truss, ) from truss.remote.baseten.error import ApiError, RemoteError from truss.remote.baseten.service import BasetenService, URLConfig from truss.remote.baseten.utils.transfer import base64_encoded_json_str -from truss.remote.truss_remote import TrussRemote -from truss.truss_config import ModelServer -from truss.truss_handle import TrussHandle +from truss.remote.truss_remote import RemoteUser, TrussRemote +from truss.truss_handle.truss_handle import TrussHandle from truss.util.path import is_ignored, load_trussignore_patterns_from_truss_dir from watchfiles import watch @@ -115,6 +116,17 @@ def get_chainlets( ) ] + def whoami(self) -> RemoteUser: + resp = self._api._post_graphql_query( + 
"query{organization{workspace_name}user{email}}" + ) + workspace_name = resp["data"]["organization"]["workspace_name"] + user_email = resp["data"]["user"]["email"] + return RemoteUser( + workspace_name, + user_email, + ) + def push( # type: ignore self, truss_handle: TrussHandle, @@ -123,6 +135,7 @@ def push( # type: ignore trusted: bool = False, promote: bool = False, preserve_previous_prod_deployment: bool = False, + disable_truss_download: bool = False, deployment_name: Optional[str] = None, origin: Optional[custom_types.ModelOrigin] = None, environment: Optional[str] = None, @@ -163,6 +176,9 @@ def push( # type: ignore "Deployment name must only contain alphanumeric, -, _ and . characters" ) + if model_id is not None and disable_truss_download: + raise ValueError("disable-truss-download can only be used for new models") + encoded_config_str = base64_encoded_json_str( gathered_truss._spec._config.to_dict() ) @@ -185,6 +201,7 @@ def push( # type: ignore chain_environment=chain_environment, chainlet_name=chainlet_name, chain_name=chain_name, + allow_truss_download=not disable_truss_download, ) return BasetenService( @@ -349,6 +366,19 @@ def _patch( ), ) + truss_watch_state = get_truss_watch_state(self._api, model_name) # type: ignore + # Make sure the patches are calculated against the current django patch state, if it exists. + # This is important to ensure that the sequence of patches for a given sesion forms a + # valid patch sequence (via a linked list) + if truss_watch_state.patches: + truss_hash = truss_watch_state.patches.django_patch_state.current_hash + truss_signature = ( + truss_watch_state.patches.django_patch_state.current_signature + ) + logging.debug(f"db patch hash: {truss_hash}") + logging.debug( + f"container_patch_hash: {truss_watch_state.patches.container_patch_state.current_hash}" + ) LocalConfigHandler.add_signature(truss_hash, truss_signature) try: patch_request = truss_handle.calc_patch(truss_hash, truss_ignore_patterns) @@ -360,19 +390,39 @@ def _patch( "Failed to calculate patch. Change type might not be supported.", ) - if ( - patch_request.prev_hash == patch_request.next_hash - or len(patch_request.patch_ops) == 0 - ): + django_has_unapplied_patches = ( + not truss_watch_state.is_container_built_from_push + and truss_watch_state.patches + and ( + truss_watch_state.patches.django_patch_state.current_hash + != truss_watch_state.patches.container_patch_state.current_hash + ) + ) + should_create_patch = ( + patch_request.prev_hash != patch_request.next_hash + and len(patch_request.patch_ops) > 0 + ) + is_synced = not django_has_unapplied_patches and not should_create_patch + if is_synced: return PatchResult( PatchStatus.SKIPPED, "No changes observed, skipping patching." ) try: if console: with console.status("Applying patch..."): - resp = self._api.patch_draft_truss(model_name, patch_request) + if should_create_patch: + resp = self._api.patch_draft_truss_two_step( + model_name, patch_request + ) + else: + resp = self._api.sync_draft_truss(model_name) else: - resp = self._api.patch_draft_truss(model_name, patch_request) + if should_create_patch: + resp = self._api.patch_draft_truss_two_step( + model_name, patch_request + ) + else: + resp = self._api.sync_draft_truss(model_name) except ReadTimeout: return PatchResult( @@ -386,8 +436,8 @@ def _patch( needs_full_deploy = resp.get("needs_full_deploy", None) if needs_full_deploy: message = ( - f"Model {model_name} is not able to be patched, " - f"use `truss push` to deploy." 
+ f"Model {model_name} is not able to be patched: `{resp['error']}`. " + f"Use `truss push` to deploy." ) else: message = ( diff --git a/truss/remote/baseten/service.py b/truss/remote/baseten/service.py index 7392ace6b..52cb3f402 100644 --- a/truss/remote/baseten/service.py +++ b/truss/remote/baseten/service.py @@ -12,11 +12,11 @@ import requests from tenacity import retry, stop_after_delay, wait_fixed +from truss.base.errors import RemoteNetworkError from truss.remote.baseten.api import BasetenApi from truss.remote.baseten.auth import AuthService from truss.remote.truss_remote import TrussService -from truss.truss_handle import TrussHandle -from truss.util.errors import RemoteNetworkError +from truss.truss_handle.truss_handle import TrussHandle # "classes created inside an enum will not become a member" -> intended here anyway. warnings.filterwarnings("ignore", category=DeprecationWarning, message=".*enum.*") diff --git a/truss/remote/baseten/utils/transfer.py b/truss/remote/baseten/utils/transfer.py index d07d16f13..0629ee908 100644 --- a/truss/remote/baseten/utils/transfer.py +++ b/truss/remote/baseten/utils/transfer.py @@ -5,6 +5,7 @@ import boto3 from boto3.s3.transfer import TransferConfig from rich.progress import Progress +from truss.util.env_vars import override_env_vars def base64_encoded_json_str(obj): @@ -12,26 +13,29 @@ def base64_encoded_json_str(obj): def multipart_upload_boto3(file_path, bucket_name, key, credentials): - s3_resource = boto3.resource("s3", **credentials) - filesize = os.stat(file_path).st_size - - # Create a new progress bar - progress = Progress() - - # Add a new task to the progress bar - task_id = progress.add_task("[cyan]Uploading...", total=filesize) - - with progress: - - def callback(bytes_transferred): - # Update the progress bar - progress.update(task_id, advance=bytes_transferred) - - s3_resource.Object(bucket_name, key).upload_file( - file_path, - Config=TransferConfig( - max_concurrency=10, - use_threads=True, - ), - Callback=callback, - ) + # In the CLI flow, ignore any local ~/.aws/config files, + # which can interfere with uploading the Truss to S3. 
+ with override_env_vars({"AWS_CONFIG_FILE": ""}): + s3_resource = boto3.resource("s3", **credentials) + filesize = os.stat(file_path).st_size + + # Create a new progress bar + progress = Progress() + + # Add a new task to the progress bar + task_id = progress.add_task("[cyan]Uploading...", total=filesize) + + with progress: + + def callback(bytes_transferred): + # Update the progress bar + progress.update(task_id, advance=bytes_transferred) + + s3_resource.Object(bucket_name, key).upload_file( + file_path, + Config=TransferConfig( + max_concurrency=10, + use_threads=True, + ), + Callback=callback, + ) diff --git a/truss/remote/truss_remote.py b/truss/remote/truss_remote.py index e64815e95..727c6e322 100644 --- a/truss/remote/truss_remote.py +++ b/truss/remote/truss_remote.py @@ -6,7 +6,18 @@ if TYPE_CHECKING: from rich import console as rich_console -from truss.truss_handle import TrussHandle +from truss.truss_handle.truss_handle import TrussHandle + + +class RemoteUser: + """Class to hold information about the remote user""" + + workspace_name: str + user_email: str + + def __init__(self, workspace_name: str, user_email: str): + self.workspace_name = workspace_name + self.user_email = user_email class TrussService(ABC): @@ -209,6 +220,16 @@ def push(self, truss_handle: TrussHandle, **kwargs) -> TrussService: """ + @abstractmethod + def whoami(self) -> RemoteUser: + """ + Returns account information for the current user. + + This method should be implemented in subclasses and return a RemoteUser. + + + """ + @abstractmethod def get_service(self, **kwargs) -> TrussService: """ diff --git a/truss/templates/control/control/helpers/truss_patch/model_code_patch_applier.py b/truss/templates/control/control/helpers/truss_patch/model_code_patch_applier.py index 1a1ebc9f2..747fe7758 100644 --- a/truss/templates/control/control/helpers/truss_patch/model_code_patch_applier.py +++ b/truss/templates/control/control/helpers/truss_patch/model_code_patch_applier.py @@ -2,6 +2,7 @@ import os from pathlib import Path +# TODO(marius/TaT): remove try-except after TaT. # TODO(pankaj) In desparate need of refactoring into separate library try: from helpers.custom_types import Action, Patch diff --git a/truss/templates/control/control/helpers/truss_patch/model_container_patch_applier.py b/truss/templates/control/control/helpers/truss_patch/model_container_patch_applier.py index 98be96d71..a93ceabb5 100644 --- a/truss/templates/control/control/helpers/truss_patch/model_container_patch_applier.py +++ b/truss/templates/control/control/helpers/truss_patch/model_container_patch_applier.py @@ -16,7 +16,17 @@ ) from helpers.errors import UnsupportedPatch from helpers.truss_patch.model_code_patch_applier import apply_code_patch -from truss.truss_config import ExternalData, ExternalDataItem, TrussConfig + +# TODO(marius/TaT): remove try-except after TaT. +try: + from truss.base.truss_config import ExternalData, ExternalDataItem, TrussConfig +except ImportError: + from truss.truss_config import ( # type: ignore[no-redef] + ExternalData, + ExternalDataItem, + TrussConfig, + ) + from truss.util.download import download_external_data diff --git a/truss/templates/control/requirements.txt b/truss/templates/control/requirements.txt index 16aa11117..afcddad30 100644 --- a/truss/templates/control/requirements.txt +++ b/truss/templates/control/requirements.txt @@ -1,5 +1,5 @@ dataclasses-json==0.5.7 -truss==0.9.14 +truss==0.9.14 # TODO(marius/TaT): remove after TaT. 
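`override_env_vars` is imported from `truss.util.env_vars` in the transfer.py hunk above, but its body is not part of this diff. A plausible minimal sketch of such a context manager (an assumption, not the actual helper):

```python
import os
from contextlib import contextmanager
from typing import Dict, Iterator

@contextmanager
def override_env_vars(overrides: Dict[str, str]) -> Iterator[None]:
    # Apply the overrides, then restore (or unset) the previous values on
    # exit, even if the body raises.
    previous = {key: os.environ.get(key) for key in overrides}
    os.environ.update(overrides)
    try:
        yield
    finally:
        for key, value in previous.items():
            if value is None:
                os.environ.pop(key, None)
            else:
                os.environ[key] = value
```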
fastapi==0.114.1 uvicorn==0.24.0 uvloop==0.19.0 diff --git a/truss/templates/server/common/patches.py b/truss/templates/server/common/patches.py index 4f2ca6364..1e9d87715 100644 --- a/truss/templates/server/common/patches.py +++ b/truss/templates/server/common/patches.py @@ -2,8 +2,6 @@ import logging from pathlib import Path -# Set up logging -logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) diff --git a/truss/templates/server/common/schema.py b/truss/templates/server/common/schema.py index 71600eaa4..89e7060f4 100644 --- a/truss/templates/server/common/schema.py +++ b/truss/templates/server/common/schema.py @@ -8,6 +8,7 @@ Optional, Type, Union, + cast, get_args, get_origin, ) @@ -157,7 +158,9 @@ def _extract_pydantic_base_models(union_args: tuple) -> List[Type[BaseModel]]: """ return [ - retrieve_base_class_from_awaitable(arg) if _is_awaitable_type(arg) else arg # type: ignore[misc] # Types are ok per filter condition. + cast(Type[BaseModel], retrieve_base_class_from_awaitable(arg)) + if _is_awaitable_type(arg) + else arg for arg in union_args if _is_awaitable_type(arg) or _annotation_is_pydantic_model(arg) ] diff --git a/truss/templates/server/requirements.txt b/truss/templates/server/requirements.txt index 328a99149..e5f0dd23d 100644 --- a/truss/templates/server/requirements.txt +++ b/truss/templates/server/requirements.txt @@ -8,6 +8,7 @@ joblib==1.2.0 loguru==0.7.2 msgpack-numpy==0.4.8 msgpack==1.0.2 +numpy>=1.23.5 opentelemetry-api>=1.25.0 opentelemetry-sdk>=1.25.0 opentelemetry-exporter-otlp>=1.25.0 diff --git a/truss/templates/shared/lazy_data_resolver.py b/truss/templates/shared/lazy_data_resolver.py index cf12ad3fd..d02b47348 100644 --- a/truss/templates/shared/lazy_data_resolver.py +++ b/truss/templates/shared/lazy_data_resolver.py @@ -1,18 +1,26 @@ +import logging +import os +import shutil from concurrent.futures import ThreadPoolExecutor from datetime import datetime, timezone from pathlib import Path -from typing import Dict, List +from typing import Dict, List, Tuple import pydantic +import requests import yaml try: - from shared.util import download_from_url_using_requests + from shared.util import BLOB_DOWNLOAD_TIMEOUT_SECS except ModuleNotFoundError: - from truss.templates.shared.util import download_from_url_using_requests + from truss.templates.shared.util import BLOB_DOWNLOAD_TIMEOUT_SECS LAZY_DATA_RESOLVER_PATH = Path("/bptr/bptr-manifest") NUM_WORKERS = 4 +CACHE_DIR = Path("/cache/org/artifacts") +BASETEN_FS_ENABLED_ENV_VAR = "BASETEN_FS_ENABLED" + +logger = logging.getLogger(__name__) class Resolution(pydantic.BaseModel): @@ -38,8 +46,52 @@ class BasetenPointerManifest(pydantic.BaseModel): class LazyDataResolver: def __init__(self, data_dir: Path): self._data_dir: Path = data_dir - self._bptr_resolution: Dict[str, str] = _read_bptr_resolution() + self._bptr_resolution: Dict[str, Tuple[str, str]] = _read_bptr_resolution() self._resolution_done = False + self._uses_b10_cache = ( + os.environ.get(BASETEN_FS_ENABLED_ENV_VAR, "False") == "True" + ) + + def cached_download_from_url_using_requests( + self, URL: str, hash: str, file_name: str + ): + """Download object from URL, attempt to write to cache and symlink to data directory if applicable, data directory otherwise. 
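Background for the lazy_data_resolver changes that start here and continue below: each manifest entry now resolves to a `(url, hash)` pair instead of a bare URL, so the content hash can double as the cache key under `/cache/org/artifacts`. Illustrative shape only (hypothetical values):

```python
# Shape of LazyDataResolver._bptr_resolution after this change:
bptr_resolution = {
    "model.safetensors": ("https://example.com/signed-url", "content-hash"),
}
for file_name, (resolved_url, content_hash) in bptr_resolution.items():
    print(file_name, resolved_url, content_hash)
```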
+ In case of failure, write to data directory + """ + if self._uses_b10_cache: + file_path = CACHE_DIR / hash + if file_path.exists(): + os.symlink(file_path, self._data_dir / file_name) + return + + # Streaming download to keep memory usage low + resp = requests.get( + URL, + allow_redirects=True, + stream=True, + timeout=BLOB_DOWNLOAD_TIMEOUT_SECS, + ) + resp.raise_for_status() + + if self._uses_b10_cache: + try: + file_path.parent.mkdir(parents=True, exist_ok=True) + with file_path.open("wb") as file: + shutil.copyfileobj(resp.raw, file) + # symlink to data directory + os.symlink(file_path, self._data_dir / file_name) + return + except OSError: + logger.debug( + "Failed to save artifact to cache dir, saving to data dir instead" + ) + # Cache likely has no space left on device, break to download to data dir as fallback + pass + + file_path = self._data_dir / file_name + file_path.parent.mkdir(parents=True, exist_ok=True) + with file_path.open("wb") as file: + shutil.copyfileobj(resp.raw, file) def fetch(self): if self._resolution_done: @@ -47,11 +99,12 @@ def fetch(self): with ThreadPoolExecutor(NUM_WORKERS) as executor: futures = {} - for file_name, resolved_url in self._bptr_resolution.items(): + for file_name, (resolved_url, hash) in self._bptr_resolution.items(): futures[file_name] = executor.submit( - download_from_url_using_requests, + self.cached_download_from_url_using_requests, resolved_url, - self._data_dir / file_name, + hash, + file_name, ) for file_name, future in futures.items(): if not future: @@ -59,7 +112,7 @@ def fetch(self): self._resolution_done = True -def _read_bptr_resolution() -> Dict[str, str]: +def _read_bptr_resolution() -> Dict[str, Tuple[str, str]]: if not LAZY_DATA_RESOLVER_PATH.is_file(): return {} bptr_manifest = BasetenPointerManifest( @@ -71,5 +124,5 @@ def _read_bptr_resolution() -> Dict[str, str]: datetime.now(timezone.utc).timestamp() ): raise RuntimeError("Baseten pointer lazy data resolution has expired") - resolution_map[bptr.file_name] = bptr.resolution.url + resolution_map[bptr.file_name] = bptr.resolution.url, bptr.hash return resolution_map diff --git a/truss/templates/shared/util.py b/truss/templates/shared/util.py index f4d7f45c0..e76ea0f72 100644 --- a/truss/templates/shared/util.py +++ b/truss/templates/shared/util.py @@ -1,12 +1,9 @@ import multiprocessing import os -import shutil import sys -from pathlib import Path from typing import List import psutil -import requests BLOB_DOWNLOAD_TIMEOUT_SECS = 600 # 10 minutes # number of seconds to wait for truss server child processes before sending kill signal @@ -78,17 +75,3 @@ def kill_child_processes(parent_pid: int): ) for process in alive: process.kill() - - -def download_from_url_using_requests(URL: str, download_to: Path): - # Streaming download to keep memory usage low - resp = requests.get( - URL, - allow_redirects=True, - stream=True, - timeout=BLOB_DOWNLOAD_TIMEOUT_SECS, - ) - resp.raise_for_status() - download_to.parent.mkdir(parents=True, exist_ok=True) - with download_to.open("wb") as file: - shutil.copyfileobj(resp.raw, file) diff --git a/truss/test_data/context_builder_image_test/test.py b/truss/test_data/context_builder_image_test/test.py deleted file mode 100644 index 6e04cb56f..000000000 --- a/truss/test_data/context_builder_image_test/test.py +++ /dev/null @@ -1,4 +0,0 @@ -from truss import init - -th = init("test_truss") -th.docker_build_setup() diff --git a/truss/test_data/test_streaming_truss_with_tracing/config.yaml 
b/truss/test_data/test_streaming_truss_with_tracing/config.yaml deleted file mode 100644 index b897fc6ee..000000000 --- a/truss/test_data/test_streaming_truss_with_tracing/config.yaml +++ /dev/null @@ -1,4 +0,0 @@ -model_name: Test Streaming -python_version: py39 -environment_variables: - OTEL_TRACING_NDJSON_FILE: "/tmp/otel_traces.ndjson" diff --git a/truss/tests/conftest.py b/truss/tests/conftest.py index ce9c6a99e..47fa212a1 100644 --- a/truss/tests/conftest.py +++ b/truss/tests/conftest.py @@ -11,14 +11,14 @@ import requests import yaml -from truss.build import init +from truss.base.custom_types import Example +from truss.base.truss_config import DEFAULT_BUNDLED_PACKAGES_DIR from truss.contexts.image_builder.serving_image_builder import ( ServingImageBuilderContext, ) from truss.contexts.local_loader.docker_build_emulator import DockerBuildEmulator -from truss.custom_types import Example -from truss.truss_config import DEFAULT_BUNDLED_PACKAGES_DIR -from truss.truss_handle import TrussHandle +from truss.truss_handle.build import init +from truss.truss_handle.truss_handle import TrussHandle CUSTOM_MODEL_CODE = """ class Model: @@ -220,6 +220,11 @@ def predict(self, model_input): """ +@pytest.fixture +def test_data_path() -> Path: + return Path(__file__).parent.resolve() / "test_data" + + @pytest.fixture def pytorch_model_init_args(): return {"arg1": 1, "arg2": 2, "kwarg1": 3, "kwarg2": 4} @@ -556,23 +561,18 @@ def custom_model_truss_dir_for_secrets(tmp_path): @pytest.fixture -def truss_container_fs(tmp_path): - ROOT = Path(__file__).parent.parent.parent.resolve() - return _build_truss_fs(ROOT / "truss" / "test_data" / "test_truss", tmp_path) +def truss_container_fs(tmp_path, test_data_path): + return _build_truss_fs(test_data_path / "test_truss", tmp_path) @pytest.fixture -def trt_llm_truss_container_fs(tmp_path): - ROOT = Path(__file__).parent.parent.parent.resolve() - return _build_truss_fs( - ROOT / "truss" / "test_data" / "test_trt_llm_truss", tmp_path - ) +def trt_llm_truss_container_fs(tmp_path, test_data_path): + return _build_truss_fs(test_data_path / "test_trt_llm_truss", tmp_path) @pytest.fixture -def truss_control_container_fs(tmp_path): - ROOT = Path(__file__).parent.parent.parent.resolve() - test_truss_dir = ROOT / "truss" / "test_data" / "test_truss" +def truss_control_container_fs(tmp_path, test_data_path): + test_truss_dir = test_data_path / "test_truss" control_truss_dir = tmp_path / "control_truss" shutil.copytree(str(test_truss_dir), str(control_truss_dir)) with _modify_yaml(control_truss_dir / "config.yaml") as content: @@ -581,7 +581,7 @@ def truss_control_container_fs(tmp_path): @pytest.fixture -def patch_ping_test_server(): +def patch_ping_test_server(test_data_path): port = "5001" proc = subprocess.Popen( [ @@ -596,7 +596,7 @@ def patch_ping_test_server(): "--host", "0.0.0.0", ], - cwd=str(Path(__file__).parent.parent / "test_data" / "patch_ping_test_server"), + cwd=str(test_data_path / "patch_ping_test_server"), ) base_url = f"http://127.0.0.1:{port}" retry_secs = 10 diff --git a/truss/tests/contexts/image_builder/test_serving_image_builder.py b/truss/tests/contexts/image_builder/test_serving_image_builder.py index 2a6d0f730..7531fa800 100644 --- a/truss/tests/contexts/image_builder/test_serving_image_builder.py +++ b/truss/tests/contexts/image_builder/test_serving_image_builder.py @@ -6,26 +6,28 @@ from unittest.mock import patch import pytest -from truss.constants import ( +from truss.base.constants import ( BASE_TRTLLM_REQUIREMENTS, TRTLLM_BASE_IMAGE, 
TRTLLM_PREDICT_CONCURRENCY, TRTLLM_PYTHON_EXECUTABLE, TRTLLM_TRUSS_DIR, ) +from truss.base.truss_config import ModelCache, ModelRepo, TrussConfig from truss.contexts.image_builder.serving_image_builder import ( HF_ACCESS_TOKEN_FILE_NAME, ServingImageBuilderContext, get_files_to_cache, ) from truss.tests.test_testing_utilities_for_other_tests import ensure_kill_all -from truss.truss_config import ModelCache, ModelRepo, TrussConfig -from truss.truss_handle import TrussHandle +from truss.truss_handle.truss_handle import TrussHandle BASE_DIR = Path(__file__).parent -def test_serving_image_dockerfile_from_user_base_image(custom_model_truss_dir): +def test_serving_image_dockerfile_from_user_base_image( + test_data_path, custom_model_truss_dir +): th = TrussHandle(custom_model_truss_dir) # The test fixture python varies with host version, need to pin here. th.update_python_version("py39") @@ -38,7 +40,7 @@ def test_serving_image_dockerfile_from_user_base_image(custom_model_truss_dir): with open(tmp_path / "Dockerfile", "r") as f: gen_docker_lines = f.readlines() with open( - f"{BASE_DIR}/../../../test_data/server.Dockerfile", + test_data_path / "server.Dockerfile", "r", ) as f: server_docker_lines = f.readlines() @@ -248,12 +250,9 @@ def test_correct_nested_s3_files_accessed_for_caching(mock_list_bucket_files): @pytest.mark.integration -def test_truss_server_caching_truss(): +def test_truss_server_caching_truss(test_data_path): with ensure_kill_all(): - truss_root = ( - Path(__file__).parent.parent.parent.parent.parent.resolve() / "truss" - ) - truss_dir = truss_root / "test_data" / "test_truss_server_caching_truss" + truss_dir = test_data_path / "test_truss_server_caching_truss" tr = TrussHandle(truss_dir) container = tr.docker_run( @@ -263,9 +262,8 @@ def test_truss_server_caching_truss(): assert "Downloading model.safetensors:" not in container.logs() -def test_model_cache_dockerfile(): - truss_root = Path(__file__).parent.parent.parent.parent.parent.resolve() / "truss" - truss_dir = truss_root / "test_data" / "test_truss_server_caching_truss" +def test_model_cache_dockerfile(test_data_path): + truss_dir = test_data_path / "test_truss_server_caching_truss" tr = TrussHandle(truss_dir) builder_context = ServingImageBuilderContext diff --git a/truss/tests/contexts/local_loader/test_load_local.py b/truss/tests/contexts/local_loader/test_load_local.py index 1d5d77df7..2c196e9df 100644 --- a/truss/tests/contexts/local_loader/test_load_local.py +++ b/truss/tests/contexts/local_loader/test_load_local.py @@ -1,7 +1,7 @@ +from truss.base.truss_spec import TrussSpec from truss.contexts.local_loader.utils import prepare_secrets from truss.local.local_config_handler import LocalConfigHandler -from truss.truss_handle import TrussHandle -from truss.truss_spec import TrussSpec +from truss.truss_handle.truss_handle import TrussHandle def test_prepare_secrets(custom_model_truss_dir, tmp_path): diff --git a/truss/tests/contexts/local_loader/test_truss_module_finder.py b/truss/tests/contexts/local_loader/test_truss_module_finder.py index 370bf7cae..07785ccaa 100644 --- a/truss/tests/contexts/local_loader/test_truss_module_finder.py +++ b/truss/tests/contexts/local_loader/test_truss_module_finder.py @@ -1,8 +1,8 @@ import tempfile from pathlib import Path +from truss.base.truss_config import DEFAULT_BUNDLED_PACKAGES_DIR from truss.contexts.local_loader.truss_module_loader import truss_module_loaded -from truss.truss_config import DEFAULT_BUNDLED_PACKAGES_DIR ORIG_MODEL_CLASS_CONTENT = """ class Model: diff --git 
a/truss/tests/patch/test_calc_patch.py b/truss/tests/patch/test_calc_patch.py index 9ff696b15..55f2561e3 100644 --- a/truss/tests/patch/test_calc_patch.py +++ b/truss/tests/patch/test_calc_patch.py @@ -4,12 +4,7 @@ import pytest import yaml -from truss.patch.calc_patch import ( - _calc_python_requirements_patches, - calc_truss_patch, - calc_unignored_paths, -) -from truss.patch.signature import calc_truss_signature +from truss.base.truss_config import TrussConfig from truss.templates.control.control.helpers.custom_types import ( Action, ConfigPatch, @@ -22,8 +17,13 @@ PythonRequirementPatch, SystemPackagePatch, ) -from truss.truss_config import TrussConfig -from truss.truss_handle import TrussHandle +from truss.truss_handle.patch.calc_patch import ( + _calc_python_requirements_patches, + _calc_unignored_paths, + calc_truss_patch, +) +from truss.truss_handle.patch.signature import calc_truss_signature +from truss.truss_handle.truss_handle import TrussHandle def test_calc_truss_patch_unsupported(custom_model_truss_dir: Path): @@ -304,6 +304,46 @@ def config_op(config: TrussConfig): ] +def test_calc_truss_patch_handles_requirements_comments( + custom_model_truss_dir: Path, +): + def pre_config_op(config: TrussConfig): + requirements_contents = """xformers\n#torch==2.0.1""" + filename = "./requirements.txt" + config.requirements_file = filename + with (custom_model_truss_dir / filename).open("w") as req_file: + req_file.write(requirements_contents) + + def config_op(config: TrussConfig): + requirements_contents = """#xformers\ntorch==2.3.1\n""" + filename = "requirements.txt" + with (custom_model_truss_dir / filename).open("w") as req_file: + req_file.write(requirements_contents) + + patches = _apply_config_change_and_calc_patches( + custom_model_truss_dir, + config_op=config_op, + config_pre_op=pre_config_op, + ) + assert len(patches) == 2 + assert patches == [ + Patch( + type=PatchType.PYTHON_REQUIREMENT, + body=PythonRequirementPatch( + action=Action.REMOVE, + requirement="xformers", + ), + ), + Patch( + type=PatchType.PYTHON_REQUIREMENT, + body=PythonRequirementPatch( + action=Action.ADD, + requirement="torch==2.3.1", + ), + ), + ] + + def test_calc_truss_patch_handles_requirements_file_changes( custom_model_truss_dir: Path, ): @@ -315,7 +355,7 @@ def pre_config_op(config: TrussConfig): req_file.write(requirements_contents) def config_op(config: TrussConfig): - requirements_contents = """requests\ntorch==2.3.1""" + requirements_contents = """requests\ntorch==2.3.1\n""" filename = "requirements.txt" with (custom_model_truss_dir / filename).open("w") as req_file: req_file.write(requirements_contents) @@ -995,7 +1035,7 @@ def test_calc_unignored_paths(): "model/model.py", } - unignored_paths = calc_unignored_paths(root_relative_paths, ignore_patterns) + unignored_paths = _calc_unignored_paths(root_relative_paths, ignore_patterns) assert unignored_paths == { "config.yaml", "model/model.py", diff --git a/truss/tests/patch/test_dir_signature.py b/truss/tests/patch/test_dir_signature.py index 907e8c83c..f62d48757 100644 --- a/truss/tests/patch/test_dir_signature.py +++ b/truss/tests/patch/test_dir_signature.py @@ -1,4 +1,4 @@ -from truss.patch.dir_signature import directory_content_signature +from truss.truss_handle.patch.dir_signature import directory_content_signature def test_directory_content_signature(tmp_path): diff --git a/truss/tests/patch/test_hash.py b/truss/tests/patch/test_hash.py index 04210d126..0b5d872d7 100644 --- a/truss/tests/patch/test_hash.py +++ 
b/truss/tests/patch/test_hash.py @@ -4,7 +4,7 @@ from typing import Callable, List import pytest -from truss.patch.hash import ( +from truss.truss_handle.patch.hash import ( directory_content_hash, file_content_hash, file_content_hash_str, diff --git a/truss/tests/patch/test_signature.py b/truss/tests/patch/test_signature.py index 83632c558..37a343f51 100644 --- a/truss/tests/patch/test_signature.py +++ b/truss/tests/patch/test_signature.py @@ -1,4 +1,4 @@ -from truss.patch.signature import calc_truss_signature +from truss.truss_handle.patch.signature import calc_truss_signature def test_calc_truss_signature(custom_model_truss_dir): diff --git a/truss/tests/patch/test_truss_dir_patch_applier.py b/truss/tests/patch/test_truss_dir_patch_applier.py index 403059975..8902b2583 100644 --- a/truss/tests/patch/test_truss_dir_patch_applier.py +++ b/truss/tests/patch/test_truss_dir_patch_applier.py @@ -2,7 +2,7 @@ from pathlib import Path import yaml -from truss.patch.truss_dir_patch_applier import TrussDirPatchApplier +from truss.base.truss_config import TrussConfig from truss.templates.control.control.helpers.custom_types import ( Action, ConfigPatch, @@ -12,7 +12,7 @@ PythonRequirementPatch, SystemPackagePatch, ) -from truss.truss_config import TrussConfig +from truss.truss_handle.patch.truss_dir_patch_applier import TrussDirPatchApplier TEST_LOGGER = logging.getLogger("test_logger") diff --git a/truss/tests/patch/test_types.py b/truss/tests/patch/test_types.py index 373cf869d..12ff0ee47 100644 --- a/truss/tests/patch/test_types.py +++ b/truss/tests/patch/test_types.py @@ -1,5 +1,5 @@ -from truss.patch.custom_types import TrussSignature -from truss.patch.signature import calc_truss_signature +from truss.truss_handle.patch.custom_types import TrussSignature +from truss.truss_handle.patch.signature import calc_truss_signature def test_truss_signature_type(custom_model_truss_dir): diff --git a/truss/tests/remote/baseten/test_api.py b/truss/tests/remote/baseten/test_api.py index b8f0f0219..7993f2ef9 100644 --- a/truss/tests/remote/baseten/test_api.py +++ b/truss/tests/remote/baseten/test_api.py @@ -54,6 +54,15 @@ def mock_create_model_response(): return response +def mock_create_development_model_response(): + response = Response() + response.status_code = 200 + response.json = mock.Mock( + return_value={"data": {"deploy_draft_truss": {"id": "12345"}}} + ) + return response + + def mock_deploy_chain_deployment_response(): response = Response() response.status_code = 200 @@ -185,8 +194,8 @@ def test_create_model_from_truss(mock_post, baseten_api): "config_str", "semver_bump", "client_version", - False, - "deployment_name", + is_trusted=False, + deployment_name="deployment_name", ) gql_mutation = mock_post.call_args[1]["data"]["query"] @@ -207,8 +216,8 @@ def test_create_model_from_truss_forwards_chainlet_data(mock_post, baseten_api): "config_str", "semver_bump", "client_version", - False, - "deployment_name", + is_trusted=False, + deployment_name="deployment_name", chain_environment="chainstaging", chain_name="chainchain", chainlet_name="chainlet-1", @@ -237,7 +246,7 @@ def test_create_model_from_truss_does_not_send_deployment_name_if_not_specified( "config_str", "semver_bump", "client_version", - True, + is_trusted=True, deployment_name=None, ) @@ -251,6 +260,50 @@ def test_create_model_from_truss_does_not_send_deployment_name_if_not_specified( assert "version_name: " not in gql_mutation +@mock.patch("requests.post", return_value=mock_create_model_response()) +def 
test_create_model_from_truss_with_allow_truss_download(mock_post, baseten_api): + baseten_api.create_model_from_truss( + "model_name", + "s3key", + "config_str", + "semver_bump", + "client_version", + is_trusted=True, + allow_truss_download=False, + ) + + gql_mutation = mock_post.call_args[1]["data"]["query"] + assert 'name: "model_name"' in gql_mutation + assert 's3_key: "s3key"' in gql_mutation + assert 'config: "config_str"' in gql_mutation + assert 'semver_bump: "semver_bump"' in gql_mutation + assert 'client_version: "client_version"' in gql_mutation + assert "is_trusted: true" in gql_mutation + assert "allow_truss_download: false" in gql_mutation + + +@mock.patch("requests.post", return_value=mock_create_development_model_response()) +def test_create_development_model_from_truss_with_allow_truss_download( + mock_post, baseten_api +): + baseten_api.create_development_model_from_truss( + "model_name", + "s3key", + "config_str", + "client_version", + is_trusted=True, + allow_truss_download=False, + ) + + gql_mutation = mock_post.call_args[1]["data"]["query"] + assert 'name: "model_name"' in gql_mutation + assert 's3_key: "s3key"' in gql_mutation + assert 'config: "config_str"' in gql_mutation + assert 'client_version: "client_version"' in gql_mutation + assert "is_trusted: true" in gql_mutation + assert "allow_truss_download: false" in gql_mutation + + @mock.patch("requests.post", return_value=mock_deploy_chain_deployment_response()) def test_deploy_chain_deployment(mock_post, baseten_api): baseten_api.deploy_chain_deployment( diff --git a/truss/tests/remote/baseten/test_core.py b/truss/tests/remote/baseten/test_core.py index 5f3727ac4..6c7285007 100644 --- a/truss/tests/remote/baseten/test_core.py +++ b/truss/tests/remote/baseten/test_core.py @@ -2,7 +2,7 @@ from unittest.mock import MagicMock import pytest -from truss.constants import PRODUCTION_ENVIRONMENT_NAME +from truss.base.constants import PRODUCTION_ENVIRONMENT_NAME from truss.remote.baseten import core from truss.remote.baseten.api import BasetenApi from truss.remote.baseten.core import create_truss_service @@ -195,3 +195,48 @@ def test_create_truss_service_handles_existing_model(inputs): _, kwargs = api.create_model_version_from_truss.call_args for k, v in inputs.items(): assert kwargs[k] == v + + +@pytest.mark.parametrize( + "allow_truss_download", + [True, False], +) +@pytest.mark.parametrize( + "is_draft", + [True, False], +) +def test_create_truss_service_handles_allow_truss_download_for_new_models( + is_draft, allow_truss_download +): + api = MagicMock() + return_value = { + "id": "id", + "version_id": "model_version_id", + } + api.create_model_from_truss.return_value = return_value + api.create_development_model_from_truss.return_value = return_value + + model_id = None + model_id, model_version_id = create_truss_service( + api, + "model_name", + "s3_key", + "config", + is_trusted=False, + preserve_previous_prod_deployment=False, + is_draft=is_draft, + model_id=model_id, + deployment_name="deployment_name", + allow_truss_download=allow_truss_download, + ) + assert model_id == return_value["id"] + assert model_version_id == return_value["version_id"] + + create_model_mock = ( + api.create_development_model_from_truss + if is_draft + else api.create_model_from_truss + ) + create_model_mock.assert_called_once() + _, kwargs = create_model_mock.call_args + assert kwargs["allow_truss_download"] is allow_truss_download diff --git a/truss/tests/remote/baseten/test_remote.py b/truss/tests/remote/baseten/test_remote.py index 
65929143e..6d13d47b0 100644 --- a/truss/tests/remote/baseten/test_remote.py +++ b/truss/tests/remote/baseten/test_remote.py @@ -6,7 +6,7 @@ from truss.remote.baseten.custom_types import ChainletData from truss.remote.baseten.error import RemoteError from truss.remote.baseten.remote import BasetenRemote -from truss.truss_handle import TrussHandle +from truss.truss_handle.truss_handle import TrussHandle _TEST_REMOTE_URL = "http://test_remote.com" _TEST_REMOTE_GRAPHQL_PATH = "http://test_remote.com/graphql/" @@ -226,7 +226,15 @@ def test_push_raised_value_error_when_deployment_name_and_not_publish( ValueError, match="Deployment name cannot be used for development deployment", ): - remote.push(th, "model_name", False, False, False, False, "dep_name") + remote.push( + th, + "model_name", + publish=False, + trusted=False, + promote=False, + preserve_previous_prod_deployment=False, + deployment_name="dep_name", + ) def test_push_raised_value_error_when_deployment_name_is_not_valid( @@ -253,7 +261,15 @@ def test_push_raised_value_error_when_deployment_name_is_not_valid( ValueError, match="Deployment name must only contain alphanumeric, -, _ and . characters", ): - remote.push(th, "model_name", True, False, False, False, "dep//name") + remote.push( + th, + "model_name", + publish=True, + trusted=False, + promote=False, + preserve_previous_prod_deployment=False, + deployment_name="dep//name", + ) def test_push_raised_value_error_when_keep_previous_prod_settings_and_not_promote( @@ -280,7 +296,14 @@ def test_push_raised_value_error_when_keep_previous_prod_settings_and_not_promot ValueError, match="preserve-previous-production-deployment can only be used with the '--promote' option", ): - remote.push(th, "model_name", False, False, False, True) + remote.push( + th, + "model_name", + publish=False, + trusted=False, + promote=False, + preserve_previous_prod_deployment=True, + ) def test_create_chain_with_no_publish(): @@ -596,3 +619,35 @@ def test_create_chain_existing_chain_publish_true_no_promotion(): assert deployment_handle.chain_id == "new-chain-id" assert deployment_handle.chain_deployment_id == "new-chain-deployment-id" + + +@pytest.mark.parametrize( + "publish", + [True, False], +) +def test_push_raised_value_error_when_disable_truss_download_for_existing_model( + publish, + custom_model_truss_dir_with_pre_and_post, +): + remote = BasetenRemote(_TEST_REMOTE_URL, "api_key") + model_response = { + "data": { + "model": { + "name": "model_name", + "id": "model_id", + "primary_version": {"id": "version_id"}, + } + } + } + with requests_mock.Mocker() as m: + m.post( + _TEST_REMOTE_GRAPHQL_PATH, + json=model_response, + ) + th = TrussHandle(custom_model_truss_dir_with_pre_and_post) + + with pytest.raises( + ValueError, + match="disable-truss-download can only be used for new models", + ): + remote.push(th, "model_name", publish=publish, disable_truss_download=True) diff --git a/truss/tests/remote/test_remote_factory.py b/truss/tests/remote/test_remote_factory.py index 38c693e95..dbb253113 100644 --- a/truss/tests/remote/test_remote_factory.py +++ b/truss/tests/remote/test_remote_factory.py @@ -2,7 +2,7 @@ import pytest from truss.remote.remote_factory import RemoteFactory -from truss.remote.truss_remote import RemoteConfig, TrussRemote +from truss.remote.truss_remote import RemoteConfig, RemoteUser, TrussRemote SAMPLE_CONFIG = {"api_key": "test_key", "remote_url": "http://test.com"} @@ -41,6 +41,9 @@ def get_service(self, **kwargs): def sync_truss_to_dev_version_by_name(self, model_name: str, 
target_directory: str): raise NotImplementedError + def whoami(self) -> RemoteUser: + return RemoteUser("test_user", "test_email") + def mock_service_config(): return RemoteConfig( diff --git a/truss/tests/templates/control/control/helpers/test_model_container_patch_applier.py b/truss/tests/templates/control/control/helpers/test_model_container_patch_applier.py index b14a74a3e..35487a239 100644 --- a/truss/tests/templates/control/control/helpers/test_model_container_patch_applier.py +++ b/truss/tests/templates/control/control/helpers/test_model_container_patch_applier.py @@ -4,7 +4,7 @@ from unittest import mock import pytest -from truss.truss_config import TrussConfig +from truss.base.truss_config import TrussConfig # Needed to simulate the set up on the model docker container sys.path.append( diff --git a/truss/tests/templates/control/control/test_server.py b/truss/tests/templates/control/control/test_server.py index b9bf0fc6d..33999129e 100644 --- a/truss/tests/templates/control/control/test_server.py +++ b/truss/tests/templates/control/control/test_server.py @@ -6,7 +6,7 @@ import pytest from httpx import AsyncClient -from truss.custom_types import PatchRequest +from truss.truss_handle.patch.custom_types import PatchRequest # Needed to simulate the set up on the model docker container sys.path.append( diff --git a/truss/tests/templates/core/server/test_lazy_data_resolver.py b/truss/tests/templates/core/server/test_lazy_data_resolver.py index 030fc4c31..822066d3e 100644 --- a/truss/tests/templates/core/server/test_lazy_data_resolver.py +++ b/truss/tests/templates/core/server/test_lazy_data_resolver.py @@ -1,5 +1,6 @@ import datetime import json +import os from contextlib import nullcontext from pathlib import Path from typing import Callable @@ -8,6 +9,7 @@ import pytest import requests_mock from truss.templates.shared.lazy_data_resolver import ( + BASETEN_FS_ENABLED_ENV_VAR, LAZY_DATA_RESOLVER_PATH, LazyDataResolver, ) @@ -87,8 +89,8 @@ def test_lazy_data_resolution( with expectation: ldr = LazyDataResolver(Path("foo")) assert ldr._bptr_resolution == { - "foo-name": "https://foo-rl", - "bar-name": "https://bar-rl", + "foo-name": ("https://foo-rl", "foo-hash"), + "bar-name": ("https://bar-rl", "bar-hash"), } @@ -122,11 +124,113 @@ def test_lazy_data_fetch( data_dir = Path(tmp_path) ldr = LazyDataResolver(data_dir) with requests_mock.Mocker() as m: - for file_name, url in ldr._bptr_resolution.items(): + for file_name, (url, _) in ldr._bptr_resolution.items(): resp = {"file_name": file_name, "url": url} m.get(url, json=resp) ldr.fetch() - for file_name, url in ldr._bptr_resolution.items(): + for file_name, (url, _) in ldr._bptr_resolution.items(): assert (ldr._data_dir / file_name).read_text() == json.dumps( {"file_name": file_name, "url": url} ) + + +@pytest.mark.parametrize( + "foo_expiry,bar_expiry", + [ + ( + int( + datetime.datetime(3000, 1, 1, tzinfo=datetime.timezone.utc).timestamp() + ), + int( + datetime.datetime(3000, 1, 1, tzinfo=datetime.timezone.utc).timestamp() + ), + ) + ], +) +def test_lazy_data_fetch_to_cache( + baseten_pointer_manifest_mock, foo_expiry, bar_expiry, tmp_path, monkeypatch +): + monkeypatch.setenv(BASETEN_FS_ENABLED_ENV_VAR, "True") + baseten_pointer_manifest_mock = baseten_pointer_manifest_mock( + foo_expiry, bar_expiry + ) + manifest_path = tmp_path / "bptr" / "bptr-manifest" + manifest_path.parent.mkdir() + manifest_path.touch() + manifest_path.write_text(baseten_pointer_manifest_mock) + cache_dir = tmp_path / "cache" / "org" / "artifacts" + 
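Because `whoami` is now an abstract method on `TrussRemote` (see the truss_remote.py hunk earlier), every remote implementation, including test doubles like the one above, must provide it. A minimal conforming stub (hypothetical subclass; the other abstract methods are elided):

```python
from truss.remote.truss_remote import RemoteUser, TrussRemote

class MyRemote(TrussRemote):  # hypothetical custom remote
    def whoami(self) -> RemoteUser:
        return RemoteUser("my-workspace", "me@example.com")
```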
cache_dir.mkdir(parents=True, exist_ok=True) + cache_dir.touch() + with patch( + "truss.templates.shared.lazy_data_resolver.LAZY_DATA_RESOLVER_PATH", + manifest_path, + ) as _, patch( + "truss.templates.shared.lazy_data_resolver.CACHE_DIR", + cache_dir, + ) as CACHE_DIR: + data_dir = Path(tmp_path) + ldr = LazyDataResolver(data_dir) + assert ldr._uses_b10_cache + with requests_mock.Mocker() as m: + for file_name, (url, hash) in ldr._bptr_resolution.items(): + resp = {"file_name": file_name, "url": url} + m.get(url, json=resp) + ldr.fetch() + for file_name, (url, hash) in ldr._bptr_resolution.items(): + assert (CACHE_DIR / hash).read_text() == json.dumps( + {"file_name": file_name, "url": url} + ) + assert os.path.islink(ldr._data_dir / file_name) + assert os.readlink(ldr._data_dir / file_name) == str(CACHE_DIR / hash) + + +@pytest.mark.parametrize( + "foo_expiry,bar_expiry", + [ + ( + int( + datetime.datetime(3000, 1, 1, tzinfo=datetime.timezone.utc).timestamp() + ), + int( + datetime.datetime(3000, 1, 1, tzinfo=datetime.timezone.utc).timestamp() + ), + ) + ], +) +def test_lazy_data_fetch_cached( + baseten_pointer_manifest_mock, foo_expiry, bar_expiry, tmp_path, monkeypatch +): + monkeypatch.setenv(BASETEN_FS_ENABLED_ENV_VAR, "True") + baseten_pointer_manifest_mock = baseten_pointer_manifest_mock( + foo_expiry, bar_expiry + ) + manifest_path = tmp_path / "bptr" / "bptr-manifest" + manifest_path.parent.mkdir() + manifest_path.touch() + manifest_path.write_text(baseten_pointer_manifest_mock) + cache_dir = tmp_path / "cache" / "org" / "artifacts" + cache_dir.mkdir(parents=True, exist_ok=True) + cache_dir.touch() + with patch( + "truss.templates.shared.lazy_data_resolver.LAZY_DATA_RESOLVER_PATH", + manifest_path, + ) as _, patch( + "truss.templates.shared.lazy_data_resolver.CACHE_DIR", + cache_dir, + ) as CACHE_DIR: + data_dir = Path(tmp_path) + ldr = LazyDataResolver(data_dir) + assert ldr._uses_b10_cache + with requests_mock.Mocker() as m: + for file_name, (url, hash) in ldr._bptr_resolution.items(): + resp = {"file_name": file_name, "url": url} + (CACHE_DIR / hash).write_text(json.dumps(resp)) + m.get(url, json=resp) + ldr.fetch() + for file_name, (url, hash) in ldr._bptr_resolution.items(): + assert (CACHE_DIR / hash).read_text() == json.dumps( + {"file_name": file_name, "url": url} + ) + assert not m.called + assert os.path.islink(ldr._data_dir / file_name) + assert os.readlink(ldr._data_dir / file_name) == str(CACHE_DIR / hash) diff --git a/truss/tests/test_build.py b/truss/tests/test_build.py index 675aa74a7..6c5041e63 100644 --- a/truss/tests/test_build.py +++ b/truss/tests/test_build.py @@ -1,7 +1,7 @@ from pathlib import Path -from truss.build import init -from truss.truss_spec import TrussSpec +from truss.base.truss_spec import TrussSpec +from truss.truss_handle.build import init def test_truss_init(tmp_path): diff --git a/truss/tests/test_config.py b/truss/tests/test_config.py index 6529475e7..1fcbfced5 100644 --- a/truss/tests/test_config.py +++ b/truss/tests/test_config.py @@ -6,9 +6,9 @@ import pytest import yaml -from truss.config.trt_llm import TrussTRTLLMQuantizationType -from truss.custom_types import ModelFrameworkType -from truss.truss_config import ( +from truss.base.custom_types import ModelFrameworkType +from truss.base.trt_llm_config import TrussTRTLLMQuantizationType +from truss.base.truss_config import ( DEFAULT_CPU, DEFAULT_MEMORY, DEFAULT_USE_GPU, @@ -22,7 +22,7 @@ Resources, TrussConfig, ) -from truss.truss_handle import TrussHandle +from 
truss.truss_handle.truss_handle import TrussHandle @pytest.fixture diff --git a/truss/tests/test_context_builder_image.py b/truss/tests/test_context_builder_image.py index 314532a20..d245a0268 100644 --- a/truss/tests/test_context_builder_image.py +++ b/truss/tests/test_context_builder_image.py @@ -5,12 +5,10 @@ @pytest.mark.integration -def test_build_docker_image(): +def test_build_docker_image(test_data_path): root_path = Path(__file__).parent.parent.parent root = str(root_path) - context_builder_image_test_dir = str( - root_path / "truss" / "test_data" / "context_builder_image_test" - ) + context_builder_image_test_dir = str(test_data_path / "context_builder_image_test") subprocess.run( [ diff --git a/truss/tests/test_control_truss_patching.py b/truss/tests/test_control_truss_patching.py index 20abcb539..f792cd03c 100644 --- a/truss/tests/test_control_truss_patching.py +++ b/truss/tests/test_control_truss_patching.py @@ -3,7 +3,8 @@ import pytest -from truss.constants import SUPPORTED_PYTHON_VERSIONS +from truss.base.constants import SUPPORTED_PYTHON_VERSIONS +from truss.base.truss_config import ExternalDataItem from truss.local.local_config_handler import LocalConfigHandler from truss.tests.test_testing_utilities_for_other_tests import ensure_kill_all from truss.tests.test_truss_handle import ( @@ -12,9 +13,8 @@ verify_system_package_installed_on_container, verify_system_requirement_not_installed_on_container, ) -from truss.truss_config import ExternalDataItem -from truss.truss_gatherer import calc_shadow_truss_dirname -from truss.truss_handle import TrussHandle +from truss.truss_handle.truss_gatherer import calc_shadow_truss_dirname +from truss.truss_handle.truss_handle import TrussHandle def current_num_docker_images(th: TrussHandle) -> int: diff --git a/truss/test_data/gcs_fix/model/__init__.py b/truss/tests/test_data/__init__.py similarity index 100% rename from truss/test_data/gcs_fix/model/__init__.py rename to truss/tests/test_data/__init__.py diff --git a/truss/test_data/server_conformance_test_truss/model/__init__.py b/truss/tests/test_data/annotated_types_truss/__init__.py similarity index 100% rename from truss/test_data/server_conformance_test_truss/model/__init__.py rename to truss/tests/test_data/annotated_types_truss/__init__.py diff --git a/truss/test_data/annotated_types_truss/config.yaml b/truss/tests/test_data/annotated_types_truss/config.yaml similarity index 100% rename from truss/test_data/annotated_types_truss/config.yaml rename to truss/tests/test_data/annotated_types_truss/config.yaml diff --git a/truss/test_data/test_basic_truss/model/__init__.py b/truss/tests/test_data/annotated_types_truss/model/__init__.py similarity index 100% rename from truss/test_data/test_basic_truss/model/__init__.py rename to truss/tests/test_data/annotated_types_truss/model/__init__.py diff --git a/truss/test_data/annotated_types_truss/model/model.py b/truss/tests/test_data/annotated_types_truss/model/model.py similarity index 100% rename from truss/test_data/annotated_types_truss/model/model.py rename to truss/tests/test_data/annotated_types_truss/model/model.py diff --git a/truss/test_data/auto-mpg.data b/truss/tests/test_data/auto-mpg.data similarity index 100% rename from truss/test_data/auto-mpg.data rename to truss/tests/test_data/auto-mpg.data diff --git a/truss/test_data/context_builder_image_test/Dockerfile b/truss/tests/test_data/context_builder_image_test/Dockerfile similarity index 100% rename from truss/test_data/context_builder_image_test/Dockerfile rename to 
truss/tests/test_data/context_builder_image_test/Dockerfile diff --git a/truss/test_data/test_pyantic_v1/model/__init__.py b/truss/tests/test_data/context_builder_image_test/__init__.py similarity index 100% rename from truss/test_data/test_pyantic_v1/model/__init__.py rename to truss/tests/test_data/context_builder_image_test/__init__.py diff --git a/truss/tests/test_data/context_builder_image_test/test.py b/truss/tests/test_data/context_builder_image_test/test.py new file mode 100644 index 000000000..f0952f982 --- /dev/null +++ b/truss/tests/test_data/context_builder_image_test/test.py @@ -0,0 +1,3 @@ +from truss.base import truss_config + +print(truss_config) diff --git a/truss/test_data/test_pyantic_v2/model/__init__.py b/truss/tests/test_data/gcs_fix/__init__.py similarity index 100% rename from truss/test_data/test_pyantic_v2/model/__init__.py rename to truss/tests/test_data/gcs_fix/__init__.py diff --git a/truss/test_data/gcs_fix/config.yaml b/truss/tests/test_data/gcs_fix/config.yaml similarity index 100% rename from truss/test_data/gcs_fix/config.yaml rename to truss/tests/test_data/gcs_fix/config.yaml diff --git a/truss/test_data/test_requirements_file_truss/model/__init__.py b/truss/tests/test_data/gcs_fix/model/__init__.py similarity index 100% rename from truss/test_data/test_requirements_file_truss/model/__init__.py rename to truss/tests/test_data/gcs_fix/model/__init__.py diff --git a/truss/test_data/gcs_fix/model/model.py b/truss/tests/test_data/gcs_fix/model/model.py similarity index 100% rename from truss/test_data/gcs_fix/model/model.py rename to truss/tests/test_data/gcs_fix/model/model.py diff --git a/truss/test_data/happy.ipynb b/truss/tests/test_data/happy.ipynb similarity index 100% rename from truss/test_data/happy.ipynb rename to truss/tests/test_data/happy.ipynb diff --git a/truss/test_data/test_trt_llm_truss/model/__init__.py b/truss/tests/test_data/model_load_failure_test/__init__.py similarity index 100% rename from truss/test_data/test_trt_llm_truss/model/__init__.py rename to truss/tests/test_data/model_load_failure_test/__init__.py diff --git a/truss/test_data/model_load_failure_test/config.yaml b/truss/tests/test_data/model_load_failure_test/config.yaml similarity index 100% rename from truss/test_data/model_load_failure_test/config.yaml rename to truss/tests/test_data/model_load_failure_test/config.yaml diff --git a/truss/test_data/test_truss/model/__init__.py b/truss/tests/test_data/model_load_failure_test/model/__init__.py similarity index 100% rename from truss/test_data/test_truss/model/__init__.py rename to truss/tests/test_data/model_load_failure_test/model/__init__.py diff --git a/truss/test_data/model_load_failure_test/model/model.py b/truss/tests/test_data/model_load_failure_test/model/model.py similarity index 100% rename from truss/test_data/model_load_failure_test/model/model.py rename to truss/tests/test_data/model_load_failure_test/model/model.py diff --git a/truss/test_data/test_truss_server_caching_truss/model/__init__.py b/truss/tests/test_data/patch_ping_test_server/__init__.py similarity index 100% rename from truss/test_data/test_truss_server_caching_truss/model/__init__.py rename to truss/tests/test_data/patch_ping_test_server/__init__.py diff --git a/truss/test_data/patch_ping_test_server/app.py b/truss/tests/test_data/patch_ping_test_server/app.py similarity index 100% rename from truss/test_data/patch_ping_test_server/app.py rename to truss/tests/test_data/patch_ping_test_server/app.py diff --git 
a/truss/test_data/pima-indians-diabetes.csv b/truss/tests/test_data/pima-indians-diabetes.csv similarity index 100% rename from truss/test_data/pima-indians-diabetes.csv rename to truss/tests/test_data/pima-indians-diabetes.csv diff --git a/truss/test_data/readme_int_example.md b/truss/tests/test_data/readme_int_example.md similarity index 100% rename from truss/test_data/readme_int_example.md rename to truss/tests/test_data/readme_int_example.md diff --git a/truss/test_data/readme_no_example.md b/truss/tests/test_data/readme_no_example.md similarity index 100% rename from truss/test_data/readme_no_example.md rename to truss/tests/test_data/readme_no_example.md diff --git a/truss/test_data/readme_str_example.md b/truss/tests/test_data/readme_str_example.md similarity index 100% rename from truss/test_data/readme_str_example.md rename to truss/tests/test_data/readme_str_example.md diff --git a/truss/test_data/server.Dockerfile b/truss/tests/test_data/server.Dockerfile similarity index 100% rename from truss/test_data/server.Dockerfile rename to truss/tests/test_data/server.Dockerfile diff --git a/truss/test_data/test_truss_with_error/model/__init__.py b/truss/tests/test_data/server_conformance_test_truss/__init__.py similarity index 100% rename from truss/test_data/test_truss_with_error/model/__init__.py rename to truss/tests/test_data/server_conformance_test_truss/__init__.py diff --git a/truss/test_data/server_conformance_test_truss/config.yaml b/truss/tests/test_data/server_conformance_test_truss/config.yaml similarity index 100% rename from truss/test_data/server_conformance_test_truss/config.yaml rename to truss/tests/test_data/server_conformance_test_truss/config.yaml diff --git a/truss/tests/local/__init__.py b/truss/tests/test_data/server_conformance_test_truss/model/__init__.py similarity index 100% rename from truss/tests/local/__init__.py rename to truss/tests/test_data/server_conformance_test_truss/model/__init__.py diff --git a/truss/test_data/server_conformance_test_truss/model/model.py b/truss/tests/test_data/server_conformance_test_truss/model/model.py similarity index 100% rename from truss/test_data/server_conformance_test_truss/model/model.py rename to truss/tests/test_data/server_conformance_test_truss/model/model.py diff --git a/truss/test_data/test_truss/model/dummy b/truss/tests/test_data/test_async_truss/__init__.py similarity index 100% rename from truss/test_data/test_truss/model/dummy rename to truss/tests/test_data/test_async_truss/__init__.py diff --git a/truss/test_data/test_async_truss/config.yaml b/truss/tests/test_data/test_async_truss/config.yaml similarity index 100% rename from truss/test_data/test_async_truss/config.yaml rename to truss/tests/test_data/test_async_truss/config.yaml diff --git a/truss/tests/test_data/test_async_truss/model/__init__.py b/truss/tests/test_data/test_async_truss/model/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_async_truss/model/model.py b/truss/tests/test_data/test_async_truss/model/model.py similarity index 100% rename from truss/test_data/test_async_truss/model/model.py rename to truss/tests/test_data/test_async_truss/model/model.py diff --git a/truss/tests/test_data/test_basic_truss/__init__.py b/truss/tests/test_data/test_basic_truss/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_basic_truss/config.yaml b/truss/tests/test_data/test_basic_truss/config.yaml similarity index 100% rename from 
truss/test_data/test_basic_truss/config.yaml rename to truss/tests/test_data/test_basic_truss/config.yaml diff --git a/truss/tests/test_data/test_basic_truss/model/__init__.py b/truss/tests/test_data/test_basic_truss/model/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_basic_truss/model/model.py b/truss/tests/test_data/test_basic_truss/model/model.py similarity index 100% rename from truss/test_data/test_basic_truss/model/model.py rename to truss/tests/test_data/test_basic_truss/model/model.py diff --git a/truss/tests/test_data/test_build_commands/__init__.py b/truss/tests/test_data/test_build_commands/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_build_commands/config.yaml b/truss/tests/test_data/test_build_commands/config.yaml similarity index 100% rename from truss/test_data/test_build_commands/config.yaml rename to truss/tests/test_data/test_build_commands/config.yaml diff --git a/truss/tests/test_data/test_build_commands/model/__init__.py b/truss/tests/test_data/test_build_commands/model/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_build_commands/model/model.py b/truss/tests/test_data/test_build_commands/model/model.py similarity index 100% rename from truss/test_data/test_build_commands/model/model.py rename to truss/tests/test_data/test_build_commands/model/model.py diff --git a/truss/tests/test_data/test_build_commands_failure/__init__.py b/truss/tests/test_data/test_build_commands_failure/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_build_commands_failure/config.yaml b/truss/tests/test_data/test_build_commands_failure/config.yaml similarity index 100% rename from truss/test_data/test_build_commands_failure/config.yaml rename to truss/tests/test_data/test_build_commands_failure/config.yaml diff --git a/truss/tests/test_data/test_build_commands_failure/model/__init__.py b/truss/tests/test_data/test_build_commands_failure/model/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_build_commands_failure/model/model.py b/truss/tests/test_data/test_build_commands_failure/model/model.py similarity index 100% rename from truss/test_data/test_build_commands_failure/model/model.py rename to truss/tests/test_data/test_build_commands_failure/model/model.py diff --git a/truss/tests/test_data/test_concurrency_truss/__init__.py b/truss/tests/test_data/test_concurrency_truss/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_concurrency_truss/config.yaml b/truss/tests/test_data/test_concurrency_truss/config.yaml similarity index 100% rename from truss/test_data/test_concurrency_truss/config.yaml rename to truss/tests/test_data/test_concurrency_truss/config.yaml diff --git a/truss/tests/test_data/test_concurrency_truss/model/__init__.py b/truss/tests/test_data/test_concurrency_truss/model/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_concurrency_truss/model/model.py b/truss/tests/test_data/test_concurrency_truss/model/model.py similarity index 100% rename from truss/test_data/test_concurrency_truss/model/model.py rename to truss/tests/test_data/test_concurrency_truss/model/model.py diff --git a/truss/tests/test_data/test_docker_server_truss/__init__.py b/truss/tests/test_data/test_docker_server_truss/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git 
a/truss/test_data/test_docker_server_truss/config.yaml b/truss/tests/test_data/test_docker_server_truss/config.yaml similarity index 100% rename from truss/test_data/test_docker_server_truss/config.yaml rename to truss/tests/test_data/test_docker_server_truss/config.yaml diff --git a/truss/test_data/test_docker_server_truss/test_docker_image/Dockerfile b/truss/tests/test_data/test_docker_server_truss/test_docker_image/Dockerfile similarity index 100% rename from truss/test_data/test_docker_server_truss/test_docker_image/Dockerfile rename to truss/tests/test_data/test_docker_server_truss/test_docker_image/Dockerfile diff --git a/truss/test_data/test_docker_server_truss/test_docker_image/README.md b/truss/tests/test_data/test_docker_server_truss/test_docker_image/README.md similarity index 100% rename from truss/test_data/test_docker_server_truss/test_docker_image/README.md rename to truss/tests/test_data/test_docker_server_truss/test_docker_image/README.md diff --git a/truss/test_data/test_docker_server_truss/test_docker_image/VERSION b/truss/tests/test_data/test_docker_server_truss/test_docker_image/VERSION similarity index 100% rename from truss/test_data/test_docker_server_truss/test_docker_image/VERSION rename to truss/tests/test_data/test_docker_server_truss/test_docker_image/VERSION diff --git a/truss/tests/test_data/test_docker_server_truss/test_docker_image/__init__.py b/truss/tests/test_data/test_docker_server_truss/test_docker_image/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_docker_server_truss/test_docker_image/app.py b/truss/tests/test_data/test_docker_server_truss/test_docker_image/app.py similarity index 100% rename from truss/test_data/test_docker_server_truss/test_docker_image/app.py rename to truss/tests/test_data/test_docker_server_truss/test_docker_image/app.py diff --git a/truss/test_data/test_docker_server_truss/test_docker_image/build_upload_new_image.sh b/truss/tests/test_data/test_docker_server_truss/test_docker_image/build_upload_new_image.sh similarity index 100% rename from truss/test_data/test_docker_server_truss/test_docker_image/build_upload_new_image.sh rename to truss/tests/test_data/test_docker_server_truss/test_docker_image/build_upload_new_image.sh diff --git a/truss/tests/test_data/test_pyantic_v1/__init__.py b/truss/tests/test_data/test_pyantic_v1/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_pyantic_v1/config.yaml b/truss/tests/test_data/test_pyantic_v1/config.yaml similarity index 100% rename from truss/test_data/test_pyantic_v1/config.yaml rename to truss/tests/test_data/test_pyantic_v1/config.yaml diff --git a/truss/tests/test_data/test_pyantic_v1/model/__init__.py b/truss/tests/test_data/test_pyantic_v1/model/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_pyantic_v1/model/model.py b/truss/tests/test_data/test_pyantic_v1/model/model.py similarity index 100% rename from truss/test_data/test_pyantic_v1/model/model.py rename to truss/tests/test_data/test_pyantic_v1/model/model.py diff --git a/truss/test_data/test_pyantic_v1/requirements.txt b/truss/tests/test_data/test_pyantic_v1/requirements.txt similarity index 100% rename from truss/test_data/test_pyantic_v1/requirements.txt rename to truss/tests/test_data/test_pyantic_v1/requirements.txt diff --git a/truss/tests/test_data/test_pyantic_v2/__init__.py b/truss/tests/test_data/test_pyantic_v2/__init__.py new file mode 100644 index 000000000..e69de29bb diff 
--git a/truss/test_data/test_pyantic_v2/config.yaml b/truss/tests/test_data/test_pyantic_v2/config.yaml similarity index 100% rename from truss/test_data/test_pyantic_v2/config.yaml rename to truss/tests/test_data/test_pyantic_v2/config.yaml diff --git a/truss/tests/test_data/test_pyantic_v2/model/__init__.py b/truss/tests/test_data/test_pyantic_v2/model/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_pyantic_v2/model/model.py b/truss/tests/test_data/test_pyantic_v2/model/model.py similarity index 100% rename from truss/test_data/test_pyantic_v2/model/model.py rename to truss/tests/test_data/test_pyantic_v2/model/model.py diff --git a/truss/test_data/test_pyantic_v2/requirements.txt b/truss/tests/test_data/test_pyantic_v2/requirements.txt similarity index 100% rename from truss/test_data/test_pyantic_v2/requirements.txt rename to truss/tests/test_data/test_pyantic_v2/requirements.txt diff --git a/truss/tests/test_data/test_requirements_file_truss/__init__.py b/truss/tests/test_data/test_requirements_file_truss/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_requirements_file_truss/config.yaml b/truss/tests/test_data/test_requirements_file_truss/config.yaml similarity index 100% rename from truss/test_data/test_requirements_file_truss/config.yaml rename to truss/tests/test_data/test_requirements_file_truss/config.yaml diff --git a/truss/tests/test_data/test_requirements_file_truss/model/__init__.py b/truss/tests/test_data/test_requirements_file_truss/model/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_requirements_file_truss/model/model.py b/truss/tests/test_data/test_requirements_file_truss/model/model.py similarity index 100% rename from truss/test_data/test_requirements_file_truss/model/model.py rename to truss/tests/test_data/test_requirements_file_truss/model/model.py diff --git a/truss/test_data/test_requirements_file_truss/requirements.txt b/truss/tests/test_data/test_requirements_file_truss/requirements.txt similarity index 100% rename from truss/test_data/test_requirements_file_truss/requirements.txt rename to truss/tests/test_data/test_requirements_file_truss/requirements.txt diff --git a/truss/tests/test_data/test_streaming_async_generator_truss/__init__.py b/truss/tests/test_data/test_streaming_async_generator_truss/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_streaming_async_generator_truss/config.yaml b/truss/tests/test_data/test_streaming_async_generator_truss/config.yaml similarity index 100% rename from truss/test_data/test_streaming_async_generator_truss/config.yaml rename to truss/tests/test_data/test_streaming_async_generator_truss/config.yaml diff --git a/truss/tests/test_data/test_streaming_async_generator_truss/model/__init__.py b/truss/tests/test_data/test_streaming_async_generator_truss/model/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_streaming_async_generator_truss/model/model.py b/truss/tests/test_data/test_streaming_async_generator_truss/model/model.py similarity index 100% rename from truss/test_data/test_streaming_async_generator_truss/model/model.py rename to truss/tests/test_data/test_streaming_async_generator_truss/model/model.py diff --git a/truss/tests/test_data/test_streaming_read_timeout/__init__.py b/truss/tests/test_data/test_streaming_read_timeout/__init__.py new file mode 100644 index 000000000..e69de29bb diff 
--git a/truss/test_data/test_streaming_read_timeout/config.yaml b/truss/tests/test_data/test_streaming_read_timeout/config.yaml similarity index 100% rename from truss/test_data/test_streaming_read_timeout/config.yaml rename to truss/tests/test_data/test_streaming_read_timeout/config.yaml diff --git a/truss/tests/test_data/test_streaming_read_timeout/model/__init__.py b/truss/tests/test_data/test_streaming_read_timeout/model/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_streaming_read_timeout/model/model.py b/truss/tests/test_data/test_streaming_read_timeout/model/model.py similarity index 100% rename from truss/test_data/test_streaming_read_timeout/model/model.py rename to truss/tests/test_data/test_streaming_read_timeout/model/model.py diff --git a/truss/tests/test_data/test_streaming_truss/__init__.py b/truss/tests/test_data/test_streaming_truss/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_streaming_truss/config.yaml b/truss/tests/test_data/test_streaming_truss/config.yaml similarity index 100% rename from truss/test_data/test_streaming_truss/config.yaml rename to truss/tests/test_data/test_streaming_truss/config.yaml diff --git a/truss/tests/test_data/test_streaming_truss/model/__init__.py b/truss/tests/test_data/test_streaming_truss/model/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_streaming_truss/model/model.py b/truss/tests/test_data/test_streaming_truss/model/model.py similarity index 100% rename from truss/test_data/test_streaming_truss/model/model.py rename to truss/tests/test_data/test_streaming_truss/model/model.py diff --git a/truss/tests/test_data/test_streaming_truss_with_error/__init__.py b/truss/tests/test_data/test_streaming_truss_with_error/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_streaming_truss_with_error/config.yaml b/truss/tests/test_data/test_streaming_truss_with_error/config.yaml similarity index 100% rename from truss/test_data/test_streaming_truss_with_error/config.yaml rename to truss/tests/test_data/test_streaming_truss_with_error/config.yaml diff --git a/truss/tests/test_data/test_streaming_truss_with_error/model/__init__.py b/truss/tests/test_data/test_streaming_truss_with_error/model/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_streaming_truss_with_error/model/model.py b/truss/tests/test_data/test_streaming_truss_with_error/model/model.py similarity index 100% rename from truss/test_data/test_streaming_truss_with_error/model/model.py rename to truss/tests/test_data/test_streaming_truss_with_error/model/model.py diff --git a/truss/tests/test_data/test_streaming_truss_with_error/packages/__init__.py b/truss/tests/test_data/test_streaming_truss_with_error/packages/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_streaming_truss_with_error/packages/helpers_1.py b/truss/tests/test_data/test_streaming_truss_with_error/packages/helpers_1.py similarity index 100% rename from truss/test_data/test_streaming_truss_with_error/packages/helpers_1.py rename to truss/tests/test_data/test_streaming_truss_with_error/packages/helpers_1.py diff --git a/truss/test_data/test_streaming_truss_with_error/packages/helpers_2.py b/truss/tests/test_data/test_streaming_truss_with_error/packages/helpers_2.py similarity index 100% rename from 
truss/test_data/test_streaming_truss_with_error/packages/helpers_2.py rename to truss/tests/test_data/test_streaming_truss_with_error/packages/helpers_2.py diff --git a/truss/tests/test_data/test_streaming_truss_with_tracing/__init__.py b/truss/tests/test_data/test_streaming_truss_with_tracing/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/tests/test_data/test_streaming_truss_with_tracing/config.yaml b/truss/tests/test_data/test_streaming_truss_with_tracing/config.yaml new file mode 100644 index 000000000..66a8b2e00 --- /dev/null +++ b/truss/tests/test_data/test_streaming_truss_with_tracing/config.yaml @@ -0,0 +1,43 @@ +apply_library_patches: true +base_image: null +build: + arguments: {} + model_server: TrussServer + secret_to_path_mapping: {} +build_commands: [] +bundled_packages_dir: packages +data_dir: data +description: null +docker_server: null +environment_variables: + OTEL_TRACING_NDJSON_FILE: /tmp/otel_traces.ndjson +examples_filename: examples.yaml +external_data: null +external_package_dirs: [] +input_type: Any +live_reload: false +model_cache: [] +model_class_filename: model.py +model_class_name: Model +model_framework: custom +model_metadata: {} +model_module_dir: model +model_name: Test Streaming +model_type: Model +python_version: py39 +requirements: [] +requirements_file: null +resources: + accelerator: null + cpu: '1' + memory: 2Gi + use_gpu: false +runtime: + enable_tracing_data: false + num_workers: 1 + predict_concurrency: 1 + streaming_read_timeout: 60 +secrets: {} +spec_version: '2.0' +system_packages: [] +trt_llm: null diff --git a/truss/tests/test_data/test_streaming_truss_with_tracing/model/__init__.py b/truss/tests/test_data/test_streaming_truss_with_tracing/model/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_streaming_truss_with_tracing/model/model.py b/truss/tests/test_data/test_streaming_truss_with_tracing/model/model.py similarity index 100% rename from truss/test_data/test_streaming_truss_with_tracing/model/model.py rename to truss/tests/test_data/test_streaming_truss_with_tracing/model/model.py diff --git a/truss/tests/test_data/test_trt_llm_truss/__init__.py b/truss/tests/test_data/test_trt_llm_truss/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_trt_llm_truss/config.yaml b/truss/tests/test_data/test_trt_llm_truss/config.yaml similarity index 100% rename from truss/test_data/test_trt_llm_truss/config.yaml rename to truss/tests/test_data/test_trt_llm_truss/config.yaml diff --git a/truss/tests/test_data/test_trt_llm_truss/model/__init__.py b/truss/tests/test_data/test_trt_llm_truss/model/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_trt_llm_truss/model/model.py b/truss/tests/test_data/test_trt_llm_truss/model/model.py similarity index 100% rename from truss/test_data/test_trt_llm_truss/model/model.py rename to truss/tests/test_data/test_trt_llm_truss/model/model.py diff --git a/truss/tests/test_data/test_truss/__init__.py b/truss/tests/test_data/test_truss/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_truss/config.yaml b/truss/tests/test_data/test_truss/config.yaml similarity index 100% rename from truss/test_data/test_truss/config.yaml rename to truss/tests/test_data/test_truss/config.yaml diff --git a/truss/test_data/test_truss/examples.yaml b/truss/tests/test_data/test_truss/examples.yaml similarity index 100% rename from 
truss/test_data/test_truss/examples.yaml rename to truss/tests/test_data/test_truss/examples.yaml diff --git a/truss/tests/test_data/test_truss/model/__init__.py b/truss/tests/test_data/test_truss/model/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/tests/test_data/test_truss/model/dummy b/truss/tests/test_data/test_truss/model/dummy new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_truss/model/model.py b/truss/tests/test_data/test_truss/model/model.py similarity index 100% rename from truss/test_data/test_truss/model/model.py rename to truss/tests/test_data/test_truss/model/model.py diff --git a/truss/tests/test_data/test_truss/packages/__init__.py b/truss/tests/test_data/test_truss/packages/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/tests/test_data/test_truss/packages/test_package/__init__.py b/truss/tests/test_data/test_truss/packages/test_package/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_truss/packages/test_package/test.py b/truss/tests/test_data/test_truss/packages/test_package/test.py similarity index 100% rename from truss/test_data/test_truss/packages/test_package/test.py rename to truss/tests/test_data/test_truss/packages/test_package/test.py diff --git a/truss/tests/test_data/test_truss_server_caching_truss/__init__.py b/truss/tests/test_data/test_truss_server_caching_truss/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_truss_server_caching_truss/config.yaml b/truss/tests/test_data/test_truss_server_caching_truss/config.yaml similarity index 100% rename from truss/test_data/test_truss_server_caching_truss/config.yaml rename to truss/tests/test_data/test_truss_server_caching_truss/config.yaml diff --git a/truss/tests/test_data/test_truss_server_caching_truss/model/__init__.py b/truss/tests/test_data/test_truss_server_caching_truss/model/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_truss_server_caching_truss/model/model.py b/truss/tests/test_data/test_truss_server_caching_truss/model/model.py similarity index 100% rename from truss/test_data/test_truss_server_caching_truss/model/model.py rename to truss/tests/test_data/test_truss_server_caching_truss/model/model.py diff --git a/truss/tests/test_data/test_truss_with_error/__init__.py b/truss/tests/test_data/test_truss_with_error/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_truss_with_error/config.yaml b/truss/tests/test_data/test_truss_with_error/config.yaml similarity index 100% rename from truss/test_data/test_truss_with_error/config.yaml rename to truss/tests/test_data/test_truss_with_error/config.yaml diff --git a/truss/tests/test_data/test_truss_with_error/model/__init__.py b/truss/tests/test_data/test_truss_with_error/model/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_truss_with_error/model/model.py b/truss/tests/test_data/test_truss_with_error/model/model.py similarity index 100% rename from truss/test_data/test_truss_with_error/model/model.py rename to truss/tests/test_data/test_truss_with_error/model/model.py diff --git a/truss/tests/test_data/test_truss_with_error/packages/__init__.py b/truss/tests/test_data/test_truss_with_error/packages/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_truss_with_error/packages/helpers_1.py 
b/truss/tests/test_data/test_truss_with_error/packages/helpers_1.py similarity index 100% rename from truss/test_data/test_truss_with_error/packages/helpers_1.py rename to truss/tests/test_data/test_truss_with_error/packages/helpers_1.py diff --git a/truss/test_data/test_truss_with_error/packages/helpers_2.py b/truss/tests/test_data/test_truss_with_error/packages/helpers_2.py similarity index 100% rename from truss/test_data/test_truss_with_error/packages/helpers_2.py rename to truss/tests/test_data/test_truss_with_error/packages/helpers_2.py diff --git a/truss/tests/test_docker.py b/truss/tests/test_docker.py index a127d3f57..0efd045d0 100644 --- a/truss/tests/test_docker.py +++ b/truss/tests/test_docker.py @@ -1,7 +1,7 @@ import pytest from python_on_whales import docker -from truss.docker import get_urls_from_container +from truss.util.docker import get_urls_from_container @pytest.fixture diff --git a/truss/tests/test_model_inference.py b/truss/tests/test_model_inference.py index 7efad6320..67ea3bf26 100644 --- a/truss/tests/test_model_inference.py +++ b/truss/tests/test_model_inference.py @@ -23,13 +23,11 @@ from python_on_whales import Container from requests.exceptions import RequestException +from truss.base.truss_config import map_to_supported_python_version from truss.local.local_config_handler import LocalConfigHandler -from truss.model_inference import map_to_supported_python_version from truss.tests.helpers import create_truss -from truss.tests.test_testing_utilities_for_other_tests import ( - ensure_kill_all, -) -from truss.truss_handle import TrussHandle, wait_for_truss +from truss.tests.test_testing_utilities_for_other_tests import ensure_kill_all +from truss.truss_handle.truss_handle import TrussHandle, wait_for_truss logger = logging.getLogger(__name__) @@ -107,10 +105,9 @@ def test_map_to_supported_python_version(python_version, expected_python_version @pytest.mark.integration -def test_model_load_failure_truss(): +def test_model_load_failure_truss(test_data_path): with ensure_kill_all(): - truss_root = Path(__file__).parent.parent.parent.resolve() / "truss" - truss_dir = truss_root / "test_data" / "model_load_failure_test" + truss_dir = test_data_path / "model_load_failure_test" tr = TrussHandle(truss_dir) _ = tr.docker_run(local_port=8090, detach=True, wait_for_server_ready=False) @@ -164,11 +161,10 @@ def _test_invocations(expected_code): @pytest.mark.integration -def test_concurrency_truss(): +def test_concurrency_truss(test_data_path): # Tests that concurrency limits work correctly with ensure_kill_all(): - truss_root = Path(__file__).parent.parent.parent.resolve() / "truss" - truss_dir = truss_root / "test_data" / "test_concurrency_truss" + truss_dir = test_data_path / "test_concurrency_truss" tr = TrussHandle(truss_dir) _ = tr.docker_run(local_port=8090, detach=True, wait_for_server_ready=True) @@ -196,10 +192,9 @@ def make_request(): @pytest.mark.integration -def test_requirements_file_truss(): +def test_requirements_file_truss(test_data_path): with ensure_kill_all(): - truss_root = Path(__file__).parent.parent.parent.resolve() / "truss" - truss_dir = truss_root / "test_data" / "test_requirements_file_truss" + truss_dir = test_data_path / "test_requirements_file_truss" tr = TrussHandle(truss_dir) _ = tr.docker_run(local_port=8090, detach=True, wait_for_server_ready=True) @@ -211,10 +206,9 @@ def test_requirements_file_truss(): @pytest.mark.integration @pytest.mark.parametrize("pydantic_major_version", ["1", "2"]) -def 
test_requirements_pydantic(pydantic_major_version): +def test_requirements_pydantic(test_data_path, pydantic_major_version): with ensure_kill_all(): - truss_root = Path(__file__).parent.parent.parent.resolve() / "truss" - truss_dir = truss_root / "test_data" / f"test_pyantic_v{pydantic_major_version}" + truss_dir = test_data_path / f"test_pyantic_v{pydantic_major_version}" tr = TrussHandle(truss_dir) _ = tr.docker_run(local_port=8090, detach=True, wait_for_server_ready=True) @@ -224,10 +218,9 @@ def test_requirements_pydantic(pydantic_major_version): @pytest.mark.integration -def test_async_truss(): +def test_async_truss(test_data_path): with ensure_kill_all(): - truss_root = Path(__file__).parent.parent.parent.resolve() / "truss" - truss_dir = truss_root / "test_data" / "test_async_truss" + truss_dir = test_data_path / "test_async_truss" tr = TrussHandle(truss_dir) _ = tr.docker_run(local_port=8090, detach=True, wait_for_server_ready=True) @@ -239,10 +232,9 @@ def test_async_truss(): @pytest.mark.integration -def test_async_streaming(): +def test_async_streaming(test_data_path): with ensure_kill_all(): - truss_root = Path(__file__).parent.parent.parent.resolve() / "truss" - truss_dir = truss_root / "test_data" / "test_streaming_async_generator_truss" + truss_dir = test_data_path / "test_streaming_async_generator_truss" tr = TrussHandle(truss_dir) _ = tr.docker_run(local_port=8090, detach=True, wait_for_server_ready=True) @@ -263,10 +255,9 @@ def test_async_streaming(): @pytest.mark.integration -def test_async_streaming_timeout(): +def test_async_streaming_timeout(test_data_path): with ensure_kill_all(): - truss_root = Path(__file__).parent.parent.parent.resolve() / "truss" - truss_dir = truss_root / "test_data" / "test_streaming_read_timeout" + truss_dir = test_data_path / "test_streaming_read_timeout" tr = TrussHandle(truss_dir) container = tr.docker_run( local_port=8090, detach=True, wait_for_server_ready=True @@ -289,10 +280,9 @@ def test_async_streaming_timeout(): @pytest.mark.integration -def test_streaming_with_error_and_stacktrace(): +def test_streaming_with_error_and_stacktrace(test_data_path): with ensure_kill_all(): - truss_root = Path(__file__).parent.parent.parent.resolve() / "truss" - truss_dir = truss_root / "test_data" / "test_streaming_truss_with_error" + truss_dir = test_data_path / "test_streaming_truss_with_error" tr = TrussHandle(truss_dir) container = tr.docker_run( local_port=8090, detach=True, wait_for_server_ready=True @@ -667,10 +657,9 @@ def predict(self, request): @pytest.mark.integration -def test_truss_with_error_stacktrace(): +def test_truss_with_error_stacktrace(test_data_path): with ensure_kill_all(): - truss_root = Path(__file__).parent.parent.parent.resolve() / "truss" - truss_dir = truss_root / "test_data" / "test_truss_with_error" + truss_dir = test_data_path / "test_truss_with_error" tr = TrussHandle(truss_dir) container = tr.docker_run( local_port=8090, detach=True, wait_for_server_ready=True @@ -702,10 +691,9 @@ def test_truss_with_error_stacktrace(): @pytest.mark.integration -def test_slow_truss(): +def test_slow_truss(test_data_path): with ensure_kill_all(): - truss_root = Path(__file__).parent.parent.parent.resolve() / "truss" - truss_dir = truss_root / "test_data" / "server_conformance_test_truss" + truss_dir = test_data_path / "server_conformance_test_truss" tr = TrussHandle(truss_dir) _ = tr.docker_run(local_port=8090, detach=True, wait_for_server_ready=False) @@ -1044,10 +1032,9 @@ def _make_otel_headers() -> Mapping[str, str]: 
@pytest.mark.integration @pytest.mark.parametrize("enable_tracing_data", [True, False]) -def test_streaming_truss_with_user_tracing(enable_tracing_data): +def test_streaming_truss_with_user_tracing(test_data_path, enable_tracing_data): with ensure_kill_all(): - truss_root = Path(__file__).parent.parent.parent.resolve() / "truss" - truss_dir = truss_root / "test_data" / "test_streaming_truss_with_tracing" + truss_dir = test_data_path / "test_streaming_truss_with_tracing" tr = TrussHandle(truss_dir) def enable_gpu_fn(conf): diff --git a/truss/tests/test_model_schema.py b/truss/tests/test_model_schema.py index c786b1b6f..a95654465 100644 --- a/truss/tests/test_model_schema.py +++ b/truss/tests/test_model_schema.py @@ -8,7 +8,7 @@ from truss.templates.shared import serialization from truss.tests.helpers import create_truss from truss.tests.test_testing_utilities_for_other_tests import ensure_kill_all -from truss.truss_handle import TrussHandle +from truss.truss_handle.truss_handle import TrussHandle DEFAULT_CONFIG = """model_name: test-truss""" TRUSS_SERVER_ADDR = "http://localhost:8090" @@ -17,10 +17,8 @@ @pytest.mark.integration -def test_truss_with_no_annotations(): - truss_root = Path(__file__).parent.parent.parent.resolve() - - truss_dir = truss_root / "truss" / "test_data" / "test_basic_truss" +def test_truss_with_no_annotations(test_data_path): + truss_dir = test_data_path / "test_basic_truss" tr = TrussHandle(truss_dir) @@ -97,10 +95,8 @@ def predict(self, request): @pytest.mark.integration -def test_truss_with_annotated_inputs_outputs(): - truss_root = Path(__file__).parent.parent.resolve() - - truss_dir = truss_root / "test_data" / "annotated_types_truss" +def test_truss_with_annotated_inputs_outputs(test_data_path): + truss_dir = test_data_path / "annotated_types_truss" tr = TrussHandle(truss_dir) diff --git a/truss/tests/test_testing_utilities_for_other_tests.py b/truss/tests/test_testing_utilities_for_other_tests.py index 03c3abd44..1e3041e90 100644 --- a/truss/tests/test_testing_utilities_for_other_tests.py +++ b/truss/tests/test_testing_utilities_for_other_tests.py @@ -8,9 +8,8 @@ import time from contextlib import contextmanager -from truss.build import kill_all -from truss.constants import TRUSS -from truss.docker import get_containers +from truss.base.constants import TRUSS +from truss.util.docker import get_containers, kill_all DISK_SPACE_LOW_PERCENTAGE = 20 diff --git a/truss/tests/test_truss_gatherer.py b/truss/tests/test_truss_gatherer.py index 2e660d906..5a62d94ca 100644 --- a/truss/tests/test_truss_gatherer.py +++ b/truss/tests/test_truss_gatherer.py @@ -1,8 +1,8 @@ from pathlib import Path from typing import List -from truss.patch.dir_signature import directory_content_signature -from truss.truss_gatherer import gather +from truss.truss_handle.patch.dir_signature import directory_content_signature +from truss.truss_handle.truss_gatherer import gather def test_gather(custom_model_with_external_package): diff --git a/truss/tests/test_truss_handle.py b/truss/tests/test_truss_handle.py index 5b74c349f..7f29ee5aa 100644 --- a/truss/tests/test_truss_handle.py +++ b/truss/tests/test_truss_handle.py @@ -8,11 +8,10 @@ from python_on_whales.exceptions import DockerException from tenacity import RetryError -from truss.custom_types import Example, PatchRequest -from truss.docker import Docker, DockerStates -from truss.errors import ContainerIsDownError, ContainerNotFoundError +from truss.base.custom_types import Example +from truss.base.errors import ContainerIsDownError, 
ContainerNotFoundError +from truss.base.truss_config import map_local_to_supported_python_version from truss.local.local_config_handler import LocalConfigHandler -from truss.model_inference import infer_python_version, map_to_supported_python_version from truss.templates.control.control.helpers.custom_types import ( Action, ModelCodePatch, @@ -23,7 +22,9 @@ ensure_kill_all, kill_all_with_retries, ) -from truss.truss_handle import TrussHandle, wait_for_truss +from truss.truss_handle.patch.custom_types import PatchRequest +from truss.truss_handle.truss_handle import TrussHandle, wait_for_truss +from truss.util.docker import Docker, DockerStates def test_spec(custom_model_truss_dir_with_pre_and_post): @@ -58,15 +59,18 @@ def test_server_predict(custom_model_truss_dir_with_pre_and_post): assert resp == {"predictions": [4, 5, 6, 7]} -def test_readme_generation_int_example(custom_model_truss_dir_with_pre_and_post): +def test_readme_generation_int_example( + test_data_path, custom_model_truss_dir_with_pre_and_post +): th = TrussHandle(custom_model_truss_dir_with_pre_and_post) readme_contents = th.generate_readme() readme_contents = readme_contents.replace("\n", "") - correct_readme_contents = _read_readme("readme_int_example.md") + correct_readme_contents = _read_readme(test_data_path / "readme_int_example.md") assert readme_contents == correct_readme_contents def test_readme_generation_no_example( + test_data_path, custom_model_truss_dir_with_pre_and_post_no_example, ): th = TrussHandle(custom_model_truss_dir_with_pre_and_post_no_example) @@ -75,17 +79,18 @@ def test_readme_generation_no_example( os.remove(th._spec.examples_path) readme_contents = th.generate_readme() readme_contents = readme_contents.replace("\n", "") - correct_readme_contents = _read_readme("readme_no_example.md") + correct_readme_contents = _read_readme(test_data_path / "readme_no_example.md") assert readme_contents == correct_readme_contents def test_readme_generation_str_example( + test_data_path, custom_model_truss_dir_with_pre_and_post_str_example, ): th = TrussHandle(custom_model_truss_dir_with_pre_and_post_str_example) readme_contents = th.generate_readme() readme_contents = readme_contents.replace("\n", "") - correct_readme_contents = _read_readme("readme_str_example.md") + correct_readme_contents = _read_readme(test_data_path / "readme_str_example.md") assert readme_contents == correct_readme_contents @@ -461,9 +466,8 @@ def test_add_environment_variable(custom_model_truss_dir_with_pre_and_post): @pytest.mark.integration -def test_build_commands(): - truss_root = Path(__file__).parent.parent.parent.resolve() / "truss" - truss_dir = truss_root / "test_data" / "test_build_commands" +def test_build_commands(test_data_path): + truss_dir = test_data_path / "test_build_commands" tr = TrussHandle(truss_dir) with ensure_kill_all(): r1 = tr.docker_predict([1, 2]) @@ -471,9 +475,8 @@ def test_build_commands(): @pytest.mark.integration -def test_build_commands_failure(): - truss_root = Path(__file__).parent.parent.parent.resolve() / "truss" - truss_dir = truss_root / "test_data" / "test_build_commands_failure" +def test_build_commands_failure(test_data_path): + truss_dir = test_data_path / "test_build_commands_failure" tr = TrussHandle(truss_dir) try: tr.docker_run(local_port=8090, detach=True, wait_for_server_ready=True) @@ -635,7 +638,7 @@ def predict(self, model_input): assert len(th.get_all_docker_images()) == orig_num_truss_images + 1 -@patch("truss.truss_handle.directory_content_hash") 
+@patch("truss.truss_handle.truss_handle.directory_content_hash") def test_truss_hash_caching_based_on_max_mod_time( directory_content_patcher, custom_model_truss_dir, @@ -654,14 +657,14 @@ def test_truss_hash_caching_based_on_max_mod_time( directory_content_patcher.call_count == 2 -@patch("truss.truss_handle.get_container_state") +@patch("truss.truss_handle.truss_handle.get_container_state") def test_container_oom_caught_during_waiting(container_state_mock): container_state_mock.return_value = DockerStates.OOMKILLED with pytest.raises(ContainerIsDownError): wait_for_truss(url="localhost:8000", container=MagicMock()) -@patch("truss.truss_handle.get_container_state") +@patch("truss.truss_handle.truss_handle.get_container_state") @pytest.mark.integration def test_container_stuck_in_created(container_state_mock): container_state_mock.return_value = DockerStates.CREATED @@ -815,15 +818,13 @@ def verify_environment_variable_on_container( assert needle in resp.splitlines() -def _read_readme(filename: str) -> str: - readme_correct_path = Path(__file__).parent.parent / "test_data" / filename - readme_contents = readme_correct_path.open().read().replace("\n", "") - return readme_contents +def _read_readme(readme_correct_path: Path) -> str: + return readme_correct_path.open().read().replace("\n", "") def generate_default_config(): # The test fixture varies with host version. - python_version = map_to_supported_python_version(infer_python_version()) + python_version = map_local_to_supported_python_version() config = { "build_commands": [], "environment_variables": {}, diff --git a/truss/tests/test_trussless_docker_server.py b/truss/tests/test_trussless_docker_server.py index 0ee6207a5..f703f99de 100644 --- a/truss/tests/test_trussless_docker_server.py +++ b/truss/tests/test_trussless_docker_server.py @@ -1,19 +1,15 @@ -from pathlib import Path - import pytest import requests from truss.local.local_config_handler import LocalConfigHandler from truss.tests.test_testing_utilities_for_other_tests import ensure_kill_all -from truss.truss_handle import TrussHandle +from truss.truss_handle.truss_handle import TrussHandle @pytest.mark.integration -def test_docker_server_truss(): +def test_docker_server_truss(test_data_path): with ensure_kill_all(): - truss_root = Path(__file__).parent.parent.parent.resolve() / "truss" - - truss_dir = truss_root / "test_data" / "test_docker_server_truss" + truss_dir = test_data_path / "test_docker_server_truss" tr = TrussHandle(truss_dir) LocalConfigHandler.set_secret("hf_access_token", "123") diff --git a/truss/tests/test_util.py b/truss/tests/test_util.py index 42727c78b..abed75d95 100644 --- a/truss/tests/test_util.py +++ b/truss/tests/test_util.py @@ -3,7 +3,7 @@ import requests_mock -from truss.truss_config import ExternalData +from truss.base.truss_config import ExternalData from truss.util.download import download_external_data TEST_DOWNLOAD_URL = "http://example.com/some-download-url" diff --git a/truss/tests/test_validation.py b/truss/tests/test_validation.py index 27e0e02b9..8a8e1b319 100644 --- a/truss/tests/test_validation.py +++ b/truss/tests/test_validation.py @@ -1,7 +1,7 @@ import pytest -from truss.errors import ValidationError -from truss.validation import ( +from truss.base.errors import ValidationError +from truss.base.validation import ( validate_cpu_spec, validate_memory_spec, validate_secret_name, diff --git a/truss/tests/trt_llm/test_validation.py b/truss/tests/trt_llm/test_validation.py index 41011904a..0cba5b31e 100644 --- 
a/truss/tests/trt_llm/test_validation.py +++ b/truss/tests/trt_llm/test_validation.py @@ -1,7 +1,7 @@ import pytest -from truss.errors import ValidationError +from truss.base.errors import ValidationError +from truss.base.truss_spec import TrussSpec from truss.trt_llm.validation import _verify_has_class_init_arg, validate -from truss.truss_spec import TrussSpec @pytest.mark.parametrize( diff --git a/truss/tests/util/test_config_checks.py b/truss/tests/util/test_config_checks.py index bc46472bd..65154de60 100644 --- a/truss/tests/util/test_config_checks.py +++ b/truss/tests/util/test_config_checks.py @@ -1,15 +1,15 @@ from unittest.mock import patch import pytest -from truss.constants import TRTLLM_MIN_MEMORY_REQUEST_GI -from truss.truss_handle import TrussHandle -from truss.util.config_checks import ( +from truss.base.constants import TRTLLM_MIN_MEMORY_REQUEST_GI +from truss.trt_llm.config_checks import ( check_and_update_memory_for_trt_llm_builder, check_secrets_for_trt_llm_builder, ) +from truss.truss_handle.truss_handle import TrussHandle -@patch("truss.util.config_checks._is_model_public") +@patch("truss.trt_llm.config_checks._is_model_public") @pytest.mark.parametrize( "has_secret, is_model_public, expected_result", [ diff --git a/truss/tests/util/test_env_vars.py b/truss/tests/util/test_env_vars.py new file mode 100644 index 000000000..df130cfd0 --- /dev/null +++ b/truss/tests/util/test_env_vars.py @@ -0,0 +1,14 @@ +import os + +from truss.util.env_vars import override_env_vars + + +def test_override_env_vars(): + os.environ["API_KEY"] = "original_key" + + with override_env_vars({"API_KEY": "new_key", "DEBUG": "true"}): + assert os.environ["API_KEY"] == "new_key" + assert os.environ["DEBUG"] == "true" + + assert os.environ["API_KEY"] == "original_key" + assert "DEBUG" not in os.environ diff --git a/truss/tests/util/test_path.py b/truss/tests/util/test_path.py index 4764801ce..6f4e6bd56 100644 --- a/truss/tests/util/test_path.py +++ b/truss/tests/util/test_path.py @@ -3,10 +3,10 @@ import time from pathlib import Path -from truss import load from truss.contexts.image_builder.serving_image_builder import ( ServingImageBuilderContext, ) +from truss.truss_handle.build import load from truss.util import path diff --git a/truss/util/config_checks.py b/truss/trt_llm/config_checks.py similarity index 90% rename from truss/util/config_checks.py rename to truss/trt_llm/config_checks.py index a9683e353..fc6964151 100644 --- a/truss/util/config_checks.py +++ b/truss/trt_llm/config_checks.py @@ -1,11 +1,11 @@ import requests -from truss.config.trt_llm import CheckpointSource -from truss.constants import ( +from truss.base.constants import ( HF_ACCESS_TOKEN_KEY, HF_MODELS_API_URL, TRTLLM_MIN_MEMORY_REQUEST_GI, ) -from truss.truss_handle import TrussHandle +from truss.base.trt_llm_config import CheckpointSource +from truss.truss_handle.truss_handle import TrussHandle def check_secrets_for_trt_llm_builder(tr: TrussHandle) -> bool: diff --git a/truss/trt_llm/validation.py b/truss/trt_llm/validation.py index d4bd2c659..32070bf54 100644 --- a/truss/trt_llm/validation.py +++ b/truss/trt_llm/validation.py @@ -1,7 +1,7 @@ import ast -from truss.errors import ValidationError -from truss.truss_spec import TrussSpec +from truss.base.errors import ValidationError +from truss.base.truss_spec import TrussSpec def validate(truss_spec: TrussSpec): diff --git a/truss/truss_handle/__init__.py b/truss/truss_handle/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/build.py 
b/truss/truss_handle/build.py similarity index 83% rename from truss/build.py rename to truss/truss_handle/build.py index e7202c5ba..5eae8821d 100644 --- a/truss/build.py +++ b/truss/truss_handle/build.py @@ -6,12 +6,14 @@ import yaml -from truss.constants import CONFIG_FILE, TEMPLATES_DIR, TRUSS -from truss.docker import kill_containers -from truss.model_inference import infer_python_version, map_to_supported_python_version -from truss.notebook import is_notebook_or_ipython -from truss.truss_config import Build, TrussConfig -from truss.truss_handle import TrussHandle +from truss.base.constants import CONFIG_FILE, TEMPLATES_DIR +from truss.base.truss_config import ( + Build, + TrussConfig, + map_local_to_supported_python_version, +) +from truss.truss_handle.truss_handle import TrussHandle +from truss.util.notebook import is_notebook_or_ipython from truss.util.path import build_truss_target_directory, copy_tree_path logger: logging.Logger = logging.getLogger(__name__) @@ -21,7 +23,7 @@ logger.addHandler(logging.StreamHandler(sys.stdout)) -def populate_target_directory( +def _populate_target_directory( config: TrussConfig, target_directory_path: Optional[str] = None, template: str = "custom", @@ -74,13 +76,13 @@ def init( """ config = TrussConfig( model_name=model_name, - python_version=map_to_supported_python_version(infer_python_version()), + python_version=map_local_to_supported_python_version(), ) if build_config: config.build = build_config - target_directory_path = populate_target_directory( + target_directory_path = _populate_target_directory( config=config, target_directory_path=target_directory, populate_dirs=True, @@ -103,13 +105,6 @@ def load(truss_directory: str) -> TrussHandle: return TrussHandle(Path(truss_directory)) -def from_directory(*args, **kwargs): - logger.warn( - "DeprecationWarning: from_directory() is deprecated. Use load() instead." - ) - return load(*args, **kwargs) - - def cleanup() -> None: """ Cleans up .truss directory. 
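(For downstream callers, the build.py move above boils down to an import-path change; a minimal usage sketch, assuming init() keeps its target-directory and model_name parameters as these hunks suggest, and with load() returning a TrussHandle as shown:

from truss.truss_handle.build import init, load

# Scaffold a new truss; the python_version default now comes from
# map_local_to_supported_python_version() rather than
# map_to_supported_python_version(infer_python_version()).
init("./my-truss", model_name="my-model")

# Re-open an existing truss directory; load() wraps the path in a
# TrussHandle. The deprecated from_directory() alias is removed.
handle = load("./my-truss")
)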
@@ -138,7 +133,3 @@ def _update_truss_props( if requirements_file is not None: scaf.update_requirements_from_file(requirements_file) - - -def kill_all() -> None: - kill_containers({TRUSS: True}) diff --git a/truss/decorators.py b/truss/truss_handle/decorators.py similarity index 84% rename from truss/decorators.py rename to truss/truss_handle/decorators.py index bfd672cf0..a0620413b 100644 --- a/truss/decorators.py +++ b/truss/truss_handle/decorators.py @@ -1,6 +1,6 @@ def proxy_to_shadow_if_scattered(func): def wrapper(*args, **kwargs): - from truss.truss_handle import TrussHandle + from truss.truss_handle.truss_handle import TrussHandle truss_handle = args[0] if not truss_handle.is_scattered(): diff --git a/truss/truss_handle/patch/__init__.py b/truss/truss_handle/patch/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/patch/calc_patch.py b/truss/truss_handle/patch/calc_patch.py similarity index 96% rename from truss/patch/calc_patch.py rename to truss/truss_handle/patch/calc_patch.py index a66905dbc..69346e6f1 100644 --- a/truss/patch/calc_patch.py +++ b/truss/truss_handle/patch/calc_patch.py @@ -3,9 +3,10 @@ from typing import Dict, List, Optional, Set import yaml -from truss.constants import CONFIG_FILE -from truss.patch.custom_types import ChangedPaths, TrussSignature -from truss.patch.hash import file_content_hash_str + +from truss.base.constants import CONFIG_FILE +from truss.base.truss_config import ExternalData, TrussConfig +from truss.base.truss_spec import TrussSpec from truss.templates.control.control.helpers.custom_types import ( Action, ConfigPatch, @@ -25,8 +26,8 @@ from truss.templates.control.control.helpers.truss_patch.system_packages import ( system_packages_set, ) -from truss.truss_config import ExternalData, TrussConfig -from truss.truss_spec import TrussSpec +from truss.truss_handle.patch.custom_types import ChangedPaths, TrussSignature +from truss.truss_handle.patch.hash import file_content_hash_str from truss.util.path import get_ignored_relative_paths logger: logging.Logger = logging.getLogger(__name__) @@ -150,7 +151,7 @@ def _under_unsupported_patch_dir(path: str) -> bool: # or from the config file's path. In any case, we only want to calculate these # patches once. 
     has_calculated_config = True
-    config_patches = calc_config_patches(
+    config_patches = _calc_config_patches(
         truss_dir, previous_truss_signature, prev_config, new_config
     )
     if config_patches:
@@ -198,9 +199,11 @@ def _calc_changed_paths(
     root_relative_new_paths = set(
         (str(path.relative_to(root)) for path in root.glob("**/*"))
     )
-    unignored_new_paths = calc_unignored_paths(root_relative_new_paths, ignore_patterns)
+    unignored_new_paths = _calc_unignored_paths(
+        root_relative_new_paths, ignore_patterns
+    )
     previous_root_relative_paths = set(previous_root_path_content_hashes.keys())
-    unignored_prev_paths = calc_unignored_paths(
+    unignored_prev_paths = _calc_unignored_paths(
         previous_root_relative_paths, ignore_patterns
     )
@@ -224,7 +227,7 @@ def _calc_changed_paths(
     }
 
 
-def calc_unignored_paths(
+def _calc_unignored_paths(
     root_relative_paths: Set[str],
     ignore_patterns: Optional[List[str]] = None,
 ) -> Set[str]:
@@ -234,7 +237,7 @@
     return root_relative_paths - ignored_paths  # type: ignore
 
 
-def calc_config_patches(
+def _calc_config_patches(
     truss_dir: Path,
     prev_signature: TrussSignature,
     prev_config: TrussConfig,
@@ -247,7 +250,7 @@
     """
     try:
         config_patches = _calc_general_config_patches(prev_config, new_config)
-        python_requirements_patches = calc_requirements_patches(
+        python_requirements_patches = _calc_requirements_patches(
            truss_dir, prev_signature, prev_config, new_config
         )
         system_package_patches = _calc_system_packages_patches(prev_config, new_config)
@@ -340,7 +343,7 @@ def _calc_external_data_patches(
     return patches
 
 
-def calc_requirements_patches(
+def _calc_requirements_patches(
     truss_dir: Path,
     prev_signature: TrussSignature,
     prev_config: TrussConfig,
diff --git a/truss/patch/constants.py b/truss/truss_handle/patch/constants.py
similarity index 100%
rename from truss/patch/constants.py
rename to truss/truss_handle/patch/constants.py
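The renames above make the config-patch helpers private; `calc_truss_patch` (imported by `truss_handle.py` further down in this diff) stays the public entry point. A hedged sketch of the intended flow; `calc_truss_patch`'s exact signature is not shown in this diff, so the argument order here is an assumption:

```python
# Illustrative flow, not from this diff: compute patches between a saved
# signature and the current state of a (hypothetical) truss directory.
from pathlib import Path

from truss.truss_handle.patch.calc_patch import calc_truss_patch
from truss.truss_handle.patch.signature import calc_truss_signature

truss_dir = Path("./my_truss")
previous_signature = calc_truss_signature(truss_dir)
# ... edit model code or config under ./my_truss ...
patches = calc_truss_patch(truss_dir, previous_signature)  # assumed argument order
```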
+ """ + + content_hashes_by_path: Dict[str, str] + config: str + requirements_file_requirements: List[str] = field(default_factory=list) def to_dict(self) -> dict: return { - "name": self.name, - "input": self.input, + "content_hashes_by_path": self.content_hashes_by_path, + "config": self.config, + "requirements_file_requirements": self.requirements_file_requirements, } + @staticmethod + def from_dict(d) -> "TrussSignature": + requirements = [] + for req in d.get("requirements_file_requirements", []): + parsed_req = parse_requirement_string(req) + if parsed_req: + requirements.append(parsed_req) + return TrussSignature( + content_hashes_by_path=d["content_hashes_by_path"], + config=d["config"], + requirements_file_requirements=requirements, + ) + + +ChangedPaths = Dict[str, List[str]] + @dataclass class PatchDetails: diff --git a/truss/patch/dir_signature.py b/truss/truss_handle/patch/dir_signature.py similarity index 93% rename from truss/patch/dir_signature.py rename to truss/truss_handle/patch/dir_signature.py index 9bc8aad13..0ab9d12e0 100644 --- a/truss/patch/dir_signature.py +++ b/truss/truss_handle/patch/dir_signature.py @@ -1,7 +1,7 @@ from pathlib import Path from typing import Dict, List, Optional -from truss.patch.hash import file_content_hash_str +from truss.truss_handle.patch.hash import file_content_hash_str from truss.util.path import get_unignored_relative_paths_from_root diff --git a/truss/truss_handle/patch/hash.py b/truss/truss_handle/patch/hash.py new file mode 100644 index 000000000..fb4e7e51d --- /dev/null +++ b/truss/truss_handle/patch/hash.py @@ -0,0 +1,72 @@ +from pathlib import Path +from typing import Any, List, Optional + +from blake3 import blake3 + +from truss.util.path import get_unignored_relative_paths_from_root + + +def directory_content_hash( + root: Path, + ignore_patterns: Optional[List[str]] = None, +) -> str: + """Calculate content based hash of a filesystem directory. + + Rough algo: Sort all files by path, then take hash of a content stream, where + we write path hash to the stream followed by hash of content if path is a file. + Note the hash of hash aspect. + + Also, note that name of the root directory is not taken into account, only the contents + underneath. The (root) Directory will have the same hash, even if renamed. + """ + hasher = blake3() + paths = list(get_unignored_relative_paths_from_root(root, ignore_patterns)) + paths.sort() + for path in paths: + hasher.update(str_hash(str(path))) + absolute_path = root / path + if absolute_path.is_file(): + hasher.update(file_content_hash(absolute_path)) + return hasher.hexdigest() + + +def file_content_hash(file: Path) -> bytes: + """Calculate blake3 hash of file content. + Returns: binary hash of content + """ + return _file_content_hash_loaded_hasher(file).digest() + + +def file_content_hash_str(file: Path) -> str: + """Calculate blake3 hash of file content. 
diff --git a/truss/truss_handle/patch/hash.py b/truss/truss_handle/patch/hash.py
new file mode 100644
index 000000000..fb4e7e51d
--- /dev/null
+++ b/truss/truss_handle/patch/hash.py
@@ -0,0 +1,72 @@
+from pathlib import Path
+from typing import Any, List, Optional
+
+from blake3 import blake3
+
+from truss.util.path import get_unignored_relative_paths_from_root
+
+
+def directory_content_hash(
+    root: Path,
+    ignore_patterns: Optional[List[str]] = None,
+) -> str:
+    """Calculate a content-based hash of a filesystem directory.
+
+    Rough algo: sort all files by path, then take the hash of a content stream
+    to which we write the hash of each path, followed by the hash of its content
+    if the path is a file. Note the hash-of-hashes aspect.
+
+    Also note that the name of the root directory is not taken into account, only
+    the contents underneath: the root directory keeps the same hash even if renamed.
+    """
+    hasher = blake3()
+    paths = list(get_unignored_relative_paths_from_root(root, ignore_patterns))
+    paths.sort()
+    for path in paths:
+        hasher.update(str_hash(str(path)))
+        absolute_path = root / path
+        if absolute_path.is_file():
+            hasher.update(file_content_hash(absolute_path))
+    return hasher.hexdigest()
+
+
+def file_content_hash(file: Path) -> bytes:
+    """Calculate the blake3 hash of file content.
+
+    Returns: binary hash of content
+    """
+    return _file_content_hash_loaded_hasher(file).digest()
+
+
+def file_content_hash_str(file: Path) -> str:
+    """Calculate the blake3 hash of file content.
+
+    Returns: string hash of content
+    """
+    return _file_content_hash_loaded_hasher(file).hexdigest()
+
+
+def _file_content_hash_loaded_hasher(file: Path) -> Any:
+    hasher = blake3()
+    buffer = bytearray(128 * 1024)
+    mem_view = memoryview(buffer)
+    with file.open("rb") as f:
+        done = False
+        while not done:
+            n = f.readinto(mem_view)
+            if n > 0:
+                hasher.update(mem_view[:n])
+            else:
+                done = True
+    return hasher
+
+
+def str_hash(content: str) -> bytes:
+    hasher = blake3()
+    hasher.update(content.encode("utf-8"))
+    return hasher.digest()
+
+
+def str_hash_str(content: str) -> str:
+    hasher = blake3()
+    hasher.update(content.encode("utf-8"))
+    return hasher.hexdigest()
diff --git a/truss/patch/local_truss_patch_applier.py b/truss/truss_handle/patch/local_truss_patch_applier.py
similarity index 98%
rename from truss/patch/local_truss_patch_applier.py
rename to truss/truss_handle/patch/local_truss_patch_applier.py
index 1c4ec8a82..0b57991c4 100644
--- a/truss/patch/local_truss_patch_applier.py
+++ b/truss/truss_handle/patch/local_truss_patch_applier.py
@@ -3,6 +3,7 @@
 from pathlib import Path
 from typing import List
 
+from truss.base.truss_config import TrussConfig
 from truss.templates.control.control.helpers.custom_types import (
     Action,
     ModelCodePatch,
@@ -14,7 +15,6 @@
 from truss.templates.control.control.helpers.truss_patch.model_code_patch_applier import (
     apply_code_patch,
 )
-from truss.truss_config import TrussConfig
 
 
 class LocalTrussPatchApplier:
diff --git a/truss/truss_handle/patch/signature.py b/truss/truss_handle/patch/signature.py
new file mode 100644
index 000000000..a35f4921d
--- /dev/null
+++ b/truss/truss_handle/patch/signature.py
@@ -0,0 +1,22 @@
+from pathlib import Path
+from typing import List, Optional
+
+from truss.base.constants import CONFIG_FILE
+from truss.base.truss_config import TrussConfig
+from truss.truss_handle.patch.custom_types import TrussSignature
+from truss.truss_handle.patch.dir_signature import directory_content_signature
+
+
+def calc_truss_signature(
+    truss_dir: Path, ignore_patterns: Optional[List[str]] = None
+) -> TrussSignature:
+    content_signature = directory_content_signature(truss_dir, ignore_patterns)
+    config_path = truss_dir / CONFIG_FILE
+    with config_path.open("r") as config_file:
+        config = config_file.read()
+    requirements = TrussConfig.load_requirements_file_from_filepath(config_path)
+    return TrussSignature(
+        content_hashes_by_path=content_signature,
+        config=config,
+        requirements_file_requirements=requirements,
+    )
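A usage sketch for the relocated hashing and signature helpers; the directory path and ignore pattern are hypothetical:

```python
from pathlib import Path

from truss.truss_handle.patch.hash import directory_content_hash
from truss.truss_handle.patch.signature import calc_truss_signature

truss_dir = Path("./my_truss")  # hypothetical truss directory
digest = directory_content_hash(truss_dir, ignore_patterns=["*.pyc"])
signature = calc_truss_signature(truss_dir)
# Renaming truss_dir itself leaves `digest` unchanged; touching the content
# of any unignored file changes it.
```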
diff --git a/truss/truss_handle/patch/truss_dir_patch_applier.py b/truss/truss_handle/patch/truss_dir_patch_applier.py
new file mode 100644
index 000000000..873423825
--- /dev/null
+++ b/truss/truss_handle/patch/truss_dir_patch_applier.py
@@ -0,0 +1,87 @@
+import logging
+from pathlib import Path
+from typing import List
+
+from truss.base.truss_config import TrussConfig
+from truss.templates.control.control.helpers.custom_types import (
+    Action,
+    ConfigPatch,
+    EnvVarPatch,
+    ExternalDataPatch,
+    ModelCodePatch,
+    Patch,
+    PythonRequirementPatch,
+    SystemPackagePatch,
+)
+from truss.templates.control.control.helpers.errors import UnsupportedPatch
+from truss.templates.control.control.helpers.truss_patch.model_code_patch_applier import (
+    apply_code_patch,
+)
+from truss.templates.control.control.helpers.truss_patch.requirement_name_identifier import (
+    identify_requirement_name,
+    reqs_by_name,
+)
+from truss.templates.control.control.helpers.truss_patch.system_packages import (
+    system_packages_set,
+)
+
+
+class TrussDirPatchApplier:
+    """Applies patches to a truss directory.
+    This should be compatible with ModelContainerPatchApplier.
+
+    Note: This class is imported via old_build_setup.sh.jinja in the baseten
+    repository.
+    """
+
+    def __init__(self, truss_dir: Path, logger: logging.Logger) -> None:
+        self._truss_dir = truss_dir
+        self._truss_config_path = self._truss_dir / "config.yaml"
+        self._truss_config = TrussConfig.from_yaml(self._truss_config_path)
+        self._logger = logger
+
+    def __call__(self, patches: List[Patch]):
+        # Apply model code patches immediately.
+        # Aggregate config patches and apply them at the end.
+        reqs = reqs_by_name(self._truss_config.requirements)
+        pkgs = system_packages_set(self._truss_config.system_packages)
+        new_config = self._truss_config
+        for patch in patches:
+            self._logger.debug(f"Applying patch {patch.to_dict()}")
+            action = patch.body.action
+            if isinstance(patch.body, ModelCodePatch):
+                model_code_patch: ModelCodePatch = patch.body
+                model_module_dir = self._truss_dir / self._truss_config.model_module_dir
+                apply_code_patch(model_module_dir, model_code_patch, self._logger)
+                continue
+            if isinstance(patch.body, PythonRequirementPatch):
+                py_req_patch: PythonRequirementPatch = patch.body
+                req = py_req_patch.requirement
+                req_name = identify_requirement_name(req)
+                if action == Action.REMOVE:
+                    del reqs[req_name]
+                    continue
+                if action in (Action.ADD, Action.UPDATE):
+                    reqs[req_name] = req
+                    continue
+            if isinstance(patch.body, SystemPackagePatch):
+                sys_pkg_patch: SystemPackagePatch = patch.body
+                pkg = sys_pkg_patch.package
+                if action == Action.REMOVE:
+                    pkgs.remove(pkg)
+                    continue
+                if action in (Action.ADD, Action.UPDATE):
+                    pkgs.add(pkg)
+                    continue
+            # Each of EnvVarPatch and ExternalDataPatch can be expressed through
+            # an overwrite of the config, handled below.
+            if isinstance(patch.body, EnvVarPatch):
+                continue
+            if isinstance(patch.body, ExternalDataPatch):
+                continue
+            if isinstance(patch.body, ConfigPatch):
+                new_config = TrussConfig.from_dict(patch.body.config)
+                continue
+            raise UnsupportedPatch(f"Unknown patch type {patch.type}")
+
+        new_config.write_to_yaml_file(self._truss_config_path)
diff --git a/truss/readme_generator.py b/truss/truss_handle/readme_generator.py
similarity index 84%
rename from truss/readme_generator.py
rename to truss/truss_handle/readme_generator.py
index a2f383029..82f9cbdc3 100644
--- a/truss/readme_generator.py
+++ b/truss/truss_handle/readme_generator.py
@@ -1,7 +1,7 @@
 from jinja2 import Template
 
-from truss.constants import README_TEMPLATE_NAME, TEMPLATES_DIR
-from truss.truss_spec import TrussSpec
+from truss.base.constants import README_TEMPLATE_NAME, TEMPLATES_DIR
+from truss.base.truss_spec import TrussSpec
 
 
 def generate_readme(_spec: TrussSpec) -> str:
diff --git a/truss/truss_gatherer.py b/truss/truss_handle/truss_gatherer.py
similarity index 96%
rename from truss/truss_gatherer.py
rename to truss/truss_handle/truss_gatherer.py
index e8f5f3250..39760fa41 100644
--- a/truss/truss_gatherer.py
+++ b/truss/truss_handle/truss_gatherer.py
@@ -3,8 +3,8 @@
 import yaml
 
 from truss.local.local_config_handler import LocalConfigHandler
-from truss.patch.hash import str_hash_str
-from truss.truss_handle import TrussHandle
+from truss.truss_handle.patch.hash import str_hash_str
+from truss.truss_handle.truss_handle import TrussHandle
 from truss.util.path import copy_file_path, copy_tree_path, remove_tree_path
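A sketch of driving `TrussDirPatchApplier`. The `Patch` constructor arguments and the `PatchType` enum are assumptions based on the imports above; their definitions are not part of this diff.

```python
import logging
from pathlib import Path

from truss.templates.control.control.helpers.custom_types import (
    Action,
    Patch,
    PatchType,  # assumed to exist alongside the imported patch types
    PythonRequirementPatch,
)
from truss.truss_handle.patch.truss_dir_patch_applier import TrussDirPatchApplier

apply_patches = TrussDirPatchApplier(Path("./my_truss"), logging.getLogger(__name__))
apply_patches(
    [
        Patch(
            type=PatchType.PYTHON_REQUIREMENT,
            body=PythonRequirementPatch(action=Action.ADD, requirement="numpy>=1.26"),
        )
    ]
)
```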
diff --git a/truss/truss_handle.py b/truss/truss_handle/truss_handle.py
similarity index 97%
rename from truss/truss_handle.py
rename to truss/truss_handle/truss_handle.py
index d79ac9ef8..3ce1bad90 100644
--- a/truss/truss_handle.py
+++ b/truss/truss_handle/truss_handle.py
@@ -25,20 +25,44 @@
     wait_fixed,
 )
 
-from truss.constants import (
+from truss.base.constants import (
     INFERENCE_SERVER_PORT,
     TRUSS,
     TRUSS_DIR,
     TRUSS_HASH,
     TRUSS_MODIFIED_TIME,
 )
+from truss.base.custom_types import Example
+from truss.base.errors import ContainerIsDownError, ContainerNotFoundError
+from truss.base.truss_config import (
+    BaseImage,
+    ExternalData,
+    ExternalDataItem,
+    TrussConfig,
+)
+from truss.base.truss_spec import TrussSpec
+from truss.base.validation import validate_secret_name
 from truss.contexts.image_builder.serving_image_builder import (
     ServingImageBuilderContext,
 )
 from truss.contexts.local_loader.load_model_local import LoadModelLocal
-from truss.custom_types import Example, PatchDetails, PatchRequest
-from truss.decorators import proxy_to_shadow_if_scattered
-from truss.docker import (
+from truss.local.local_config_handler import LocalConfigHandler
+from truss.templates.shared.serialization import (
+    truss_msgpack_deserialize,
+    truss_msgpack_serialize,
+)
+from truss.trt_llm.validation import validate
+from truss.truss_handle.decorators import proxy_to_shadow_if_scattered
+from truss.truss_handle.patch.calc_patch import calc_truss_patch
+from truss.truss_handle.patch.custom_types import (
+    PatchDetails,
+    PatchRequest,
+    TrussSignature,
+)
+from truss.truss_handle.patch.hash import directory_content_hash
+from truss.truss_handle.patch.signature import calc_truss_signature
+from truss.truss_handle.readme_generator import generate_readme
+from truss.util.docker import (
     Docker,
     DockerStates,
     get_container_logs,
@@ -48,28 +72,13 @@
     get_urls_from_container,
     kill_containers,
 )
-from truss.errors import ContainerIsDownError, ContainerNotFoundError
-from truss.local.local_config_handler import LocalConfigHandler
-from truss.notebook import is_notebook_or_ipython
-from truss.patch.calc_patch import calc_truss_patch
-from truss.patch.custom_types import TrussSignature
-from truss.patch.hash import directory_content_hash
-from truss.patch.signature import calc_truss_signature
-from truss.readme_generator import generate_readme
-from truss.templates.shared.serialization import (
-    truss_msgpack_deserialize,
-    truss_msgpack_serialize,
-)
-from truss.trt_llm.validation import validate
-from truss.truss_config import BaseImage, ExternalData, ExternalDataItem, TrussConfig
-from truss.truss_spec import TrussSpec
+from truss.util.notebook import is_notebook_or_ipython
 from truss.util.path import (
     copy_file_path,
     copy_tree_path,
     get_max_modified_time_of_dir,
     load_trussignore_patterns,
 )
-from truss.validation import validate_secret_name
 
 logger: logging.Logger = logging.getLogger(__name__)
@@ -357,6 +366,7 @@ def predict(
         else:
             return self.server_predict(request)
 
+    # TODO(marius): can we kill this?
    def server_predict(self, request: Dict):
         """Run the prediction flow locally."""
         model = LoadModelLocal.run(self._truss_dir)
@@ -876,7 +886,7 @@ def gather(self) -> Path:
         gatherer and a handle to that truss is returned. These gathered trusses
         are cached and reused.
         """
-        from truss.truss_gatherer import gather
+        from truss.truss_handle.truss_gatherer import gather
 
         if not self.is_scattered():
             return self._truss_dir
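The net effect of the moves above for downstream code is a new import path for the handle and its loader; a before/after sketch with a hypothetical truss directory:

```python
# Old layout:
#   from truss import load
#   from truss.truss_handle import TrussHandle
# New layout, per the renames in this diff:
from truss.truss_handle.build import load
from truss.truss_handle.truss_handle import TrussHandle

handle: TrussHandle = load("./my_truss")  # load() returns a TrussHandle
```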
""" - from truss.truss_gatherer import gather + from truss.truss_handle.truss_gatherer import gather if not self.is_scattered(): return self._truss_dir diff --git a/truss/util/data_structures.py b/truss/util/data_structures.py deleted file mode 100644 index 0834dbfe6..000000000 --- a/truss/util/data_structures.py +++ /dev/null @@ -1,11 +0,0 @@ -from typing import Callable, Optional, TypeVar - -X = TypeVar("X") -Y = TypeVar("Y") - - -def transform_optional(x: Optional[X], fn: Callable[[X], Optional[Y]]) -> Optional[Y]: - if x is None: - return None - - return fn(x) diff --git a/truss/docker.py b/truss/util/docker.py similarity index 97% rename from truss/docker.py rename to truss/util/docker.py index ad8558896..dc00c664d 100644 --- a/truss/docker.py +++ b/truss/util/docker.py @@ -5,7 +5,7 @@ if TYPE_CHECKING: from python_on_whales.components.container.cli_wrapper import Container -from truss.constants import TRUSS_DIR +from truss.base.constants import TRUSS, TRUSS_DIR from truss.local.local_config_handler import LocalConfigHandler @@ -115,3 +115,7 @@ def _create_label_filters(labels: Dict) -> Dict[str, Any]: return { f"label={label_key}": label_value for label_key, label_value in labels.items() } + + +def kill_all() -> None: + kill_containers({TRUSS: True}) diff --git a/truss/util/download.py b/truss/util/download.py index 4a27520c0..9c327aded 100644 --- a/truss/util/download.py +++ b/truss/util/download.py @@ -5,7 +5,7 @@ from typing import Optional import requests -from truss.truss_config import ExternalData +from truss.base.truss_config import ExternalData B10CP_EXECUTABLE_NAME = "b10cp" BLOB_DOWNLOAD_TIMEOUT_SECS = 600 # 10 minutes diff --git a/truss/util/env_vars.py b/truss/util/env_vars.py new file mode 100644 index 000000000..d97c25254 --- /dev/null +++ b/truss/util/env_vars.py @@ -0,0 +1,41 @@ +import os +from typing import Dict, Optional + + +class override_env_vars: + """A context manager for temporarily overwriting environment variables. + + Usage: + with override_env_vars({'API_KEY': 'test_key', 'DEBUG': 'true'}): + # Environment variables are modified here + ... + # Original environment is restored here + """ + + def __init__(self, env_vars: Dict[str, str]): + """ + Args: + env_vars: Dictionary of environment variables to set + """ + self.env_vars = env_vars + self.original_vars: Dict[str, Optional[str]] = {} + + def __enter__(self): + for key in self.env_vars: + self.original_vars[key] = os.environ.get(key) + + for key, value in self.env_vars.items(): + os.environ[key] = value + + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + # Restore original environment + for key, value in self.original_vars.items(): + if value is None: + # Variable didn't exist originally + if key in os.environ: + del os.environ[key] + else: + # Restore original value + os.environ[key] = value diff --git a/truss/util/errors.py b/truss/util/errors.py deleted file mode 100644 index 1a6df56ec..000000000 --- a/truss/util/errors.py +++ /dev/null @@ -1,2 +0,0 @@ -class RemoteNetworkError(Exception): - pass diff --git a/truss/notebook.py b/truss/util/notebook.py similarity index 100% rename from truss/notebook.py rename to truss/util/notebook.py diff --git a/truss/util/requirements.py b/truss/util/requirements.py new file mode 100644 index 000000000..7073c4b27 --- /dev/null +++ b/truss/util/requirements.py @@ -0,0 +1,11 @@ +from typing import Optional + + +def parse_requirement_string(req_str: str) -> Optional[str]: + """ + Collects requirements from a list of requirement lines. 
+ """ + stripped_line = req_str.strip() + if stripped_line and not stripped_line.startswith("#"): + return stripped_line + return None