From 4e2aa32ad8242745f56e5a8b810d33c362967dad Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Fri, 29 Aug 2025 18:41:08 +0000
Subject: [PATCH 01/32] feat(client): add support for aiohttp

---
 pyproject.toml                       |  2 ++
 requirements-dev.lock                | 27 +++++++++++++++++
 requirements.lock                    | 27 +++++++++++++++++
 src/parallel/__init__.py             |  3 +-
 src/parallel/_base_client.py         | 22 ++++++++++++++
 tests/api_resources/test_task_run.py |  4 ++-
 tests/conftest.py                    | 43 ++++++++++++++++++++++++----
 7 files changed, 120 insertions(+), 8 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 48ac916..d4bdd3f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -37,6 +37,8 @@ classifiers = [
 Homepage = "https://github.com/parallel-web/parallel-sdk-python"
 Repository = "https://github.com/parallel-web/parallel-sdk-python"
 
+[project.optional-dependencies]
+aiohttp = ["aiohttp", "httpx_aiohttp>=0.1.6"]
 
 [tool.rye]
 managed = true
diff --git a/requirements-dev.lock b/requirements-dev.lock
index 830c13c..b0c2b95 100644
--- a/requirements-dev.lock
+++ b/requirements-dev.lock
@@ -10,6 +10,13 @@
 #   universal: false
 
 -e file:.
+aiohappyeyeballs==2.6.1
+    # via aiohttp
+aiohttp==3.12.8
+    # via httpx-aiohttp
+    # via parallel-web
+aiosignal==1.3.2
+    # via aiohttp
 annotated-types==0.6.0
     # via pydantic
 anyio==4.4.0
@@ -17,6 +24,10 @@ anyio==4.4.0
    # via httpx
    # via parallel-web
 argcomplete==3.1.2
    # via nox
+async-timeout==5.0.1
+    # via aiohttp
+attrs==25.3.0
+    # via aiohttp
 certifi==2023.7.22
    # via httpcore
    # via httpx
@@ -34,16 +45,23 @@ execnet==2.1.1
    # via pytest-xdist
 filelock==3.12.4
    # via virtualenv
+frozenlist==1.6.2
+    # via aiohttp
+    # via aiosignal
 h11==0.14.0
    # via httpcore
 httpcore==1.0.2
    # via httpx
 httpx==0.28.1
+    # via httpx-aiohttp
    # via parallel-web
    # via respx
+httpx-aiohttp==0.1.6
+    # via parallel-web
 idna==3.4
    # via anyio
    # via httpx
+    # via yarl
 importlib-metadata==7.0.0
 iniconfig==2.0.0
    # via pytest
@@ -51,6 +69,9 @@ markdown-it-py==3.0.0
    # via rich
 mdurl==0.1.2
    # via markdown-it-py
+multidict==6.4.4
+    # via aiohttp
+    # via yarl
 mypy==1.14.1
 mypy-extensions==1.0.0
    # via mypy
@@ -65,6 +86,9 @@ platformdirs==3.11.0
    # via virtualenv
 pluggy==1.5.0
    # via pytest
+propcache==0.3.1
+    # via aiohttp
+    # via yarl
 pydantic==2.10.3
    # via parallel-web
 pydantic-core==2.27.1
@@ -97,6 +121,7 @@ tomli==2.0.2
    # via pytest
 typing-extensions==4.12.2
    # via anyio
+    # via multidict
    # via mypy
    # via parallel-web
    # via pydantic
@@ -104,5 +129,7 @@ typing-extensions==4.12.2
    # via pyright
 virtualenv==20.24.5
    # via nox
+yarl==1.20.0
+    # via aiohttp
 zipp==3.17.0
    # via importlib-metadata
diff --git a/requirements.lock b/requirements.lock
index e432805..5151da0 100644
--- a/requirements.lock
+++ b/requirements.lock
@@ -10,11 +10,22 @@
 #   universal: false
 
 -e file:.
+aiohappyeyeballs==2.6.1
+    # via aiohttp
+aiohttp==3.12.8
+    # via httpx-aiohttp
+    # via parallel-web
+aiosignal==1.3.2
+    # via aiohttp
 annotated-types==0.6.0
    # via pydantic
 anyio==4.4.0
    # via httpx
    # via parallel-web
+async-timeout==5.0.1
+    # via aiohttp
+attrs==25.3.0
+    # via aiohttp
 certifi==2023.7.22
    # via httpcore
    # via httpx
@@ -22,15 +33,28 @@ distro==1.8.0
    # via parallel-web
 exceptiongroup==1.2.2
    # via anyio
+frozenlist==1.6.2
+    # via aiohttp
+    # via aiosignal
 h11==0.14.0
    # via httpcore
 httpcore==1.0.2
    # via httpx
 httpx==0.28.1
+    # via httpx-aiohttp
+    # via parallel-web
+httpx-aiohttp==0.1.6
    # via parallel-web
 idna==3.4
    # via anyio
    # via httpx
+    # via yarl
+multidict==6.4.4
+    # via aiohttp
+    # via yarl
+propcache==0.3.1
+    # via aiohttp
+    # via yarl
 pydantic==2.10.3
    # via parallel-web
 pydantic-core==2.27.1
@@ -40,6 +64,9 @@ sniffio==1.3.0
    # via parallel-web
 typing-extensions==4.12.2
    # via anyio
+    # via multidict
    # via parallel-web
    # via pydantic
    # via pydantic-core
+yarl==1.20.0
+    # via aiohttp
diff --git a/src/parallel/__init__.py b/src/parallel/__init__.py
index 3db210f..8cb966d 100644
--- a/src/parallel/__init__.py
+++ b/src/parallel/__init__.py
@@ -36,7 +36,7 @@
     UnprocessableEntityError,
     APIResponseValidationError,
 )
-from ._base_client import DefaultHttpxClient, DefaultAsyncHttpxClient
+from ._base_client import DefaultHttpxClient, DefaultAioHttpClient, DefaultAsyncHttpxClient
 from ._utils._logs import setup_logging as _setup_logging
 
 __all__ = [
@@ -78,6 +78,7 @@
     "DEFAULT_CONNECTION_LIMITS",
     "DefaultHttpxClient",
     "DefaultAsyncHttpxClient",
+    "DefaultAioHttpClient",
 ]
 
 if not _t.TYPE_CHECKING:
diff --git a/src/parallel/_base_client.py b/src/parallel/_base_client.py
index a63dc09..7b5ac6d 100644
--- a/src/parallel/_base_client.py
+++ b/src/parallel/_base_client.py
@@ -1289,6 +1289,24 @@ def __init__(self, **kwargs: Any) -> None:
         super().__init__(**kwargs)
 
 
+try:
+    import httpx_aiohttp
+except ImportError:
+
+    class _DefaultAioHttpClient(httpx.AsyncClient):
+        def __init__(self, **_kwargs: Any) -> None:
+            raise RuntimeError("To use the aiohttp client you must have installed the package with the `aiohttp` extra")
+else:
+
+    class _DefaultAioHttpClient(httpx_aiohttp.HttpxAiohttpClient):  # type: ignore
+        def __init__(self, **kwargs: Any) -> None:
+            kwargs.setdefault("timeout", DEFAULT_TIMEOUT)
+            kwargs.setdefault("limits", DEFAULT_CONNECTION_LIMITS)
+            kwargs.setdefault("follow_redirects", True)
+
+            super().__init__(**kwargs)
+
+
 if TYPE_CHECKING:
     DefaultAsyncHttpxClient = httpx.AsyncClient
     """An alias to `httpx.AsyncClient` that provides the same defaults that this SDK
     uses internally.
 
     This is useful because overriding the `http_client` with your own instance of
     `httpx.AsyncClient` will result in httpx's defaults being used, not ours.
""" + + DefaultAioHttpClient = httpx.AsyncClient + """An alias to `httpx.AsyncClient` that changes the default HTTP transport to `aiohttp`.""" else: DefaultAsyncHttpxClient = _DefaultAsyncHttpxClient + DefaultAioHttpClient = _DefaultAioHttpClient class AsyncHttpxClientWrapper(DefaultAsyncHttpxClient): diff --git a/tests/api_resources/test_task_run.py b/tests/api_resources/test_task_run.py index 00ff9c1..871e3fa 100644 --- a/tests/api_resources/test_task_run.py +++ b/tests/api_resources/test_task_run.py @@ -176,7 +176,9 @@ def test_path_params_result(self, client: Parallel) -> None: class TestAsyncTaskRun: - parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) @parametrize async def test_method_create(self, async_client: AsyncParallel) -> None: diff --git a/tests/conftest.py b/tests/conftest.py index 3bef126..226474b 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -6,10 +6,12 @@ import logging from typing import TYPE_CHECKING, Iterator, AsyncIterator +import httpx import pytest from pytest_asyncio import is_async_test -from parallel import Parallel, AsyncParallel +from parallel import Parallel, AsyncParallel, DefaultAioHttpClient +from parallel._utils import is_dict if TYPE_CHECKING: from _pytest.fixtures import FixtureRequest # pyright: ignore[reportPrivateImportUsage] @@ -27,6 +29,19 @@ def pytest_collection_modifyitems(items: list[pytest.Function]) -> None: for async_test in pytest_asyncio_tests: async_test.add_marker(session_scope_marker, append=False) + # We skip tests that use both the aiohttp client and respx_mock as respx_mock + # doesn't support custom transports. 
+    for item in items:
+        if "async_client" not in item.fixturenames or "respx_mock" not in item.fixturenames:
+            continue
+
+        if not hasattr(item, "callspec"):
+            continue
+
+        async_client_param = item.callspec.params.get("async_client")
+        if is_dict(async_client_param) and async_client_param.get("http_client") == "aiohttp":
+            item.add_marker(pytest.mark.skip(reason="aiohttp client is not compatible with respx_mock"))
+
 
 base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
 
@@ -45,9 +60,25 @@ def client(request: FixtureRequest) -> Iterator[Parallel]:
 
 @pytest.fixture(scope="session")
 async def async_client(request: FixtureRequest) -> AsyncIterator[AsyncParallel]:
-    strict = getattr(request, "param", True)
-    if not isinstance(strict, bool):
-        raise TypeError(f"Unexpected fixture parameter type {type(strict)}, expected {bool}")
-
-    async with AsyncParallel(base_url=base_url, api_key=api_key, _strict_response_validation=strict) as client:
+    param = getattr(request, "param", True)
+
+    # defaults
+    strict = True
+    http_client: None | httpx.AsyncClient = None
+
+    if isinstance(param, bool):
+        strict = param
+    elif is_dict(param):
+        strict = param.get("strict", True)
+        assert isinstance(strict, bool)
+
+        http_client_type = param.get("http_client", "httpx")
+        if http_client_type == "aiohttp":
+            http_client = DefaultAioHttpClient()
+    else:
+        raise TypeError(f"Unexpected fixture parameter type {type(param)}, expected bool or dict")
+
+    async with AsyncParallel(
+        base_url=base_url, api_key=api_key, _strict_response_validation=strict, http_client=http_client
+    ) as client:
         yield client

From 13b153381e9b7c998a7ebef878518222678dfa83 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Tue, 24 Jun 2025 04:11:44 +0000
Subject: [PATCH 02/32] chore(tests): skip some failing tests on the latest python versions

---
 tests/test_client.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/tests/test_client.py b/tests/test_client.py
index 00a03ea..c59e847 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -191,6 +191,7 @@ def test_copy_signature(self) -> None:
             copy_param = copy_signature.parameters.get(name)
             assert copy_param is not None, f"copy() signature is missing the {name} param"
 
+    @pytest.mark.skipif(sys.version_info >= (3, 10), reason="fails because of a memory leak that started from 3.12")
     def test_copy_build_request(self) -> None:
         options = FinalRequestOptions(method="get", url="/foo")
 
@@ -995,6 +996,7 @@ def test_copy_signature(self) -> None:
             copy_param = copy_signature.parameters.get(name)
             assert copy_param is not None, f"copy() signature is missing the {name} param"
 
+    @pytest.mark.skipif(sys.version_info >= (3, 10), reason="fails because of a memory leak that started from 3.12")
     def test_copy_build_request(self) -> None:
         options = FinalRequestOptions(method="get", url="/foo")

From 90d26a5e8db8bd6a27f9bbc96595da87bd7ea0f3 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Sun, 31 Aug 2025 20:35:02 +0000
Subject: [PATCH 03/32] chore(internal): version bump

From 310076b2f8a75ed29ba2a1fae0f6e840ec43bb5b Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Fri, 27 Jun 2025 02:43:34 +0000
Subject: [PATCH 04/32] =?UTF-8?q?fix(ci):=20release-doctor=20=E2=80=94=20r?= =?UTF-8?q?eport=20correct=20token=20name?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
bin/check-release-environment | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bin/check-release-environment b/bin/check-release-environment index ff47020..b845b0f 100644 --- a/bin/check-release-environment +++ b/bin/check-release-environment @@ -3,7 +3,7 @@ errors=() if [ -z "${PYPI_TOKEN}" ]; then - errors+=("The PARALLEL_PYPI_TOKEN secret has not been set. Please set it in either this repository's secrets or your organization secrets.") + errors+=("The PYPI_TOKEN secret has not been set. Please set it in either this repository's secrets or your organization secrets.") fi lenErrors=${#errors[@]} From d55fbea54037d2d833ecc281cbddbc8d6700d24d Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sat, 28 Jun 2025 08:42:13 +0000 Subject: [PATCH 05/32] chore(ci): only run for pushes and fork pull requests --- .github/workflows/ci.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6ffb787..ed64a05 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -17,6 +17,7 @@ jobs: timeout-minutes: 10 name: lint runs-on: ${{ github.repository == 'stainless-sdks/parallel-sdk-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} + if: github.event_name == 'push' || github.event.pull_request.head.repo.fork steps: - uses: actions/checkout@v4 @@ -42,6 +43,7 @@ jobs: contents: read id-token: write runs-on: depot-ubuntu-24.04 + if: github.event_name == 'push' || github.event.pull_request.head.repo.fork steps: - uses: actions/checkout@v4 @@ -62,6 +64,7 @@ jobs: timeout-minutes: 10 name: test runs-on: ${{ github.repository == 'stainless-sdks/parallel-sdk-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} + if: github.event_name == 'push' || github.event.pull_request.head.repo.fork steps: - uses: actions/checkout@v4 From 99d37f657a249987ccae60dd0e62f296ab0c1d85 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 30 Jun 2025 02:26:32 +0000 Subject: [PATCH 06/32] fix(ci): correct conditional --- .github/workflows/ci.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ed64a05..cfaba2e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -36,14 +36,13 @@ jobs: run: ./scripts/lint upload: - if: github.repository == 'stainless-sdks/parallel-sdk-python' + if: github.repository == 'stainless-sdks/parallel-sdk-python' && (github.event_name == 'push' || github.event.pull_request.head.repo.fork) timeout-minutes: 10 name: upload permissions: contents: read id-token: write runs-on: depot-ubuntu-24.04 - if: github.event_name == 'push' || github.event.pull_request.head.repo.fork steps: - uses: actions/checkout@v4 From 40dbd3b7d5becf0fe54b62a4acd8696957380053 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 2 Jul 2025 05:15:58 +0000 Subject: [PATCH 07/32] chore(ci): change upload type --- .github/workflows/ci.yml | 18 ++++++++++++++++-- scripts/utils/upload-artifact.sh | 12 +++++++----- 2 files changed, 23 insertions(+), 7 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index cfaba2e..4958ade 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -35,10 +35,10 @@ jobs: - name: Run lints run: ./scripts/lint - upload: + build: if: github.repository == 'stainless-sdks/parallel-sdk-python' && 
(github.event_name == 'push' || github.event.pull_request.head.repo.fork) timeout-minutes: 10 - name: upload + name: build permissions: contents: read id-token: write @@ -46,6 +46,20 @@ jobs: steps: - uses: actions/checkout@v4 + - name: Install Rye + run: | + curl -sSf https://rye.astral.sh/get | bash + echo "$HOME/.rye/shims" >> $GITHUB_PATH + env: + RYE_VERSION: '0.44.0' + RYE_INSTALL_OPTION: '--yes' + + - name: Install dependencies + run: rye sync --all-features + + - name: Run build + run: rye build + - name: Get GitHub OIDC Token id: github-oidc uses: actions/github-script@v6 diff --git a/scripts/utils/upload-artifact.sh b/scripts/utils/upload-artifact.sh index d594fab..f3f256b 100755 --- a/scripts/utils/upload-artifact.sh +++ b/scripts/utils/upload-artifact.sh @@ -1,7 +1,9 @@ #!/usr/bin/env bash set -exuo pipefail -RESPONSE=$(curl -X POST "$URL" \ +FILENAME=$(basename dist/*.whl) + +RESPONSE=$(curl -X POST "$URL?filename=$FILENAME" \ -H "Authorization: Bearer $AUTH" \ -H "Content-Type: application/json") @@ -12,13 +14,13 @@ if [[ "$SIGNED_URL" == "null" ]]; then exit 1 fi -UPLOAD_RESPONSE=$(tar -cz . | curl -v -X PUT \ - -H "Content-Type: application/gzip" \ - --data-binary @- "$SIGNED_URL" 2>&1) +UPLOAD_RESPONSE=$(curl -v -X PUT \ + -H "Content-Type: binary/octet-stream" \ + --data-binary "@dist/$FILENAME" "$SIGNED_URL" 2>&1) if echo "$UPLOAD_RESPONSE" | grep -q "HTTP/[0-9.]* 200"; then echo -e "\033[32mUploaded build to Stainless storage.\033[0m" - echo -e "\033[32mInstallation: pip install 'https://pkg.stainless.com/s/parallel-sdk-python/$SHA'\033[0m" + echo -e "\033[32mInstallation: pip install 'https://pkg.stainless.com/s/parallel-sdk-python/$SHA/$FILENAME'\033[0m" else echo -e "\033[31mFailed to upload artifact.\033[0m" exit 1 From 47ea68bd44ad52ac1c18e7215c013f408914890c Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 8 Jul 2025 02:10:27 +0000 Subject: [PATCH 08/32] chore(internal): codegen related update --- requirements-dev.lock | 2 +- requirements.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements-dev.lock b/requirements-dev.lock index b0c2b95..f4c08cb 100644 --- a/requirements-dev.lock +++ b/requirements-dev.lock @@ -56,7 +56,7 @@ httpx==0.28.1 # via httpx-aiohttp # via parallel-web # via respx -httpx-aiohttp==0.1.6 +httpx-aiohttp==0.1.8 # via parallel-web idna==3.4 # via anyio diff --git a/requirements.lock b/requirements.lock index 5151da0..ff91f58 100644 --- a/requirements.lock +++ b/requirements.lock @@ -43,7 +43,7 @@ httpcore==1.0.2 httpx==0.28.1 # via httpx-aiohttp # via parallel-web -httpx-aiohttp==0.1.6 +httpx-aiohttp==0.1.8 # via parallel-web idna==3.4 # via anyio From 818f1ddb3ba1be6bfdb9aee1322d6a3d8a98667a Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 9 Jul 2025 02:26:34 +0000 Subject: [PATCH 09/32] chore(internal): bump pinned h11 dep --- requirements-dev.lock | 4 ++-- requirements.lock | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements-dev.lock b/requirements-dev.lock index f4c08cb..c06ee41 100644 --- a/requirements-dev.lock +++ b/requirements-dev.lock @@ -48,9 +48,9 @@ filelock==3.12.4 frozenlist==1.6.2 # via aiohttp # via aiosignal -h11==0.14.0 +h11==0.16.0 # via httpcore -httpcore==1.0.2 +httpcore==1.0.9 # via httpx httpx==0.28.1 # via httpx-aiohttp diff --git a/requirements.lock b/requirements.lock index ff91f58..777669a 100644 --- 
a/requirements.lock +++ b/requirements.lock @@ -36,9 +36,9 @@ exceptiongroup==1.2.2 frozenlist==1.6.2 # via aiohttp # via aiosignal -h11==0.14.0 +h11==0.16.0 # via httpcore -httpcore==1.0.2 +httpcore==1.0.9 # via httpx httpx==0.28.1 # via httpx-aiohttp From 6fa54c42a17f5e731f5e97214f0212a0828d3cb8 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 9 Jul 2025 02:45:41 +0000 Subject: [PATCH 10/32] chore(package): mark python 3.13 as supported --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index d4bdd3f..4c1808d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,6 +24,7 @@ classifiers = [ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Operating System :: OS Independent", "Operating System :: POSIX", "Operating System :: MacOS", From c9a23002be2d78a11b5c1b7c901f4ddb32663393 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 10 Jul 2025 02:41:44 +0000 Subject: [PATCH 11/32] fix(parsing): correctly handle nested discriminated unions --- src/parallel/_models.py | 13 +++++++----- tests/test_models.py | 45 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 53 insertions(+), 5 deletions(-) diff --git a/src/parallel/_models.py b/src/parallel/_models.py index 4f21498..528d568 100644 --- a/src/parallel/_models.py +++ b/src/parallel/_models.py @@ -2,9 +2,10 @@ import os import inspect -from typing import TYPE_CHECKING, Any, Type, Union, Generic, TypeVar, Callable, cast +from typing import TYPE_CHECKING, Any, Type, Union, Generic, TypeVar, Callable, Optional, cast from datetime import date, datetime from typing_extensions import ( + List, Unpack, Literal, ClassVar, @@ -366,7 +367,7 @@ def _construct_field(value: object, field: FieldInfo, key: str) -> object: if type_ is None: raise RuntimeError(f"Unexpected field type is None for {key}") - return construct_type(value=value, type_=type_) + return construct_type(value=value, type_=type_, metadata=getattr(field, "metadata", None)) def is_basemodel(type_: type) -> bool: @@ -420,7 +421,7 @@ def construct_type_unchecked(*, value: object, type_: type[_T]) -> _T: return cast(_T, construct_type(value=value, type_=type_)) -def construct_type(*, value: object, type_: object) -> object: +def construct_type(*, value: object, type_: object, metadata: Optional[List[Any]] = None) -> object: """Loose coercion to the expected type with construction of nested values. If the given value does not match the expected type then it is returned as-is. @@ -438,8 +439,10 @@ def construct_type(*, value: object, type_: object) -> object: type_ = type_.__value__ # type: ignore[unreachable] # unwrap `Annotated[T, ...]` -> `T` - if is_annotated_type(type_): - meta: tuple[Any, ...] = get_args(type_)[1:] + if metadata is not None: + meta: tuple[Any, ...] 
= tuple(metadata) + elif is_annotated_type(type_): + meta = get_args(type_)[1:] type_ = extract_type_arg(type_, 0) else: meta = tuple() diff --git a/tests/test_models.py b/tests/test_models.py index 8ff0e06..d441722 100644 --- a/tests/test_models.py +++ b/tests/test_models.py @@ -889,3 +889,48 @@ class ModelB(BaseModel): ) assert isinstance(m, ModelB) + + +def test_nested_discriminated_union() -> None: + class InnerType1(BaseModel): + type: Literal["type_1"] + + class InnerModel(BaseModel): + inner_value: str + + class InnerType2(BaseModel): + type: Literal["type_2"] + some_inner_model: InnerModel + + class Type1(BaseModel): + base_type: Literal["base_type_1"] + value: Annotated[ + Union[ + InnerType1, + InnerType2, + ], + PropertyInfo(discriminator="type"), + ] + + class Type2(BaseModel): + base_type: Literal["base_type_2"] + + T = Annotated[ + Union[ + Type1, + Type2, + ], + PropertyInfo(discriminator="base_type"), + ] + + model = construct_type( + type_=T, + value={ + "base_type": "base_type_1", + "value": { + "type": "type_2", + }, + }, + ) + assert isinstance(model, Type1) + assert isinstance(model.value, InnerType2) From 2bf10b073ab7e015b08c106d265a9091752df51a Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 11 Jul 2025 02:59:44 +0000 Subject: [PATCH 12/32] chore(readme): fix version rendering on pypi --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 470a586..572ed6c 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,7 @@ # Parallel Python API library -[![PyPI version]()](https://pypi.org/project/parallel-web/) + +[![PyPI version](https://img.shields.io/pypi/v/parallel-web.svg?label=pypi%20(stable))](https://pypi.org/project/parallel-web/) The Parallel Python library provides convenient access to the Parallel REST API from any Python 3.8+ application. 
The library includes type definitions for all request params and response fields, From f103b4a72fc25f6a8dd1bda0c8d040aba1f527d1 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sat, 12 Jul 2025 02:06:27 +0000 Subject: [PATCH 13/32] fix(client): don't send Content-Type header on GET requests --- pyproject.toml | 2 +- src/parallel/_base_client.py | 11 +++++++++-- tests/test_client.py | 4 ++-- 3 files changed, 12 insertions(+), 5 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 4c1808d..3d0e9e6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,7 +39,7 @@ Homepage = "https://github.com/parallel-web/parallel-sdk-python" Repository = "https://github.com/parallel-web/parallel-sdk-python" [project.optional-dependencies] -aiohttp = ["aiohttp", "httpx_aiohttp>=0.1.6"] +aiohttp = ["aiohttp", "httpx_aiohttp>=0.1.8"] [tool.rye] managed = true diff --git a/src/parallel/_base_client.py b/src/parallel/_base_client.py index 7b5ac6d..0df16f7 100644 --- a/src/parallel/_base_client.py +++ b/src/parallel/_base_client.py @@ -529,6 +529,15 @@ def _build_request( # work around https://github.com/encode/httpx/discussions/2880 kwargs["extensions"] = {"sni_hostname": prepared_url.host.replace("_", "-")} + is_body_allowed = options.method.lower() != "get" + + if is_body_allowed: + kwargs["json"] = json_data if is_given(json_data) else None + kwargs["files"] = files + else: + headers.pop("Content-Type", None) + kwargs.pop("data", None) + # TODO: report this error to httpx return self._client.build_request( # pyright: ignore[reportUnknownMemberType] headers=headers, @@ -540,8 +549,6 @@ def _build_request( # so that passing a `TypedDict` doesn't cause an error. # https://github.com/microsoft/pyright/issues/3526#event-6715453066 params=self.qs.stringify(cast(Mapping[str, Any], params)) if params else None, - json=json_data if is_given(json_data) else None, - files=files, **kwargs, ) diff --git a/tests/test_client.py b/tests/test_client.py index c59e847..878365f 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -464,7 +464,7 @@ def test_request_extra_query(self) -> None: def test_multipart_repeating_array(self, client: Parallel) -> None: request = client._build_request( FinalRequestOptions.construct( - method="get", + method="post", url="/foo", headers={"Content-Type": "multipart/form-data; boundary=6b7ba517decee4a450543ea6ae821c82"}, json_data={"array": ["foo", "bar"]}, @@ -1269,7 +1269,7 @@ def test_request_extra_query(self) -> None: def test_multipart_repeating_array(self, async_client: AsyncParallel) -> None: request = async_client._build_request( FinalRequestOptions.construct( - method="get", + method="post", url="/foo", headers={"Content-Type": "multipart/form-data; boundary=6b7ba517decee4a450543ea6ae821c82"}, json_data={"array": ["foo", "bar"]}, From 3a102e9a05476e4d28c0ac386cd156cc0fe8b5cf Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sun, 31 Aug 2025 20:38:56 +0000 Subject: [PATCH 14/32] feat: clean up environment call outs --- README.md | 1 - 1 file changed, 1 deletion(-) diff --git a/README.md b/README.md index 572ed6c..69a617d 100644 --- a/README.md +++ b/README.md @@ -97,7 +97,6 @@ instance of the provided output type. The parsed output can be accessed via the `parsed` property on the output field of the response. 
```python -import os import asyncio from parallel import AsyncParallel from pydantic import BaseModel From ab434aa7bd088fc16279255ae36138ab6dff0730 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 22 Jul 2025 02:05:56 +0000 Subject: [PATCH 15/32] fix(parsing): ignore empty metadata --- src/parallel/_models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/parallel/_models.py b/src/parallel/_models.py index 528d568..ffcbf67 100644 --- a/src/parallel/_models.py +++ b/src/parallel/_models.py @@ -439,7 +439,7 @@ def construct_type(*, value: object, type_: object, metadata: Optional[List[Any] type_ = type_.__value__ # type: ignore[unreachable] # unwrap `Annotated[T, ...]` -> `T` - if metadata is not None: + if metadata is not None and len(metadata) > 0: meta: tuple[Any, ...] = tuple(metadata) elif is_annotated_type(type_): meta = get_args(type_)[1:] From 85f5cd4191ae168ed443e78a2c7bd747d51404b3 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 23 Jul 2025 02:08:26 +0000 Subject: [PATCH 16/32] fix(parsing): parse extra field types --- src/parallel/_models.py | 25 +++++++++++++++++++++++-- tests/test_models.py | 29 ++++++++++++++++++++++++++++- 2 files changed, 51 insertions(+), 3 deletions(-) diff --git a/src/parallel/_models.py b/src/parallel/_models.py index ffcbf67..b8387ce 100644 --- a/src/parallel/_models.py +++ b/src/parallel/_models.py @@ -208,14 +208,18 @@ def construct( # pyright: ignore[reportIncompatibleMethodOverride] else: fields_values[name] = field_get_default(field) + extra_field_type = _get_extra_fields_type(__cls) + _extra = {} for key, value in values.items(): if key not in model_fields: + parsed = construct_type(value=value, type_=extra_field_type) if extra_field_type is not None else value + if PYDANTIC_V2: - _extra[key] = value + _extra[key] = parsed else: _fields_set.add(key) - fields_values[key] = value + fields_values[key] = parsed object.__setattr__(m, "__dict__", fields_values) @@ -370,6 +374,23 @@ def _construct_field(value: object, field: FieldInfo, key: str) -> object: return construct_type(value=value, type_=type_, metadata=getattr(field, "metadata", None)) +def _get_extra_fields_type(cls: type[pydantic.BaseModel]) -> type | None: + if not PYDANTIC_V2: + # TODO + return None + + schema = cls.__pydantic_core_schema__ + if schema["type"] == "model": + fields = schema["schema"] + if fields["type"] == "model-fields": + extras = fields.get("extras_schema") + if extras and "cls" in extras: + # mypy can't narrow the type + return extras["cls"] # type: ignore[no-any-return] + + return None + + def is_basemodel(type_: type) -> bool: """Returns whether or not the given type is either a `BaseModel` or a union of `BaseModel`""" if is_union(type_): diff --git a/tests/test_models.py b/tests/test_models.py index d441722..7be7c56 100644 --- a/tests/test_models.py +++ b/tests/test_models.py @@ -1,5 +1,5 @@ import json -from typing import Any, Dict, List, Union, Optional, cast +from typing import TYPE_CHECKING, Any, Dict, List, Union, Optional, cast from datetime import datetime, timezone from typing_extensions import Literal, Annotated, TypeAliasType @@ -934,3 +934,30 @@ class Type2(BaseModel): ) assert isinstance(model, Type1) assert isinstance(model.value, InnerType2) + + +@pytest.mark.skipif(not PYDANTIC_V2, reason="this is only supported in pydantic v2 for now") +def test_extra_properties() -> None: + class 
Item(BaseModel): + prop: int + + class Model(BaseModel): + __pydantic_extra__: Dict[str, Item] = Field(init=False) # pyright: ignore[reportIncompatibleVariableOverride] + + other: str + + if TYPE_CHECKING: + + def __getattr__(self, attr: str) -> Item: ... + + model = construct_type( + type_=Model, + value={ + "a": {"prop": 1}, + "other": "foo", + }, + ) + assert isinstance(model, Model) + assert model.a.prop == 1 + assert isinstance(model.a, Item) + assert model.other == "foo" From acdeda2f1f95f5bade2da52d5a2aa8560e71369d Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 25 Jul 2025 03:01:52 +0000 Subject: [PATCH 17/32] chore(project): add settings file for vscode --- .gitignore | 1 - .vscode/settings.json | 3 +++ 2 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 .vscode/settings.json diff --git a/.gitignore b/.gitignore index 8779740..95ceb18 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,4 @@ .prism.log -.vscode _dev __pycache__ diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..5b01030 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "python.analysis.importFormat": "relative", +} From ec0c2cf30bd24524567232ad0f661facda124203 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 31 Jul 2025 04:09:13 +0000 Subject: [PATCH 18/32] feat(client): support file upload requests --- src/parallel/_base_client.py | 5 ++++- src/parallel/_files.py | 8 ++++---- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/src/parallel/_base_client.py b/src/parallel/_base_client.py index 0df16f7..52ecfa3 100644 --- a/src/parallel/_base_client.py +++ b/src/parallel/_base_client.py @@ -532,7 +532,10 @@ def _build_request( is_body_allowed = options.method.lower() != "get" if is_body_allowed: - kwargs["json"] = json_data if is_given(json_data) else None + if isinstance(json_data, bytes): + kwargs["content"] = json_data + else: + kwargs["json"] = json_data if is_given(json_data) else None kwargs["files"] = files else: headers.pop("Content-Type", None) diff --git a/src/parallel/_files.py b/src/parallel/_files.py index 715cc20..cc14c14 100644 --- a/src/parallel/_files.py +++ b/src/parallel/_files.py @@ -69,12 +69,12 @@ def _transform_file(file: FileTypes) -> HttpxFileTypes: return file if is_tuple_t(file): - return (file[0], _read_file_content(file[1]), *file[2:]) + return (file[0], read_file_content(file[1]), *file[2:]) raise TypeError(f"Expected file types input to be a FileContent type or to be a tuple") -def _read_file_content(file: FileContent) -> HttpxFileContent: +def read_file_content(file: FileContent) -> HttpxFileContent: if isinstance(file, os.PathLike): return pathlib.Path(file).read_bytes() return file @@ -111,12 +111,12 @@ async def _async_transform_file(file: FileTypes) -> HttpxFileTypes: return file if is_tuple_t(file): - return (file[0], await _async_read_file_content(file[1]), *file[2:]) + return (file[0], await async_read_file_content(file[1]), *file[2:]) raise TypeError(f"Expected file types input to be a FileContent type or to be a tuple") -async def _async_read_file_content(file: FileContent) -> HttpxFileContent: +async def async_read_file_content(file: FileContent) -> HttpxFileContent: if isinstance(file, os.PathLike): return await anyio.Path(file).read_bytes() From 4e5dbda03907f45ac31d18d89714e86f26e79866 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" 
<142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 6 Aug 2025 04:29:28 +0000 Subject: [PATCH 19/32] chore(internal): fix ruff target version --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 3d0e9e6..1caa2f9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -159,7 +159,7 @@ reportPrivateUsage = false [tool.ruff] line-length = 120 output-format = "grouped" -target-version = "py37" +target-version = "py38" [tool.ruff.format] docstring-code-format = true From 56b5aab87a833c27b8e1a2bc7c4bf2169ee281a8 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sat, 9 Aug 2025 03:28:54 +0000 Subject: [PATCH 20/32] chore: update @stainless-api/prism-cli to v5.15.0 --- scripts/mock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/mock b/scripts/mock index d2814ae..0b28f6e 100755 --- a/scripts/mock +++ b/scripts/mock @@ -21,7 +21,7 @@ echo "==> Starting mock server with URL ${URL}" # Run prism mock on the given spec if [ "$1" == "--daemon" ]; then - npm exec --package=@stainless-api/prism-cli@5.8.5 -- prism mock "$URL" &> .prism.log & + npm exec --package=@stainless-api/prism-cli@5.15.0 -- prism mock "$URL" &> .prism.log & # Wait for server to come online echo -n "Waiting for server" @@ -37,5 +37,5 @@ if [ "$1" == "--daemon" ]; then echo else - npm exec --package=@stainless-api/prism-cli@5.8.5 -- prism mock "$URL" + npm exec --package=@stainless-api/prism-cli@5.15.0 -- prism mock "$URL" fi From 631b045ae2f138e4c8098fafd9466451d61ca82a Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sat, 9 Aug 2025 03:30:52 +0000 Subject: [PATCH 21/32] chore(internal): update comment in script --- scripts/test | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/test b/scripts/test index 2b87845..dbeda2d 100755 --- a/scripts/test +++ b/scripts/test @@ -43,7 +43,7 @@ elif ! 
prism_is_running ; then echo -e "To run the server, pass in the path or url of your OpenAPI" echo -e "spec to the prism command:" echo - echo -e " \$ ${YELLOW}npm exec --package=@stoplight/prism-cli@~5.3.2 -- prism mock path/to/your.openapi.yml${NC}" + echo -e " \$ ${YELLOW}npm exec --package=@stainless-api/prism-cli@5.15.0 -- prism mock path/to/your.openapi.yml${NC}" echo exit 1 From 3d90e196184e540242fb310cc55b0219d20dff45 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 22 Aug 2025 04:27:58 +0000 Subject: [PATCH 22/32] chore: update github action --- .github/workflows/ci.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4958ade..9a3cde3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -36,7 +36,7 @@ jobs: run: ./scripts/lint build: - if: github.repository == 'stainless-sdks/parallel-sdk-python' && (github.event_name == 'push' || github.event.pull_request.head.repo.fork) + if: github.event_name == 'push' || github.event.pull_request.head.repo.fork timeout-minutes: 10 name: build permissions: @@ -61,12 +61,14 @@ jobs: run: rye build - name: Get GitHub OIDC Token + if: github.repository == 'stainless-sdks/parallel-sdk-python' id: github-oidc uses: actions/github-script@v6 with: script: core.setOutput('github_token', await core.getIDToken()); - name: Upload tarball + if: github.repository == 'stainless-sdks/parallel-sdk-python' env: URL: https://pkg.stainless.com/s AUTH: ${{ steps.github-oidc.outputs.github_token }} From a90da34910585453eac918a5f273749c00d2f743 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 26 Aug 2025 03:39:45 +0000 Subject: [PATCH 23/32] chore(internal): change ci workflow machines --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 9a3cde3..d94f786 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -42,7 +42,7 @@ jobs: permissions: contents: read id-token: write - runs-on: depot-ubuntu-24.04 + runs-on: ${{ github.repository == 'stainless-sdks/parallel-sdk-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} steps: - uses: actions/checkout@v4 From 2ea196d5d4c7881e61dc848a1387770b4e27e304 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 27 Aug 2025 04:53:24 +0000 Subject: [PATCH 24/32] fix: avoid newer type syntax --- src/parallel/_models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/parallel/_models.py b/src/parallel/_models.py index b8387ce..92f7c10 100644 --- a/src/parallel/_models.py +++ b/src/parallel/_models.py @@ -304,7 +304,7 @@ def model_dump( exclude_none=exclude_none, ) - return cast(dict[str, Any], json_safe(dumped)) if mode == "json" else dumped + return cast("dict[str, Any]", json_safe(dumped)) if mode == "json" else dumped @override def model_dump_json( From 8d2fb29b5d80a2fa9ee81a6f9510134fb7bab908 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 27 Aug 2025 05:09:58 +0000 Subject: [PATCH 25/32] chore(internal): update pyright exclude list --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 1caa2f9..6d9f357 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -148,6 +148,7 @@ 
exclude = [ "_dev", ".venv", ".nox", + ".git", ] reportImplicitOverride = true From cb9a7a905ca4a4a9ba35e540f6c47a8bf89c87d2 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sat, 30 Aug 2025 02:48:58 +0000 Subject: [PATCH 26/32] chore(internal): add Sequence related utils --- src/parallel/_types.py | 36 ++++++++++++++++++++++++++++++++- src/parallel/_utils/__init__.py | 1 + src/parallel/_utils/_typing.py | 5 +++++ tests/utils.py | 10 ++++++++- 4 files changed, 50 insertions(+), 2 deletions(-) diff --git a/src/parallel/_types.py b/src/parallel/_types.py index 0c7a10e..ea5c561 100644 --- a/src/parallel/_types.py +++ b/src/parallel/_types.py @@ -13,10 +13,21 @@ Mapping, TypeVar, Callable, + Iterator, Optional, Sequence, ) -from typing_extensions import Set, Literal, Protocol, TypeAlias, TypedDict, override, runtime_checkable +from typing_extensions import ( + Set, + Literal, + Protocol, + TypeAlias, + TypedDict, + SupportsIndex, + overload, + override, + runtime_checkable, +) import httpx import pydantic @@ -217,3 +228,26 @@ class _GenericAlias(Protocol): class HttpxSendArgs(TypedDict, total=False): auth: httpx.Auth follow_redirects: bool + + +_T_co = TypeVar("_T_co", covariant=True) + + +if TYPE_CHECKING: + # This works because str.__contains__ does not accept object (either in typeshed or at runtime) + # https://github.com/hauntsaninja/useful_types/blob/5e9710f3875107d068e7679fd7fec9cfab0eff3b/useful_types/__init__.py#L285 + class SequenceNotStr(Protocol[_T_co]): + @overload + def __getitem__(self, index: SupportsIndex, /) -> _T_co: ... + @overload + def __getitem__(self, index: slice, /) -> Sequence[_T_co]: ... + def __contains__(self, value: object, /) -> bool: ... + def __len__(self) -> int: ... + def __iter__(self) -> Iterator[_T_co]: ... + def index(self, value: Any, start: int = 0, stop: int = ..., /) -> int: ... + def count(self, value: Any, /) -> int: ... + def __reversed__(self) -> Iterator[_T_co]: ... 
+else: + # just point this to a normal `Sequence` at runtime to avoid having to special case + # deserializing our custom sequence type + SequenceNotStr = Sequence diff --git a/src/parallel/_utils/__init__.py b/src/parallel/_utils/__init__.py index 1db99fa..e0e14e0 100644 --- a/src/parallel/_utils/__init__.py +++ b/src/parallel/_utils/__init__.py @@ -39,6 +39,7 @@ extract_type_arg as extract_type_arg, is_iterable_type as is_iterable_type, is_required_type as is_required_type, + is_sequence_type as is_sequence_type, is_annotated_type as is_annotated_type, is_type_alias_type as is_type_alias_type, strip_annotated_type as strip_annotated_type, diff --git a/src/parallel/_utils/_typing.py b/src/parallel/_utils/_typing.py index 1bac954..845cd6b 100644 --- a/src/parallel/_utils/_typing.py +++ b/src/parallel/_utils/_typing.py @@ -26,6 +26,11 @@ def is_list_type(typ: type) -> bool: return (get_origin(typ) or typ) == list +def is_sequence_type(typ: type) -> bool: + origin = get_origin(typ) or typ + return origin == typing_extensions.Sequence or origin == typing.Sequence or origin == _c_abc.Sequence + + def is_iterable_type(typ: type) -> bool: """If the given type is `typing.Iterable[T]`""" origin = get_origin(typ) or typ diff --git a/tests/utils.py b/tests/utils.py index 08157de..4181132 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -4,7 +4,7 @@ import inspect import traceback import contextlib -from typing import Any, TypeVar, Iterator, cast +from typing import Any, TypeVar, Iterator, Sequence, cast from datetime import date, datetime from typing_extensions import Literal, get_args, get_origin, assert_type @@ -15,6 +15,7 @@ is_list_type, is_union_type, extract_type_arg, + is_sequence_type, is_annotated_type, is_type_alias_type, ) @@ -71,6 +72,13 @@ def assert_matches_type( if is_list_type(type_): return _assert_list_type(type_, value) + if is_sequence_type(type_): + assert isinstance(value, Sequence) + inner_type = get_args(type_)[0] + for entry in value: # type: ignore + assert_type(inner_type, entry) # type: ignore + return + if origin == str: assert isinstance(value, str) elif origin == int: From 43266988c2123fa1aff00bf0b62c355b0c2bf04e Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sun, 31 Aug 2025 20:31:47 +0000 Subject: [PATCH 27/32] feat(api): update via SDK Studio --- .github/workflows/detect-breaking-changes.yml | 42 ++ .stats.yml | 8 +- README.md | 60 +- api.md | 78 ++- pyproject.toml | 1 + requirements-dev.lock | 3 + scripts/detect-breaking-changes | 19 + scripts/detect-breaking-changes.py | 79 +++ src/parallel/_client.py | 9 + src/parallel/_compat.py | 1 + src/parallel/lib/_pydantic.py | 1 + src/parallel/resources/__init__.py | 14 + src/parallel/resources/beta/__init__.py | 47 ++ src/parallel/resources/beta/beta.py | 301 +++++++++ src/parallel/resources/beta/task_group.py | 632 ++++++++++++++++++ src/parallel/resources/beta/task_run.py | 499 ++++++++++++++ src/parallel/resources/task_run.py | 65 +- src/parallel/types/__init__.py | 15 + src/parallel/types/auto_schema.py | 13 + src/parallel/types/auto_schema_param.py | 12 + src/parallel/types/beta/__init__.py | 30 + src/parallel/types/beta/beta_run_input.py | 63 ++ .../types/beta/beta_run_input_param.py | 65 ++ src/parallel/types/beta/beta_search_params.py | 47 ++ .../types/beta/beta_task_run_result.py | 74 ++ src/parallel/types/beta/error_event.py | 16 + src/parallel/types/beta/mcp_server.py | 25 + src/parallel/types/beta/mcp_server_param.py | 25 + 
src/parallel/types/beta/mcp_tool_call.py | 27 + .../types/beta/parallel_beta_param.py | 12 + src/parallel/types/beta/search_result.py | 16 + src/parallel/types/beta/task_group.py | 24 + .../types/beta/task_group_add_runs_params.py | 30 + .../types/beta/task_group_create_params.py | 13 + .../types/beta/task_group_events_params.py | 16 + .../types/beta/task_group_events_response.py | 28 + .../types/beta/task_group_get_runs_params.py | 18 + .../beta/task_group_get_runs_response.py | 12 + .../types/beta/task_group_run_response.py | 30 + src/parallel/types/beta/task_group_status.py | 27 + .../types/beta/task_run_create_params.py | 70 ++ src/parallel/types/beta/task_run_event.py | 32 + .../types/beta/task_run_events_response.py | 58 ++ .../types/beta/task_run_result_params.py | 18 + src/parallel/types/beta/web_search_result.py | 18 + src/parallel/types/beta/webhook.py | 16 + src/parallel/types/beta/webhook_param.py | 16 + src/parallel/types/citation.py | 21 + src/parallel/types/field_basis.py | 25 + src/parallel/types/json_schema.py | 16 + src/parallel/types/json_schema_param.py | 3 +- src/parallel/types/shared/__init__.py | 6 + src/parallel/types/shared/error_object.py | 18 + src/parallel/types/shared/error_response.py | 16 + src/parallel/types/shared/source_policy.py | 21 + src/parallel/types/shared/warning.py | 22 + src/parallel/types/shared_params/__init__.py | 3 + .../types/shared_params/source_policy.py | 22 + src/parallel/types/task_run.py | 35 +- src/parallel/types/task_run_create_params.py | 15 +- src/parallel/types/task_run_json_output.py | 46 ++ src/parallel/types/task_run_result.py | 112 +--- src/parallel/types/task_run_text_output.py | 37 + src/parallel/types/task_spec.py | 31 + src/parallel/types/task_spec_param.py | 5 +- src/parallel/types/text_schema.py | 16 + tests/api_resources/beta/__init__.py | 1 + tests/api_resources/beta/test_task_group.py | 613 +++++++++++++++++ tests/api_resources/beta/test_task_run.py | 349 ++++++++++ tests/api_resources/test_beta.py | 104 +++ tests/api_resources/test_task_run.py | 96 +-- tests/test_client.py | 36 +- 72 files changed, 4146 insertions(+), 248 deletions(-) create mode 100644 .github/workflows/detect-breaking-changes.yml create mode 100755 scripts/detect-breaking-changes create mode 100644 scripts/detect-breaking-changes.py create mode 100644 src/parallel/resources/beta/__init__.py create mode 100644 src/parallel/resources/beta/beta.py create mode 100644 src/parallel/resources/beta/task_group.py create mode 100644 src/parallel/resources/beta/task_run.py create mode 100644 src/parallel/types/auto_schema.py create mode 100644 src/parallel/types/auto_schema_param.py create mode 100644 src/parallel/types/beta/__init__.py create mode 100644 src/parallel/types/beta/beta_run_input.py create mode 100644 src/parallel/types/beta/beta_run_input_param.py create mode 100644 src/parallel/types/beta/beta_search_params.py create mode 100644 src/parallel/types/beta/beta_task_run_result.py create mode 100644 src/parallel/types/beta/error_event.py create mode 100644 src/parallel/types/beta/mcp_server.py create mode 100644 src/parallel/types/beta/mcp_server_param.py create mode 100644 src/parallel/types/beta/mcp_tool_call.py create mode 100644 src/parallel/types/beta/parallel_beta_param.py create mode 100644 src/parallel/types/beta/search_result.py create mode 100644 src/parallel/types/beta/task_group.py create mode 100644 src/parallel/types/beta/task_group_add_runs_params.py create mode 100644 src/parallel/types/beta/task_group_create_params.py create mode 
100644 src/parallel/types/beta/task_group_events_params.py create mode 100644 src/parallel/types/beta/task_group_events_response.py create mode 100644 src/parallel/types/beta/task_group_get_runs_params.py create mode 100644 src/parallel/types/beta/task_group_get_runs_response.py create mode 100644 src/parallel/types/beta/task_group_run_response.py create mode 100644 src/parallel/types/beta/task_group_status.py create mode 100644 src/parallel/types/beta/task_run_create_params.py create mode 100644 src/parallel/types/beta/task_run_event.py create mode 100644 src/parallel/types/beta/task_run_events_response.py create mode 100644 src/parallel/types/beta/task_run_result_params.py create mode 100644 src/parallel/types/beta/web_search_result.py create mode 100644 src/parallel/types/beta/webhook.py create mode 100644 src/parallel/types/beta/webhook_param.py create mode 100644 src/parallel/types/citation.py create mode 100644 src/parallel/types/field_basis.py create mode 100644 src/parallel/types/json_schema.py create mode 100644 src/parallel/types/shared/__init__.py create mode 100644 src/parallel/types/shared/error_object.py create mode 100644 src/parallel/types/shared/error_response.py create mode 100644 src/parallel/types/shared/source_policy.py create mode 100644 src/parallel/types/shared/warning.py create mode 100644 src/parallel/types/shared_params/__init__.py create mode 100644 src/parallel/types/shared_params/source_policy.py create mode 100644 src/parallel/types/task_run_json_output.py create mode 100644 src/parallel/types/task_run_text_output.py create mode 100644 src/parallel/types/task_spec.py create mode 100644 src/parallel/types/text_schema.py create mode 100644 tests/api_resources/beta/__init__.py create mode 100644 tests/api_resources/beta/test_task_group.py create mode 100644 tests/api_resources/beta/test_task_run.py create mode 100644 tests/api_resources/test_beta.py diff --git a/.github/workflows/detect-breaking-changes.yml b/.github/workflows/detect-breaking-changes.yml new file mode 100644 index 0000000..59e2569 --- /dev/null +++ b/.github/workflows/detect-breaking-changes.yml @@ -0,0 +1,42 @@ +name: CI +on: + pull_request: + branches: + - main + - next + +jobs: + detect_breaking_changes: + runs-on: 'ubuntu-latest' + name: detect-breaking-changes + if: github.repository == 'parallel-web/parallel-sdk-python' + steps: + - name: Calculate fetch-depth + run: | + echo "FETCH_DEPTH=$(expr ${{ github.event.pull_request.commits }} + 1)" >> $GITHUB_ENV + + - uses: actions/checkout@v4 + with: + # Ensure we can check out the pull request base in the script below. + fetch-depth: ${{ env.FETCH_DEPTH }} + + - name: Install Rye + run: | + curl -sSf https://rye.astral.sh/get | bash + echo "$HOME/.rye/shims" >> $GITHUB_PATH + env: + RYE_VERSION: '0.44.0' + RYE_INSTALL_OPTION: '--yes' + - name: Install dependencies + run: | + rye sync --all-features + - name: Detect removed symbols + run: | + rye run python scripts/detect-breaking-changes.py "${{ github.event.pull_request.base.sha }}" + + - name: Detect breaking changes + run: | + # Try to check out previous versions of the breaking change detection script. This ensures that + # we still detect breaking changes when entire files and their tests are removed. 
+ git checkout "${{ github.event.pull_request.base.sha }}" -- ./scripts/detect-breaking-changes 2>/dev/null || true + ./scripts/detect-breaking-changes ${{ github.event.pull_request.base.sha }} \ No newline at end of file diff --git a/.stats.yml b/.stats.yml index 89e6e53..57243e7 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ -configured_endpoints: 3 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/parallel-web%2Fparallel-sdk-ff0d5939e135b67b3448abf72d8bb0f9a574194337c7c7192453781347a9601d.yml -openapi_spec_hash: f3ce85349af6273a671d3d2781c4c877 -config_hash: 95578fd0fb46916a9ee6b87857ee6683 +configured_endpoints: 12 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/parallel-web%2Fparallel-sdk-1aeb1c81a84999f2d27ca9e86b041d74b892926bed126dc9b0f3cff4d7b26963.yml +openapi_spec_hash: 6280f6c6fb537f7c9ac5cc33ee2e433d +config_hash: 284b51e02bda8519b1f21bb67f1809e0 diff --git a/README.md b/README.md index 69a617d..d2694f8 100644 --- a/README.md +++ b/README.md @@ -34,11 +34,7 @@ client = Parallel( api_key=os.environ.get("PARALLEL_API_KEY"), # This is the default and can be omitted ) -run_result = client.task_run.execute( - input="France (2023)", - processor="core", - output="GDP" -) +run_result = client.task_run.execute(input="France (2023)", processor="core", output="GDP") print(run_result.output.parsed) ``` @@ -68,9 +64,7 @@ client = AsyncParallel( async def main() -> None: run_result = await client.task_run.execute( - input="France (2023)", - processor="core", - output="GDP" + input="France (2023)", processor="core", output="GDP" ) print(run_result.output.parsed) @@ -103,9 +97,11 @@ from pydantic import BaseModel client = AsyncParallel() + class SampleOutputStructure(BaseModel): output: str + async def main() -> None: # with pydantic run_result = await client.task_run.execute( @@ -117,9 +113,7 @@ async def main() -> None: print(run_result.output.parsed) # without pydantic run_result = await client.task_run.execute( - input="France (2023)", - processor="core", - output="GDP" + input="France (2023)", processor="core", output="GDP" ) print(run_result.output.parsed) @@ -139,6 +133,7 @@ from parallel import AsyncParallel from pydantic import BaseModel, Field from typing import List + class CountryInput(BaseModel): country: str = Field( description="Name of the country to research. Must be a recognized " @@ -146,13 +141,12 @@ class CountryInput(BaseModel): ) year: int = Field( description="Year for which to retrieve data. Must be 2000 or later. " - "Use most recent full-year estimates if year is current." + "Use most recent full-year estimates if year is current." ) + class CountryOutput(BaseModel): - gdp: str = Field( - description="GDP in USD for the year, formatted like '$3.1 trillion (2023)'." - ) + gdp: str = Field(description="GDP in USD for the year, formatted like '$3.1 trillion (2023)'.") top_exports: List[str] = Field( description="Top 3 exported goods/services by value. Use credible sources." ) @@ -160,6 +154,7 @@ class CountryOutput(BaseModel): description="Top 3 imported goods/services by value. Use credible sources." 
) + async def main(): # Initialize the Parallel client client = AsyncParallel(api_key=os.environ.get("PARALLEL_API_KEY")) @@ -168,21 +163,20 @@ async def main(): input_data = [ CountryInput(country="France", year=2023), CountryInput(country="Germany", year=2023), - CountryInput(country="Italy", year=2023) + CountryInput(country="Italy", year=2023), ] - run_results = await asyncio.gather(*[ - client.task_run.execute( - input=datum, - output=CountryOutput, - processor="core" - ) - for datum in input_data - ]) + run_results = await asyncio.gather( + *[ + client.task_run.execute(input=datum, output=CountryOutput, processor="core") + for datum in input_data + ] + ) for run_input, run_result in zip(input_data, run_results): print(f"Task run output for {run_input}: {run_result.output.parsed}") + if __name__ == "__main__": asyncio.run(main()) ``` @@ -303,11 +297,7 @@ from parallel import Parallel client = Parallel() try: - client.task_run.execute( - input="France (2023)", - processor="core", - output="GDP" - ) + client.task_run.execute(input="France (2023)", processor="core", output="GDP") except parallel.APIConnectionError as e: print("The server could not be reached") print(e.__cause__) # an underlying Exception, likely raised within httpx. @@ -351,9 +341,7 @@ client = Parallel( # Or, configure per-request: client.with_options(max_retries=5).task_run.execute( - input="France (2023)", - processor="core", - output="GDP" + input="France (2023)", processor="core", output="GDP" ) ``` @@ -378,9 +366,7 @@ client = Parallel( # Override per-request: client.with_options(timeout=5.0).task_run.execute( - input="France (2023)", - processor="core", - output="GDP" + input="France (2023)", processor="core", output="GDP" ) ``` @@ -445,9 +431,7 @@ To stream the response body, use `.with_streaming_response` instead, which requi ```python with client.task_run.with_streaming_response.execute( - input="France (2023)", - processor="core", - output="GDP" + input="France (2023)", processor="core", output="GDP" ) as response: print(response.headers.get("X-My-Header")) diff --git a/api.md b/api.md index 1e5aeed..665af26 100644 --- a/api.md +++ b/api.md @@ -1,9 +1,28 @@ +# Shared Types + +```python +from parallel.types import ErrorObject, ErrorResponse, SourcePolicy, Warning +``` + # TaskRun Types: ```python -from parallel.types import Input, JsonSchema, ParsedTaskRunResult, TaskRun, TaskRunResult, TaskSpec, TextSchema +from parallel.types import ( + AutoSchema, + Citation, + FieldBasis, + JsonSchema, + ParsedTaskRunResult, + RunInput, + TaskRun, + TaskRunJsonOutput, + TaskRunResult, + TaskRunTextOutput, + TaskSpec, + TextSchema, +) ``` Methods: @@ -16,3 +35,60 @@ Convenience methods: - client.task_run.execute(input, processor, output: OutputSchema) -> TaskRunResult - client.task_run.execute(input, processor, output: Type[OutputT]) -> ParsedTaskRunResult[OutputT] +# Beta + +Types: + +```python +from parallel.types.beta import SearchResult, WebSearchResult +``` + +Methods: + +- client.beta.search(\*\*params) -> SearchResult + +## TaskRun + +Types: + +```python +from parallel.types.beta import ( + BetaRunInput, + BetaTaskRunResult, + ErrorEvent, + McpServer, + McpToolCall, + ParallelBeta, + TaskRunEvent, + Webhook, + TaskRunEventsResponse, +) +``` + +Methods: + +- client.beta.task_run.create(\*\*params) -> TaskRun +- client.beta.task_run.events(run_id) -> TaskRunEventsResponse +- client.beta.task_run.result(run_id, \*\*params) -> BetaTaskRunResult + +## TaskGroup + +Types: + +```python +from parallel.types.beta import ( + 
TaskGroup, + TaskGroupRunResponse, + TaskGroupStatus, + TaskGroupEventsResponse, + TaskGroupGetRunsResponse, +) +``` + +Methods: + +- client.beta.task_group.create(\*\*params) -> TaskGroup +- client.beta.task_group.retrieve(task_group_id) -> TaskGroup +- client.beta.task_group.add_runs(task_group_id, \*\*params) -> TaskGroupRunResponse +- client.beta.task_group.events(task_group_id, \*\*params) -> TaskGroupEventsResponse +- client.beta.task_group.get_runs(task_group_id, \*\*params) -> TaskGroupGetRunsResponse diff --git a/pyproject.toml b/pyproject.toml index 6d9f357..2708962 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -58,6 +58,7 @@ dev-dependencies = [ "rich>=13.7.1", "nest_asyncio==1.6.0", "pytest-xdist>=3.6.1", + "griffe>=1", ] [tool.rye.scripts] diff --git a/requirements-dev.lock b/requirements-dev.lock index c06ee41..3d61689 100644 --- a/requirements-dev.lock +++ b/requirements-dev.lock @@ -31,6 +31,8 @@ attrs==25.3.0 certifi==2023.7.22 # via httpcore # via httpx +colorama==0.4.6 + # via griffe colorlog==6.7.0 # via nox dirty-equals==0.6.0 @@ -48,6 +50,7 @@ filelock==3.12.4 frozenlist==1.6.2 # via aiohttp # via aiosignal +griffe==1.13.0 h11==0.16.0 # via httpcore httpcore==1.0.9 diff --git a/scripts/detect-breaking-changes b/scripts/detect-breaking-changes new file mode 100755 index 0000000..fb28f3a --- /dev/null +++ b/scripts/detect-breaking-changes @@ -0,0 +1,19 @@ +#!/usr/bin/env bash + +set -e + +cd "$(dirname "$0")/.." + +echo "==> Detecting breaking changes" + +TEST_PATHS=( tests/api_resources tests/test_client.py tests/test_response.py ) + +for PATHSPEC in "${TEST_PATHS[@]}"; do + # Try to check out previous versions of the test files + # with the current SDK. + git checkout "$1" -- "${PATHSPEC}" 2>/dev/null || true +done + +# Instead of running the tests, use the linter to check if an +# older test is no longer compatible with the latest SDK. 
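+#
+# Note (illustrative): this can also be run locally before opening a PR; the
+# single argument is the git ref whose test files are checked out above, e.g.
+#
+#   ./scripts/detect-breaking-changes origin/main
+#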
+./scripts/lint diff --git a/scripts/detect-breaking-changes.py b/scripts/detect-breaking-changes.py new file mode 100644 index 0000000..4fc5250 --- /dev/null +++ b/scripts/detect-breaking-changes.py @@ -0,0 +1,79 @@ +from __future__ import annotations + +import sys +from typing import Iterator +from pathlib import Path + +import rich +import griffe +from rich.text import Text +from rich.style import Style + + +def public_members(obj: griffe.Object | griffe.Alias) -> dict[str, griffe.Object | griffe.Alias]: + if isinstance(obj, griffe.Alias): + # ignore imports for now, they're technically part of the public API + # but we don't have good preventative measures in place to prevent + # changing them + return {} + + return {name: value for name, value in obj.all_members.items() if not name.startswith("_")} + + +def find_breaking_changes( + new_obj: griffe.Object | griffe.Alias, + old_obj: griffe.Object | griffe.Alias, + *, + path: list[str], +) -> Iterator[Text | str]: + new_members = public_members(new_obj) + old_members = public_members(old_obj) + + for name, old_member in old_members.items(): + if isinstance(old_member, griffe.Alias) and len(path) > 2: + # ignore imports in `/types/` for now, they're technically part of the public API + # but we don't have good preventative measures in place to prevent changing them + continue + + new_member = new_members.get(name) + if new_member is None: + cls_name = old_member.__class__.__name__ + yield Text(f"({cls_name})", style=Style(color="rgb(119, 119, 119)")) + yield from [" " for _ in range(10 - len(cls_name))] + yield f" {'.'.join(path)}.{name}" + yield "\n" + continue + + yield from find_breaking_changes(new_member, old_member, path=[*path, name]) + + +def main() -> None: + try: + against_ref = sys.argv[1] + except IndexError as err: + raise RuntimeError("You must specify a base ref to run breaking change detection against") from err + + package = griffe.load( + "parallel", + search_paths=[Path(__file__).parent.parent.joinpath("src")], + ) + old_package = griffe.load_git( + "parallel", + ref=against_ref, + search_paths=["src"], + ) + assert isinstance(package, griffe.Module) + assert isinstance(old_package, griffe.Module) + + output = list(find_breaking_changes(package, old_package, path=["parallel"])) + if output: + rich.print(Text("Breaking changes detected!", style=Style(color="rgb(165, 79, 87)"))) + rich.print() + + for text in output: + rich.print(text, end="") + + sys.exit(1) + + +main() diff --git a/src/parallel/_client.py b/src/parallel/_client.py index dcb4a10..bb7c4a4 100644 --- a/src/parallel/_client.py +++ b/src/parallel/_client.py @@ -29,6 +29,7 @@ SyncAPIClient, AsyncAPIClient, ) +from .resources.beta import beta __all__ = [ "Timeout", @@ -44,6 +45,7 @@ class Parallel(SyncAPIClient): task_run: task_run.TaskRunResource + beta: beta.BetaResource with_raw_response: ParallelWithRawResponse with_streaming_response: ParallelWithStreamedResponse @@ -102,6 +104,7 @@ def __init__( ) self.task_run = task_run.TaskRunResource(self) + self.beta = beta.BetaResource(self) self.with_raw_response = ParallelWithRawResponse(self) self.with_streaming_response = ParallelWithStreamedResponse(self) @@ -212,6 +215,7 @@ def _make_status_error( class AsyncParallel(AsyncAPIClient): task_run: task_run.AsyncTaskRunResource + beta: beta.AsyncBetaResource with_raw_response: AsyncParallelWithRawResponse with_streaming_response: AsyncParallelWithStreamedResponse @@ -270,6 +274,7 @@ def __init__( ) self.task_run = task_run.AsyncTaskRunResource(self) + self.beta = 
beta.AsyncBetaResource(self) self.with_raw_response = AsyncParallelWithRawResponse(self) self.with_streaming_response = AsyncParallelWithStreamedResponse(self) @@ -381,21 +386,25 @@ def _make_status_error( class ParallelWithRawResponse: def __init__(self, client: Parallel) -> None: self.task_run = task_run.TaskRunResourceWithRawResponse(client.task_run) + self.beta = beta.BetaResourceWithRawResponse(client.beta) class AsyncParallelWithRawResponse: def __init__(self, client: AsyncParallel) -> None: self.task_run = task_run.AsyncTaskRunResourceWithRawResponse(client.task_run) + self.beta = beta.AsyncBetaResourceWithRawResponse(client.beta) class ParallelWithStreamedResponse: def __init__(self, client: Parallel) -> None: self.task_run = task_run.TaskRunResourceWithStreamingResponse(client.task_run) + self.beta = beta.BetaResourceWithStreamingResponse(client.beta) class AsyncParallelWithStreamedResponse: def __init__(self, client: AsyncParallel) -> None: self.task_run = task_run.AsyncTaskRunResourceWithStreamingResponse(client.task_run) + self.beta = beta.AsyncBetaResourceWithStreamingResponse(client.beta) Client = Parallel diff --git a/src/parallel/_compat.py b/src/parallel/_compat.py index 96f1722..87fc370 100644 --- a/src/parallel/_compat.py +++ b/src/parallel/_compat.py @@ -175,6 +175,7 @@ def model_json_schema(model: type[_ModelT]) -> dict[str, Any]: return model.model_json_schema() return model.schema() # pyright: ignore[reportDeprecated] + # generic models if TYPE_CHECKING: diff --git a/src/parallel/lib/_pydantic.py b/src/parallel/lib/_pydantic.py index 3e77c19..b277802 100644 --- a/src/parallel/lib/_pydantic.py +++ b/src/parallel/lib/_pydantic.py @@ -26,6 +26,7 @@ def to_json_schema( schema["additionalProperties"] = False return schema + def is_basemodel_type(model_type: object) -> TypeGuard[type[pydantic.BaseModel]]: """Check if a type is a Pydantic BaseModel to avoid using type: ignore.""" return inspect.isclass(model_type) and issubclass(model_type, pydantic.BaseModel) diff --git a/src/parallel/resources/__init__.py b/src/parallel/resources/__init__.py index 6fc7c06..9d1df4f 100644 --- a/src/parallel/resources/__init__.py +++ b/src/parallel/resources/__init__.py @@ -1,5 +1,13 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +from .beta import ( + BetaResource, + AsyncBetaResource, + BetaResourceWithRawResponse, + AsyncBetaResourceWithRawResponse, + BetaResourceWithStreamingResponse, + AsyncBetaResourceWithStreamingResponse, +) from .task_run import ( TaskRunResource, AsyncTaskRunResource, @@ -16,4 +24,10 @@ "AsyncTaskRunResourceWithRawResponse", "TaskRunResourceWithStreamingResponse", "AsyncTaskRunResourceWithStreamingResponse", + "BetaResource", + "AsyncBetaResource", + "BetaResourceWithRawResponse", + "AsyncBetaResourceWithRawResponse", + "BetaResourceWithStreamingResponse", + "AsyncBetaResourceWithStreamingResponse", ] diff --git a/src/parallel/resources/beta/__init__.py b/src/parallel/resources/beta/__init__.py new file mode 100644 index 0000000..3bd45ec --- /dev/null +++ b/src/parallel/resources/beta/__init__.py @@ -0,0 +1,47 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
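+#
+# The classes re-exported here back the `client.beta` namespace. A minimal
+# usage sketch (illustrative only; assumes PARALLEL_API_KEY is set in the
+# environment):
+#
+#     from parallel import Parallel
+#
+#     client = Parallel()
+#     search = client.beta.search(
+#         objective="GDP of France in 2023",
+#         search_queries=["France GDP 2023"],
+#     )
+#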
+ +from .beta import ( + BetaResource, + AsyncBetaResource, + BetaResourceWithRawResponse, + AsyncBetaResourceWithRawResponse, + BetaResourceWithStreamingResponse, + AsyncBetaResourceWithStreamingResponse, +) +from .task_run import ( + TaskRunResource, + AsyncTaskRunResource, + TaskRunResourceWithRawResponse, + AsyncTaskRunResourceWithRawResponse, + TaskRunResourceWithStreamingResponse, + AsyncTaskRunResourceWithStreamingResponse, +) +from .task_group import ( + TaskGroupResource, + AsyncTaskGroupResource, + TaskGroupResourceWithRawResponse, + AsyncTaskGroupResourceWithRawResponse, + TaskGroupResourceWithStreamingResponse, + AsyncTaskGroupResourceWithStreamingResponse, +) + +__all__ = [ + "TaskRunResource", + "AsyncTaskRunResource", + "TaskRunResourceWithRawResponse", + "AsyncTaskRunResourceWithRawResponse", + "TaskRunResourceWithStreamingResponse", + "AsyncTaskRunResourceWithStreamingResponse", + "TaskGroupResource", + "AsyncTaskGroupResource", + "TaskGroupResourceWithRawResponse", + "AsyncTaskGroupResourceWithRawResponse", + "TaskGroupResourceWithStreamingResponse", + "AsyncTaskGroupResourceWithStreamingResponse", + "BetaResource", + "AsyncBetaResource", + "BetaResourceWithRawResponse", + "AsyncBetaResourceWithRawResponse", + "BetaResourceWithStreamingResponse", + "AsyncBetaResourceWithStreamingResponse", +] diff --git a/src/parallel/resources/beta/beta.py b/src/parallel/resources/beta/beta.py new file mode 100644 index 0000000..c12ec5a --- /dev/null +++ b/src/parallel/resources/beta/beta.py @@ -0,0 +1,301 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import List, Optional +from typing_extensions import Literal + +import httpx + +from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ..._utils import maybe_transform, async_maybe_transform +from .task_run import ( + TaskRunResource, + AsyncTaskRunResource, + TaskRunResourceWithRawResponse, + AsyncTaskRunResourceWithRawResponse, + TaskRunResourceWithStreamingResponse, + AsyncTaskRunResourceWithStreamingResponse, +) +from ..._compat import cached_property +from .task_group import ( + TaskGroupResource, + AsyncTaskGroupResource, + TaskGroupResourceWithRawResponse, + AsyncTaskGroupResourceWithRawResponse, + TaskGroupResourceWithStreamingResponse, + AsyncTaskGroupResourceWithStreamingResponse, +) +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ...types.beta import beta_search_params +from ..._base_client import make_request_options +from ...types.beta.search_result import SearchResult +from ...types.shared_params.source_policy import SourcePolicy + +__all__ = ["BetaResource", "AsyncBetaResource"] + + +class BetaResource(SyncAPIResource): + @cached_property + def task_run(self) -> TaskRunResource: + return TaskRunResource(self._client) + + @cached_property + def task_group(self) -> TaskGroupResource: + return TaskGroupResource(self._client) + + @cached_property + def with_raw_response(self) -> BetaResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. 
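+
+        For example (illustrative):
+
+            response = client.beta.with_raw_response.search(objective="...")
+            print(response.headers)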
+ + For more information, see https://www.github.com/parallel-web/parallel-sdk-python#accessing-raw-response-data-eg-headers + """ + return BetaResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> BetaResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/parallel-web/parallel-sdk-python#with_streaming_response + """ + return BetaResourceWithStreamingResponse(self) + + def search( + self, + *, + max_chars_per_result: Optional[int] | NotGiven = NOT_GIVEN, + max_results: Optional[int] | NotGiven = NOT_GIVEN, + objective: Optional[str] | NotGiven = NOT_GIVEN, + processor: Literal["base", "pro"] | NotGiven = NOT_GIVEN, + search_queries: Optional[List[str]] | NotGiven = NOT_GIVEN, + source_policy: Optional[SourcePolicy] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> SearchResult: + """ + Searches the web. + + Args: + max_chars_per_result: Upper bound on the number of characters to include in excerpts for each search + result. + + max_results: Upper bound on the number of results to return. May be limited by the processor. + Defaults to 10 if not provided. + + objective: Natural-language description of what the web search is trying to find. May + include guidance about preferred sources or freshness. At least one of objective + or search_queries must be provided. + + processor: Search processor. + + search_queries: Optional list of traditional keyword search queries to guide the search. May + contain search operators. At least one of objective or search_queries must be + provided. + + source_policy: Source policy for web search results. + + This policy governs which sources are allowed/disallowed in results. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._post( + "/v1beta/search", + body=maybe_transform( + { + "max_chars_per_result": max_chars_per_result, + "max_results": max_results, + "objective": objective, + "processor": processor, + "search_queries": search_queries, + "source_policy": source_policy, + }, + beta_search_params.BetaSearchParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=SearchResult, + ) + + +class AsyncBetaResource(AsyncAPIResource): + @cached_property + def task_run(self) -> AsyncTaskRunResource: + return AsyncTaskRunResource(self._client) + + @cached_property + def task_group(self) -> AsyncTaskGroupResource: + return AsyncTaskGroupResource(self._client) + + @cached_property + def with_raw_response(self) -> AsyncBetaResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. 
+ + For more information, see https://www.github.com/parallel-web/parallel-sdk-python#accessing-raw-response-data-eg-headers + """ + return AsyncBetaResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncBetaResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/parallel-web/parallel-sdk-python#with_streaming_response + """ + return AsyncBetaResourceWithStreamingResponse(self) + + async def search( + self, + *, + max_chars_per_result: Optional[int] | NotGiven = NOT_GIVEN, + max_results: Optional[int] | NotGiven = NOT_GIVEN, + objective: Optional[str] | NotGiven = NOT_GIVEN, + processor: Literal["base", "pro"] | NotGiven = NOT_GIVEN, + search_queries: Optional[List[str]] | NotGiven = NOT_GIVEN, + source_policy: Optional[SourcePolicy] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> SearchResult: + """ + Searches the web. + + Args: + max_chars_per_result: Upper bound on the number of characters to include in excerpts for each search + result. + + max_results: Upper bound on the number of results to return. May be limited by the processor. + Defaults to 10 if not provided. + + objective: Natural-language description of what the web search is trying to find. May + include guidance about preferred sources or freshness. At least one of objective + or search_queries must be provided. + + processor: Search processor. + + search_queries: Optional list of traditional keyword search queries to guide the search. May + contain search operators. At least one of objective or search_queries must be + provided. + + source_policy: Source policy for web search results. + + This policy governs which sources are allowed/disallowed in results. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._post( + "/v1beta/search", + body=await async_maybe_transform( + { + "max_chars_per_result": max_chars_per_result, + "max_results": max_results, + "objective": objective, + "processor": processor, + "search_queries": search_queries, + "source_policy": source_policy, + }, + beta_search_params.BetaSearchParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=SearchResult, + ) + + +class BetaResourceWithRawResponse: + def __init__(self, beta: BetaResource) -> None: + self._beta = beta + + self.search = to_raw_response_wrapper( + beta.search, + ) + + @cached_property + def task_run(self) -> TaskRunResourceWithRawResponse: + return TaskRunResourceWithRawResponse(self._beta.task_run) + + @cached_property + def task_group(self) -> TaskGroupResourceWithRawResponse: + return TaskGroupResourceWithRawResponse(self._beta.task_group) + + +class AsyncBetaResourceWithRawResponse: + def __init__(self, beta: AsyncBetaResource) -> None: + self._beta = beta + + self.search = async_to_raw_response_wrapper( + beta.search, + ) + + @cached_property + def task_run(self) -> AsyncTaskRunResourceWithRawResponse: + return AsyncTaskRunResourceWithRawResponse(self._beta.task_run) + + @cached_property + def task_group(self) -> AsyncTaskGroupResourceWithRawResponse: + return AsyncTaskGroupResourceWithRawResponse(self._beta.task_group) + + +class BetaResourceWithStreamingResponse: + def __init__(self, beta: BetaResource) -> None: + self._beta = beta + + self.search = to_streamed_response_wrapper( + beta.search, + ) + + @cached_property + def task_run(self) -> TaskRunResourceWithStreamingResponse: + return TaskRunResourceWithStreamingResponse(self._beta.task_run) + + @cached_property + def task_group(self) -> TaskGroupResourceWithStreamingResponse: + return TaskGroupResourceWithStreamingResponse(self._beta.task_group) + + +class AsyncBetaResourceWithStreamingResponse: + def __init__(self, beta: AsyncBetaResource) -> None: + self._beta = beta + + self.search = async_to_streamed_response_wrapper( + beta.search, + ) + + @cached_property + def task_run(self) -> AsyncTaskRunResourceWithStreamingResponse: + return AsyncTaskRunResourceWithStreamingResponse(self._beta.task_run) + + @cached_property + def task_group(self) -> AsyncTaskGroupResourceWithStreamingResponse: + return AsyncTaskGroupResourceWithStreamingResponse(self._beta.task_group) diff --git a/src/parallel/resources/beta/task_group.py b/src/parallel/resources/beta/task_group.py new file mode 100644 index 0000000..0a34e67 --- /dev/null +++ b/src/parallel/resources/beta/task_group.py @@ -0,0 +1,632 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
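+#
+# Illustrative workflow for task groups (a sketch only; the exact shape of
+# `inputs` items and of the returned TaskGroup model may differ):
+#
+#     group = client.beta.task_group.create(metadata={"project": "demo"})
+#     client.beta.task_group.add_runs(
+#         "<task_group_id>",  # id from the created group
+#         inputs=[{"input": "France (2023)", "processor": "core"}],
+#     )
+#     for event in client.beta.task_group.events("<task_group_id>"):
+#         print(event)
+#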
+ +from __future__ import annotations + +from typing import Any, Dict, List, Union, Iterable, Optional, cast +from typing_extensions import Literal + +import httpx + +from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ..._utils import is_given, maybe_transform, strip_not_given, async_maybe_transform +from ..._compat import cached_property +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ..._streaming import Stream, AsyncStream +from ...types.beta import ( + task_group_create_params, + task_group_events_params, + task_group_add_runs_params, + task_group_get_runs_params, +) +from ..._base_client import make_request_options +from ...types.beta.task_group import TaskGroup +from ...types.task_spec_param import TaskSpecParam +from ...types.beta.parallel_beta_param import ParallelBetaParam +from ...types.beta.beta_run_input_param import BetaRunInputParam +from ...types.beta.task_group_run_response import TaskGroupRunResponse +from ...types.beta.task_group_events_response import TaskGroupEventsResponse +from ...types.beta.task_group_get_runs_response import TaskGroupGetRunsResponse + +__all__ = ["TaskGroupResource", "AsyncTaskGroupResource"] + + +class TaskGroupResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> TaskGroupResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/parallel-web/parallel-sdk-python#accessing-raw-response-data-eg-headers + """ + return TaskGroupResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> TaskGroupResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/parallel-web/parallel-sdk-python#with_streaming_response + """ + return TaskGroupResourceWithStreamingResponse(self) + + def create( + self, + *, + metadata: Optional[Dict[str, Union[str, float, bool]]] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> TaskGroup: + """ + Initiates a TaskGroup to group and track multiple runs. + + Args: + metadata: User-provided metadata stored with the task group. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._post( + "/v1beta/tasks/groups", + body=maybe_transform({"metadata": metadata}, task_group_create_params.TaskGroupCreateParams), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=TaskGroup, + ) + + def retrieve( + self, + task_group_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
+ # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> TaskGroup: + """ + Retrieves aggregated status across runs in a TaskGroup. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not task_group_id: + raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}") + return self._get( + f"/v1beta/tasks/groups/{task_group_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=TaskGroup, + ) + + def add_runs( + self, + task_group_id: str, + *, + inputs: Iterable[BetaRunInputParam], + default_task_spec: Optional[TaskSpecParam] | NotGiven = NOT_GIVEN, + betas: List[ParallelBetaParam] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> TaskGroupRunResponse: + """ + Initiates multiple task runs within a TaskGroup. + + Args: + inputs: List of task runs to execute. + + default_task_spec: Specification for a task. + + Auto output schemas can be specified by setting `output_schema={"type":"auto"}`. + Not specifying a TaskSpec is the same as setting an auto output schema. + + For convenience bare strings are also accepted as input or output schemas. + + betas: Optional header to specify the beta version(s) to enable. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not task_group_id: + raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}") + extra_headers = { + **strip_not_given({"parallel-beta": ",".join(str(e) for e in betas) if is_given(betas) else NOT_GIVEN}), + **(extra_headers or {}), + } + return self._post( + f"/v1beta/tasks/groups/{task_group_id}/runs", + body=maybe_transform( + { + "inputs": inputs, + "default_task_spec": default_task_spec, + }, + task_group_add_runs_params.TaskGroupAddRunsParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=TaskGroupRunResponse, + ) + + def events( + self, + task_group_id: str, + *, + last_event_id: Optional[str] | NotGiven = NOT_GIVEN, + api_timeout: Optional[float] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Stream[TaskGroupEventsResponse]: + """ + Streams events from a TaskGroup: status updates and run completions. + + The connection will remain open for up to 10 minutes as long as at least one run + in the TaskGroup is active. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not task_group_id: + raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}") + extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})} + return self._get( + f"/v1beta/tasks/groups/{task_group_id}/events", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "last_event_id": last_event_id, + "api_timeout": api_timeout, + }, + task_group_events_params.TaskGroupEventsParams, + ), + ), + cast_to=cast( + Any, TaskGroupEventsResponse + ), # Union types cannot be passed in as arguments in the type system + stream=True, + stream_cls=Stream[TaskGroupEventsResponse], + ) + + def get_runs( + self, + task_group_id: str, + *, + include_input: bool | NotGiven = NOT_GIVEN, + include_output: bool | NotGiven = NOT_GIVEN, + last_event_id: Optional[str] | NotGiven = NOT_GIVEN, + status: Optional[ + Literal["queued", "action_required", "running", "completed", "failed", "cancelling", "cancelled"] + ] + | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Stream[TaskGroupGetRunsResponse]: + """ + Retrieves task runs in a TaskGroup and optionally their inputs and outputs. 
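+
+        The runs are returned as a server-sent event stream; iterate over the
+        returned object to consume them. For example (illustrative):
+
+            for run in client.beta.task_group.get_runs("<task_group_id>", include_output=True):
+                print(run)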
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not task_group_id: + raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}") + extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})} + return self._get( + f"/v1beta/tasks/groups/{task_group_id}/runs", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "include_input": include_input, + "include_output": include_output, + "last_event_id": last_event_id, + "status": status, + }, + task_group_get_runs_params.TaskGroupGetRunsParams, + ), + ), + cast_to=cast( + Any, TaskGroupGetRunsResponse + ), # Union types cannot be passed in as arguments in the type system + stream=True, + stream_cls=Stream[TaskGroupGetRunsResponse], + ) + + +class AsyncTaskGroupResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncTaskGroupResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/parallel-web/parallel-sdk-python#accessing-raw-response-data-eg-headers + """ + return AsyncTaskGroupResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncTaskGroupResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/parallel-web/parallel-sdk-python#with_streaming_response + """ + return AsyncTaskGroupResourceWithStreamingResponse(self) + + async def create( + self, + *, + metadata: Optional[Dict[str, Union[str, float, bool]]] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> TaskGroup: + """ + Initiates a TaskGroup to group and track multiple runs. + + Args: + metadata: User-provided metadata stored with the task group. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._post( + "/v1beta/tasks/groups", + body=await async_maybe_transform({"metadata": metadata}, task_group_create_params.TaskGroupCreateParams), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=TaskGroup, + ) + + async def retrieve( + self, + task_group_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> TaskGroup: + """ + Retrieves aggregated status across runs in a TaskGroup. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not task_group_id: + raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}") + return await self._get( + f"/v1beta/tasks/groups/{task_group_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=TaskGroup, + ) + + async def add_runs( + self, + task_group_id: str, + *, + inputs: Iterable[BetaRunInputParam], + default_task_spec: Optional[TaskSpecParam] | NotGiven = NOT_GIVEN, + betas: List[ParallelBetaParam] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> TaskGroupRunResponse: + """ + Initiates multiple task runs within a TaskGroup. + + Args: + inputs: List of task runs to execute. + + default_task_spec: Specification for a task. + + Auto output schemas can be specified by setting `output_schema={"type":"auto"}`. + Not specifying a TaskSpec is the same as setting an auto output schema. + + For convenience bare strings are also accepted as input or output schemas. + + betas: Optional header to specify the beta version(s) to enable. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not task_group_id: + raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}") + extra_headers = { + **strip_not_given({"parallel-beta": ",".join(str(e) for e in betas) if is_given(betas) else NOT_GIVEN}), + **(extra_headers or {}), + } + return await self._post( + f"/v1beta/tasks/groups/{task_group_id}/runs", + body=await async_maybe_transform( + { + "inputs": inputs, + "default_task_spec": default_task_spec, + }, + task_group_add_runs_params.TaskGroupAddRunsParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=TaskGroupRunResponse, + ) + + async def events( + self, + task_group_id: str, + *, + last_event_id: Optional[str] | NotGiven = NOT_GIVEN, + api_timeout: Optional[float] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AsyncStream[TaskGroupEventsResponse]: + """ + Streams events from a TaskGroup: status updates and run completions. + + The connection will remain open for up to 10 minutes as long as at least one run + in the TaskGroup is active. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not task_group_id: + raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}") + extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})} + return await self._get( + f"/v1beta/tasks/groups/{task_group_id}/events", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform( + { + "last_event_id": last_event_id, + "api_timeout": api_timeout, + }, + task_group_events_params.TaskGroupEventsParams, + ), + ), + cast_to=cast( + Any, TaskGroupEventsResponse + ), # Union types cannot be passed in as arguments in the type system + stream=True, + stream_cls=AsyncStream[TaskGroupEventsResponse], + ) + + async def get_runs( + self, + task_group_id: str, + *, + include_input: bool | NotGiven = NOT_GIVEN, + include_output: bool | NotGiven = NOT_GIVEN, + last_event_id: Optional[str] | NotGiven = NOT_GIVEN, + status: Optional[ + Literal["queued", "action_required", "running", "completed", "failed", "cancelling", "cancelled"] + ] + | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AsyncStream[TaskGroupGetRunsResponse]: + """ + Retrieves task runs in a TaskGroup and optionally their inputs and outputs. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not task_group_id: + raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}") + extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})} + return await self._get( + f"/v1beta/tasks/groups/{task_group_id}/runs", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform( + { + "include_input": include_input, + "include_output": include_output, + "last_event_id": last_event_id, + "status": status, + }, + task_group_get_runs_params.TaskGroupGetRunsParams, + ), + ), + cast_to=cast( + Any, TaskGroupGetRunsResponse + ), # Union types cannot be passed in as arguments in the type system + stream=True, + stream_cls=AsyncStream[TaskGroupGetRunsResponse], + ) + + +class TaskGroupResourceWithRawResponse: + def __init__(self, task_group: TaskGroupResource) -> None: + self._task_group = task_group + + self.create = to_raw_response_wrapper( + task_group.create, + ) + self.retrieve = to_raw_response_wrapper( + task_group.retrieve, + ) + self.add_runs = to_raw_response_wrapper( + task_group.add_runs, + ) + self.events = to_raw_response_wrapper( + task_group.events, + ) + self.get_runs = to_raw_response_wrapper( + task_group.get_runs, + ) + + +class AsyncTaskGroupResourceWithRawResponse: + def __init__(self, task_group: AsyncTaskGroupResource) -> None: + self._task_group = task_group + + self.create = async_to_raw_response_wrapper( + task_group.create, + ) + self.retrieve = async_to_raw_response_wrapper( + task_group.retrieve, + ) + self.add_runs = async_to_raw_response_wrapper( + task_group.add_runs, + ) + self.events = async_to_raw_response_wrapper( + task_group.events, + ) + self.get_runs = async_to_raw_response_wrapper( + task_group.get_runs, + ) + + +class TaskGroupResourceWithStreamingResponse: + def __init__(self, task_group: TaskGroupResource) -> None: + self._task_group = task_group + + self.create = to_streamed_response_wrapper( + task_group.create, + ) + self.retrieve = to_streamed_response_wrapper( + task_group.retrieve, + ) + self.add_runs = to_streamed_response_wrapper( + task_group.add_runs, + ) + self.events = to_streamed_response_wrapper( + task_group.events, + ) + self.get_runs = to_streamed_response_wrapper( + task_group.get_runs, + ) + + +class AsyncTaskGroupResourceWithStreamingResponse: + def __init__(self, task_group: AsyncTaskGroupResource) -> None: + self._task_group = task_group + + self.create = async_to_streamed_response_wrapper( + task_group.create, + ) + self.retrieve = async_to_streamed_response_wrapper( + task_group.retrieve, + ) + self.add_runs = async_to_streamed_response_wrapper( + task_group.add_runs, + ) + self.events = async_to_streamed_response_wrapper( + task_group.events, + ) + self.get_runs = async_to_streamed_response_wrapper( + task_group.get_runs, + ) diff --git a/src/parallel/resources/beta/task_run.py b/src/parallel/resources/beta/task_run.py new file mode 100644 index 0000000..8235a1a --- /dev/null +++ b/src/parallel/resources/beta/task_run.py @@ -0,0 +1,499 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
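+#
+# Illustrative usage of the beta task-run surface, assuming a configured
+# `Parallel` client (beta flag value taken from the docstrings below;
+# `run.run_id` is an assumption about the returned model):
+#
+#     run = client.beta.task_run.create(
+#         input="France (2023)",
+#         processor="pro",
+#         betas=["events-sse-2025-07-24"],
+#     )
+#     result = client.beta.task_run.result(run.run_id)
+#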
+ +from __future__ import annotations + +from typing import Any, Dict, List, Union, Iterable, Optional, cast + +import httpx + +from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ..._utils import is_given, maybe_transform, strip_not_given, async_maybe_transform +from ..._compat import cached_property +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ..._streaming import Stream, AsyncStream +from ...types.beta import task_run_create_params, task_run_result_params +from ..._base_client import make_request_options +from ...types.task_run import TaskRun +from ...types.task_spec_param import TaskSpecParam +from ...types.beta.webhook_param import WebhookParam +from ...types.beta.mcp_server_param import McpServerParam +from ...types.beta.parallel_beta_param import ParallelBetaParam +from ...types.beta.beta_task_run_result import BetaTaskRunResult +from ...types.shared_params.source_policy import SourcePolicy +from ...types.beta.task_run_events_response import TaskRunEventsResponse + +__all__ = ["TaskRunResource", "AsyncTaskRunResource"] + + +class TaskRunResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> TaskRunResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/parallel-web/parallel-sdk-python#accessing-raw-response-data-eg-headers + """ + return TaskRunResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> TaskRunResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/parallel-web/parallel-sdk-python#with_streaming_response + """ + return TaskRunResourceWithStreamingResponse(self) + + def create( + self, + *, + input: Union[str, Dict[str, object]], + processor: str, + enable_events: Optional[bool] | NotGiven = NOT_GIVEN, + mcp_servers: Optional[Iterable[McpServerParam]] | NotGiven = NOT_GIVEN, + metadata: Optional[Dict[str, Union[str, float, bool]]] | NotGiven = NOT_GIVEN, + source_policy: Optional[SourcePolicy] | NotGiven = NOT_GIVEN, + task_spec: Optional[TaskSpecParam] | NotGiven = NOT_GIVEN, + webhook: Optional[WebhookParam] | NotGiven = NOT_GIVEN, + betas: List[ParallelBetaParam] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> TaskRun: + """ + Initiates a task run. + + Returns immediately with a run object in status 'queued'. + + Beta features can be enabled by setting the 'parallel-beta' header. + + Args: + input: Input to the task, either text or a JSON object. + + processor: Processor to use for the task. + + enable_events: Controls tracking of task run execution progress. When set to true, progress + events are recorded and can be accessed via the + [Task Run events](https://platform.parallel.ai/api-reference) endpoint. When + false, no progress events are tracked. 
Note that progress tracking cannot be + enabled after a run has been created. The flag is set to true by default for + premium processors (pro and above). This feature is not available via the Python + SDK. To enable this feature in your API requests, specify the `parallel-beta` + header with `events-sse-2025-07-24` value. + + mcp_servers: Optional list of MCP servers to use for the run. This feature is not available + via the Python SDK. To enable this feature in your API requests, specify the + `parallel-beta` header with `mcp-server-2025-07-17` value. + + metadata: User-provided metadata stored with the run. Keys and values must be strings with + a maximum length of 16 and 512 characters respectively. + + source_policy: Source policy for web search results. + + This policy governs which sources are allowed/disallowed in results. + + task_spec: Specification for a task. + + Auto output schemas can be specified by setting `output_schema={"type":"auto"}`. + Not specifying a TaskSpec is the same as setting an auto output schema. + + For convenience bare strings are also accepted as input or output schemas. + + webhook: Webhooks for Task Runs. + + betas: Optional header to specify the beta version(s) to enable. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + extra_headers = { + **strip_not_given({"parallel-beta": ",".join(str(e) for e in betas) if is_given(betas) else NOT_GIVEN}), + **(extra_headers or {}), + } + return self._post( + "/v1/tasks/runs?beta=true", + body=maybe_transform( + { + "input": input, + "processor": processor, + "enable_events": enable_events, + "mcp_servers": mcp_servers, + "metadata": metadata, + "source_policy": source_policy, + "task_spec": task_spec, + "webhook": webhook, + }, + task_run_create_params.TaskRunCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=TaskRun, + ) + + def events( + self, + run_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Stream[TaskRunEventsResponse]: + """ + Streams events for a task run. + + Returns a stream of events showing progress updates and state changes for the + task run. + + For task runs that did not have enable_events set to true during creation, the + frequency of events will be reduced. 
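+
+        The return value is a server-sent event stream; iterate over it to
+        receive events as they arrive. For example (illustrative):
+
+            for event in client.beta.task_run.events("<run_id>"):
+                print(event)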
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not run_id: + raise ValueError(f"Expected a non-empty value for `run_id` but received {run_id!r}") + extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})} + return self._get( + f"/v1beta/tasks/runs/{run_id}/events", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=cast(Any, TaskRunEventsResponse), # Union types cannot be passed in as arguments in the type system + stream=True, + stream_cls=Stream[TaskRunEventsResponse], + ) + + def result( + self, + run_id: str, + *, + api_timeout: int | NotGiven = NOT_GIVEN, + betas: List[ParallelBetaParam] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> BetaTaskRunResult: + """ + Retrieves a run result by run_id, blocking until the run is completed. + + Args: + betas: Optional header to specify the beta version(s) to enable. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not run_id: + raise ValueError(f"Expected a non-empty value for `run_id` but received {run_id!r}") + extra_headers = { + **strip_not_given({"parallel-beta": ",".join(str(e) for e in betas) if is_given(betas) else NOT_GIVEN}), + **(extra_headers or {}), + } + return self._get( + f"/v1/tasks/runs/{run_id}/result?beta=true", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform({"api_timeout": api_timeout}, task_run_result_params.TaskRunResultParams), + ), + cast_to=BetaTaskRunResult, + ) + + +class AsyncTaskRunResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncTaskRunResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/parallel-web/parallel-sdk-python#accessing-raw-response-data-eg-headers + """ + return AsyncTaskRunResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncTaskRunResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
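+
+        For example (illustrative, mirroring the streaming example in the
+        README but with an `AsyncParallel` client):
+
+            async with client.beta.task_run.with_streaming_response.create(
+                input="France (2023)", processor="core"
+            ) as response:
+                print(response.headers.get("X-My-Header"))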
+ + For more information, see https://www.github.com/parallel-web/parallel-sdk-python#with_streaming_response + """ + return AsyncTaskRunResourceWithStreamingResponse(self) + + async def create( + self, + *, + input: Union[str, Dict[str, object]], + processor: str, + enable_events: Optional[bool] | NotGiven = NOT_GIVEN, + mcp_servers: Optional[Iterable[McpServerParam]] | NotGiven = NOT_GIVEN, + metadata: Optional[Dict[str, Union[str, float, bool]]] | NotGiven = NOT_GIVEN, + source_policy: Optional[SourcePolicy] | NotGiven = NOT_GIVEN, + task_spec: Optional[TaskSpecParam] | NotGiven = NOT_GIVEN, + webhook: Optional[WebhookParam] | NotGiven = NOT_GIVEN, + betas: List[ParallelBetaParam] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> TaskRun: + """ + Initiates a task run. + + Returns immediately with a run object in status 'queued'. + + Beta features can be enabled by setting the 'parallel-beta' header. + + Args: + input: Input to the task, either text or a JSON object. + + processor: Processor to use for the task. + + enable_events: Controls tracking of task run execution progress. When set to true, progress + events are recorded and can be accessed via the + [Task Run events](https://platform.parallel.ai/api-reference) endpoint. When + false, no progress events are tracked. Note that progress tracking cannot be + enabled after a run has been created. The flag is set to true by default for + premium processors (pro and above). This feature is not available via the Python + SDK. To enable this feature in your API requests, specify the `parallel-beta` + header with `events-sse-2025-07-24` value. + + mcp_servers: Optional list of MCP servers to use for the run. This feature is not available + via the Python SDK. To enable this feature in your API requests, specify the + `parallel-beta` header with `mcp-server-2025-07-17` value. + + metadata: User-provided metadata stored with the run. Keys and values must be strings with + a maximum length of 16 and 512 characters respectively. + + source_policy: Source policy for web search results. + + This policy governs which sources are allowed/disallowed in results. + + task_spec: Specification for a task. + + Auto output schemas can be specified by setting `output_schema={"type":"auto"}`. + Not specifying a TaskSpec is the same as setting an auto output schema. + + For convenience bare strings are also accepted as input or output schemas. + + webhook: Webhooks for Task Runs. + + betas: Optional header to specify the beta version(s) to enable. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + extra_headers = { + **strip_not_given({"parallel-beta": ",".join(str(e) for e in betas) if is_given(betas) else NOT_GIVEN}), + **(extra_headers or {}), + } + return await self._post( + "/v1/tasks/runs?beta=true", + body=await async_maybe_transform( + { + "input": input, + "processor": processor, + "enable_events": enable_events, + "mcp_servers": mcp_servers, + "metadata": metadata, + "source_policy": source_policy, + "task_spec": task_spec, + "webhook": webhook, + }, + task_run_create_params.TaskRunCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=TaskRun, + ) + + async def events( + self, + run_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AsyncStream[TaskRunEventsResponse]: + """ + Streams events for a task run. + + Returns a stream of events showing progress updates and state changes for the + task run. + + For task runs that did not have enable_events set to true during creation, the + frequency of events will be reduced. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not run_id: + raise ValueError(f"Expected a non-empty value for `run_id` but received {run_id!r}") + extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})} + return await self._get( + f"/v1beta/tasks/runs/{run_id}/events", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=cast(Any, TaskRunEventsResponse), # Union types cannot be passed in as arguments in the type system + stream=True, + stream_cls=AsyncStream[TaskRunEventsResponse], + ) + + async def result( + self, + run_id: str, + *, + api_timeout: int | NotGiven = NOT_GIVEN, + betas: List[ParallelBetaParam] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> BetaTaskRunResult: + """ + Retrieves a run result by run_id, blocking until the run is completed. + + Args: + betas: Optional header to specify the beta version(s) to enable. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not run_id: + raise ValueError(f"Expected a non-empty value for `run_id` but received {run_id!r}") + extra_headers = { + **strip_not_given({"parallel-beta": ",".join(str(e) for e in betas) if is_given(betas) else NOT_GIVEN}), + **(extra_headers or {}), + } + return await self._get( + f"/v1/tasks/runs/{run_id}/result?beta=true", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform( + {"api_timeout": api_timeout}, task_run_result_params.TaskRunResultParams + ), + ), + cast_to=BetaTaskRunResult, + ) + + +class TaskRunResourceWithRawResponse: + def __init__(self, task_run: TaskRunResource) -> None: + self._task_run = task_run + + self.create = to_raw_response_wrapper( + task_run.create, + ) + self.events = to_raw_response_wrapper( + task_run.events, + ) + self.result = to_raw_response_wrapper( + task_run.result, + ) + + +class AsyncTaskRunResourceWithRawResponse: + def __init__(self, task_run: AsyncTaskRunResource) -> None: + self._task_run = task_run + + self.create = async_to_raw_response_wrapper( + task_run.create, + ) + self.events = async_to_raw_response_wrapper( + task_run.events, + ) + self.result = async_to_raw_response_wrapper( + task_run.result, + ) + + +class TaskRunResourceWithStreamingResponse: + def __init__(self, task_run: TaskRunResource) -> None: + self._task_run = task_run + + self.create = to_streamed_response_wrapper( + task_run.create, + ) + self.events = to_streamed_response_wrapper( + task_run.events, + ) + self.result = to_streamed_response_wrapper( + task_run.result, + ) + + +class AsyncTaskRunResourceWithStreamingResponse: + def __init__(self, task_run: AsyncTaskRunResource) -> None: + self._task_run = task_run + + self.create = async_to_streamed_response_wrapper( + task_run.create, + ) + self.events = async_to_streamed_response_wrapper( + task_run.events, + ) + self.result = async_to_streamed_response_wrapper( + task_run.result, + ) diff --git a/src/parallel/resources/task_run.py b/src/parallel/resources/task_run.py index 073ee1c..9fe16d9 100644 --- a/src/parallel/resources/task_run.py +++ b/src/parallel/resources/task_run.py @@ -31,6 +31,7 @@ wait_for_result_async as _wait_for_result_async, task_run_result_parser, ) +from ..types.shared_params.source_policy import SourcePolicy __all__ = ["TaskRunResource", "AsyncTaskRunResource"] @@ -58,9 +59,10 @@ def with_streaming_response(self) -> TaskRunResourceWithStreamingResponse: def create( self, *, - input: Union[str, object], + input: Union[str, Dict[str, object]], processor: str, metadata: Optional[Dict[str, Union[str, float, bool]]] | NotGiven = NOT_GIVEN, + source_policy: Optional[SourcePolicy] | NotGiven = NOT_GIVEN, task_spec: Optional[TaskSpecParam] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. @@ -70,7 +72,11 @@ def create( timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> TaskRun: """ - Initiates a single task run. + Initiates a task run. + + Returns immediately with a run object in status 'queued'. 
+ + Beta features can be enabled by setting the 'parallel-beta' header. Args: input: Input to the task, either text or a JSON object. @@ -80,10 +86,16 @@ def create( metadata: User-provided metadata stored with the run. Keys and values must be strings with a maximum length of 16 and 512 characters respectively. + source_policy: Source policy for web search results. + + This policy governs which sources are allowed/disallowed in results. + task_spec: Specification for a task. - For convenience we allow bare strings as input or output schemas, which is - equivalent to a text schema with the same description. + Auto output schemas can be specified by setting `output_schema={"type":"auto"}`. + Not specifying a TaskSpec is the same as setting an auto output schema. + + For convenience bare strings are also accepted as input or output schemas. extra_headers: Send extra headers @@ -100,6 +112,7 @@ def create( "input": input, "processor": processor, "metadata": metadata, + "source_policy": source_policy, "task_spec": task_spec, }, task_run_create_params.TaskRunCreateParams, @@ -122,7 +135,9 @@ def retrieve( timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> TaskRun: """ - Retrieves a run by run_id. + Retrieves run status by run_id. + + The run result is available from the `/result` endpoint. Args: extra_headers: Send extra headers @@ -156,7 +171,7 @@ def result( timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> TaskRunResult: """ - Retrieves a run by run_id, blocking until the run is completed. + Retrieves a run result by run_id, blocking until the run is completed. Args: extra_headers: Send extra headers @@ -212,7 +227,7 @@ def _fetcher(run_id: str, deadline: float) -> TaskRunResult | ParsedTaskRunResul def execute( self, *, - input: Union[str, object], + input: Union[str, Dict[str, object]], processor: str, metadata: Optional[Dict[str, Union[str, float, bool]]] | NotGiven = NOT_GIVEN, output: Optional[OutputSchema] | NotGiven = NOT_GIVEN, @@ -227,7 +242,7 @@ def execute( def execute( self, *, - input: Union[str, object], + input: Union[str, Dict[str, object]], processor: str, metadata: Optional[Dict[str, Union[str, float, bool]]] | NotGiven = NOT_GIVEN, output: Type[OutputT], @@ -241,7 +256,7 @@ def execute( def execute( self, *, - input: Union[str, object], + input: Union[str, Dict[str, object]], processor: str, metadata: Optional[Dict[str, Union[str, float, bool]]] | NotGiven = NOT_GIVEN, output: Optional[OutputSchema] | Type[OutputT] | NotGiven = NOT_GIVEN, @@ -333,9 +348,10 @@ def with_streaming_response(self) -> AsyncTaskRunResourceWithStreamingResponse: async def create( self, *, - input: Union[str, object], + input: Union[str, Dict[str, object]], processor: str, metadata: Optional[Dict[str, Union[str, float, bool]]] | NotGiven = NOT_GIVEN, + source_policy: Optional[SourcePolicy] | NotGiven = NOT_GIVEN, task_spec: Optional[TaskSpecParam] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. @@ -345,7 +361,11 @@ async def create( timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> TaskRun: """ - Initiates a single task run. + Initiates a task run. + + Returns immediately with a run object in status 'queued'. + + Beta features can be enabled by setting the 'parallel-beta' header. Args: input: Input to the task, either text or a JSON object. 
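# A minimal usage sketch for the `source_policy` parameter wired in above, assuming
# the client class is `Parallel` and the resource is exposed as `client.task_run`
# (neither name appears in this hunk); the domains and input text are illustrative.
from parallel import Parallel

client = Parallel()  # assumed to pick up the API key from the environment

run = client.task_run.create(
    input="What funding rounds has Parallel Web Systems announced?",
    processor="base",
    # Only consider web sources from these domains.
    source_policy={"include_domains": ["techcrunch.com", "reuters.com"]},
    # Omitting task_spec is equivalent to an auto output schema, per the docstring above.
)
print(run.status)  # the run is returned immediately in status 'queued'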
@@ -355,10 +375,16 @@ async def create( metadata: User-provided metadata stored with the run. Keys and values must be strings with a maximum length of 16 and 512 characters respectively. + source_policy: Source policy for web search results. + + This policy governs which sources are allowed/disallowed in results. + task_spec: Specification for a task. - For convenience we allow bare strings as input or output schemas, which is - equivalent to a text schema with the same description. + Auto output schemas can be specified by setting `output_schema={"type":"auto"}`. + Not specifying a TaskSpec is the same as setting an auto output schema. + + For convenience bare strings are also accepted as input or output schemas. extra_headers: Send extra headers @@ -375,6 +401,7 @@ async def create( "input": input, "processor": processor, "metadata": metadata, + "source_policy": source_policy, "task_spec": task_spec, }, task_run_create_params.TaskRunCreateParams, @@ -397,7 +424,9 @@ async def retrieve( timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> TaskRun: """ - Retrieves a run by run_id. + Retrieves run status by run_id. + + The run result is available from the `/result` endpoint. Args: extra_headers: Send extra headers @@ -431,7 +460,7 @@ async def result( timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> TaskRunResult: """ - Retrieves a run by run_id, blocking until the run is completed. + Retrieves a run result by run_id, blocking until the run is completed. Args: extra_headers: Send extra headers @@ -489,7 +518,7 @@ async def _fetcher(run_id: str, deadline: float) -> TaskRunResult | ParsedTaskRu async def execute( self, *, - input: Union[str, object], + input: Union[str, Dict[str, object]], processor: str, metadata: Optional[Dict[str, Union[str, float, bool]]] | NotGiven = NOT_GIVEN, output: Optional[OutputSchema] | NotGiven = NOT_GIVEN, @@ -502,7 +531,7 @@ async def execute( async def execute( self, *, - input: Union[str, object], + input: Union[str, Dict[str, object]], processor: str, metadata: Optional[Dict[str, Union[str, float, bool]]] | NotGiven = NOT_GIVEN, output: Type[OutputT], @@ -514,7 +543,7 @@ async def execute( async def execute( self, *, - input: Union[str, object], + input: Union[str, Dict[str, object]], processor: str, metadata: Optional[Dict[str, Union[str, float, bool]]] | NotGiven = NOT_GIVEN, output: Optional[OutputSchema] | Type[OutputT] | NotGiven = NOT_GIVEN, diff --git a/src/parallel/types/__init__.py b/src/parallel/types/__init__.py index e3d0181..15d056e 100644 --- a/src/parallel/types/__init__.py +++ b/src/parallel/types/__init__.py @@ -2,11 +2,26 @@ from __future__ import annotations +from .shared import ( + Warning as Warning, + ErrorObject as ErrorObject, + SourcePolicy as SourcePolicy, + ErrorResponse as ErrorResponse, +) +from .citation import Citation as Citation from .task_run import TaskRun as TaskRun +from .task_spec import TaskSpec as TaskSpec +from .auto_schema import AutoSchema as AutoSchema +from .field_basis import FieldBasis as FieldBasis +from .json_schema import JsonSchema as JsonSchema +from .text_schema import TextSchema as TextSchema from .task_run_result import TaskRunResult as TaskRunResult from .task_spec_param import TaskSpecParam as TaskSpecParam +from .auto_schema_param import AutoSchemaParam as AutoSchemaParam from .json_schema_param import JsonSchemaParam as JsonSchemaParam from .text_schema_param import TextSchemaParam as TextSchemaParam +from .task_run_json_output import TaskRunJsonOutput as 
TaskRunJsonOutput +from .task_run_text_output import TaskRunTextOutput as TaskRunTextOutput from .parsed_task_run_result import ParsedTaskRunResult as ParsedTaskRunResult from .task_run_create_params import TaskRunCreateParams as TaskRunCreateParams from .task_run_result_params import TaskRunResultParams as TaskRunResultParams diff --git a/src/parallel/types/auto_schema.py b/src/parallel/types/auto_schema.py new file mode 100644 index 0000000..6ff429e --- /dev/null +++ b/src/parallel/types/auto_schema.py @@ -0,0 +1,13 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from typing_extensions import Literal + +from .._models import BaseModel + +__all__ = ["AutoSchema"] + + +class AutoSchema(BaseModel): + type: Optional[Literal["auto"]] = None + """The type of schema being defined. Always `auto`.""" diff --git a/src/parallel/types/auto_schema_param.py b/src/parallel/types/auto_schema_param.py new file mode 100644 index 0000000..54d5196 --- /dev/null +++ b/src/parallel/types/auto_schema_param.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Literal, TypedDict + +__all__ = ["AutoSchemaParam"] + + +class AutoSchemaParam(TypedDict, total=False): + type: Literal["auto"] + """The type of schema being defined. Always `auto`.""" diff --git a/src/parallel/types/beta/__init__.py b/src/parallel/types/beta/__init__.py new file mode 100644 index 0000000..a872a43 --- /dev/null +++ b/src/parallel/types/beta/__init__.py @@ -0,0 +1,30 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from .webhook import Webhook as Webhook +from .mcp_server import McpServer as McpServer +from .task_group import TaskGroup as TaskGroup +from .error_event import ErrorEvent as ErrorEvent +from .mcp_tool_call import McpToolCall as McpToolCall +from .search_result import SearchResult as SearchResult +from .webhook_param import WebhookParam as WebhookParam +from .beta_run_input import BetaRunInput as BetaRunInput +from .task_run_event import TaskRunEvent as TaskRunEvent +from .mcp_server_param import McpServerParam as McpServerParam +from .task_group_status import TaskGroupStatus as TaskGroupStatus +from .web_search_result import WebSearchResult as WebSearchResult +from .beta_search_params import BetaSearchParams as BetaSearchParams +from .parallel_beta_param import ParallelBetaParam as ParallelBetaParam +from .beta_run_input_param import BetaRunInputParam as BetaRunInputParam +from .beta_task_run_result import BetaTaskRunResult as BetaTaskRunResult +from .task_run_create_params import TaskRunCreateParams as TaskRunCreateParams +from .task_run_result_params import TaskRunResultParams as TaskRunResultParams +from .task_group_run_response import TaskGroupRunResponse as TaskGroupRunResponse +from .task_group_create_params import TaskGroupCreateParams as TaskGroupCreateParams +from .task_group_events_params import TaskGroupEventsParams as TaskGroupEventsParams +from .task_run_events_response import TaskRunEventsResponse as TaskRunEventsResponse +from .task_group_add_runs_params import TaskGroupAddRunsParams as TaskGroupAddRunsParams +from .task_group_events_response import TaskGroupEventsResponse as TaskGroupEventsResponse +from .task_group_get_runs_params import TaskGroupGetRunsParams as TaskGroupGetRunsParams +from .task_group_get_runs_response import 
TaskGroupGetRunsResponse as TaskGroupGetRunsResponse diff --git a/src/parallel/types/beta/beta_run_input.py b/src/parallel/types/beta/beta_run_input.py new file mode 100644 index 0000000..7707711 --- /dev/null +++ b/src/parallel/types/beta/beta_run_input.py @@ -0,0 +1,63 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Dict, List, Union, Optional + +from .webhook import Webhook +from ..._models import BaseModel +from ..task_spec import TaskSpec +from .mcp_server import McpServer +from ..shared.source_policy import SourcePolicy + +__all__ = ["BetaRunInput"] + + +class BetaRunInput(BaseModel): + input: Union[str, Dict[str, object]] + """Input to the task, either text or a JSON object.""" + + processor: str + """Processor to use for the task.""" + + enable_events: Optional[bool] = None + """Controls tracking of task run execution progress. + + When set to true, progress events are recorded and can be accessed via the + [Task Run events](https://platform.parallel.ai/api-reference) endpoint. When + false, no progress events are tracked. Note that progress tracking cannot be + enabled after a run has been created. The flag is set to true by default for + premium processors (pro and above). This feature is not available via the Python + SDK. To enable this feature in your API requests, specify the `parallel-beta` + header with `events-sse-2025-07-24` value. + """ + + mcp_servers: Optional[List[McpServer]] = None + """ + Optional list of MCP servers to use for the run. This feature is not available + via the Python SDK. To enable this feature in your API requests, specify the + `parallel-beta` header with `mcp-server-2025-07-17` value. + """ + + metadata: Optional[Dict[str, Union[str, float, bool]]] = None + """User-provided metadata stored with the run. + + Keys and values must be strings with a maximum length of 16 and 512 characters + respectively. + """ + + source_policy: Optional[SourcePolicy] = None + """Source policy for web search results. + + This policy governs which sources are allowed/disallowed in results. + """ + + task_spec: Optional[TaskSpec] = None + """Specification for a task. + + Auto output schemas can be specified by setting `output_schema={"type":"auto"}`. + Not specifying a TaskSpec is the same as setting an auto output schema. + + For convenience bare strings are also accepted as input or output schemas. + """ + + webhook: Optional[Webhook] = None + """Webhooks for Task Runs.""" diff --git a/src/parallel/types/beta/beta_run_input_param.py b/src/parallel/types/beta/beta_run_input_param.py new file mode 100644 index 0000000..715f6ed --- /dev/null +++ b/src/parallel/types/beta/beta_run_input_param.py @@ -0,0 +1,65 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Dict, Union, Iterable, Optional +from typing_extensions import Required, TypedDict + +from .webhook_param import WebhookParam +from ..task_spec_param import TaskSpecParam +from .mcp_server_param import McpServerParam +from ..shared_params.source_policy import SourcePolicy + +__all__ = ["BetaRunInputParam"] + + +class BetaRunInputParam(TypedDict, total=False): + input: Required[Union[str, Dict[str, object]]] + """Input to the task, either text or a JSON object.""" + + processor: Required[str] + """Processor to use for the task.""" + + enable_events: Optional[bool] + """Controls tracking of task run execution progress. 
+ + When set to true, progress events are recorded and can be accessed via the + [Task Run events](https://platform.parallel.ai/api-reference) endpoint. When + false, no progress events are tracked. Note that progress tracking cannot be + enabled after a run has been created. The flag is set to true by default for + premium processors (pro and above). This feature is not available via the Python + SDK. To enable this feature in your API requests, specify the `parallel-beta` + header with `events-sse-2025-07-24` value. + """ + + mcp_servers: Optional[Iterable[McpServerParam]] + """ + Optional list of MCP servers to use for the run. This feature is not available + via the Python SDK. To enable this feature in your API requests, specify the + `parallel-beta` header with `mcp-server-2025-07-17` value. + """ + + metadata: Optional[Dict[str, Union[str, float, bool]]] + """User-provided metadata stored with the run. + + Keys and values must be strings with a maximum length of 16 and 512 characters + respectively. + """ + + source_policy: Optional[SourcePolicy] + """Source policy for web search results. + + This policy governs which sources are allowed/disallowed in results. + """ + + task_spec: Optional[TaskSpecParam] + """Specification for a task. + + Auto output schemas can be specified by setting `output_schema={"type":"auto"}`. + Not specifying a TaskSpec is the same as setting an auto output schema. + + For convenience bare strings are also accepted as input or output schemas. + """ + + webhook: Optional[WebhookParam] + """Webhooks for Task Runs.""" diff --git a/src/parallel/types/beta/beta_search_params.py b/src/parallel/types/beta/beta_search_params.py new file mode 100644 index 0000000..b6b55a6 --- /dev/null +++ b/src/parallel/types/beta/beta_search_params.py @@ -0,0 +1,47 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import List, Optional +from typing_extensions import Literal, TypedDict + +from ..shared_params.source_policy import SourcePolicy + +__all__ = ["BetaSearchParams"] + + +class BetaSearchParams(TypedDict, total=False): + max_chars_per_result: Optional[int] + """ + Upper bound on the number of characters to include in excerpts for each search + result. + """ + + max_results: Optional[int] + """Upper bound on the number of results to return. + + May be limited by the processor. Defaults to 10 if not provided. + """ + + objective: Optional[str] + """Natural-language description of what the web search is trying to find. + + May include guidance about preferred sources or freshness. At least one of + objective or search_queries must be provided. + """ + + processor: Literal["base", "pro"] + """Search processor.""" + + search_queries: Optional[List[str]] + """Optional list of traditional keyword search queries to guide the search. + + May contain search operators. At least one of objective or search_queries must + be provided. + """ + + source_policy: Optional[SourcePolicy] + """Source policy for web search results. + + This policy governs which sources are allowed/disallowed in results. + """ diff --git a/src/parallel/types/beta/beta_task_run_result.py b/src/parallel/types/beta/beta_task_run_result.py new file mode 100644 index 0000000..3db1c50 --- /dev/null +++ b/src/parallel/types/beta/beta_task_run_result.py @@ -0,0 +1,74 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
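# A small sketch of building the request shapes defined above as plain dicts; the
# imports rely on the `beta/__init__.py` re-exports added earlier in this patch,
# and all field values (processor names, URLs, objective text) are illustrative.
from parallel.types.beta import BetaRunInputParam, BetaSearchParams

run_input: BetaRunInputParam = {
    "input": {"company": "Parallel Web Systems"},  # JSON-object input
    "processor": "pro",
    "metadata": {"batch": "2025-08-29", "priority": 1.0},
    "webhook": {"url": "https://example.com/hooks/parallel", "event_types": ["task_run.status"]},
}

search_request: BetaSearchParams = {
    "objective": "Find recent funding announcements for Parallel Web Systems",
    "max_results": 5,
    "processor": "base",
    "source_policy": {"include_domains": ["techcrunch.com"]},
}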
+ +from typing import Dict, List, Union, Optional +from typing_extensions import Literal, Annotated, TypeAlias + +from ..._utils import PropertyInfo +from ..._models import BaseModel +from ..task_run import TaskRun +from ..field_basis import FieldBasis +from .mcp_tool_call import McpToolCall + +__all__ = ["BetaTaskRunResult", "Output", "OutputBetaTaskRunTextOutput", "OutputBetaTaskRunJsonOutput"] + + +class OutputBetaTaskRunTextOutput(BaseModel): + basis: List[FieldBasis] + """Basis for the output.""" + + content: str + """Text output from the task.""" + + type: Literal["text"] + """ + The type of output being returned, as determined by the output schema of the + task spec. + """ + + beta_fields: Optional[Dict[str, object]] = None + """Always None.""" + + mcp_tool_calls: Optional[List[McpToolCall]] = None + """MCP tool calls made by the task.""" + + +class OutputBetaTaskRunJsonOutput(BaseModel): + basis: List[FieldBasis] + """Basis for the output.""" + + content: Dict[str, object] + """ + Output from the task as a native JSON object, as determined by the output schema + of the task spec. + """ + + type: Literal["json"] + """ + The type of output being returned, as determined by the output schema of the + task spec. + """ + + beta_fields: Optional[Dict[str, object]] = None + """Always None.""" + + mcp_tool_calls: Optional[List[McpToolCall]] = None + """MCP tool calls made by the task.""" + + output_schema: Optional[Dict[str, object]] = None + """Output schema for the Task Run. + + Populated only if the task was executed with an auto schema. + """ + + +Output: TypeAlias = Annotated[ + Union[OutputBetaTaskRunTextOutput, OutputBetaTaskRunJsonOutput], PropertyInfo(discriminator="type") +] + + +class BetaTaskRunResult(BaseModel): + output: Output + """Output from the task conforming to the output schema.""" + + run: TaskRun + """Status of a task run.""" diff --git a/src/parallel/types/beta/error_event.py b/src/parallel/types/beta/error_event.py new file mode 100644 index 0000000..aefa872 --- /dev/null +++ b/src/parallel/types/beta/error_event.py @@ -0,0 +1,16 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing_extensions import Literal + +from ..._models import BaseModel +from ..shared.error_object import ErrorObject + +__all__ = ["ErrorEvent"] + + +class ErrorEvent(BaseModel): + error: ErrorObject + """An error message.""" + + type: Literal["error"] + """Event type; always 'error'.""" diff --git a/src/parallel/types/beta/mcp_server.py b/src/parallel/types/beta/mcp_server.py new file mode 100644 index 0000000..192493f --- /dev/null +++ b/src/parallel/types/beta/mcp_server.py @@ -0,0 +1,25 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Dict, List, Optional +from typing_extensions import Literal + +from ..._models import BaseModel + +__all__ = ["McpServer"] + + +class McpServer(BaseModel): + name: str + """Name of the MCP server.""" + + url: str + """URL of the MCP server.""" + + allowed_tools: Optional[List[str]] = None + """List of allowed tools for the MCP server.""" + + headers: Optional[Dict[str, str]] = None + """Headers for the MCP server.""" + + type: Optional[Literal["url"]] = None + """Type of MCP server being configured. 
Always `url`.""" diff --git a/src/parallel/types/beta/mcp_server_param.py b/src/parallel/types/beta/mcp_server_param.py new file mode 100644 index 0000000..cef54ce --- /dev/null +++ b/src/parallel/types/beta/mcp_server_param.py @@ -0,0 +1,25 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Dict, List, Optional +from typing_extensions import Literal, Required, TypedDict + +__all__ = ["McpServerParam"] + + +class McpServerParam(TypedDict, total=False): + name: Required[str] + """Name of the MCP server.""" + + url: Required[str] + """URL of the MCP server.""" + + allowed_tools: Optional[List[str]] + """List of allowed tools for the MCP server.""" + + headers: Optional[Dict[str, str]] + """Headers for the MCP server.""" + + type: Literal["url"] + """Type of MCP server being configured. Always `url`.""" diff --git a/src/parallel/types/beta/mcp_tool_call.py b/src/parallel/types/beta/mcp_tool_call.py new file mode 100644 index 0000000..daa7cca --- /dev/null +++ b/src/parallel/types/beta/mcp_tool_call.py @@ -0,0 +1,27 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional + +from ..._models import BaseModel + +__all__ = ["McpToolCall"] + + +class McpToolCall(BaseModel): + arguments: str + """Arguments used to call the MCP tool.""" + + server_name: str + """Name of the MCP server.""" + + tool_call_id: str + """Identifier for the tool call.""" + + tool_name: str + """Name of the tool being called.""" + + content: Optional[str] = None + """Output received from the tool call, if successful.""" + + error: Optional[str] = None + """Error message if the tool call failed.""" diff --git a/src/parallel/types/beta/parallel_beta_param.py b/src/parallel/types/beta/parallel_beta_param.py new file mode 100644 index 0000000..80b4de0 --- /dev/null +++ b/src/parallel/types/beta/parallel_beta_param.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Union +from typing_extensions import Literal, TypeAlias + +__all__ = ["ParallelBetaParam"] + +ParallelBetaParam: TypeAlias = Union[ + Literal["mcp-server-2025-07-17", "events-sse-2025-07-24", "webhook-2025-08-12"], str +] diff --git a/src/parallel/types/beta/search_result.py b/src/parallel/types/beta/search_result.py new file mode 100644 index 0000000..05c945c --- /dev/null +++ b/src/parallel/types/beta/search_result.py @@ -0,0 +1,16 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List + +from ..._models import BaseModel +from .web_search_result import WebSearchResult + +__all__ = ["SearchResult"] + + +class SearchResult(BaseModel): + results: List[WebSearchResult] + """A list of WebSearchResult objects, ordered by decreasing relevance.""" + + search_id: str + """Search ID. Example: `search_cad0a6d2-dec0-46bd-95ae-900527d880e7`""" diff --git a/src/parallel/types/beta/task_group.py b/src/parallel/types/beta/task_group.py new file mode 100644 index 0000000..247bddb --- /dev/null +++ b/src/parallel/types/beta/task_group.py @@ -0,0 +1,24 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
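# Sketch of attaching an MCP server to a beta task run, following the `mcp_servers`
# and `parallel-beta` docstrings above. The `client.beta.task_run` attribute path,
# the server name, and its URL are assumptions; the parameter shapes come from
# `McpServerParam` and `ParallelBetaParam` in this patch.
from parallel import Parallel

client = Parallel()
run = client.beta.task_run.create(
    input="Summarize open issues in our tracker",
    processor="pro",
    mcp_servers=[
        {
            "type": "url",
            "name": "issue-tracker",               # hypothetical server name
            "url": "https://mcp.example.com/sse",  # hypothetical endpoint
            "allowed_tools": ["list_issues"],
        }
    ],
    betas=["mcp-server-2025-07-17"],  # sent as the parallel-beta header
)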
+ +from typing import Dict, Union, Optional + +from pydantic import Field as FieldInfo + +from ..._models import BaseModel +from .task_group_status import TaskGroupStatus + +__all__ = ["TaskGroup"] + + +class TaskGroup(BaseModel): + created_at: Optional[str] = None + """Timestamp of the creation of the group, as an RFC 3339 string.""" + + status: TaskGroupStatus + """Status of a task group.""" + + task_group_id: str = FieldInfo(alias="taskgroup_id") + """ID of the group.""" + + metadata: Optional[Dict[str, Union[str, float, bool]]] = None + """User-provided metadata stored with the group.""" diff --git a/src/parallel/types/beta/task_group_add_runs_params.py b/src/parallel/types/beta/task_group_add_runs_params.py new file mode 100644 index 0000000..68523f9 --- /dev/null +++ b/src/parallel/types/beta/task_group_add_runs_params.py @@ -0,0 +1,30 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import List, Iterable, Optional +from typing_extensions import Required, Annotated, TypedDict + +from ..._utils import PropertyInfo +from ..task_spec_param import TaskSpecParam +from .parallel_beta_param import ParallelBetaParam +from .beta_run_input_param import BetaRunInputParam + +__all__ = ["TaskGroupAddRunsParams"] + + +class TaskGroupAddRunsParams(TypedDict, total=False): + inputs: Required[Iterable[BetaRunInputParam]] + """List of task runs to execute.""" + + default_task_spec: Optional[TaskSpecParam] + """Specification for a task. + + Auto output schemas can be specified by setting `output_schema={"type":"auto"}`. + Not specifying a TaskSpec is the same as setting an auto output schema. + + For convenience bare strings are also accepted as input or output schemas. + """ + + betas: Annotated[List[ParallelBetaParam], PropertyInfo(alias="parallel-beta")] + """Optional header to specify the beta version(s) to enable.""" diff --git a/src/parallel/types/beta/task_group_create_params.py b/src/parallel/types/beta/task_group_create_params.py new file mode 100644 index 0000000..2b5cc73 --- /dev/null +++ b/src/parallel/types/beta/task_group_create_params.py @@ -0,0 +1,13 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Dict, Union, Optional +from typing_extensions import TypedDict + +__all__ = ["TaskGroupCreateParams"] + + +class TaskGroupCreateParams(TypedDict, total=False): + metadata: Optional[Dict[str, Union[str, float, bool]]] + """User-provided metadata stored with the task group.""" diff --git a/src/parallel/types/beta/task_group_events_params.py b/src/parallel/types/beta/task_group_events_params.py new file mode 100644 index 0000000..15f0d00 --- /dev/null +++ b/src/parallel/types/beta/task_group_events_params.py @@ -0,0 +1,16 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
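# Sketch of the task-group flow implied by the params above. The method names
# (`client.beta.task_group.create` / `.add_runs`) and their return values are
# assumptions inferred from TaskGroupCreateParams / TaskGroupAddRunsParams; only
# the payload shapes are taken from this patch.
from parallel import Parallel

client = Parallel()
group = client.beta.task_group.create(metadata={"project": "weekly-crawl"})
batch = client.beta.task_group.add_runs(
    group.task_group_id,
    inputs=[
        {"input": "Profile company A", "processor": "base"},
        {"input": "Profile company B", "processor": "base"},
    ],
    default_task_spec={"output_schema": {"type": "auto"}},
)
print(batch.run_ids)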
+ +from __future__ import annotations + +from typing import Optional +from typing_extensions import Annotated, TypedDict + +from ..._utils import PropertyInfo + +__all__ = ["TaskGroupEventsParams"] + + +class TaskGroupEventsParams(TypedDict, total=False): + last_event_id: Optional[str] + + api_timeout: Annotated[Optional[float], PropertyInfo(alias="timeout")] diff --git a/src/parallel/types/beta/task_group_events_response.py b/src/parallel/types/beta/task_group_events_response.py new file mode 100644 index 0000000..cbb6522 --- /dev/null +++ b/src/parallel/types/beta/task_group_events_response.py @@ -0,0 +1,28 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Union +from typing_extensions import Literal, Annotated, TypeAlias + +from ..._utils import PropertyInfo +from ..._models import BaseModel +from .error_event import ErrorEvent +from .task_run_event import TaskRunEvent +from .task_group_status import TaskGroupStatus + +__all__ = ["TaskGroupEventsResponse", "TaskGroupStatusEvent"] + + +class TaskGroupStatusEvent(BaseModel): + event_id: str + """Cursor to resume the event stream.""" + + status: TaskGroupStatus + """Status of a task group.""" + + type: Literal["task_group_status"] + """Event type; always 'task_group_status'.""" + + +TaskGroupEventsResponse: TypeAlias = Annotated[ + Union[TaskGroupStatusEvent, TaskRunEvent, ErrorEvent], PropertyInfo(discriminator="type") +] diff --git a/src/parallel/types/beta/task_group_get_runs_params.py b/src/parallel/types/beta/task_group_get_runs_params.py new file mode 100644 index 0000000..b6b1ef7 --- /dev/null +++ b/src/parallel/types/beta/task_group_get_runs_params.py @@ -0,0 +1,18 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Optional +from typing_extensions import Literal, TypedDict + +__all__ = ["TaskGroupGetRunsParams"] + + +class TaskGroupGetRunsParams(TypedDict, total=False): + include_input: bool + + include_output: bool + + last_event_id: Optional[str] + + status: Optional[Literal["queued", "action_required", "running", "completed", "failed", "cancelling", "cancelled"]] diff --git a/src/parallel/types/beta/task_group_get_runs_response.py b/src/parallel/types/beta/task_group_get_runs_response.py new file mode 100644 index 0000000..b287dcb --- /dev/null +++ b/src/parallel/types/beta/task_group_get_runs_response.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Union +from typing_extensions import Annotated, TypeAlias + +from ..._utils import PropertyInfo +from .error_event import ErrorEvent +from .task_run_event import TaskRunEvent + +__all__ = ["TaskGroupGetRunsResponse"] + +TaskGroupGetRunsResponse: TypeAlias = Annotated[Union[TaskRunEvent, ErrorEvent], PropertyInfo(discriminator="type")] diff --git a/src/parallel/types/beta/task_group_run_response.py b/src/parallel/types/beta/task_group_run_response.py new file mode 100644 index 0000000..98161bb --- /dev/null +++ b/src/parallel/types/beta/task_group_run_response.py @@ -0,0 +1,30 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
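# Sketch of dispatching on the TaskGroupEventsResponse union defined above. The
# `events` iterable stands in for whatever the task-group events endpoint yields;
# only the event models come from this patch.
from typing import Iterable

from parallel.types.beta import ErrorEvent, TaskGroupEventsResponse, TaskRunEvent


def handle_group_events(events: Iterable[TaskGroupEventsResponse]) -> None:
    for event in events:
        if isinstance(event, TaskRunEvent):
            # A single run in the group changed state.
            print("run", event.run.run_id, "->", event.run.status)
        elif isinstance(event, ErrorEvent):
            print("error:", event.error.message)
        else:
            # TaskGroupStatusEvent: overall group progress.
            print("group status:", event.status.task_run_status_counts)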
+ +from typing import List, Optional + +from ..._models import BaseModel +from .task_group_status import TaskGroupStatus + +__all__ = ["TaskGroupRunResponse"] + + +class TaskGroupRunResponse(BaseModel): + event_cursor: Optional[str] = None + """ + Cursor for these runs in the event stream at + taskgroup/events?last_event_id=. Empty for the first runs in the + group. + """ + + run_cursor: Optional[str] = None + """ + Cursor for these runs in the run stream at + taskgroup/runs?last_event_id=. Empty for the first runs in the + group. + """ + + run_ids: List[str] + """IDs of the newly created runs.""" + + status: TaskGroupStatus + """Status of a task group.""" diff --git a/src/parallel/types/beta/task_group_status.py b/src/parallel/types/beta/task_group_status.py new file mode 100644 index 0000000..221b527 --- /dev/null +++ b/src/parallel/types/beta/task_group_status.py @@ -0,0 +1,27 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Dict, Optional + +from ..._models import BaseModel + +__all__ = ["TaskGroupStatus"] + + +class TaskGroupStatus(BaseModel): + is_active: bool + """True if at least one run in the group is currently active, i.e. + + status is one of {'cancelling', 'queued', 'running'}. + """ + + modified_at: Optional[str] = None + """Timestamp of the last status update to the group, as an RFC 3339 string.""" + + num_task_runs: int + """Number of task runs in the group.""" + + status_message: Optional[str] = None + """Human-readable status message for the group.""" + + task_run_status_counts: Dict[str, int] + """Number of task runs with each status.""" diff --git a/src/parallel/types/beta/task_run_create_params.py b/src/parallel/types/beta/task_run_create_params.py new file mode 100644 index 0000000..80bd0da --- /dev/null +++ b/src/parallel/types/beta/task_run_create_params.py @@ -0,0 +1,70 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Dict, List, Union, Iterable, Optional +from typing_extensions import Required, Annotated, TypedDict + +from ..._utils import PropertyInfo +from .webhook_param import WebhookParam +from ..task_spec_param import TaskSpecParam +from .mcp_server_param import McpServerParam +from .parallel_beta_param import ParallelBetaParam +from ..shared_params.source_policy import SourcePolicy + +__all__ = ["TaskRunCreateParams"] + + +class TaskRunCreateParams(TypedDict, total=False): + input: Required[Union[str, Dict[str, object]]] + """Input to the task, either text or a JSON object.""" + + processor: Required[str] + """Processor to use for the task.""" + + enable_events: Optional[bool] + """Controls tracking of task run execution progress. + + When set to true, progress events are recorded and can be accessed via the + [Task Run events](https://platform.parallel.ai/api-reference) endpoint. When + false, no progress events are tracked. Note that progress tracking cannot be + enabled after a run has been created. The flag is set to true by default for + premium processors (pro and above). This feature is not available via the Python + SDK. To enable this feature in your API requests, specify the `parallel-beta` + header with `events-sse-2025-07-24` value. + """ + + mcp_servers: Optional[Iterable[McpServerParam]] + """ + Optional list of MCP servers to use for the run. This feature is not available + via the Python SDK. 
To enable this feature in your API requests, specify the + `parallel-beta` header with `mcp-server-2025-07-17` value. + """ + + metadata: Optional[Dict[str, Union[str, float, bool]]] + """User-provided metadata stored with the run. + + Keys and values must be strings with a maximum length of 16 and 512 characters + respectively. + """ + + source_policy: Optional[SourcePolicy] + """Source policy for web search results. + + This policy governs which sources are allowed/disallowed in results. + """ + + task_spec: Optional[TaskSpecParam] + """Specification for a task. + + Auto output schemas can be specified by setting `output_schema={"type":"auto"}`. + Not specifying a TaskSpec is the same as setting an auto output schema. + + For convenience bare strings are also accepted as input or output schemas. + """ + + webhook: Optional[WebhookParam] + """Webhooks for Task Runs.""" + + betas: Annotated[List[ParallelBetaParam], PropertyInfo(alias="parallel-beta")] + """Optional header to specify the beta version(s) to enable.""" diff --git a/src/parallel/types/beta/task_run_event.py b/src/parallel/types/beta/task_run_event.py new file mode 100644 index 0000000..7d809fe --- /dev/null +++ b/src/parallel/types/beta/task_run_event.py @@ -0,0 +1,32 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Union, Optional +from typing_extensions import Literal, Annotated, TypeAlias + +from ..._utils import PropertyInfo +from ..._models import BaseModel +from ..task_run import TaskRun +from .beta_run_input import BetaRunInput +from ..task_run_json_output import TaskRunJsonOutput +from ..task_run_text_output import TaskRunTextOutput + +__all__ = ["TaskRunEvent", "Output"] + +Output: TypeAlias = Annotated[Union[TaskRunTextOutput, TaskRunJsonOutput, None], PropertyInfo(discriminator="type")] + + +class TaskRunEvent(BaseModel): + event_id: Optional[str] = None + """Cursor to resume the event stream. Always empty for non Task Group runs.""" + + run: TaskRun + """Status of a task run.""" + + type: Literal["task_run.state"] + """Event type; always 'task_run.state'.""" + + input: Optional[BetaRunInput] = None + """Task run input with additional beta fields.""" + + output: Optional[Output] = None + """Output from the run; included only if requested and if status == `completed`.""" diff --git a/src/parallel/types/beta/task_run_events_response.py b/src/parallel/types/beta/task_run_events_response.py new file mode 100644 index 0000000..27ea82f --- /dev/null +++ b/src/parallel/types/beta/task_run_events_response.py @@ -0,0 +1,58 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
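# Sketch combining the `enable_events`, `webhook`, and `betas` fields of the beta
# TaskRunCreateParams above in a single create call. The `client.beta.task_run`
# path and the webhook URL are assumptions; the beta version strings come from
# ParallelBetaParam in this patch.
from parallel import Parallel

client = Parallel()
run = client.beta.task_run.create(
    input="Track the latest release notes for httpx",
    processor="pro",
    enable_events=True,
    webhook={"url": "https://example.com/hooks/parallel", "event_types": ["task_run.status"]},
    betas=["events-sse-2025-07-24", "webhook-2025-08-12"],
)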
+ +from typing import List, Union, Optional +from typing_extensions import Literal, Annotated, TypeAlias + +from ..._utils import PropertyInfo +from ..._models import BaseModel +from .error_event import ErrorEvent +from .task_run_event import TaskRunEvent + +__all__ = [ + "TaskRunEventsResponse", + "TaskRunProgressStatsEvent", + "TaskRunProgressStatsEventSourceStats", + "TaskRunProgressMessageEvent", +] + + +class TaskRunProgressStatsEventSourceStats(BaseModel): + num_sources_considered: Optional[int] = None + """Number of sources considered in processing the task.""" + + num_sources_read: Optional[int] = None + """Number of sources read in processing the task.""" + + sources_read_sample: Optional[List[str]] = None + """A sample of URLs of sources read in processing the task.""" + + +class TaskRunProgressStatsEvent(BaseModel): + source_stats: TaskRunProgressStatsEventSourceStats + """Source stats for a task run.""" + + type: Literal["task_run.progress_stats"] + """Event type; always 'task_run.progress_stats'.""" + + +class TaskRunProgressMessageEvent(BaseModel): + message: str + """Progress update message.""" + + timestamp: Optional[str] = None + """Timestamp of the message.""" + + type: Literal[ + "task_run.progress_msg.plan", + "task_run.progress_msg.search", + "task_run.progress_msg.result", + "task_run.progress_msg.tool_call", + "task_run.progress_msg.exec_status", + ] + """Event type; always starts with 'task_run.progress_msg'.""" + + +TaskRunEventsResponse: TypeAlias = Annotated[ + Union[TaskRunProgressStatsEvent, TaskRunProgressMessageEvent, TaskRunEvent, ErrorEvent], + PropertyInfo(discriminator="type"), +] diff --git a/src/parallel/types/beta/task_run_result_params.py b/src/parallel/types/beta/task_run_result_params.py new file mode 100644 index 0000000..c48ef15 --- /dev/null +++ b/src/parallel/types/beta/task_run_result_params.py @@ -0,0 +1,18 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import List +from typing_extensions import Annotated, TypedDict + +from ..._utils import PropertyInfo +from .parallel_beta_param import ParallelBetaParam + +__all__ = ["TaskRunResultParams"] + + +class TaskRunResultParams(TypedDict, total=False): + api_timeout: Annotated[int, PropertyInfo(alias="timeout")] + + betas: Annotated[List[ParallelBetaParam], PropertyInfo(alias="parallel-beta")] + """Optional header to specify the beta version(s) to enable.""" diff --git a/src/parallel/types/beta/web_search_result.py b/src/parallel/types/beta/web_search_result.py new file mode 100644 index 0000000..cbc7d5e --- /dev/null +++ b/src/parallel/types/beta/web_search_result.py @@ -0,0 +1,18 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List + +from ..._models import BaseModel + +__all__ = ["WebSearchResult"] + + +class WebSearchResult(BaseModel): + excerpts: List[str] + """Text excerpts from the search result which are relevant to the request.""" + + title: str + """Title of the search result.""" + + url: str + """URL associated with the search result.""" diff --git a/src/parallel/types/beta/webhook.py b/src/parallel/types/beta/webhook.py new file mode 100644 index 0000000..afa1b5b --- /dev/null +++ b/src/parallel/types/beta/webhook.py @@ -0,0 +1,16 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
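# Sketch of consuming the event stream whose payload union (TaskRunEventsResponse)
# is defined above, using the async `events()` method from earlier in this patch.
# The `AsyncParallel` client name and the `client.beta.task_run` path are
# assumptions; the event `type` values come from the models above.
import asyncio

from parallel import AsyncParallel


async def watch(run_id: str) -> None:
    client = AsyncParallel()
    stream = await client.beta.task_run.events(run_id)
    async for event in stream:
        if event.type == "task_run.progress_stats":
            print("sources read:", event.source_stats.num_sources_read)
        elif event.type.startswith("task_run.progress_msg"):
            print("progress:", event.message)
        elif event.type == "task_run.state":
            print("state:", event.run.status)
        else:
            print("error:", event.error.message)


if __name__ == "__main__":
    asyncio.run(watch("your-run-id"))  # placeholder; substitute a real run ID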
+ +from typing import List, Optional +from typing_extensions import Literal + +from ..._models import BaseModel + +__all__ = ["Webhook"] + + +class Webhook(BaseModel): + url: str + """URL for the webhook.""" + + event_types: Optional[List[Literal["task_run.status"]]] = None + """Event types to send the webhook notifications for.""" diff --git a/src/parallel/types/beta/webhook_param.py b/src/parallel/types/beta/webhook_param.py new file mode 100644 index 0000000..b7e6cd6 --- /dev/null +++ b/src/parallel/types/beta/webhook_param.py @@ -0,0 +1,16 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import List +from typing_extensions import Literal, Required, TypedDict + +__all__ = ["WebhookParam"] + + +class WebhookParam(TypedDict, total=False): + url: Required[str] + """URL for the webhook.""" + + event_types: List[Literal["task_run.status"]] + """Event types to send the webhook notifications for.""" diff --git a/src/parallel/types/citation.py b/src/parallel/types/citation.py new file mode 100644 index 0000000..2a3abb5 --- /dev/null +++ b/src/parallel/types/citation.py @@ -0,0 +1,21 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional + +from .._models import BaseModel + +__all__ = ["Citation"] + + +class Citation(BaseModel): + url: str + """URL of the citation.""" + + excerpts: Optional[List[str]] = None + """Excerpts from the citation supporting the output. + + Only certain processors provide excerpts. + """ + + title: Optional[str] = None + """Title of the citation.""" diff --git a/src/parallel/types/field_basis.py b/src/parallel/types/field_basis.py new file mode 100644 index 0000000..de4d4a1 --- /dev/null +++ b/src/parallel/types/field_basis.py @@ -0,0 +1,25 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional + +from .._models import BaseModel +from .citation import Citation + +__all__ = ["FieldBasis"] + + +class FieldBasis(BaseModel): + field: str + """Name of the output field.""" + + reasoning: str + """Reasoning for the output field.""" + + citations: Optional[List[Citation]] = None + """List of citations supporting the output field.""" + + confidence: Optional[str] = None + """Confidence level for the output field. + + Only certain processors provide confidence levels. + """ diff --git a/src/parallel/types/json_schema.py b/src/parallel/types/json_schema.py new file mode 100644 index 0000000..7bff20b --- /dev/null +++ b/src/parallel/types/json_schema.py @@ -0,0 +1,16 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Dict, Optional +from typing_extensions import Literal + +from .._models import BaseModel + +__all__ = ["JsonSchema"] + + +class JsonSchema(BaseModel): + json_schema: Dict[str, object] + """A JSON Schema object. Only a subset of JSON Schema is supported.""" + + type: Optional[Literal["json"]] = None + """The type of schema being defined. 
Always `json`.""" diff --git a/src/parallel/types/json_schema_param.py b/src/parallel/types/json_schema_param.py index 6b04665..90bae27 100644 --- a/src/parallel/types/json_schema_param.py +++ b/src/parallel/types/json_schema_param.py @@ -2,13 +2,14 @@ from __future__ import annotations +from typing import Dict from typing_extensions import Literal, Required, TypedDict __all__ = ["JsonSchemaParam"] class JsonSchemaParam(TypedDict, total=False): - json_schema: Required[object] + json_schema: Required[Dict[str, object]] """A JSON Schema object. Only a subset of JSON Schema is supported.""" type: Literal["json"] diff --git a/src/parallel/types/shared/__init__.py b/src/parallel/types/shared/__init__.py new file mode 100644 index 0000000..c7a4d05 --- /dev/null +++ b/src/parallel/types/shared/__init__.py @@ -0,0 +1,6 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from .warning import Warning as Warning +from .error_object import ErrorObject as ErrorObject +from .source_policy import SourcePolicy as SourcePolicy +from .error_response import ErrorResponse as ErrorResponse diff --git a/src/parallel/types/shared/error_object.py b/src/parallel/types/shared/error_object.py new file mode 100644 index 0000000..52b9656 --- /dev/null +++ b/src/parallel/types/shared/error_object.py @@ -0,0 +1,18 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Dict, Optional + +from ..._models import BaseModel + +__all__ = ["ErrorObject"] + + +class ErrorObject(BaseModel): + message: str + """Human-readable message.""" + + ref_id: str + """Reference ID for the error.""" + + detail: Optional[Dict[str, object]] = None + """Optional detail supporting the error.""" diff --git a/src/parallel/types/shared/error_response.py b/src/parallel/types/shared/error_response.py new file mode 100644 index 0000000..4a72af6 --- /dev/null +++ b/src/parallel/types/shared/error_response.py @@ -0,0 +1,16 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing_extensions import Literal + +from ..._models import BaseModel +from .error_object import ErrorObject + +__all__ = ["ErrorResponse"] + + +class ErrorResponse(BaseModel): + error: ErrorObject + """An error message.""" + + type: Literal["error"] + """Always 'error'.""" diff --git a/src/parallel/types/shared/source_policy.py b/src/parallel/types/shared/source_policy.py new file mode 100644 index 0000000..50c38ca --- /dev/null +++ b/src/parallel/types/shared/source_policy.py @@ -0,0 +1,21 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional + +from ..._models import BaseModel + +__all__ = ["SourcePolicy"] + + +class SourcePolicy(BaseModel): + exclude_domains: Optional[List[str]] = None + """List of domains to exclude from results. + + If specified, sources from these domains will be excluded. + """ + + include_domains: Optional[List[str]] = None + """List of domains to restrict the results to. + + If specified, only sources from these domains will be included. + """ diff --git a/src/parallel/types/shared/warning.py b/src/parallel/types/shared/warning.py new file mode 100644 index 0000000..84a2b84 --- /dev/null +++ b/src/parallel/types/shared/warning.py @@ -0,0 +1,22 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
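# Sketch of the tightened JsonSchemaParam above (json_schema is now a
# Dict[str, object]) used inside a task_spec, together with a shared SourcePolicy
# dict. Client and resource names are assumptions, as in the earlier sketches.
from parallel import Parallel

client = Parallel()
run = client.task_run.create(
    input="Find the founding year and headquarters of Parallel Web Systems",
    processor="base",
    task_spec={
        "output_schema": {
            "type": "json",
            "json_schema": {
                "type": "object",
                "properties": {
                    "founded_year": {"type": "integer"},
                    "headquarters": {"type": "string"},
                },
                "required": ["founded_year", "headquarters"],
                "additionalProperties": False,
            },
        }
    },
    source_policy={"exclude_domains": ["reddit.com"]},
)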
+ +from typing import Dict, Optional +from typing_extensions import Literal + +from ..._models import BaseModel + +__all__ = ["Warning"] + + +class Warning(BaseModel): + message: str + """Human-readable message.""" + + type: Literal["spec_validation_warning", "input_validation_warning", "warning"] + """Type of warning. + + Note that adding new warning types is considered a backward-compatible change. + """ + + detail: Optional[Dict[str, object]] = None + """Optional detail supporting the warning.""" diff --git a/src/parallel/types/shared_params/__init__.py b/src/parallel/types/shared_params/__init__.py new file mode 100644 index 0000000..1ab16e6 --- /dev/null +++ b/src/parallel/types/shared_params/__init__.py @@ -0,0 +1,3 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from .source_policy import SourcePolicy as SourcePolicy diff --git a/src/parallel/types/shared_params/source_policy.py b/src/parallel/types/shared_params/source_policy.py new file mode 100644 index 0000000..0a5d5ec --- /dev/null +++ b/src/parallel/types/shared_params/source_policy.py @@ -0,0 +1,22 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import List +from typing_extensions import TypedDict + +__all__ = ["SourcePolicy"] + + +class SourcePolicy(TypedDict, total=False): + exclude_domains: List[str] + """List of domains to exclude from results. + + If specified, sources from these domains will be excluded. + """ + + include_domains: List[str] + """List of domains to restrict the results to. + + If specified, only sources from these domains will be included. + """ diff --git a/src/parallel/types/task_run.py b/src/parallel/types/task_run.py index c4a3693..a4fc96d 100644 --- a/src/parallel/types/task_run.py +++ b/src/parallel/types/task_run.py @@ -3,23 +3,16 @@ from typing import Dict, List, Union, Optional from typing_extensions import Literal -from .._models import BaseModel - -__all__ = ["TaskRun", "Warning"] - - -class Warning(BaseModel): - message: str - """Human-readable message.""" - - type: str - """Type of warning. +from pydantic import Field as FieldInfo - Note that adding new warning types is considered a backward-compatible change. - """ +from .._models import BaseModel +from .shared.warning import Warning +from .shared.error_object import ErrorObject - detail: Optional[object] = None - """Optional detail supporting the warning.""" +__all__ = [ + "TaskRun", + "Warning" # for backwards compatibility with v0.1.3 +] class TaskRun(BaseModel): @@ -27,9 +20,9 @@ class TaskRun(BaseModel): """Timestamp of the creation of the task, as an RFC 3339 string.""" is_active: bool - """Whether the run is currently active; i.e. + """Whether the run is currently active, i.e. - status is one of {'running', 'queued', 'cancelling'}. + status is one of {'cancelling', 'queued', 'running'}. 
""" modified_at: Optional[str] = None @@ -44,8 +37,14 @@ class TaskRun(BaseModel): status: Literal["queued", "action_required", "running", "completed", "failed", "cancelling", "cancelled"] """Status of the run.""" + error: Optional[ErrorObject] = None + """An error message.""" + metadata: Optional[Dict[str, Union[str, float, bool]]] = None """User-provided metadata stored with the run.""" + task_group_id: Optional[str] = FieldInfo(alias="taskgroup_id", default=None) + """ID of the taskgroup to which the run belongs.""" + warnings: Optional[List[Warning]] = None - """Warnings for the run.""" + """Warnings for the run, if any.""" diff --git a/src/parallel/types/task_run_create_params.py b/src/parallel/types/task_run_create_params.py index a6f39e0..6c81803 100644 --- a/src/parallel/types/task_run_create_params.py +++ b/src/parallel/types/task_run_create_params.py @@ -6,12 +6,13 @@ from typing_extensions import Required, TypedDict from .task_spec_param import TaskSpecParam +from .shared_params.source_policy import SourcePolicy __all__ = ["TaskRunCreateParams"] class TaskRunCreateParams(TypedDict, total=False): - input: Required[Union[str, object]] + input: Required[Union[str, Dict[str, object]]] """Input to the task, either text or a JSON object.""" processor: Required[str] @@ -24,9 +25,17 @@ class TaskRunCreateParams(TypedDict, total=False): respectively. """ + source_policy: Optional[SourcePolicy] + """Source policy for web search results. + + This policy governs which sources are allowed/disallowed in results. + """ + task_spec: Optional[TaskSpecParam] """Specification for a task. - For convenience we allow bare strings as input or output schemas, which is - equivalent to a text schema with the same description. + Auto output schemas can be specified by setting `output_schema={"type":"auto"}`. + Not specifying a TaskSpec is the same as setting an auto output schema. + + For convenience bare strings are also accepted as input or output schemas. """ diff --git a/src/parallel/types/task_run_json_output.py b/src/parallel/types/task_run_json_output.py new file mode 100644 index 0000000..ee63e60 --- /dev/null +++ b/src/parallel/types/task_run_json_output.py @@ -0,0 +1,46 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Dict, List, Optional +from typing_extensions import Literal + +from .._models import BaseModel +from .field_basis import FieldBasis + +__all__ = ["TaskRunJsonOutput"] + + +class TaskRunJsonOutput(BaseModel): + basis: List[FieldBasis] + """Basis for each top-level field in the JSON output.""" + + content: Dict[str, object] + """ + Output from the task as a native JSON object, as determined by the output schema + of the task spec. + """ + + type: Literal["json"] + """ + The type of output being returned, as determined by the output schema of the + task spec. + """ + + beta_fields: Optional[Dict[str, object]] = None + """Additional fields from beta features used in this task run. + + When beta features are specified during both task run creation and result + retrieval, this field will be empty and instead the relevant beta attributes + will be directly included in the `BetaTaskRunJsonOutput` or corresponding output + type. However, if beta features were specified during task run creation but not + during result retrieval, this field will contain the dump of fields from those + beta features. 
Each key represents the beta feature version (one amongst + parallel-beta headers) and the values correspond to the beta feature attributes, + if any. For now, only MCP server beta features have attributes. For example, + `{mcp-server-2025-07-17: [{'server_name':'mcp_server', 'tool_call_id': 'tc_123', ...}]}}` + """ + + output_schema: Optional[Dict[str, object]] = None + """Output schema for the Task Run. + + Populated only if the task was executed with an auto schema. + """ diff --git a/src/parallel/types/task_run_result.py b/src/parallel/types/task_run_result.py index 81797f0..7b52026 100644 --- a/src/parallel/types/task_run_result.py +++ b/src/parallel/types/task_run_result.py @@ -1,10 +1,15 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. -from typing import List, Union, Optional -from typing_extensions import Literal, TypeAlias +from typing import Union +from typing_extensions import Annotated, TypeAlias +from .._utils import PropertyInfo from .._models import BaseModel +from .citation import Citation from .task_run import TaskRun +from .field_basis import FieldBasis +from .task_run_json_output import TaskRunJsonOutput +from .task_run_text_output import TaskRunTextOutput __all__ = [ "TaskRunResult", @@ -17,101 +22,26 @@ "OutputTaskRunJsonOutputBasisCitation", ] +OutputTaskRunTextOutputBasis = FieldBasis # for backwards compatibility with v0.1.3 +"""This is deprecated, `FieldBasis` should be used instead""" -class OutputTaskRunTextOutputBasisCitation(BaseModel): - url: str - """URL of the citation.""" +OutputTaskRunTextOutputBasisCitation = Citation # for backwards compatibility with v0.1.3 +"""This is deprecated, `Citation` should be used instead""" - excerpts: Optional[List[str]] = None - """Excerpts from the citation supporting the output. +OutputTaskRunJsonOutputBasis = FieldBasis # for backwards compatibility with v0.1.3 +"""This is deprecated, `FieldBasis` should be used instead""" - Only certain processors provide excerpts. - """ +OutputTaskRunJsonOutputBasisCitation = Citation # for backwards compatibility with v0.1.3 +"""This is deprecated, `Citation` should be used instead""" - title: Optional[str] = None - """Title of the citation.""" +OutputTaskRunTextOutput = TaskRunTextOutput # for backwards compatibility with v0.1.3 +"""This is deprecated, `TaskRunTextOutput` should be used instead""" +OutputTaskRunJsonOutput = TaskRunJsonOutput # for backwards compatibility with v0.1.3 +"""This is deprecated, `TaskRunJsonOutput` should be used instead""" -class OutputTaskRunTextOutputBasis(BaseModel): - field: str - """Name of the output field.""" - reasoning: str - """Reasoning for the output field.""" - - citations: Optional[List[OutputTaskRunTextOutputBasisCitation]] = None - """List of citations supporting the output field.""" - - confidence: Optional[str] = None - """Confidence level for the output field. - - Only certain processors provide confidence levels. - """ - - -class OutputTaskRunTextOutput(BaseModel): - basis: List[OutputTaskRunTextOutputBasis] - """Basis for the output. The basis has a single field 'output'.""" - - content: str - """Text output from the task.""" - - type: Literal["text"] - """ - The type of output being returned, as determined by the output schema of the - task spec. - """ - - -class OutputTaskRunJsonOutputBasisCitation(BaseModel): - url: str - """URL of the citation.""" - - excerpts: Optional[List[str]] = None - """Excerpts from the citation supporting the output. - - Only certain processors provide excerpts. 
- """ - - title: Optional[str] = None - """Title of the citation.""" - - -class OutputTaskRunJsonOutputBasis(BaseModel): - field: str - """Name of the output field.""" - - reasoning: str - """Reasoning for the output field.""" - - citations: Optional[List[OutputTaskRunJsonOutputBasisCitation]] = None - """List of citations supporting the output field.""" - - confidence: Optional[str] = None - """Confidence level for the output field. - - Only certain processors provide confidence levels. - """ - - -class OutputTaskRunJsonOutput(BaseModel): - basis: List[OutputTaskRunJsonOutputBasis] - """Basis for each top-level field in the JSON output.""" - - content: object - """ - Output from the task as a native JSON object, as determined by the output schema - of the task spec. - """ - - type: Literal["json"] - """ - The type of output being returned, as determined by the output schema of the - task spec. - """ - - -Output: TypeAlias = Union[OutputTaskRunTextOutput, OutputTaskRunJsonOutput] +Output: TypeAlias = Annotated[Union[TaskRunTextOutput, TaskRunJsonOutput], PropertyInfo(discriminator="type")] class TaskRunResult(BaseModel): @@ -119,4 +49,4 @@ class TaskRunResult(BaseModel): """Output from the task conforming to the output schema.""" run: TaskRun - """Status of a task.""" + """Status of a task run.""" diff --git a/src/parallel/types/task_run_text_output.py b/src/parallel/types/task_run_text_output.py new file mode 100644 index 0000000..ef38d7e --- /dev/null +++ b/src/parallel/types/task_run_text_output.py @@ -0,0 +1,37 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Dict, List, Optional +from typing_extensions import Literal + +from .._models import BaseModel +from .field_basis import FieldBasis + +__all__ = ["TaskRunTextOutput"] + + +class TaskRunTextOutput(BaseModel): + basis: List[FieldBasis] + """Basis for the output. The basis has a single field 'output'.""" + + content: str + """Text output from the task.""" + + type: Literal["text"] + """ + The type of output being returned, as determined by the output schema of the + task spec. + """ + + beta_fields: Optional[Dict[str, object]] = None + """Additional fields from beta features used in this task run. + + When beta features are specified during both task run creation and result + retrieval, this field will be empty and instead the relevant beta attributes + will be directly included in the `BetaTaskRunJsonOutput` or corresponding output + type. However, if beta features were specified during task run creation but not + during result retrieval, this field will contain the dump of fields from those + beta features. Each key represents the beta feature version (one amongst + parallel-beta headers) and the values correspond to the beta feature attributes, + if any. For now, only MCP server beta features have attributes. For example, + `{mcp-server-2025-07-17: [{'server_name':'mcp_server', 'tool_call_id': 'tc_123', ...}]}}` + """ diff --git a/src/parallel/types/task_spec.py b/src/parallel/types/task_spec.py new file mode 100644 index 0000000..fc7aefe --- /dev/null +++ b/src/parallel/types/task_spec.py @@ -0,0 +1,31 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import Union, Optional +from typing_extensions import TypeAlias + +from .._models import BaseModel +from .auto_schema import AutoSchema +from .json_schema import JsonSchema +from .text_schema import TextSchema + +__all__ = ["TaskSpec", "OutputSchema", "InputSchema"] + +OutputSchema: TypeAlias = Union[JsonSchema, TextSchema, AutoSchema, str] + +InputSchema: TypeAlias = Union[str, JsonSchema, TextSchema, None] + + +class TaskSpec(BaseModel): + output_schema: OutputSchema + """JSON schema or text fully describing the desired output from the task. + + Descriptions of output fields will determine the form and content of the + response. A bare string is equivalent to a text schema with the same + description. + """ + + input_schema: Optional[InputSchema] = None + """Optional JSON schema or text description of expected input to the task. + + A bare string is equivalent to a text schema with the same description. + """ diff --git a/src/parallel/types/task_spec_param.py b/src/parallel/types/task_spec_param.py index 8ab84ed..ae434bf 100644 --- a/src/parallel/types/task_spec_param.py +++ b/src/parallel/types/task_spec_param.py @@ -7,14 +7,15 @@ from pydantic import BaseModel +from .auto_schema_param import AutoSchemaParam from .json_schema_param import JsonSchemaParam from .text_schema_param import TextSchemaParam __all__ = ["TaskSpecParam", "OutputSchema", "InputSchema"] -OutputSchema: TypeAlias = Union[JsonSchemaParam, TextSchemaParam, str] +OutputSchema: TypeAlias = Union[JsonSchemaParam, TextSchemaParam, AutoSchemaParam, str] -InputSchema: TypeAlias = Union[JsonSchemaParam, TextSchemaParam, str] +InputSchema: TypeAlias = Union[str, JsonSchemaParam, TextSchemaParam] OutputT = TypeVar("OutputT", bound=BaseModel) diff --git a/src/parallel/types/text_schema.py b/src/parallel/types/text_schema.py new file mode 100644 index 0000000..22da813 --- /dev/null +++ b/src/parallel/types/text_schema.py @@ -0,0 +1,16 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from typing_extensions import Literal + +from .._models import BaseModel + +__all__ = ["TextSchema"] + + +class TextSchema(BaseModel): + description: str + """A text description of the desired output from the task.""" + + type: Optional[Literal["text"]] = None + """The type of schema being defined. Always `text`.""" diff --git a/tests/api_resources/beta/__init__.py b/tests/api_resources/beta/__init__.py new file mode 100644 index 0000000..fd8019a --- /dev/null +++ b/tests/api_resources/beta/__init__.py @@ -0,0 +1 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. diff --git a/tests/api_resources/beta/test_task_group.py b/tests/api_resources/beta/test_task_group.py new file mode 100644 index 0000000..150452f --- /dev/null +++ b/tests/api_resources/beta/test_task_group.py @@ -0,0 +1,613 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from parallel import Parallel, AsyncParallel +from tests.utils import assert_matches_type +from parallel.types.beta import ( + TaskGroup, + TaskGroupRunResponse, +) + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestTaskGroup: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @parametrize + def test_method_create(self, client: Parallel) -> None: + task_group = client.beta.task_group.create() + assert_matches_type(TaskGroup, task_group, path=["response"]) + + @parametrize + def test_method_create_with_all_params(self, client: Parallel) -> None: + task_group = client.beta.task_group.create( + metadata={"foo": "string"}, + ) + assert_matches_type(TaskGroup, task_group, path=["response"]) + + @parametrize + def test_raw_response_create(self, client: Parallel) -> None: + response = client.beta.task_group.with_raw_response.create() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + task_group = response.parse() + assert_matches_type(TaskGroup, task_group, path=["response"]) + + @parametrize + def test_streaming_response_create(self, client: Parallel) -> None: + with client.beta.task_group.with_streaming_response.create() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + task_group = response.parse() + assert_matches_type(TaskGroup, task_group, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @parametrize + def test_method_retrieve(self, client: Parallel) -> None: + task_group = client.beta.task_group.retrieve( + "taskgroup_id", + ) + assert_matches_type(TaskGroup, task_group, path=["response"]) + + @parametrize + def test_raw_response_retrieve(self, client: Parallel) -> None: + response = client.beta.task_group.with_raw_response.retrieve( + "taskgroup_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + task_group = response.parse() + assert_matches_type(TaskGroup, task_group, path=["response"]) + + @parametrize + def test_streaming_response_retrieve(self, client: Parallel) -> None: + with client.beta.task_group.with_streaming_response.retrieve( + "taskgroup_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + task_group = response.parse() + assert_matches_type(TaskGroup, task_group, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @parametrize + def test_path_params_retrieve(self, client: Parallel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"): + client.beta.task_group.with_raw_response.retrieve( + "", + ) + + @parametrize + def test_method_add_runs(self, client: Parallel) -> None: + task_group = client.beta.task_group.add_runs( + task_group_id="taskgroup_id", + inputs=[ + { + "input": "What was the GDP of France in 2023?", + "processor": "base", + } + ], + ) + assert_matches_type(TaskGroupRunResponse, task_group, path=["response"]) + + @parametrize + def test_method_add_runs_with_all_params(self, client: Parallel) -> None: + task_group = client.beta.task_group.add_runs( + task_group_id="taskgroup_id", + inputs=[ + { + "input": "What was the GDP of France in 2023?", + "processor": "base", + 
"enable_events": True, + "mcp_servers": [ + { + "name": "name", + "url": "url", + "allowed_tools": ["string"], + "headers": {"foo": "string"}, + "type": "url", + } + ], + "metadata": {"foo": "string"}, + "source_policy": { + "exclude_domains": ["string"], + "include_domains": ["string"], + }, + "task_spec": { + "output_schema": { + "json_schema": { + "additionalProperties": "bar", + "properties": "bar", + "required": "bar", + "type": "bar", + }, + "type": "json", + }, + "input_schema": "string", + }, + "webhook": { + "url": "url", + "event_types": ["task_run.status"], + }, + } + ], + default_task_spec={ + "output_schema": { + "json_schema": { + "additionalProperties": "bar", + "properties": "bar", + "required": "bar", + "type": "bar", + }, + "type": "json", + }, + "input_schema": "string", + }, + betas=["mcp-server-2025-07-17"], + ) + assert_matches_type(TaskGroupRunResponse, task_group, path=["response"]) + + @parametrize + def test_raw_response_add_runs(self, client: Parallel) -> None: + response = client.beta.task_group.with_raw_response.add_runs( + task_group_id="taskgroup_id", + inputs=[ + { + "input": "What was the GDP of France in 2023?", + "processor": "base", + } + ], + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + task_group = response.parse() + assert_matches_type(TaskGroupRunResponse, task_group, path=["response"]) + + @parametrize + def test_streaming_response_add_runs(self, client: Parallel) -> None: + with client.beta.task_group.with_streaming_response.add_runs( + task_group_id="taskgroup_id", + inputs=[ + { + "input": "What was the GDP of France in 2023?", + "processor": "base", + } + ], + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + task_group = response.parse() + assert_matches_type(TaskGroupRunResponse, task_group, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @parametrize + def test_path_params_add_runs(self, client: Parallel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"): + client.beta.task_group.with_raw_response.add_runs( + task_group_id="", + inputs=[ + { + "input": "What was the GDP of France in 2023?", + "processor": "base", + } + ], + ) + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_method_events(self, client: Parallel) -> None: + task_group_stream = client.beta.task_group.events( + task_group_id="taskgroup_id", + ) + task_group_stream.response.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_method_events_with_all_params(self, client: Parallel) -> None: + task_group_stream = client.beta.task_group.events( + task_group_id="taskgroup_id", + last_event_id="last_event_id", + api_timeout=0, + ) + task_group_stream.response.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_raw_response_events(self, client: Parallel) -> None: + response = client.beta.task_group.with_raw_response.events( + task_group_id="taskgroup_id", + ) + + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + stream = response.parse() + stream.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_streaming_response_events(self, client: Parallel) -> None: + with 
client.beta.task_group.with_streaming_response.events( + task_group_id="taskgroup_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + stream = response.parse() + stream.close() + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_path_params_events(self, client: Parallel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"): + client.beta.task_group.with_raw_response.events( + task_group_id="", + ) + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_method_get_runs(self, client: Parallel) -> None: + task_group_stream = client.beta.task_group.get_runs( + task_group_id="taskgroup_id", + ) + task_group_stream.response.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_method_get_runs_with_all_params(self, client: Parallel) -> None: + task_group_stream = client.beta.task_group.get_runs( + task_group_id="taskgroup_id", + include_input=True, + include_output=True, + last_event_id="last_event_id", + status="queued", + ) + task_group_stream.response.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_raw_response_get_runs(self, client: Parallel) -> None: + response = client.beta.task_group.with_raw_response.get_runs( + task_group_id="taskgroup_id", + ) + + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + stream = response.parse() + stream.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_streaming_response_get_runs(self, client: Parallel) -> None: + with client.beta.task_group.with_streaming_response.get_runs( + task_group_id="taskgroup_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + stream = response.parse() + stream.close() + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_path_params_get_runs(self, client: Parallel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"): + client.beta.task_group.with_raw_response.get_runs( + task_group_id="", + ) + + +class TestAsyncTaskGroup: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @parametrize + async def test_method_create(self, async_client: AsyncParallel) -> None: + task_group = await async_client.beta.task_group.create() + assert_matches_type(TaskGroup, task_group, path=["response"]) + + @parametrize + async def test_method_create_with_all_params(self, async_client: AsyncParallel) -> None: + task_group = await async_client.beta.task_group.create( + metadata={"foo": "string"}, + ) + assert_matches_type(TaskGroup, task_group, path=["response"]) + + @parametrize + async def test_raw_response_create(self, async_client: AsyncParallel) -> None: + response = await async_client.beta.task_group.with_raw_response.create() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + task_group = await response.parse() + 
assert_matches_type(TaskGroup, task_group, path=["response"]) + + @parametrize + async def test_streaming_response_create(self, async_client: AsyncParallel) -> None: + async with async_client.beta.task_group.with_streaming_response.create() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + task_group = await response.parse() + assert_matches_type(TaskGroup, task_group, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @parametrize + async def test_method_retrieve(self, async_client: AsyncParallel) -> None: + task_group = await async_client.beta.task_group.retrieve( + "taskgroup_id", + ) + assert_matches_type(TaskGroup, task_group, path=["response"]) + + @parametrize + async def test_raw_response_retrieve(self, async_client: AsyncParallel) -> None: + response = await async_client.beta.task_group.with_raw_response.retrieve( + "taskgroup_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + task_group = await response.parse() + assert_matches_type(TaskGroup, task_group, path=["response"]) + + @parametrize + async def test_streaming_response_retrieve(self, async_client: AsyncParallel) -> None: + async with async_client.beta.task_group.with_streaming_response.retrieve( + "taskgroup_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + task_group = await response.parse() + assert_matches_type(TaskGroup, task_group, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @parametrize + async def test_path_params_retrieve(self, async_client: AsyncParallel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"): + await async_client.beta.task_group.with_raw_response.retrieve( + "", + ) + + @parametrize + async def test_method_add_runs(self, async_client: AsyncParallel) -> None: + task_group = await async_client.beta.task_group.add_runs( + task_group_id="taskgroup_id", + inputs=[ + { + "input": "What was the GDP of France in 2023?", + "processor": "base", + } + ], + ) + assert_matches_type(TaskGroupRunResponse, task_group, path=["response"]) + + @parametrize + async def test_method_add_runs_with_all_params(self, async_client: AsyncParallel) -> None: + task_group = await async_client.beta.task_group.add_runs( + task_group_id="taskgroup_id", + inputs=[ + { + "input": "What was the GDP of France in 2023?", + "processor": "base", + "enable_events": True, + "mcp_servers": [ + { + "name": "name", + "url": "url", + "allowed_tools": ["string"], + "headers": {"foo": "string"}, + "type": "url", + } + ], + "metadata": {"foo": "string"}, + "source_policy": { + "exclude_domains": ["string"], + "include_domains": ["string"], + }, + "task_spec": { + "output_schema": { + "json_schema": { + "additionalProperties": "bar", + "properties": "bar", + "required": "bar", + "type": "bar", + }, + "type": "json", + }, + "input_schema": "string", + }, + "webhook": { + "url": "url", + "event_types": ["task_run.status"], + }, + } + ], + default_task_spec={ + "output_schema": { + "json_schema": { + "additionalProperties": "bar", + "properties": "bar", + "required": "bar", + "type": "bar", + }, + "type": "json", + }, + "input_schema": "string", + }, + betas=["mcp-server-2025-07-17"], + ) + assert_matches_type(TaskGroupRunResponse, task_group, path=["response"]) + + @parametrize + async def 
test_raw_response_add_runs(self, async_client: AsyncParallel) -> None: + response = await async_client.beta.task_group.with_raw_response.add_runs( + task_group_id="taskgroup_id", + inputs=[ + { + "input": "What was the GDP of France in 2023?", + "processor": "base", + } + ], + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + task_group = await response.parse() + assert_matches_type(TaskGroupRunResponse, task_group, path=["response"]) + + @parametrize + async def test_streaming_response_add_runs(self, async_client: AsyncParallel) -> None: + async with async_client.beta.task_group.with_streaming_response.add_runs( + task_group_id="taskgroup_id", + inputs=[ + { + "input": "What was the GDP of France in 2023?", + "processor": "base", + } + ], + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + task_group = await response.parse() + assert_matches_type(TaskGroupRunResponse, task_group, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @parametrize + async def test_path_params_add_runs(self, async_client: AsyncParallel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"): + await async_client.beta.task_group.with_raw_response.add_runs( + task_group_id="", + inputs=[ + { + "input": "What was the GDP of France in 2023?", + "processor": "base", + } + ], + ) + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_method_events(self, async_client: AsyncParallel) -> None: + task_group_stream = await async_client.beta.task_group.events( + task_group_id="taskgroup_id", + ) + await task_group_stream.response.aclose() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_method_events_with_all_params(self, async_client: AsyncParallel) -> None: + task_group_stream = await async_client.beta.task_group.events( + task_group_id="taskgroup_id", + last_event_id="last_event_id", + api_timeout=0, + ) + await task_group_stream.response.aclose() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_raw_response_events(self, async_client: AsyncParallel) -> None: + response = await async_client.beta.task_group.with_raw_response.events( + task_group_id="taskgroup_id", + ) + + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + stream = await response.parse() + await stream.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_streaming_response_events(self, async_client: AsyncParallel) -> None: + async with async_client.beta.task_group.with_streaming_response.events( + task_group_id="taskgroup_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + stream = await response.parse() + await stream.close() + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_path_params_events(self, async_client: AsyncParallel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"): + await async_client.beta.task_group.with_raw_response.events( + task_group_id="", + ) + + @pytest.mark.skip(reason="Prism 
doesn't support text/event-stream responses") + @parametrize + async def test_method_get_runs(self, async_client: AsyncParallel) -> None: + task_group_stream = await async_client.beta.task_group.get_runs( + task_group_id="taskgroup_id", + ) + await task_group_stream.response.aclose() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_method_get_runs_with_all_params(self, async_client: AsyncParallel) -> None: + task_group_stream = await async_client.beta.task_group.get_runs( + task_group_id="taskgroup_id", + include_input=True, + include_output=True, + last_event_id="last_event_id", + status="queued", + ) + await task_group_stream.response.aclose() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_raw_response_get_runs(self, async_client: AsyncParallel) -> None: + response = await async_client.beta.task_group.with_raw_response.get_runs( + task_group_id="taskgroup_id", + ) + + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + stream = await response.parse() + await stream.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_streaming_response_get_runs(self, async_client: AsyncParallel) -> None: + async with async_client.beta.task_group.with_streaming_response.get_runs( + task_group_id="taskgroup_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + stream = await response.parse() + await stream.close() + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_path_params_get_runs(self, async_client: AsyncParallel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"): + await async_client.beta.task_group.with_raw_response.get_runs( + task_group_id="", + ) diff --git a/tests/api_resources/beta/test_task_run.py b/tests/api_resources/beta/test_task_run.py new file mode 100644 index 0000000..7a188f6 --- /dev/null +++ b/tests/api_resources/beta/test_task_run.py @@ -0,0 +1,349 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from parallel import Parallel, AsyncParallel +from tests.utils import assert_matches_type +from parallel.types import TaskRun +from parallel.types.beta import BetaTaskRunResult + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestTaskRun: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @parametrize + def test_method_create(self, client: Parallel) -> None: + task_run = client.beta.task_run.create( + input="What was the GDP of France in 2023?", + processor="base", + ) + assert_matches_type(TaskRun, task_run, path=["response"]) + + @parametrize + def test_method_create_with_all_params(self, client: Parallel) -> None: + task_run = client.beta.task_run.create( + input="What was the GDP of France in 2023?", + processor="base", + enable_events=True, + mcp_servers=[ + { + "name": "name", + "url": "url", + "allowed_tools": ["string"], + "headers": {"foo": "string"}, + "type": "url", + } + ], + metadata={"foo": "string"}, + source_policy={ + "exclude_domains": ["string"], + "include_domains": ["string"], + }, + task_spec={ + "output_schema": { + "json_schema": { + "additionalProperties": "bar", + "properties": "bar", + "required": "bar", + "type": "bar", + }, + "type": "json", + }, + "input_schema": "string", + }, + webhook={ + "url": "url", + "event_types": ["task_run.status"], + }, + betas=["mcp-server-2025-07-17"], + ) + assert_matches_type(TaskRun, task_run, path=["response"]) + + @parametrize + def test_raw_response_create(self, client: Parallel) -> None: + response = client.beta.task_run.with_raw_response.create( + input="What was the GDP of France in 2023?", + processor="base", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + task_run = response.parse() + assert_matches_type(TaskRun, task_run, path=["response"]) + + @parametrize + def test_streaming_response_create(self, client: Parallel) -> None: + with client.beta.task_run.with_streaming_response.create( + input="What was the GDP of France in 2023?", + processor="base", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + task_run = response.parse() + assert_matches_type(TaskRun, task_run, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_method_events(self, client: Parallel) -> None: + task_run_stream = client.beta.task_run.events( + "run_id", + ) + task_run_stream.response.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_raw_response_events(self, client: Parallel) -> None: + response = client.beta.task_run.with_raw_response.events( + "run_id", + ) + + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + stream = response.parse() + stream.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_streaming_response_events(self, client: Parallel) -> None: + with client.beta.task_run.with_streaming_response.events( + "run_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + stream = response.parse() + stream.close() + + assert cast(Any, response.is_closed) is 
True + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_path_params_events(self, client: Parallel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `run_id` but received ''"): + client.beta.task_run.with_raw_response.events( + "", + ) + + @parametrize + def test_method_result(self, client: Parallel) -> None: + task_run = client.beta.task_run.result( + run_id="run_id", + ) + assert_matches_type(BetaTaskRunResult, task_run, path=["response"]) + + @parametrize + def test_method_result_with_all_params(self, client: Parallel) -> None: + task_run = client.beta.task_run.result( + run_id="run_id", + api_timeout=0, + betas=["mcp-server-2025-07-17"], + ) + assert_matches_type(BetaTaskRunResult, task_run, path=["response"]) + + @parametrize + def test_raw_response_result(self, client: Parallel) -> None: + response = client.beta.task_run.with_raw_response.result( + run_id="run_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + task_run = response.parse() + assert_matches_type(BetaTaskRunResult, task_run, path=["response"]) + + @parametrize + def test_streaming_response_result(self, client: Parallel) -> None: + with client.beta.task_run.with_streaming_response.result( + run_id="run_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + task_run = response.parse() + assert_matches_type(BetaTaskRunResult, task_run, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @parametrize + def test_path_params_result(self, client: Parallel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `run_id` but received ''"): + client.beta.task_run.with_raw_response.result( + run_id="", + ) + + +class TestAsyncTaskRun: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @parametrize + async def test_method_create(self, async_client: AsyncParallel) -> None: + task_run = await async_client.beta.task_run.create( + input="What was the GDP of France in 2023?", + processor="base", + ) + assert_matches_type(TaskRun, task_run, path=["response"]) + + @parametrize + async def test_method_create_with_all_params(self, async_client: AsyncParallel) -> None: + task_run = await async_client.beta.task_run.create( + input="What was the GDP of France in 2023?", + processor="base", + enable_events=True, + mcp_servers=[ + { + "name": "name", + "url": "url", + "allowed_tools": ["string"], + "headers": {"foo": "string"}, + "type": "url", + } + ], + metadata={"foo": "string"}, + source_policy={ + "exclude_domains": ["string"], + "include_domains": ["string"], + }, + task_spec={ + "output_schema": { + "json_schema": { + "additionalProperties": "bar", + "properties": "bar", + "required": "bar", + "type": "bar", + }, + "type": "json", + }, + "input_schema": "string", + }, + webhook={ + "url": "url", + "event_types": ["task_run.status"], + }, + betas=["mcp-server-2025-07-17"], + ) + assert_matches_type(TaskRun, task_run, path=["response"]) + + @parametrize + async def test_raw_response_create(self, async_client: AsyncParallel) -> None: + response = await async_client.beta.task_run.with_raw_response.create( + input="What was the GDP of France in 2023?", + processor="base", + ) + + assert response.is_closed is True + assert 
response.http_request.headers.get("X-Stainless-Lang") == "python" + task_run = await response.parse() + assert_matches_type(TaskRun, task_run, path=["response"]) + + @parametrize + async def test_streaming_response_create(self, async_client: AsyncParallel) -> None: + async with async_client.beta.task_run.with_streaming_response.create( + input="What was the GDP of France in 2023?", + processor="base", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + task_run = await response.parse() + assert_matches_type(TaskRun, task_run, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_method_events(self, async_client: AsyncParallel) -> None: + task_run_stream = await async_client.beta.task_run.events( + "run_id", + ) + await task_run_stream.response.aclose() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_raw_response_events(self, async_client: AsyncParallel) -> None: + response = await async_client.beta.task_run.with_raw_response.events( + "run_id", + ) + + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + stream = await response.parse() + await stream.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_streaming_response_events(self, async_client: AsyncParallel) -> None: + async with async_client.beta.task_run.with_streaming_response.events( + "run_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + stream = await response.parse() + await stream.close() + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_path_params_events(self, async_client: AsyncParallel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `run_id` but received ''"): + await async_client.beta.task_run.with_raw_response.events( + "", + ) + + @parametrize + async def test_method_result(self, async_client: AsyncParallel) -> None: + task_run = await async_client.beta.task_run.result( + run_id="run_id", + ) + assert_matches_type(BetaTaskRunResult, task_run, path=["response"]) + + @parametrize + async def test_method_result_with_all_params(self, async_client: AsyncParallel) -> None: + task_run = await async_client.beta.task_run.result( + run_id="run_id", + api_timeout=0, + betas=["mcp-server-2025-07-17"], + ) + assert_matches_type(BetaTaskRunResult, task_run, path=["response"]) + + @parametrize + async def test_raw_response_result(self, async_client: AsyncParallel) -> None: + response = await async_client.beta.task_run.with_raw_response.result( + run_id="run_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + task_run = await response.parse() + assert_matches_type(BetaTaskRunResult, task_run, path=["response"]) + + @parametrize + async def test_streaming_response_result(self, async_client: AsyncParallel) -> None: + async with async_client.beta.task_run.with_streaming_response.result( + run_id="run_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + task_run = await response.parse() + 
assert_matches_type(BetaTaskRunResult, task_run, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @parametrize + async def test_path_params_result(self, async_client: AsyncParallel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `run_id` but received ''"): + await async_client.beta.task_run.with_raw_response.result( + run_id="", + ) diff --git a/tests/api_resources/test_beta.py b/tests/api_resources/test_beta.py new file mode 100644 index 0000000..b733643 --- /dev/null +++ b/tests/api_resources/test_beta.py @@ -0,0 +1,104 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from parallel import Parallel, AsyncParallel +from tests.utils import assert_matches_type +from parallel.types.beta import SearchResult + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestBeta: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @parametrize + def test_method_search(self, client: Parallel) -> None: + beta = client.beta.search() + assert_matches_type(SearchResult, beta, path=["response"]) + + @parametrize + def test_method_search_with_all_params(self, client: Parallel) -> None: + beta = client.beta.search( + max_chars_per_result=0, + max_results=0, + objective="objective", + processor="base", + search_queries=["string"], + source_policy={ + "exclude_domains": ["string"], + "include_domains": ["string"], + }, + ) + assert_matches_type(SearchResult, beta, path=["response"]) + + @parametrize + def test_raw_response_search(self, client: Parallel) -> None: + response = client.beta.with_raw_response.search() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + beta = response.parse() + assert_matches_type(SearchResult, beta, path=["response"]) + + @parametrize + def test_streaming_response_search(self, client: Parallel) -> None: + with client.beta.with_streaming_response.search() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + beta = response.parse() + assert_matches_type(SearchResult, beta, path=["response"]) + + assert cast(Any, response.is_closed) is True + + +class TestAsyncBeta: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @parametrize + async def test_method_search(self, async_client: AsyncParallel) -> None: + beta = await async_client.beta.search() + assert_matches_type(SearchResult, beta, path=["response"]) + + @parametrize + async def test_method_search_with_all_params(self, async_client: AsyncParallel) -> None: + beta = await async_client.beta.search( + max_chars_per_result=0, + max_results=0, + objective="objective", + processor="base", + search_queries=["string"], + source_policy={ + "exclude_domains": ["string"], + "include_domains": ["string"], + }, + ) + assert_matches_type(SearchResult, beta, path=["response"]) + + @parametrize + async def test_raw_response_search(self, async_client: AsyncParallel) -> None: + response = await async_client.beta.with_raw_response.search() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + beta = await response.parse() + assert_matches_type(SearchResult, beta, 
path=["response"]) + + @parametrize + async def test_streaming_response_search(self, async_client: AsyncParallel) -> None: + async with async_client.beta.with_streaming_response.search() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + beta = await response.parse() + assert_matches_type(SearchResult, beta, path=["response"]) + + assert cast(Any, response.is_closed) is True diff --git a/tests/api_resources/test_task_run.py b/tests/api_resources/test_task_run.py index 871e3fa..096a73e 100644 --- a/tests/api_resources/test_task_run.py +++ b/tests/api_resources/test_task_run.py @@ -20,46 +20,32 @@ class TestTaskRun: @parametrize def test_method_create(self, client: Parallel) -> None: task_run = client.task_run.create( - input="France (2023)", - processor="processor", + input="What was the GDP of France in 2023?", + processor="base", ) assert_matches_type(TaskRun, task_run, path=["response"]) @parametrize def test_method_create_with_all_params(self, client: Parallel) -> None: task_run = client.task_run.create( - input="France (2023)", - processor="processor", + input="What was the GDP of France in 2023?", + processor="base", metadata={"foo": "string"}, + source_policy={ + "exclude_domains": ["string"], + "include_domains": ["string"], + }, task_spec={ "output_schema": { "json_schema": { - "additionalProperties": False, - "properties": { - "gdp": { - "description": "GDP in USD for the year, formatted like '$3.1 trillion (2023)'", - "type": "string", - } - }, - "required": ["gdp"], - "type": "object", - }, - "type": "json", - }, - "input_schema": { - "json_schema": { - "additionalProperties": False, - "properties": { - "gdp": { - "description": "GDP in USD for the year, formatted like '$3.1 trillion (2023)'", - "type": "string", - } - }, - "required": ["gdp"], - "type": "object", + "additionalProperties": "bar", + "properties": "bar", + "required": "bar", + "type": "bar", }, "type": "json", }, + "input_schema": "string", }, ) assert_matches_type(TaskRun, task_run, path=["response"]) @@ -67,8 +53,8 @@ def test_method_create_with_all_params(self, client: Parallel) -> None: @parametrize def test_raw_response_create(self, client: Parallel) -> None: response = client.task_run.with_raw_response.create( - input="France (2023)", - processor="processor", + input="What was the GDP of France in 2023?", + processor="base", ) assert response.is_closed is True @@ -79,8 +65,8 @@ def test_raw_response_create(self, client: Parallel) -> None: @parametrize def test_streaming_response_create(self, client: Parallel) -> None: with client.task_run.with_streaming_response.create( - input="France (2023)", - processor="processor", + input="What was the GDP of France in 2023?", + processor="base", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -183,46 +169,32 @@ class TestAsyncTaskRun: @parametrize async def test_method_create(self, async_client: AsyncParallel) -> None: task_run = await async_client.task_run.create( - input="France (2023)", - processor="processor", + input="What was the GDP of France in 2023?", + processor="base", ) assert_matches_type(TaskRun, task_run, path=["response"]) @parametrize async def test_method_create_with_all_params(self, async_client: AsyncParallel) -> None: task_run = await async_client.task_run.create( - input="France (2023)", - processor="processor", + input="What was the GDP of France in 2023?", + processor="base", metadata={"foo": 
"string"}, + source_policy={ + "exclude_domains": ["string"], + "include_domains": ["string"], + }, task_spec={ "output_schema": { "json_schema": { - "additionalProperties": False, - "properties": { - "gdp": { - "description": "GDP in USD for the year, formatted like '$3.1 trillion (2023)'", - "type": "string", - } - }, - "required": ["gdp"], - "type": "object", - }, - "type": "json", - }, - "input_schema": { - "json_schema": { - "additionalProperties": False, - "properties": { - "gdp": { - "description": "GDP in USD for the year, formatted like '$3.1 trillion (2023)'", - "type": "string", - } - }, - "required": ["gdp"], - "type": "object", + "additionalProperties": "bar", + "properties": "bar", + "required": "bar", + "type": "bar", }, "type": "json", }, + "input_schema": "string", }, ) assert_matches_type(TaskRun, task_run, path=["response"]) @@ -230,8 +202,8 @@ async def test_method_create_with_all_params(self, async_client: AsyncParallel) @parametrize async def test_raw_response_create(self, async_client: AsyncParallel) -> None: response = await async_client.task_run.with_raw_response.create( - input="France (2023)", - processor="processor", + input="What was the GDP of France in 2023?", + processor="base", ) assert response.is_closed is True @@ -242,8 +214,8 @@ async def test_raw_response_create(self, async_client: AsyncParallel) -> None: @parametrize async def test_streaming_response_create(self, async_client: AsyncParallel) -> None: async with async_client.task_run.with_streaming_response.create( - input="France (2023)", - processor="processor", + input="What was the GDP of France in 2023?", + processor="base", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" diff --git a/tests/test_client.py b/tests/test_client.py index 878365f..f3c8287 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -717,7 +717,9 @@ def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter, clien respx_mock.post("/v1/tasks/runs").mock(side_effect=httpx.TimeoutException("Test timeout error")) with pytest.raises(APITimeoutError): - client.task_run.with_streaming_response.create(input="France (2023)", processor="processor").__enter__() + client.task_run.with_streaming_response.create( + input="What was the GDP of France in 2023?", processor="base" + ).__enter__() assert _get_open_connections(self.client) == 0 @@ -727,7 +729,9 @@ def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter, client respx_mock.post("/v1/tasks/runs").mock(return_value=httpx.Response(500)) with pytest.raises(APIStatusError): - client.task_run.with_streaming_response.create(input="France (2023)", processor="processor").__enter__() + client.task_run.with_streaming_response.create( + input="What was the GDP of France in 2023?", processor="base" + ).__enter__() assert _get_open_connections(self.client) == 0 @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) @@ -756,7 +760,9 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: respx_mock.post("/v1/tasks/runs").mock(side_effect=retry_handler) - response = client.task_run.with_raw_response.create(input="France (2023)", processor="processor") + response = client.task_run.with_raw_response.create( + input="What was the GDP of France in 2023?", processor="base" + ) assert response.retries_taken == failures_before_success assert int(response.http_request.headers.get("x-stainless-retry-count")) == failures_before_success @@ -781,7 +787,9 @@ def 
retry_handler(_request: httpx.Request) -> httpx.Response: respx_mock.post("/v1/tasks/runs").mock(side_effect=retry_handler) response = client.task_run.with_raw_response.create( - input="France (2023)", processor="processor", extra_headers={"x-stainless-retry-count": Omit()} + input="What was the GDP of France in 2023?", + processor="base", + extra_headers={"x-stainless-retry-count": Omit()}, ) assert len(response.http_request.headers.get_list("x-stainless-retry-count")) == 0 @@ -806,7 +814,9 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: respx_mock.post("/v1/tasks/runs").mock(side_effect=retry_handler) response = client.task_run.with_raw_response.create( - input="France (2023)", processor="processor", extra_headers={"x-stainless-retry-count": "42"} + input="What was the GDP of France in 2023?", + processor="base", + extra_headers={"x-stainless-retry-count": "42"}, ) assert response.http_request.headers.get("x-stainless-retry-count") == "42" @@ -1539,7 +1549,7 @@ async def test_retrying_timeout_errors_doesnt_leak( with pytest.raises(APITimeoutError): await async_client.task_run.with_streaming_response.create( - input="France (2023)", processor="processor" + input="What was the GDP of France in 2023?", processor="base" ).__aenter__() assert _get_open_connections(self.client) == 0 @@ -1553,7 +1563,7 @@ async def test_retrying_status_errors_doesnt_leak( with pytest.raises(APIStatusError): await async_client.task_run.with_streaming_response.create( - input="France (2023)", processor="processor" + input="What was the GDP of France in 2023?", processor="base" ).__aenter__() assert _get_open_connections(self.client) == 0 @@ -1584,7 +1594,9 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: respx_mock.post("/v1/tasks/runs").mock(side_effect=retry_handler) - response = await client.task_run.with_raw_response.create(input="France (2023)", processor="processor") + response = await client.task_run.with_raw_response.create( + input="What was the GDP of France in 2023?", processor="base" + ) assert response.retries_taken == failures_before_success assert int(response.http_request.headers.get("x-stainless-retry-count")) == failures_before_success @@ -1610,7 +1622,9 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: respx_mock.post("/v1/tasks/runs").mock(side_effect=retry_handler) response = await client.task_run.with_raw_response.create( - input="France (2023)", processor="processor", extra_headers={"x-stainless-retry-count": Omit()} + input="What was the GDP of France in 2023?", + processor="base", + extra_headers={"x-stainless-retry-count": Omit()}, ) assert len(response.http_request.headers.get_list("x-stainless-retry-count")) == 0 @@ -1636,7 +1650,9 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: respx_mock.post("/v1/tasks/runs").mock(side_effect=retry_handler) response = await client.task_run.with_raw_response.create( - input="France (2023)", processor="processor", extra_headers={"x-stainless-retry-count": "42"} + input="What was the GDP of France in 2023?", + processor="base", + extra_headers={"x-stainless-retry-count": "42"}, ) assert response.http_request.headers.get("x-stainless-retry-count") == "42" From b9abf3c8b0e22b260149f01b1ef608924eefe735 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sun, 31 Aug 2025 21:55:19 +0000 Subject: [PATCH 28/32] feat(api): update via SDK Studio --- .stats.yml | 6 +- api.md | 4 +- src/parallel/_client.py | 9 - 
src/parallel/resources/__init__.py | 14 - src/parallel/resources/beta/__init__.py | 47 -- src/parallel/resources/beta/beta.py | 301 --------- src/parallel/resources/beta/task_group.py | 632 ------------------ src/parallel/resources/beta/task_run.py | 499 -------------- src/parallel/resources/task_run.py | 53 +- src/parallel/types/__init__.py | 13 +- src/parallel/types/auto_schema.py | 13 - src/parallel/types/auto_schema_param.py | 12 - src/parallel/types/beta/__init__.py | 27 - src/parallel/types/beta/beta_run_input.py | 63 -- .../types/beta/beta_run_input_param.py | 65 -- src/parallel/types/beta/beta_search_params.py | 47 -- .../types/beta/beta_task_run_result.py | 74 -- src/parallel/types/beta/error_event.py | 16 - src/parallel/types/beta/mcp_server.py | 25 - src/parallel/types/beta/mcp_server_param.py | 25 - src/parallel/types/beta/mcp_tool_call.py | 27 - .../types/beta/parallel_beta_param.py | 12 - src/parallel/types/beta/search_result.py | 16 - src/parallel/types/beta/task_group.py | 24 - .../types/beta/task_group_add_runs_params.py | 30 - .../types/beta/task_group_create_params.py | 13 - .../types/beta/task_group_events_params.py | 16 - .../types/beta/task_group_events_response.py | 28 - .../types/beta/task_group_get_runs_params.py | 18 - .../beta/task_group_get_runs_response.py | 12 - .../types/beta/task_group_run_response.py | 30 - src/parallel/types/beta/task_group_status.py | 27 - .../types/beta/task_run_create_params.py | 70 -- src/parallel/types/beta/task_run_event.py | 32 - .../types/beta/task_run_events_response.py | 58 -- .../types/beta/task_run_result_params.py | 18 - src/parallel/types/beta/web_search_result.py | 18 - src/parallel/types/beta/webhook.py | 16 - src/parallel/types/beta/webhook_param.py | 16 - src/parallel/types/field_basis.py | 25 - src/parallel/types/json_schema.py | 16 - src/parallel/types/json_schema_param.py | 3 +- src/parallel/types/shared/__init__.py | 2 - src/parallel/types/shared/error_object.py | 4 +- src/parallel/types/shared/error_response.py | 3 +- src/parallel/types/shared/source_policy.py | 21 - src/parallel/types/shared/warning.py | 22 - src/parallel/types/shared_params/__init__.py | 3 - .../types/shared_params/source_policy.py | 22 - src/parallel/types/task_run.py | 33 +- src/parallel/types/task_run_create_params.py | 15 +- src/parallel/types/task_run_json_output.py | 47 +- src/parallel/types/task_run_result.py | 7 +- src/parallel/types/task_run_text_output.py | 39 +- src/parallel/types/task_spec.py | 31 - src/parallel/types/task_spec_param.py | 5 +- src/parallel/types/text_schema.py | 16 - tests/api_resources/beta/__init__.py | 1 - tests/api_resources/beta/test_task_group.py | 613 ----------------- tests/api_resources/beta/test_task_run.py | 349 ---------- tests/api_resources/test_beta.py | 104 --- tests/api_resources/test_task_run.py | 96 ++- tests/test_client.py | 36 +- 63 files changed, 164 insertions(+), 3775 deletions(-) delete mode 100644 src/parallel/resources/beta/__init__.py delete mode 100644 src/parallel/resources/beta/beta.py delete mode 100644 src/parallel/resources/beta/task_group.py delete mode 100644 src/parallel/resources/beta/task_run.py delete mode 100644 src/parallel/types/auto_schema.py delete mode 100644 src/parallel/types/auto_schema_param.py delete mode 100644 src/parallel/types/beta/beta_run_input.py delete mode 100644 src/parallel/types/beta/beta_run_input_param.py delete mode 100644 src/parallel/types/beta/beta_search_params.py delete mode 100644 src/parallel/types/beta/beta_task_run_result.py delete mode 100644 
src/parallel/types/beta/error_event.py delete mode 100644 src/parallel/types/beta/mcp_server.py delete mode 100644 src/parallel/types/beta/mcp_server_param.py delete mode 100644 src/parallel/types/beta/mcp_tool_call.py delete mode 100644 src/parallel/types/beta/parallel_beta_param.py delete mode 100644 src/parallel/types/beta/search_result.py delete mode 100644 src/parallel/types/beta/task_group.py delete mode 100644 src/parallel/types/beta/task_group_add_runs_params.py delete mode 100644 src/parallel/types/beta/task_group_create_params.py delete mode 100644 src/parallel/types/beta/task_group_events_params.py delete mode 100644 src/parallel/types/beta/task_group_events_response.py delete mode 100644 src/parallel/types/beta/task_group_get_runs_params.py delete mode 100644 src/parallel/types/beta/task_group_get_runs_response.py delete mode 100644 src/parallel/types/beta/task_group_run_response.py delete mode 100644 src/parallel/types/beta/task_group_status.py delete mode 100644 src/parallel/types/beta/task_run_create_params.py delete mode 100644 src/parallel/types/beta/task_run_event.py delete mode 100644 src/parallel/types/beta/task_run_events_response.py delete mode 100644 src/parallel/types/beta/task_run_result_params.py delete mode 100644 src/parallel/types/beta/web_search_result.py delete mode 100644 src/parallel/types/beta/webhook.py delete mode 100644 src/parallel/types/beta/webhook_param.py delete mode 100644 src/parallel/types/field_basis.py delete mode 100644 src/parallel/types/json_schema.py delete mode 100644 src/parallel/types/shared/source_policy.py delete mode 100644 src/parallel/types/shared/warning.py delete mode 100644 src/parallel/types/shared_params/__init__.py delete mode 100644 src/parallel/types/shared_params/source_policy.py delete mode 100644 src/parallel/types/task_spec.py delete mode 100644 src/parallel/types/text_schema.py delete mode 100644 tests/api_resources/beta/__init__.py delete mode 100644 tests/api_resources/beta/test_task_group.py delete mode 100644 tests/api_resources/beta/test_task_run.py delete mode 100644 tests/api_resources/test_beta.py diff --git a/.stats.yml b/.stats.yml index 57243e7..c703e97 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ -configured_endpoints: 12 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/parallel-web%2Fparallel-sdk-1aeb1c81a84999f2d27ca9e86b041d74b892926bed126dc9b0f3cff4d7b26963.yml -openapi_spec_hash: 6280f6c6fb537f7c9ac5cc33ee2e433d +configured_endpoints: 3 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/parallel-web%2Fparallel-sdk-ff0d5939e135b67b3448abf72d8bb0f9a574194337c7c7192453781347a9601d.yml +openapi_spec_hash: f3ce85349af6273a671d3d2781c4c877 config_hash: 284b51e02bda8519b1f21bb67f1809e0 diff --git a/api.md b/api.md index 665af26..6b41468 100644 --- a/api.md +++ b/api.md @@ -1,7 +1,7 @@ # Shared Types ```python -from parallel.types import ErrorObject, ErrorResponse, SourcePolicy, Warning +from parallel.types import ErrorObject, ErrorResponse ``` # TaskRun @@ -10,9 +10,7 @@ Types: ```python from parallel.types import ( - AutoSchema, Citation, - FieldBasis, JsonSchema, ParsedTaskRunResult, RunInput, diff --git a/src/parallel/_client.py b/src/parallel/_client.py index bb7c4a4..dcb4a10 100644 --- a/src/parallel/_client.py +++ b/src/parallel/_client.py @@ -29,7 +29,6 @@ SyncAPIClient, AsyncAPIClient, ) -from .resources.beta import beta __all__ = [ "Timeout", @@ -45,7 +44,6 @@ class Parallel(SyncAPIClient): task_run: task_run.TaskRunResource - beta: 
beta.BetaResource with_raw_response: ParallelWithRawResponse with_streaming_response: ParallelWithStreamedResponse @@ -104,7 +102,6 @@ def __init__( ) self.task_run = task_run.TaskRunResource(self) - self.beta = beta.BetaResource(self) self.with_raw_response = ParallelWithRawResponse(self) self.with_streaming_response = ParallelWithStreamedResponse(self) @@ -215,7 +212,6 @@ def _make_status_error( class AsyncParallel(AsyncAPIClient): task_run: task_run.AsyncTaskRunResource - beta: beta.AsyncBetaResource with_raw_response: AsyncParallelWithRawResponse with_streaming_response: AsyncParallelWithStreamedResponse @@ -274,7 +270,6 @@ def __init__( ) self.task_run = task_run.AsyncTaskRunResource(self) - self.beta = beta.AsyncBetaResource(self) self.with_raw_response = AsyncParallelWithRawResponse(self) self.with_streaming_response = AsyncParallelWithStreamedResponse(self) @@ -386,25 +381,21 @@ def _make_status_error( class ParallelWithRawResponse: def __init__(self, client: Parallel) -> None: self.task_run = task_run.TaskRunResourceWithRawResponse(client.task_run) - self.beta = beta.BetaResourceWithRawResponse(client.beta) class AsyncParallelWithRawResponse: def __init__(self, client: AsyncParallel) -> None: self.task_run = task_run.AsyncTaskRunResourceWithRawResponse(client.task_run) - self.beta = beta.AsyncBetaResourceWithRawResponse(client.beta) class ParallelWithStreamedResponse: def __init__(self, client: Parallel) -> None: self.task_run = task_run.TaskRunResourceWithStreamingResponse(client.task_run) - self.beta = beta.BetaResourceWithStreamingResponse(client.beta) class AsyncParallelWithStreamedResponse: def __init__(self, client: AsyncParallel) -> None: self.task_run = task_run.AsyncTaskRunResourceWithStreamingResponse(client.task_run) - self.beta = beta.AsyncBetaResourceWithStreamingResponse(client.beta) Client = Parallel diff --git a/src/parallel/resources/__init__.py b/src/parallel/resources/__init__.py index 9d1df4f..6fc7c06 100644 --- a/src/parallel/resources/__init__.py +++ b/src/parallel/resources/__init__.py @@ -1,13 +1,5 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. -from .beta import ( - BetaResource, - AsyncBetaResource, - BetaResourceWithRawResponse, - AsyncBetaResourceWithRawResponse, - BetaResourceWithStreamingResponse, - AsyncBetaResourceWithStreamingResponse, -) from .task_run import ( TaskRunResource, AsyncTaskRunResource, @@ -24,10 +16,4 @@ "AsyncTaskRunResourceWithRawResponse", "TaskRunResourceWithStreamingResponse", "AsyncTaskRunResourceWithStreamingResponse", - "BetaResource", - "AsyncBetaResource", - "BetaResourceWithRawResponse", - "AsyncBetaResourceWithRawResponse", - "BetaResourceWithStreamingResponse", - "AsyncBetaResourceWithStreamingResponse", ] diff --git a/src/parallel/resources/beta/__init__.py b/src/parallel/resources/beta/__init__.py deleted file mode 100644 index 3bd45ec..0000000 --- a/src/parallel/resources/beta/__init__.py +++ /dev/null @@ -1,47 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
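Note: the `_client.py` and `resources/__init__.py` hunks above drop the `beta` resource from both `Parallel` and `AsyncParallel`, leaving `task_run` as the only resource attached to the clients. A minimal sketch of post-patch usage, assuming the conventional Stainless behavior of reading the API key from the environment and using an illustrative processor name (neither is specified in this patch):

```python
from parallel import Parallel

# Assumes the API key is picked up from the environment by default.
client = Parallel()

# Only `task_run` remains on the client after this patch; `client.beta.*`
# calls from earlier SDK versions would now fail with AttributeError.
run = client.task_run.create(
    input="Summarize the latest Parallel SDK changes",  # hypothetical input
    processor="base",  # processor name is an assumption, not taken from this patch
)
print(run.status)  # field name assumed from the run-object docstrings elsewhere in the diff
```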
- -from .beta import ( - BetaResource, - AsyncBetaResource, - BetaResourceWithRawResponse, - AsyncBetaResourceWithRawResponse, - BetaResourceWithStreamingResponse, - AsyncBetaResourceWithStreamingResponse, -) -from .task_run import ( - TaskRunResource, - AsyncTaskRunResource, - TaskRunResourceWithRawResponse, - AsyncTaskRunResourceWithRawResponse, - TaskRunResourceWithStreamingResponse, - AsyncTaskRunResourceWithStreamingResponse, -) -from .task_group import ( - TaskGroupResource, - AsyncTaskGroupResource, - TaskGroupResourceWithRawResponse, - AsyncTaskGroupResourceWithRawResponse, - TaskGroupResourceWithStreamingResponse, - AsyncTaskGroupResourceWithStreamingResponse, -) - -__all__ = [ - "TaskRunResource", - "AsyncTaskRunResource", - "TaskRunResourceWithRawResponse", - "AsyncTaskRunResourceWithRawResponse", - "TaskRunResourceWithStreamingResponse", - "AsyncTaskRunResourceWithStreamingResponse", - "TaskGroupResource", - "AsyncTaskGroupResource", - "TaskGroupResourceWithRawResponse", - "AsyncTaskGroupResourceWithRawResponse", - "TaskGroupResourceWithStreamingResponse", - "AsyncTaskGroupResourceWithStreamingResponse", - "BetaResource", - "AsyncBetaResource", - "BetaResourceWithRawResponse", - "AsyncBetaResourceWithRawResponse", - "BetaResourceWithStreamingResponse", - "AsyncBetaResourceWithStreamingResponse", -] diff --git a/src/parallel/resources/beta/beta.py b/src/parallel/resources/beta/beta.py deleted file mode 100644 index c12ec5a..0000000 --- a/src/parallel/resources/beta/beta.py +++ /dev/null @@ -1,301 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing import List, Optional -from typing_extensions import Literal - -import httpx - -from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven -from ..._utils import maybe_transform, async_maybe_transform -from .task_run import ( - TaskRunResource, - AsyncTaskRunResource, - TaskRunResourceWithRawResponse, - AsyncTaskRunResourceWithRawResponse, - TaskRunResourceWithStreamingResponse, - AsyncTaskRunResourceWithStreamingResponse, -) -from ..._compat import cached_property -from .task_group import ( - TaskGroupResource, - AsyncTaskGroupResource, - TaskGroupResourceWithRawResponse, - AsyncTaskGroupResourceWithRawResponse, - TaskGroupResourceWithStreamingResponse, - AsyncTaskGroupResourceWithStreamingResponse, -) -from ..._resource import SyncAPIResource, AsyncAPIResource -from ..._response import ( - to_raw_response_wrapper, - to_streamed_response_wrapper, - async_to_raw_response_wrapper, - async_to_streamed_response_wrapper, -) -from ...types.beta import beta_search_params -from ..._base_client import make_request_options -from ...types.beta.search_result import SearchResult -from ...types.shared_params.source_policy import SourcePolicy - -__all__ = ["BetaResource", "AsyncBetaResource"] - - -class BetaResource(SyncAPIResource): - @cached_property - def task_run(self) -> TaskRunResource: - return TaskRunResource(self._client) - - @cached_property - def task_group(self) -> TaskGroupResource: - return TaskGroupResource(self._client) - - @cached_property - def with_raw_response(self) -> BetaResourceWithRawResponse: - """ - This property can be used as a prefix for any HTTP method call to return - the raw response object instead of the parsed content. 
- - For more information, see https://www.github.com/parallel-web/parallel-sdk-python#accessing-raw-response-data-eg-headers - """ - return BetaResourceWithRawResponse(self) - - @cached_property - def with_streaming_response(self) -> BetaResourceWithStreamingResponse: - """ - An alternative to `.with_raw_response` that doesn't eagerly read the response body. - - For more information, see https://www.github.com/parallel-web/parallel-sdk-python#with_streaming_response - """ - return BetaResourceWithStreamingResponse(self) - - def search( - self, - *, - max_chars_per_result: Optional[int] | NotGiven = NOT_GIVEN, - max_results: Optional[int] | NotGiven = NOT_GIVEN, - objective: Optional[str] | NotGiven = NOT_GIVEN, - processor: Literal["base", "pro"] | NotGiven = NOT_GIVEN, - search_queries: Optional[List[str]] | NotGiven = NOT_GIVEN, - source_policy: Optional[SourcePolicy] | NotGiven = NOT_GIVEN, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> SearchResult: - """ - Searches the web. - - Args: - max_chars_per_result: Upper bound on the number of characters to include in excerpts for each search - result. - - max_results: Upper bound on the number of results to return. May be limited by the processor. - Defaults to 10 if not provided. - - objective: Natural-language description of what the web search is trying to find. May - include guidance about preferred sources or freshness. At least one of objective - or search_queries must be provided. - - processor: Search processor. - - search_queries: Optional list of traditional keyword search queries to guide the search. May - contain search operators. At least one of objective or search_queries must be - provided. - - source_policy: Source policy for web search results. - - This policy governs which sources are allowed/disallowed in results. - - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - return self._post( - "/v1beta/search", - body=maybe_transform( - { - "max_chars_per_result": max_chars_per_result, - "max_results": max_results, - "objective": objective, - "processor": processor, - "search_queries": search_queries, - "source_policy": source_policy, - }, - beta_search_params.BetaSearchParams, - ), - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=SearchResult, - ) - - -class AsyncBetaResource(AsyncAPIResource): - @cached_property - def task_run(self) -> AsyncTaskRunResource: - return AsyncTaskRunResource(self._client) - - @cached_property - def task_group(self) -> AsyncTaskGroupResource: - return AsyncTaskGroupResource(self._client) - - @cached_property - def with_raw_response(self) -> AsyncBetaResourceWithRawResponse: - """ - This property can be used as a prefix for any HTTP method call to return - the raw response object instead of the parsed content. 
- - For more information, see https://www.github.com/parallel-web/parallel-sdk-python#accessing-raw-response-data-eg-headers - """ - return AsyncBetaResourceWithRawResponse(self) - - @cached_property - def with_streaming_response(self) -> AsyncBetaResourceWithStreamingResponse: - """ - An alternative to `.with_raw_response` that doesn't eagerly read the response body. - - For more information, see https://www.github.com/parallel-web/parallel-sdk-python#with_streaming_response - """ - return AsyncBetaResourceWithStreamingResponse(self) - - async def search( - self, - *, - max_chars_per_result: Optional[int] | NotGiven = NOT_GIVEN, - max_results: Optional[int] | NotGiven = NOT_GIVEN, - objective: Optional[str] | NotGiven = NOT_GIVEN, - processor: Literal["base", "pro"] | NotGiven = NOT_GIVEN, - search_queries: Optional[List[str]] | NotGiven = NOT_GIVEN, - source_policy: Optional[SourcePolicy] | NotGiven = NOT_GIVEN, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> SearchResult: - """ - Searches the web. - - Args: - max_chars_per_result: Upper bound on the number of characters to include in excerpts for each search - result. - - max_results: Upper bound on the number of results to return. May be limited by the processor. - Defaults to 10 if not provided. - - objective: Natural-language description of what the web search is trying to find. May - include guidance about preferred sources or freshness. At least one of objective - or search_queries must be provided. - - processor: Search processor. - - search_queries: Optional list of traditional keyword search queries to guide the search. May - contain search operators. At least one of objective or search_queries must be - provided. - - source_policy: Source policy for web search results. - - This policy governs which sources are allowed/disallowed in results. 
- - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - return await self._post( - "/v1beta/search", - body=await async_maybe_transform( - { - "max_chars_per_result": max_chars_per_result, - "max_results": max_results, - "objective": objective, - "processor": processor, - "search_queries": search_queries, - "source_policy": source_policy, - }, - beta_search_params.BetaSearchParams, - ), - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=SearchResult, - ) - - -class BetaResourceWithRawResponse: - def __init__(self, beta: BetaResource) -> None: - self._beta = beta - - self.search = to_raw_response_wrapper( - beta.search, - ) - - @cached_property - def task_run(self) -> TaskRunResourceWithRawResponse: - return TaskRunResourceWithRawResponse(self._beta.task_run) - - @cached_property - def task_group(self) -> TaskGroupResourceWithRawResponse: - return TaskGroupResourceWithRawResponse(self._beta.task_group) - - -class AsyncBetaResourceWithRawResponse: - def __init__(self, beta: AsyncBetaResource) -> None: - self._beta = beta - - self.search = async_to_raw_response_wrapper( - beta.search, - ) - - @cached_property - def task_run(self) -> AsyncTaskRunResourceWithRawResponse: - return AsyncTaskRunResourceWithRawResponse(self._beta.task_run) - - @cached_property - def task_group(self) -> AsyncTaskGroupResourceWithRawResponse: - return AsyncTaskGroupResourceWithRawResponse(self._beta.task_group) - - -class BetaResourceWithStreamingResponse: - def __init__(self, beta: BetaResource) -> None: - self._beta = beta - - self.search = to_streamed_response_wrapper( - beta.search, - ) - - @cached_property - def task_run(self) -> TaskRunResourceWithStreamingResponse: - return TaskRunResourceWithStreamingResponse(self._beta.task_run) - - @cached_property - def task_group(self) -> TaskGroupResourceWithStreamingResponse: - return TaskGroupResourceWithStreamingResponse(self._beta.task_group) - - -class AsyncBetaResourceWithStreamingResponse: - def __init__(self, beta: AsyncBetaResource) -> None: - self._beta = beta - - self.search = async_to_streamed_response_wrapper( - beta.search, - ) - - @cached_property - def task_run(self) -> AsyncTaskRunResourceWithStreamingResponse: - return AsyncTaskRunResourceWithStreamingResponse(self._beta.task_run) - - @cached_property - def task_group(self) -> AsyncTaskGroupResourceWithStreamingResponse: - return AsyncTaskGroupResourceWithStreamingResponse(self._beta.task_group) diff --git a/src/parallel/resources/beta/task_group.py b/src/parallel/resources/beta/task_group.py deleted file mode 100644 index 0a34e67..0000000 --- a/src/parallel/resources/beta/task_group.py +++ /dev/null @@ -1,632 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
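Note: the deletion of `beta.py` above removes the `/v1beta/search` wrapper whose parameters are documented in the docstrings just shown. For reference, a hedged sketch of how that endpoint was called with a pre-patch version of the SDK (argument values are illustrative only; the signature comes from the deleted `BetaResource.search`):

```python
from parallel import Parallel

client = Parallel()

# Pre-patch beta search call. Per the deleted docstring, at least one of
# `objective` or `search_queries` had to be provided.
result = client.beta.search(
    objective="Recent announcements about the Parallel Task API",  # hypothetical
    search_queries=["Parallel Task API announcement"],
    processor="base",
    max_results=5,
    max_chars_per_result=1500,
)
```

After this patch the method, its `SearchResult` return type, and the related `SourcePolicy` shared type are no longer part of the package.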
- -from __future__ import annotations - -from typing import Any, Dict, List, Union, Iterable, Optional, cast -from typing_extensions import Literal - -import httpx - -from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven -from ..._utils import is_given, maybe_transform, strip_not_given, async_maybe_transform -from ..._compat import cached_property -from ..._resource import SyncAPIResource, AsyncAPIResource -from ..._response import ( - to_raw_response_wrapper, - to_streamed_response_wrapper, - async_to_raw_response_wrapper, - async_to_streamed_response_wrapper, -) -from ..._streaming import Stream, AsyncStream -from ...types.beta import ( - task_group_create_params, - task_group_events_params, - task_group_add_runs_params, - task_group_get_runs_params, -) -from ..._base_client import make_request_options -from ...types.beta.task_group import TaskGroup -from ...types.task_spec_param import TaskSpecParam -from ...types.beta.parallel_beta_param import ParallelBetaParam -from ...types.beta.beta_run_input_param import BetaRunInputParam -from ...types.beta.task_group_run_response import TaskGroupRunResponse -from ...types.beta.task_group_events_response import TaskGroupEventsResponse -from ...types.beta.task_group_get_runs_response import TaskGroupGetRunsResponse - -__all__ = ["TaskGroupResource", "AsyncTaskGroupResource"] - - -class TaskGroupResource(SyncAPIResource): - @cached_property - def with_raw_response(self) -> TaskGroupResourceWithRawResponse: - """ - This property can be used as a prefix for any HTTP method call to return - the raw response object instead of the parsed content. - - For more information, see https://www.github.com/parallel-web/parallel-sdk-python#accessing-raw-response-data-eg-headers - """ - return TaskGroupResourceWithRawResponse(self) - - @cached_property - def with_streaming_response(self) -> TaskGroupResourceWithStreamingResponse: - """ - An alternative to `.with_raw_response` that doesn't eagerly read the response body. - - For more information, see https://www.github.com/parallel-web/parallel-sdk-python#with_streaming_response - """ - return TaskGroupResourceWithStreamingResponse(self) - - def create( - self, - *, - metadata: Optional[Dict[str, Union[str, float, bool]]] | NotGiven = NOT_GIVEN, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> TaskGroup: - """ - Initiates a TaskGroup to group and track multiple runs. - - Args: - metadata: User-provided metadata stored with the task group. - - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - return self._post( - "/v1beta/tasks/groups", - body=maybe_transform({"metadata": metadata}, task_group_create_params.TaskGroupCreateParams), - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=TaskGroup, - ) - - def retrieve( - self, - task_group_id: str, - *, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
- # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> TaskGroup: - """ - Retrieves aggregated status across runs in a TaskGroup. - - Args: - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not task_group_id: - raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}") - return self._get( - f"/v1beta/tasks/groups/{task_group_id}", - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=TaskGroup, - ) - - def add_runs( - self, - task_group_id: str, - *, - inputs: Iterable[BetaRunInputParam], - default_task_spec: Optional[TaskSpecParam] | NotGiven = NOT_GIVEN, - betas: List[ParallelBetaParam] | NotGiven = NOT_GIVEN, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> TaskGroupRunResponse: - """ - Initiates multiple task runs within a TaskGroup. - - Args: - inputs: List of task runs to execute. - - default_task_spec: Specification for a task. - - Auto output schemas can be specified by setting `output_schema={"type":"auto"}`. - Not specifying a TaskSpec is the same as setting an auto output schema. - - For convenience bare strings are also accepted as input or output schemas. - - betas: Optional header to specify the beta version(s) to enable. - - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not task_group_id: - raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}") - extra_headers = { - **strip_not_given({"parallel-beta": ",".join(str(e) for e in betas) if is_given(betas) else NOT_GIVEN}), - **(extra_headers or {}), - } - return self._post( - f"/v1beta/tasks/groups/{task_group_id}/runs", - body=maybe_transform( - { - "inputs": inputs, - "default_task_spec": default_task_spec, - }, - task_group_add_runs_params.TaskGroupAddRunsParams, - ), - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=TaskGroupRunResponse, - ) - - def events( - self, - task_group_id: str, - *, - last_event_id: Optional[str] | NotGiven = NOT_GIVEN, - api_timeout: Optional[float] | NotGiven = NOT_GIVEN, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. 
- extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> Stream[TaskGroupEventsResponse]: - """ - Streams events from a TaskGroup: status updates and run completions. - - The connection will remain open for up to 10 minutes as long as at least one run - in the TaskGroup is active. - - Args: - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not task_group_id: - raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}") - extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})} - return self._get( - f"/v1beta/tasks/groups/{task_group_id}/events", - options=make_request_options( - extra_headers=extra_headers, - extra_query=extra_query, - extra_body=extra_body, - timeout=timeout, - query=maybe_transform( - { - "last_event_id": last_event_id, - "api_timeout": api_timeout, - }, - task_group_events_params.TaskGroupEventsParams, - ), - ), - cast_to=cast( - Any, TaskGroupEventsResponse - ), # Union types cannot be passed in as arguments in the type system - stream=True, - stream_cls=Stream[TaskGroupEventsResponse], - ) - - def get_runs( - self, - task_group_id: str, - *, - include_input: bool | NotGiven = NOT_GIVEN, - include_output: bool | NotGiven = NOT_GIVEN, - last_event_id: Optional[str] | NotGiven = NOT_GIVEN, - status: Optional[ - Literal["queued", "action_required", "running", "completed", "failed", "cancelling", "cancelled"] - ] - | NotGiven = NOT_GIVEN, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> Stream[TaskGroupGetRunsResponse]: - """ - Retrieves task runs in a TaskGroup and optionally their inputs and outputs. 
- - Args: - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not task_group_id: - raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}") - extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})} - return self._get( - f"/v1beta/tasks/groups/{task_group_id}/runs", - options=make_request_options( - extra_headers=extra_headers, - extra_query=extra_query, - extra_body=extra_body, - timeout=timeout, - query=maybe_transform( - { - "include_input": include_input, - "include_output": include_output, - "last_event_id": last_event_id, - "status": status, - }, - task_group_get_runs_params.TaskGroupGetRunsParams, - ), - ), - cast_to=cast( - Any, TaskGroupGetRunsResponse - ), # Union types cannot be passed in as arguments in the type system - stream=True, - stream_cls=Stream[TaskGroupGetRunsResponse], - ) - - -class AsyncTaskGroupResource(AsyncAPIResource): - @cached_property - def with_raw_response(self) -> AsyncTaskGroupResourceWithRawResponse: - """ - This property can be used as a prefix for any HTTP method call to return - the raw response object instead of the parsed content. - - For more information, see https://www.github.com/parallel-web/parallel-sdk-python#accessing-raw-response-data-eg-headers - """ - return AsyncTaskGroupResourceWithRawResponse(self) - - @cached_property - def with_streaming_response(self) -> AsyncTaskGroupResourceWithStreamingResponse: - """ - An alternative to `.with_raw_response` that doesn't eagerly read the response body. - - For more information, see https://www.github.com/parallel-web/parallel-sdk-python#with_streaming_response - """ - return AsyncTaskGroupResourceWithStreamingResponse(self) - - async def create( - self, - *, - metadata: Optional[Dict[str, Union[str, float, bool]]] | NotGiven = NOT_GIVEN, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> TaskGroup: - """ - Initiates a TaskGroup to group and track multiple runs. - - Args: - metadata: User-provided metadata stored with the task group. - - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - return await self._post( - "/v1beta/tasks/groups", - body=await async_maybe_transform({"metadata": metadata}, task_group_create_params.TaskGroupCreateParams), - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=TaskGroup, - ) - - async def retrieve( - self, - task_group_id: str, - *, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. 
- extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> TaskGroup: - """ - Retrieves aggregated status across runs in a TaskGroup. - - Args: - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not task_group_id: - raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}") - return await self._get( - f"/v1beta/tasks/groups/{task_group_id}", - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=TaskGroup, - ) - - async def add_runs( - self, - task_group_id: str, - *, - inputs: Iterable[BetaRunInputParam], - default_task_spec: Optional[TaskSpecParam] | NotGiven = NOT_GIVEN, - betas: List[ParallelBetaParam] | NotGiven = NOT_GIVEN, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> TaskGroupRunResponse: - """ - Initiates multiple task runs within a TaskGroup. - - Args: - inputs: List of task runs to execute. - - default_task_spec: Specification for a task. - - Auto output schemas can be specified by setting `output_schema={"type":"auto"}`. - Not specifying a TaskSpec is the same as setting an auto output schema. - - For convenience bare strings are also accepted as input or output schemas. - - betas: Optional header to specify the beta version(s) to enable. - - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not task_group_id: - raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}") - extra_headers = { - **strip_not_given({"parallel-beta": ",".join(str(e) for e in betas) if is_given(betas) else NOT_GIVEN}), - **(extra_headers or {}), - } - return await self._post( - f"/v1beta/tasks/groups/{task_group_id}/runs", - body=await async_maybe_transform( - { - "inputs": inputs, - "default_task_spec": default_task_spec, - }, - task_group_add_runs_params.TaskGroupAddRunsParams, - ), - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=TaskGroupRunResponse, - ) - - async def events( - self, - task_group_id: str, - *, - last_event_id: Optional[str] | NotGiven = NOT_GIVEN, - api_timeout: Optional[float] | NotGiven = NOT_GIVEN, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. 
- extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> AsyncStream[TaskGroupEventsResponse]: - """ - Streams events from a TaskGroup: status updates and run completions. - - The connection will remain open for up to 10 minutes as long as at least one run - in the TaskGroup is active. - - Args: - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not task_group_id: - raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}") - extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})} - return await self._get( - f"/v1beta/tasks/groups/{task_group_id}/events", - options=make_request_options( - extra_headers=extra_headers, - extra_query=extra_query, - extra_body=extra_body, - timeout=timeout, - query=await async_maybe_transform( - { - "last_event_id": last_event_id, - "api_timeout": api_timeout, - }, - task_group_events_params.TaskGroupEventsParams, - ), - ), - cast_to=cast( - Any, TaskGroupEventsResponse - ), # Union types cannot be passed in as arguments in the type system - stream=True, - stream_cls=AsyncStream[TaskGroupEventsResponse], - ) - - async def get_runs( - self, - task_group_id: str, - *, - include_input: bool | NotGiven = NOT_GIVEN, - include_output: bool | NotGiven = NOT_GIVEN, - last_event_id: Optional[str] | NotGiven = NOT_GIVEN, - status: Optional[ - Literal["queued", "action_required", "running", "completed", "failed", "cancelling", "cancelled"] - ] - | NotGiven = NOT_GIVEN, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> AsyncStream[TaskGroupGetRunsResponse]: - """ - Retrieves task runs in a TaskGroup and optionally their inputs and outputs. 
- - Args: - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not task_group_id: - raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}") - extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})} - return await self._get( - f"/v1beta/tasks/groups/{task_group_id}/runs", - options=make_request_options( - extra_headers=extra_headers, - extra_query=extra_query, - extra_body=extra_body, - timeout=timeout, - query=await async_maybe_transform( - { - "include_input": include_input, - "include_output": include_output, - "last_event_id": last_event_id, - "status": status, - }, - task_group_get_runs_params.TaskGroupGetRunsParams, - ), - ), - cast_to=cast( - Any, TaskGroupGetRunsResponse - ), # Union types cannot be passed in as arguments in the type system - stream=True, - stream_cls=AsyncStream[TaskGroupGetRunsResponse], - ) - - -class TaskGroupResourceWithRawResponse: - def __init__(self, task_group: TaskGroupResource) -> None: - self._task_group = task_group - - self.create = to_raw_response_wrapper( - task_group.create, - ) - self.retrieve = to_raw_response_wrapper( - task_group.retrieve, - ) - self.add_runs = to_raw_response_wrapper( - task_group.add_runs, - ) - self.events = to_raw_response_wrapper( - task_group.events, - ) - self.get_runs = to_raw_response_wrapper( - task_group.get_runs, - ) - - -class AsyncTaskGroupResourceWithRawResponse: - def __init__(self, task_group: AsyncTaskGroupResource) -> None: - self._task_group = task_group - - self.create = async_to_raw_response_wrapper( - task_group.create, - ) - self.retrieve = async_to_raw_response_wrapper( - task_group.retrieve, - ) - self.add_runs = async_to_raw_response_wrapper( - task_group.add_runs, - ) - self.events = async_to_raw_response_wrapper( - task_group.events, - ) - self.get_runs = async_to_raw_response_wrapper( - task_group.get_runs, - ) - - -class TaskGroupResourceWithStreamingResponse: - def __init__(self, task_group: TaskGroupResource) -> None: - self._task_group = task_group - - self.create = to_streamed_response_wrapper( - task_group.create, - ) - self.retrieve = to_streamed_response_wrapper( - task_group.retrieve, - ) - self.add_runs = to_streamed_response_wrapper( - task_group.add_runs, - ) - self.events = to_streamed_response_wrapper( - task_group.events, - ) - self.get_runs = to_streamed_response_wrapper( - task_group.get_runs, - ) - - -class AsyncTaskGroupResourceWithStreamingResponse: - def __init__(self, task_group: AsyncTaskGroupResource) -> None: - self._task_group = task_group - - self.create = async_to_streamed_response_wrapper( - task_group.create, - ) - self.retrieve = async_to_streamed_response_wrapper( - task_group.retrieve, - ) - self.add_runs = async_to_streamed_response_wrapper( - task_group.add_runs, - ) - self.events = async_to_streamed_response_wrapper( - task_group.events, - ) - self.get_runs = async_to_streamed_response_wrapper( - task_group.get_runs, - ) diff --git a/src/parallel/resources/beta/task_run.py b/src/parallel/resources/beta/task_run.py deleted file mode 100644 index 8235a1a..0000000 --- a/src/parallel/resources/beta/task_run.py +++ /dev/null @@ -1,499 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
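Note: the `task_group.py` file deleted above implemented the beta TaskGroup workflow: create a group, add runs to it, then stream aggregated events or fetch the runs back. A hedged sketch of that pre-patch workflow, with illustrative inputs and an assumed `task_group_id` attribute name on the returned `TaskGroup` model:

```python
from parallel import Parallel

client = Parallel()

# Pre-patch TaskGroup usage, based on the deleted resource methods above.
group = client.beta.task_group.create(metadata={"batch": "demo"})  # metadata is optional

client.beta.task_group.add_runs(
    group.task_group_id,  # attribute name assumed from the TaskGroup model
    inputs=[
        {"input": "Profile of company A", "processor": "base"},  # hypothetical inputs
        {"input": "Profile of company B", "processor": "base"},
    ],
)

# Streams status updates and run completions; per the deleted docstring the
# connection stays open for up to 10 minutes while at least one run is active.
for event in client.beta.task_group.events(group.task_group_id):
    print(event)
```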
- -from __future__ import annotations - -from typing import Any, Dict, List, Union, Iterable, Optional, cast - -import httpx - -from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven -from ..._utils import is_given, maybe_transform, strip_not_given, async_maybe_transform -from ..._compat import cached_property -from ..._resource import SyncAPIResource, AsyncAPIResource -from ..._response import ( - to_raw_response_wrapper, - to_streamed_response_wrapper, - async_to_raw_response_wrapper, - async_to_streamed_response_wrapper, -) -from ..._streaming import Stream, AsyncStream -from ...types.beta import task_run_create_params, task_run_result_params -from ..._base_client import make_request_options -from ...types.task_run import TaskRun -from ...types.task_spec_param import TaskSpecParam -from ...types.beta.webhook_param import WebhookParam -from ...types.beta.mcp_server_param import McpServerParam -from ...types.beta.parallel_beta_param import ParallelBetaParam -from ...types.beta.beta_task_run_result import BetaTaskRunResult -from ...types.shared_params.source_policy import SourcePolicy -from ...types.beta.task_run_events_response import TaskRunEventsResponse - -__all__ = ["TaskRunResource", "AsyncTaskRunResource"] - - -class TaskRunResource(SyncAPIResource): - @cached_property - def with_raw_response(self) -> TaskRunResourceWithRawResponse: - """ - This property can be used as a prefix for any HTTP method call to return - the raw response object instead of the parsed content. - - For more information, see https://www.github.com/parallel-web/parallel-sdk-python#accessing-raw-response-data-eg-headers - """ - return TaskRunResourceWithRawResponse(self) - - @cached_property - def with_streaming_response(self) -> TaskRunResourceWithStreamingResponse: - """ - An alternative to `.with_raw_response` that doesn't eagerly read the response body. - - For more information, see https://www.github.com/parallel-web/parallel-sdk-python#with_streaming_response - """ - return TaskRunResourceWithStreamingResponse(self) - - def create( - self, - *, - input: Union[str, Dict[str, object]], - processor: str, - enable_events: Optional[bool] | NotGiven = NOT_GIVEN, - mcp_servers: Optional[Iterable[McpServerParam]] | NotGiven = NOT_GIVEN, - metadata: Optional[Dict[str, Union[str, float, bool]]] | NotGiven = NOT_GIVEN, - source_policy: Optional[SourcePolicy] | NotGiven = NOT_GIVEN, - task_spec: Optional[TaskSpecParam] | NotGiven = NOT_GIVEN, - webhook: Optional[WebhookParam] | NotGiven = NOT_GIVEN, - betas: List[ParallelBetaParam] | NotGiven = NOT_GIVEN, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> TaskRun: - """ - Initiates a task run. - - Returns immediately with a run object in status 'queued'. - - Beta features can be enabled by setting the 'parallel-beta' header. - - Args: - input: Input to the task, either text or a JSON object. - - processor: Processor to use for the task. - - enable_events: Controls tracking of task run execution progress. When set to true, progress - events are recorded and can be accessed via the - [Task Run events](https://platform.parallel.ai/api-reference) endpoint. When - false, no progress events are tracked. 
Note that progress tracking cannot be - enabled after a run has been created. The flag is set to true by default for - premium processors (pro and above). This feature is not available via the Python - SDK. To enable this feature in your API requests, specify the `parallel-beta` - header with `events-sse-2025-07-24` value. - - mcp_servers: Optional list of MCP servers to use for the run. This feature is not available - via the Python SDK. To enable this feature in your API requests, specify the - `parallel-beta` header with `mcp-server-2025-07-17` value. - - metadata: User-provided metadata stored with the run. Keys and values must be strings with - a maximum length of 16 and 512 characters respectively. - - source_policy: Source policy for web search results. - - This policy governs which sources are allowed/disallowed in results. - - task_spec: Specification for a task. - - Auto output schemas can be specified by setting `output_schema={"type":"auto"}`. - Not specifying a TaskSpec is the same as setting an auto output schema. - - For convenience bare strings are also accepted as input or output schemas. - - webhook: Webhooks for Task Runs. - - betas: Optional header to specify the beta version(s) to enable. - - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - extra_headers = { - **strip_not_given({"parallel-beta": ",".join(str(e) for e in betas) if is_given(betas) else NOT_GIVEN}), - **(extra_headers or {}), - } - return self._post( - "/v1/tasks/runs?beta=true", - body=maybe_transform( - { - "input": input, - "processor": processor, - "enable_events": enable_events, - "mcp_servers": mcp_servers, - "metadata": metadata, - "source_policy": source_policy, - "task_spec": task_spec, - "webhook": webhook, - }, - task_run_create_params.TaskRunCreateParams, - ), - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=TaskRun, - ) - - def events( - self, - run_id: str, - *, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> Stream[TaskRunEventsResponse]: - """ - Streams events for a task run. - - Returns a stream of events showing progress updates and state changes for the - task run. - - For task runs that did not have enable_events set to true during creation, the - frequency of events will be reduced. 
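Note: the deleted beta `TaskRunResource` above exposed progress events gated behind the `parallel-beta` header. A hedged sketch of the pre-patch pattern, using the beta flag value quoted in the deleted docstring; the input, processor, and `run_id` attribute are assumptions:

```python
from parallel import Parallel

client = Parallel()

# Pre-patch beta task run with progress events enabled, based on the deleted
# `create` and `events` methods above.
run = client.beta.task_run.create(
    input="Research the history of the transistor",  # hypothetical input
    processor="pro",
    enable_events=True,
    betas=["events-sse-2025-07-24"],  # value taken from the deleted docstring
)

# Stream progress updates and state changes for the run.
for event in client.beta.task_run.events(run.run_id):  # run_id attribute assumed
    print(event)
```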
- - Args: - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not run_id: - raise ValueError(f"Expected a non-empty value for `run_id` but received {run_id!r}") - extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})} - return self._get( - f"/v1beta/tasks/runs/{run_id}/events", - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=cast(Any, TaskRunEventsResponse), # Union types cannot be passed in as arguments in the type system - stream=True, - stream_cls=Stream[TaskRunEventsResponse], - ) - - def result( - self, - run_id: str, - *, - api_timeout: int | NotGiven = NOT_GIVEN, - betas: List[ParallelBetaParam] | NotGiven = NOT_GIVEN, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> BetaTaskRunResult: - """ - Retrieves a run result by run_id, blocking until the run is completed. - - Args: - betas: Optional header to specify the beta version(s) to enable. - - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not run_id: - raise ValueError(f"Expected a non-empty value for `run_id` but received {run_id!r}") - extra_headers = { - **strip_not_given({"parallel-beta": ",".join(str(e) for e in betas) if is_given(betas) else NOT_GIVEN}), - **(extra_headers or {}), - } - return self._get( - f"/v1/tasks/runs/{run_id}/result?beta=true", - options=make_request_options( - extra_headers=extra_headers, - extra_query=extra_query, - extra_body=extra_body, - timeout=timeout, - query=maybe_transform({"api_timeout": api_timeout}, task_run_result_params.TaskRunResultParams), - ), - cast_to=BetaTaskRunResult, - ) - - -class AsyncTaskRunResource(AsyncAPIResource): - @cached_property - def with_raw_response(self) -> AsyncTaskRunResourceWithRawResponse: - """ - This property can be used as a prefix for any HTTP method call to return - the raw response object instead of the parsed content. - - For more information, see https://www.github.com/parallel-web/parallel-sdk-python#accessing-raw-response-data-eg-headers - """ - return AsyncTaskRunResourceWithRawResponse(self) - - @cached_property - def with_streaming_response(self) -> AsyncTaskRunResourceWithStreamingResponse: - """ - An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- - For more information, see https://www.github.com/parallel-web/parallel-sdk-python#with_streaming_response - """ - return AsyncTaskRunResourceWithStreamingResponse(self) - - async def create( - self, - *, - input: Union[str, Dict[str, object]], - processor: str, - enable_events: Optional[bool] | NotGiven = NOT_GIVEN, - mcp_servers: Optional[Iterable[McpServerParam]] | NotGiven = NOT_GIVEN, - metadata: Optional[Dict[str, Union[str, float, bool]]] | NotGiven = NOT_GIVEN, - source_policy: Optional[SourcePolicy] | NotGiven = NOT_GIVEN, - task_spec: Optional[TaskSpecParam] | NotGiven = NOT_GIVEN, - webhook: Optional[WebhookParam] | NotGiven = NOT_GIVEN, - betas: List[ParallelBetaParam] | NotGiven = NOT_GIVEN, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> TaskRun: - """ - Initiates a task run. - - Returns immediately with a run object in status 'queued'. - - Beta features can be enabled by setting the 'parallel-beta' header. - - Args: - input: Input to the task, either text or a JSON object. - - processor: Processor to use for the task. - - enable_events: Controls tracking of task run execution progress. When set to true, progress - events are recorded and can be accessed via the - [Task Run events](https://platform.parallel.ai/api-reference) endpoint. When - false, no progress events are tracked. Note that progress tracking cannot be - enabled after a run has been created. The flag is set to true by default for - premium processors (pro and above). This feature is not available via the Python - SDK. To enable this feature in your API requests, specify the `parallel-beta` - header with `events-sse-2025-07-24` value. - - mcp_servers: Optional list of MCP servers to use for the run. This feature is not available - via the Python SDK. To enable this feature in your API requests, specify the - `parallel-beta` header with `mcp-server-2025-07-17` value. - - metadata: User-provided metadata stored with the run. Keys and values must be strings with - a maximum length of 16 and 512 characters respectively. - - source_policy: Source policy for web search results. - - This policy governs which sources are allowed/disallowed in results. - - task_spec: Specification for a task. - - Auto output schemas can be specified by setting `output_schema={"type":"auto"}`. - Not specifying a TaskSpec is the same as setting an auto output schema. - - For convenience bare strings are also accepted as input or output schemas. - - webhook: Webhooks for Task Runs. - - betas: Optional header to specify the beta version(s) to enable. 
- - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - extra_headers = { - **strip_not_given({"parallel-beta": ",".join(str(e) for e in betas) if is_given(betas) else NOT_GIVEN}), - **(extra_headers or {}), - } - return await self._post( - "/v1/tasks/runs?beta=true", - body=await async_maybe_transform( - { - "input": input, - "processor": processor, - "enable_events": enable_events, - "mcp_servers": mcp_servers, - "metadata": metadata, - "source_policy": source_policy, - "task_spec": task_spec, - "webhook": webhook, - }, - task_run_create_params.TaskRunCreateParams, - ), - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=TaskRun, - ) - - async def events( - self, - run_id: str, - *, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> AsyncStream[TaskRunEventsResponse]: - """ - Streams events for a task run. - - Returns a stream of events showing progress updates and state changes for the - task run. - - For task runs that did not have enable_events set to true during creation, the - frequency of events will be reduced. - - Args: - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not run_id: - raise ValueError(f"Expected a non-empty value for `run_id` but received {run_id!r}") - extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})} - return await self._get( - f"/v1beta/tasks/runs/{run_id}/events", - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=cast(Any, TaskRunEventsResponse), # Union types cannot be passed in as arguments in the type system - stream=True, - stream_cls=AsyncStream[TaskRunEventsResponse], - ) - - async def result( - self, - run_id: str, - *, - api_timeout: int | NotGiven = NOT_GIVEN, - betas: List[ParallelBetaParam] | NotGiven = NOT_GIVEN, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> BetaTaskRunResult: - """ - Retrieves a run result by run_id, blocking until the run is completed. - - Args: - betas: Optional header to specify the beta version(s) to enable. 
- - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not run_id: - raise ValueError(f"Expected a non-empty value for `run_id` but received {run_id!r}") - extra_headers = { - **strip_not_given({"parallel-beta": ",".join(str(e) for e in betas) if is_given(betas) else NOT_GIVEN}), - **(extra_headers or {}), - } - return await self._get( - f"/v1/tasks/runs/{run_id}/result?beta=true", - options=make_request_options( - extra_headers=extra_headers, - extra_query=extra_query, - extra_body=extra_body, - timeout=timeout, - query=await async_maybe_transform( - {"api_timeout": api_timeout}, task_run_result_params.TaskRunResultParams - ), - ), - cast_to=BetaTaskRunResult, - ) - - -class TaskRunResourceWithRawResponse: - def __init__(self, task_run: TaskRunResource) -> None: - self._task_run = task_run - - self.create = to_raw_response_wrapper( - task_run.create, - ) - self.events = to_raw_response_wrapper( - task_run.events, - ) - self.result = to_raw_response_wrapper( - task_run.result, - ) - - -class AsyncTaskRunResourceWithRawResponse: - def __init__(self, task_run: AsyncTaskRunResource) -> None: - self._task_run = task_run - - self.create = async_to_raw_response_wrapper( - task_run.create, - ) - self.events = async_to_raw_response_wrapper( - task_run.events, - ) - self.result = async_to_raw_response_wrapper( - task_run.result, - ) - - -class TaskRunResourceWithStreamingResponse: - def __init__(self, task_run: TaskRunResource) -> None: - self._task_run = task_run - - self.create = to_streamed_response_wrapper( - task_run.create, - ) - self.events = to_streamed_response_wrapper( - task_run.events, - ) - self.result = to_streamed_response_wrapper( - task_run.result, - ) - - -class AsyncTaskRunResourceWithStreamingResponse: - def __init__(self, task_run: AsyncTaskRunResource) -> None: - self._task_run = task_run - - self.create = async_to_streamed_response_wrapper( - task_run.create, - ) - self.events = async_to_streamed_response_wrapper( - task_run.events, - ) - self.result = async_to_streamed_response_wrapper( - task_run.result, - ) diff --git a/src/parallel/resources/task_run.py b/src/parallel/resources/task_run.py index 9fe16d9..d05648d 100644 --- a/src/parallel/resources/task_run.py +++ b/src/parallel/resources/task_run.py @@ -31,7 +31,6 @@ wait_for_result_async as _wait_for_result_async, task_run_result_parser, ) -from ..types.shared_params.source_policy import SourcePolicy __all__ = ["TaskRunResource", "AsyncTaskRunResource"] @@ -59,10 +58,9 @@ def with_streaming_response(self) -> TaskRunResourceWithStreamingResponse: def create( self, *, - input: Union[str, Dict[str, object]], + input: Union[str, object], processor: str, metadata: Optional[Dict[str, Union[str, float, bool]]] | NotGiven = NOT_GIVEN, - source_policy: Optional[SourcePolicy] | NotGiven = NOT_GIVEN, task_spec: Optional[TaskSpecParam] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. @@ -72,11 +70,7 @@ def create( timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> TaskRun: """ - Initiates a task run. - - Returns immediately with a run object in status 'queued'. 
- - Beta features can be enabled by setting the 'parallel-beta' header. + Initiates a single task run. Args: input: Input to the task, either text or a JSON object. @@ -86,16 +80,10 @@ def create( metadata: User-provided metadata stored with the run. Keys and values must be strings with a maximum length of 16 and 512 characters respectively. - source_policy: Source policy for web search results. - - This policy governs which sources are allowed/disallowed in results. - task_spec: Specification for a task. - Auto output schemas can be specified by setting `output_schema={"type":"auto"}`. - Not specifying a TaskSpec is the same as setting an auto output schema. - - For convenience bare strings are also accepted as input or output schemas. + For convenience we allow bare strings as input or output schemas, which is + equivalent to a text schema with the same description. extra_headers: Send extra headers @@ -112,7 +100,6 @@ def create( "input": input, "processor": processor, "metadata": metadata, - "source_policy": source_policy, "task_spec": task_spec, }, task_run_create_params.TaskRunCreateParams, @@ -135,9 +122,7 @@ def retrieve( timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> TaskRun: """ - Retrieves run status by run_id. - - The run result is available from the `/result` endpoint. + Retrieves a run by run_id. Args: extra_headers: Send extra headers @@ -171,7 +156,7 @@ def result( timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> TaskRunResult: """ - Retrieves a run result by run_id, blocking until the run is completed. + Retrieves a run by run_id, blocking until the run is completed. Args: extra_headers: Send extra headers @@ -348,10 +333,9 @@ def with_streaming_response(self) -> AsyncTaskRunResourceWithStreamingResponse: async def create( self, *, - input: Union[str, Dict[str, object]], + input: Union[str, object], processor: str, metadata: Optional[Dict[str, Union[str, float, bool]]] | NotGiven = NOT_GIVEN, - source_policy: Optional[SourcePolicy] | NotGiven = NOT_GIVEN, task_spec: Optional[TaskSpecParam] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. @@ -361,11 +345,7 @@ async def create( timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> TaskRun: """ - Initiates a task run. - - Returns immediately with a run object in status 'queued'. - - Beta features can be enabled by setting the 'parallel-beta' header. + Initiates a single task run. Args: input: Input to the task, either text or a JSON object. @@ -375,16 +355,10 @@ async def create( metadata: User-provided metadata stored with the run. Keys and values must be strings with a maximum length of 16 and 512 characters respectively. - source_policy: Source policy for web search results. - - This policy governs which sources are allowed/disallowed in results. - task_spec: Specification for a task. - Auto output schemas can be specified by setting `output_schema={"type":"auto"}`. - Not specifying a TaskSpec is the same as setting an auto output schema. - - For convenience bare strings are also accepted as input or output schemas. + For convenience we allow bare strings as input or output schemas, which is + equivalent to a text schema with the same description. 
extra_headers: Send extra headers @@ -401,7 +375,6 @@ async def create( "input": input, "processor": processor, "metadata": metadata, - "source_policy": source_policy, "task_spec": task_spec, }, task_run_create_params.TaskRunCreateParams, @@ -424,9 +397,7 @@ async def retrieve( timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> TaskRun: """ - Retrieves run status by run_id. - - The run result is available from the `/result` endpoint. + Retrieves a run by run_id. Args: extra_headers: Send extra headers @@ -460,7 +431,7 @@ async def result( timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> TaskRunResult: """ - Retrieves a run result by run_id, blocking until the run is completed. + Retrieves a run by run_id, blocking until the run is completed. Args: extra_headers: Send extra headers diff --git a/src/parallel/types/__init__.py b/src/parallel/types/__init__.py index 15d056e..9404593 100644 --- a/src/parallel/types/__init__.py +++ b/src/parallel/types/__init__.py @@ -2,22 +2,11 @@ from __future__ import annotations -from .shared import ( - Warning as Warning, - ErrorObject as ErrorObject, - SourcePolicy as SourcePolicy, - ErrorResponse as ErrorResponse, -) +from .shared import ErrorObject as ErrorObject, ErrorResponse as ErrorResponse from .citation import Citation as Citation from .task_run import TaskRun as TaskRun -from .task_spec import TaskSpec as TaskSpec -from .auto_schema import AutoSchema as AutoSchema -from .field_basis import FieldBasis as FieldBasis -from .json_schema import JsonSchema as JsonSchema -from .text_schema import TextSchema as TextSchema from .task_run_result import TaskRunResult as TaskRunResult from .task_spec_param import TaskSpecParam as TaskSpecParam -from .auto_schema_param import AutoSchemaParam as AutoSchemaParam from .json_schema_param import JsonSchemaParam as JsonSchemaParam from .text_schema_param import TextSchemaParam as TextSchemaParam from .task_run_json_output import TaskRunJsonOutput as TaskRunJsonOutput diff --git a/src/parallel/types/auto_schema.py b/src/parallel/types/auto_schema.py deleted file mode 100644 index 6ff429e..0000000 --- a/src/parallel/types/auto_schema.py +++ /dev/null @@ -1,13 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing import Optional -from typing_extensions import Literal - -from .._models import BaseModel - -__all__ = ["AutoSchema"] - - -class AutoSchema(BaseModel): - type: Optional[Literal["auto"]] = None - """The type of schema being defined. Always `auto`.""" diff --git a/src/parallel/types/auto_schema_param.py b/src/parallel/types/auto_schema_param.py deleted file mode 100644 index 54d5196..0000000 --- a/src/parallel/types/auto_schema_param.py +++ /dev/null @@ -1,12 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing_extensions import Literal, TypedDict - -__all__ = ["AutoSchemaParam"] - - -class AutoSchemaParam(TypedDict, total=False): - type: Literal["auto"] - """The type of schema being defined. Always `auto`.""" diff --git a/src/parallel/types/beta/__init__.py b/src/parallel/types/beta/__init__.py index a872a43..f8ee8b1 100644 --- a/src/parallel/types/beta/__init__.py +++ b/src/parallel/types/beta/__init__.py @@ -1,30 +1,3 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
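For reference, the simplified `create()`/`result()` surface in the task_run resource hunk above maps to client code roughly as follows. This is a minimal sketch only: it assumes the resource is mounted as `client.task_run` and that `TaskRun` carries a `run_id` field, neither of which is visible in this hunk; the input and processor values are taken from the test fixtures removed later in this patch.

from parallel import Parallel

client = Parallel()  # assumes the API key is picked up from the environment

# Bare-string input; "base" is one of the processor values used in the tests.
run = client.task_run.create(
    input="What was the GDP of France in 2023?",
    processor="base",
)

# Blocks until the run completes, then exposes `output` and `run`.
result = client.task_run.result(run.run_id)
print(result.output.content)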
from __future__ import annotations - -from .webhook import Webhook as Webhook -from .mcp_server import McpServer as McpServer -from .task_group import TaskGroup as TaskGroup -from .error_event import ErrorEvent as ErrorEvent -from .mcp_tool_call import McpToolCall as McpToolCall -from .search_result import SearchResult as SearchResult -from .webhook_param import WebhookParam as WebhookParam -from .beta_run_input import BetaRunInput as BetaRunInput -from .task_run_event import TaskRunEvent as TaskRunEvent -from .mcp_server_param import McpServerParam as McpServerParam -from .task_group_status import TaskGroupStatus as TaskGroupStatus -from .web_search_result import WebSearchResult as WebSearchResult -from .beta_search_params import BetaSearchParams as BetaSearchParams -from .parallel_beta_param import ParallelBetaParam as ParallelBetaParam -from .beta_run_input_param import BetaRunInputParam as BetaRunInputParam -from .beta_task_run_result import BetaTaskRunResult as BetaTaskRunResult -from .task_run_create_params import TaskRunCreateParams as TaskRunCreateParams -from .task_run_result_params import TaskRunResultParams as TaskRunResultParams -from .task_group_run_response import TaskGroupRunResponse as TaskGroupRunResponse -from .task_group_create_params import TaskGroupCreateParams as TaskGroupCreateParams -from .task_group_events_params import TaskGroupEventsParams as TaskGroupEventsParams -from .task_run_events_response import TaskRunEventsResponse as TaskRunEventsResponse -from .task_group_add_runs_params import TaskGroupAddRunsParams as TaskGroupAddRunsParams -from .task_group_events_response import TaskGroupEventsResponse as TaskGroupEventsResponse -from .task_group_get_runs_params import TaskGroupGetRunsParams as TaskGroupGetRunsParams -from .task_group_get_runs_response import TaskGroupGetRunsResponse as TaskGroupGetRunsResponse diff --git a/src/parallel/types/beta/beta_run_input.py b/src/parallel/types/beta/beta_run_input.py deleted file mode 100644 index 7707711..0000000 --- a/src/parallel/types/beta/beta_run_input.py +++ /dev/null @@ -1,63 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing import Dict, List, Union, Optional - -from .webhook import Webhook -from ..._models import BaseModel -from ..task_spec import TaskSpec -from .mcp_server import McpServer -from ..shared.source_policy import SourcePolicy - -__all__ = ["BetaRunInput"] - - -class BetaRunInput(BaseModel): - input: Union[str, Dict[str, object]] - """Input to the task, either text or a JSON object.""" - - processor: str - """Processor to use for the task.""" - - enable_events: Optional[bool] = None - """Controls tracking of task run execution progress. - - When set to true, progress events are recorded and can be accessed via the - [Task Run events](https://platform.parallel.ai/api-reference) endpoint. When - false, no progress events are tracked. Note that progress tracking cannot be - enabled after a run has been created. The flag is set to true by default for - premium processors (pro and above). This feature is not available via the Python - SDK. To enable this feature in your API requests, specify the `parallel-beta` - header with `events-sse-2025-07-24` value. - """ - - mcp_servers: Optional[List[McpServer]] = None - """ - Optional list of MCP servers to use for the run. This feature is not available - via the Python SDK. To enable this feature in your API requests, specify the - `parallel-beta` header with `mcp-server-2025-07-17` value. 
- """ - - metadata: Optional[Dict[str, Union[str, float, bool]]] = None - """User-provided metadata stored with the run. - - Keys and values must be strings with a maximum length of 16 and 512 characters - respectively. - """ - - source_policy: Optional[SourcePolicy] = None - """Source policy for web search results. - - This policy governs which sources are allowed/disallowed in results. - """ - - task_spec: Optional[TaskSpec] = None - """Specification for a task. - - Auto output schemas can be specified by setting `output_schema={"type":"auto"}`. - Not specifying a TaskSpec is the same as setting an auto output schema. - - For convenience bare strings are also accepted as input or output schemas. - """ - - webhook: Optional[Webhook] = None - """Webhooks for Task Runs.""" diff --git a/src/parallel/types/beta/beta_run_input_param.py b/src/parallel/types/beta/beta_run_input_param.py deleted file mode 100644 index 715f6ed..0000000 --- a/src/parallel/types/beta/beta_run_input_param.py +++ /dev/null @@ -1,65 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing import Dict, Union, Iterable, Optional -from typing_extensions import Required, TypedDict - -from .webhook_param import WebhookParam -from ..task_spec_param import TaskSpecParam -from .mcp_server_param import McpServerParam -from ..shared_params.source_policy import SourcePolicy - -__all__ = ["BetaRunInputParam"] - - -class BetaRunInputParam(TypedDict, total=False): - input: Required[Union[str, Dict[str, object]]] - """Input to the task, either text or a JSON object.""" - - processor: Required[str] - """Processor to use for the task.""" - - enable_events: Optional[bool] - """Controls tracking of task run execution progress. - - When set to true, progress events are recorded and can be accessed via the - [Task Run events](https://platform.parallel.ai/api-reference) endpoint. When - false, no progress events are tracked. Note that progress tracking cannot be - enabled after a run has been created. The flag is set to true by default for - premium processors (pro and above). This feature is not available via the Python - SDK. To enable this feature in your API requests, specify the `parallel-beta` - header with `events-sse-2025-07-24` value. - """ - - mcp_servers: Optional[Iterable[McpServerParam]] - """ - Optional list of MCP servers to use for the run. This feature is not available - via the Python SDK. To enable this feature in your API requests, specify the - `parallel-beta` header with `mcp-server-2025-07-17` value. - """ - - metadata: Optional[Dict[str, Union[str, float, bool]]] - """User-provided metadata stored with the run. - - Keys and values must be strings with a maximum length of 16 and 512 characters - respectively. - """ - - source_policy: Optional[SourcePolicy] - """Source policy for web search results. - - This policy governs which sources are allowed/disallowed in results. - """ - - task_spec: Optional[TaskSpecParam] - """Specification for a task. - - Auto output schemas can be specified by setting `output_schema={"type":"auto"}`. - Not specifying a TaskSpec is the same as setting an auto output schema. - - For convenience bare strings are also accepted as input or output schemas. 
- """ - - webhook: Optional[WebhookParam] - """Webhooks for Task Runs.""" diff --git a/src/parallel/types/beta/beta_search_params.py b/src/parallel/types/beta/beta_search_params.py deleted file mode 100644 index b6b55a6..0000000 --- a/src/parallel/types/beta/beta_search_params.py +++ /dev/null @@ -1,47 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing import List, Optional -from typing_extensions import Literal, TypedDict - -from ..shared_params.source_policy import SourcePolicy - -__all__ = ["BetaSearchParams"] - - -class BetaSearchParams(TypedDict, total=False): - max_chars_per_result: Optional[int] - """ - Upper bound on the number of characters to include in excerpts for each search - result. - """ - - max_results: Optional[int] - """Upper bound on the number of results to return. - - May be limited by the processor. Defaults to 10 if not provided. - """ - - objective: Optional[str] - """Natural-language description of what the web search is trying to find. - - May include guidance about preferred sources or freshness. At least one of - objective or search_queries must be provided. - """ - - processor: Literal["base", "pro"] - """Search processor.""" - - search_queries: Optional[List[str]] - """Optional list of traditional keyword search queries to guide the search. - - May contain search operators. At least one of objective or search_queries must - be provided. - """ - - source_policy: Optional[SourcePolicy] - """Source policy for web search results. - - This policy governs which sources are allowed/disallowed in results. - """ diff --git a/src/parallel/types/beta/beta_task_run_result.py b/src/parallel/types/beta/beta_task_run_result.py deleted file mode 100644 index 3db1c50..0000000 --- a/src/parallel/types/beta/beta_task_run_result.py +++ /dev/null @@ -1,74 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing import Dict, List, Union, Optional -from typing_extensions import Literal, Annotated, TypeAlias - -from ..._utils import PropertyInfo -from ..._models import BaseModel -from ..task_run import TaskRun -from ..field_basis import FieldBasis -from .mcp_tool_call import McpToolCall - -__all__ = ["BetaTaskRunResult", "Output", "OutputBetaTaskRunTextOutput", "OutputBetaTaskRunJsonOutput"] - - -class OutputBetaTaskRunTextOutput(BaseModel): - basis: List[FieldBasis] - """Basis for the output.""" - - content: str - """Text output from the task.""" - - type: Literal["text"] - """ - The type of output being returned, as determined by the output schema of the - task spec. - """ - - beta_fields: Optional[Dict[str, object]] = None - """Always None.""" - - mcp_tool_calls: Optional[List[McpToolCall]] = None - """MCP tool calls made by the task.""" - - -class OutputBetaTaskRunJsonOutput(BaseModel): - basis: List[FieldBasis] - """Basis for the output.""" - - content: Dict[str, object] - """ - Output from the task as a native JSON object, as determined by the output schema - of the task spec. - """ - - type: Literal["json"] - """ - The type of output being returned, as determined by the output schema of the - task spec. - """ - - beta_fields: Optional[Dict[str, object]] = None - """Always None.""" - - mcp_tool_calls: Optional[List[McpToolCall]] = None - """MCP tool calls made by the task.""" - - output_schema: Optional[Dict[str, object]] = None - """Output schema for the Task Run. - - Populated only if the task was executed with an auto schema. 
- """ - - -Output: TypeAlias = Annotated[ - Union[OutputBetaTaskRunTextOutput, OutputBetaTaskRunJsonOutput], PropertyInfo(discriminator="type") -] - - -class BetaTaskRunResult(BaseModel): - output: Output - """Output from the task conforming to the output schema.""" - - run: TaskRun - """Status of a task run.""" diff --git a/src/parallel/types/beta/error_event.py b/src/parallel/types/beta/error_event.py deleted file mode 100644 index aefa872..0000000 --- a/src/parallel/types/beta/error_event.py +++ /dev/null @@ -1,16 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing_extensions import Literal - -from ..._models import BaseModel -from ..shared.error_object import ErrorObject - -__all__ = ["ErrorEvent"] - - -class ErrorEvent(BaseModel): - error: ErrorObject - """An error message.""" - - type: Literal["error"] - """Event type; always 'error'.""" diff --git a/src/parallel/types/beta/mcp_server.py b/src/parallel/types/beta/mcp_server.py deleted file mode 100644 index 192493f..0000000 --- a/src/parallel/types/beta/mcp_server.py +++ /dev/null @@ -1,25 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing import Dict, List, Optional -from typing_extensions import Literal - -from ..._models import BaseModel - -__all__ = ["McpServer"] - - -class McpServer(BaseModel): - name: str - """Name of the MCP server.""" - - url: str - """URL of the MCP server.""" - - allowed_tools: Optional[List[str]] = None - """List of allowed tools for the MCP server.""" - - headers: Optional[Dict[str, str]] = None - """Headers for the MCP server.""" - - type: Optional[Literal["url"]] = None - """Type of MCP server being configured. Always `url`.""" diff --git a/src/parallel/types/beta/mcp_server_param.py b/src/parallel/types/beta/mcp_server_param.py deleted file mode 100644 index cef54ce..0000000 --- a/src/parallel/types/beta/mcp_server_param.py +++ /dev/null @@ -1,25 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing import Dict, List, Optional -from typing_extensions import Literal, Required, TypedDict - -__all__ = ["McpServerParam"] - - -class McpServerParam(TypedDict, total=False): - name: Required[str] - """Name of the MCP server.""" - - url: Required[str] - """URL of the MCP server.""" - - allowed_tools: Optional[List[str]] - """List of allowed tools for the MCP server.""" - - headers: Optional[Dict[str, str]] - """Headers for the MCP server.""" - - type: Literal["url"] - """Type of MCP server being configured. Always `url`.""" diff --git a/src/parallel/types/beta/mcp_tool_call.py b/src/parallel/types/beta/mcp_tool_call.py deleted file mode 100644 index daa7cca..0000000 --- a/src/parallel/types/beta/mcp_tool_call.py +++ /dev/null @@ -1,27 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
- -from typing import Optional - -from ..._models import BaseModel - -__all__ = ["McpToolCall"] - - -class McpToolCall(BaseModel): - arguments: str - """Arguments used to call the MCP tool.""" - - server_name: str - """Name of the MCP server.""" - - tool_call_id: str - """Identifier for the tool call.""" - - tool_name: str - """Name of the tool being called.""" - - content: Optional[str] = None - """Output received from the tool call, if successful.""" - - error: Optional[str] = None - """Error message if the tool call failed.""" diff --git a/src/parallel/types/beta/parallel_beta_param.py b/src/parallel/types/beta/parallel_beta_param.py deleted file mode 100644 index 80b4de0..0000000 --- a/src/parallel/types/beta/parallel_beta_param.py +++ /dev/null @@ -1,12 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing import Union -from typing_extensions import Literal, TypeAlias - -__all__ = ["ParallelBetaParam"] - -ParallelBetaParam: TypeAlias = Union[ - Literal["mcp-server-2025-07-17", "events-sse-2025-07-24", "webhook-2025-08-12"], str -] diff --git a/src/parallel/types/beta/search_result.py b/src/parallel/types/beta/search_result.py deleted file mode 100644 index 05c945c..0000000 --- a/src/parallel/types/beta/search_result.py +++ /dev/null @@ -1,16 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing import List - -from ..._models import BaseModel -from .web_search_result import WebSearchResult - -__all__ = ["SearchResult"] - - -class SearchResult(BaseModel): - results: List[WebSearchResult] - """A list of WebSearchResult objects, ordered by decreasing relevance.""" - - search_id: str - """Search ID. Example: `search_cad0a6d2-dec0-46bd-95ae-900527d880e7`""" diff --git a/src/parallel/types/beta/task_group.py b/src/parallel/types/beta/task_group.py deleted file mode 100644 index 247bddb..0000000 --- a/src/parallel/types/beta/task_group.py +++ /dev/null @@ -1,24 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing import Dict, Union, Optional - -from pydantic import Field as FieldInfo - -from ..._models import BaseModel -from .task_group_status import TaskGroupStatus - -__all__ = ["TaskGroup"] - - -class TaskGroup(BaseModel): - created_at: Optional[str] = None - """Timestamp of the creation of the group, as an RFC 3339 string.""" - - status: TaskGroupStatus - """Status of a task group.""" - - task_group_id: str = FieldInfo(alias="taskgroup_id") - """ID of the group.""" - - metadata: Optional[Dict[str, Union[str, float, bool]]] = None - """User-provided metadata stored with the group.""" diff --git a/src/parallel/types/beta/task_group_add_runs_params.py b/src/parallel/types/beta/task_group_add_runs_params.py deleted file mode 100644 index 68523f9..0000000 --- a/src/parallel/types/beta/task_group_add_runs_params.py +++ /dev/null @@ -1,30 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
- -from __future__ import annotations - -from typing import List, Iterable, Optional -from typing_extensions import Required, Annotated, TypedDict - -from ..._utils import PropertyInfo -from ..task_spec_param import TaskSpecParam -from .parallel_beta_param import ParallelBetaParam -from .beta_run_input_param import BetaRunInputParam - -__all__ = ["TaskGroupAddRunsParams"] - - -class TaskGroupAddRunsParams(TypedDict, total=False): - inputs: Required[Iterable[BetaRunInputParam]] - """List of task runs to execute.""" - - default_task_spec: Optional[TaskSpecParam] - """Specification for a task. - - Auto output schemas can be specified by setting `output_schema={"type":"auto"}`. - Not specifying a TaskSpec is the same as setting an auto output schema. - - For convenience bare strings are also accepted as input or output schemas. - """ - - betas: Annotated[List[ParallelBetaParam], PropertyInfo(alias="parallel-beta")] - """Optional header to specify the beta version(s) to enable.""" diff --git a/src/parallel/types/beta/task_group_create_params.py b/src/parallel/types/beta/task_group_create_params.py deleted file mode 100644 index 2b5cc73..0000000 --- a/src/parallel/types/beta/task_group_create_params.py +++ /dev/null @@ -1,13 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing import Dict, Union, Optional -from typing_extensions import TypedDict - -__all__ = ["TaskGroupCreateParams"] - - -class TaskGroupCreateParams(TypedDict, total=False): - metadata: Optional[Dict[str, Union[str, float, bool]]] - """User-provided metadata stored with the task group.""" diff --git a/src/parallel/types/beta/task_group_events_params.py b/src/parallel/types/beta/task_group_events_params.py deleted file mode 100644 index 15f0d00..0000000 --- a/src/parallel/types/beta/task_group_events_params.py +++ /dev/null @@ -1,16 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing import Optional -from typing_extensions import Annotated, TypedDict - -from ..._utils import PropertyInfo - -__all__ = ["TaskGroupEventsParams"] - - -class TaskGroupEventsParams(TypedDict, total=False): - last_event_id: Optional[str] - - api_timeout: Annotated[Optional[float], PropertyInfo(alias="timeout")] diff --git a/src/parallel/types/beta/task_group_events_response.py b/src/parallel/types/beta/task_group_events_response.py deleted file mode 100644 index cbb6522..0000000 --- a/src/parallel/types/beta/task_group_events_response.py +++ /dev/null @@ -1,28 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
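The removed `ParallelBetaParam` alias above enumerated the known `parallel-beta` header values while still permitting arbitrary strings; they are kept here as plain data for reference.

# Literal values from the deleted ParallelBetaParam type alias.
KNOWN_PARALLEL_BETAS = [
    "mcp-server-2025-07-17",
    "events-sse-2025-07-24",
    "webhook-2025-08-12",
]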
- -from typing import Union -from typing_extensions import Literal, Annotated, TypeAlias - -from ..._utils import PropertyInfo -from ..._models import BaseModel -from .error_event import ErrorEvent -from .task_run_event import TaskRunEvent -from .task_group_status import TaskGroupStatus - -__all__ = ["TaskGroupEventsResponse", "TaskGroupStatusEvent"] - - -class TaskGroupStatusEvent(BaseModel): - event_id: str - """Cursor to resume the event stream.""" - - status: TaskGroupStatus - """Status of a task group.""" - - type: Literal["task_group_status"] - """Event type; always 'task_group_status'.""" - - -TaskGroupEventsResponse: TypeAlias = Annotated[ - Union[TaskGroupStatusEvent, TaskRunEvent, ErrorEvent], PropertyInfo(discriminator="type") -] diff --git a/src/parallel/types/beta/task_group_get_runs_params.py b/src/parallel/types/beta/task_group_get_runs_params.py deleted file mode 100644 index b6b1ef7..0000000 --- a/src/parallel/types/beta/task_group_get_runs_params.py +++ /dev/null @@ -1,18 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing import Optional -from typing_extensions import Literal, TypedDict - -__all__ = ["TaskGroupGetRunsParams"] - - -class TaskGroupGetRunsParams(TypedDict, total=False): - include_input: bool - - include_output: bool - - last_event_id: Optional[str] - - status: Optional[Literal["queued", "action_required", "running", "completed", "failed", "cancelling", "cancelled"]] diff --git a/src/parallel/types/beta/task_group_get_runs_response.py b/src/parallel/types/beta/task_group_get_runs_response.py deleted file mode 100644 index b287dcb..0000000 --- a/src/parallel/types/beta/task_group_get_runs_response.py +++ /dev/null @@ -1,12 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing import Union -from typing_extensions import Annotated, TypeAlias - -from ..._utils import PropertyInfo -from .error_event import ErrorEvent -from .task_run_event import TaskRunEvent - -__all__ = ["TaskGroupGetRunsResponse"] - -TaskGroupGetRunsResponse: TypeAlias = Annotated[Union[TaskRunEvent, ErrorEvent], PropertyInfo(discriminator="type")] diff --git a/src/parallel/types/beta/task_group_run_response.py b/src/parallel/types/beta/task_group_run_response.py deleted file mode 100644 index 98161bb..0000000 --- a/src/parallel/types/beta/task_group_run_response.py +++ /dev/null @@ -1,30 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing import List, Optional - -from ..._models import BaseModel -from .task_group_status import TaskGroupStatus - -__all__ = ["TaskGroupRunResponse"] - - -class TaskGroupRunResponse(BaseModel): - event_cursor: Optional[str] = None - """ - Cursor for these runs in the event stream at - taskgroup/events?last_event_id=. Empty for the first runs in the - group. - """ - - run_cursor: Optional[str] = None - """ - Cursor for these runs in the run stream at - taskgroup/runs?last_event_id=. Empty for the first runs in the - group. - """ - - run_ids: List[str] - """IDs of the newly created runs.""" - - status: TaskGroupStatus - """Status of a task group.""" diff --git a/src/parallel/types/beta/task_group_status.py b/src/parallel/types/beta/task_group_status.py deleted file mode 100644 index 221b527..0000000 --- a/src/parallel/types/beta/task_group_status.py +++ /dev/null @@ -1,27 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. 
See CONTRIBUTING.md for details. - -from typing import Dict, Optional - -from ..._models import BaseModel - -__all__ = ["TaskGroupStatus"] - - -class TaskGroupStatus(BaseModel): - is_active: bool - """True if at least one run in the group is currently active, i.e. - - status is one of {'cancelling', 'queued', 'running'}. - """ - - modified_at: Optional[str] = None - """Timestamp of the last status update to the group, as an RFC 3339 string.""" - - num_task_runs: int - """Number of task runs in the group.""" - - status_message: Optional[str] = None - """Human-readable status message for the group.""" - - task_run_status_counts: Dict[str, int] - """Number of task runs with each status.""" diff --git a/src/parallel/types/beta/task_run_create_params.py b/src/parallel/types/beta/task_run_create_params.py deleted file mode 100644 index 80bd0da..0000000 --- a/src/parallel/types/beta/task_run_create_params.py +++ /dev/null @@ -1,70 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing import Dict, List, Union, Iterable, Optional -from typing_extensions import Required, Annotated, TypedDict - -from ..._utils import PropertyInfo -from .webhook_param import WebhookParam -from ..task_spec_param import TaskSpecParam -from .mcp_server_param import McpServerParam -from .parallel_beta_param import ParallelBetaParam -from ..shared_params.source_policy import SourcePolicy - -__all__ = ["TaskRunCreateParams"] - - -class TaskRunCreateParams(TypedDict, total=False): - input: Required[Union[str, Dict[str, object]]] - """Input to the task, either text or a JSON object.""" - - processor: Required[str] - """Processor to use for the task.""" - - enable_events: Optional[bool] - """Controls tracking of task run execution progress. - - When set to true, progress events are recorded and can be accessed via the - [Task Run events](https://platform.parallel.ai/api-reference) endpoint. When - false, no progress events are tracked. Note that progress tracking cannot be - enabled after a run has been created. The flag is set to true by default for - premium processors (pro and above). This feature is not available via the Python - SDK. To enable this feature in your API requests, specify the `parallel-beta` - header with `events-sse-2025-07-24` value. - """ - - mcp_servers: Optional[Iterable[McpServerParam]] - """ - Optional list of MCP servers to use for the run. This feature is not available - via the Python SDK. To enable this feature in your API requests, specify the - `parallel-beta` header with `mcp-server-2025-07-17` value. - """ - - metadata: Optional[Dict[str, Union[str, float, bool]]] - """User-provided metadata stored with the run. - - Keys and values must be strings with a maximum length of 16 and 512 characters - respectively. - """ - - source_policy: Optional[SourcePolicy] - """Source policy for web search results. - - This policy governs which sources are allowed/disallowed in results. - """ - - task_spec: Optional[TaskSpecParam] - """Specification for a task. - - Auto output schemas can be specified by setting `output_schema={"type":"auto"}`. - Not specifying a TaskSpec is the same as setting an auto output schema. - - For convenience bare strings are also accepted as input or output schemas. 
- """ - - webhook: Optional[WebhookParam] - """Webhooks for Task Runs.""" - - betas: Annotated[List[ParallelBetaParam], PropertyInfo(alias="parallel-beta")] - """Optional header to specify the beta version(s) to enable.""" diff --git a/src/parallel/types/beta/task_run_event.py b/src/parallel/types/beta/task_run_event.py deleted file mode 100644 index 7d809fe..0000000 --- a/src/parallel/types/beta/task_run_event.py +++ /dev/null @@ -1,32 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing import Union, Optional -from typing_extensions import Literal, Annotated, TypeAlias - -from ..._utils import PropertyInfo -from ..._models import BaseModel -from ..task_run import TaskRun -from .beta_run_input import BetaRunInput -from ..task_run_json_output import TaskRunJsonOutput -from ..task_run_text_output import TaskRunTextOutput - -__all__ = ["TaskRunEvent", "Output"] - -Output: TypeAlias = Annotated[Union[TaskRunTextOutput, TaskRunJsonOutput, None], PropertyInfo(discriminator="type")] - - -class TaskRunEvent(BaseModel): - event_id: Optional[str] = None - """Cursor to resume the event stream. Always empty for non Task Group runs.""" - - run: TaskRun - """Status of a task run.""" - - type: Literal["task_run.state"] - """Event type; always 'task_run.state'.""" - - input: Optional[BetaRunInput] = None - """Task run input with additional beta fields.""" - - output: Optional[Output] = None - """Output from the run; included only if requested and if status == `completed`.""" diff --git a/src/parallel/types/beta/task_run_events_response.py b/src/parallel/types/beta/task_run_events_response.py deleted file mode 100644 index 27ea82f..0000000 --- a/src/parallel/types/beta/task_run_events_response.py +++ /dev/null @@ -1,58 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
- -from typing import List, Union, Optional -from typing_extensions import Literal, Annotated, TypeAlias - -from ..._utils import PropertyInfo -from ..._models import BaseModel -from .error_event import ErrorEvent -from .task_run_event import TaskRunEvent - -__all__ = [ - "TaskRunEventsResponse", - "TaskRunProgressStatsEvent", - "TaskRunProgressStatsEventSourceStats", - "TaskRunProgressMessageEvent", -] - - -class TaskRunProgressStatsEventSourceStats(BaseModel): - num_sources_considered: Optional[int] = None - """Number of sources considered in processing the task.""" - - num_sources_read: Optional[int] = None - """Number of sources read in processing the task.""" - - sources_read_sample: Optional[List[str]] = None - """A sample of URLs of sources read in processing the task.""" - - -class TaskRunProgressStatsEvent(BaseModel): - source_stats: TaskRunProgressStatsEventSourceStats - """Source stats for a task run.""" - - type: Literal["task_run.progress_stats"] - """Event type; always 'task_run.progress_stats'.""" - - -class TaskRunProgressMessageEvent(BaseModel): - message: str - """Progress update message.""" - - timestamp: Optional[str] = None - """Timestamp of the message.""" - - type: Literal[ - "task_run.progress_msg.plan", - "task_run.progress_msg.search", - "task_run.progress_msg.result", - "task_run.progress_msg.tool_call", - "task_run.progress_msg.exec_status", - ] - """Event type; always starts with 'task_run.progress_msg'.""" - - -TaskRunEventsResponse: TypeAlias = Annotated[ - Union[TaskRunProgressStatsEvent, TaskRunProgressMessageEvent, TaskRunEvent, ErrorEvent], - PropertyInfo(discriminator="type"), -] diff --git a/src/parallel/types/beta/task_run_result_params.py b/src/parallel/types/beta/task_run_result_params.py deleted file mode 100644 index c48ef15..0000000 --- a/src/parallel/types/beta/task_run_result_params.py +++ /dev/null @@ -1,18 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing import List -from typing_extensions import Annotated, TypedDict - -from ..._utils import PropertyInfo -from .parallel_beta_param import ParallelBetaParam - -__all__ = ["TaskRunResultParams"] - - -class TaskRunResultParams(TypedDict, total=False): - api_timeout: Annotated[int, PropertyInfo(alias="timeout")] - - betas: Annotated[List[ParallelBetaParam], PropertyInfo(alias="parallel-beta")] - """Optional header to specify the beta version(s) to enable.""" diff --git a/src/parallel/types/beta/web_search_result.py b/src/parallel/types/beta/web_search_result.py deleted file mode 100644 index cbc7d5e..0000000 --- a/src/parallel/types/beta/web_search_result.py +++ /dev/null @@ -1,18 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing import List - -from ..._models import BaseModel - -__all__ = ["WebSearchResult"] - - -class WebSearchResult(BaseModel): - excerpts: List[str] - """Text excerpts from the search result which are relevant to the request.""" - - title: str - """Title of the search result.""" - - url: str - """URL associated with the search result.""" diff --git a/src/parallel/types/beta/webhook.py b/src/parallel/types/beta/webhook.py deleted file mode 100644 index afa1b5b..0000000 --- a/src/parallel/types/beta/webhook.py +++ /dev/null @@ -1,16 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
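The deleted `TaskRunEventsResponse` union above discriminated on the `type` field; a consumer of the old event stream would have branched roughly as below. This is a sketch only: these models and the stream helpers are gone after this patch, and `event` stands for any member of the removed union.

def describe_removed_event(event) -> str:
    # Branches mirror the discriminators of the deleted union members.
    if event.type == "task_run.state":
        return f"run status: {event.run.status}"
    if event.type == "task_run.progress_stats":
        return f"sources read: {event.source_stats.num_sources_read}"
    if event.type.startswith("task_run.progress_msg"):
        return event.message
    return f"error (ref_id={event.error.ref_id})"  # ErrorEvent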
- -from typing import List, Optional -from typing_extensions import Literal - -from ..._models import BaseModel - -__all__ = ["Webhook"] - - -class Webhook(BaseModel): - url: str - """URL for the webhook.""" - - event_types: Optional[List[Literal["task_run.status"]]] = None - """Event types to send the webhook notifications for.""" diff --git a/src/parallel/types/beta/webhook_param.py b/src/parallel/types/beta/webhook_param.py deleted file mode 100644 index b7e6cd6..0000000 --- a/src/parallel/types/beta/webhook_param.py +++ /dev/null @@ -1,16 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing import List -from typing_extensions import Literal, Required, TypedDict - -__all__ = ["WebhookParam"] - - -class WebhookParam(TypedDict, total=False): - url: Required[str] - """URL for the webhook.""" - - event_types: List[Literal["task_run.status"]] - """Event types to send the webhook notifications for.""" diff --git a/src/parallel/types/field_basis.py b/src/parallel/types/field_basis.py deleted file mode 100644 index de4d4a1..0000000 --- a/src/parallel/types/field_basis.py +++ /dev/null @@ -1,25 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing import List, Optional - -from .._models import BaseModel -from .citation import Citation - -__all__ = ["FieldBasis"] - - -class FieldBasis(BaseModel): - field: str - """Name of the output field.""" - - reasoning: str - """Reasoning for the output field.""" - - citations: Optional[List[Citation]] = None - """List of citations supporting the output field.""" - - confidence: Optional[str] = None - """Confidence level for the output field. - - Only certain processors provide confidence levels. - """ diff --git a/src/parallel/types/json_schema.py b/src/parallel/types/json_schema.py deleted file mode 100644 index 7bff20b..0000000 --- a/src/parallel/types/json_schema.py +++ /dev/null @@ -1,16 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing import Dict, Optional -from typing_extensions import Literal - -from .._models import BaseModel - -__all__ = ["JsonSchema"] - - -class JsonSchema(BaseModel): - json_schema: Dict[str, object] - """A JSON Schema object. Only a subset of JSON Schema is supported.""" - - type: Optional[Literal["json"]] = None - """The type of schema being defined. Always `json`.""" diff --git a/src/parallel/types/json_schema_param.py b/src/parallel/types/json_schema_param.py index 90bae27..6b04665 100644 --- a/src/parallel/types/json_schema_param.py +++ b/src/parallel/types/json_schema_param.py @@ -2,14 +2,13 @@ from __future__ import annotations -from typing import Dict from typing_extensions import Literal, Required, TypedDict __all__ = ["JsonSchemaParam"] class JsonSchemaParam(TypedDict, total=False): - json_schema: Required[Dict[str, object]] + json_schema: Required[object] """A JSON Schema object. Only a subset of JSON Schema is supported.""" type: Literal["json"] diff --git a/src/parallel/types/shared/__init__.py b/src/parallel/types/shared/__init__.py index c7a4d05..ea1f600 100644 --- a/src/parallel/types/shared/__init__.py +++ b/src/parallel/types/shared/__init__.py @@ -1,6 +1,4 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
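With `json_schema` loosened to a plain `object` in `JsonSchemaParam` above, any JSON-serializable schema value now type-checks. A minimal sketch follows; the field names inside the schema itself are illustrative.

from parallel.types import JsonSchemaParam

output_schema: JsonSchemaParam = {
    "type": "json",
    "json_schema": {
        "type": "object",
        "properties": {"gdp": {"type": "string"}},
        "required": ["gdp"],
        "additionalProperties": False,
    },
}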
-from .warning import Warning as Warning from .error_object import ErrorObject as ErrorObject -from .source_policy import SourcePolicy as SourcePolicy from .error_response import ErrorResponse as ErrorResponse diff --git a/src/parallel/types/shared/error_object.py b/src/parallel/types/shared/error_object.py index 52b9656..5dcece5 100644 --- a/src/parallel/types/shared/error_object.py +++ b/src/parallel/types/shared/error_object.py @@ -1,6 +1,6 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. -from typing import Dict, Optional +from typing import Optional from ..._models import BaseModel @@ -14,5 +14,5 @@ class ErrorObject(BaseModel): ref_id: str """Reference ID for the error.""" - detail: Optional[Dict[str, object]] = None + detail: Optional[object] = None """Optional detail supporting the error.""" diff --git a/src/parallel/types/shared/error_response.py b/src/parallel/types/shared/error_response.py index 4a72af6..6123db5 100644 --- a/src/parallel/types/shared/error_response.py +++ b/src/parallel/types/shared/error_response.py @@ -1,5 +1,6 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +from typing import Optional from typing_extensions import Literal from ..._models import BaseModel @@ -12,5 +13,5 @@ class ErrorResponse(BaseModel): error: ErrorObject """An error message.""" - type: Literal["error"] + type: Optional[Literal["error"]] = None """Always 'error'.""" diff --git a/src/parallel/types/shared/source_policy.py b/src/parallel/types/shared/source_policy.py deleted file mode 100644 index 50c38ca..0000000 --- a/src/parallel/types/shared/source_policy.py +++ /dev/null @@ -1,21 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing import List, Optional - -from ..._models import BaseModel - -__all__ = ["SourcePolicy"] - - -class SourcePolicy(BaseModel): - exclude_domains: Optional[List[str]] = None - """List of domains to exclude from results. - - If specified, sources from these domains will be excluded. - """ - - include_domains: Optional[List[str]] = None - """List of domains to restrict the results to. - - If specified, only sources from these domains will be included. - """ diff --git a/src/parallel/types/shared/warning.py b/src/parallel/types/shared/warning.py deleted file mode 100644 index 84a2b84..0000000 --- a/src/parallel/types/shared/warning.py +++ /dev/null @@ -1,22 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing import Dict, Optional -from typing_extensions import Literal - -from ..._models import BaseModel - -__all__ = ["Warning"] - - -class Warning(BaseModel): - message: str - """Human-readable message.""" - - type: Literal["spec_validation_warning", "input_validation_warning", "warning"] - """Type of warning. - - Note that adding new warning types is considered a backward-compatible change. - """ - - detail: Optional[Dict[str, object]] = None - """Optional detail supporting the warning.""" diff --git a/src/parallel/types/shared_params/__init__.py b/src/parallel/types/shared_params/__init__.py deleted file mode 100644 index 1ab16e6..0000000 --- a/src/parallel/types/shared_params/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
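Given that `detail` on `ErrorObject` is now an arbitrary `object` and `type` on `ErrorResponse` is optional, defensive consumers can render errors along these lines; a sketch that assumes nothing about the shape of `detail`.

from parallel.types import ErrorResponse

def render_error(err: ErrorResponse) -> str:
    kind = err.type or "error"  # `type` now defaults to None
    return f"[{kind}] ref_id={err.error.ref_id} detail={err.error.detail!r}"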
- -from .source_policy import SourcePolicy as SourcePolicy diff --git a/src/parallel/types/shared_params/source_policy.py b/src/parallel/types/shared_params/source_policy.py deleted file mode 100644 index 0a5d5ec..0000000 --- a/src/parallel/types/shared_params/source_policy.py +++ /dev/null @@ -1,22 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing import List -from typing_extensions import TypedDict - -__all__ = ["SourcePolicy"] - - -class SourcePolicy(TypedDict, total=False): - exclude_domains: List[str] - """List of domains to exclude from results. - - If specified, sources from these domains will be excluded. - """ - - include_domains: List[str] - """List of domains to restrict the results to. - - If specified, only sources from these domains will be included. - """ diff --git a/src/parallel/types/task_run.py b/src/parallel/types/task_run.py index a4fc96d..3db862c 100644 --- a/src/parallel/types/task_run.py +++ b/src/parallel/types/task_run.py @@ -3,26 +3,37 @@ from typing import Dict, List, Union, Optional from typing_extensions import Literal -from pydantic import Field as FieldInfo - from .._models import BaseModel -from .shared.warning import Warning -from .shared.error_object import ErrorObject __all__ = [ "TaskRun", - "Warning" # for backwards compatibility with v0.1.3 + "Warning", +# for backwards compatibility with v0.1.3 ] +class Warning(BaseModel): + message: str + """Human-readable message.""" + + type: str + """Type of warning. + + Note that adding new warning types is considered a backward-compatible change. + """ + + detail: Optional[object] = None + """Optional detail supporting the warning.""" + + class TaskRun(BaseModel): created_at: Optional[str] = None """Timestamp of the creation of the task, as an RFC 3339 string.""" is_active: bool - """Whether the run is currently active, i.e. + """Whether the run is currently active; i.e. - status is one of {'cancelling', 'queued', 'running'}. + status is one of {'running', 'queued', 'cancelling'}. """ modified_at: Optional[str] = None @@ -37,14 +48,8 @@ class TaskRun(BaseModel): status: Literal["queued", "action_required", "running", "completed", "failed", "cancelling", "cancelled"] """Status of the run.""" - error: Optional[ErrorObject] = None - """An error message.""" - metadata: Optional[Dict[str, Union[str, float, bool]]] = None """User-provided metadata stored with the run.""" - task_group_id: Optional[str] = FieldInfo(alias="taskgroup_id", default=None) - """ID of the taskgroup to which the run belongs.""" - warnings: Optional[List[Warning]] = None - """Warnings for the run, if any.""" + """Warnings for the run.""" diff --git a/src/parallel/types/task_run_create_params.py b/src/parallel/types/task_run_create_params.py index 6c81803..a6f39e0 100644 --- a/src/parallel/types/task_run_create_params.py +++ b/src/parallel/types/task_run_create_params.py @@ -6,13 +6,12 @@ from typing_extensions import Required, TypedDict from .task_spec_param import TaskSpecParam -from .shared_params.source_policy import SourcePolicy __all__ = ["TaskRunCreateParams"] class TaskRunCreateParams(TypedDict, total=False): - input: Required[Union[str, Dict[str, object]]] + input: Required[Union[str, object]] """Input to the task, either text or a JSON object.""" processor: Required[str] @@ -25,17 +24,9 @@ class TaskRunCreateParams(TypedDict, total=False): respectively. """ - source_policy: Optional[SourcePolicy] - """Source policy for web search results. 
- - This policy governs which sources are allowed/disallowed in results. - """ - task_spec: Optional[TaskSpecParam] """Specification for a task. - Auto output schemas can be specified by setting `output_schema={"type":"auto"}`. - Not specifying a TaskSpec is the same as setting an auto output schema. - - For convenience bare strings are also accepted as input or output schemas. + For convenience we allow bare strings as input or output schemas, which is + equivalent to a text schema with the same description. """ diff --git a/src/parallel/types/task_run_json_output.py b/src/parallel/types/task_run_json_output.py index ee63e60..48f2cf9 100644 --- a/src/parallel/types/task_run_json_output.py +++ b/src/parallel/types/task_run_json_output.py @@ -1,19 +1,36 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. -from typing import Dict, List, Optional +from typing import List, Optional from typing_extensions import Literal from .._models import BaseModel -from .field_basis import FieldBasis +from .citation import Citation -__all__ = ["TaskRunJsonOutput"] +__all__ = ["TaskRunJsonOutput", "Basis"] + + +class Basis(BaseModel): + field: str + """Name of the output field.""" + + reasoning: str + """Reasoning for the output field.""" + + citations: Optional[List[Citation]] = None + """List of citations supporting the output field.""" + + confidence: Optional[str] = None + """Confidence level for the output field. + + Only certain processors provide confidence levels. + """ class TaskRunJsonOutput(BaseModel): - basis: List[FieldBasis] + basis: List[Basis] """Basis for each top-level field in the JSON output.""" - content: Dict[str, object] + content: object """ Output from the task as a native JSON object, as determined by the output schema of the task spec. @@ -24,23 +41,3 @@ class TaskRunJsonOutput(BaseModel): The type of output being returned, as determined by the output schema of the task spec. """ - - beta_fields: Optional[Dict[str, object]] = None - """Additional fields from beta features used in this task run. - - When beta features are specified during both task run creation and result - retrieval, this field will be empty and instead the relevant beta attributes - will be directly included in the `BetaTaskRunJsonOutput` or corresponding output - type. However, if beta features were specified during task run creation but not - during result retrieval, this field will contain the dump of fields from those - beta features. Each key represents the beta feature version (one amongst - parallel-beta headers) and the values correspond to the beta feature attributes, - if any. For now, only MCP server beta features have attributes. For example, - `{mcp-server-2025-07-17: [{'server_name':'mcp_server', 'tool_call_id': 'tc_123', ...}]}}` - """ - - output_schema: Optional[Dict[str, object]] = None - """Output schema for the Task Run. - - Populated only if the task was executed with an auto schema. - """ diff --git a/src/parallel/types/task_run_result.py b/src/parallel/types/task_run_result.py index 7b52026..9c3cce4 100644 --- a/src/parallel/types/task_run_result.py +++ b/src/parallel/types/task_run_result.py @@ -1,9 +1,8 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
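The `Basis` entries inlined into `TaskRunJsonOutput` above pair each output field with reasoning, an optional confidence level, and optional citations. A sketch of walking them follows; it assumes `Citation` exposes a `url` attribute, since that model is not part of this diff.

def print_basis(output) -> None:
    # `output` is a TaskRunJsonOutput (TaskRunTextOutput has the same basis shape).
    for basis in output.basis:
        print(f"{basis.field}: {basis.reasoning} (confidence={basis.confidence})")
        for citation in basis.citations or []:
            print("  source:", citation.url)  # assumed attribute on Citation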
from typing import Union -from typing_extensions import Annotated, TypeAlias +from typing_extensions import TypeAlias -from .._utils import PropertyInfo from .._models import BaseModel from .citation import Citation from .task_run import TaskRun @@ -41,7 +40,7 @@ """This is deprecated, `TaskRunJsonOutput` should be used instead""" -Output: TypeAlias = Annotated[Union[TaskRunTextOutput, TaskRunJsonOutput], PropertyInfo(discriminator="type")] +Output: TypeAlias = Union[TaskRunTextOutput, TaskRunJsonOutput] class TaskRunResult(BaseModel): @@ -49,4 +48,4 @@ class TaskRunResult(BaseModel): """Output from the task conforming to the output schema.""" run: TaskRun - """Status of a task run.""" + """Status of a task.""" diff --git a/src/parallel/types/task_run_text_output.py b/src/parallel/types/task_run_text_output.py index ef38d7e..de50939 100644 --- a/src/parallel/types/task_run_text_output.py +++ b/src/parallel/types/task_run_text_output.py @@ -1,16 +1,33 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. -from typing import Dict, List, Optional +from typing import List, Optional from typing_extensions import Literal from .._models import BaseModel -from .field_basis import FieldBasis +from .citation import Citation -__all__ = ["TaskRunTextOutput"] +__all__ = ["TaskRunTextOutput", "Basis"] + + +class Basis(BaseModel): + field: str + """Name of the output field.""" + + reasoning: str + """Reasoning for the output field.""" + + citations: Optional[List[Citation]] = None + """List of citations supporting the output field.""" + + confidence: Optional[str] = None + """Confidence level for the output field. + + Only certain processors provide confidence levels. + """ class TaskRunTextOutput(BaseModel): - basis: List[FieldBasis] + basis: List[Basis] """Basis for the output. The basis has a single field 'output'.""" content: str @@ -21,17 +38,3 @@ class TaskRunTextOutput(BaseModel): The type of output being returned, as determined by the output schema of the task spec. """ - - beta_fields: Optional[Dict[str, object]] = None - """Additional fields from beta features used in this task run. - - When beta features are specified during both task run creation and result - retrieval, this field will be empty and instead the relevant beta attributes - will be directly included in the `BetaTaskRunJsonOutput` or corresponding output - type. However, if beta features were specified during task run creation but not - during result retrieval, this field will contain the dump of fields from those - beta features. Each key represents the beta feature version (one amongst - parallel-beta headers) and the values correspond to the beta feature attributes, - if any. For now, only MCP server beta features have attributes. For example, - `{mcp-server-2025-07-17: [{'server_name':'mcp_server', 'tool_call_id': 'tc_123', ...}]}}` - """ diff --git a/src/parallel/types/task_spec.py b/src/parallel/types/task_spec.py deleted file mode 100644 index fc7aefe..0000000 --- a/src/parallel/types/task_spec.py +++ /dev/null @@ -1,31 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
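Since `Output` on `TaskRunResult` is now a plain union rather than a discriminated `Annotated` union, callers can still tell the variants apart through the `type` literal; a minimal sketch.

from parallel.types import TaskRunResult

def output_as_text(result: TaskRunResult) -> str:
    output = result.output
    if output.type == "text":
        return output.content    # TaskRunTextOutput: content is a str
    return repr(output.content)  # TaskRunJsonOutput: content is a plain object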
- -from typing import Union, Optional -from typing_extensions import TypeAlias - -from .._models import BaseModel -from .auto_schema import AutoSchema -from .json_schema import JsonSchema -from .text_schema import TextSchema - -__all__ = ["TaskSpec", "OutputSchema", "InputSchema"] - -OutputSchema: TypeAlias = Union[JsonSchema, TextSchema, AutoSchema, str] - -InputSchema: TypeAlias = Union[str, JsonSchema, TextSchema, None] - - -class TaskSpec(BaseModel): - output_schema: OutputSchema - """JSON schema or text fully describing the desired output from the task. - - Descriptions of output fields will determine the form and content of the - response. A bare string is equivalent to a text schema with the same - description. - """ - - input_schema: Optional[InputSchema] = None - """Optional JSON schema or text description of expected input to the task. - - A bare string is equivalent to a text schema with the same description. - """ diff --git a/src/parallel/types/task_spec_param.py b/src/parallel/types/task_spec_param.py index ae434bf..8ab84ed 100644 --- a/src/parallel/types/task_spec_param.py +++ b/src/parallel/types/task_spec_param.py @@ -7,15 +7,14 @@ from pydantic import BaseModel -from .auto_schema_param import AutoSchemaParam from .json_schema_param import JsonSchemaParam from .text_schema_param import TextSchemaParam __all__ = ["TaskSpecParam", "OutputSchema", "InputSchema"] -OutputSchema: TypeAlias = Union[JsonSchemaParam, TextSchemaParam, AutoSchemaParam, str] +OutputSchema: TypeAlias = Union[JsonSchemaParam, TextSchemaParam, str] -InputSchema: TypeAlias = Union[str, JsonSchemaParam, TextSchemaParam] +InputSchema: TypeAlias = Union[JsonSchemaParam, TextSchemaParam, str] OutputT = TypeVar("OutputT", bound=BaseModel) diff --git a/src/parallel/types/text_schema.py b/src/parallel/types/text_schema.py deleted file mode 100644 index 22da813..0000000 --- a/src/parallel/types/text_schema.py +++ /dev/null @@ -1,16 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing import Optional -from typing_extensions import Literal - -from .._models import BaseModel - -__all__ = ["TextSchema"] - - -class TextSchema(BaseModel): - description: str - """A text description of the desired output from the task.""" - - type: Optional[Literal["text"]] = None - """The type of schema being defined. Always `text`.""" diff --git a/tests/api_resources/beta/__init__.py b/tests/api_resources/beta/__init__.py deleted file mode 100644 index fd8019a..0000000 --- a/tests/api_resources/beta/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. diff --git a/tests/api_resources/beta/test_task_group.py b/tests/api_resources/beta/test_task_group.py deleted file mode 100644 index 150452f..0000000 --- a/tests/api_resources/beta/test_task_group.py +++ /dev/null @@ -1,613 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
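With `AutoSchemaParam` dropped from `OutputSchema` above, a task spec now takes either a schema param or a bare string, the latter being shorthand for a text schema with that description. Both forms are sketched below; the JSON variant mirrors the deleted test fixtures.

from parallel.types import TaskSpecParam

# Bare-string shorthand for a text output schema.
text_spec: TaskSpecParam = {"output_schema": "GDP of France in 2023, in USD"}

# Explicit JSON schema form.
json_spec: TaskSpecParam = {
    "output_schema": {
        "type": "json",
        "json_schema": {"type": "object", "properties": {"gdp": {"type": "string"}}},
    },
    "input_schema": "A question about a country's economy",
}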
- -from __future__ import annotations - -import os -from typing import Any, cast - -import pytest - -from parallel import Parallel, AsyncParallel -from tests.utils import assert_matches_type -from parallel.types.beta import ( - TaskGroup, - TaskGroupRunResponse, -) - -base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") - - -class TestTaskGroup: - parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - - @parametrize - def test_method_create(self, client: Parallel) -> None: - task_group = client.beta.task_group.create() - assert_matches_type(TaskGroup, task_group, path=["response"]) - - @parametrize - def test_method_create_with_all_params(self, client: Parallel) -> None: - task_group = client.beta.task_group.create( - metadata={"foo": "string"}, - ) - assert_matches_type(TaskGroup, task_group, path=["response"]) - - @parametrize - def test_raw_response_create(self, client: Parallel) -> None: - response = client.beta.task_group.with_raw_response.create() - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - task_group = response.parse() - assert_matches_type(TaskGroup, task_group, path=["response"]) - - @parametrize - def test_streaming_response_create(self, client: Parallel) -> None: - with client.beta.task_group.with_streaming_response.create() as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - task_group = response.parse() - assert_matches_type(TaskGroup, task_group, path=["response"]) - - assert cast(Any, response.is_closed) is True - - @parametrize - def test_method_retrieve(self, client: Parallel) -> None: - task_group = client.beta.task_group.retrieve( - "taskgroup_id", - ) - assert_matches_type(TaskGroup, task_group, path=["response"]) - - @parametrize - def test_raw_response_retrieve(self, client: Parallel) -> None: - response = client.beta.task_group.with_raw_response.retrieve( - "taskgroup_id", - ) - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - task_group = response.parse() - assert_matches_type(TaskGroup, task_group, path=["response"]) - - @parametrize - def test_streaming_response_retrieve(self, client: Parallel) -> None: - with client.beta.task_group.with_streaming_response.retrieve( - "taskgroup_id", - ) as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - task_group = response.parse() - assert_matches_type(TaskGroup, task_group, path=["response"]) - - assert cast(Any, response.is_closed) is True - - @parametrize - def test_path_params_retrieve(self, client: Parallel) -> None: - with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"): - client.beta.task_group.with_raw_response.retrieve( - "", - ) - - @parametrize - def test_method_add_runs(self, client: Parallel) -> None: - task_group = client.beta.task_group.add_runs( - task_group_id="taskgroup_id", - inputs=[ - { - "input": "What was the GDP of France in 2023?", - "processor": "base", - } - ], - ) - assert_matches_type(TaskGroupRunResponse, task_group, path=["response"]) - - @parametrize - def test_method_add_runs_with_all_params(self, client: Parallel) -> None: - task_group = client.beta.task_group.add_runs( - task_group_id="taskgroup_id", - inputs=[ - { - "input": "What was the GDP of France in 2023?", - "processor": "base", - 
"enable_events": True, - "mcp_servers": [ - { - "name": "name", - "url": "url", - "allowed_tools": ["string"], - "headers": {"foo": "string"}, - "type": "url", - } - ], - "metadata": {"foo": "string"}, - "source_policy": { - "exclude_domains": ["string"], - "include_domains": ["string"], - }, - "task_spec": { - "output_schema": { - "json_schema": { - "additionalProperties": "bar", - "properties": "bar", - "required": "bar", - "type": "bar", - }, - "type": "json", - }, - "input_schema": "string", - }, - "webhook": { - "url": "url", - "event_types": ["task_run.status"], - }, - } - ], - default_task_spec={ - "output_schema": { - "json_schema": { - "additionalProperties": "bar", - "properties": "bar", - "required": "bar", - "type": "bar", - }, - "type": "json", - }, - "input_schema": "string", - }, - betas=["mcp-server-2025-07-17"], - ) - assert_matches_type(TaskGroupRunResponse, task_group, path=["response"]) - - @parametrize - def test_raw_response_add_runs(self, client: Parallel) -> None: - response = client.beta.task_group.with_raw_response.add_runs( - task_group_id="taskgroup_id", - inputs=[ - { - "input": "What was the GDP of France in 2023?", - "processor": "base", - } - ], - ) - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - task_group = response.parse() - assert_matches_type(TaskGroupRunResponse, task_group, path=["response"]) - - @parametrize - def test_streaming_response_add_runs(self, client: Parallel) -> None: - with client.beta.task_group.with_streaming_response.add_runs( - task_group_id="taskgroup_id", - inputs=[ - { - "input": "What was the GDP of France in 2023?", - "processor": "base", - } - ], - ) as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - task_group = response.parse() - assert_matches_type(TaskGroupRunResponse, task_group, path=["response"]) - - assert cast(Any, response.is_closed) is True - - @parametrize - def test_path_params_add_runs(self, client: Parallel) -> None: - with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"): - client.beta.task_group.with_raw_response.add_runs( - task_group_id="", - inputs=[ - { - "input": "What was the GDP of France in 2023?", - "processor": "base", - } - ], - ) - - @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") - @parametrize - def test_method_events(self, client: Parallel) -> None: - task_group_stream = client.beta.task_group.events( - task_group_id="taskgroup_id", - ) - task_group_stream.response.close() - - @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") - @parametrize - def test_method_events_with_all_params(self, client: Parallel) -> None: - task_group_stream = client.beta.task_group.events( - task_group_id="taskgroup_id", - last_event_id="last_event_id", - api_timeout=0, - ) - task_group_stream.response.close() - - @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") - @parametrize - def test_raw_response_events(self, client: Parallel) -> None: - response = client.beta.task_group.with_raw_response.events( - task_group_id="taskgroup_id", - ) - - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - stream = response.parse() - stream.close() - - @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") - @parametrize - def test_streaming_response_events(self, client: Parallel) -> None: - with 
client.beta.task_group.with_streaming_response.events( - task_group_id="taskgroup_id", - ) as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - stream = response.parse() - stream.close() - - assert cast(Any, response.is_closed) is True - - @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") - @parametrize - def test_path_params_events(self, client: Parallel) -> None: - with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"): - client.beta.task_group.with_raw_response.events( - task_group_id="", - ) - - @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") - @parametrize - def test_method_get_runs(self, client: Parallel) -> None: - task_group_stream = client.beta.task_group.get_runs( - task_group_id="taskgroup_id", - ) - task_group_stream.response.close() - - @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") - @parametrize - def test_method_get_runs_with_all_params(self, client: Parallel) -> None: - task_group_stream = client.beta.task_group.get_runs( - task_group_id="taskgroup_id", - include_input=True, - include_output=True, - last_event_id="last_event_id", - status="queued", - ) - task_group_stream.response.close() - - @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") - @parametrize - def test_raw_response_get_runs(self, client: Parallel) -> None: - response = client.beta.task_group.with_raw_response.get_runs( - task_group_id="taskgroup_id", - ) - - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - stream = response.parse() - stream.close() - - @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") - @parametrize - def test_streaming_response_get_runs(self, client: Parallel) -> None: - with client.beta.task_group.with_streaming_response.get_runs( - task_group_id="taskgroup_id", - ) as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - stream = response.parse() - stream.close() - - assert cast(Any, response.is_closed) is True - - @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") - @parametrize - def test_path_params_get_runs(self, client: Parallel) -> None: - with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"): - client.beta.task_group.with_raw_response.get_runs( - task_group_id="", - ) - - -class TestAsyncTaskGroup: - parametrize = pytest.mark.parametrize( - "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] - ) - - @parametrize - async def test_method_create(self, async_client: AsyncParallel) -> None: - task_group = await async_client.beta.task_group.create() - assert_matches_type(TaskGroup, task_group, path=["response"]) - - @parametrize - async def test_method_create_with_all_params(self, async_client: AsyncParallel) -> None: - task_group = await async_client.beta.task_group.create( - metadata={"foo": "string"}, - ) - assert_matches_type(TaskGroup, task_group, path=["response"]) - - @parametrize - async def test_raw_response_create(self, async_client: AsyncParallel) -> None: - response = await async_client.beta.task_group.with_raw_response.create() - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - task_group = await response.parse() - 
assert_matches_type(TaskGroup, task_group, path=["response"]) - - @parametrize - async def test_streaming_response_create(self, async_client: AsyncParallel) -> None: - async with async_client.beta.task_group.with_streaming_response.create() as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - task_group = await response.parse() - assert_matches_type(TaskGroup, task_group, path=["response"]) - - assert cast(Any, response.is_closed) is True - - @parametrize - async def test_method_retrieve(self, async_client: AsyncParallel) -> None: - task_group = await async_client.beta.task_group.retrieve( - "taskgroup_id", - ) - assert_matches_type(TaskGroup, task_group, path=["response"]) - - @parametrize - async def test_raw_response_retrieve(self, async_client: AsyncParallel) -> None: - response = await async_client.beta.task_group.with_raw_response.retrieve( - "taskgroup_id", - ) - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - task_group = await response.parse() - assert_matches_type(TaskGroup, task_group, path=["response"]) - - @parametrize - async def test_streaming_response_retrieve(self, async_client: AsyncParallel) -> None: - async with async_client.beta.task_group.with_streaming_response.retrieve( - "taskgroup_id", - ) as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - task_group = await response.parse() - assert_matches_type(TaskGroup, task_group, path=["response"]) - - assert cast(Any, response.is_closed) is True - - @parametrize - async def test_path_params_retrieve(self, async_client: AsyncParallel) -> None: - with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"): - await async_client.beta.task_group.with_raw_response.retrieve( - "", - ) - - @parametrize - async def test_method_add_runs(self, async_client: AsyncParallel) -> None: - task_group = await async_client.beta.task_group.add_runs( - task_group_id="taskgroup_id", - inputs=[ - { - "input": "What was the GDP of France in 2023?", - "processor": "base", - } - ], - ) - assert_matches_type(TaskGroupRunResponse, task_group, path=["response"]) - - @parametrize - async def test_method_add_runs_with_all_params(self, async_client: AsyncParallel) -> None: - task_group = await async_client.beta.task_group.add_runs( - task_group_id="taskgroup_id", - inputs=[ - { - "input": "What was the GDP of France in 2023?", - "processor": "base", - "enable_events": True, - "mcp_servers": [ - { - "name": "name", - "url": "url", - "allowed_tools": ["string"], - "headers": {"foo": "string"}, - "type": "url", - } - ], - "metadata": {"foo": "string"}, - "source_policy": { - "exclude_domains": ["string"], - "include_domains": ["string"], - }, - "task_spec": { - "output_schema": { - "json_schema": { - "additionalProperties": "bar", - "properties": "bar", - "required": "bar", - "type": "bar", - }, - "type": "json", - }, - "input_schema": "string", - }, - "webhook": { - "url": "url", - "event_types": ["task_run.status"], - }, - } - ], - default_task_spec={ - "output_schema": { - "json_schema": { - "additionalProperties": "bar", - "properties": "bar", - "required": "bar", - "type": "bar", - }, - "type": "json", - }, - "input_schema": "string", - }, - betas=["mcp-server-2025-07-17"], - ) - assert_matches_type(TaskGroupRunResponse, task_group, path=["response"]) - - @parametrize - async def 
test_raw_response_add_runs(self, async_client: AsyncParallel) -> None: - response = await async_client.beta.task_group.with_raw_response.add_runs( - task_group_id="taskgroup_id", - inputs=[ - { - "input": "What was the GDP of France in 2023?", - "processor": "base", - } - ], - ) - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - task_group = await response.parse() - assert_matches_type(TaskGroupRunResponse, task_group, path=["response"]) - - @parametrize - async def test_streaming_response_add_runs(self, async_client: AsyncParallel) -> None: - async with async_client.beta.task_group.with_streaming_response.add_runs( - task_group_id="taskgroup_id", - inputs=[ - { - "input": "What was the GDP of France in 2023?", - "processor": "base", - } - ], - ) as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - task_group = await response.parse() - assert_matches_type(TaskGroupRunResponse, task_group, path=["response"]) - - assert cast(Any, response.is_closed) is True - - @parametrize - async def test_path_params_add_runs(self, async_client: AsyncParallel) -> None: - with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"): - await async_client.beta.task_group.with_raw_response.add_runs( - task_group_id="", - inputs=[ - { - "input": "What was the GDP of France in 2023?", - "processor": "base", - } - ], - ) - - @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") - @parametrize - async def test_method_events(self, async_client: AsyncParallel) -> None: - task_group_stream = await async_client.beta.task_group.events( - task_group_id="taskgroup_id", - ) - await task_group_stream.response.aclose() - - @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") - @parametrize - async def test_method_events_with_all_params(self, async_client: AsyncParallel) -> None: - task_group_stream = await async_client.beta.task_group.events( - task_group_id="taskgroup_id", - last_event_id="last_event_id", - api_timeout=0, - ) - await task_group_stream.response.aclose() - - @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") - @parametrize - async def test_raw_response_events(self, async_client: AsyncParallel) -> None: - response = await async_client.beta.task_group.with_raw_response.events( - task_group_id="taskgroup_id", - ) - - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - stream = await response.parse() - await stream.close() - - @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") - @parametrize - async def test_streaming_response_events(self, async_client: AsyncParallel) -> None: - async with async_client.beta.task_group.with_streaming_response.events( - task_group_id="taskgroup_id", - ) as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - stream = await response.parse() - await stream.close() - - assert cast(Any, response.is_closed) is True - - @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") - @parametrize - async def test_path_params_events(self, async_client: AsyncParallel) -> None: - with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"): - await async_client.beta.task_group.with_raw_response.events( - task_group_id="", - ) - - @pytest.mark.skip(reason="Prism 
doesn't support text/event-stream responses") - @parametrize - async def test_method_get_runs(self, async_client: AsyncParallel) -> None: - task_group_stream = await async_client.beta.task_group.get_runs( - task_group_id="taskgroup_id", - ) - await task_group_stream.response.aclose() - - @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") - @parametrize - async def test_method_get_runs_with_all_params(self, async_client: AsyncParallel) -> None: - task_group_stream = await async_client.beta.task_group.get_runs( - task_group_id="taskgroup_id", - include_input=True, - include_output=True, - last_event_id="last_event_id", - status="queued", - ) - await task_group_stream.response.aclose() - - @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") - @parametrize - async def test_raw_response_get_runs(self, async_client: AsyncParallel) -> None: - response = await async_client.beta.task_group.with_raw_response.get_runs( - task_group_id="taskgroup_id", - ) - - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - stream = await response.parse() - await stream.close() - - @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") - @parametrize - async def test_streaming_response_get_runs(self, async_client: AsyncParallel) -> None: - async with async_client.beta.task_group.with_streaming_response.get_runs( - task_group_id="taskgroup_id", - ) as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - stream = await response.parse() - await stream.close() - - assert cast(Any, response.is_closed) is True - - @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") - @parametrize - async def test_path_params_get_runs(self, async_client: AsyncParallel) -> None: - with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"): - await async_client.beta.task_group.with_raw_response.get_runs( - task_group_id="", - ) diff --git a/tests/api_resources/beta/test_task_run.py b/tests/api_resources/beta/test_task_run.py deleted file mode 100644 index 7a188f6..0000000 --- a/tests/api_resources/beta/test_task_run.py +++ /dev/null @@ -1,349 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
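The deleted task-group tests above exercise the SDK's raw and streaming response wrappers; a short usage sketch of the same access pattern outside pytest (hypothetical, assuming a client whose credentials are configured via the environment or constructor):

# Hypothetical usage sketch mirroring the raw/streaming access pattern in the tests above.
from parallel import Parallel

client = Parallel()  # assumes credentials are configured via environment or constructor

task_group = client.beta.task_group.create()  # plain call: returns the parsed TaskGroup model

# Raw response: the wrapper exposes the HTTP response; .parse() yields the model.
raw = client.beta.task_group.with_raw_response.retrieve("taskgroup_id")  # placeholder id, as in the tests
parsed = raw.parse()

# Streaming response: the body is not read eagerly; parse inside the context manager.
with client.beta.task_group.with_streaming_response.retrieve("taskgroup_id") as response:
    parsed = response.parse()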
- -from __future__ import annotations - -import os -from typing import Any, cast - -import pytest - -from parallel import Parallel, AsyncParallel -from tests.utils import assert_matches_type -from parallel.types import TaskRun -from parallel.types.beta import BetaTaskRunResult - -base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") - - -class TestTaskRun: - parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - - @parametrize - def test_method_create(self, client: Parallel) -> None: - task_run = client.beta.task_run.create( - input="What was the GDP of France in 2023?", - processor="base", - ) - assert_matches_type(TaskRun, task_run, path=["response"]) - - @parametrize - def test_method_create_with_all_params(self, client: Parallel) -> None: - task_run = client.beta.task_run.create( - input="What was the GDP of France in 2023?", - processor="base", - enable_events=True, - mcp_servers=[ - { - "name": "name", - "url": "url", - "allowed_tools": ["string"], - "headers": {"foo": "string"}, - "type": "url", - } - ], - metadata={"foo": "string"}, - source_policy={ - "exclude_domains": ["string"], - "include_domains": ["string"], - }, - task_spec={ - "output_schema": { - "json_schema": { - "additionalProperties": "bar", - "properties": "bar", - "required": "bar", - "type": "bar", - }, - "type": "json", - }, - "input_schema": "string", - }, - webhook={ - "url": "url", - "event_types": ["task_run.status"], - }, - betas=["mcp-server-2025-07-17"], - ) - assert_matches_type(TaskRun, task_run, path=["response"]) - - @parametrize - def test_raw_response_create(self, client: Parallel) -> None: - response = client.beta.task_run.with_raw_response.create( - input="What was the GDP of France in 2023?", - processor="base", - ) - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - task_run = response.parse() - assert_matches_type(TaskRun, task_run, path=["response"]) - - @parametrize - def test_streaming_response_create(self, client: Parallel) -> None: - with client.beta.task_run.with_streaming_response.create( - input="What was the GDP of France in 2023?", - processor="base", - ) as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - task_run = response.parse() - assert_matches_type(TaskRun, task_run, path=["response"]) - - assert cast(Any, response.is_closed) is True - - @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") - @parametrize - def test_method_events(self, client: Parallel) -> None: - task_run_stream = client.beta.task_run.events( - "run_id", - ) - task_run_stream.response.close() - - @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") - @parametrize - def test_raw_response_events(self, client: Parallel) -> None: - response = client.beta.task_run.with_raw_response.events( - "run_id", - ) - - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - stream = response.parse() - stream.close() - - @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") - @parametrize - def test_streaming_response_events(self, client: Parallel) -> None: - with client.beta.task_run.with_streaming_response.events( - "run_id", - ) as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - stream = response.parse() - stream.close() - - assert cast(Any, response.is_closed) is 
True - - @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") - @parametrize - def test_path_params_events(self, client: Parallel) -> None: - with pytest.raises(ValueError, match=r"Expected a non-empty value for `run_id` but received ''"): - client.beta.task_run.with_raw_response.events( - "", - ) - - @parametrize - def test_method_result(self, client: Parallel) -> None: - task_run = client.beta.task_run.result( - run_id="run_id", - ) - assert_matches_type(BetaTaskRunResult, task_run, path=["response"]) - - @parametrize - def test_method_result_with_all_params(self, client: Parallel) -> None: - task_run = client.beta.task_run.result( - run_id="run_id", - api_timeout=0, - betas=["mcp-server-2025-07-17"], - ) - assert_matches_type(BetaTaskRunResult, task_run, path=["response"]) - - @parametrize - def test_raw_response_result(self, client: Parallel) -> None: - response = client.beta.task_run.with_raw_response.result( - run_id="run_id", - ) - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - task_run = response.parse() - assert_matches_type(BetaTaskRunResult, task_run, path=["response"]) - - @parametrize - def test_streaming_response_result(self, client: Parallel) -> None: - with client.beta.task_run.with_streaming_response.result( - run_id="run_id", - ) as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - task_run = response.parse() - assert_matches_type(BetaTaskRunResult, task_run, path=["response"]) - - assert cast(Any, response.is_closed) is True - - @parametrize - def test_path_params_result(self, client: Parallel) -> None: - with pytest.raises(ValueError, match=r"Expected a non-empty value for `run_id` but received ''"): - client.beta.task_run.with_raw_response.result( - run_id="", - ) - - -class TestAsyncTaskRun: - parametrize = pytest.mark.parametrize( - "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] - ) - - @parametrize - async def test_method_create(self, async_client: AsyncParallel) -> None: - task_run = await async_client.beta.task_run.create( - input="What was the GDP of France in 2023?", - processor="base", - ) - assert_matches_type(TaskRun, task_run, path=["response"]) - - @parametrize - async def test_method_create_with_all_params(self, async_client: AsyncParallel) -> None: - task_run = await async_client.beta.task_run.create( - input="What was the GDP of France in 2023?", - processor="base", - enable_events=True, - mcp_servers=[ - { - "name": "name", - "url": "url", - "allowed_tools": ["string"], - "headers": {"foo": "string"}, - "type": "url", - } - ], - metadata={"foo": "string"}, - source_policy={ - "exclude_domains": ["string"], - "include_domains": ["string"], - }, - task_spec={ - "output_schema": { - "json_schema": { - "additionalProperties": "bar", - "properties": "bar", - "required": "bar", - "type": "bar", - }, - "type": "json", - }, - "input_schema": "string", - }, - webhook={ - "url": "url", - "event_types": ["task_run.status"], - }, - betas=["mcp-server-2025-07-17"], - ) - assert_matches_type(TaskRun, task_run, path=["response"]) - - @parametrize - async def test_raw_response_create(self, async_client: AsyncParallel) -> None: - response = await async_client.beta.task_run.with_raw_response.create( - input="What was the GDP of France in 2023?", - processor="base", - ) - - assert response.is_closed is True - assert 
response.http_request.headers.get("X-Stainless-Lang") == "python" - task_run = await response.parse() - assert_matches_type(TaskRun, task_run, path=["response"]) - - @parametrize - async def test_streaming_response_create(self, async_client: AsyncParallel) -> None: - async with async_client.beta.task_run.with_streaming_response.create( - input="What was the GDP of France in 2023?", - processor="base", - ) as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - task_run = await response.parse() - assert_matches_type(TaskRun, task_run, path=["response"]) - - assert cast(Any, response.is_closed) is True - - @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") - @parametrize - async def test_method_events(self, async_client: AsyncParallel) -> None: - task_run_stream = await async_client.beta.task_run.events( - "run_id", - ) - await task_run_stream.response.aclose() - - @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") - @parametrize - async def test_raw_response_events(self, async_client: AsyncParallel) -> None: - response = await async_client.beta.task_run.with_raw_response.events( - "run_id", - ) - - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - stream = await response.parse() - await stream.close() - - @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") - @parametrize - async def test_streaming_response_events(self, async_client: AsyncParallel) -> None: - async with async_client.beta.task_run.with_streaming_response.events( - "run_id", - ) as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - stream = await response.parse() - await stream.close() - - assert cast(Any, response.is_closed) is True - - @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") - @parametrize - async def test_path_params_events(self, async_client: AsyncParallel) -> None: - with pytest.raises(ValueError, match=r"Expected a non-empty value for `run_id` but received ''"): - await async_client.beta.task_run.with_raw_response.events( - "", - ) - - @parametrize - async def test_method_result(self, async_client: AsyncParallel) -> None: - task_run = await async_client.beta.task_run.result( - run_id="run_id", - ) - assert_matches_type(BetaTaskRunResult, task_run, path=["response"]) - - @parametrize - async def test_method_result_with_all_params(self, async_client: AsyncParallel) -> None: - task_run = await async_client.beta.task_run.result( - run_id="run_id", - api_timeout=0, - betas=["mcp-server-2025-07-17"], - ) - assert_matches_type(BetaTaskRunResult, task_run, path=["response"]) - - @parametrize - async def test_raw_response_result(self, async_client: AsyncParallel) -> None: - response = await async_client.beta.task_run.with_raw_response.result( - run_id="run_id", - ) - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - task_run = await response.parse() - assert_matches_type(BetaTaskRunResult, task_run, path=["response"]) - - @parametrize - async def test_streaming_response_result(self, async_client: AsyncParallel) -> None: - async with async_client.beta.task_run.with_streaming_response.result( - run_id="run_id", - ) as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - task_run = await response.parse() - 
assert_matches_type(BetaTaskRunResult, task_run, path=["response"]) - - assert cast(Any, response.is_closed) is True - - @parametrize - async def test_path_params_result(self, async_client: AsyncParallel) -> None: - with pytest.raises(ValueError, match=r"Expected a non-empty value for `run_id` but received ''"): - await async_client.beta.task_run.with_raw_response.result( - run_id="", - ) diff --git a/tests/api_resources/test_beta.py b/tests/api_resources/test_beta.py deleted file mode 100644 index b733643..0000000 --- a/tests/api_resources/test_beta.py +++ /dev/null @@ -1,104 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -import os -from typing import Any, cast - -import pytest - -from parallel import Parallel, AsyncParallel -from tests.utils import assert_matches_type -from parallel.types.beta import SearchResult - -base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") - - -class TestBeta: - parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - - @parametrize - def test_method_search(self, client: Parallel) -> None: - beta = client.beta.search() - assert_matches_type(SearchResult, beta, path=["response"]) - - @parametrize - def test_method_search_with_all_params(self, client: Parallel) -> None: - beta = client.beta.search( - max_chars_per_result=0, - max_results=0, - objective="objective", - processor="base", - search_queries=["string"], - source_policy={ - "exclude_domains": ["string"], - "include_domains": ["string"], - }, - ) - assert_matches_type(SearchResult, beta, path=["response"]) - - @parametrize - def test_raw_response_search(self, client: Parallel) -> None: - response = client.beta.with_raw_response.search() - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - beta = response.parse() - assert_matches_type(SearchResult, beta, path=["response"]) - - @parametrize - def test_streaming_response_search(self, client: Parallel) -> None: - with client.beta.with_streaming_response.search() as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - beta = response.parse() - assert_matches_type(SearchResult, beta, path=["response"]) - - assert cast(Any, response.is_closed) is True - - -class TestAsyncBeta: - parametrize = pytest.mark.parametrize( - "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] - ) - - @parametrize - async def test_method_search(self, async_client: AsyncParallel) -> None: - beta = await async_client.beta.search() - assert_matches_type(SearchResult, beta, path=["response"]) - - @parametrize - async def test_method_search_with_all_params(self, async_client: AsyncParallel) -> None: - beta = await async_client.beta.search( - max_chars_per_result=0, - max_results=0, - objective="objective", - processor="base", - search_queries=["string"], - source_policy={ - "exclude_domains": ["string"], - "include_domains": ["string"], - }, - ) - assert_matches_type(SearchResult, beta, path=["response"]) - - @parametrize - async def test_raw_response_search(self, async_client: AsyncParallel) -> None: - response = await async_client.beta.with_raw_response.search() - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - beta = await response.parse() - assert_matches_type(SearchResult, beta, 
path=["response"]) - - @parametrize - async def test_streaming_response_search(self, async_client: AsyncParallel) -> None: - async with async_client.beta.with_streaming_response.search() as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - beta = await response.parse() - assert_matches_type(SearchResult, beta, path=["response"]) - - assert cast(Any, response.is_closed) is True diff --git a/tests/api_resources/test_task_run.py b/tests/api_resources/test_task_run.py index 096a73e..871e3fa 100644 --- a/tests/api_resources/test_task_run.py +++ b/tests/api_resources/test_task_run.py @@ -20,32 +20,46 @@ class TestTaskRun: @parametrize def test_method_create(self, client: Parallel) -> None: task_run = client.task_run.create( - input="What was the GDP of France in 2023?", - processor="base", + input="France (2023)", + processor="processor", ) assert_matches_type(TaskRun, task_run, path=["response"]) @parametrize def test_method_create_with_all_params(self, client: Parallel) -> None: task_run = client.task_run.create( - input="What was the GDP of France in 2023?", - processor="base", + input="France (2023)", + processor="processor", metadata={"foo": "string"}, - source_policy={ - "exclude_domains": ["string"], - "include_domains": ["string"], - }, task_spec={ "output_schema": { "json_schema": { - "additionalProperties": "bar", - "properties": "bar", - "required": "bar", - "type": "bar", + "additionalProperties": False, + "properties": { + "gdp": { + "description": "GDP in USD for the year, formatted like '$3.1 trillion (2023)'", + "type": "string", + } + }, + "required": ["gdp"], + "type": "object", + }, + "type": "json", + }, + "input_schema": { + "json_schema": { + "additionalProperties": False, + "properties": { + "gdp": { + "description": "GDP in USD for the year, formatted like '$3.1 trillion (2023)'", + "type": "string", + } + }, + "required": ["gdp"], + "type": "object", }, "type": "json", }, - "input_schema": "string", }, ) assert_matches_type(TaskRun, task_run, path=["response"]) @@ -53,8 +67,8 @@ def test_method_create_with_all_params(self, client: Parallel) -> None: @parametrize def test_raw_response_create(self, client: Parallel) -> None: response = client.task_run.with_raw_response.create( - input="What was the GDP of France in 2023?", - processor="base", + input="France (2023)", + processor="processor", ) assert response.is_closed is True @@ -65,8 +79,8 @@ def test_raw_response_create(self, client: Parallel) -> None: @parametrize def test_streaming_response_create(self, client: Parallel) -> None: with client.task_run.with_streaming_response.create( - input="What was the GDP of France in 2023?", - processor="base", + input="France (2023)", + processor="processor", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -169,32 +183,46 @@ class TestAsyncTaskRun: @parametrize async def test_method_create(self, async_client: AsyncParallel) -> None: task_run = await async_client.task_run.create( - input="What was the GDP of France in 2023?", - processor="base", + input="France (2023)", + processor="processor", ) assert_matches_type(TaskRun, task_run, path=["response"]) @parametrize async def test_method_create_with_all_params(self, async_client: AsyncParallel) -> None: task_run = await async_client.task_run.create( - input="What was the GDP of France in 2023?", - processor="base", + input="France (2023)", + processor="processor", metadata={"foo": 
"string"}, - source_policy={ - "exclude_domains": ["string"], - "include_domains": ["string"], - }, task_spec={ "output_schema": { "json_schema": { - "additionalProperties": "bar", - "properties": "bar", - "required": "bar", - "type": "bar", + "additionalProperties": False, + "properties": { + "gdp": { + "description": "GDP in USD for the year, formatted like '$3.1 trillion (2023)'", + "type": "string", + } + }, + "required": ["gdp"], + "type": "object", + }, + "type": "json", + }, + "input_schema": { + "json_schema": { + "additionalProperties": False, + "properties": { + "gdp": { + "description": "GDP in USD for the year, formatted like '$3.1 trillion (2023)'", + "type": "string", + } + }, + "required": ["gdp"], + "type": "object", }, "type": "json", }, - "input_schema": "string", }, ) assert_matches_type(TaskRun, task_run, path=["response"]) @@ -202,8 +230,8 @@ async def test_method_create_with_all_params(self, async_client: AsyncParallel) @parametrize async def test_raw_response_create(self, async_client: AsyncParallel) -> None: response = await async_client.task_run.with_raw_response.create( - input="What was the GDP of France in 2023?", - processor="base", + input="France (2023)", + processor="processor", ) assert response.is_closed is True @@ -214,8 +242,8 @@ async def test_raw_response_create(self, async_client: AsyncParallel) -> None: @parametrize async def test_streaming_response_create(self, async_client: AsyncParallel) -> None: async with async_client.task_run.with_streaming_response.create( - input="What was the GDP of France in 2023?", - processor="base", + input="France (2023)", + processor="processor", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" diff --git a/tests/test_client.py b/tests/test_client.py index f3c8287..878365f 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -717,9 +717,7 @@ def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter, clien respx_mock.post("/v1/tasks/runs").mock(side_effect=httpx.TimeoutException("Test timeout error")) with pytest.raises(APITimeoutError): - client.task_run.with_streaming_response.create( - input="What was the GDP of France in 2023?", processor="base" - ).__enter__() + client.task_run.with_streaming_response.create(input="France (2023)", processor="processor").__enter__() assert _get_open_connections(self.client) == 0 @@ -729,9 +727,7 @@ def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter, client respx_mock.post("/v1/tasks/runs").mock(return_value=httpx.Response(500)) with pytest.raises(APIStatusError): - client.task_run.with_streaming_response.create( - input="What was the GDP of France in 2023?", processor="base" - ).__enter__() + client.task_run.with_streaming_response.create(input="France (2023)", processor="processor").__enter__() assert _get_open_connections(self.client) == 0 @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) @@ -760,9 +756,7 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: respx_mock.post("/v1/tasks/runs").mock(side_effect=retry_handler) - response = client.task_run.with_raw_response.create( - input="What was the GDP of France in 2023?", processor="base" - ) + response = client.task_run.with_raw_response.create(input="France (2023)", processor="processor") assert response.retries_taken == failures_before_success assert int(response.http_request.headers.get("x-stainless-retry-count")) == failures_before_success @@ -787,9 +781,7 @@ def 
retry_handler(_request: httpx.Request) -> httpx.Response: respx_mock.post("/v1/tasks/runs").mock(side_effect=retry_handler) response = client.task_run.with_raw_response.create( - input="What was the GDP of France in 2023?", - processor="base", - extra_headers={"x-stainless-retry-count": Omit()}, + input="France (2023)", processor="processor", extra_headers={"x-stainless-retry-count": Omit()} ) assert len(response.http_request.headers.get_list("x-stainless-retry-count")) == 0 @@ -814,9 +806,7 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: respx_mock.post("/v1/tasks/runs").mock(side_effect=retry_handler) response = client.task_run.with_raw_response.create( - input="What was the GDP of France in 2023?", - processor="base", - extra_headers={"x-stainless-retry-count": "42"}, + input="France (2023)", processor="processor", extra_headers={"x-stainless-retry-count": "42"} ) assert response.http_request.headers.get("x-stainless-retry-count") == "42" @@ -1549,7 +1539,7 @@ async def test_retrying_timeout_errors_doesnt_leak( with pytest.raises(APITimeoutError): await async_client.task_run.with_streaming_response.create( - input="What was the GDP of France in 2023?", processor="base" + input="France (2023)", processor="processor" ).__aenter__() assert _get_open_connections(self.client) == 0 @@ -1563,7 +1553,7 @@ async def test_retrying_status_errors_doesnt_leak( with pytest.raises(APIStatusError): await async_client.task_run.with_streaming_response.create( - input="What was the GDP of France in 2023?", processor="base" + input="France (2023)", processor="processor" ).__aenter__() assert _get_open_connections(self.client) == 0 @@ -1594,9 +1584,7 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: respx_mock.post("/v1/tasks/runs").mock(side_effect=retry_handler) - response = await client.task_run.with_raw_response.create( - input="What was the GDP of France in 2023?", processor="base" - ) + response = await client.task_run.with_raw_response.create(input="France (2023)", processor="processor") assert response.retries_taken == failures_before_success assert int(response.http_request.headers.get("x-stainless-retry-count")) == failures_before_success @@ -1622,9 +1610,7 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: respx_mock.post("/v1/tasks/runs").mock(side_effect=retry_handler) response = await client.task_run.with_raw_response.create( - input="What was the GDP of France in 2023?", - processor="base", - extra_headers={"x-stainless-retry-count": Omit()}, + input="France (2023)", processor="processor", extra_headers={"x-stainless-retry-count": Omit()} ) assert len(response.http_request.headers.get_list("x-stainless-retry-count")) == 0 @@ -1650,9 +1636,7 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: respx_mock.post("/v1/tasks/runs").mock(side_effect=retry_handler) response = await client.task_run.with_raw_response.create( - input="What was the GDP of France in 2023?", - processor="base", - extra_headers={"x-stainless-retry-count": "42"}, + input="France (2023)", processor="processor", extra_headers={"x-stainless-retry-count": "42"} ) assert response.http_request.headers.get("x-stainless-retry-count") == "42" From b048bd7e1c5a992ae274aa4b6df16a9d5b0f843e Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sun, 31 Aug 2025 21:59:10 +0000 Subject: [PATCH 29/32] feat(api): update via SDK Studio --- .stats.yml | 8 +- api.md | 4 +- src/parallel/_client.py | 9 + 
src/parallel/resources/__init__.py | 14 + src/parallel/resources/beta/__init__.py | 47 ++ src/parallel/resources/beta/beta.py | 301 +++++++++ src/parallel/resources/beta/task_group.py | 632 ++++++++++++++++++ src/parallel/resources/beta/task_run.py | 499 ++++++++++++++ src/parallel/resources/task_run.py | 53 +- src/parallel/types/__init__.py | 13 +- src/parallel/types/auto_schema.py | 13 + src/parallel/types/auto_schema_param.py | 12 + src/parallel/types/beta/__init__.py | 27 + src/parallel/types/beta/beta_run_input.py | 63 ++ .../types/beta/beta_run_input_param.py | 65 ++ src/parallel/types/beta/beta_search_params.py | 47 ++ .../types/beta/beta_task_run_result.py | 74 ++ src/parallel/types/beta/error_event.py | 16 + src/parallel/types/beta/mcp_server.py | 25 + src/parallel/types/beta/mcp_server_param.py | 25 + src/parallel/types/beta/mcp_tool_call.py | 27 + .../types/beta/parallel_beta_param.py | 12 + src/parallel/types/beta/search_result.py | 16 + src/parallel/types/beta/task_group.py | 24 + .../types/beta/task_group_add_runs_params.py | 30 + .../types/beta/task_group_create_params.py | 13 + .../types/beta/task_group_events_params.py | 16 + .../types/beta/task_group_events_response.py | 28 + .../types/beta/task_group_get_runs_params.py | 18 + .../beta/task_group_get_runs_response.py | 12 + .../types/beta/task_group_run_response.py | 30 + src/parallel/types/beta/task_group_status.py | 27 + .../types/beta/task_run_create_params.py | 70 ++ src/parallel/types/beta/task_run_event.py | 32 + .../types/beta/task_run_events_response.py | 58 ++ .../types/beta/task_run_result_params.py | 18 + src/parallel/types/beta/web_search_result.py | 18 + src/parallel/types/beta/webhook.py | 16 + src/parallel/types/beta/webhook_param.py | 16 + src/parallel/types/field_basis.py | 25 + src/parallel/types/json_schema.py | 16 + src/parallel/types/json_schema_param.py | 3 +- src/parallel/types/shared/__init__.py | 2 + src/parallel/types/shared/error_object.py | 4 +- src/parallel/types/shared/error_response.py | 3 +- src/parallel/types/shared/source_policy.py | 21 + src/parallel/types/shared/warning.py | 22 + src/parallel/types/shared_params/__init__.py | 3 + .../types/shared_params/source_policy.py | 22 + src/parallel/types/task_run.py | 31 +- src/parallel/types/task_run_create_params.py | 15 +- src/parallel/types/task_run_json_output.py | 47 +- src/parallel/types/task_run_result.py | 7 +- src/parallel/types/task_run_text_output.py | 39 +- src/parallel/types/task_spec.py | 31 + src/parallel/types/task_spec_param.py | 5 +- src/parallel/types/text_schema.py | 16 + tests/api_resources/beta/__init__.py | 1 + tests/api_resources/beta/test_task_group.py | 613 +++++++++++++++++ tests/api_resources/beta/test_task_run.py | 349 ++++++++++ tests/api_resources/test_beta.py | 104 +++ tests/api_resources/test_task_run.py | 96 +-- tests/test_client.py | 36 +- 63 files changed, 3775 insertions(+), 164 deletions(-) create mode 100644 src/parallel/resources/beta/__init__.py create mode 100644 src/parallel/resources/beta/beta.py create mode 100644 src/parallel/resources/beta/task_group.py create mode 100644 src/parallel/resources/beta/task_run.py create mode 100644 src/parallel/types/auto_schema.py create mode 100644 src/parallel/types/auto_schema_param.py create mode 100644 src/parallel/types/beta/beta_run_input.py create mode 100644 src/parallel/types/beta/beta_run_input_param.py create mode 100644 src/parallel/types/beta/beta_search_params.py create mode 100644 src/parallel/types/beta/beta_task_run_result.py create mode 100644 
src/parallel/types/beta/error_event.py create mode 100644 src/parallel/types/beta/mcp_server.py create mode 100644 src/parallel/types/beta/mcp_server_param.py create mode 100644 src/parallel/types/beta/mcp_tool_call.py create mode 100644 src/parallel/types/beta/parallel_beta_param.py create mode 100644 src/parallel/types/beta/search_result.py create mode 100644 src/parallel/types/beta/task_group.py create mode 100644 src/parallel/types/beta/task_group_add_runs_params.py create mode 100644 src/parallel/types/beta/task_group_create_params.py create mode 100644 src/parallel/types/beta/task_group_events_params.py create mode 100644 src/parallel/types/beta/task_group_events_response.py create mode 100644 src/parallel/types/beta/task_group_get_runs_params.py create mode 100644 src/parallel/types/beta/task_group_get_runs_response.py create mode 100644 src/parallel/types/beta/task_group_run_response.py create mode 100644 src/parallel/types/beta/task_group_status.py create mode 100644 src/parallel/types/beta/task_run_create_params.py create mode 100644 src/parallel/types/beta/task_run_event.py create mode 100644 src/parallel/types/beta/task_run_events_response.py create mode 100644 src/parallel/types/beta/task_run_result_params.py create mode 100644 src/parallel/types/beta/web_search_result.py create mode 100644 src/parallel/types/beta/webhook.py create mode 100644 src/parallel/types/beta/webhook_param.py create mode 100644 src/parallel/types/field_basis.py create mode 100644 src/parallel/types/json_schema.py create mode 100644 src/parallel/types/shared/source_policy.py create mode 100644 src/parallel/types/shared/warning.py create mode 100644 src/parallel/types/shared_params/__init__.py create mode 100644 src/parallel/types/shared_params/source_policy.py create mode 100644 src/parallel/types/task_spec.py create mode 100644 src/parallel/types/text_schema.py create mode 100644 tests/api_resources/beta/__init__.py create mode 100644 tests/api_resources/beta/test_task_group.py create mode 100644 tests/api_resources/beta/test_task_run.py create mode 100644 tests/api_resources/test_beta.py diff --git a/.stats.yml b/.stats.yml index c703e97..7c4f552 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ -configured_endpoints: 3 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/parallel-web%2Fparallel-sdk-ff0d5939e135b67b3448abf72d8bb0f9a574194337c7c7192453781347a9601d.yml -openapi_spec_hash: f3ce85349af6273a671d3d2781c4c877 -config_hash: 284b51e02bda8519b1f21bb67f1809e0 +configured_endpoints: 12 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/parallel-web%2Fparallel-sdk-1aeb1c81a84999f2d27ca9e86b041d74b892926bed126dc9b0f3cff4d7b26963.yml +openapi_spec_hash: 6280f6c6fb537f7c9ac5cc33ee2e433d +config_hash: 451edf5a87ae14248aa336ffc08d216f diff --git a/api.md b/api.md index 6b41468..665af26 100644 --- a/api.md +++ b/api.md @@ -1,7 +1,7 @@ # Shared Types ```python -from parallel.types import ErrorObject, ErrorResponse +from parallel.types import ErrorObject, ErrorResponse, SourcePolicy, Warning ``` # TaskRun @@ -10,7 +10,9 @@ Types: ```python from parallel.types import ( + AutoSchema, Citation, + FieldBasis, JsonSchema, ParsedTaskRunResult, RunInput, diff --git a/src/parallel/_client.py b/src/parallel/_client.py index dcb4a10..bb7c4a4 100644 --- a/src/parallel/_client.py +++ b/src/parallel/_client.py @@ -29,6 +29,7 @@ SyncAPIClient, AsyncAPIClient, ) +from .resources.beta import beta __all__ = [ "Timeout", @@ -44,6 +45,7 @@ class 
Parallel(SyncAPIClient): task_run: task_run.TaskRunResource + beta: beta.BetaResource with_raw_response: ParallelWithRawResponse with_streaming_response: ParallelWithStreamedResponse @@ -102,6 +104,7 @@ def __init__( ) self.task_run = task_run.TaskRunResource(self) + self.beta = beta.BetaResource(self) self.with_raw_response = ParallelWithRawResponse(self) self.with_streaming_response = ParallelWithStreamedResponse(self) @@ -212,6 +215,7 @@ def _make_status_error( class AsyncParallel(AsyncAPIClient): task_run: task_run.AsyncTaskRunResource + beta: beta.AsyncBetaResource with_raw_response: AsyncParallelWithRawResponse with_streaming_response: AsyncParallelWithStreamedResponse @@ -270,6 +274,7 @@ def __init__( ) self.task_run = task_run.AsyncTaskRunResource(self) + self.beta = beta.AsyncBetaResource(self) self.with_raw_response = AsyncParallelWithRawResponse(self) self.with_streaming_response = AsyncParallelWithStreamedResponse(self) @@ -381,21 +386,25 @@ def _make_status_error( class ParallelWithRawResponse: def __init__(self, client: Parallel) -> None: self.task_run = task_run.TaskRunResourceWithRawResponse(client.task_run) + self.beta = beta.BetaResourceWithRawResponse(client.beta) class AsyncParallelWithRawResponse: def __init__(self, client: AsyncParallel) -> None: self.task_run = task_run.AsyncTaskRunResourceWithRawResponse(client.task_run) + self.beta = beta.AsyncBetaResourceWithRawResponse(client.beta) class ParallelWithStreamedResponse: def __init__(self, client: Parallel) -> None: self.task_run = task_run.TaskRunResourceWithStreamingResponse(client.task_run) + self.beta = beta.BetaResourceWithStreamingResponse(client.beta) class AsyncParallelWithStreamedResponse: def __init__(self, client: AsyncParallel) -> None: self.task_run = task_run.AsyncTaskRunResourceWithStreamingResponse(client.task_run) + self.beta = beta.AsyncBetaResourceWithStreamingResponse(client.beta) Client = Parallel diff --git a/src/parallel/resources/__init__.py b/src/parallel/resources/__init__.py index 6fc7c06..9d1df4f 100644 --- a/src/parallel/resources/__init__.py +++ b/src/parallel/resources/__init__.py @@ -1,5 +1,13 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +from .beta import ( + BetaResource, + AsyncBetaResource, + BetaResourceWithRawResponse, + AsyncBetaResourceWithRawResponse, + BetaResourceWithStreamingResponse, + AsyncBetaResourceWithStreamingResponse, +) from .task_run import ( TaskRunResource, AsyncTaskRunResource, @@ -16,4 +24,10 @@ "AsyncTaskRunResourceWithRawResponse", "TaskRunResourceWithStreamingResponse", "AsyncTaskRunResourceWithStreamingResponse", + "BetaResource", + "AsyncBetaResource", + "BetaResourceWithRawResponse", + "AsyncBetaResourceWithRawResponse", + "BetaResourceWithStreamingResponse", + "AsyncBetaResourceWithStreamingResponse", ] diff --git a/src/parallel/resources/beta/__init__.py b/src/parallel/resources/beta/__init__.py new file mode 100644 index 0000000..3bd45ec --- /dev/null +++ b/src/parallel/resources/beta/__init__.py @@ -0,0 +1,47 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
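The _client.py and resources/__init__.py hunks above attach the beta resource to both client classes and to their raw/streaming wrappers. A brief sketch of the resulting attribute surface (hypothetical usage, not part of the patch):

# Hypothetical sketch of the attribute surface wired up in _client.py above.
from parallel import Parallel

client = Parallel()  # assumes credentials are configured via environment or constructor

beta = client.beta                         # BetaResource
beta_task_run = client.beta.task_run       # beta TaskRunResource
beta_task_group = client.beta.task_group   # beta TaskGroupResource

# The raw and streaming wrappers mirror the same tree:
raw_beta = client.with_raw_response.beta              # BetaResourceWithRawResponse
streaming_beta = client.with_streaming_response.beta  # BetaResourceWithStreamingResponse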
+ +from .beta import ( + BetaResource, + AsyncBetaResource, + BetaResourceWithRawResponse, + AsyncBetaResourceWithRawResponse, + BetaResourceWithStreamingResponse, + AsyncBetaResourceWithStreamingResponse, +) +from .task_run import ( + TaskRunResource, + AsyncTaskRunResource, + TaskRunResourceWithRawResponse, + AsyncTaskRunResourceWithRawResponse, + TaskRunResourceWithStreamingResponse, + AsyncTaskRunResourceWithStreamingResponse, +) +from .task_group import ( + TaskGroupResource, + AsyncTaskGroupResource, + TaskGroupResourceWithRawResponse, + AsyncTaskGroupResourceWithRawResponse, + TaskGroupResourceWithStreamingResponse, + AsyncTaskGroupResourceWithStreamingResponse, +) + +__all__ = [ + "TaskRunResource", + "AsyncTaskRunResource", + "TaskRunResourceWithRawResponse", + "AsyncTaskRunResourceWithRawResponse", + "TaskRunResourceWithStreamingResponse", + "AsyncTaskRunResourceWithStreamingResponse", + "TaskGroupResource", + "AsyncTaskGroupResource", + "TaskGroupResourceWithRawResponse", + "AsyncTaskGroupResourceWithRawResponse", + "TaskGroupResourceWithStreamingResponse", + "AsyncTaskGroupResourceWithStreamingResponse", + "BetaResource", + "AsyncBetaResource", + "BetaResourceWithRawResponse", + "AsyncBetaResourceWithRawResponse", + "BetaResourceWithStreamingResponse", + "AsyncBetaResourceWithStreamingResponse", +] diff --git a/src/parallel/resources/beta/beta.py b/src/parallel/resources/beta/beta.py new file mode 100644 index 0000000..c12ec5a --- /dev/null +++ b/src/parallel/resources/beta/beta.py @@ -0,0 +1,301 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import List, Optional +from typing_extensions import Literal + +import httpx + +from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ..._utils import maybe_transform, async_maybe_transform +from .task_run import ( + TaskRunResource, + AsyncTaskRunResource, + TaskRunResourceWithRawResponse, + AsyncTaskRunResourceWithRawResponse, + TaskRunResourceWithStreamingResponse, + AsyncTaskRunResourceWithStreamingResponse, +) +from ..._compat import cached_property +from .task_group import ( + TaskGroupResource, + AsyncTaskGroupResource, + TaskGroupResourceWithRawResponse, + AsyncTaskGroupResourceWithRawResponse, + TaskGroupResourceWithStreamingResponse, + AsyncTaskGroupResourceWithStreamingResponse, +) +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ...types.beta import beta_search_params +from ..._base_client import make_request_options +from ...types.beta.search_result import SearchResult +from ...types.shared_params.source_policy import SourcePolicy + +__all__ = ["BetaResource", "AsyncBetaResource"] + + +class BetaResource(SyncAPIResource): + @cached_property + def task_run(self) -> TaskRunResource: + return TaskRunResource(self._client) + + @cached_property + def task_group(self) -> TaskGroupResource: + return TaskGroupResource(self._client) + + @cached_property + def with_raw_response(self) -> BetaResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. 
+ + For more information, see https://www.github.com/parallel-web/parallel-sdk-python#accessing-raw-response-data-eg-headers + """ + return BetaResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> BetaResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/parallel-web/parallel-sdk-python#with_streaming_response + """ + return BetaResourceWithStreamingResponse(self) + + def search( + self, + *, + max_chars_per_result: Optional[int] | NotGiven = NOT_GIVEN, + max_results: Optional[int] | NotGiven = NOT_GIVEN, + objective: Optional[str] | NotGiven = NOT_GIVEN, + processor: Literal["base", "pro"] | NotGiven = NOT_GIVEN, + search_queries: Optional[List[str]] | NotGiven = NOT_GIVEN, + source_policy: Optional[SourcePolicy] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> SearchResult: + """ + Searches the web. + + Args: + max_chars_per_result: Upper bound on the number of characters to include in excerpts for each search + result. + + max_results: Upper bound on the number of results to return. May be limited by the processor. + Defaults to 10 if not provided. + + objective: Natural-language description of what the web search is trying to find. May + include guidance about preferred sources or freshness. At least one of objective + or search_queries must be provided. + + processor: Search processor. + + search_queries: Optional list of traditional keyword search queries to guide the search. May + contain search operators. At least one of objective or search_queries must be + provided. + + source_policy: Source policy for web search results. + + This policy governs which sources are allowed/disallowed in results. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._post( + "/v1beta/search", + body=maybe_transform( + { + "max_chars_per_result": max_chars_per_result, + "max_results": max_results, + "objective": objective, + "processor": processor, + "search_queries": search_queries, + "source_policy": source_policy, + }, + beta_search_params.BetaSearchParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=SearchResult, + ) + + +class AsyncBetaResource(AsyncAPIResource): + @cached_property + def task_run(self) -> AsyncTaskRunResource: + return AsyncTaskRunResource(self._client) + + @cached_property + def task_group(self) -> AsyncTaskGroupResource: + return AsyncTaskGroupResource(self._client) + + @cached_property + def with_raw_response(self) -> AsyncBetaResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. 
+ + For more information, see https://www.github.com/parallel-web/parallel-sdk-python#accessing-raw-response-data-eg-headers + """ + return AsyncBetaResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncBetaResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/parallel-web/parallel-sdk-python#with_streaming_response + """ + return AsyncBetaResourceWithStreamingResponse(self) + + async def search( + self, + *, + max_chars_per_result: Optional[int] | NotGiven = NOT_GIVEN, + max_results: Optional[int] | NotGiven = NOT_GIVEN, + objective: Optional[str] | NotGiven = NOT_GIVEN, + processor: Literal["base", "pro"] | NotGiven = NOT_GIVEN, + search_queries: Optional[List[str]] | NotGiven = NOT_GIVEN, + source_policy: Optional[SourcePolicy] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> SearchResult: + """ + Searches the web. + + Args: + max_chars_per_result: Upper bound on the number of characters to include in excerpts for each search + result. + + max_results: Upper bound on the number of results to return. May be limited by the processor. + Defaults to 10 if not provided. + + objective: Natural-language description of what the web search is trying to find. May + include guidance about preferred sources or freshness. At least one of objective + or search_queries must be provided. + + processor: Search processor. + + search_queries: Optional list of traditional keyword search queries to guide the search. May + contain search operators. At least one of objective or search_queries must be + provided. + + source_policy: Source policy for web search results. + + This policy governs which sources are allowed/disallowed in results. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._post( + "/v1beta/search", + body=await async_maybe_transform( + { + "max_chars_per_result": max_chars_per_result, + "max_results": max_results, + "objective": objective, + "processor": processor, + "search_queries": search_queries, + "source_policy": source_policy, + }, + beta_search_params.BetaSearchParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=SearchResult, + ) + + +class BetaResourceWithRawResponse: + def __init__(self, beta: BetaResource) -> None: + self._beta = beta + + self.search = to_raw_response_wrapper( + beta.search, + ) + + @cached_property + def task_run(self) -> TaskRunResourceWithRawResponse: + return TaskRunResourceWithRawResponse(self._beta.task_run) + + @cached_property + def task_group(self) -> TaskGroupResourceWithRawResponse: + return TaskGroupResourceWithRawResponse(self._beta.task_group) + + +class AsyncBetaResourceWithRawResponse: + def __init__(self, beta: AsyncBetaResource) -> None: + self._beta = beta + + self.search = async_to_raw_response_wrapper( + beta.search, + ) + + @cached_property + def task_run(self) -> AsyncTaskRunResourceWithRawResponse: + return AsyncTaskRunResourceWithRawResponse(self._beta.task_run) + + @cached_property + def task_group(self) -> AsyncTaskGroupResourceWithRawResponse: + return AsyncTaskGroupResourceWithRawResponse(self._beta.task_group) + + +class BetaResourceWithStreamingResponse: + def __init__(self, beta: BetaResource) -> None: + self._beta = beta + + self.search = to_streamed_response_wrapper( + beta.search, + ) + + @cached_property + def task_run(self) -> TaskRunResourceWithStreamingResponse: + return TaskRunResourceWithStreamingResponse(self._beta.task_run) + + @cached_property + def task_group(self) -> TaskGroupResourceWithStreamingResponse: + return TaskGroupResourceWithStreamingResponse(self._beta.task_group) + + +class AsyncBetaResourceWithStreamingResponse: + def __init__(self, beta: AsyncBetaResource) -> None: + self._beta = beta + + self.search = async_to_streamed_response_wrapper( + beta.search, + ) + + @cached_property + def task_run(self) -> AsyncTaskRunResourceWithStreamingResponse: + return AsyncTaskRunResourceWithStreamingResponse(self._beta.task_run) + + @cached_property + def task_group(self) -> AsyncTaskGroupResourceWithStreamingResponse: + return AsyncTaskGroupResourceWithStreamingResponse(self._beta.task_group) diff --git a/src/parallel/resources/beta/task_group.py b/src/parallel/resources/beta/task_group.py new file mode 100644 index 0000000..0a34e67 --- /dev/null +++ b/src/parallel/resources/beta/task_group.py @@ -0,0 +1,632 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
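The beta.py module above wires the new `/v1beta/search` endpoint into `BetaResource.search`. A minimal usage sketch follows; it assumes the package's top-level client class is named `Parallel`, that the resource is mounted at `client.beta`, and that the API key is resolved from the environment — none of that wiring appears in this hunk.

from parallel import Parallel  # assumed client class name; not shown in this patch

client = Parallel()  # assumes the API key is picked up from the environment

# At least one of `objective` or `search_queries` must be provided.
search = client.beta.search(
    objective="Find recent write-ups comparing async HTTP clients for Python",
    search_queries=["httpx vs aiohttp benchmark"],
    processor="base",          # or "pro"
    max_results=5,
    max_chars_per_result=1500,
)
print(search)  # a SearchResult model; its fields are defined elsewhere in the SDK
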
+ +from __future__ import annotations + +from typing import Any, Dict, List, Union, Iterable, Optional, cast +from typing_extensions import Literal + +import httpx + +from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ..._utils import is_given, maybe_transform, strip_not_given, async_maybe_transform +from ..._compat import cached_property +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ..._streaming import Stream, AsyncStream +from ...types.beta import ( + task_group_create_params, + task_group_events_params, + task_group_add_runs_params, + task_group_get_runs_params, +) +from ..._base_client import make_request_options +from ...types.beta.task_group import TaskGroup +from ...types.task_spec_param import TaskSpecParam +from ...types.beta.parallel_beta_param import ParallelBetaParam +from ...types.beta.beta_run_input_param import BetaRunInputParam +from ...types.beta.task_group_run_response import TaskGroupRunResponse +from ...types.beta.task_group_events_response import TaskGroupEventsResponse +from ...types.beta.task_group_get_runs_response import TaskGroupGetRunsResponse + +__all__ = ["TaskGroupResource", "AsyncTaskGroupResource"] + + +class TaskGroupResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> TaskGroupResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/parallel-web/parallel-sdk-python#accessing-raw-response-data-eg-headers + """ + return TaskGroupResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> TaskGroupResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/parallel-web/parallel-sdk-python#with_streaming_response + """ + return TaskGroupResourceWithStreamingResponse(self) + + def create( + self, + *, + metadata: Optional[Dict[str, Union[str, float, bool]]] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> TaskGroup: + """ + Initiates a TaskGroup to group and track multiple runs. + + Args: + metadata: User-provided metadata stored with the task group. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._post( + "/v1beta/tasks/groups", + body=maybe_transform({"metadata": metadata}, task_group_create_params.TaskGroupCreateParams), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=TaskGroup, + ) + + def retrieve( + self, + task_group_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
+ # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> TaskGroup: + """ + Retrieves aggregated status across runs in a TaskGroup. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not task_group_id: + raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}") + return self._get( + f"/v1beta/tasks/groups/{task_group_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=TaskGroup, + ) + + def add_runs( + self, + task_group_id: str, + *, + inputs: Iterable[BetaRunInputParam], + default_task_spec: Optional[TaskSpecParam] | NotGiven = NOT_GIVEN, + betas: List[ParallelBetaParam] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> TaskGroupRunResponse: + """ + Initiates multiple task runs within a TaskGroup. + + Args: + inputs: List of task runs to execute. + + default_task_spec: Specification for a task. + + Auto output schemas can be specified by setting `output_schema={"type":"auto"}`. + Not specifying a TaskSpec is the same as setting an auto output schema. + + For convenience bare strings are also accepted as input or output schemas. + + betas: Optional header to specify the beta version(s) to enable. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not task_group_id: + raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}") + extra_headers = { + **strip_not_given({"parallel-beta": ",".join(str(e) for e in betas) if is_given(betas) else NOT_GIVEN}), + **(extra_headers or {}), + } + return self._post( + f"/v1beta/tasks/groups/{task_group_id}/runs", + body=maybe_transform( + { + "inputs": inputs, + "default_task_spec": default_task_spec, + }, + task_group_add_runs_params.TaskGroupAddRunsParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=TaskGroupRunResponse, + ) + + def events( + self, + task_group_id: str, + *, + last_event_id: Optional[str] | NotGiven = NOT_GIVEN, + api_timeout: Optional[float] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Stream[TaskGroupEventsResponse]: + """ + Streams events from a TaskGroup: status updates and run completions. + + The connection will remain open for up to 10 minutes as long as at least one run + in the TaskGroup is active. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not task_group_id: + raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}") + extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})} + return self._get( + f"/v1beta/tasks/groups/{task_group_id}/events", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "last_event_id": last_event_id, + "api_timeout": api_timeout, + }, + task_group_events_params.TaskGroupEventsParams, + ), + ), + cast_to=cast( + Any, TaskGroupEventsResponse + ), # Union types cannot be passed in as arguments in the type system + stream=True, + stream_cls=Stream[TaskGroupEventsResponse], + ) + + def get_runs( + self, + task_group_id: str, + *, + include_input: bool | NotGiven = NOT_GIVEN, + include_output: bool | NotGiven = NOT_GIVEN, + last_event_id: Optional[str] | NotGiven = NOT_GIVEN, + status: Optional[ + Literal["queued", "action_required", "running", "completed", "failed", "cancelling", "cancelled"] + ] + | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Stream[TaskGroupGetRunsResponse]: + """ + Retrieves task runs in a TaskGroup and optionally their inputs and outputs. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not task_group_id: + raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}") + extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})} + return self._get( + f"/v1beta/tasks/groups/{task_group_id}/runs", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "include_input": include_input, + "include_output": include_output, + "last_event_id": last_event_id, + "status": status, + }, + task_group_get_runs_params.TaskGroupGetRunsParams, + ), + ), + cast_to=cast( + Any, TaskGroupGetRunsResponse + ), # Union types cannot be passed in as arguments in the type system + stream=True, + stream_cls=Stream[TaskGroupGetRunsResponse], + ) + + +class AsyncTaskGroupResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncTaskGroupResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/parallel-web/parallel-sdk-python#accessing-raw-response-data-eg-headers + """ + return AsyncTaskGroupResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncTaskGroupResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/parallel-web/parallel-sdk-python#with_streaming_response + """ + return AsyncTaskGroupResourceWithStreamingResponse(self) + + async def create( + self, + *, + metadata: Optional[Dict[str, Union[str, float, bool]]] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> TaskGroup: + """ + Initiates a TaskGroup to group and track multiple runs. + + Args: + metadata: User-provided metadata stored with the task group. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._post( + "/v1beta/tasks/groups", + body=await async_maybe_transform({"metadata": metadata}, task_group_create_params.TaskGroupCreateParams), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=TaskGroup, + ) + + async def retrieve( + self, + task_group_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> TaskGroup: + """ + Retrieves aggregated status across runs in a TaskGroup. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not task_group_id: + raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}") + return await self._get( + f"/v1beta/tasks/groups/{task_group_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=TaskGroup, + ) + + async def add_runs( + self, + task_group_id: str, + *, + inputs: Iterable[BetaRunInputParam], + default_task_spec: Optional[TaskSpecParam] | NotGiven = NOT_GIVEN, + betas: List[ParallelBetaParam] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> TaskGroupRunResponse: + """ + Initiates multiple task runs within a TaskGroup. + + Args: + inputs: List of task runs to execute. + + default_task_spec: Specification for a task. + + Auto output schemas can be specified by setting `output_schema={"type":"auto"}`. + Not specifying a TaskSpec is the same as setting an auto output schema. + + For convenience bare strings are also accepted as input or output schemas. + + betas: Optional header to specify the beta version(s) to enable. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not task_group_id: + raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}") + extra_headers = { + **strip_not_given({"parallel-beta": ",".join(str(e) for e in betas) if is_given(betas) else NOT_GIVEN}), + **(extra_headers or {}), + } + return await self._post( + f"/v1beta/tasks/groups/{task_group_id}/runs", + body=await async_maybe_transform( + { + "inputs": inputs, + "default_task_spec": default_task_spec, + }, + task_group_add_runs_params.TaskGroupAddRunsParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=TaskGroupRunResponse, + ) + + async def events( + self, + task_group_id: str, + *, + last_event_id: Optional[str] | NotGiven = NOT_GIVEN, + api_timeout: Optional[float] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AsyncStream[TaskGroupEventsResponse]: + """ + Streams events from a TaskGroup: status updates and run completions. + + The connection will remain open for up to 10 minutes as long as at least one run + in the TaskGroup is active. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not task_group_id: + raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}") + extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})} + return await self._get( + f"/v1beta/tasks/groups/{task_group_id}/events", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform( + { + "last_event_id": last_event_id, + "api_timeout": api_timeout, + }, + task_group_events_params.TaskGroupEventsParams, + ), + ), + cast_to=cast( + Any, TaskGroupEventsResponse + ), # Union types cannot be passed in as arguments in the type system + stream=True, + stream_cls=AsyncStream[TaskGroupEventsResponse], + ) + + async def get_runs( + self, + task_group_id: str, + *, + include_input: bool | NotGiven = NOT_GIVEN, + include_output: bool | NotGiven = NOT_GIVEN, + last_event_id: Optional[str] | NotGiven = NOT_GIVEN, + status: Optional[ + Literal["queued", "action_required", "running", "completed", "failed", "cancelling", "cancelled"] + ] + | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AsyncStream[TaskGroupGetRunsResponse]: + """ + Retrieves task runs in a TaskGroup and optionally their inputs and outputs. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not task_group_id: + raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}") + extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})} + return await self._get( + f"/v1beta/tasks/groups/{task_group_id}/runs", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform( + { + "include_input": include_input, + "include_output": include_output, + "last_event_id": last_event_id, + "status": status, + }, + task_group_get_runs_params.TaskGroupGetRunsParams, + ), + ), + cast_to=cast( + Any, TaskGroupGetRunsResponse + ), # Union types cannot be passed in as arguments in the type system + stream=True, + stream_cls=AsyncStream[TaskGroupGetRunsResponse], + ) + + +class TaskGroupResourceWithRawResponse: + def __init__(self, task_group: TaskGroupResource) -> None: + self._task_group = task_group + + self.create = to_raw_response_wrapper( + task_group.create, + ) + self.retrieve = to_raw_response_wrapper( + task_group.retrieve, + ) + self.add_runs = to_raw_response_wrapper( + task_group.add_runs, + ) + self.events = to_raw_response_wrapper( + task_group.events, + ) + self.get_runs = to_raw_response_wrapper( + task_group.get_runs, + ) + + +class AsyncTaskGroupResourceWithRawResponse: + def __init__(self, task_group: AsyncTaskGroupResource) -> None: + self._task_group = task_group + + self.create = async_to_raw_response_wrapper( + task_group.create, + ) + self.retrieve = async_to_raw_response_wrapper( + task_group.retrieve, + ) + self.add_runs = async_to_raw_response_wrapper( + task_group.add_runs, + ) + self.events = async_to_raw_response_wrapper( + task_group.events, + ) + self.get_runs = async_to_raw_response_wrapper( + task_group.get_runs, + ) + + +class TaskGroupResourceWithStreamingResponse: + def __init__(self, task_group: TaskGroupResource) -> None: + self._task_group = task_group + + self.create = to_streamed_response_wrapper( + task_group.create, + ) + self.retrieve = to_streamed_response_wrapper( + task_group.retrieve, + ) + self.add_runs = to_streamed_response_wrapper( + task_group.add_runs, + ) + self.events = to_streamed_response_wrapper( + task_group.events, + ) + self.get_runs = to_streamed_response_wrapper( + task_group.get_runs, + ) + + +class AsyncTaskGroupResourceWithStreamingResponse: + def __init__(self, task_group: AsyncTaskGroupResource) -> None: + self._task_group = task_group + + self.create = async_to_streamed_response_wrapper( + task_group.create, + ) + self.retrieve = async_to_streamed_response_wrapper( + task_group.retrieve, + ) + self.add_runs = async_to_streamed_response_wrapper( + task_group.add_runs, + ) + self.events = async_to_streamed_response_wrapper( + task_group.events, + ) + self.get_runs = async_to_streamed_response_wrapper( + task_group.get_runs, + ) diff --git a/src/parallel/resources/beta/task_run.py b/src/parallel/resources/beta/task_run.py new file mode 100644 index 0000000..8235a1a --- /dev/null +++ b/src/parallel/resources/beta/task_run.py @@ -0,0 +1,499 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
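With task_group.py in place, a typical flow is: create a group, add runs, then iterate one of the SSE streams. The sketch below is hedged: the client class name (`Parallel`), the `client.beta.task_group` mount point, and the `taskgroup_id` attribute on the returned TaskGroup are assumptions, since the client wiring and the TaskGroup model body are not part of this hunk.

from parallel import Parallel  # assumed client class name

client = Parallel()

group = client.beta.task_group.create(metadata={"experiment": "demo"})
group_id = group.taskgroup_id  # attribute name assumed; the TaskGroup model is defined elsewhere

client.beta.task_group.add_runs(
    group_id,
    inputs=[
        {"input": "What is the capital of France?", "processor": "base"},
        {"input": "What is the capital of Japan?", "processor": "base"},
    ],
)

# `events` and `get_runs` both return a Stream of server-sent events;
# iterating the stream reads events until the server closes the connection.
for event in client.beta.task_group.get_runs(group_id, include_output=True):
    print(event)
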
+ +from __future__ import annotations + +from typing import Any, Dict, List, Union, Iterable, Optional, cast + +import httpx + +from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ..._utils import is_given, maybe_transform, strip_not_given, async_maybe_transform +from ..._compat import cached_property +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ..._streaming import Stream, AsyncStream +from ...types.beta import task_run_create_params, task_run_result_params +from ..._base_client import make_request_options +from ...types.task_run import TaskRun +from ...types.task_spec_param import TaskSpecParam +from ...types.beta.webhook_param import WebhookParam +from ...types.beta.mcp_server_param import McpServerParam +from ...types.beta.parallel_beta_param import ParallelBetaParam +from ...types.beta.beta_task_run_result import BetaTaskRunResult +from ...types.shared_params.source_policy import SourcePolicy +from ...types.beta.task_run_events_response import TaskRunEventsResponse + +__all__ = ["TaskRunResource", "AsyncTaskRunResource"] + + +class TaskRunResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> TaskRunResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/parallel-web/parallel-sdk-python#accessing-raw-response-data-eg-headers + """ + return TaskRunResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> TaskRunResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/parallel-web/parallel-sdk-python#with_streaming_response + """ + return TaskRunResourceWithStreamingResponse(self) + + def create( + self, + *, + input: Union[str, Dict[str, object]], + processor: str, + enable_events: Optional[bool] | NotGiven = NOT_GIVEN, + mcp_servers: Optional[Iterable[McpServerParam]] | NotGiven = NOT_GIVEN, + metadata: Optional[Dict[str, Union[str, float, bool]]] | NotGiven = NOT_GIVEN, + source_policy: Optional[SourcePolicy] | NotGiven = NOT_GIVEN, + task_spec: Optional[TaskSpecParam] | NotGiven = NOT_GIVEN, + webhook: Optional[WebhookParam] | NotGiven = NOT_GIVEN, + betas: List[ParallelBetaParam] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> TaskRun: + """ + Initiates a task run. + + Returns immediately with a run object in status 'queued'. + + Beta features can be enabled by setting the 'parallel-beta' header. + + Args: + input: Input to the task, either text or a JSON object. + + processor: Processor to use for the task. + + enable_events: Controls tracking of task run execution progress. When set to true, progress + events are recorded and can be accessed via the + [Task Run events](https://platform.parallel.ai/api-reference) endpoint. When + false, no progress events are tracked. 
Note that progress tracking cannot be + enabled after a run has been created. The flag is set to true by default for + premium processors (pro and above). This feature is not available via the Python + SDK. To enable this feature in your API requests, specify the `parallel-beta` + header with `events-sse-2025-07-24` value. + + mcp_servers: Optional list of MCP servers to use for the run. This feature is not available + via the Python SDK. To enable this feature in your API requests, specify the + `parallel-beta` header with `mcp-server-2025-07-17` value. + + metadata: User-provided metadata stored with the run. Keys and values must be strings with + a maximum length of 16 and 512 characters respectively. + + source_policy: Source policy for web search results. + + This policy governs which sources are allowed/disallowed in results. + + task_spec: Specification for a task. + + Auto output schemas can be specified by setting `output_schema={"type":"auto"}`. + Not specifying a TaskSpec is the same as setting an auto output schema. + + For convenience bare strings are also accepted as input or output schemas. + + webhook: Webhooks for Task Runs. + + betas: Optional header to specify the beta version(s) to enable. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + extra_headers = { + **strip_not_given({"parallel-beta": ",".join(str(e) for e in betas) if is_given(betas) else NOT_GIVEN}), + **(extra_headers or {}), + } + return self._post( + "/v1/tasks/runs?beta=true", + body=maybe_transform( + { + "input": input, + "processor": processor, + "enable_events": enable_events, + "mcp_servers": mcp_servers, + "metadata": metadata, + "source_policy": source_policy, + "task_spec": task_spec, + "webhook": webhook, + }, + task_run_create_params.TaskRunCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=TaskRun, + ) + + def events( + self, + run_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Stream[TaskRunEventsResponse]: + """ + Streams events for a task run. + + Returns a stream of events showing progress updates and state changes for the + task run. + + For task runs that did not have enable_events set to true during creation, the + frequency of events will be reduced. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not run_id: + raise ValueError(f"Expected a non-empty value for `run_id` but received {run_id!r}") + extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})} + return self._get( + f"/v1beta/tasks/runs/{run_id}/events", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=cast(Any, TaskRunEventsResponse), # Union types cannot be passed in as arguments in the type system + stream=True, + stream_cls=Stream[TaskRunEventsResponse], + ) + + def result( + self, + run_id: str, + *, + api_timeout: int | NotGiven = NOT_GIVEN, + betas: List[ParallelBetaParam] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> BetaTaskRunResult: + """ + Retrieves a run result by run_id, blocking until the run is completed. + + Args: + betas: Optional header to specify the beta version(s) to enable. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not run_id: + raise ValueError(f"Expected a non-empty value for `run_id` but received {run_id!r}") + extra_headers = { + **strip_not_given({"parallel-beta": ",".join(str(e) for e in betas) if is_given(betas) else NOT_GIVEN}), + **(extra_headers or {}), + } + return self._get( + f"/v1/tasks/runs/{run_id}/result?beta=true", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform({"api_timeout": api_timeout}, task_run_result_params.TaskRunResultParams), + ), + cast_to=BetaTaskRunResult, + ) + + +class AsyncTaskRunResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncTaskRunResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/parallel-web/parallel-sdk-python#accessing-raw-response-data-eg-headers + """ + return AsyncTaskRunResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncTaskRunResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
+ + For more information, see https://www.github.com/parallel-web/parallel-sdk-python#with_streaming_response + """ + return AsyncTaskRunResourceWithStreamingResponse(self) + + async def create( + self, + *, + input: Union[str, Dict[str, object]], + processor: str, + enable_events: Optional[bool] | NotGiven = NOT_GIVEN, + mcp_servers: Optional[Iterable[McpServerParam]] | NotGiven = NOT_GIVEN, + metadata: Optional[Dict[str, Union[str, float, bool]]] | NotGiven = NOT_GIVEN, + source_policy: Optional[SourcePolicy] | NotGiven = NOT_GIVEN, + task_spec: Optional[TaskSpecParam] | NotGiven = NOT_GIVEN, + webhook: Optional[WebhookParam] | NotGiven = NOT_GIVEN, + betas: List[ParallelBetaParam] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> TaskRun: + """ + Initiates a task run. + + Returns immediately with a run object in status 'queued'. + + Beta features can be enabled by setting the 'parallel-beta' header. + + Args: + input: Input to the task, either text or a JSON object. + + processor: Processor to use for the task. + + enable_events: Controls tracking of task run execution progress. When set to true, progress + events are recorded and can be accessed via the + [Task Run events](https://platform.parallel.ai/api-reference) endpoint. When + false, no progress events are tracked. Note that progress tracking cannot be + enabled after a run has been created. The flag is set to true by default for + premium processors (pro and above). This feature is not available via the Python + SDK. To enable this feature in your API requests, specify the `parallel-beta` + header with `events-sse-2025-07-24` value. + + mcp_servers: Optional list of MCP servers to use for the run. This feature is not available + via the Python SDK. To enable this feature in your API requests, specify the + `parallel-beta` header with `mcp-server-2025-07-17` value. + + metadata: User-provided metadata stored with the run. Keys and values must be strings with + a maximum length of 16 and 512 characters respectively. + + source_policy: Source policy for web search results. + + This policy governs which sources are allowed/disallowed in results. + + task_spec: Specification for a task. + + Auto output schemas can be specified by setting `output_schema={"type":"auto"}`. + Not specifying a TaskSpec is the same as setting an auto output schema. + + For convenience bare strings are also accepted as input or output schemas. + + webhook: Webhooks for Task Runs. + + betas: Optional header to specify the beta version(s) to enable. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + extra_headers = { + **strip_not_given({"parallel-beta": ",".join(str(e) for e in betas) if is_given(betas) else NOT_GIVEN}), + **(extra_headers or {}), + } + return await self._post( + "/v1/tasks/runs?beta=true", + body=await async_maybe_transform( + { + "input": input, + "processor": processor, + "enable_events": enable_events, + "mcp_servers": mcp_servers, + "metadata": metadata, + "source_policy": source_policy, + "task_spec": task_spec, + "webhook": webhook, + }, + task_run_create_params.TaskRunCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=TaskRun, + ) + + async def events( + self, + run_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AsyncStream[TaskRunEventsResponse]: + """ + Streams events for a task run. + + Returns a stream of events showing progress updates and state changes for the + task run. + + For task runs that did not have enable_events set to true during creation, the + frequency of events will be reduced. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not run_id: + raise ValueError(f"Expected a non-empty value for `run_id` but received {run_id!r}") + extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})} + return await self._get( + f"/v1beta/tasks/runs/{run_id}/events", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=cast(Any, TaskRunEventsResponse), # Union types cannot be passed in as arguments in the type system + stream=True, + stream_cls=AsyncStream[TaskRunEventsResponse], + ) + + async def result( + self, + run_id: str, + *, + api_timeout: int | NotGiven = NOT_GIVEN, + betas: List[ParallelBetaParam] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> BetaTaskRunResult: + """ + Retrieves a run result by run_id, blocking until the run is completed. + + Args: + betas: Optional header to specify the beta version(s) to enable. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not run_id: + raise ValueError(f"Expected a non-empty value for `run_id` but received {run_id!r}") + extra_headers = { + **strip_not_given({"parallel-beta": ",".join(str(e) for e in betas) if is_given(betas) else NOT_GIVEN}), + **(extra_headers or {}), + } + return await self._get( + f"/v1/tasks/runs/{run_id}/result?beta=true", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform( + {"api_timeout": api_timeout}, task_run_result_params.TaskRunResultParams + ), + ), + cast_to=BetaTaskRunResult, + ) + + +class TaskRunResourceWithRawResponse: + def __init__(self, task_run: TaskRunResource) -> None: + self._task_run = task_run + + self.create = to_raw_response_wrapper( + task_run.create, + ) + self.events = to_raw_response_wrapper( + task_run.events, + ) + self.result = to_raw_response_wrapper( + task_run.result, + ) + + +class AsyncTaskRunResourceWithRawResponse: + def __init__(self, task_run: AsyncTaskRunResource) -> None: + self._task_run = task_run + + self.create = async_to_raw_response_wrapper( + task_run.create, + ) + self.events = async_to_raw_response_wrapper( + task_run.events, + ) + self.result = async_to_raw_response_wrapper( + task_run.result, + ) + + +class TaskRunResourceWithStreamingResponse: + def __init__(self, task_run: TaskRunResource) -> None: + self._task_run = task_run + + self.create = to_streamed_response_wrapper( + task_run.create, + ) + self.events = to_streamed_response_wrapper( + task_run.events, + ) + self.result = to_streamed_response_wrapper( + task_run.result, + ) + + +class AsyncTaskRunResourceWithStreamingResponse: + def __init__(self, task_run: AsyncTaskRunResource) -> None: + self._task_run = task_run + + self.create = async_to_streamed_response_wrapper( + task_run.create, + ) + self.events = async_to_streamed_response_wrapper( + task_run.events, + ) + self.result = async_to_streamed_response_wrapper( + task_run.result, + ) diff --git a/src/parallel/resources/task_run.py b/src/parallel/resources/task_run.py index d05648d..9fe16d9 100644 --- a/src/parallel/resources/task_run.py +++ b/src/parallel/resources/task_run.py @@ -31,6 +31,7 @@ wait_for_result_async as _wait_for_result_async, task_run_result_parser, ) +from ..types.shared_params.source_policy import SourcePolicy __all__ = ["TaskRunResource", "AsyncTaskRunResource"] @@ -58,9 +59,10 @@ def with_streaming_response(self) -> TaskRunResourceWithStreamingResponse: def create( self, *, - input: Union[str, object], + input: Union[str, Dict[str, object]], processor: str, metadata: Optional[Dict[str, Union[str, float, bool]]] | NotGiven = NOT_GIVEN, + source_policy: Optional[SourcePolicy] | NotGiven = NOT_GIVEN, task_spec: Optional[TaskSpecParam] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. @@ -70,7 +72,11 @@ def create( timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> TaskRun: """ - Initiates a single task run. + Initiates a task run. + + Returns immediately with a run object in status 'queued'. 
+ + Beta features can be enabled by setting the 'parallel-beta' header. Args: input: Input to the task, either text or a JSON object. @@ -80,10 +86,16 @@ def create( metadata: User-provided metadata stored with the run. Keys and values must be strings with a maximum length of 16 and 512 characters respectively. + source_policy: Source policy for web search results. + + This policy governs which sources are allowed/disallowed in results. + task_spec: Specification for a task. - For convenience we allow bare strings as input or output schemas, which is - equivalent to a text schema with the same description. + Auto output schemas can be specified by setting `output_schema={"type":"auto"}`. + Not specifying a TaskSpec is the same as setting an auto output schema. + + For convenience bare strings are also accepted as input or output schemas. extra_headers: Send extra headers @@ -100,6 +112,7 @@ def create( "input": input, "processor": processor, "metadata": metadata, + "source_policy": source_policy, "task_spec": task_spec, }, task_run_create_params.TaskRunCreateParams, @@ -122,7 +135,9 @@ def retrieve( timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> TaskRun: """ - Retrieves a run by run_id. + Retrieves run status by run_id. + + The run result is available from the `/result` endpoint. Args: extra_headers: Send extra headers @@ -156,7 +171,7 @@ def result( timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> TaskRunResult: """ - Retrieves a run by run_id, blocking until the run is completed. + Retrieves a run result by run_id, blocking until the run is completed. Args: extra_headers: Send extra headers @@ -333,9 +348,10 @@ def with_streaming_response(self) -> AsyncTaskRunResourceWithStreamingResponse: async def create( self, *, - input: Union[str, object], + input: Union[str, Dict[str, object]], processor: str, metadata: Optional[Dict[str, Union[str, float, bool]]] | NotGiven = NOT_GIVEN, + source_policy: Optional[SourcePolicy] | NotGiven = NOT_GIVEN, task_spec: Optional[TaskSpecParam] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. @@ -345,7 +361,11 @@ async def create( timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> TaskRun: """ - Initiates a single task run. + Initiates a task run. + + Returns immediately with a run object in status 'queued'. + + Beta features can be enabled by setting the 'parallel-beta' header. Args: input: Input to the task, either text or a JSON object. @@ -355,10 +375,16 @@ async def create( metadata: User-provided metadata stored with the run. Keys and values must be strings with a maximum length of 16 and 512 characters respectively. + source_policy: Source policy for web search results. + + This policy governs which sources are allowed/disallowed in results. + task_spec: Specification for a task. - For convenience we allow bare strings as input or output schemas, which is - equivalent to a text schema with the same description. + Auto output schemas can be specified by setting `output_schema={"type":"auto"}`. + Not specifying a TaskSpec is the same as setting an auto output schema. + + For convenience bare strings are also accepted as input or output schemas. 
extra_headers: Send extra headers @@ -375,6 +401,7 @@ async def create( "input": input, "processor": processor, "metadata": metadata, + "source_policy": source_policy, "task_spec": task_spec, }, task_run_create_params.TaskRunCreateParams, @@ -397,7 +424,9 @@ async def retrieve( timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> TaskRun: """ - Retrieves a run by run_id. + Retrieves run status by run_id. + + The run result is available from the `/result` endpoint. Args: extra_headers: Send extra headers @@ -431,7 +460,7 @@ async def result( timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> TaskRunResult: """ - Retrieves a run by run_id, blocking until the run is completed. + Retrieves a run result by run_id, blocking until the run is completed. Args: extra_headers: Send extra headers diff --git a/src/parallel/types/__init__.py b/src/parallel/types/__init__.py index 9404593..15d056e 100644 --- a/src/parallel/types/__init__.py +++ b/src/parallel/types/__init__.py @@ -2,11 +2,22 @@ from __future__ import annotations -from .shared import ErrorObject as ErrorObject, ErrorResponse as ErrorResponse +from .shared import ( + Warning as Warning, + ErrorObject as ErrorObject, + SourcePolicy as SourcePolicy, + ErrorResponse as ErrorResponse, +) from .citation import Citation as Citation from .task_run import TaskRun as TaskRun +from .task_spec import TaskSpec as TaskSpec +from .auto_schema import AutoSchema as AutoSchema +from .field_basis import FieldBasis as FieldBasis +from .json_schema import JsonSchema as JsonSchema +from .text_schema import TextSchema as TextSchema from .task_run_result import TaskRunResult as TaskRunResult from .task_spec_param import TaskSpecParam as TaskSpecParam +from .auto_schema_param import AutoSchemaParam as AutoSchemaParam from .json_schema_param import JsonSchemaParam as JsonSchemaParam from .text_schema_param import TextSchemaParam as TextSchemaParam from .task_run_json_output import TaskRunJsonOutput as TaskRunJsonOutput diff --git a/src/parallel/types/auto_schema.py b/src/parallel/types/auto_schema.py new file mode 100644 index 0000000..6ff429e --- /dev/null +++ b/src/parallel/types/auto_schema.py @@ -0,0 +1,13 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from typing_extensions import Literal + +from .._models import BaseModel + +__all__ = ["AutoSchema"] + + +class AutoSchema(BaseModel): + type: Optional[Literal["auto"]] = None + """The type of schema being defined. Always `auto`.""" diff --git a/src/parallel/types/auto_schema_param.py b/src/parallel/types/auto_schema_param.py new file mode 100644 index 0000000..54d5196 --- /dev/null +++ b/src/parallel/types/auto_schema_param.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Literal, TypedDict + +__all__ = ["AutoSchemaParam"] + + +class AutoSchemaParam(TypedDict, total=False): + type: Literal["auto"] + """The type of schema being defined. Always `auto`.""" diff --git a/src/parallel/types/beta/__init__.py b/src/parallel/types/beta/__init__.py index f8ee8b1..a872a43 100644 --- a/src/parallel/types/beta/__init__.py +++ b/src/parallel/types/beta/__init__.py @@ -1,3 +1,30 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
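The beta task_run resource and the `source_policy` parameter added to the stable resource above combine as in the sketch below. Assumptions not confirmed by this patch: the client class is `Parallel`, the resource lives at `client.beta.task_run`, the TaskRun model exposes a `run_id` attribute, and `SourcePolicy` accepts an `include_domains` list (its fields live in shared_params/source_policy.py, which this hunk does not touch).

from parallel import Parallel  # assumed client class name

client = Parallel()

run = client.beta.task_run.create(
    input={"question": "Summarize recent changes to httpx"},
    processor="pro",
    enable_events=True,
    source_policy={"include_domains": ["github.com"]},  # field name is an assumption
    betas=["events-sse-2025-07-24"],  # serialized into the parallel-beta header
)

# Progress events arrive as a server-sent event stream.
for event in client.beta.task_run.events(run.run_id):  # `run_id` attribute assumed
    print(event)

# Block until the run completes; `api_timeout` is forwarded as a query parameter.
result = client.beta.task_run.result(run.run_id, api_timeout=600)
print(result.output)
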
from __future__ import annotations + +from .webhook import Webhook as Webhook +from .mcp_server import McpServer as McpServer +from .task_group import TaskGroup as TaskGroup +from .error_event import ErrorEvent as ErrorEvent +from .mcp_tool_call import McpToolCall as McpToolCall +from .search_result import SearchResult as SearchResult +from .webhook_param import WebhookParam as WebhookParam +from .beta_run_input import BetaRunInput as BetaRunInput +from .task_run_event import TaskRunEvent as TaskRunEvent +from .mcp_server_param import McpServerParam as McpServerParam +from .task_group_status import TaskGroupStatus as TaskGroupStatus +from .web_search_result import WebSearchResult as WebSearchResult +from .beta_search_params import BetaSearchParams as BetaSearchParams +from .parallel_beta_param import ParallelBetaParam as ParallelBetaParam +from .beta_run_input_param import BetaRunInputParam as BetaRunInputParam +from .beta_task_run_result import BetaTaskRunResult as BetaTaskRunResult +from .task_run_create_params import TaskRunCreateParams as TaskRunCreateParams +from .task_run_result_params import TaskRunResultParams as TaskRunResultParams +from .task_group_run_response import TaskGroupRunResponse as TaskGroupRunResponse +from .task_group_create_params import TaskGroupCreateParams as TaskGroupCreateParams +from .task_group_events_params import TaskGroupEventsParams as TaskGroupEventsParams +from .task_run_events_response import TaskRunEventsResponse as TaskRunEventsResponse +from .task_group_add_runs_params import TaskGroupAddRunsParams as TaskGroupAddRunsParams +from .task_group_events_response import TaskGroupEventsResponse as TaskGroupEventsResponse +from .task_group_get_runs_params import TaskGroupGetRunsParams as TaskGroupGetRunsParams +from .task_group_get_runs_response import TaskGroupGetRunsResponse as TaskGroupGetRunsResponse diff --git a/src/parallel/types/beta/beta_run_input.py b/src/parallel/types/beta/beta_run_input.py new file mode 100644 index 0000000..7707711 --- /dev/null +++ b/src/parallel/types/beta/beta_run_input.py @@ -0,0 +1,63 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Dict, List, Union, Optional + +from .webhook import Webhook +from ..._models import BaseModel +from ..task_spec import TaskSpec +from .mcp_server import McpServer +from ..shared.source_policy import SourcePolicy + +__all__ = ["BetaRunInput"] + + +class BetaRunInput(BaseModel): + input: Union[str, Dict[str, object]] + """Input to the task, either text or a JSON object.""" + + processor: str + """Processor to use for the task.""" + + enable_events: Optional[bool] = None + """Controls tracking of task run execution progress. + + When set to true, progress events are recorded and can be accessed via the + [Task Run events](https://platform.parallel.ai/api-reference) endpoint. When + false, no progress events are tracked. Note that progress tracking cannot be + enabled after a run has been created. The flag is set to true by default for + premium processors (pro and above). This feature is not available via the Python + SDK. To enable this feature in your API requests, specify the `parallel-beta` + header with `events-sse-2025-07-24` value. + """ + + mcp_servers: Optional[List[McpServer]] = None + """ + Optional list of MCP servers to use for the run. This feature is not available + via the Python SDK. To enable this feature in your API requests, specify the + `parallel-beta` header with `mcp-server-2025-07-17` value. 
+ """ + + metadata: Optional[Dict[str, Union[str, float, bool]]] = None + """User-provided metadata stored with the run. + + Keys and values must be strings with a maximum length of 16 and 512 characters + respectively. + """ + + source_policy: Optional[SourcePolicy] = None + """Source policy for web search results. + + This policy governs which sources are allowed/disallowed in results. + """ + + task_spec: Optional[TaskSpec] = None + """Specification for a task. + + Auto output schemas can be specified by setting `output_schema={"type":"auto"}`. + Not specifying a TaskSpec is the same as setting an auto output schema. + + For convenience bare strings are also accepted as input or output schemas. + """ + + webhook: Optional[Webhook] = None + """Webhooks for Task Runs.""" diff --git a/src/parallel/types/beta/beta_run_input_param.py b/src/parallel/types/beta/beta_run_input_param.py new file mode 100644 index 0000000..715f6ed --- /dev/null +++ b/src/parallel/types/beta/beta_run_input_param.py @@ -0,0 +1,65 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Dict, Union, Iterable, Optional +from typing_extensions import Required, TypedDict + +from .webhook_param import WebhookParam +from ..task_spec_param import TaskSpecParam +from .mcp_server_param import McpServerParam +from ..shared_params.source_policy import SourcePolicy + +__all__ = ["BetaRunInputParam"] + + +class BetaRunInputParam(TypedDict, total=False): + input: Required[Union[str, Dict[str, object]]] + """Input to the task, either text or a JSON object.""" + + processor: Required[str] + """Processor to use for the task.""" + + enable_events: Optional[bool] + """Controls tracking of task run execution progress. + + When set to true, progress events are recorded and can be accessed via the + [Task Run events](https://platform.parallel.ai/api-reference) endpoint. When + false, no progress events are tracked. Note that progress tracking cannot be + enabled after a run has been created. The flag is set to true by default for + premium processors (pro and above). This feature is not available via the Python + SDK. To enable this feature in your API requests, specify the `parallel-beta` + header with `events-sse-2025-07-24` value. + """ + + mcp_servers: Optional[Iterable[McpServerParam]] + """ + Optional list of MCP servers to use for the run. This feature is not available + via the Python SDK. To enable this feature in your API requests, specify the + `parallel-beta` header with `mcp-server-2025-07-17` value. + """ + + metadata: Optional[Dict[str, Union[str, float, bool]]] + """User-provided metadata stored with the run. + + Keys and values must be strings with a maximum length of 16 and 512 characters + respectively. + """ + + source_policy: Optional[SourcePolicy] + """Source policy for web search results. + + This policy governs which sources are allowed/disallowed in results. + """ + + task_spec: Optional[TaskSpecParam] + """Specification for a task. + + Auto output schemas can be specified by setting `output_schema={"type":"auto"}`. + Not specifying a TaskSpec is the same as setting an auto output schema. + + For convenience bare strings are also accepted as input or output schemas. 
+ """ + + webhook: Optional[WebhookParam] + """Webhooks for Task Runs.""" diff --git a/src/parallel/types/beta/beta_search_params.py b/src/parallel/types/beta/beta_search_params.py new file mode 100644 index 0000000..b6b55a6 --- /dev/null +++ b/src/parallel/types/beta/beta_search_params.py @@ -0,0 +1,47 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import List, Optional +from typing_extensions import Literal, TypedDict + +from ..shared_params.source_policy import SourcePolicy + +__all__ = ["BetaSearchParams"] + + +class BetaSearchParams(TypedDict, total=False): + max_chars_per_result: Optional[int] + """ + Upper bound on the number of characters to include in excerpts for each search + result. + """ + + max_results: Optional[int] + """Upper bound on the number of results to return. + + May be limited by the processor. Defaults to 10 if not provided. + """ + + objective: Optional[str] + """Natural-language description of what the web search is trying to find. + + May include guidance about preferred sources or freshness. At least one of + objective or search_queries must be provided. + """ + + processor: Literal["base", "pro"] + """Search processor.""" + + search_queries: Optional[List[str]] + """Optional list of traditional keyword search queries to guide the search. + + May contain search operators. At least one of objective or search_queries must + be provided. + """ + + source_policy: Optional[SourcePolicy] + """Source policy for web search results. + + This policy governs which sources are allowed/disallowed in results. + """ diff --git a/src/parallel/types/beta/beta_task_run_result.py b/src/parallel/types/beta/beta_task_run_result.py new file mode 100644 index 0000000..3db1c50 --- /dev/null +++ b/src/parallel/types/beta/beta_task_run_result.py @@ -0,0 +1,74 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Dict, List, Union, Optional +from typing_extensions import Literal, Annotated, TypeAlias + +from ..._utils import PropertyInfo +from ..._models import BaseModel +from ..task_run import TaskRun +from ..field_basis import FieldBasis +from .mcp_tool_call import McpToolCall + +__all__ = ["BetaTaskRunResult", "Output", "OutputBetaTaskRunTextOutput", "OutputBetaTaskRunJsonOutput"] + + +class OutputBetaTaskRunTextOutput(BaseModel): + basis: List[FieldBasis] + """Basis for the output.""" + + content: str + """Text output from the task.""" + + type: Literal["text"] + """ + The type of output being returned, as determined by the output schema of the + task spec. + """ + + beta_fields: Optional[Dict[str, object]] = None + """Always None.""" + + mcp_tool_calls: Optional[List[McpToolCall]] = None + """MCP tool calls made by the task.""" + + +class OutputBetaTaskRunJsonOutput(BaseModel): + basis: List[FieldBasis] + """Basis for the output.""" + + content: Dict[str, object] + """ + Output from the task as a native JSON object, as determined by the output schema + of the task spec. + """ + + type: Literal["json"] + """ + The type of output being returned, as determined by the output schema of the + task spec. + """ + + beta_fields: Optional[Dict[str, object]] = None + """Always None.""" + + mcp_tool_calls: Optional[List[McpToolCall]] = None + """MCP tool calls made by the task.""" + + output_schema: Optional[Dict[str, object]] = None + """Output schema for the Task Run. + + Populated only if the task was executed with an auto schema. 
+ """ + + +Output: TypeAlias = Annotated[ + Union[OutputBetaTaskRunTextOutput, OutputBetaTaskRunJsonOutput], PropertyInfo(discriminator="type") +] + + +class BetaTaskRunResult(BaseModel): + output: Output + """Output from the task conforming to the output schema.""" + + run: TaskRun + """Status of a task run.""" diff --git a/src/parallel/types/beta/error_event.py b/src/parallel/types/beta/error_event.py new file mode 100644 index 0000000..aefa872 --- /dev/null +++ b/src/parallel/types/beta/error_event.py @@ -0,0 +1,16 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing_extensions import Literal + +from ..._models import BaseModel +from ..shared.error_object import ErrorObject + +__all__ = ["ErrorEvent"] + + +class ErrorEvent(BaseModel): + error: ErrorObject + """An error message.""" + + type: Literal["error"] + """Event type; always 'error'.""" diff --git a/src/parallel/types/beta/mcp_server.py b/src/parallel/types/beta/mcp_server.py new file mode 100644 index 0000000..192493f --- /dev/null +++ b/src/parallel/types/beta/mcp_server.py @@ -0,0 +1,25 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Dict, List, Optional +from typing_extensions import Literal + +from ..._models import BaseModel + +__all__ = ["McpServer"] + + +class McpServer(BaseModel): + name: str + """Name of the MCP server.""" + + url: str + """URL of the MCP server.""" + + allowed_tools: Optional[List[str]] = None + """List of allowed tools for the MCP server.""" + + headers: Optional[Dict[str, str]] = None + """Headers for the MCP server.""" + + type: Optional[Literal["url"]] = None + """Type of MCP server being configured. Always `url`.""" diff --git a/src/parallel/types/beta/mcp_server_param.py b/src/parallel/types/beta/mcp_server_param.py new file mode 100644 index 0000000..cef54ce --- /dev/null +++ b/src/parallel/types/beta/mcp_server_param.py @@ -0,0 +1,25 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Dict, List, Optional +from typing_extensions import Literal, Required, TypedDict + +__all__ = ["McpServerParam"] + + +class McpServerParam(TypedDict, total=False): + name: Required[str] + """Name of the MCP server.""" + + url: Required[str] + """URL of the MCP server.""" + + allowed_tools: Optional[List[str]] + """List of allowed tools for the MCP server.""" + + headers: Optional[Dict[str, str]] + """Headers for the MCP server.""" + + type: Literal["url"] + """Type of MCP server being configured. Always `url`.""" diff --git a/src/parallel/types/beta/mcp_tool_call.py b/src/parallel/types/beta/mcp_tool_call.py new file mode 100644 index 0000000..daa7cca --- /dev/null +++ b/src/parallel/types/beta/mcp_tool_call.py @@ -0,0 +1,27 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import Optional + +from ..._models import BaseModel + +__all__ = ["McpToolCall"] + + +class McpToolCall(BaseModel): + arguments: str + """Arguments used to call the MCP tool.""" + + server_name: str + """Name of the MCP server.""" + + tool_call_id: str + """Identifier for the tool call.""" + + tool_name: str + """Name of the tool being called.""" + + content: Optional[str] = None + """Output received from the tool call, if successful.""" + + error: Optional[str] = None + """Error message if the tool call failed.""" diff --git a/src/parallel/types/beta/parallel_beta_param.py b/src/parallel/types/beta/parallel_beta_param.py new file mode 100644 index 0000000..80b4de0 --- /dev/null +++ b/src/parallel/types/beta/parallel_beta_param.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Union +from typing_extensions import Literal, TypeAlias + +__all__ = ["ParallelBetaParam"] + +ParallelBetaParam: TypeAlias = Union[ + Literal["mcp-server-2025-07-17", "events-sse-2025-07-24", "webhook-2025-08-12"], str +] diff --git a/src/parallel/types/beta/search_result.py b/src/parallel/types/beta/search_result.py new file mode 100644 index 0000000..05c945c --- /dev/null +++ b/src/parallel/types/beta/search_result.py @@ -0,0 +1,16 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List + +from ..._models import BaseModel +from .web_search_result import WebSearchResult + +__all__ = ["SearchResult"] + + +class SearchResult(BaseModel): + results: List[WebSearchResult] + """A list of WebSearchResult objects, ordered by decreasing relevance.""" + + search_id: str + """Search ID. Example: `search_cad0a6d2-dec0-46bd-95ae-900527d880e7`""" diff --git a/src/parallel/types/beta/task_group.py b/src/parallel/types/beta/task_group.py new file mode 100644 index 0000000..247bddb --- /dev/null +++ b/src/parallel/types/beta/task_group.py @@ -0,0 +1,24 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Dict, Union, Optional + +from pydantic import Field as FieldInfo + +from ..._models import BaseModel +from .task_group_status import TaskGroupStatus + +__all__ = ["TaskGroup"] + + +class TaskGroup(BaseModel): + created_at: Optional[str] = None + """Timestamp of the creation of the group, as an RFC 3339 string.""" + + status: TaskGroupStatus + """Status of a task group.""" + + task_group_id: str = FieldInfo(alias="taskgroup_id") + """ID of the group.""" + + metadata: Optional[Dict[str, Union[str, float, bool]]] = None + """User-provided metadata stored with the group.""" diff --git a/src/parallel/types/beta/task_group_add_runs_params.py b/src/parallel/types/beta/task_group_add_runs_params.py new file mode 100644 index 0000000..68523f9 --- /dev/null +++ b/src/parallel/types/beta/task_group_add_runs_params.py @@ -0,0 +1,30 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing import List, Iterable, Optional +from typing_extensions import Required, Annotated, TypedDict + +from ..._utils import PropertyInfo +from ..task_spec_param import TaskSpecParam +from .parallel_beta_param import ParallelBetaParam +from .beta_run_input_param import BetaRunInputParam + +__all__ = ["TaskGroupAddRunsParams"] + + +class TaskGroupAddRunsParams(TypedDict, total=False): + inputs: Required[Iterable[BetaRunInputParam]] + """List of task runs to execute.""" + + default_task_spec: Optional[TaskSpecParam] + """Specification for a task. + + Auto output schemas can be specified by setting `output_schema={"type":"auto"}`. + Not specifying a TaskSpec is the same as setting an auto output schema. + + For convenience bare strings are also accepted as input or output schemas. + """ + + betas: Annotated[List[ParallelBetaParam], PropertyInfo(alias="parallel-beta")] + """Optional header to specify the beta version(s) to enable.""" diff --git a/src/parallel/types/beta/task_group_create_params.py b/src/parallel/types/beta/task_group_create_params.py new file mode 100644 index 0000000..2b5cc73 --- /dev/null +++ b/src/parallel/types/beta/task_group_create_params.py @@ -0,0 +1,13 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Dict, Union, Optional +from typing_extensions import TypedDict + +__all__ = ["TaskGroupCreateParams"] + + +class TaskGroupCreateParams(TypedDict, total=False): + metadata: Optional[Dict[str, Union[str, float, bool]]] + """User-provided metadata stored with the task group.""" diff --git a/src/parallel/types/beta/task_group_events_params.py b/src/parallel/types/beta/task_group_events_params.py new file mode 100644 index 0000000..15f0d00 --- /dev/null +++ b/src/parallel/types/beta/task_group_events_params.py @@ -0,0 +1,16 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Optional +from typing_extensions import Annotated, TypedDict + +from ..._utils import PropertyInfo + +__all__ = ["TaskGroupEventsParams"] + + +class TaskGroupEventsParams(TypedDict, total=False): + last_event_id: Optional[str] + + api_timeout: Annotated[Optional[float], PropertyInfo(alias="timeout")] diff --git a/src/parallel/types/beta/task_group_events_response.py b/src/parallel/types/beta/task_group_events_response.py new file mode 100644 index 0000000..cbb6522 --- /dev/null +++ b/src/parallel/types/beta/task_group_events_response.py @@ -0,0 +1,28 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import Union +from typing_extensions import Literal, Annotated, TypeAlias + +from ..._utils import PropertyInfo +from ..._models import BaseModel +from .error_event import ErrorEvent +from .task_run_event import TaskRunEvent +from .task_group_status import TaskGroupStatus + +__all__ = ["TaskGroupEventsResponse", "TaskGroupStatusEvent"] + + +class TaskGroupStatusEvent(BaseModel): + event_id: str + """Cursor to resume the event stream.""" + + status: TaskGroupStatus + """Status of a task group.""" + + type: Literal["task_group_status"] + """Event type; always 'task_group_status'.""" + + +TaskGroupEventsResponse: TypeAlias = Annotated[ + Union[TaskGroupStatusEvent, TaskRunEvent, ErrorEvent], PropertyInfo(discriminator="type") +] diff --git a/src/parallel/types/beta/task_group_get_runs_params.py b/src/parallel/types/beta/task_group_get_runs_params.py new file mode 100644 index 0000000..b6b1ef7 --- /dev/null +++ b/src/parallel/types/beta/task_group_get_runs_params.py @@ -0,0 +1,18 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Optional +from typing_extensions import Literal, TypedDict + +__all__ = ["TaskGroupGetRunsParams"] + + +class TaskGroupGetRunsParams(TypedDict, total=False): + include_input: bool + + include_output: bool + + last_event_id: Optional[str] + + status: Optional[Literal["queued", "action_required", "running", "completed", "failed", "cancelling", "cancelled"]] diff --git a/src/parallel/types/beta/task_group_get_runs_response.py b/src/parallel/types/beta/task_group_get_runs_response.py new file mode 100644 index 0000000..b287dcb --- /dev/null +++ b/src/parallel/types/beta/task_group_get_runs_response.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Union +from typing_extensions import Annotated, TypeAlias + +from ..._utils import PropertyInfo +from .error_event import ErrorEvent +from .task_run_event import TaskRunEvent + +__all__ = ["TaskGroupGetRunsResponse"] + +TaskGroupGetRunsResponse: TypeAlias = Annotated[Union[TaskRunEvent, ErrorEvent], PropertyInfo(discriminator="type")] diff --git a/src/parallel/types/beta/task_group_run_response.py b/src/parallel/types/beta/task_group_run_response.py new file mode 100644 index 0000000..98161bb --- /dev/null +++ b/src/parallel/types/beta/task_group_run_response.py @@ -0,0 +1,30 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional + +from ..._models import BaseModel +from .task_group_status import TaskGroupStatus + +__all__ = ["TaskGroupRunResponse"] + + +class TaskGroupRunResponse(BaseModel): + event_cursor: Optional[str] = None + """ + Cursor for these runs in the event stream at + taskgroup/events?last_event_id=. Empty for the first runs in the + group. + """ + + run_cursor: Optional[str] = None + """ + Cursor for these runs in the run stream at + taskgroup/runs?last_event_id=. Empty for the first runs in the + group. + """ + + run_ids: List[str] + """IDs of the newly created runs.""" + + status: TaskGroupStatus + """Status of a task group.""" diff --git a/src/parallel/types/beta/task_group_status.py b/src/parallel/types/beta/task_group_status.py new file mode 100644 index 0000000..221b527 --- /dev/null +++ b/src/parallel/types/beta/task_group_status.py @@ -0,0 +1,27 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import Dict, Optional + +from ..._models import BaseModel + +__all__ = ["TaskGroupStatus"] + + +class TaskGroupStatus(BaseModel): + is_active: bool + """True if at least one run in the group is currently active, i.e. + + status is one of {'cancelling', 'queued', 'running'}. + """ + + modified_at: Optional[str] = None + """Timestamp of the last status update to the group, as an RFC 3339 string.""" + + num_task_runs: int + """Number of task runs in the group.""" + + status_message: Optional[str] = None + """Human-readable status message for the group.""" + + task_run_status_counts: Dict[str, int] + """Number of task runs with each status.""" diff --git a/src/parallel/types/beta/task_run_create_params.py b/src/parallel/types/beta/task_run_create_params.py new file mode 100644 index 0000000..80bd0da --- /dev/null +++ b/src/parallel/types/beta/task_run_create_params.py @@ -0,0 +1,70 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Dict, List, Union, Iterable, Optional +from typing_extensions import Required, Annotated, TypedDict + +from ..._utils import PropertyInfo +from .webhook_param import WebhookParam +from ..task_spec_param import TaskSpecParam +from .mcp_server_param import McpServerParam +from .parallel_beta_param import ParallelBetaParam +from ..shared_params.source_policy import SourcePolicy + +__all__ = ["TaskRunCreateParams"] + + +class TaskRunCreateParams(TypedDict, total=False): + input: Required[Union[str, Dict[str, object]]] + """Input to the task, either text or a JSON object.""" + + processor: Required[str] + """Processor to use for the task.""" + + enable_events: Optional[bool] + """Controls tracking of task run execution progress. + + When set to true, progress events are recorded and can be accessed via the + [Task Run events](https://platform.parallel.ai/api-reference) endpoint. When + false, no progress events are tracked. Note that progress tracking cannot be + enabled after a run has been created. The flag is set to true by default for + premium processors (pro and above). This feature is not available via the Python + SDK. To enable this feature in your API requests, specify the `parallel-beta` + header with `events-sse-2025-07-24` value. + """ + + mcp_servers: Optional[Iterable[McpServerParam]] + """ + Optional list of MCP servers to use for the run. This feature is not available + via the Python SDK. To enable this feature in your API requests, specify the + `parallel-beta` header with `mcp-server-2025-07-17` value. + """ + + metadata: Optional[Dict[str, Union[str, float, bool]]] + """User-provided metadata stored with the run. + + Keys and values must be strings with a maximum length of 16 and 512 characters + respectively. + """ + + source_policy: Optional[SourcePolicy] + """Source policy for web search results. + + This policy governs which sources are allowed/disallowed in results. + """ + + task_spec: Optional[TaskSpecParam] + """Specification for a task. + + Auto output schemas can be specified by setting `output_schema={"type":"auto"}`. + Not specifying a TaskSpec is the same as setting an auto output schema. + + For convenience bare strings are also accepted as input or output schemas. 
+ """ + + webhook: Optional[WebhookParam] + """Webhooks for Task Runs.""" + + betas: Annotated[List[ParallelBetaParam], PropertyInfo(alias="parallel-beta")] + """Optional header to specify the beta version(s) to enable.""" diff --git a/src/parallel/types/beta/task_run_event.py b/src/parallel/types/beta/task_run_event.py new file mode 100644 index 0000000..7d809fe --- /dev/null +++ b/src/parallel/types/beta/task_run_event.py @@ -0,0 +1,32 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Union, Optional +from typing_extensions import Literal, Annotated, TypeAlias + +from ..._utils import PropertyInfo +from ..._models import BaseModel +from ..task_run import TaskRun +from .beta_run_input import BetaRunInput +from ..task_run_json_output import TaskRunJsonOutput +from ..task_run_text_output import TaskRunTextOutput + +__all__ = ["TaskRunEvent", "Output"] + +Output: TypeAlias = Annotated[Union[TaskRunTextOutput, TaskRunJsonOutput, None], PropertyInfo(discriminator="type")] + + +class TaskRunEvent(BaseModel): + event_id: Optional[str] = None + """Cursor to resume the event stream. Always empty for non Task Group runs.""" + + run: TaskRun + """Status of a task run.""" + + type: Literal["task_run.state"] + """Event type; always 'task_run.state'.""" + + input: Optional[BetaRunInput] = None + """Task run input with additional beta fields.""" + + output: Optional[Output] = None + """Output from the run; included only if requested and if status == `completed`.""" diff --git a/src/parallel/types/beta/task_run_events_response.py b/src/parallel/types/beta/task_run_events_response.py new file mode 100644 index 0000000..27ea82f --- /dev/null +++ b/src/parallel/types/beta/task_run_events_response.py @@ -0,0 +1,58 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import List, Union, Optional +from typing_extensions import Literal, Annotated, TypeAlias + +from ..._utils import PropertyInfo +from ..._models import BaseModel +from .error_event import ErrorEvent +from .task_run_event import TaskRunEvent + +__all__ = [ + "TaskRunEventsResponse", + "TaskRunProgressStatsEvent", + "TaskRunProgressStatsEventSourceStats", + "TaskRunProgressMessageEvent", +] + + +class TaskRunProgressStatsEventSourceStats(BaseModel): + num_sources_considered: Optional[int] = None + """Number of sources considered in processing the task.""" + + num_sources_read: Optional[int] = None + """Number of sources read in processing the task.""" + + sources_read_sample: Optional[List[str]] = None + """A sample of URLs of sources read in processing the task.""" + + +class TaskRunProgressStatsEvent(BaseModel): + source_stats: TaskRunProgressStatsEventSourceStats + """Source stats for a task run.""" + + type: Literal["task_run.progress_stats"] + """Event type; always 'task_run.progress_stats'.""" + + +class TaskRunProgressMessageEvent(BaseModel): + message: str + """Progress update message.""" + + timestamp: Optional[str] = None + """Timestamp of the message.""" + + type: Literal[ + "task_run.progress_msg.plan", + "task_run.progress_msg.search", + "task_run.progress_msg.result", + "task_run.progress_msg.tool_call", + "task_run.progress_msg.exec_status", + ] + """Event type; always starts with 'task_run.progress_msg'.""" + + +TaskRunEventsResponse: TypeAlias = Annotated[ + Union[TaskRunProgressStatsEvent, TaskRunProgressMessageEvent, TaskRunEvent, ErrorEvent], + PropertyInfo(discriminator="type"), +] diff --git a/src/parallel/types/beta/task_run_result_params.py b/src/parallel/types/beta/task_run_result_params.py new file mode 100644 index 0000000..c48ef15 --- /dev/null +++ b/src/parallel/types/beta/task_run_result_params.py @@ -0,0 +1,18 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import List +from typing_extensions import Annotated, TypedDict + +from ..._utils import PropertyInfo +from .parallel_beta_param import ParallelBetaParam + +__all__ = ["TaskRunResultParams"] + + +class TaskRunResultParams(TypedDict, total=False): + api_timeout: Annotated[int, PropertyInfo(alias="timeout")] + + betas: Annotated[List[ParallelBetaParam], PropertyInfo(alias="parallel-beta")] + """Optional header to specify the beta version(s) to enable.""" diff --git a/src/parallel/types/beta/web_search_result.py b/src/parallel/types/beta/web_search_result.py new file mode 100644 index 0000000..cbc7d5e --- /dev/null +++ b/src/parallel/types/beta/web_search_result.py @@ -0,0 +1,18 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List + +from ..._models import BaseModel + +__all__ = ["WebSearchResult"] + + +class WebSearchResult(BaseModel): + excerpts: List[str] + """Text excerpts from the search result which are relevant to the request.""" + + title: str + """Title of the search result.""" + + url: str + """URL associated with the search result.""" diff --git a/src/parallel/types/beta/webhook.py b/src/parallel/types/beta/webhook.py new file mode 100644 index 0000000..afa1b5b --- /dev/null +++ b/src/parallel/types/beta/webhook.py @@ -0,0 +1,16 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import List, Optional +from typing_extensions import Literal + +from ..._models import BaseModel + +__all__ = ["Webhook"] + + +class Webhook(BaseModel): + url: str + """URL for the webhook.""" + + event_types: Optional[List[Literal["task_run.status"]]] = None + """Event types to send the webhook notifications for.""" diff --git a/src/parallel/types/beta/webhook_param.py b/src/parallel/types/beta/webhook_param.py new file mode 100644 index 0000000..b7e6cd6 --- /dev/null +++ b/src/parallel/types/beta/webhook_param.py @@ -0,0 +1,16 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import List +from typing_extensions import Literal, Required, TypedDict + +__all__ = ["WebhookParam"] + + +class WebhookParam(TypedDict, total=False): + url: Required[str] + """URL for the webhook.""" + + event_types: List[Literal["task_run.status"]] + """Event types to send the webhook notifications for.""" diff --git a/src/parallel/types/field_basis.py b/src/parallel/types/field_basis.py new file mode 100644 index 0000000..de4d4a1 --- /dev/null +++ b/src/parallel/types/field_basis.py @@ -0,0 +1,25 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional + +from .._models import BaseModel +from .citation import Citation + +__all__ = ["FieldBasis"] + + +class FieldBasis(BaseModel): + field: str + """Name of the output field.""" + + reasoning: str + """Reasoning for the output field.""" + + citations: Optional[List[Citation]] = None + """List of citations supporting the output field.""" + + confidence: Optional[str] = None + """Confidence level for the output field. + + Only certain processors provide confidence levels. + """ diff --git a/src/parallel/types/json_schema.py b/src/parallel/types/json_schema.py new file mode 100644 index 0000000..7bff20b --- /dev/null +++ b/src/parallel/types/json_schema.py @@ -0,0 +1,16 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Dict, Optional +from typing_extensions import Literal + +from .._models import BaseModel + +__all__ = ["JsonSchema"] + + +class JsonSchema(BaseModel): + json_schema: Dict[str, object] + """A JSON Schema object. Only a subset of JSON Schema is supported.""" + + type: Optional[Literal["json"]] = None + """The type of schema being defined. Always `json`.""" diff --git a/src/parallel/types/json_schema_param.py b/src/parallel/types/json_schema_param.py index 6b04665..90bae27 100644 --- a/src/parallel/types/json_schema_param.py +++ b/src/parallel/types/json_schema_param.py @@ -2,13 +2,14 @@ from __future__ import annotations +from typing import Dict from typing_extensions import Literal, Required, TypedDict __all__ = ["JsonSchemaParam"] class JsonSchemaParam(TypedDict, total=False): - json_schema: Required[object] + json_schema: Required[Dict[str, object]] """A JSON Schema object. Only a subset of JSON Schema is supported.""" type: Literal["json"] diff --git a/src/parallel/types/shared/__init__.py b/src/parallel/types/shared/__init__.py index ea1f600..c7a4d05 100644 --- a/src/parallel/types/shared/__init__.py +++ b/src/parallel/types/shared/__init__.py @@ -1,4 +1,6 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+from .warning import Warning as Warning from .error_object import ErrorObject as ErrorObject +from .source_policy import SourcePolicy as SourcePolicy from .error_response import ErrorResponse as ErrorResponse diff --git a/src/parallel/types/shared/error_object.py b/src/parallel/types/shared/error_object.py index 5dcece5..52b9656 100644 --- a/src/parallel/types/shared/error_object.py +++ b/src/parallel/types/shared/error_object.py @@ -1,6 +1,6 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. -from typing import Optional +from typing import Dict, Optional from ..._models import BaseModel @@ -14,5 +14,5 @@ class ErrorObject(BaseModel): ref_id: str """Reference ID for the error.""" - detail: Optional[object] = None + detail: Optional[Dict[str, object]] = None """Optional detail supporting the error.""" diff --git a/src/parallel/types/shared/error_response.py b/src/parallel/types/shared/error_response.py index 6123db5..4a72af6 100644 --- a/src/parallel/types/shared/error_response.py +++ b/src/parallel/types/shared/error_response.py @@ -1,6 +1,5 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. -from typing import Optional from typing_extensions import Literal from ..._models import BaseModel @@ -13,5 +12,5 @@ class ErrorResponse(BaseModel): error: ErrorObject """An error message.""" - type: Optional[Literal["error"]] = None + type: Literal["error"] """Always 'error'.""" diff --git a/src/parallel/types/shared/source_policy.py b/src/parallel/types/shared/source_policy.py new file mode 100644 index 0000000..50c38ca --- /dev/null +++ b/src/parallel/types/shared/source_policy.py @@ -0,0 +1,21 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional + +from ..._models import BaseModel + +__all__ = ["SourcePolicy"] + + +class SourcePolicy(BaseModel): + exclude_domains: Optional[List[str]] = None + """List of domains to exclude from results. + + If specified, sources from these domains will be excluded. + """ + + include_domains: Optional[List[str]] = None + """List of domains to restrict the results to. + + If specified, only sources from these domains will be included. + """ diff --git a/src/parallel/types/shared/warning.py b/src/parallel/types/shared/warning.py new file mode 100644 index 0000000..84a2b84 --- /dev/null +++ b/src/parallel/types/shared/warning.py @@ -0,0 +1,22 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Dict, Optional +from typing_extensions import Literal + +from ..._models import BaseModel + +__all__ = ["Warning"] + + +class Warning(BaseModel): + message: str + """Human-readable message.""" + + type: Literal["spec_validation_warning", "input_validation_warning", "warning"] + """Type of warning. + + Note that adding new warning types is considered a backward-compatible change. + """ + + detail: Optional[Dict[str, object]] = None + """Optional detail supporting the warning.""" diff --git a/src/parallel/types/shared_params/__init__.py b/src/parallel/types/shared_params/__init__.py new file mode 100644 index 0000000..1ab16e6 --- /dev/null +++ b/src/parallel/types/shared_params/__init__.py @@ -0,0 +1,3 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from .source_policy import SourcePolicy as SourcePolicy diff --git a/src/parallel/types/shared_params/source_policy.py b/src/parallel/types/shared_params/source_policy.py new file mode 100644 index 0000000..0a5d5ec --- /dev/null +++ b/src/parallel/types/shared_params/source_policy.py @@ -0,0 +1,22 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import List +from typing_extensions import TypedDict + +__all__ = ["SourcePolicy"] + + +class SourcePolicy(TypedDict, total=False): + exclude_domains: List[str] + """List of domains to exclude from results. + + If specified, sources from these domains will be excluded. + """ + + include_domains: List[str] + """List of domains to restrict the results to. + + If specified, only sources from these domains will be included. + """ diff --git a/src/parallel/types/task_run.py b/src/parallel/types/task_run.py index 3db862c..4b5e118 100644 --- a/src/parallel/types/task_run.py +++ b/src/parallel/types/task_run.py @@ -3,37 +3,26 @@ from typing import Dict, List, Union, Optional from typing_extensions import Literal +from pydantic import Field as FieldInfo + from .._models import BaseModel +from .shared.warning import Warning +from .shared.error_object import ErrorObject __all__ = [ "TaskRun", - "Warning", # for backwards compatibility with v0.1.3 ] -class Warning(BaseModel): - message: str - """Human-readable message.""" - - type: str - """Type of warning. - - Note that adding new warning types is considered a backward-compatible change. - """ - - detail: Optional[object] = None - """Optional detail supporting the warning.""" - - class TaskRun(BaseModel): created_at: Optional[str] = None """Timestamp of the creation of the task, as an RFC 3339 string.""" is_active: bool - """Whether the run is currently active; i.e. + """Whether the run is currently active, i.e. - status is one of {'running', 'queued', 'cancelling'}. + status is one of {'cancelling', 'queued', 'running'}. """ modified_at: Optional[str] = None @@ -48,8 +37,14 @@ class TaskRun(BaseModel): status: Literal["queued", "action_required", "running", "completed", "failed", "cancelling", "cancelled"] """Status of the run.""" + error: Optional[ErrorObject] = None + """An error message.""" + metadata: Optional[Dict[str, Union[str, float, bool]]] = None """User-provided metadata stored with the run.""" + task_group_id: Optional[str] = FieldInfo(alias="taskgroup_id", default=None) + """ID of the taskgroup to which the run belongs.""" + warnings: Optional[List[Warning]] = None - """Warnings for the run.""" + """Warnings for the run, if any.""" diff --git a/src/parallel/types/task_run_create_params.py b/src/parallel/types/task_run_create_params.py index a6f39e0..6c81803 100644 --- a/src/parallel/types/task_run_create_params.py +++ b/src/parallel/types/task_run_create_params.py @@ -6,12 +6,13 @@ from typing_extensions import Required, TypedDict from .task_spec_param import TaskSpecParam +from .shared_params.source_policy import SourcePolicy __all__ = ["TaskRunCreateParams"] class TaskRunCreateParams(TypedDict, total=False): - input: Required[Union[str, object]] + input: Required[Union[str, Dict[str, object]]] """Input to the task, either text or a JSON object.""" processor: Required[str] @@ -24,9 +25,17 @@ class TaskRunCreateParams(TypedDict, total=False): respectively. """ + source_policy: Optional[SourcePolicy] + """Source policy for web search results. 
+ + This policy governs which sources are allowed/disallowed in results. + """ + task_spec: Optional[TaskSpecParam] """Specification for a task. - For convenience we allow bare strings as input or output schemas, which is - equivalent to a text schema with the same description. + Auto output schemas can be specified by setting `output_schema={"type":"auto"}`. + Not specifying a TaskSpec is the same as setting an auto output schema. + + For convenience bare strings are also accepted as input or output schemas. """ diff --git a/src/parallel/types/task_run_json_output.py b/src/parallel/types/task_run_json_output.py index 48f2cf9..ee63e60 100644 --- a/src/parallel/types/task_run_json_output.py +++ b/src/parallel/types/task_run_json_output.py @@ -1,36 +1,19 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. -from typing import List, Optional +from typing import Dict, List, Optional from typing_extensions import Literal from .._models import BaseModel -from .citation import Citation +from .field_basis import FieldBasis -__all__ = ["TaskRunJsonOutput", "Basis"] - - -class Basis(BaseModel): - field: str - """Name of the output field.""" - - reasoning: str - """Reasoning for the output field.""" - - citations: Optional[List[Citation]] = None - """List of citations supporting the output field.""" - - confidence: Optional[str] = None - """Confidence level for the output field. - - Only certain processors provide confidence levels. - """ +__all__ = ["TaskRunJsonOutput"] class TaskRunJsonOutput(BaseModel): - basis: List[Basis] + basis: List[FieldBasis] """Basis for each top-level field in the JSON output.""" - content: object + content: Dict[str, object] """ Output from the task as a native JSON object, as determined by the output schema of the task spec. @@ -41,3 +24,23 @@ class TaskRunJsonOutput(BaseModel): The type of output being returned, as determined by the output schema of the task spec. """ + + beta_fields: Optional[Dict[str, object]] = None + """Additional fields from beta features used in this task run. + + When beta features are specified during both task run creation and result + retrieval, this field will be empty and instead the relevant beta attributes + will be directly included in the `BetaTaskRunJsonOutput` or corresponding output + type. However, if beta features were specified during task run creation but not + during result retrieval, this field will contain the dump of fields from those + beta features. Each key represents the beta feature version (one amongst + parallel-beta headers) and the values correspond to the beta feature attributes, + if any. For now, only MCP server beta features have attributes. For example, + `{mcp-server-2025-07-17: [{'server_name':'mcp_server', 'tool_call_id': 'tc_123', ...}]}}` + """ + + output_schema: Optional[Dict[str, object]] = None + """Output schema for the Task Run. + + Populated only if the task was executed with an auto schema. + """ diff --git a/src/parallel/types/task_run_result.py b/src/parallel/types/task_run_result.py index 9c3cce4..7b52026 100644 --- a/src/parallel/types/task_run_result.py +++ b/src/parallel/types/task_run_result.py @@ -1,8 +1,9 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
from typing import Union -from typing_extensions import TypeAlias +from typing_extensions import Annotated, TypeAlias +from .._utils import PropertyInfo from .._models import BaseModel from .citation import Citation from .task_run import TaskRun @@ -40,7 +41,7 @@ """This is deprecated, `TaskRunJsonOutput` should be used instead""" -Output: TypeAlias = Union[TaskRunTextOutput, TaskRunJsonOutput] +Output: TypeAlias = Annotated[Union[TaskRunTextOutput, TaskRunJsonOutput], PropertyInfo(discriminator="type")] class TaskRunResult(BaseModel): @@ -48,4 +49,4 @@ class TaskRunResult(BaseModel): """Output from the task conforming to the output schema.""" run: TaskRun - """Status of a task.""" + """Status of a task run.""" diff --git a/src/parallel/types/task_run_text_output.py b/src/parallel/types/task_run_text_output.py index de50939..ef38d7e 100644 --- a/src/parallel/types/task_run_text_output.py +++ b/src/parallel/types/task_run_text_output.py @@ -1,33 +1,16 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. -from typing import List, Optional +from typing import Dict, List, Optional from typing_extensions import Literal from .._models import BaseModel -from .citation import Citation +from .field_basis import FieldBasis -__all__ = ["TaskRunTextOutput", "Basis"] - - -class Basis(BaseModel): - field: str - """Name of the output field.""" - - reasoning: str - """Reasoning for the output field.""" - - citations: Optional[List[Citation]] = None - """List of citations supporting the output field.""" - - confidence: Optional[str] = None - """Confidence level for the output field. - - Only certain processors provide confidence levels. - """ +__all__ = ["TaskRunTextOutput"] class TaskRunTextOutput(BaseModel): - basis: List[Basis] + basis: List[FieldBasis] """Basis for the output. The basis has a single field 'output'.""" content: str @@ -38,3 +21,17 @@ class TaskRunTextOutput(BaseModel): The type of output being returned, as determined by the output schema of the task spec. """ + + beta_fields: Optional[Dict[str, object]] = None + """Additional fields from beta features used in this task run. + + When beta features are specified during both task run creation and result + retrieval, this field will be empty and instead the relevant beta attributes + will be directly included in the `BetaTaskRunJsonOutput` or corresponding output + type. However, if beta features were specified during task run creation but not + during result retrieval, this field will contain the dump of fields from those + beta features. Each key represents the beta feature version (one amongst + parallel-beta headers) and the values correspond to the beta feature attributes, + if any. For now, only MCP server beta features have attributes. For example, + `{mcp-server-2025-07-17: [{'server_name':'mcp_server', 'tool_call_id': 'tc_123', ...}]}}` + """ diff --git a/src/parallel/types/task_spec.py b/src/parallel/types/task_spec.py new file mode 100644 index 0000000..fc7aefe --- /dev/null +++ b/src/parallel/types/task_spec.py @@ -0,0 +1,31 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import Union, Optional +from typing_extensions import TypeAlias + +from .._models import BaseModel +from .auto_schema import AutoSchema +from .json_schema import JsonSchema +from .text_schema import TextSchema + +__all__ = ["TaskSpec", "OutputSchema", "InputSchema"] + +OutputSchema: TypeAlias = Union[JsonSchema, TextSchema, AutoSchema, str] + +InputSchema: TypeAlias = Union[str, JsonSchema, TextSchema, None] + + +class TaskSpec(BaseModel): + output_schema: OutputSchema + """JSON schema or text fully describing the desired output from the task. + + Descriptions of output fields will determine the form and content of the + response. A bare string is equivalent to a text schema with the same + description. + """ + + input_schema: Optional[InputSchema] = None + """Optional JSON schema or text description of expected input to the task. + + A bare string is equivalent to a text schema with the same description. + """ diff --git a/src/parallel/types/task_spec_param.py b/src/parallel/types/task_spec_param.py index 8ab84ed..ae434bf 100644 --- a/src/parallel/types/task_spec_param.py +++ b/src/parallel/types/task_spec_param.py @@ -7,14 +7,15 @@ from pydantic import BaseModel +from .auto_schema_param import AutoSchemaParam from .json_schema_param import JsonSchemaParam from .text_schema_param import TextSchemaParam __all__ = ["TaskSpecParam", "OutputSchema", "InputSchema"] -OutputSchema: TypeAlias = Union[JsonSchemaParam, TextSchemaParam, str] +OutputSchema: TypeAlias = Union[JsonSchemaParam, TextSchemaParam, AutoSchemaParam, str] -InputSchema: TypeAlias = Union[JsonSchemaParam, TextSchemaParam, str] +InputSchema: TypeAlias = Union[str, JsonSchemaParam, TextSchemaParam] OutputT = TypeVar("OutputT", bound=BaseModel) diff --git a/src/parallel/types/text_schema.py b/src/parallel/types/text_schema.py new file mode 100644 index 0000000..22da813 --- /dev/null +++ b/src/parallel/types/text_schema.py @@ -0,0 +1,16 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from typing_extensions import Literal + +from .._models import BaseModel + +__all__ = ["TextSchema"] + + +class TextSchema(BaseModel): + description: str + """A text description of the desired output from the task.""" + + type: Optional[Literal["text"]] = None + """The type of schema being defined. Always `text`.""" diff --git a/tests/api_resources/beta/__init__.py b/tests/api_resources/beta/__init__.py new file mode 100644 index 0000000..fd8019a --- /dev/null +++ b/tests/api_resources/beta/__init__.py @@ -0,0 +1 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. diff --git a/tests/api_resources/beta/test_task_group.py b/tests/api_resources/beta/test_task_group.py new file mode 100644 index 0000000..150452f --- /dev/null +++ b/tests/api_resources/beta/test_task_group.py @@ -0,0 +1,613 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from parallel import Parallel, AsyncParallel +from tests.utils import assert_matches_type +from parallel.types.beta import ( + TaskGroup, + TaskGroupRunResponse, +) + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestTaskGroup: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @parametrize + def test_method_create(self, client: Parallel) -> None: + task_group = client.beta.task_group.create() + assert_matches_type(TaskGroup, task_group, path=["response"]) + + @parametrize + def test_method_create_with_all_params(self, client: Parallel) -> None: + task_group = client.beta.task_group.create( + metadata={"foo": "string"}, + ) + assert_matches_type(TaskGroup, task_group, path=["response"]) + + @parametrize + def test_raw_response_create(self, client: Parallel) -> None: + response = client.beta.task_group.with_raw_response.create() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + task_group = response.parse() + assert_matches_type(TaskGroup, task_group, path=["response"]) + + @parametrize + def test_streaming_response_create(self, client: Parallel) -> None: + with client.beta.task_group.with_streaming_response.create() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + task_group = response.parse() + assert_matches_type(TaskGroup, task_group, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @parametrize + def test_method_retrieve(self, client: Parallel) -> None: + task_group = client.beta.task_group.retrieve( + "taskgroup_id", + ) + assert_matches_type(TaskGroup, task_group, path=["response"]) + + @parametrize + def test_raw_response_retrieve(self, client: Parallel) -> None: + response = client.beta.task_group.with_raw_response.retrieve( + "taskgroup_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + task_group = response.parse() + assert_matches_type(TaskGroup, task_group, path=["response"]) + + @parametrize + def test_streaming_response_retrieve(self, client: Parallel) -> None: + with client.beta.task_group.with_streaming_response.retrieve( + "taskgroup_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + task_group = response.parse() + assert_matches_type(TaskGroup, task_group, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @parametrize + def test_path_params_retrieve(self, client: Parallel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"): + client.beta.task_group.with_raw_response.retrieve( + "", + ) + + @parametrize + def test_method_add_runs(self, client: Parallel) -> None: + task_group = client.beta.task_group.add_runs( + task_group_id="taskgroup_id", + inputs=[ + { + "input": "What was the GDP of France in 2023?", + "processor": "base", + } + ], + ) + assert_matches_type(TaskGroupRunResponse, task_group, path=["response"]) + + @parametrize + def test_method_add_runs_with_all_params(self, client: Parallel) -> None: + task_group = client.beta.task_group.add_runs( + task_group_id="taskgroup_id", + inputs=[ + { + "input": "What was the GDP of France in 2023?", + "processor": "base", + 
"enable_events": True, + "mcp_servers": [ + { + "name": "name", + "url": "url", + "allowed_tools": ["string"], + "headers": {"foo": "string"}, + "type": "url", + } + ], + "metadata": {"foo": "string"}, + "source_policy": { + "exclude_domains": ["string"], + "include_domains": ["string"], + }, + "task_spec": { + "output_schema": { + "json_schema": { + "additionalProperties": "bar", + "properties": "bar", + "required": "bar", + "type": "bar", + }, + "type": "json", + }, + "input_schema": "string", + }, + "webhook": { + "url": "url", + "event_types": ["task_run.status"], + }, + } + ], + default_task_spec={ + "output_schema": { + "json_schema": { + "additionalProperties": "bar", + "properties": "bar", + "required": "bar", + "type": "bar", + }, + "type": "json", + }, + "input_schema": "string", + }, + betas=["mcp-server-2025-07-17"], + ) + assert_matches_type(TaskGroupRunResponse, task_group, path=["response"]) + + @parametrize + def test_raw_response_add_runs(self, client: Parallel) -> None: + response = client.beta.task_group.with_raw_response.add_runs( + task_group_id="taskgroup_id", + inputs=[ + { + "input": "What was the GDP of France in 2023?", + "processor": "base", + } + ], + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + task_group = response.parse() + assert_matches_type(TaskGroupRunResponse, task_group, path=["response"]) + + @parametrize + def test_streaming_response_add_runs(self, client: Parallel) -> None: + with client.beta.task_group.with_streaming_response.add_runs( + task_group_id="taskgroup_id", + inputs=[ + { + "input": "What was the GDP of France in 2023?", + "processor": "base", + } + ], + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + task_group = response.parse() + assert_matches_type(TaskGroupRunResponse, task_group, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @parametrize + def test_path_params_add_runs(self, client: Parallel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"): + client.beta.task_group.with_raw_response.add_runs( + task_group_id="", + inputs=[ + { + "input": "What was the GDP of France in 2023?", + "processor": "base", + } + ], + ) + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_method_events(self, client: Parallel) -> None: + task_group_stream = client.beta.task_group.events( + task_group_id="taskgroup_id", + ) + task_group_stream.response.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_method_events_with_all_params(self, client: Parallel) -> None: + task_group_stream = client.beta.task_group.events( + task_group_id="taskgroup_id", + last_event_id="last_event_id", + api_timeout=0, + ) + task_group_stream.response.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_raw_response_events(self, client: Parallel) -> None: + response = client.beta.task_group.with_raw_response.events( + task_group_id="taskgroup_id", + ) + + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + stream = response.parse() + stream.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_streaming_response_events(self, client: Parallel) -> None: + with 
client.beta.task_group.with_streaming_response.events( + task_group_id="taskgroup_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + stream = response.parse() + stream.close() + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_path_params_events(self, client: Parallel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"): + client.beta.task_group.with_raw_response.events( + task_group_id="", + ) + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_method_get_runs(self, client: Parallel) -> None: + task_group_stream = client.beta.task_group.get_runs( + task_group_id="taskgroup_id", + ) + task_group_stream.response.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_method_get_runs_with_all_params(self, client: Parallel) -> None: + task_group_stream = client.beta.task_group.get_runs( + task_group_id="taskgroup_id", + include_input=True, + include_output=True, + last_event_id="last_event_id", + status="queued", + ) + task_group_stream.response.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_raw_response_get_runs(self, client: Parallel) -> None: + response = client.beta.task_group.with_raw_response.get_runs( + task_group_id="taskgroup_id", + ) + + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + stream = response.parse() + stream.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_streaming_response_get_runs(self, client: Parallel) -> None: + with client.beta.task_group.with_streaming_response.get_runs( + task_group_id="taskgroup_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + stream = response.parse() + stream.close() + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_path_params_get_runs(self, client: Parallel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"): + client.beta.task_group.with_raw_response.get_runs( + task_group_id="", + ) + + +class TestAsyncTaskGroup: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @parametrize + async def test_method_create(self, async_client: AsyncParallel) -> None: + task_group = await async_client.beta.task_group.create() + assert_matches_type(TaskGroup, task_group, path=["response"]) + + @parametrize + async def test_method_create_with_all_params(self, async_client: AsyncParallel) -> None: + task_group = await async_client.beta.task_group.create( + metadata={"foo": "string"}, + ) + assert_matches_type(TaskGroup, task_group, path=["response"]) + + @parametrize + async def test_raw_response_create(self, async_client: AsyncParallel) -> None: + response = await async_client.beta.task_group.with_raw_response.create() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + task_group = await response.parse() + 
assert_matches_type(TaskGroup, task_group, path=["response"]) + + @parametrize + async def test_streaming_response_create(self, async_client: AsyncParallel) -> None: + async with async_client.beta.task_group.with_streaming_response.create() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + task_group = await response.parse() + assert_matches_type(TaskGroup, task_group, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @parametrize + async def test_method_retrieve(self, async_client: AsyncParallel) -> None: + task_group = await async_client.beta.task_group.retrieve( + "taskgroup_id", + ) + assert_matches_type(TaskGroup, task_group, path=["response"]) + + @parametrize + async def test_raw_response_retrieve(self, async_client: AsyncParallel) -> None: + response = await async_client.beta.task_group.with_raw_response.retrieve( + "taskgroup_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + task_group = await response.parse() + assert_matches_type(TaskGroup, task_group, path=["response"]) + + @parametrize + async def test_streaming_response_retrieve(self, async_client: AsyncParallel) -> None: + async with async_client.beta.task_group.with_streaming_response.retrieve( + "taskgroup_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + task_group = await response.parse() + assert_matches_type(TaskGroup, task_group, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @parametrize + async def test_path_params_retrieve(self, async_client: AsyncParallel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"): + await async_client.beta.task_group.with_raw_response.retrieve( + "", + ) + + @parametrize + async def test_method_add_runs(self, async_client: AsyncParallel) -> None: + task_group = await async_client.beta.task_group.add_runs( + task_group_id="taskgroup_id", + inputs=[ + { + "input": "What was the GDP of France in 2023?", + "processor": "base", + } + ], + ) + assert_matches_type(TaskGroupRunResponse, task_group, path=["response"]) + + @parametrize + async def test_method_add_runs_with_all_params(self, async_client: AsyncParallel) -> None: + task_group = await async_client.beta.task_group.add_runs( + task_group_id="taskgroup_id", + inputs=[ + { + "input": "What was the GDP of France in 2023?", + "processor": "base", + "enable_events": True, + "mcp_servers": [ + { + "name": "name", + "url": "url", + "allowed_tools": ["string"], + "headers": {"foo": "string"}, + "type": "url", + } + ], + "metadata": {"foo": "string"}, + "source_policy": { + "exclude_domains": ["string"], + "include_domains": ["string"], + }, + "task_spec": { + "output_schema": { + "json_schema": { + "additionalProperties": "bar", + "properties": "bar", + "required": "bar", + "type": "bar", + }, + "type": "json", + }, + "input_schema": "string", + }, + "webhook": { + "url": "url", + "event_types": ["task_run.status"], + }, + } + ], + default_task_spec={ + "output_schema": { + "json_schema": { + "additionalProperties": "bar", + "properties": "bar", + "required": "bar", + "type": "bar", + }, + "type": "json", + }, + "input_schema": "string", + }, + betas=["mcp-server-2025-07-17"], + ) + assert_matches_type(TaskGroupRunResponse, task_group, path=["response"]) + + @parametrize + async def 
test_raw_response_add_runs(self, async_client: AsyncParallel) -> None: + response = await async_client.beta.task_group.with_raw_response.add_runs( + task_group_id="taskgroup_id", + inputs=[ + { + "input": "What was the GDP of France in 2023?", + "processor": "base", + } + ], + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + task_group = await response.parse() + assert_matches_type(TaskGroupRunResponse, task_group, path=["response"]) + + @parametrize + async def test_streaming_response_add_runs(self, async_client: AsyncParallel) -> None: + async with async_client.beta.task_group.with_streaming_response.add_runs( + task_group_id="taskgroup_id", + inputs=[ + { + "input": "What was the GDP of France in 2023?", + "processor": "base", + } + ], + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + task_group = await response.parse() + assert_matches_type(TaskGroupRunResponse, task_group, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @parametrize + async def test_path_params_add_runs(self, async_client: AsyncParallel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"): + await async_client.beta.task_group.with_raw_response.add_runs( + task_group_id="", + inputs=[ + { + "input": "What was the GDP of France in 2023?", + "processor": "base", + } + ], + ) + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_method_events(self, async_client: AsyncParallel) -> None: + task_group_stream = await async_client.beta.task_group.events( + task_group_id="taskgroup_id", + ) + await task_group_stream.response.aclose() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_method_events_with_all_params(self, async_client: AsyncParallel) -> None: + task_group_stream = await async_client.beta.task_group.events( + task_group_id="taskgroup_id", + last_event_id="last_event_id", + api_timeout=0, + ) + await task_group_stream.response.aclose() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_raw_response_events(self, async_client: AsyncParallel) -> None: + response = await async_client.beta.task_group.with_raw_response.events( + task_group_id="taskgroup_id", + ) + + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + stream = await response.parse() + await stream.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_streaming_response_events(self, async_client: AsyncParallel) -> None: + async with async_client.beta.task_group.with_streaming_response.events( + task_group_id="taskgroup_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + stream = await response.parse() + await stream.close() + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_path_params_events(self, async_client: AsyncParallel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"): + await async_client.beta.task_group.with_raw_response.events( + task_group_id="", + ) + + @pytest.mark.skip(reason="Prism 
doesn't support text/event-stream responses") + @parametrize + async def test_method_get_runs(self, async_client: AsyncParallel) -> None: + task_group_stream = await async_client.beta.task_group.get_runs( + task_group_id="taskgroup_id", + ) + await task_group_stream.response.aclose() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_method_get_runs_with_all_params(self, async_client: AsyncParallel) -> None: + task_group_stream = await async_client.beta.task_group.get_runs( + task_group_id="taskgroup_id", + include_input=True, + include_output=True, + last_event_id="last_event_id", + status="queued", + ) + await task_group_stream.response.aclose() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_raw_response_get_runs(self, async_client: AsyncParallel) -> None: + response = await async_client.beta.task_group.with_raw_response.get_runs( + task_group_id="taskgroup_id", + ) + + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + stream = await response.parse() + await stream.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_streaming_response_get_runs(self, async_client: AsyncParallel) -> None: + async with async_client.beta.task_group.with_streaming_response.get_runs( + task_group_id="taskgroup_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + stream = await response.parse() + await stream.close() + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_path_params_get_runs(self, async_client: AsyncParallel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"): + await async_client.beta.task_group.with_raw_response.get_runs( + task_group_id="", + ) diff --git a/tests/api_resources/beta/test_task_run.py b/tests/api_resources/beta/test_task_run.py new file mode 100644 index 0000000..7a188f6 --- /dev/null +++ b/tests/api_resources/beta/test_task_run.py @@ -0,0 +1,349 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from parallel import Parallel, AsyncParallel +from tests.utils import assert_matches_type +from parallel.types import TaskRun +from parallel.types.beta import BetaTaskRunResult + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestTaskRun: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @parametrize + def test_method_create(self, client: Parallel) -> None: + task_run = client.beta.task_run.create( + input="What was the GDP of France in 2023?", + processor="base", + ) + assert_matches_type(TaskRun, task_run, path=["response"]) + + @parametrize + def test_method_create_with_all_params(self, client: Parallel) -> None: + task_run = client.beta.task_run.create( + input="What was the GDP of France in 2023?", + processor="base", + enable_events=True, + mcp_servers=[ + { + "name": "name", + "url": "url", + "allowed_tools": ["string"], + "headers": {"foo": "string"}, + "type": "url", + } + ], + metadata={"foo": "string"}, + source_policy={ + "exclude_domains": ["string"], + "include_domains": ["string"], + }, + task_spec={ + "output_schema": { + "json_schema": { + "additionalProperties": "bar", + "properties": "bar", + "required": "bar", + "type": "bar", + }, + "type": "json", + }, + "input_schema": "string", + }, + webhook={ + "url": "url", + "event_types": ["task_run.status"], + }, + betas=["mcp-server-2025-07-17"], + ) + assert_matches_type(TaskRun, task_run, path=["response"]) + + @parametrize + def test_raw_response_create(self, client: Parallel) -> None: + response = client.beta.task_run.with_raw_response.create( + input="What was the GDP of France in 2023?", + processor="base", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + task_run = response.parse() + assert_matches_type(TaskRun, task_run, path=["response"]) + + @parametrize + def test_streaming_response_create(self, client: Parallel) -> None: + with client.beta.task_run.with_streaming_response.create( + input="What was the GDP of France in 2023?", + processor="base", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + task_run = response.parse() + assert_matches_type(TaskRun, task_run, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_method_events(self, client: Parallel) -> None: + task_run_stream = client.beta.task_run.events( + "run_id", + ) + task_run_stream.response.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_raw_response_events(self, client: Parallel) -> None: + response = client.beta.task_run.with_raw_response.events( + "run_id", + ) + + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + stream = response.parse() + stream.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_streaming_response_events(self, client: Parallel) -> None: + with client.beta.task_run.with_streaming_response.events( + "run_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + stream = response.parse() + stream.close() + + assert cast(Any, response.is_closed) is 
True + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_path_params_events(self, client: Parallel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `run_id` but received ''"): + client.beta.task_run.with_raw_response.events( + "", + ) + + @parametrize + def test_method_result(self, client: Parallel) -> None: + task_run = client.beta.task_run.result( + run_id="run_id", + ) + assert_matches_type(BetaTaskRunResult, task_run, path=["response"]) + + @parametrize + def test_method_result_with_all_params(self, client: Parallel) -> None: + task_run = client.beta.task_run.result( + run_id="run_id", + api_timeout=0, + betas=["mcp-server-2025-07-17"], + ) + assert_matches_type(BetaTaskRunResult, task_run, path=["response"]) + + @parametrize + def test_raw_response_result(self, client: Parallel) -> None: + response = client.beta.task_run.with_raw_response.result( + run_id="run_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + task_run = response.parse() + assert_matches_type(BetaTaskRunResult, task_run, path=["response"]) + + @parametrize + def test_streaming_response_result(self, client: Parallel) -> None: + with client.beta.task_run.with_streaming_response.result( + run_id="run_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + task_run = response.parse() + assert_matches_type(BetaTaskRunResult, task_run, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @parametrize + def test_path_params_result(self, client: Parallel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `run_id` but received ''"): + client.beta.task_run.with_raw_response.result( + run_id="", + ) + + +class TestAsyncTaskRun: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @parametrize + async def test_method_create(self, async_client: AsyncParallel) -> None: + task_run = await async_client.beta.task_run.create( + input="What was the GDP of France in 2023?", + processor="base", + ) + assert_matches_type(TaskRun, task_run, path=["response"]) + + @parametrize + async def test_method_create_with_all_params(self, async_client: AsyncParallel) -> None: + task_run = await async_client.beta.task_run.create( + input="What was the GDP of France in 2023?", + processor="base", + enable_events=True, + mcp_servers=[ + { + "name": "name", + "url": "url", + "allowed_tools": ["string"], + "headers": {"foo": "string"}, + "type": "url", + } + ], + metadata={"foo": "string"}, + source_policy={ + "exclude_domains": ["string"], + "include_domains": ["string"], + }, + task_spec={ + "output_schema": { + "json_schema": { + "additionalProperties": "bar", + "properties": "bar", + "required": "bar", + "type": "bar", + }, + "type": "json", + }, + "input_schema": "string", + }, + webhook={ + "url": "url", + "event_types": ["task_run.status"], + }, + betas=["mcp-server-2025-07-17"], + ) + assert_matches_type(TaskRun, task_run, path=["response"]) + + @parametrize + async def test_raw_response_create(self, async_client: AsyncParallel) -> None: + response = await async_client.beta.task_run.with_raw_response.create( + input="What was the GDP of France in 2023?", + processor="base", + ) + + assert response.is_closed is True + assert 
response.http_request.headers.get("X-Stainless-Lang") == "python" + task_run = await response.parse() + assert_matches_type(TaskRun, task_run, path=["response"]) + + @parametrize + async def test_streaming_response_create(self, async_client: AsyncParallel) -> None: + async with async_client.beta.task_run.with_streaming_response.create( + input="What was the GDP of France in 2023?", + processor="base", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + task_run = await response.parse() + assert_matches_type(TaskRun, task_run, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_method_events(self, async_client: AsyncParallel) -> None: + task_run_stream = await async_client.beta.task_run.events( + "run_id", + ) + await task_run_stream.response.aclose() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_raw_response_events(self, async_client: AsyncParallel) -> None: + response = await async_client.beta.task_run.with_raw_response.events( + "run_id", + ) + + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + stream = await response.parse() + await stream.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_streaming_response_events(self, async_client: AsyncParallel) -> None: + async with async_client.beta.task_run.with_streaming_response.events( + "run_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + stream = await response.parse() + await stream.close() + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_path_params_events(self, async_client: AsyncParallel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `run_id` but received ''"): + await async_client.beta.task_run.with_raw_response.events( + "", + ) + + @parametrize + async def test_method_result(self, async_client: AsyncParallel) -> None: + task_run = await async_client.beta.task_run.result( + run_id="run_id", + ) + assert_matches_type(BetaTaskRunResult, task_run, path=["response"]) + + @parametrize + async def test_method_result_with_all_params(self, async_client: AsyncParallel) -> None: + task_run = await async_client.beta.task_run.result( + run_id="run_id", + api_timeout=0, + betas=["mcp-server-2025-07-17"], + ) + assert_matches_type(BetaTaskRunResult, task_run, path=["response"]) + + @parametrize + async def test_raw_response_result(self, async_client: AsyncParallel) -> None: + response = await async_client.beta.task_run.with_raw_response.result( + run_id="run_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + task_run = await response.parse() + assert_matches_type(BetaTaskRunResult, task_run, path=["response"]) + + @parametrize + async def test_streaming_response_result(self, async_client: AsyncParallel) -> None: + async with async_client.beta.task_run.with_streaming_response.result( + run_id="run_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + task_run = await response.parse() + 
assert_matches_type(BetaTaskRunResult, task_run, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @parametrize + async def test_path_params_result(self, async_client: AsyncParallel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `run_id` but received ''"): + await async_client.beta.task_run.with_raw_response.result( + run_id="", + ) diff --git a/tests/api_resources/test_beta.py b/tests/api_resources/test_beta.py new file mode 100644 index 0000000..b733643 --- /dev/null +++ b/tests/api_resources/test_beta.py @@ -0,0 +1,104 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from parallel import Parallel, AsyncParallel +from tests.utils import assert_matches_type +from parallel.types.beta import SearchResult + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestBeta: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @parametrize + def test_method_search(self, client: Parallel) -> None: + beta = client.beta.search() + assert_matches_type(SearchResult, beta, path=["response"]) + + @parametrize + def test_method_search_with_all_params(self, client: Parallel) -> None: + beta = client.beta.search( + max_chars_per_result=0, + max_results=0, + objective="objective", + processor="base", + search_queries=["string"], + source_policy={ + "exclude_domains": ["string"], + "include_domains": ["string"], + }, + ) + assert_matches_type(SearchResult, beta, path=["response"]) + + @parametrize + def test_raw_response_search(self, client: Parallel) -> None: + response = client.beta.with_raw_response.search() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + beta = response.parse() + assert_matches_type(SearchResult, beta, path=["response"]) + + @parametrize + def test_streaming_response_search(self, client: Parallel) -> None: + with client.beta.with_streaming_response.search() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + beta = response.parse() + assert_matches_type(SearchResult, beta, path=["response"]) + + assert cast(Any, response.is_closed) is True + + +class TestAsyncBeta: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @parametrize + async def test_method_search(self, async_client: AsyncParallel) -> None: + beta = await async_client.beta.search() + assert_matches_type(SearchResult, beta, path=["response"]) + + @parametrize + async def test_method_search_with_all_params(self, async_client: AsyncParallel) -> None: + beta = await async_client.beta.search( + max_chars_per_result=0, + max_results=0, + objective="objective", + processor="base", + search_queries=["string"], + source_policy={ + "exclude_domains": ["string"], + "include_domains": ["string"], + }, + ) + assert_matches_type(SearchResult, beta, path=["response"]) + + @parametrize + async def test_raw_response_search(self, async_client: AsyncParallel) -> None: + response = await async_client.beta.with_raw_response.search() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + beta = await response.parse() + assert_matches_type(SearchResult, beta, 
path=["response"]) + + @parametrize + async def test_streaming_response_search(self, async_client: AsyncParallel) -> None: + async with async_client.beta.with_streaming_response.search() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + beta = await response.parse() + assert_matches_type(SearchResult, beta, path=["response"]) + + assert cast(Any, response.is_closed) is True diff --git a/tests/api_resources/test_task_run.py b/tests/api_resources/test_task_run.py index 871e3fa..096a73e 100644 --- a/tests/api_resources/test_task_run.py +++ b/tests/api_resources/test_task_run.py @@ -20,46 +20,32 @@ class TestTaskRun: @parametrize def test_method_create(self, client: Parallel) -> None: task_run = client.task_run.create( - input="France (2023)", - processor="processor", + input="What was the GDP of France in 2023?", + processor="base", ) assert_matches_type(TaskRun, task_run, path=["response"]) @parametrize def test_method_create_with_all_params(self, client: Parallel) -> None: task_run = client.task_run.create( - input="France (2023)", - processor="processor", + input="What was the GDP of France in 2023?", + processor="base", metadata={"foo": "string"}, + source_policy={ + "exclude_domains": ["string"], + "include_domains": ["string"], + }, task_spec={ "output_schema": { "json_schema": { - "additionalProperties": False, - "properties": { - "gdp": { - "description": "GDP in USD for the year, formatted like '$3.1 trillion (2023)'", - "type": "string", - } - }, - "required": ["gdp"], - "type": "object", - }, - "type": "json", - }, - "input_schema": { - "json_schema": { - "additionalProperties": False, - "properties": { - "gdp": { - "description": "GDP in USD for the year, formatted like '$3.1 trillion (2023)'", - "type": "string", - } - }, - "required": ["gdp"], - "type": "object", + "additionalProperties": "bar", + "properties": "bar", + "required": "bar", + "type": "bar", }, "type": "json", }, + "input_schema": "string", }, ) assert_matches_type(TaskRun, task_run, path=["response"]) @@ -67,8 +53,8 @@ def test_method_create_with_all_params(self, client: Parallel) -> None: @parametrize def test_raw_response_create(self, client: Parallel) -> None: response = client.task_run.with_raw_response.create( - input="France (2023)", - processor="processor", + input="What was the GDP of France in 2023?", + processor="base", ) assert response.is_closed is True @@ -79,8 +65,8 @@ def test_raw_response_create(self, client: Parallel) -> None: @parametrize def test_streaming_response_create(self, client: Parallel) -> None: with client.task_run.with_streaming_response.create( - input="France (2023)", - processor="processor", + input="What was the GDP of France in 2023?", + processor="base", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -183,46 +169,32 @@ class TestAsyncTaskRun: @parametrize async def test_method_create(self, async_client: AsyncParallel) -> None: task_run = await async_client.task_run.create( - input="France (2023)", - processor="processor", + input="What was the GDP of France in 2023?", + processor="base", ) assert_matches_type(TaskRun, task_run, path=["response"]) @parametrize async def test_method_create_with_all_params(self, async_client: AsyncParallel) -> None: task_run = await async_client.task_run.create( - input="France (2023)", - processor="processor", + input="What was the GDP of France in 2023?", + processor="base", metadata={"foo": 
"string"}, + source_policy={ + "exclude_domains": ["string"], + "include_domains": ["string"], + }, task_spec={ "output_schema": { "json_schema": { - "additionalProperties": False, - "properties": { - "gdp": { - "description": "GDP in USD for the year, formatted like '$3.1 trillion (2023)'", - "type": "string", - } - }, - "required": ["gdp"], - "type": "object", - }, - "type": "json", - }, - "input_schema": { - "json_schema": { - "additionalProperties": False, - "properties": { - "gdp": { - "description": "GDP in USD for the year, formatted like '$3.1 trillion (2023)'", - "type": "string", - } - }, - "required": ["gdp"], - "type": "object", + "additionalProperties": "bar", + "properties": "bar", + "required": "bar", + "type": "bar", }, "type": "json", }, + "input_schema": "string", }, ) assert_matches_type(TaskRun, task_run, path=["response"]) @@ -230,8 +202,8 @@ async def test_method_create_with_all_params(self, async_client: AsyncParallel) @parametrize async def test_raw_response_create(self, async_client: AsyncParallel) -> None: response = await async_client.task_run.with_raw_response.create( - input="France (2023)", - processor="processor", + input="What was the GDP of France in 2023?", + processor="base", ) assert response.is_closed is True @@ -242,8 +214,8 @@ async def test_raw_response_create(self, async_client: AsyncParallel) -> None: @parametrize async def test_streaming_response_create(self, async_client: AsyncParallel) -> None: async with async_client.task_run.with_streaming_response.create( - input="France (2023)", - processor="processor", + input="What was the GDP of France in 2023?", + processor="base", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" diff --git a/tests/test_client.py b/tests/test_client.py index 878365f..f3c8287 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -717,7 +717,9 @@ def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter, clien respx_mock.post("/v1/tasks/runs").mock(side_effect=httpx.TimeoutException("Test timeout error")) with pytest.raises(APITimeoutError): - client.task_run.with_streaming_response.create(input="France (2023)", processor="processor").__enter__() + client.task_run.with_streaming_response.create( + input="What was the GDP of France in 2023?", processor="base" + ).__enter__() assert _get_open_connections(self.client) == 0 @@ -727,7 +729,9 @@ def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter, client respx_mock.post("/v1/tasks/runs").mock(return_value=httpx.Response(500)) with pytest.raises(APIStatusError): - client.task_run.with_streaming_response.create(input="France (2023)", processor="processor").__enter__() + client.task_run.with_streaming_response.create( + input="What was the GDP of France in 2023?", processor="base" + ).__enter__() assert _get_open_connections(self.client) == 0 @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) @@ -756,7 +760,9 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: respx_mock.post("/v1/tasks/runs").mock(side_effect=retry_handler) - response = client.task_run.with_raw_response.create(input="France (2023)", processor="processor") + response = client.task_run.with_raw_response.create( + input="What was the GDP of France in 2023?", processor="base" + ) assert response.retries_taken == failures_before_success assert int(response.http_request.headers.get("x-stainless-retry-count")) == failures_before_success @@ -781,7 +787,9 @@ def 
retry_handler(_request: httpx.Request) -> httpx.Response: respx_mock.post("/v1/tasks/runs").mock(side_effect=retry_handler) response = client.task_run.with_raw_response.create( - input="France (2023)", processor="processor", extra_headers={"x-stainless-retry-count": Omit()} + input="What was the GDP of France in 2023?", + processor="base", + extra_headers={"x-stainless-retry-count": Omit()}, ) assert len(response.http_request.headers.get_list("x-stainless-retry-count")) == 0 @@ -806,7 +814,9 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: respx_mock.post("/v1/tasks/runs").mock(side_effect=retry_handler) response = client.task_run.with_raw_response.create( - input="France (2023)", processor="processor", extra_headers={"x-stainless-retry-count": "42"} + input="What was the GDP of France in 2023?", + processor="base", + extra_headers={"x-stainless-retry-count": "42"}, ) assert response.http_request.headers.get("x-stainless-retry-count") == "42" @@ -1539,7 +1549,7 @@ async def test_retrying_timeout_errors_doesnt_leak( with pytest.raises(APITimeoutError): await async_client.task_run.with_streaming_response.create( - input="France (2023)", processor="processor" + input="What was the GDP of France in 2023?", processor="base" ).__aenter__() assert _get_open_connections(self.client) == 0 @@ -1553,7 +1563,7 @@ async def test_retrying_status_errors_doesnt_leak( with pytest.raises(APIStatusError): await async_client.task_run.with_streaming_response.create( - input="France (2023)", processor="processor" + input="What was the GDP of France in 2023?", processor="base" ).__aenter__() assert _get_open_connections(self.client) == 0 @@ -1584,7 +1594,9 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: respx_mock.post("/v1/tasks/runs").mock(side_effect=retry_handler) - response = await client.task_run.with_raw_response.create(input="France (2023)", processor="processor") + response = await client.task_run.with_raw_response.create( + input="What was the GDP of France in 2023?", processor="base" + ) assert response.retries_taken == failures_before_success assert int(response.http_request.headers.get("x-stainless-retry-count")) == failures_before_success @@ -1610,7 +1622,9 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: respx_mock.post("/v1/tasks/runs").mock(side_effect=retry_handler) response = await client.task_run.with_raw_response.create( - input="France (2023)", processor="processor", extra_headers={"x-stainless-retry-count": Omit()} + input="What was the GDP of France in 2023?", + processor="base", + extra_headers={"x-stainless-retry-count": Omit()}, ) assert len(response.http_request.headers.get_list("x-stainless-retry-count")) == 0 @@ -1636,7 +1650,9 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: respx_mock.post("/v1/tasks/runs").mock(side_effect=retry_handler) response = await client.task_run.with_raw_response.create( - input="France (2023)", processor="processor", extra_headers={"x-stainless-retry-count": "42"} + input="What was the GDP of France in 2023?", + processor="base", + extra_headers={"x-stainless-retry-count": "42"}, ) assert response.http_request.headers.get("x-stainless-retry-count") == "42" From c975302c0d61d1d6731ccaeb7977c2009cb0b666 Mon Sep 17 00:00:00 2001 From: Kumar Saunack Date: Mon, 1 Sep 2025 10:58:59 -0700 Subject: [PATCH 30/32] fix: add types for backwards compatibility --- src/parallel/types/parsed_task_run_result.py | 17 +++++++++++++---- src/parallel/types/task_run.py | 2 +- 
src/parallel/types/task_run_result.py | 20 ++++++++++---------- 3 files changed, 24 insertions(+), 15 deletions(-) diff --git a/src/parallel/types/parsed_task_run_result.py b/src/parallel/types/parsed_task_run_result.py index bd5b842..2e72308 100644 --- a/src/parallel/types/parsed_task_run_result.py +++ b/src/parallel/types/parsed_task_run_result.py @@ -3,7 +3,9 @@ from pydantic import BaseModel from .._models import GenericModel -from .task_run_result import TaskRunResult, OutputTaskRunJsonOutput, OutputTaskRunTextOutput +from .task_run_result import TaskRunResult +from .task_run_json_output import TaskRunJsonOutput +from .task_run_text_output import TaskRunTextOutput ContentType = TypeVar("ContentType", bound=BaseModel) @@ -14,16 +16,23 @@ # pyright: reportIncompatibleVariableOverride=false -class ParsedOutputTaskRunTextOutput(OutputTaskRunTextOutput, GenericModel, Generic[ContentType]): +class ParsedTaskRunTextOutput(TaskRunTextOutput, GenericModel, Generic[ContentType]): parsed: None """The parsed output from the task run.""" -class ParsedOutputTaskRunJsonOutput(OutputTaskRunJsonOutput, GenericModel, Generic[ContentType]): +class ParsedTaskRunJsonOutput(TaskRunJsonOutput, GenericModel, Generic[ContentType]): parsed: Optional[ContentType] = None """The parsed output from the task run.""" class ParsedTaskRunResult(TaskRunResult, GenericModel, Generic[ContentType]): - output: Union[ParsedOutputTaskRunTextOutput[ContentType], ParsedOutputTaskRunJsonOutput[ContentType]] # type: ignore[assignment] + output: Union[ParsedTaskRunTextOutput[ContentType], ParsedTaskRunJsonOutput[ContentType]] # type: ignore[assignment] """The parsed output from the task run.""" + + +ParsedOutputTaskRunTextOutput = ParsedTaskRunTextOutput # for backwards compatibility with v0.1.3 +"""This is deprecated, `ParsedTaskRunTextOutput` should be used instead""" + +ParsedOutputTaskRunJsonOutput = ParsedTaskRunJsonOutput # for backwards compatibility with v0.1.3 +"""This is deprecated, `ParsedTaskRunJsonOutput` should be used instead""" diff --git a/src/parallel/types/task_run.py b/src/parallel/types/task_run.py index 4b5e118..8ac2d93 100644 --- a/src/parallel/types/task_run.py +++ b/src/parallel/types/task_run.py @@ -11,7 +11,7 @@ __all__ = [ "TaskRun", -# for backwards compatibility with v0.1.3 + "Warning", # for backwards compatibility with v0.1.3 ] diff --git a/src/parallel/types/task_run_result.py b/src/parallel/types/task_run_result.py index 7b52026..087cdce 100644 --- a/src/parallel/types/task_run_result.py +++ b/src/parallel/types/task_run_result.py @@ -14,19 +14,16 @@ __all__ = [ "TaskRunResult", "Output", - "OutputTaskRunTextOutput", - "OutputTaskRunTextOutputBasis", - "OutputTaskRunTextOutputBasisCitation", "OutputTaskRunJsonOutput", "OutputTaskRunJsonOutputBasis", "OutputTaskRunJsonOutputBasisCitation", + "OutputTaskRunTextOutput", + "OutputTaskRunTextOutputBasis", + "OutputTaskRunTextOutputBasisCitation", ] -OutputTaskRunTextOutputBasis = FieldBasis # for backwards compatibility with v0.1.3 -"""This is deprecated, `FieldBasis` should be used instead""" - -OutputTaskRunTextOutputBasisCitation = Citation # for backwards compatibility with v0.1.3 -"""This is deprecated, `Citation` should be used instead""" +OutputTaskRunJsonOutput = TaskRunJsonOutput # for backwards compatibility with v0.1.3 +"""This is deprecated, `TaskRunJsonOutput` should be used instead""" OutputTaskRunJsonOutputBasis = FieldBasis # for backwards compatibility with v0.1.3 """This is deprecated, `FieldBasis` should be used instead""" @@ -37,8 
+34,11 @@ OutputTaskRunTextOutput = TaskRunTextOutput # for backwards compatibility with v0.1.3 """This is deprecated, `TaskRunTextOutput` should be used instead""" -OutputTaskRunJsonOutput = TaskRunJsonOutput # for backwards compatibility with v0.1.3 -"""This is deprecated, `TaskRunJsonOutput` should be used instead""" +OutputTaskRunTextOutputBasis = FieldBasis # for backwards compatibility with v0.1.3 +"""This is deprecated, `FieldBasis` should be used instead""" + +OutputTaskRunTextOutputBasisCitation = Citation # for backwards compatibility with v0.1.3 +"""This is deprecated, `Citation` should be used instead""" Output: TypeAlias = Annotated[Union[TaskRunTextOutput, TaskRunJsonOutput], PropertyInfo(discriminator="type")] From cefefbfccba78fdabcc925728836d70400d4e5aa Mon Sep 17 00:00:00 2001 From: Kumar Saunack Date: Mon, 1 Sep 2025 11:17:05 -0700 Subject: [PATCH 31/32] chore(readme): Remove references to methods, update FAQ for beta --- README.md | 162 +++++++----------------------------------------------- 1 file changed, 19 insertions(+), 143 deletions(-) diff --git a/README.md b/README.md index d2694f8..731e3e6 100644 --- a/README.md +++ b/README.md @@ -34,8 +34,12 @@ client = Parallel( api_key=os.environ.get("PARALLEL_API_KEY"), # This is the default and can be omitted ) -run_result = client.task_run.execute(input="France (2023)", processor="core", output="GDP") -print(run_result.output.parsed) +task_run = client.task_run.create( + input="France (2023)", + processor="core", +) +task_run_result = client.task_run.result(run_id=task_run.run_id) +print(task_run_result.output) ``` While you can provide an `api_key` keyword argument, @@ -63,10 +67,9 @@ client = AsyncParallel( async def main() -> None: - run_result = await client.task_run.execute( - input="France (2023)", processor="core", output="GDP" - ) - print(run_result.output.parsed) + task_run = await client.task_run.create(input="France (2023)", processor="core") + run_result = await client.task_run.result(run_id=task_run.run_id) + print(run_result.output.content) if __name__ == "__main__": @@ -78,127 +81,6 @@ using the asynchronous client, especially for executing multiple Task Runs concu Functionality between the synchronous and asynchronous clients is identical, including the convenience methods. -## Convenience methods - -### Execute - -The `execute` method provides a single call that combines creating a task run, -polling until completion, and parsing structured outputs (if specified). - -If an output type that inherits from `BaseModel` is -specified in the call to `.execute()`, the response content will be parsed into an -instance of the provided output type. The parsed output can be accessed via the -`parsed` property on the output field of the response. - -```python -import asyncio -from parallel import AsyncParallel -from pydantic import BaseModel - -client = AsyncParallel() - - -class SampleOutputStructure(BaseModel): - output: str - - -async def main() -> None: - # with pydantic - run_result = await client.task_run.execute( - input="France (2023)", - processor="core", - output=SampleOutputStructure, - ) - # parsed output of type SampleOutputStructure - print(run_result.output.parsed) - # without pydantic - run_result = await client.task_run.execute( - input="France (2023)", processor="core", output="GDP" - ) - print(run_result.output.parsed) - - -if __name__ == "__main__": - asyncio.run(main()) -``` - -The async client lets you create multiple task runs without blocking. 
-To submit several at once, call `execute()` and gather the results at the end. - -```python -import asyncio -import os - -from parallel import AsyncParallel -from pydantic import BaseModel, Field -from typing import List - - -class CountryInput(BaseModel): - country: str = Field( - description="Name of the country to research. Must be a recognized " - "sovereign nation (e.g., 'France', 'Japan')." - ) - year: int = Field( - description="Year for which to retrieve data. Must be 2000 or later. " - "Use most recent full-year estimates if year is current." - ) - - -class CountryOutput(BaseModel): - gdp: str = Field(description="GDP in USD for the year, formatted like '$3.1 trillion (2023)'.") - top_exports: List[str] = Field( - description="Top 3 exported goods/services by value. Use credible sources." - ) - top_imports: List[str] = Field( - description="Top 3 imported goods/services by value. Use credible sources." - ) - - -async def main(): - # Initialize the Parallel client - client = AsyncParallel(api_key=os.environ.get("PARALLEL_API_KEY")) - - # Prepare structured input - input_data = [ - CountryInput(country="France", year=2023), - CountryInput(country="Germany", year=2023), - CountryInput(country="Italy", year=2023), - ] - - run_results = await asyncio.gather( - *[ - client.task_run.execute(input=datum, output=CountryOutput, processor="core") - for datum in input_data - ] - ) - - for run_input, run_result in zip(input_data, run_results): - print(f"Task run output for {run_input}: {run_result.output.parsed}") - - -if __name__ == "__main__": - asyncio.run(main()) -``` - -#### `execute()` vs `create()` - -The `execute` and `create` methods differ slightly in their signatures and -behavior — `create` requires a Task Spec object that contains the output schema, -while `execute` accepts an output schema as a top‑level parameter. `execute` is -also a one‑shot method that combines creation, polling, and parsing for you. - -Use `create` when you want a run ID immediately and prefer to control polling -yourself. `execute` is best for one‑shot task execution and for typed inputs and -outputs — note that no outputs are available until the call finishes. Finally, for -the output of `execute`, parsed content is available via `run_result.output.parsed`. - -Both `execute` and `create` validate inputs when appropriate input types are -provided. For `execute`, validation happens when a pydantic input is provided. For -`create`, validation occurs when the input schema is specified inside the task spec -parameter. Additionally, in both calls, the un-parsed result content is accessible via -the `run_result.output.content`. - ## Frequently Asked Questions **Does the Task API accept prompts or objectives?** @@ -209,9 +91,8 @@ more information, check [our docs](https://docs.parallel.ai/task-api/core-concep **Can I access beta parameters or endpoints via the SDK?** -The SDK currently does not support beta parameters in the Task API. You can consider -using [custom requests](#making-customundocumented-requests) in conjunction with -[low level APIs](#lowlevel-api-access). +Yes, the SDK supports both beta endpoints and beta header parameters for the Task API. +All beta parameters are accessible via the `client.beta` namespace in the SDK. 
**Can I specify a timeout for API calls?** @@ -297,7 +178,7 @@ from parallel import Parallel client = Parallel() try: - client.task_run.execute(input="France (2023)", processor="core", output="GDP") + client.task_run.create(input="France (2023)", processor="core") except parallel.APIConnectionError as e: print("The server could not be reached") print(e.__cause__) # an underlying Exception, likely raised within httpx. @@ -340,9 +221,7 @@ client = Parallel( ) # Or, configure per-request: -client.with_options(max_retries=5).task_run.execute( - input="France (2023)", processor="core", output="GDP" -) +client.with_options(max_retries=5).task_run.create(input="France (2023)", processor="core") ``` ### Timeouts @@ -365,9 +244,7 @@ client = Parallel( ) # Override per-request: -client.with_options(timeout=5.0).task_run.execute( - input="France (2023)", processor="core", output="GDP" -) +client.with_options(timeout=5.0).task_run.create(input="France (2023)", processor="core") ``` On timeout, an `APITimeoutError` is thrown. @@ -408,15 +285,14 @@ The "raw" Response object can be accessed by prefixing `.with_raw_response.` to from parallel import Parallel client = Parallel() -response = client.task_run.with_raw_response.execute( +response = client.task_run.with_raw_response.create( input="France (2023)", processor="core", - output="GDP" ) print(response.headers.get('X-My-Header')) -task_run = response.parse() # get the object that `task_run.execute()` would have returned -print(task_run.output) +task_run = response.parse() +print(task_run.run_id) ``` These methods return an [`APIResponse`](https://github.com/parallel-web/parallel-sdk-python/tree/main/src/parallel/_response.py) object. @@ -430,8 +306,8 @@ The above interface eagerly reads the full response body when you make the reque To stream the response body, use `.with_streaming_response` instead, which requires a context manager and only reads the response body once you call `.read()`, `.text()`, `.json()`, `.iter_bytes()`, `.iter_text()`, `.iter_lines()` or `.parse()`. In the async client, these are async methods. 
```python -with client.task_run.with_streaming_response.execute( - input="France (2023)", processor="core", output="GDP" +with client.task_run.with_streaming_response.create( + input="France (2023)", processor="core" ) as response: print(response.headers.get("X-My-Header")) From 77230f968eac0c25c262605295b191f4c872fe04 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 1 Sep 2025 18:17:28 +0000 Subject: [PATCH 32/32] release: 0.2.0 --- .release-please-manifest.json | 2 +- CHANGELOG.md | 46 +++++++++++++++++++++++++++++++++++ pyproject.toml | 2 +- src/parallel/_version.py | 2 +- 4 files changed, 49 insertions(+), 3 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 4c5a1a0..10f3091 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.1.3" + ".": "0.2.0" } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 80c1203..4d85ed9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,51 @@ # Changelog +## 0.2.0 (2025-09-01) + +Full Changelog: [v0.1.3...v0.2.0](https://github.com/parallel-web/parallel-sdk-python/compare/v0.1.3...v0.2.0) + +### Features + +* **api:** update via SDK Studio ([b048bd7](https://github.com/parallel-web/parallel-sdk-python/commit/b048bd7e1c5a992ae274aa4b6df16a9d5b0f843e)) +* **api:** update via SDK Studio ([b9abf3c](https://github.com/parallel-web/parallel-sdk-python/commit/b9abf3c8b0e22b260149f01b1ef608924eefe735)) +* **api:** update via SDK Studio ([4326698](https://github.com/parallel-web/parallel-sdk-python/commit/43266988c2123fa1aff00bf0b62c355b0c2bf04e)) +* clean up environment call outs ([3a102e9](https://github.com/parallel-web/parallel-sdk-python/commit/3a102e9a05476e4d28c0ac386cd156cc0fe8b5cf)) +* **client:** add support for aiohttp ([4e2aa32](https://github.com/parallel-web/parallel-sdk-python/commit/4e2aa32ad8242745f56e5a8b810d33c362967dad)) +* **client:** support file upload requests ([ec0c2cf](https://github.com/parallel-web/parallel-sdk-python/commit/ec0c2cf30bd24524567232ad0f661facda124203)) + + +### Bug Fixes + +* add types for backwards compatibility ([c975302](https://github.com/parallel-web/parallel-sdk-python/commit/c975302c0d61d1d6731ccaeb7977c2009cb0b666)) +* avoid newer type syntax ([2ea196d](https://github.com/parallel-web/parallel-sdk-python/commit/2ea196d5d4c7881e61dc848a1387770b4e27e304)) +* **ci:** correct conditional ([99d37f6](https://github.com/parallel-web/parallel-sdk-python/commit/99d37f657a249987ccae60dd0e62f296ab0c1d85)) +* **ci:** release-doctor — report correct token name ([310076b](https://github.com/parallel-web/parallel-sdk-python/commit/310076b2f8a75ed29ba2a1fae0f6e840ec43bb5b)) +* **client:** don't send Content-Type header on GET requests ([f103b4a](https://github.com/parallel-web/parallel-sdk-python/commit/f103b4a72fc25f6a8dd1bda0c8d040aba1f527d1)) +* **parsing:** correctly handle nested discriminated unions ([c9a2300](https://github.com/parallel-web/parallel-sdk-python/commit/c9a23002be2d78a11b5c1b7c901f4ddb32663393)) +* **parsing:** ignore empty metadata ([ab434aa](https://github.com/parallel-web/parallel-sdk-python/commit/ab434aa7bd088fc16279255ae36138ab6dff0730)) +* **parsing:** parse extra field types ([85f5cd4](https://github.com/parallel-web/parallel-sdk-python/commit/85f5cd4191ae168ed443e78a2c7bd747d51404b3)) + + +### Chores + +* **ci:** change upload type 
([40dbd3b](https://github.com/parallel-web/parallel-sdk-python/commit/40dbd3b7d5becf0fe54b62a4acd8696957380053)) +* **ci:** only run for pushes and fork pull requests ([d55fbea](https://github.com/parallel-web/parallel-sdk-python/commit/d55fbea54037d2d833ecc281cbddbc8d6700d24d)) +* **internal:** add Sequence related utils ([cb9a7a9](https://github.com/parallel-web/parallel-sdk-python/commit/cb9a7a905ca4a4a9ba35e540f6c47a8bf89c87d2)) +* **internal:** bump pinned h11 dep ([818f1dd](https://github.com/parallel-web/parallel-sdk-python/commit/818f1ddb3ba1be6bfdb9aee1322d6a3d8a98667a)) +* **internal:** change ci workflow machines ([a90da34](https://github.com/parallel-web/parallel-sdk-python/commit/a90da34910585453eac918a5f273749c00d2f743)) +* **internal:** codegen related update ([47ea68b](https://github.com/parallel-web/parallel-sdk-python/commit/47ea68bd44ad52ac1c18e7215c013f408914890c)) +* **internal:** fix ruff target version ([4e5dbda](https://github.com/parallel-web/parallel-sdk-python/commit/4e5dbda03907f45ac31d18d89714e86f26e79866)) +* **internal:** update comment in script ([631b045](https://github.com/parallel-web/parallel-sdk-python/commit/631b045ae2f138e4c8098fafd9466451d61ca82a)) +* **internal:** update pyright exclude list ([8d2fb29](https://github.com/parallel-web/parallel-sdk-python/commit/8d2fb29b5d80a2fa9ee81a6f9510134fb7bab908)) +* **internal:** version bump ([90d26a5](https://github.com/parallel-web/parallel-sdk-python/commit/90d26a5e8db8bd6a27f9bbc96595da87bd7ea0f3)) +* **package:** mark python 3.13 as supported ([6fa54c4](https://github.com/parallel-web/parallel-sdk-python/commit/6fa54c42a17f5e731f5e97214f0212a0828d3cb8)) +* **project:** add settings file for vscode ([acdeda2](https://github.com/parallel-web/parallel-sdk-python/commit/acdeda2f1f95f5bade2da52d5a2aa8560e71369d)) +* **readme:** fix version rendering on pypi ([2bf10b0](https://github.com/parallel-web/parallel-sdk-python/commit/2bf10b073ab7e015b08c106d265a9091752df51a)) +* **readme:** Remove references to methods, update FAQ for beta ([cefefbf](https://github.com/parallel-web/parallel-sdk-python/commit/cefefbfccba78fdabcc925728836d70400d4e5aa)) +* **tests:** skip some failing tests on the latest python versions ([13b1533](https://github.com/parallel-web/parallel-sdk-python/commit/13b153381e9b7c998a7ebef878518222678dfa83)) +* update @stainless-api/prism-cli to v5.15.0 ([56b5aab](https://github.com/parallel-web/parallel-sdk-python/commit/56b5aab87a833c27b8e1a2bc7c4bf2169ee281a8)) +* update github action ([3d90e19](https://github.com/parallel-web/parallel-sdk-python/commit/3d90e196184e540242fb310cc55b0219d20dff45)) + ## 0.1.3 (2025-08-09) Full Changelog: [v0.1.2...v0.1.3](https://github.com/parallel-web/parallel-sdk-python/compare/v0.1.2...v0.1.3) diff --git a/pyproject.toml b/pyproject.toml index 2708962..e4d08a0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "parallel-web" -version = "0.1.3" +version = "0.2.0" description = "The official Python library for the Parallel API" dynamic = ["readme"] license = "MIT" diff --git a/src/parallel/_version.py b/src/parallel/_version.py index 664dcba..11e0b38 100644 --- a/src/parallel/_version.py +++ b/src/parallel/_version.py @@ -1,4 +1,4 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. __title__ = "parallel" -__version__ = "0.1.3" # x-release-please-version +__version__ = "0.2.0" # x-release-please-version