From bbdbceeefc586bc4127102f3172a69c5b44dc46d Mon Sep 17 00:00:00 2001
From: fern-api <115122769+fern-api[bot]@users.noreply.github.com>
Date: Sat, 11 Jan 2025 23:36:57 +0000
Subject: [PATCH] SDK regeneration

---
 poetry.lock                             |   6 +-
 pyproject.toml                          |   2 +-
 reference.md                            | 171 -----------
 src/scrapybara/__init__.py              |   9 +-
 src/scrapybara/agent/__init__.py        |   5 -
 src/scrapybara/agent/client.py          | 364 ------------------------
 src/scrapybara/agent/types/__init__.py  |   5 -
 src/scrapybara/agent/types/model.py     |   5 -
 src/scrapybara/base_client.py           |   4 -
 src/scrapybara/core/client_wrapper.py   |   2 +-
 src/scrapybara/types/__init__.py        |   4 -
 src/scrapybara/types/act_response.py    |  20 --
 src/scrapybara/types/scrape_response.py |  20 --
 13 files changed, 6 insertions(+), 611 deletions(-)
 delete mode 100644 src/scrapybara/agent/__init__.py
 delete mode 100644 src/scrapybara/agent/client.py
 delete mode 100644 src/scrapybara/agent/types/__init__.py
 delete mode 100644 src/scrapybara/agent/types/model.py
 delete mode 100644 src/scrapybara/types/act_response.py
 delete mode 100644 src/scrapybara/types/scrape_response.py

diff --git a/poetry.lock b/poetry.lock
index 2daf4a8..4ed5aac 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -358,13 +358,13 @@ testing = ["pytest", "pytest-benchmark"]
 
 [[package]]
 name = "pydantic"
-version = "2.10.4"
+version = "2.10.5"
 description = "Data validation using Python type hints"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "pydantic-2.10.4-py3-none-any.whl", hash = "sha256:597e135ea68be3a37552fb524bc7d0d66dcf93d395acd93a00682f1efcb8ee3d"},
-    {file = "pydantic-2.10.4.tar.gz", hash = "sha256:82f12e9723da6de4fe2ba888b5971157b3be7ad914267dea8f05f82b28254f06"},
+    {file = "pydantic-2.10.5-py3-none-any.whl", hash = "sha256:4dd4e322dbe55472cb7ca7e73f4b63574eecccf2835ffa2af9021ce113c83c53"},
+    {file = "pydantic-2.10.5.tar.gz", hash = "sha256:278b38dbbaec562011d659ee05f63346951b3a248a6f3642e1bc68894ea2b4ff"},
 ]
 
 [package.dependencies]
diff --git a/pyproject.toml b/pyproject.toml
index 6a6900a..42b78b0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "scrapybara"
-version = "2.0.7"
+version = "2.1.0"
 description = ""
 readme = "README.md"
 authors = []
diff --git a/reference.md b/reference.md
index 778f83f..a16e22d 100644
--- a/reference.md
+++ b/reference.md
@@ -761,177 +761,6 @@ client.instance.resume(
-
-
-
-
-## Agent
-
-client.agent.act(...)
-
-#### 🔌 Usage
-
-```python
-from scrapybara import Scrapybara
-
-client = Scrapybara(
-    api_key="YOUR_API_KEY",
-)
-client.agent.act(
-    instance_id="instance_id",
-    cmd="cmd",
-)
-
-```
-
-#### ⚙️ Parameters
-
-**instance_id:** `str`
-
-**cmd:** `str`
-
-**include_screenshot:** `typing.Optional[bool]`
-
-**model:** `typing.Optional[Model]`
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-client.agent.scrape(...)
-
-#### 🔌 Usage
-
-```python
-from scrapybara import Scrapybara
-
-client = Scrapybara(
-    api_key="YOUR_API_KEY",
-)
-client.agent.scrape(
-    instance_id="instance_id",
-    cmd="cmd",
-)
-
-```
-
-#### ⚙️ Parameters
-
-**instance_id:** `str`
-
-**cmd:** `str`
-
-**schema:** `typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]`
-
-**include_screenshot:** `typing.Optional[bool]`
-
-**model:** `typing.Optional[Model]`
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
diff --git a/src/scrapybara/__init__.py b/src/scrapybara/__init__.py
index 91c8291..03f549c 100644
--- a/src/scrapybara/__init__.py
+++ b/src/scrapybara/__init__.py
@@ -1,7 +1,6 @@
 # This file was auto-generated by Fern from our API Definition.
 
 from .types import (
-    ActResponse,
     AuthStateResponse,
     BrowserAuthenticateResponse,
     BrowserGetCdpUrlResponse,
@@ -20,7 +19,6 @@
     Notebook,
     NotebookCell,
     SaveBrowserAuthResponse,
-    ScrapeResponse,
    StartBrowserResponse,
     Status,
     StopBrowserResponse,
@@ -29,15 +27,13 @@
     ValidationErrorLocItem,
 )
 from .errors import UnprocessableEntityError
-from . import agent, browser, code, env, file, instance, notebook
-from .agent import Model
+from . import browser, code, env, file, instance, notebook
 from .client import AsyncScrapybara, Scrapybara
 from .environment import ScrapybaraEnvironment
 from .instance import Action, Command
 from .version import __version__
 
 __all__ = [
-    "ActResponse",
     "Action",
     "AsyncScrapybara",
     "AuthStateResponse",
@@ -56,11 +52,9 @@
     "InstanceGetStreamUrlResponse",
     "InstanceScreenshotResponse",
     "KernelInfo",
-    "Model",
     "Notebook",
     "NotebookCell",
     "SaveBrowserAuthResponse",
-    "ScrapeResponse",
     "Scrapybara",
     "ScrapybaraEnvironment",
     "StartBrowserResponse",
@@ -71,7 +65,6 @@
     "ValidationError",
     "ValidationErrorLocItem",
     "__version__",
-    "agent",
     "browser",
     "code",
     "env",
diff --git a/src/scrapybara/agent/__init__.py b/src/scrapybara/agent/__init__.py
deleted file mode 100644
index b1f769f..0000000
--- a/src/scrapybara/agent/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-from .types import Model
-
-__all__ = ["Model"]
diff --git a/src/scrapybara/agent/client.py b/src/scrapybara/agent/client.py
deleted file mode 100644
index fb3ac5f..0000000
--- a/src/scrapybara/agent/client.py
+++ /dev/null
@@ -1,364 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import typing
-from ..core.client_wrapper import SyncClientWrapper
-from .types.model import Model
-from ..core.request_options import RequestOptions
-from ..types.act_response import ActResponse
-from ..core.jsonable_encoder import jsonable_encoder
-from ..core.pydantic_utilities import parse_obj_as
-from ..errors.unprocessable_entity_error import UnprocessableEntityError
-from ..types.http_validation_error import HttpValidationError
-from json.decoder import JSONDecodeError
-from ..core.api_error import ApiError
-from ..types.scrape_response import ScrapeResponse
-from ..core.client_wrapper import AsyncClientWrapper
-
-# this is used as the default value for optional parameters
-OMIT = typing.cast(typing.Any, ...)
-
-
-class AgentClient:
-    def __init__(self, *, client_wrapper: SyncClientWrapper):
-        self._client_wrapper = client_wrapper
-
-    def act(
-        self,
-        instance_id: str,
-        *,
-        cmd: str,
-        include_screenshot: typing.Optional[bool] = OMIT,
-        model: typing.Optional[Model] = OMIT,
-        request_options: typing.Optional[RequestOptions] = None,
-    ) -> ActResponse:
-        """
-        Parameters
-        ----------
-        instance_id : str
-
-        cmd : str
-
-        include_screenshot : typing.Optional[bool]
-
-        model : typing.Optional[Model]
-
-        request_options : typing.Optional[RequestOptions]
-            Request-specific configuration.
-
-        Returns
-        -------
-        ActResponse
-            Successful Response
-
-        Examples
-        --------
-        from scrapybara import Scrapybara
-
-        client = Scrapybara(
-            api_key="YOUR_API_KEY",
-        )
-        client.agent.act(
-            instance_id="instance_id",
-            cmd="cmd",
-        )
-        """
-        _response = self._client_wrapper.httpx_client.request(
-            f"v1/instance/{jsonable_encoder(instance_id)}/act",
-            method="POST",
-            json={
-                "cmd": cmd,
-                "include_screenshot": include_screenshot,
-                "model": model,
-            },
-            headers={
-                "content-type": "application/json",
-            },
-            request_options=request_options,
-            omit=OMIT,
-        )
-        try:
-            if 200 <= _response.status_code < 300:
-                return typing.cast(
-                    ActResponse,
-                    parse_obj_as(
-                        type_=ActResponse,  # type: ignore
-                        object_=_response.json(),
-                    ),
-                )
-            if _response.status_code == 422:
-                raise UnprocessableEntityError(
-                    typing.cast(
-                        HttpValidationError,
-                        parse_obj_as(
-                            type_=HttpValidationError,  # type: ignore
-                            object_=_response.json(),
-                        ),
-                    )
-                )
-            _response_json = _response.json()
-        except JSONDecodeError:
-            raise ApiError(status_code=_response.status_code, body=_response.text)
-        raise ApiError(status_code=_response.status_code, body=_response_json)
-
-    def scrape(
-        self,
-        instance_id: str,
-        *,
-        cmd: str,
-        schema: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
-        include_screenshot: typing.Optional[bool] = OMIT,
-        model: typing.Optional[Model] = OMIT,
-        request_options: typing.Optional[RequestOptions] = None,
-    ) -> ScrapeResponse:
-        """
-        Parameters
-        ----------
-        instance_id : str
-
-        cmd : str
-
-        schema : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
-
-        include_screenshot : typing.Optional[bool]
-
-        model : typing.Optional[Model]
-
-        request_options : typing.Optional[RequestOptions]
-            Request-specific configuration.
-
-        Returns
-        -------
-        ScrapeResponse
-            Successful Response
-
-        Examples
-        --------
-        from scrapybara import Scrapybara
-
-        client = Scrapybara(
-            api_key="YOUR_API_KEY",
-        )
-        client.agent.scrape(
-            instance_id="instance_id",
-            cmd="cmd",
-        )
-        """
-        _response = self._client_wrapper.httpx_client.request(
-            f"v1/instance/{jsonable_encoder(instance_id)}/scrape",
-            method="POST",
-            json={
-                "cmd": cmd,
-                "schema": schema,
-                "include_screenshot": include_screenshot,
-                "model": model,
-            },
-            headers={
-                "content-type": "application/json",
-            },
-            request_options=request_options,
-            omit=OMIT,
-        )
-        try:
-            if 200 <= _response.status_code < 300:
-                return typing.cast(
-                    ScrapeResponse,
-                    parse_obj_as(
-                        type_=ScrapeResponse,  # type: ignore
-                        object_=_response.json(),
-                    ),
-                )
-            if _response.status_code == 422:
-                raise UnprocessableEntityError(
-                    typing.cast(
-                        HttpValidationError,
-                        parse_obj_as(
-                            type_=HttpValidationError,  # type: ignore
-                            object_=_response.json(),
-                        ),
-                    )
-                )
-            _response_json = _response.json()
-        except JSONDecodeError:
-            raise ApiError(status_code=_response.status_code, body=_response.text)
-        raise ApiError(status_code=_response.status_code, body=_response_json)
-
-
-class AsyncAgentClient:
-    def __init__(self, *, client_wrapper: AsyncClientWrapper):
-        self._client_wrapper = client_wrapper
-
-    async def act(
-        self,
-        instance_id: str,
-        *,
-        cmd: str,
-        include_screenshot: typing.Optional[bool] = OMIT,
-        model: typing.Optional[Model] = OMIT,
-        request_options: typing.Optional[RequestOptions] = None,
-    ) -> ActResponse:
-        """
-        Parameters
-        ----------
-        instance_id : str
-
-        cmd : str
-
-        include_screenshot : typing.Optional[bool]
-
-        model : typing.Optional[Model]
-
-        request_options : typing.Optional[RequestOptions]
-            Request-specific configuration.
-
-        Returns
-        -------
-        ActResponse
-            Successful Response
-
-        Examples
-        --------
-        import asyncio
-
-        from scrapybara import AsyncScrapybara
-
-        client = AsyncScrapybara(
-            api_key="YOUR_API_KEY",
-        )
-
-
-        async def main() -> None:
-            await client.agent.act(
-                instance_id="instance_id",
-                cmd="cmd",
-            )
-
-
-        asyncio.run(main())
-        """
-        _response = await self._client_wrapper.httpx_client.request(
-            f"v1/instance/{jsonable_encoder(instance_id)}/act",
-            method="POST",
-            json={
-                "cmd": cmd,
-                "include_screenshot": include_screenshot,
-                "model": model,
-            },
-            headers={
-                "content-type": "application/json",
-            },
-            request_options=request_options,
-            omit=OMIT,
-        )
-        try:
-            if 200 <= _response.status_code < 300:
-                return typing.cast(
-                    ActResponse,
-                    parse_obj_as(
-                        type_=ActResponse,  # type: ignore
-                        object_=_response.json(),
-                    ),
-                )
-            if _response.status_code == 422:
-                raise UnprocessableEntityError(
-                    typing.cast(
-                        HttpValidationError,
-                        parse_obj_as(
-                            type_=HttpValidationError,  # type: ignore
-                            object_=_response.json(),
-                        ),
-                    )
-                )
-            _response_json = _response.json()
-        except JSONDecodeError:
-            raise ApiError(status_code=_response.status_code, body=_response.text)
-        raise ApiError(status_code=_response.status_code, body=_response_json)
-
-    async def scrape(
-        self,
-        instance_id: str,
-        *,
-        cmd: str,
-        schema: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
-        include_screenshot: typing.Optional[bool] = OMIT,
-        model: typing.Optional[Model] = OMIT,
-        request_options: typing.Optional[RequestOptions] = None,
-    ) -> ScrapeResponse:
-        """
-        Parameters
-        ----------
-        instance_id : str
-
-        cmd : str
-
-        schema : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
-
-        include_screenshot : typing.Optional[bool]
-
-        model : typing.Optional[Model]
-
-        request_options : typing.Optional[RequestOptions]
-            Request-specific configuration.
-
-        Returns
-        -------
-        ScrapeResponse
-            Successful Response
-
-        Examples
-        --------
-        import asyncio
-
-        from scrapybara import AsyncScrapybara
-
-        client = AsyncScrapybara(
-            api_key="YOUR_API_KEY",
-        )
-
-
-        async def main() -> None:
-            await client.agent.scrape(
-                instance_id="instance_id",
-                cmd="cmd",
-            )
-
-
-        asyncio.run(main())
-        """
-        _response = await self._client_wrapper.httpx_client.request(
-            f"v1/instance/{jsonable_encoder(instance_id)}/scrape",
-            method="POST",
-            json={
-                "cmd": cmd,
-                "schema": schema,
-                "include_screenshot": include_screenshot,
-                "model": model,
-            },
-            headers={
-                "content-type": "application/json",
-            },
-            request_options=request_options,
-            omit=OMIT,
-        )
-        try:
-            if 200 <= _response.status_code < 300:
-                return typing.cast(
-                    ScrapeResponse,
-                    parse_obj_as(
-                        type_=ScrapeResponse,  # type: ignore
-                        object_=_response.json(),
-                    ),
-                )
-            if _response.status_code == 422:
-                raise UnprocessableEntityError(
-                    typing.cast(
-                        HttpValidationError,
-                        parse_obj_as(
-                            type_=HttpValidationError,  # type: ignore
-                            object_=_response.json(),
-                        ),
-                    )
-                )
-            _response_json = _response.json()
-        except JSONDecodeError:
-            raise ApiError(status_code=_response.status_code, body=_response.text)
-        raise ApiError(status_code=_response.status_code, body=_response_json)
diff --git a/src/scrapybara/agent/types/__init__.py b/src/scrapybara/agent/types/__init__.py
deleted file mode 100644
index dce9230..0000000
--- a/src/scrapybara/agent/types/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-from .model import Model
-
-__all__ = ["Model"]
diff --git a/src/scrapybara/agent/types/model.py b/src/scrapybara/agent/types/model.py
deleted file mode 100644
index 5949ec8..0000000
--- a/src/scrapybara/agent/types/model.py
+++ /dev/null
@@ -1,5 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import typing
-
-Model = typing.Union[typing.Literal["claude", "gemini"], typing.Any]
diff --git a/src/scrapybara/base_client.py b/src/scrapybara/base_client.py
index c2e37de..44799b9 100644
--- a/src/scrapybara/base_client.py
+++ b/src/scrapybara/base_client.py
@@ -7,7 +7,6 @@
 from .core.api_error import ApiError
 from .core.client_wrapper import SyncClientWrapper
 from .instance.client import InstanceClient
-from .agent.client import AgentClient
 from .browser.client import BrowserClient
 from .code.client import CodeClient
 from .notebook.client import NotebookClient
@@ -24,7 +23,6 @@
 from .types.auth_state_response import AuthStateResponse
 from .core.client_wrapper import AsyncClientWrapper
 from .instance.client import AsyncInstanceClient
-from .agent.client import AsyncAgentClient
 from .browser.client import AsyncBrowserClient
 from .code.client import AsyncCodeClient
 from .notebook.client import AsyncNotebookClient
@@ -98,7 +96,6 @@ def __init__(
             timeout=_defaulted_timeout,
         )
         self.instance = InstanceClient(client_wrapper=self._client_wrapper)
-        self.agent = AgentClient(client_wrapper=self._client_wrapper)
         self.browser = BrowserClient(client_wrapper=self._client_wrapper)
         self.code = CodeClient(client_wrapper=self._client_wrapper)
         self.notebook = NotebookClient(client_wrapper=self._client_wrapper)
@@ -375,7 +372,6 @@ def __init__(
             timeout=_defaulted_timeout,
         )
         self.instance = AsyncInstanceClient(client_wrapper=self._client_wrapper)
-        self.agent = AsyncAgentClient(client_wrapper=self._client_wrapper)
         self.browser = AsyncBrowserClient(client_wrapper=self._client_wrapper)
         self.code = AsyncCodeClient(client_wrapper=self._client_wrapper)
         self.notebook = AsyncNotebookClient(client_wrapper=self._client_wrapper)
diff --git a/src/scrapybara/core/client_wrapper.py b/src/scrapybara/core/client_wrapper.py
index f59b9a3..fe993b6 100644
--- a/src/scrapybara/core/client_wrapper.py
+++ b/src/scrapybara/core/client_wrapper.py
@@ -16,7 +16,7 @@ def get_headers(self) -> typing.Dict[str, str]:
         headers: typing.Dict[str, str] = {
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "scrapybara",
-            "X-Fern-SDK-Version": "2.0.7",
+            "X-Fern-SDK-Version": "2.1.0",
         }
         headers["x-api-key"] = self.api_key
         return headers
diff --git a/src/scrapybara/types/__init__.py b/src/scrapybara/types/__init__.py
index b83f133..40bc910 100644
--- a/src/scrapybara/types/__init__.py
+++ b/src/scrapybara/types/__init__.py
@@ -1,6 +1,5 @@
 # This file was auto-generated by Fern from our API Definition.
 
-from .act_response import ActResponse
 from .auth_state_response import AuthStateResponse
 from .browser_authenticate_response import BrowserAuthenticateResponse
 from .browser_get_cdp_url_response import BrowserGetCdpUrlResponse
@@ -19,7 +18,6 @@
 from .notebook import Notebook
 from .notebook_cell import NotebookCell
 from .save_browser_auth_response import SaveBrowserAuthResponse
-from .scrape_response import ScrapeResponse
 from .start_browser_response import StartBrowserResponse
 from .status import Status
 from .stop_browser_response import StopBrowserResponse
@@ -28,7 +26,6 @@
 from .validation_error_loc_item import ValidationErrorLocItem
 
 __all__ = [
-    "ActResponse",
     "AuthStateResponse",
     "BrowserAuthenticateResponse",
     "BrowserGetCdpUrlResponse",
@@ -47,7 +44,6 @@
     "Notebook",
     "NotebookCell",
     "SaveBrowserAuthResponse",
-    "ScrapeResponse",
     "StartBrowserResponse",
     "Status",
     "StopBrowserResponse",
diff --git a/src/scrapybara/types/act_response.py b/src/scrapybara/types/act_response.py
deleted file mode 100644
index f808f43..0000000
--- a/src/scrapybara/types/act_response.py
+++ /dev/null
@@ -1,20 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-from ..core.pydantic_utilities import UniversalBaseModel
-import typing
-from ..core.pydantic_utilities import IS_PYDANTIC_V2
-import pydantic
-
-
-class ActResponse(UniversalBaseModel):
-    output: str
-    screenshot: typing.Optional[str] = None
-
-    if IS_PYDANTIC_V2:
-        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
-    else:
-
-        class Config:
-            frozen = True
-            smart_union = True
-            extra = pydantic.Extra.allow
diff --git a/src/scrapybara/types/scrape_response.py b/src/scrapybara/types/scrape_response.py
deleted file mode 100644
index 1b4799d..0000000
--- a/src/scrapybara/types/scrape_response.py
+++ /dev/null
@@ -1,20 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-from ..core.pydantic_utilities import UniversalBaseModel
-import typing
-from ..core.pydantic_utilities import IS_PYDANTIC_V2
-import pydantic
-
-
-class ScrapeResponse(UniversalBaseModel):
-    data: typing.Dict[str, typing.Optional[typing.Any]]
-    screenshot: typing.Optional[str] = None
-
-    if IS_PYDANTIC_V2:
-        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
-    else:
-
-        class Config:
-            frozen = True
-            smart_union = True
-            extra = pydantic.Extra.allow