From a852b7a67d5950a575a755498333ec403687204f Mon Sep 17 00:00:00 2001
From: fern-api <115122769+fern-api[bot]@users.noreply.github.com>
Date: Sun, 29 Dec 2024 05:43:57 +0000
Subject: [PATCH 1/2] SDK regeneration
---
pyproject.toml | 2 +-
reference.md | 124 +++++++++++-
src/scrapybara/__init__.py | 2 +
src/scrapybara/agent/__init__.py | 3 +
src/scrapybara/agent/client.py | 17 +-
src/scrapybara/agent/types/__init__.py | 5 +
src/scrapybara/agent/types/model.py | 5 +
src/scrapybara/base_client.py | 8 +-
src/scrapybara/core/client_wrapper.py | 20 +-
src/scrapybara/instance/client.py | 259 +++++++++++++++++++++++++
10 files changed, 414 insertions(+), 31 deletions(-)
create mode 100644 src/scrapybara/agent/types/__init__.py
create mode 100644 src/scrapybara/agent/types/model.py
diff --git a/pyproject.toml b/pyproject.toml
index b25bc8e..2dab782 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "scrapybara"
-version = "2.0.5"
+version = "2.0.6"
description = ""
readme = "README.md"
authors = []
diff --git a/reference.md b/reference.md
index 3854076..9b3ce7a 100644
--- a/reference.md
+++ b/reference.md
@@ -549,6 +549,126 @@ client.instance.stop(
+client.instance.pause(...)
+
+#### 🔌 Usage
+
+```python
+from scrapybara import Scrapybara
+
+client = Scrapybara(
+    api_key="YOUR_API_KEY",
+)
+client.instance.pause(
+    instance_id="instance_id",
+)
+
+```
+
+#### ⚙️ Parameters
+
+**instance_id:** `str`
+
+**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
+
+client.instance.resume(...)
+
+#### 🔌 Usage
+
+```python
+from scrapybara import Scrapybara
+
+client = Scrapybara(
+    api_key="YOUR_API_KEY",
+)
+client.instance.resume(
+    instance_id="instance_id",
+)
+
+```
+
+#### ⚙️ Parameters
+
+**instance_id:** `str`
+
+**timeout_hours:** `typing.Optional[float]`
+
+**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
@@ -615,7 +735,7 @@ client.agent.act(
-**model:** `typing.Optional[typing.Literal["claude"]]`
+**model:** `typing.Optional[Model]`
@@ -704,7 +824,7 @@ client.agent.scrape(
-**model:** `typing.Optional[typing.Literal["claude"]]`
+**model:** `typing.Optional[Model]`
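
The two new reference entries are complementary: `pause` suspends a running instance and `resume` brings it back, optionally extending its lifetime via `timeout_hours`. A minimal sketch combining both, assuming a real instance ID from a previously started instance (the `timeout_hours` value below is illustrative, not a documented default):

```python
from scrapybara import Scrapybara

client = Scrapybara(
    api_key="YOUR_API_KEY",
)

# Suspend the instance; like stop(), this returns a StopInstanceResponse.
client.instance.pause(
    instance_id="instance_id",
)

# Later, bring it back; timeout_hours=1 is an illustrative value.
client.instance.resume(
    instance_id="instance_id",
    timeout_hours=1,
)
```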
diff --git a/src/scrapybara/__init__.py b/src/scrapybara/__init__.py
index c8c3b9d..7322978 100644
--- a/src/scrapybara/__init__.py
+++ b/src/scrapybara/__init__.py
@@ -28,6 +28,7 @@
)
from .errors import UnprocessableEntityError
from . import agent, browser, code, env, file, instance, notebook
+from .agent import Model
from .client import AsyncScrapybara, Scrapybara
from .environment import ScrapybaraEnvironment
from .instance import Action, Command
@@ -52,6 +53,7 @@
"InstanceGetStreamUrlResponse",
"InstanceScreenshotResponse",
"KernelInfo",
+ "Model",
"Notebook",
"NotebookCell",
"ScrapeResponse",
diff --git a/src/scrapybara/agent/__init__.py b/src/scrapybara/agent/__init__.py
index f3ea265..b1f769f 100644
--- a/src/scrapybara/agent/__init__.py
+++ b/src/scrapybara/agent/__init__.py
@@ -1,2 +1,5 @@
# This file was auto-generated by Fern from our API Definition.
+from .types import Model
+
+__all__ = ["Model"]
diff --git a/src/scrapybara/agent/client.py b/src/scrapybara/agent/client.py
index 73b3e70..fb3ac5f 100644
--- a/src/scrapybara/agent/client.py
+++ b/src/scrapybara/agent/client.py
@@ -2,6 +2,7 @@
import typing
from ..core.client_wrapper import SyncClientWrapper
+from .types.model import Model
from ..core.request_options import RequestOptions
from ..types.act_response import ActResponse
from ..core.jsonable_encoder import jsonable_encoder
@@ -27,7 +28,7 @@ def act(
*,
cmd: str,
include_screenshot: typing.Optional[bool] = OMIT,
- model: typing.Optional[typing.Literal["claude"]] = OMIT,
+ model: typing.Optional[Model] = OMIT,
request_options: typing.Optional[RequestOptions] = None,
) -> ActResponse:
"""
@@ -39,7 +40,7 @@ def act(
include_screenshot : typing.Optional[bool]
- model : typing.Optional[typing.Literal["claude"]]
+ model : typing.Optional[Model]
request_options : typing.Optional[RequestOptions]
Request-specific configuration.
@@ -106,7 +107,7 @@ def scrape(
cmd: str,
schema: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
include_screenshot: typing.Optional[bool] = OMIT,
- model: typing.Optional[typing.Literal["claude"]] = OMIT,
+ model: typing.Optional[Model] = OMIT,
request_options: typing.Optional[RequestOptions] = None,
) -> ScrapeResponse:
"""
@@ -120,7 +121,7 @@ def scrape(
include_screenshot : typing.Optional[bool]
- model : typing.Optional[typing.Literal["claude"]]
+ model : typing.Optional[Model]
request_options : typing.Optional[RequestOptions]
Request-specific configuration.
@@ -192,7 +193,7 @@ async def act(
*,
cmd: str,
include_screenshot: typing.Optional[bool] = OMIT,
- model: typing.Optional[typing.Literal["claude"]] = OMIT,
+ model: typing.Optional[Model] = OMIT,
request_options: typing.Optional[RequestOptions] = None,
) -> ActResponse:
"""
@@ -204,7 +205,7 @@ async def act(
include_screenshot : typing.Optional[bool]
- model : typing.Optional[typing.Literal["claude"]]
+ model : typing.Optional[Model]
request_options : typing.Optional[RequestOptions]
Request-specific configuration.
@@ -279,7 +280,7 @@ async def scrape(
cmd: str,
schema: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
include_screenshot: typing.Optional[bool] = OMIT,
- model: typing.Optional[typing.Literal["claude"]] = OMIT,
+ model: typing.Optional[Model] = OMIT,
request_options: typing.Optional[RequestOptions] = None,
) -> ScrapeResponse:
"""
@@ -293,7 +294,7 @@ async def scrape(
include_screenshot : typing.Optional[bool]
- model : typing.Optional[typing.Literal["claude"]]
+ model : typing.Optional[Model]
request_options : typing.Optional[RequestOptions]
Request-specific configuration.
diff --git a/src/scrapybara/agent/types/__init__.py b/src/scrapybara/agent/types/__init__.py
new file mode 100644
index 0000000..dce9230
--- /dev/null
+++ b/src/scrapybara/agent/types/__init__.py
@@ -0,0 +1,5 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from .model import Model
+
+__all__ = ["Model"]
diff --git a/src/scrapybara/agent/types/model.py b/src/scrapybara/agent/types/model.py
new file mode 100644
index 0000000..5949ec8
--- /dev/null
+++ b/src/scrapybara/agent/types/model.py
@@ -0,0 +1,5 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+Model = typing.Union[typing.Literal["claude", "gemini"], typing.Any]
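
The generated `Model` alias follows the usual Fern pattern of unioning the known literals with `typing.Any`, so type-checkers autocomplete the documented values while the SDK still accepts strings the server may add later. A sketch of what that buys callers (the `"gpt"` value below is hypothetical, not part of this patch):

```python
import typing

# Mirrors src/scrapybara/agent/types/model.py from the diff above.
Model = typing.Union[typing.Literal["claude", "gemini"], typing.Any]

def act(*, model: typing.Optional[Model] = None) -> None:
    """Stand-in for agent.act(); only the annotation matters here."""

act(model="claude")  # documented literal
act(model="gemini")  # newly added in this patch
act(model="gpt")     # hypothetical future value; still accepted via Any
```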
diff --git a/src/scrapybara/base_client.py b/src/scrapybara/base_client.py
index 6f8ca07..e6ee261 100644
--- a/src/scrapybara/base_client.py
+++ b/src/scrapybara/base_client.py
@@ -54,7 +54,7 @@ class BaseClient:
api_key : typing.Optional[str]
timeout : typing.Optional[float]
- The timeout to be used, in seconds, for requests. By default the timeout is 60 seconds, unless a custom httpx client is used, in which case this default is not enforced.
+ The timeout to be used, in seconds, for requests. By default the timeout is 600 seconds, unless a custom httpx client is used, in which case this default is not enforced.
follow_redirects : typing.Optional[bool]
Whether the default httpx client follows redirects or not, this is irrelevant if a custom httpx client is passed in.
@@ -81,7 +81,7 @@ def __init__(
follow_redirects: typing.Optional[bool] = True,
httpx_client: typing.Optional[httpx.Client] = None,
):
- _defaulted_timeout = timeout if timeout is not None else 60 if httpx_client is None else None
+ _defaulted_timeout = timeout if timeout is not None else 600 if httpx_client is None else None
if api_key is None:
raise ApiError(
body="The client must be instantiated be either passing in api_key or setting SCRAPYBARA_API_KEY"
@@ -247,7 +247,7 @@ class AsyncBaseClient:
api_key : typing.Optional[str]
timeout : typing.Optional[float]
- The timeout to be used, in seconds, for requests. By default the timeout is 60 seconds, unless a custom httpx client is used, in which case this default is not enforced.
+ The timeout to be used, in seconds, for requests. By default the timeout is 600 seconds, unless a custom httpx client is used, in which case this default is not enforced.
follow_redirects : typing.Optional[bool]
Whether the default httpx client follows redirects or not, this is irrelevant if a custom httpx client is passed in.
@@ -274,7 +274,7 @@ def __init__(
follow_redirects: typing.Optional[bool] = True,
httpx_client: typing.Optional[httpx.AsyncClient] = None,
):
- _defaulted_timeout = timeout if timeout is not None else 60 if httpx_client is None else None
+ _defaulted_timeout = timeout if timeout is not None else 600 if httpx_client is None else None
if api_key is None:
raise ApiError(
body="The client must be instantiated be either passing in api_key or setting SCRAPYBARA_API_KEY"
diff --git a/src/scrapybara/core/client_wrapper.py b/src/scrapybara/core/client_wrapper.py
index b9a9d26..583a197 100644
--- a/src/scrapybara/core/client_wrapper.py
+++ b/src/scrapybara/core/client_wrapper.py
@@ -7,9 +7,7 @@
class BaseClientWrapper:
- def __init__(
- self, *, api_key: str, base_url: str, timeout: typing.Optional[float] = None
- ):
+ def __init__(self, *, api_key: str, base_url: str, timeout: typing.Optional[float] = None):
self.api_key = api_key
self._base_url = base_url
self._timeout = timeout
@@ -18,7 +16,7 @@ def get_headers(self) -> typing.Dict[str, str]:
headers: typing.Dict[str, str] = {
"X-Fern-Language": "Python",
"X-Fern-SDK-Name": "scrapybara",
- "X-Fern-SDK-Version": "2.0.5",
+ "X-Fern-SDK-Version": "2.0.6",
}
headers["x-api-key"] = self.api_key
return headers
@@ -32,12 +30,7 @@ def get_timeout(self) -> typing.Optional[float]:
class SyncClientWrapper(BaseClientWrapper):
def __init__(
- self,
- *,
- api_key: str,
- base_url: str,
- timeout: typing.Optional[float] = None,
- httpx_client: httpx.Client
+ self, *, api_key: str, base_url: str, timeout: typing.Optional[float] = None, httpx_client: httpx.Client
):
super().__init__(api_key=api_key, base_url=base_url, timeout=timeout)
self.httpx_client = HttpClient(
@@ -50,12 +43,7 @@ def __init__(
class AsyncClientWrapper(BaseClientWrapper):
def __init__(
- self,
- *,
- api_key: str,
- base_url: str,
- timeout: typing.Optional[float] = None,
- httpx_client: httpx.AsyncClient
+ self, *, api_key: str, base_url: str, timeout: typing.Optional[float] = None, httpx_client: httpx.AsyncClient
):
super().__init__(api_key=api_key, base_url=base_url, timeout=timeout)
self.httpx_client = AsyncHttpClient(
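
In this file the only functional change is the SDK version string moving to 2.0.6; the signature edits are formatting only. For reference, a request after this patch carries headers shaped like the following (the key is a placeholder), mirroring `BaseClientWrapper.get_headers()`:

```python
# Shape of the headers built by BaseClientWrapper.get_headers()
# after the version bump; "YOUR_API_KEY" is a placeholder.
headers = {
    "X-Fern-Language": "Python",
    "X-Fern-SDK-Name": "scrapybara",
    "X-Fern-SDK-Version": "2.0.6",
    "x-api-key": "YOUR_API_KEY",
}
```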
diff --git a/src/scrapybara/instance/client.py b/src/scrapybara/instance/client.py
index 41b6916..17ed202 100644
--- a/src/scrapybara/instance/client.py
+++ b/src/scrapybara/instance/client.py
@@ -14,6 +14,7 @@
from .types.action import Action
from .types.command import Command
from ..types.stop_instance_response import StopInstanceResponse
+from ..types.get_instance_response import GetInstanceResponse
from ..core.client_wrapper import AsyncClientWrapper
# this is used as the default value for optional parameters
@@ -438,6 +439,127 @@ def stop(
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
+ def pause(
+ self, instance_id: str, *, request_options: typing.Optional[RequestOptions] = None
+ ) -> StopInstanceResponse:
+ """
+ Parameters
+ ----------
+ instance_id : str
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ StopInstanceResponse
+ Successful Response
+
+ Examples
+ --------
+ from scrapybara import Scrapybara
+
+ client = Scrapybara(
+ api_key="YOUR_API_KEY",
+ )
+ client.instance.pause(
+ instance_id="instance_id",
+ )
+ """
+ _response = self._client_wrapper.httpx_client.request(
+ f"v1/instance/{jsonable_encoder(instance_id)}/pause",
+ method="POST",
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ return typing.cast(
+ StopInstanceResponse,
+ parse_obj_as(
+ type_=StopInstanceResponse, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(
+ typing.cast(
+ HttpValidationError,
+ parse_obj_as(
+ type_=HttpValidationError, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
+ def resume(
+ self,
+ instance_id: str,
+ *,
+ timeout_hours: typing.Optional[float] = None,
+ request_options: typing.Optional[RequestOptions] = None,
+ ) -> GetInstanceResponse:
+ """
+ Parameters
+ ----------
+ instance_id : str
+
+ timeout_hours : typing.Optional[float]
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ GetInstanceResponse
+ Successful Response
+
+ Examples
+ --------
+ from scrapybara import Scrapybara
+
+ client = Scrapybara(
+ api_key="YOUR_API_KEY",
+ )
+ client.instance.resume(
+ instance_id="instance_id",
+ )
+ """
+ _response = self._client_wrapper.httpx_client.request(
+ f"v1/instance/{jsonable_encoder(instance_id)}/resume",
+ method="POST",
+ params={
+ "timeout_hours": timeout_hours,
+ },
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ return typing.cast(
+ GetInstanceResponse,
+ parse_obj_as(
+ type_=GetInstanceResponse, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(
+ typing.cast(
+ HttpValidationError,
+ parse_obj_as(
+ type_=HttpValidationError, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
class AsyncInstanceClient:
def __init__(self, *, client_wrapper: AsyncClientWrapper):
@@ -904,3 +1026,140 @@ async def main() -> None:
except JSONDecodeError:
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
+
+ async def pause(
+ self, instance_id: str, *, request_options: typing.Optional[RequestOptions] = None
+ ) -> StopInstanceResponse:
+ """
+ Parameters
+ ----------
+ instance_id : str
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ StopInstanceResponse
+ Successful Response
+
+ Examples
+ --------
+ import asyncio
+
+ from scrapybara import AsyncScrapybara
+
+ client = AsyncScrapybara(
+ api_key="YOUR_API_KEY",
+ )
+
+
+ async def main() -> None:
+ await client.instance.pause(
+ instance_id="instance_id",
+ )
+
+
+ asyncio.run(main())
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ f"v1/instance/{jsonable_encoder(instance_id)}/pause",
+ method="POST",
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ return typing.cast(
+ StopInstanceResponse,
+ parse_obj_as(
+ type_=StopInstanceResponse, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(
+ typing.cast(
+ HttpValidationError,
+ parse_obj_as(
+ type_=HttpValidationError, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
+ async def resume(
+ self,
+ instance_id: str,
+ *,
+ timeout_hours: typing.Optional[float] = None,
+ request_options: typing.Optional[RequestOptions] = None,
+ ) -> GetInstanceResponse:
+ """
+ Parameters
+ ----------
+ instance_id : str
+
+ timeout_hours : typing.Optional[float]
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ GetInstanceResponse
+ Successful Response
+
+ Examples
+ --------
+ import asyncio
+
+ from scrapybara import AsyncScrapybara
+
+ client = AsyncScrapybara(
+ api_key="YOUR_API_KEY",
+ )
+
+
+ async def main() -> None:
+ await client.instance.resume(
+ instance_id="instance_id",
+ )
+
+
+ asyncio.run(main())
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ f"v1/instance/{jsonable_encoder(instance_id)}/resume",
+ method="POST",
+ params={
+ "timeout_hours": timeout_hours,
+ },
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ return typing.cast(
+ GetInstanceResponse,
+ parse_obj_as(
+ type_=GetInstanceResponse, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(
+ typing.cast(
+ HttpValidationError,
+ parse_obj_as(
+ type_=HttpValidationError, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
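
Both new endpoints surface 422 responses as `UnprocessableEntityError` and everything else as `ApiError`, matching the rest of the generated client. A defensive sketch of the sync round trip, assuming Fern's usual `ApiError.body` attribute and a real instance ID:

```python
from scrapybara import Scrapybara
from scrapybara.errors import UnprocessableEntityError

client = Scrapybara(api_key="YOUR_API_KEY")

try:
    # POST v1/instance/{id}/pause -> StopInstanceResponse
    client.instance.pause(instance_id="instance_id")
    # POST v1/instance/{id}/resume -> GetInstanceResponse;
    # timeout_hours is sent as a query parameter when provided.
    info = client.instance.resume(instance_id="instance_id", timeout_hours=2)
except UnprocessableEntityError as exc:
    # 422s carry an HttpValidationError payload.
    print("validation failed:", exc.body)
```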
From a7dc0270319711a58640512586ffaafba888b700 Mon Sep 17 00:00:00 2001
From: Justin Sun
Date: Sat, 28 Dec 2024 21:53:38 -0800
Subject: [PATCH 2/2] update model enum + add instance pause/resume
---
src/scrapybara/client.py | 66 ++++++++++++++++++++++++++++------------
1 file changed, 46 insertions(+), 20 deletions(-)
diff --git a/src/scrapybara/client.py b/src/scrapybara/client.py
index 334c8b6..9a750a9 100644
--- a/src/scrapybara/client.py
+++ b/src/scrapybara/client.py
@@ -6,6 +6,7 @@
import typing
from pydantic import BaseModel, ValidationError
+from scrapybara.agent.types.model import Model
from scrapybara.environment import ScrapybaraEnvironment
from .core.request_options import RequestOptions
from .types import (
@@ -18,6 +19,7 @@
EnvResponse,
FileDownloadResponse,
FileReadResponse,
+ GetInstanceResponse,
InstanceGetStreamUrlResponse,
InstanceScreenshotResponse,
KernelInfo,
@@ -46,7 +48,7 @@ def act(
*,
cmd: str,
include_screenshot: Optional[bool] = OMIT,
- model: Optional[Literal["claude"]] = OMIT,
+ model: Optional[Model] = OMIT,
request_options: Optional[RequestOptions] = None,
) -> ActResponse:
return self._client.agent.act(
@@ -63,7 +65,7 @@ def scrape(
cmd: str,
schema: Optional[Dict[str, Optional[Any]]] = OMIT,
include_screenshot: Optional[bool] = OMIT,
- model: Optional[Literal["claude"]] = OMIT,
+ model: Optional[Model] = OMIT,
request_options: Optional[RequestOptions] = None,
) -> ScrapeResponse:
return self._client.agent.scrape(
@@ -78,17 +80,11 @@ def scrape(
def scrape_to_pydantic(
self,
*,
- cmd: typing.Optional[str] = OMIT,
+ cmd: str,
schema: PydanticModelT,
- model: typing.Optional[typing.Literal["claude"]] = OMIT,
+ model: typing.Optional[Model] = OMIT,
request_options: typing.Optional[RequestOptions] = None,
) -> PydanticModelT:
- cmd = cmd if cmd else (str(schema.__doc__) if schema.__doc__ else None)
- if cmd is None:
- raise ValueError(
- "No command provided, please provide a 'cmd' parameter or docstring in schema class."
- )
-
response = self._client.agent.scrape(
self.instance_id,
cmd=cmd,
@@ -113,7 +109,7 @@ async def act(
*,
cmd: str,
include_screenshot: Optional[bool] = OMIT,
- model: Optional[Literal["claude"]] = OMIT,
+ model: Optional[Model] = OMIT,
request_options: Optional[RequestOptions] = None,
) -> ActResponse:
return await self._client.agent.act(
@@ -130,7 +126,7 @@ async def scrape(
cmd: str,
schema: Optional[Dict[str, Optional[Any]]] = OMIT,
include_screenshot: Optional[bool] = OMIT,
- model: Optional[Literal["claude"]] = OMIT,
+ model: Optional[Model] = OMIT,
request_options: Optional[RequestOptions] = None,
) -> ScrapeResponse:
return await self._client.agent.scrape(
@@ -145,17 +141,11 @@ async def scrape(
async def scrape_to_pydantic(
self,
*,
- cmd: typing.Optional[str] = OMIT,
+ cmd: str,
schema: PydanticModelT,
- model: typing.Optional[typing.Literal["claude"]] = OMIT,
+ model: typing.Optional[Model] = OMIT,
request_options: typing.Optional[RequestOptions] = None,
) -> PydanticModelT:
- cmd = cmd if cmd else (str(schema.__doc__) if schema.__doc__ else None)
- if cmd is None:
- raise ValueError(
- "No command provided, please provide a 'cmd' parameter or docstring in schema class."
- )
-
response = await self._client.agent.scrape(
self.instance_id,
cmd=cmd,
@@ -721,6 +711,23 @@ def stop(
) -> StopInstanceResponse:
return self._client.instance.stop(self.id, request_options=request_options)
+ def pause(
+ self, request_options: Optional[RequestOptions] = None
+ ) -> StopInstanceResponse:
+ return self._client.instance.pause(self.id, request_options=request_options)
+
+ def resume(
+ self,
+ *,
+ timeout_hours: Optional[float] = None,
+ request_options: Optional[RequestOptions] = None,
+ ) -> GetInstanceResponse:
+ return self._client.instance.resume(
+ self.id,
+ timeout_hours=timeout_hours,
+ request_options=request_options,
+ )
+
class AsyncInstance:
def __init__(
@@ -815,6 +822,25 @@ async def stop(
self.id, request_options=request_options
)
+ async def pause(
+ self, request_options: Optional[RequestOptions] = None
+ ) -> StopInstanceResponse:
+ return await self._client.instance.pause(
+ self.id, request_options=request_options
+ )
+
+ async def resume(
+ self,
+ *,
+ timeout_hours: Optional[float] = None,
+ request_options: Optional[RequestOptions] = None,
+ ) -> GetInstanceResponse:
+ return await self._client.instance.resume(
+ self.id,
+ timeout_hours=timeout_hours,
+ request_options=request_options,
+ )
+
class Scrapybara:
def __init__(
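
With the second patch applied, the same operations hang off the high-level `Instance` wrapper, which closes over its own ID. A sketch of the ergonomic path — how the `instance` object is obtained is outside this diff, so `client.start()` here is a hypothetical stand-in:

```python
from scrapybara import Scrapybara

client = Scrapybara(api_key="YOUR_API_KEY")

# Hypothetical: acquire a wrapped Instance; the start call itself
# is not part of this patch.
instance = client.start()

instance.pause()                  # -> StopInstanceResponse
instance.resume(timeout_hours=1)  # -> GetInstanceResponse
instance.stop()                   # -> StopInstanceResponse
```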