Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "scrapybara"
version = "2.0.5"
version = "2.0.6"
description = ""
readme = "README.md"
authors = []
Expand Down
124 changes: 122 additions & 2 deletions reference.md
Original file line number Diff line number Diff line change
Expand Up @@ -549,6 +549,126 @@ client.instance.stop(
</dl>


</dd>
</dl>
</details>

<details><summary><code>client.instance.<a href="src/scrapybara/instance/client.py">pause</a>(...)</code></summary>
<dl>
<dd>

#### 🔌 Usage

<dl>
<dd>

<dl>
<dd>

```python
from scrapybara import Scrapybara

client = Scrapybara(
api_key="YOUR_API_KEY",
)
client.instance.pause(
instance_id="instance_id",
)

```
</dd>
</dl>
</dd>
</dl>

#### ⚙️ Parameters

<dl>
<dd>

<dl>
<dd>

**instance_id:** `str`

</dd>
</dl>

<dl>
<dd>

**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.

</dd>
</dl>
</dd>
</dl>


</dd>
</dl>
</details>

<details><summary><code>client.instance.<a href="src/scrapybara/instance/client.py">resume</a>(...)</code></summary>
<dl>
<dd>

#### 🔌 Usage

<dl>
<dd>

<dl>
<dd>

```python
from scrapybara import Scrapybara

client = Scrapybara(
api_key="YOUR_API_KEY",
)
client.instance.resume(
instance_id="instance_id",
)

```
</dd>
</dl>
</dd>
</dl>

#### ⚙️ Parameters

<dl>
<dd>

<dl>
<dd>

**instance_id:** `str`

</dd>
</dl>

<dl>
<dd>

**timeout_hours:** `typing.Optional[float]`

</dd>
</dl>

<dl>
<dd>

**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.

</dd>
</dl>
</dd>
</dl>


</dd>
</dl>
</details>
Expand Down Expand Up @@ -615,7 +735,7 @@ client.agent.act(
<dl>
<dd>

**model:** `typing.Optional[typing.Literal["claude"]]`
**model:** `typing.Optional[Model]`

</dd>
</dl>
Expand Down Expand Up @@ -704,7 +824,7 @@ client.agent.scrape(
<dl>
<dd>

**model:** `typing.Optional[typing.Literal["claude"]]`
**model:** `typing.Optional[Model]`

</dd>
</dl>
Expand Down
2 changes: 2 additions & 0 deletions src/scrapybara/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
)
from .errors import UnprocessableEntityError
from . import agent, browser, code, env, file, instance, notebook
from .agent import Model
from .client import AsyncScrapybara, Scrapybara
from .environment import ScrapybaraEnvironment
from .instance import Action, Command
Expand All @@ -52,6 +53,7 @@
"InstanceGetStreamUrlResponse",
"InstanceScreenshotResponse",
"KernelInfo",
"Model",
"Notebook",
"NotebookCell",
"ScrapeResponse",
Expand Down
3 changes: 3 additions & 0 deletions src/scrapybara/agent/__init__.py
Original file line number Diff line number Diff line change
@@ -1,2 +1,5 @@
# This file was auto-generated by Fern from our API Definition.

# Re-export the Model type alias so callers can import it as
# scrapybara.agent.Model (the package root re-exports it in turn).
from .types import Model

__all__ = ["Model"]
17 changes: 9 additions & 8 deletions src/scrapybara/agent/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

import typing
from ..core.client_wrapper import SyncClientWrapper
from .types.model import Model
from ..core.request_options import RequestOptions
from ..types.act_response import ActResponse
from ..core.jsonable_encoder import jsonable_encoder
Expand All @@ -27,7 +28,7 @@ def act(
*,
cmd: str,
include_screenshot: typing.Optional[bool] = OMIT,
model: typing.Optional[typing.Literal["claude"]] = OMIT,
model: typing.Optional[Model] = OMIT,
request_options: typing.Optional[RequestOptions] = None,
) -> ActResponse:
"""
Expand All @@ -39,7 +40,7 @@ def act(

include_screenshot : typing.Optional[bool]

model : typing.Optional[typing.Literal["claude"]]
model : typing.Optional[Model]

request_options : typing.Optional[RequestOptions]
Request-specific configuration.
Expand Down Expand Up @@ -106,7 +107,7 @@ def scrape(
cmd: str,
schema: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
include_screenshot: typing.Optional[bool] = OMIT,
model: typing.Optional[typing.Literal["claude"]] = OMIT,
model: typing.Optional[Model] = OMIT,
request_options: typing.Optional[RequestOptions] = None,
) -> ScrapeResponse:
"""
Expand All @@ -120,7 +121,7 @@ def scrape(

include_screenshot : typing.Optional[bool]

model : typing.Optional[typing.Literal["claude"]]
model : typing.Optional[Model]

request_options : typing.Optional[RequestOptions]
Request-specific configuration.
Expand Down Expand Up @@ -192,7 +193,7 @@ async def act(
*,
cmd: str,
include_screenshot: typing.Optional[bool] = OMIT,
model: typing.Optional[typing.Literal["claude"]] = OMIT,
model: typing.Optional[Model] = OMIT,
request_options: typing.Optional[RequestOptions] = None,
) -> ActResponse:
"""
Expand All @@ -204,7 +205,7 @@ async def act(

include_screenshot : typing.Optional[bool]

model : typing.Optional[typing.Literal["claude"]]
model : typing.Optional[Model]

request_options : typing.Optional[RequestOptions]
Request-specific configuration.
Expand Down Expand Up @@ -279,7 +280,7 @@ async def scrape(
cmd: str,
schema: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
include_screenshot: typing.Optional[bool] = OMIT,
model: typing.Optional[typing.Literal["claude"]] = OMIT,
model: typing.Optional[Model] = OMIT,
request_options: typing.Optional[RequestOptions] = None,
) -> ScrapeResponse:
"""
Expand All @@ -293,7 +294,7 @@ async def scrape(

include_screenshot : typing.Optional[bool]

model : typing.Optional[typing.Literal["claude"]]
model : typing.Optional[Model]

request_options : typing.Optional[RequestOptions]
Request-specific configuration.
Expand Down
5 changes: 5 additions & 0 deletions src/scrapybara/agent/types/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
# This file was auto-generated by Fern from our API Definition.

# Re-export the Model alias as the public surface of the types subpackage.
from .model import Model

__all__ = ["Model"]
5 changes: 5 additions & 0 deletions src/scrapybara/agent/types/model.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
# This file was auto-generated by Fern from our API Definition.

import typing

# Known agent model identifiers are "claude" and "gemini"; the union with
# typing.Any deliberately keeps the alias open to other string values —
# presumably so newer server-side models don't fail client-side type checks.
Model = typing.Union[typing.Literal["claude", "gemini"], typing.Any]
8 changes: 4 additions & 4 deletions src/scrapybara/base_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ class BaseClient:

api_key : typing.Optional[str]
timeout : typing.Optional[float]
The timeout to be used, in seconds, for requests. By default the timeout is 60 seconds, unless a custom httpx client is used, in which case this default is not enforced.
The timeout to be used, in seconds, for requests. By default the timeout is 600 seconds, unless a custom httpx client is used, in which case this default is not enforced.

follow_redirects : typing.Optional[bool]
Whether the default httpx client follows redirects or not, this is irrelevant if a custom httpx client is passed in.
Expand All @@ -81,7 +81,7 @@ def __init__(
follow_redirects: typing.Optional[bool] = True,
httpx_client: typing.Optional[httpx.Client] = None,
):
_defaulted_timeout = timeout if timeout is not None else 60 if httpx_client is None else None
_defaulted_timeout = timeout if timeout is not None else 600 if httpx_client is None else None
if api_key is None:
raise ApiError(
body="The client must be instantiated be either passing in api_key or setting SCRAPYBARA_API_KEY"
Expand Down Expand Up @@ -247,7 +247,7 @@ class AsyncBaseClient:

api_key : typing.Optional[str]
timeout : typing.Optional[float]
The timeout to be used, in seconds, for requests. By default the timeout is 60 seconds, unless a custom httpx client is used, in which case this default is not enforced.
The timeout to be used, in seconds, for requests. By default the timeout is 600 seconds, unless a custom httpx client is used, in which case this default is not enforced.

follow_redirects : typing.Optional[bool]
Whether the default httpx client follows redirects or not, this is irrelevant if a custom httpx client is passed in.
Expand All @@ -274,7 +274,7 @@ def __init__(
follow_redirects: typing.Optional[bool] = True,
httpx_client: typing.Optional[httpx.AsyncClient] = None,
):
_defaulted_timeout = timeout if timeout is not None else 60 if httpx_client is None else None
_defaulted_timeout = timeout if timeout is not None else 600 if httpx_client is None else None
if api_key is None:
raise ApiError(
body="The client must be instantiated be either passing in api_key or setting SCRAPYBARA_API_KEY"
Expand Down
Loading