Skip to content

Commit

Permalink
Remove client.chat resource (#105)
Browse files Browse the repository at this point in the history
The chat resource was always intended to be a temporary convenience; we are now removing it
from the ``Arcade`` and ``AsyncArcade`` clients.
  • Loading branch information
Spartee authored Oct 11, 2024
1 parent 6b716d6 commit ff092ac
Show file tree
Hide file tree
Showing 6 changed files with 30 additions and 38 deletions.
9 changes: 6 additions & 3 deletions arcade/arcade/cli/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
from urllib.parse import urlencode

import typer
from openai import OpenAIError
from openai import OpenAI, OpenAIError
from rich.console import Console
from rich.markup import escape
from rich.text import Text
Expand Down Expand Up @@ -256,7 +256,10 @@ def chat(
history.append({"role": "user", "content": user_input})

try:
chat_result = handle_chat_interaction(client, model, history, user_email, stream)
openai_client = OpenAI(api_key=config.api.key, base_url=config.engine_url)
chat_result = handle_chat_interaction(
openai_client, model, history, user_email, stream
)
except OpenAIError as e:
console.print(f"❌ Arcade Chat failed with error: {e!s}", style="bold red")
continue
Expand All @@ -273,7 +276,7 @@ def chat(
try:
history.pop()
chat_result = handle_chat_interaction(
client, model, history, user_email, stream
openai_client, model, history, user_email, stream
)
except OpenAIError as e:
console.print(f"❌ Arcade Chat failed with error: {e!s}", style="bold red")
Expand Down
3 changes: 2 additions & 1 deletion arcade/arcade/cli/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
from typing import Callable, Union

import typer
from openai import OpenAI
from openai.resources.chat.completions import ChatCompletionChunk, Stream
from openai.types.chat.chat_completion import Choice as ChatCompletionChoice
from openai.types.chat.chat_completion_chunk import Choice as ChatCompletionChunkChoice
Expand Down Expand Up @@ -247,7 +248,7 @@ class ChatInteractionResult:


def handle_chat_interaction(
client: Arcade, model: str, history: list[dict], user_email: str | None, stream: bool = False
client: OpenAI, model: str, history: list[dict], user_email: str | None, stream: bool = False
) -> ChatInteractionResult:
"""
Handle a single chat-request/chat-response interaction for both streamed and non-streamed responses.
Expand Down
19 changes: 8 additions & 11 deletions arcade/arcade/client/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,6 @@
RateLimitError,
UnauthorizedError,
)
from arcade.client.schema import OPENAI_API_VERSION

T = TypeVar("T")
ResponseT = TypeVar("ResponseT")
Expand All @@ -22,11 +21,15 @@
class BaseResource(Generic[T]):
"""Base class for all resources."""

_path: str
_path: str = ""
_version: str = "v1"

def __init__(self, client: T) -> None:
self._client = client
self._resource_path = self._client._base_url + self._path # type: ignore[attr-defined]
self._resource_path = urljoin(
self._client._base_url, # type: ignore[attr-defined]
f"{self._version}/{self._path}",
)


class BaseArcadeClient:
Expand All @@ -37,8 +40,8 @@ def __init__(
base_url: str | None = None,
api_key: str | None = None,
headers: dict[str, str] | None = None,
timeout: float | Timeout = 10.0,
retries: int = 3,
timeout: float | Timeout = 30.0,
retries: int = 1,
):
"""
Initialize the BaseArcadeClient.
Expand Down Expand Up @@ -70,12 +73,6 @@ def _build_url(self, path: str) -> str:
"""
return urljoin(self._base_url, path)

def _chat_url(self, base_url: str) -> str:
chat_url = str(base_url)
if not base_url.endswith(OPENAI_API_VERSION):
chat_url = f"{base_url}/{OPENAI_API_VERSION}"
return chat_url

def _handle_http_error(self, e: httpx.HTTPStatusError) -> None:
error_map = {
400: BadRequestError,
Expand Down
23 changes: 8 additions & 15 deletions arcade/arcade/client/client.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,7 @@
import json
from typing import Any, TypeVar, Union

from httpx import Timeout
from openai import AsyncOpenAI, OpenAI
from openai.resources.chat import AsyncChat, Chat

from arcade.client.base import (
AsyncArcadeClient,
Expand Down Expand Up @@ -120,7 +119,7 @@ def run(
tool_name: str,
user_id: str,
tool_version: str | None = None,
inputs: dict[str, Any] | None = None,
inputs: dict[str, Any] | str | None = None,
) -> ExecuteToolResponse:
"""
Send a request to execute a tool and return the response.
Expand All @@ -131,6 +130,12 @@ def run(
tool_version: The version of the tool to execute (if not provided, the latest version will be used).
inputs: The inputs for the tool.
"""
if not isinstance(inputs, str):
try:
inputs = json.dumps(inputs)
except Exception:
raise ValueError("Inputs must be a valid JSON object or serializable dictionary")

request_data = {
"tool_name": tool_name,
"user_id": user_id,
Expand Down Expand Up @@ -399,12 +404,6 @@ def __init__(self, *args: Any, **kwargs: Any):
self.auth: AuthResource = AuthResource(self)
self.tools: ToolResource = ToolResource(self)
self.health: HealthResource = HealthResource(self)
chat_url = self._chat_url(self._base_url)
self._openai_client = OpenAI(base_url=chat_url, api_key=self._api_key)

@property
def chat(self) -> Chat:
return self._openai_client.chat

def _execute_request(self, method: str, url: str, **kwargs: Any) -> Any:
"""
Expand All @@ -422,12 +421,6 @@ def __init__(self, *args: Any, **kwargs: Any):
self.auth: AsyncAuthResource = AsyncAuthResource(self)
self.tools: AsyncToolResource = AsyncToolResource(self)
self.health: AsyncHealthResource = AsyncHealthResource(self)
chat_url = self._chat_url(self._base_url)
self._openai_client = AsyncOpenAI(base_url=chat_url, api_key=self._api_key)

@property
def chat(self) -> AsyncChat:
return self._openai_client.chat

async def _execute_request(self, method: str, url: str, **kwargs: Any) -> Any:
"""
Expand Down
3 changes: 0 additions & 3 deletions arcade/arcade/client/schema.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,9 @@
import os
from enum import Enum

from pydantic import BaseModel, Field

from arcade.core.schema import ToolAuthorizationContext, ToolCallOutput

OPENAI_API_VERSION = os.getenv("OPENAI_API_VERSION", "v1")


class AuthProvider(str, Enum):
google = "google"
Expand Down
11 changes: 6 additions & 5 deletions arcade/arcade/sdk/eval/eval.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,8 @@
"Use `pip install arcade-ai[evals]` to install the required dependencies for evaluation."
)

from arcade.client.client import AsyncArcade
from openai import AsyncOpenAI

from arcade.sdk.error import WeightError

if TYPE_CHECKING:
Expand Down Expand Up @@ -520,12 +521,12 @@ def extend_case(
)
self.cases.append(new_case)

async def run(self, client: AsyncArcade, model: str) -> dict[str, Any]:
async def run(self, client: AsyncOpenAI, model: str) -> dict[str, Any]:
"""
Run the evaluation suite.
Args:
client: The AsyncArcade client instance.
client: The AsyncOpenAI client instance.
model: The model to evaluate.
Returns:
Expand Down Expand Up @@ -651,11 +652,11 @@ async def wrapper(
raise TypeError("Eval function must return an EvalSuite")
suite.max_concurrent = max_concurrency
results = []
async with AsyncArcade(
async with AsyncOpenAI(
api_key=config.api.key,
base_url=config.engine_url,
) as client:
result = await suite.run(client, model) # type: ignore[arg-type]
result = await suite.run(client, model)
results.append(result)
return results

Expand Down

0 comments on commit ff092ac

Please sign in to comment.