Commit
feat: improve error handling of Agent component, solves Empty ExceptionWithMessageError (#6097)

* Gracefully handle Errors

* updates to Error handling

* update in Error handling

* update lint error similar to main

* [autofix.ci] apply automated fixes

* [autofix.ci] apply automated fixes (attempt 2/3)

* feat: add max retry and request timeout to open ai component, fixes remote protocol error caused by OpenAI LLM in Agents (#6118)

* update to __str__ and fix lint errors

* [autofix.ci] apply automated fixes

---------

Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
1 parent 9264083 commit f9e41f9
Showing 22 changed files with 1,127 additions and 75 deletions.
6 changes: 5 additions & 1 deletion src/backend/base/langflow/base/agents/agent.py
@@ -14,6 +14,7 @@
from langflow.field_typing import Tool
from langflow.inputs.inputs import InputTypes, MultilineInput
from langflow.io import BoolInput, HandleInput, IntInput, MessageTextInput
from langflow.logging import logger
from langflow.memory import delete_message
from langflow.schema import Data
from langflow.schema.content_block import ContentBlock
@@ -171,8 +172,11 @@ async def run_agent(
msg_id = e.agent_message.id
await delete_message(id_=msg_id)
await self._send_message_event(e.agent_message, category="remove_message")
logger.error(f"ExceptionWithMessageError: {e}")
raise
except Exception:
except Exception as e:
# Log or handle any other exceptions
logger.error(f"Error: {e}")
raise

self.status = result
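The hunk above adds a `logger` import and logs both `ExceptionWithMessageError` and any other exception before re-raising, so agent failures no longer surface as empty error messages. Below is a minimal, self-contained sketch of that log-then-reraise pattern; the stand-in `ExceptionWithMessageError` class and the `run_agent_sketch`/`failing_step` names are illustrative, not part of the langflow API.

```python
import asyncio
import logging

logger = logging.getLogger(__name__)


class ExceptionWithMessageError(Exception):
    """Stand-in for langflow.base.agents.events.ExceptionWithMessageError."""


async def run_agent_sketch(step):
    try:
        return await step()
    except ExceptionWithMessageError as e:
        # Known agent failure: log it with its message instead of re-raising silently.
        logger.error(f"ExceptionWithMessageError: {e}")
        raise
    except Exception as e:
        # Log or handle any other exceptions, then propagate.
        logger.error(f"Error: {e}")
        raise


async def failing_step():
    raise ValueError("boom")


# asyncio.run(run_agent_sketch(failing_step))  # logs "Error: boom" and re-raises
```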
15 changes: 15 additions & 0 deletions src/backend/base/langflow/base/agents/errors.py
@@ -0,0 +1,15 @@
from anthropic import BadRequestError as AnthropicBadRequestError
from cohere import BadRequestError as CohereBadRequestError
from httpx import HTTPStatusError

from langflow.schema.message import Message


class CustomBadRequestError(AnthropicBadRequestError, CohereBadRequestError, HTTPStatusError):
def __init__(self, agent_message: Message | None, message: str):
super().__init__(message)
self.message = message
self.agent_message = agent_message

def __str__(self):
return f"{self.message}"
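The new `CustomBadRequestError` inherits from the Anthropic, Cohere, and httpx bad-request errors so a single raise can be caught under any of those types while still carrying the partial agent message. The sketch below illustrates only the multiple-inheritance and `__str__` behaviour, using plain `Exception` stand-ins for the three SDK bases (the real parents take provider-specific constructor arguments), so it runs without those SDKs installed.

```python
# Stand-ins for anthropic.BadRequestError, cohere.BadRequestError and
# httpx.HTTPStatusError so this snippet is self-contained.
class AnthropicBadRequestError(Exception): ...
class CohereBadRequestError(Exception): ...
class HTTPStatusError(Exception): ...


class CustomBadRequestError(AnthropicBadRequestError, CohereBadRequestError, HTTPStatusError):
    def __init__(self, agent_message, message: str):
        super().__init__(message)
        self.message = message
        self.agent_message = agent_message

    def __str__(self):
        return f"{self.message}"


try:
    raise CustomBadRequestError(None, "model rejected the request")
except HTTPStatusError as e:  # also catchable as either BadRequestError parent
    print(e)  # -> model rejected the request
```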
15 changes: 11 additions & 4 deletions src/backend/base/langflow/base/agents/events.py
@@ -14,9 +14,17 @@


class ExceptionWithMessageError(Exception):
def __init__(self, agent_message: Message):
def __init__(self, agent_message: Message, message: str):
self.agent_message = agent_message
super().__init__()
super().__init__(message)
self.message = message

def __str__(self):
return (
f"Agent message: {self.agent_message.text} \nError: {self.message}."
if self.agent_message.error or self.agent_message.text
else f"{self.message}."
)


class InputDict(TypedDict):
@@ -273,6 +281,5 @@ async def process_agent_events(
agent_message, start_time = await chain_handler(event, agent_message, send_message_method, start_time)
agent_message.properties.state = "complete"
except Exception as e:
raise ExceptionWithMessageError(agent_message) from e

raise ExceptionWithMessageError(agent_message, str(e)) from e
return await Message.create(**agent_message.model_dump())
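`ExceptionWithMessageError` now carries the original error text and renders the partial agent message alongside it, which is what removes the empty error output. The sketch below reproduces the `__str__` logic from the diff against a minimal stand-in for `langflow.schema.message.Message`, modelling only the `text` and `error` fields the method reads.

```python
from dataclasses import dataclass


@dataclass
class FakeMessage:
    """Minimal stand-in for langflow.schema.message.Message."""
    text: str = ""
    error: bool = False


class ExceptionWithMessageError(Exception):
    def __init__(self, agent_message, message: str):
        self.agent_message = agent_message
        super().__init__(message)
        self.message = message

    def __str__(self):
        return (
            f"Agent message: {self.agent_message.text} \nError: {self.message}."
            if self.agent_message.error or self.agent_message.text
            else f"{self.message}."
        )


print(ExceptionWithMessageError(FakeMessage(text="partial answer"), "tool call failed"))
# Agent message: partial answer
# Error: tool call failed.
print(ExceptionWithMessageError(FakeMessage(), "tool call failed"))
# tool call failed.
```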
84 changes: 42 additions & 42 deletions src/backend/base/langflow/components/agents/agent.py
@@ -1,6 +1,7 @@
from langchain_core.tools import StructuredTool

from langflow.base.agents.agent import LCToolsAgentComponent
from langflow.base.agents.events import ExceptionWithMessageError
from langflow.base.models.model_input_constants import (
ALL_PROVIDER_FIELDS,
MODEL_DYNAMIC_UPDATE_FIELDS,
@@ -65,43 +66,32 @@ class AgentComponent(ToolCallingAgentComponent):

async def message_response(self) -> Message:
try:
# Get LLM model and validate
llm_model, display_name = self.get_llm()
if llm_model is None:
msg = "No language model selected"
msg = "No language model selected. Please choose a model to proceed."
raise ValueError(msg)
self.model_name = get_model_name(llm_model, display_name=display_name)
except Exception as e:
# Log the error for debugging purposes
logger.error(f"Error retrieving language model: {e}")
raise

try:
# Get memory data
self.chat_history = await self.get_memory_data()
except Exception as e:
logger.error(f"Error retrieving chat history: {e}")
raise

if self.add_current_date_tool:
try:
# Add current date tool if enabled
if self.add_current_date_tool:
if not isinstance(self.tools, list): # type: ignore[has-type]
self.tools = []
# Convert CurrentDateComponent to a StructuredTool
current_date_tool = (await CurrentDateComponent(**self.get_base_args()).to_toolkit()).pop(0)
if isinstance(current_date_tool, StructuredTool):
self.tools.append(current_date_tool)
else:
if not isinstance(current_date_tool, StructuredTool):
msg = "CurrentDateComponent must be converted to a StructuredTool"
raise TypeError(msg)
except Exception as e:
logger.error(f"Error adding current date tool: {e}")
raise
self.tools.append(current_date_tool)

if not self.tools:
msg = "Tools are required to run the agent."
logger.error(msg)
raise ValueError(msg)
# Validate tools
if not self.tools:
msg = "Tools are required to run the agent. Please add at least one tool."
raise ValueError(msg)

try:
# Set up and run agent
self.set(
llm=llm_model,
tools=self.tools,
@@ -110,12 +100,18 @@ async def message_response(self) -> Message:
system_prompt=self.system_prompt,
)
agent = self.create_agent_runnable()
return await self.run_agent(agent)

except (ValueError, TypeError, KeyError) as e:
logger.error(f"{type(e).__name__}: {e!s}")
raise
except ExceptionWithMessageError as e:
logger.error(f"ExceptionWithMessageError occurred: {e}")
raise
except Exception as e:
logger.error(f"Error setting up the agent: {e}")
logger.error(f"Unexpected error: {e!s}")
raise

return await self.run_agent(agent)

async def get_memory_data(self):
memory_kwargs = {
component_input.name: getattr(self, f"{component_input.name}") for component_input in self.memory_inputs
@@ -126,22 +122,26 @@ async def get_memory_data():
return await MemoryComponent(**self.get_base_args()).set(**memory_kwargs).retrieve_messages()

def get_llm(self):
if isinstance(self.agent_llm, str):
try:
provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)
if provider_info:
component_class = provider_info.get("component_class")
display_name = component_class.display_name
inputs = provider_info.get("inputs")
prefix = provider_info.get("prefix", "")
return (
self._build_llm_model(component_class, inputs, prefix),
display_name,
)
except Exception as e:
msg = f"Error building {self.agent_llm} language model"
raise ValueError(msg) from e
return self.agent_llm, None
if not isinstance(self.agent_llm, str):
return self.agent_llm, None

try:
provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)
if not provider_info:
msg = f"Invalid model provider: {self.agent_llm}"
raise ValueError(msg)

component_class = provider_info.get("component_class")
display_name = component_class.display_name
inputs = provider_info.get("inputs")
prefix = provider_info.get("prefix", "")

return self._build_llm_model(component_class, inputs, prefix), display_name

except Exception as e:
logger.error(f"Error building {self.agent_llm} language model: {e!s}")
msg = f"Failed to initialize language model: {e!s}"
raise ValueError(msg) from e

def _build_llm_model(self, component, inputs, prefix=""):
model_kwargs = {input_.name: getattr(self, f"{prefix}{input_.name}") for input_ in inputs}
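The `get_llm` refactor above replaces the nested `if isinstance(...)` block with an early return, raises an explicit error for unknown providers, and wraps any build failure in a `ValueError`. A hedged sketch of that control flow follows; `MODEL_PROVIDERS_DICT`, the `build_llm_model` callable, and the single `"OpenAI"` entry are simplified stand-ins for the real langflow registry, not its actual contents.

```python
import logging

logger = logging.getLogger(__name__)

# Simplified stand-in for langflow's MODEL_PROVIDERS_DICT.
MODEL_PROVIDERS_DICT = {
    "OpenAI": {"component_class": object, "inputs": [], "prefix": "openai_"},
}


def get_llm_sketch(agent_llm, build_llm_model):
    if not isinstance(agent_llm, str):
        # Already an instantiated model object: use it as-is.
        return agent_llm, None

    try:
        provider_info = MODEL_PROVIDERS_DICT.get(agent_llm)
        if not provider_info:
            msg = f"Invalid model provider: {agent_llm}"
            raise ValueError(msg)

        component_class = provider_info.get("component_class")
        display_name = getattr(component_class, "display_name", agent_llm)
        inputs = provider_info.get("inputs")
        prefix = provider_info.get("prefix", "")

        return build_llm_model(component_class, inputs, prefix), display_name

    except Exception as e:
        logger.error(f"Error building {agent_llm} language model: {e!s}")
        msg = f"Failed to initialize language model: {e!s}"
        raise ValueError(msg) from e


# Example: a fake builder that ignores its arguments.
model, name = get_llm_sketch("OpenAI", lambda cls, inputs, prefix: "fake-llm")
```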
18 changes: 18 additions & 0 deletions src/backend/base/langflow/components/models/openai.py
@@ -68,6 +68,20 @@ class OpenAIModelComponent(LCModelComponent):
advanced=True,
value=1,
),
IntInput(
name="max_retries",
display_name="Max Retries",
info="The maximum number of retries to make when generating.",
advanced=True,
value=5,
),
IntInput(
name="timeout",
display_name="Timeout",
info="The timeout for requests to OpenAI completion API.",
advanced=True,
value=700,
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
@@ -79,6 +93,8 @@ def build_model(self) -> LanguageModel: # type: ignore[type-var]
openai_api_base = self.openai_api_base or "https://api.openai.com/v1"
json_mode = self.json_mode
seed = self.seed
max_retries = self.max_retries
timeout = self.timeout

api_key = SecretStr(openai_api_key).get_secret_value() if openai_api_key else None
output = ChatOpenAI(
Expand All @@ -89,6 +105,8 @@ def build_model(self) -> LanguageModel: # type: ignore[type-var]
api_key=api_key,
temperature=temperature if temperature is not None else 0.1,
seed=seed,
max_retries=max_retries,
request_timeout=timeout,
)
if json_mode:
output = output.bind(response_format={"type": "json_object"})
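The two new inputs flow straight into the `ChatOpenAI` constructor as `max_retries` and `request_timeout`, which is what mitigates the remote protocol errors referenced in #6118. A minimal sketch of that wiring is shown below; it assumes the `langchain-openai` package and an `OPENAI_API_KEY` in the environment, and the model name is illustrative rather than taken from the component.

```python
from langchain_openai import ChatOpenAI

llm = ChatOpenAI(
    model="gpt-4o-mini",   # illustrative; the component passes its configured model_name
    temperature=0.1,
    max_retries=5,         # new input: retry transient failures before giving up
    request_timeout=700,   # new input: seconds to wait on the completion API
)
# llm.invoke("ping")  # requires a valid OpenAI API key
```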