Commit

Merge branch 'main' into fix-try-except-json
sfc-gh-twhite authored Dec 2, 2024
2 parents deb900f + 8427e62 commit 14b9a7b
Showing 10 changed files with 414 additions and 415 deletions.
5 changes: 3 additions & 2 deletions .pre-commit-config.yaml
@@ -1,10 +1,11 @@
 repos:
 - repo: https://github.com/pre-commit/pre-commit-hooks
-  rev: v2.3.0
+  rev: v5.0.0
   hooks:
   - id: end-of-file-fixer
   - id: trailing-whitespace
 - repo: https://github.com/astral-sh/ruff-pre-commit
-  rev: v0.7.1
+  rev: v0.8.0
   hooks:
   - id: ruff
   - id: ruff-format
802 changes: 401 additions & 401 deletions Quickstart.ipynb

Large diffs are not rendered by default.

6 changes: 3 additions & 3 deletions agent_gateway/agents/agent.py
@@ -21,7 +21,7 @@
 
 import yaml
 from chains.llm_chain import LLMChain
-from langchain.agents.agent import AgentOutputParser, BaseSingleActionAgent
+from langchain.agents.agent import BaseSingleActionAgent
 from langchain.agents.agent_types import AgentType
 from langchain.callbacks.base import BaseCallbackManager
 from langchain.callbacks.manager import Callbacks
@@ -130,7 +130,7 @@ def plan(
             full_inputs = self.get_full_inputs(intermediate_steps, **kwargs)
             full_output = self.llm_chain.predict(callbacks=callbacks, **full_inputs)
             return self.output_parser.parse(full_output)
-        except Exception as e:
+        except Exception:
             full_inputs["agent_scratchpad"] = (
                 full_inputs["agent_scratchpad"] + full_output + "\nAction: "
             )
@@ -161,7 +161,7 @@ async def aplan(
                 callbacks=callbacks, **full_inputs
             )
             agent_output = await self.output_parser.aparse(full_output)
-        except Exception as e:
+        except Exception:
             full_inputs["agent_scratchpad"] = (
                 full_inputs["agent_scratchpad"] + full_output + "\nAction: "
             )
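Both hunks in agent.py drop the `as e` binding because the exception object was never used; Ruff reports a bound-but-unused exception variable as F841. A minimal sketch of the pattern, with illustrative names that are not from the repo:

```python
from typing import Any, Callable


def parse_with_fallback(parse: Callable[[str], Any], raw_output: str) -> Any:
    """Illustrative only: parse model output, falling back to the raw text on failure."""
    try:
        return parse(raw_output)
    except Exception:
        # Before the fix this read `except Exception as e:`, binding a name that was
        # never used (Ruff F841); dropping the binding removes the warning.
        return {"action": None, "raw": raw_output}
```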
4 changes: 2 additions & 2 deletions agent_gateway/chains/llm_chain.py
@@ -133,7 +133,7 @@ def generate(
                 callbacks=run_manager.get_child() if run_manager else None,
                 **self.llm_kwargs,
             )
-        except:
+        except Exception:
             text = prompts[0].text
             # Try removing in-context examples
             first_index = text.find("Question:")
@@ -167,7 +167,7 @@ async def agenerate(
                 callbacks=run_manager.get_child() if run_manager else None,
                 **self.llm_kwargs,
             )
-        except:
+        except Exception:
             text = prompts[0].text
             # Try removing in-context examples
             first_index = text.find("Question:")
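These two hunks replace bare `except:` clauses with `except Exception:`. A bare except also catches `KeyboardInterrupt` and `SystemExit` (Ruff reports it as E722), so narrowing the handler keeps those control-flow exceptions propagating while still falling back on ordinary errors. A minimal sketch, assuming an injected `call_llm` callable rather than the chain's real prediction method:

```python
from typing import Callable


def generate_with_fallback(call_llm: Callable[[str], str], prompt_text: str) -> str:
    """Illustrative only: on failure, retry with the in-context examples trimmed off."""
    try:
        return call_llm(prompt_text)
    except Exception:
        # A bare `except:` would also intercept KeyboardInterrupt and SystemExit,
        # which are BaseException subclasses; `except Exception` lets them propagate.
        first_index = max(prompt_text.find("Question:"), 0)
        return call_llm(prompt_text[first_index:])
```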
1 change: 0 additions & 1 deletion agent_gateway/executors/agent_executor.py
@@ -30,7 +30,6 @@
 )
 from langchain.pydantic_v1 import root_validator
 from langchain.schema import AgentAction, AgentFinish, OutputParserException
-from langchain.tools import BaseTool
 from langchain.utilities.asyncio import asyncio_timeout
 from langchain.utils.input import get_color_mapping

2 changes: 1 addition & 1 deletion agent_gateway/gateway/gateway.py
@@ -69,7 +69,7 @@ async def arun(self, prompt: str) -> str:
         try:
             snowflake_response = self._parse_snowflake_response(response_text)
             return snowflake_response
-        except:
+        except Exception:
             raise AgentGatewayError(
                 message=f"Failed Cortex LLM Request. Unable to parse response. See details:{response_text}"
             )
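The narrowed handler immediately re-raises the failure as `AgentGatewayError`. As an optional refinement that is not part of this diff, the original exception can be chained with `raise ... from`, which keeps the underlying parse failure visible in the traceback; a sketch with a stand-in error type:

```python
from typing import Callable


class GatewayError(Exception):
    """Stand-in for the repo's AgentGatewayError."""


def parse_snowflake_response(parse: Callable[[str], dict], response_text: str) -> dict:
    try:
        return parse(response_text)
    except Exception as exc:
        # `raise ... from exc` attaches the original parsing error to the traceback
        # instead of discarding it.
        raise GatewayError(
            f"Failed Cortex LLM Request. Unable to parse response. See details:{response_text}"
        ) from exc
```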
2 changes: 1 addition & 1 deletion agent_gateway/gateway/output_parser.py
@@ -188,7 +188,7 @@ def instantiate_task(
     args = _parse_llm_compiler_action_args(args)
     if tool_name == "fuse":
         # fuse does not have a tool
-        tool_func = lambda x: None
+        tool_func = lambda x: None  # noqa: E731
         stringify_rule = None
     else:
         tool = _find_tool(tool_name, tools)
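Ruff's E731 discourages assigning a lambda to a name; the diff keeps the one-line no-op for the `fuse` pseudo-tool and suppresses the rule with `# noqa: E731`. The rule's usual alternative, shown purely as a sketch, is a small `def`:

```python
# What the diff keeps: a named no-op callable for the "fuse" pseudo-tool.
tool_func = lambda x: None  # noqa: E731


# E731-compliant alternative: a def gives the callable a real __name__ for
# debugging and tracebacks, at the cost of two extra lines.
def fuse_noop(x):
    return None


tool_func = fuse_noop
```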
2 changes: 1 addition & 1 deletion agent_gateway/gateway/planner.py
@@ -259,7 +259,7 @@ async def run_llm(
         try:
             snowflake_response = self._parse_snowflake_response(response_text)
             return snowflake_response
-        except:
+        except Exception:
             raise AgentGatewayError(
                 message=f"Failed Cortex LLM Request. Unable to parse response. See details:{response_text}"
             )
2 changes: 1 addition & 1 deletion agent_gateway/tools/utils.py
@@ -40,7 +40,7 @@ class Headers(TypedDict):
 
 def _determine_runtime():
     try:
-        from _stored_proc_restful import StoredProcRestful
+        from _stored_proc_restful import StoredProcRestful  # noqa: F401
 
         return True
     except ImportError:
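Here the import exists only to test whether the stored-procedure runtime is available, so the imported name is deliberately unused and Ruff would flag it as F401; the `# noqa: F401` records that intent (contrast with the agent_executor.py hunk above, where a genuinely unused import is simply removed). A minimal sketch of the probe pattern, using a hypothetical module name:

```python
def running_in_stored_proc() -> bool:
    """Illustrative only: detect a runtime by whether a marker module imports."""
    try:
        # The import's only job is to succeed or fail; the name is intentionally
        # unused, which is exactly what `# noqa: F401` documents in the real code.
        import some_runtime_only_module  # noqa: F401

        return True
    except ImportError:
        return False
```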
3 changes: 1 addition & 2 deletions demo_app/demo_app.py
@@ -13,6 +13,7 @@
 import asyncio
 import io
 import json
+import logging
 import os
 import queue
 import re
@@ -114,8 +115,6 @@ def create_prompt(prompt_key: str):
 
 source_list = []
 
-import logging
-
 
 class StreamlitLogHandler(logging.Handler):
     def __init__(self):
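The stray `import logging` that sat between `source_list` and the handler class moves into the top-of-file import block, in line with PEP 8's preference for module-level imports. The handler's internals are not shown in this diff; the sketch below assumes it buffers formatted records for display in the UI, which may differ from the actual implementation:

```python
import logging


class BufferingLogHandler(logging.Handler):
    """Illustrative stand-in for StreamlitLogHandler: buffer records for a UI."""

    def __init__(self):
        super().__init__()
        self.messages: list[str] = []

    def emit(self, record: logging.LogRecord) -> None:
        self.messages.append(self.format(record))


# Usage sketch: attach the handler, log something, and read the buffered output.
handler = BufferingLogHandler()
logger = logging.getLogger("demo")
logger.addHandler(handler)
logger.warning("hello from the agent")
print(handler.messages)
```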
