Add support for Google Search Tool
homanp committed Mar 31, 2024
1 parent b6caf43 commit 49cb2d9
Showing 7 changed files with 57 additions and 0 deletions.
1 change: 1 addition & 0 deletions libs/superagent/app/api/workflow_configs/saml_schema.py
@@ -87,6 +87,7 @@ class ToolModel(BaseModel):
    openai_assistant: Optional["OpenAIAgentTool"]
    llm: Optional["LLMAgentTool"]
    scraper: Optional[Tool]
    google_search: Optional[Tool]

    # OpenAI Assistant tools
    code_interpreter: Optional[Tool]
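As a quick illustration (not part of the commit), the workflow-config schema now recognises a google_search entry alongside the existing tool fields; a minimal check that assumes nothing about the rest of the schema:

from app.api.workflow_configs.saml_schema import ToolModel

# The new optional field is registered on the pydantic model.
assert "google_search" in ToolModel.__fields__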
4 changes: 4 additions & 0 deletions libs/superagent/app/models/tools.py
@@ -100,3 +100,7 @@ class TavilyInput(BaseModel):

class ScraperInput(BaseModel):
    url: str


class GoogleSearchInput(BaseModel):
    query: str
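As an illustrative aside (not from the commit), the new input schema simply validates a single required query string:

from app.models.tools import GoogleSearchInput

args = GoogleSearchInput(query="superagent google search tool")
print(args.query)  # -> "superagent google search tool"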
3 changes: 3 additions & 0 deletions libs/superagent/app/tools/__init__.py
@@ -26,6 +26,7 @@
    TTS1Input,
    WolframInput,
    ZapierInput,
    GoogleSearchInput,
)
from app.tools.agent import Agent
from app.tools.algolia import Algolia
@@ -49,6 +50,7 @@
from app.tools.tts_1 import TTS1
from app.tools.wolfram_alpha import WolframAlpha
from app.tools.zapier import ZapierNLA
from app.tools.google_search import GoogleSearch

logger = logging.getLogger(__name__)

@@ -83,6 +85,7 @@
"SUPERRAG": {"class": SuperRagTool, "schema": SuperRagInput},
"RESEARCH": {"class": Tavily, "schema": TavilyInput},
"SCRAPER": {"class": Scraper, "schema": ScraperInput},
"GOOGLE_SEARCH": {"class": GoogleSearch, "schema": GoogleSearchInput},
}

OSS_TOOL_TYPE_MAPPING = {"BROWSER": Browser, "BING_SEARCH": BingSearch}
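A hedged sketch of how the new mapping entry could be consumed; the name of the dict holding these entries is not visible in the hunk, so TOOL_TYPE_MAPPING below is an assumption, as is the instantiation style:

from app.tools import TOOL_TYPE_MAPPING  # assumed name; the dict's declaration is outside this hunk

entry = TOOL_TYPE_MAPPING["GOOGLE_SEARCH"]
tool_cls, input_schema = entry["class"], entry["schema"]  # GoogleSearch, GoogleSearchInput

args = input_schema(query="langchain community tools")
tool = tool_cls(metadata={"apiKey": "<serper-api-key>"})
print(tool.run(args.query))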
35 changes: 35 additions & 0 deletions libs/superagent/app/tools/google_search.py
@@ -0,0 +1,35 @@
import json

import aiohttp
import requests
from langchain_community.tools import BaseTool

# Serper endpoint used for Google search queries
url = "https://google.serper.dev/search"


class GoogleSearch(BaseTool):
    name = "Google search"
    description = "useful for searching Google for up-to-date information on any topic"
    return_direct = False

    def _run(self, query: str) -> str:
        # Synchronous search: POST the query to Serper with the API key from the tool metadata.
        headers = {
            "X-API-KEY": self.metadata.get("apiKey"),
            "Content-Type": "application/json",
        }
        payload = json.dumps({"q": query})
        response = requests.post(url, headers=headers, data=payload)
        return response.text

    async def _arun(self, query: str) -> str:
        # Asynchronous variant of the same Serper request, issued through aiohttp.
        headers = {
            "X-API-KEY": self.metadata.get("apiKey"),
            "Content-Type": "application/json",
        }
        payload = json.dumps({"q": query})

        async with aiohttp.ClientSession() as session:
            async with session.post(url, headers=headers, data=payload) as response:
                return await response.text()
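A usage sketch (not from the commit): the tool reads the Serper API key from its metadata field, which LangChain's BaseTool accepts as a constructor argument, and can be called synchronously or asynchronously:

import asyncio

from app.tools.google_search import GoogleSearch

tool = GoogleSearch(metadata={"apiKey": "<serper-api-key>"})

print(tool.run("current weather in Stockholm"))           # sync path (_run), returns raw Serper JSON text
print(asyncio.run(tool.arun("current weather in Oslo")))  # async path (_arun)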
@@ -0,0 +1,2 @@
-- AlterEnum
ALTER TYPE "ToolType" ADD VALUE 'GOOGLE_SEARCH';
1 change: 1 addition & 0 deletions libs/superagent/prisma/schema.prisma
@@ -61,6 +61,7 @@ enum ToolType {
  RESEARCH
  GITHUB
  SCRAPER
  GOOGLE_SEARCH
}

enum DatasourceType {
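With the enum extended, a record of the new type could be created through the generated Prisma client; a hedged sketch, assuming the Tool model exposes name and type columns (hypothetical; only the enum change is shown in this diff):

import asyncio

from prisma import Prisma


async def main() -> None:
    db = Prisma()
    await db.connect()
    # "GOOGLE_SEARCH" is the value added by the migration above.
    await db.tool.create(data={"name": "Google search", "type": "GOOGLE_SEARCH"})
    await db.disconnect()


asyncio.run(main())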
11 changes: 11 additions & 0 deletions libs/ui/config/site.ts
@@ -249,6 +249,17 @@ export const siteConfig = {
        },
      ],
    },
    {
      value: "GOOGLE_SEARCH",
      title: "Google search",
      metadata: [
        {
          key: "apiKey",
          type: "input",
          label: "Google search API key",
        },
      ],
    },
    {
      value: "HTTP",
      title: "API Request",
