Popgetter #4

Draft pull request: wants to merge 3 commits into base: main
1 change: 1 addition & 0 deletions .gitignore
@@ -0,0 +1 @@
__pycache__
Empty file added demoland_llm_server/README.md
65 changes: 33 additions & 32 deletions demoland_llm_server/demoland_agent/__init__.py
@@ -1,12 +1,12 @@
import os

from dotenv import load_dotenv
load_dotenv()
import numpy as np
load_dotenv()
from .utils import load_geography,load_regions, load_signatures,load_pen_portaits
import geopandas as gp
import pandas as pd
import langchain
langchain.debug = True
import pandas as pd
import langchain
from langchain.agents import tool
from langchain_core.messages import AIMessage, HumanMessage
import overpy
@@ -17,7 +17,8 @@

overpass_api = overpy.Overpass()
from langchain_community.tools.convert_to_openai import format_tool_to_openai_function
from langchain_openai import ChatOpenAI

from langchain_ollama import ChatOllama as ChatOpenAI
from langchain.agents.format_scratchpad import format_to_openai_function_messages
from langchain.agents.output_parsers import OpenAIFunctionsAgentOutputParser
from langchain_experimental.agents.agent_toolkits import create_pandas_dataframe_agent
@@ -34,9 +35,9 @@ def PythonAstREPLTool_init_wrapper(self, *args, **kwargs):
PythonAstREPLTool.__init__ = PythonAstREPLTool_init_wrapper


print("Key ", os.environ["OPENAI_API_KEY"])
print("Key 2 ",os.environ["OPEN_AI_KEY"])
print("loading env ")
# print("Key ", os.environ["OPENAI_API_KEY"])
# print("Key 2 ",os.environ["OPEN_AI_KEY"])
# print("loading env ")


class ScenarioChangeAtPointSchema(BaseModel):
@@ -59,9 +60,9 @@ def _construct_prompt(self):
[
(
"system",
"""You are very powerful geospatial assistant, but don't know current events. If asked about places physical locations you dont know,
for example schools, hospitals, grocery stores etc, use the find_points_of_interest tool to look them up. Make your responses conversational
as if you where reporting them in a talk. If asked to report changes only report non zero changes.
"""You are very powerful geospatial assistant, but don't know current events. If asked about places physical locations you dont know,
for example schools, hospitals, grocery stores etc, use the find_points_of_interest tool to look them up. Make your responses conversational
as if you where reporting them in a talk. If asked to report changes only report non zero changes.
""",
),
MessagesPlaceholder(variable_name=MEMORY_KEY),
@@ -70,9 +71,9 @@ def _construct_prompt(self):
]
)
self._prompt = prompt

def _construct_agent(self):

print("loading regions")
regions = load_regions()
print("loading signature descriptions")
@@ -93,27 +94,27 @@ def _construct_agent(self):


executor= create_pandas_dataframe_agent(
# ChatOpenAI(temperature=0, model="gpt-4"),
ChatOpenAI(temperature=0, model="gpt-4"),
ChatOpenAI(temperature=0, model="llama3.1"),
self._scenario,
verbose=True,
agent_type=AgentType.OPENAI_FUNCTIONS,#,AgentType.ZERO_SHOT_REACT_DESCRIPTION,
extra_tools= tools,
handle_parsing_errors=True,
prefix="""You are very powerful geospatial assistant, but don't know current events. If asked about places physical locations you dont know,
for example schools, hospitals, grocery stores etc, use the find_points_of_interest tool to look them up. Make your responses conversational
as if you where reporting them in a talk.
prefix="""You are very powerful geospatial assistant, but don't know current events. If asked about places physical locations you dont know,
for example schools, hospitals, grocery stores etc, use the find_points_of_interest tool to look them up. Make your responses conversational
as if you where reporting them in a talk.

If a dataframe has a geometry column, treat these as geopandas dataframes. IMD means index of multiple deprivation """,

return_intermediate_steps = True

return_intermediate_steps = True,
allow_dangerous_code=True,
)

print("prompts", executor.to_json())

# llm = ChatOpenAI(
# # model="gpt-4",
# # model="gpt-3.5-turbo",
# # model="gpt-3.5-turbo",
# temperature=0
# )
# llm_with_tools = llm.bind(functions=[format_tool_to_openai_function(t) for t in tools])
@@ -154,19 +155,19 @@ def find_points_of_interest(ammenity_type:str):
node["amenity"="{ammenity_type}"]({bounds});
out body;
'''

result = overpass_api.query(query)
features = []
for node in result.nodes:
features.append([node.lon,node.lat,node.tags.get("name")])

return pd.DataFrame(features, columns=["lon","lat","name"]).to_json()

@tool
return pd.DataFrame(features, columns=np.array(["lon","lat","name"])).to_json()

@tool
def signature_at_points(points):
"""Given a geojson object of points, return a geodataframe of the spatial signature of the region that contains each point"""
from io import StringIO
from io import StringIO

points = gp.read_file(StringIO(json.dumps(points)))

print("Points are ", points, type(points))
@@ -176,7 +177,7 @@ def signature_at_points(points):
@tool(args_schema=ScenarioChangeAtPointSchema)
def scenario_change_at_point(points):
"""For the current scenario and an input list of points in geojson format, this function provides the changes in air pollution, house prices and access to greenspace for those points"""
from io import StringIO
from io import StringIO
print(points)
points = json.loads(points)
names = [point["name"] for point in points]
@@ -188,7 +189,7 @@ def signature_at_points(points):
print("Change in point columns" ,result.columns)
return result.drop(["geometry"], axis=1).to_markdown()

@tool
@tool
def signature_descriptions():
"""Returns the descriptions of each signature as json"""
return sig_descriptions
@@ -204,11 +205,11 @@ def summarize_in_region(region:str):
"""Summarizes data in a geographic region. Should not be used for categories of location like resturants"""
region_geo = regions[regions['name']==region]
return gp.sjoin(region_geo,signatures)['type'].value_counts().to_csv()

@tool
def get_spatial_signature_list():
"""Returns the names and counts of each spatial signature"""
return sig_types.to_csv()
return sig_types.to_csv()

tools = [
#summarize_in_region,
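For reference, the Overpass lookup that find_points_of_interest wraps can be exercised on its own. A minimal sketch, assuming network access to the public Overpass API; the amenity type and bounding box (roughly Newcastle upon Tyne) are illustrative only:

```python
import overpy
import pandas as pd

overpass_api = overpy.Overpass()

amenity_type = "hospital"
bounds = "54.9,-1.8,55.1,-1.4"  # south,west,north,east (placeholder values)

query = f'''
node["amenity"="{amenity_type}"]({bounds});
out body;
'''

result = overpass_api.query(query)
features = [[node.lon, node.lat, node.tags.get("name")] for node in result.nodes]

# The diff wraps the column labels in np.array(); a plain list works just as well.
print(pd.DataFrame(features, columns=["lon", "lat", "name"]).to_json())
```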
30 changes: 18 additions & 12 deletions demoland_llm_server/main.py
@@ -1,14 +1,14 @@
from typing import Union
from fastapi import FastAPI,WebSocket
from fastapi.responses import HTMLResponse
from demoland_agent import DemolandAgent
from demoland_agent.utils import load_scenario
from demoland_agent import DemolandAgent
from demoland_agent.utils import load_scenario
import datetime

print("loading baseline")
baseline = load_scenario("../src/data/scenarios/baseline.json")
baseline = load_scenario("./data/baseline.json")
print("loading sceanrio1")
scenario1 = load_scenario("../src/data/scenarios/scenario3.json", baseline)
scenario1 = load_scenario("./data/scenario1.json", baseline)

app = FastAPI()

@@ -58,16 +58,16 @@ def read_item(item_id: int, q: Union[str, None] = None):
@app.websocket("/ws")
async def websocket_endpoint(websocket: WebSocket):
# print("Created agent")

await websocket.accept()

agent = DemolandAgent(scenario1);

inital_message= {
"text" : "Hi I am your helpful demoland bot. Ask me questions about the scenario you just ran!",
"isUser": False,
"text" : "Hi I am your helpful demoland bot. Ask me questions about the scenario you just ran!",
"isUser": False,
"timestamp": str(datetime.datetime.now())
}
}

await websocket.send_json(inital_message)

@@ -78,9 +78,15 @@ async def websocket_endpoint(websocket: WebSocket):
print(result)

response = {
"text" : result["output"],
"isUser": False,
"timestamp": str(datetime.datetime.now()),
"text" : result["output"],
"isUser": False,
"timestamp": str(datetime.datetime.now()),
"steps": [ {"action":step[0].log} for step in result["intermediate_steps"]]
}
}
await websocket.send_json(response)



if __name__ == "__main__":
import uvicorn
uvicorn.run(app, host="127.0.0.1", port=8000)
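Once the server is up, the /ws endpoint can be smoke-tested with a small client. A minimal sketch, assuming the app runs locally on port 8000 and that the endpoint accepts a plain-text question (the inbound message handling is not shown in this diff); the question text is illustrative:

```python
import asyncio
import json

import websockets  # pip install websockets


async def chat() -> None:
    async with websockets.connect("ws://127.0.0.1:8000/ws") as ws:
        greeting = json.loads(await ws.recv())  # initial bot message
        print(greeting["text"])

        # Assumes the endpoint reads a plain text frame; adjust if it expects JSON.
        await ws.send("What changed in air pollution near the hospitals?")

        reply = json.loads(await ws.recv())
        print(reply["text"])
        for step in reply.get("steps", []):
            print("step:", step["action"])


asyncio.run(chat())
```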