Skip to content

Commit

Permalink
Add simple entrypoint for LumenAI apps (#736)
Browse files Browse the repository at this point in the history
  • Loading branch information
philippjfr authored Oct 29, 2024
1 parent 3722bd0 commit 0f3922f
Show file tree
Hide file tree
Showing 7 changed files with 195 additions and 136 deletions.
1 change: 1 addition & 0 deletions lumen/ai/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

from . import agents, embeddings, llm # noqa
from .agents import Analysis # noqa
from .app import LumenAI # noqa
from .assistant import Assistant, PlanningAssistant # noqa
from .memory import memory # noqa

Expand Down
11 changes: 11 additions & 0 deletions lumen/ai/agents.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,8 @@ class Agent(Viewer):

provides = param.List(default=[], readonly=True)

_extensions = ()

_max_width = 1200

__abstract = True
Expand Down Expand Up @@ -208,6 +210,8 @@ class SourceAgent(Agent):

on_init = param.Boolean(default=True)

_extensions = ('filedropper',)

async def answer(self, messages: list | str):
source_controls = SourceControls(multiple=True, replace_controls=True, select_existing=False)
self.interface.send(source_controls, respond=False, user="SourceAgent")
Expand Down Expand Up @@ -365,6 +369,8 @@ class TableListAgent(LumenBaseAgent):

requires = param.List(default=["current_source"], readonly=True)

_extensions = ('tabulator',)

@classmethod
async def applies(cls) -> bool:
source = memory.get("current_source")
Expand Down Expand Up @@ -425,6 +431,8 @@ class SQLAgent(LumenBaseAgent):

provides = param.List(default=["current_table", "current_sql", "current_pipeline"], readonly=True)

_extensions = ('codeeditor', 'tabulator',)

async def _select_relevant_table(self, messages: list | str) -> tuple[str, BaseSQLSource]:
"""Select the most relevant table based on the user query."""
available_sources = memory["available_sources"]
Expand Down Expand Up @@ -690,6 +698,7 @@ async def answer(self, messages: list | str):
# Remove source prefixes message, e.g. //<source>//<table>
messages[-1]["content"] = re.sub(r"//[^/]+//", "", messages[-1]["content"])
sql_query = await self._create_valid_sql(messages, system, tables_to_source)
print(sql_query)
return sql_query

async def invoke(self, messages: list | str):
Expand Down Expand Up @@ -813,6 +822,8 @@ class VegaLiteAgent(BaseViewAgent):

view_type = VegaLiteView

_extensions = ('vega',)

@classmethod
def _get_model(cls, schema):
return VegaLiteSpec
Expand Down
152 changes: 152 additions & 0 deletions lumen/ai/app.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,152 @@
from __future__ import annotations

import param

from panel.config import config, panel_extension
from panel.io.state import state
from panel.layout import Row
from panel.viewable import Viewer

from ..pipeline import Pipeline
from ..sources import Source
from ..sources.duckdb import DuckDBSource
from .agents import (
AnalysisAgent, ChatAgent, SourceAgent, SQLAgent,
)
from .assistant import Assistant, PlanningAssistant
from .llm import Llm, OpenAI
from .memory import memory

# Types accepted as a data input: a file path / URL string, a lumen Source,
# or a lumen Pipeline (mirrored into an in-memory DuckDB source below).
DataT = str | Source | Pipeline


class LumenAI(Viewer):
    """
    LumenAI provides a high-level entrypoint to start chatting with your data.

    This high-level wrapper allows providing the data sources you will
    be chatting with and then configures the assistant and agents.

    Example:

    ```python
    import lumen.ai as lmai

    lmai.LumenAI('~/data.csv').servable()
    ```
    """

    analyses = param.List(default=[], doc="""
        List of custom analyses. If provided the AnalysesAgent will be added."""
    )

    assistant = param.ClassSelector(
        class_=Assistant, default=PlanningAssistant, is_instance=False, doc="""
        The Assistant class that will be responsible for coordinating the Agents."""
    )

    agents = param.List(default=[], doc="""
        List of additional Agents to add beyond the default_agents."""
    )

    default_agents = param.List(default=[ChatAgent, SourceAgent, SQLAgent], doc="""
        List of default agents which will always be added.""")

    llm = param.ClassSelector(class_=Llm, default=OpenAI(), doc="""
        The LLM provider to be used by default""")

    template = param.Selector(
        default=config.param.template.names['fast'],
        objects=config.param.template.names, doc="""
        Panel template to serve the application in."""
    )

    title = param.String(default='Lumen.ai', doc="Title of the app.")

    def __init__(
        self,
        data: DataT | list[DataT] | None = None,
        **params
    ):
        super().__init__(**params)
        # Always-on default agents first, then any user-supplied extras.
        agents = self.default_agents + self.agents
        if self.analyses:
            agents.append(AnalysisAgent(analyses=self.analyses))
        self._assistant = self.assistant(
            agents=agents,
            llm=self.llm
        )
        self._resolve_data(data)

    def _resolve_data(self, data: DataT | list[DataT] | None):
        """
        Normalize the provided data into `Source` objects and register
        them on the shared ai `memory`.

        Strings are treated as file paths/URLs and mapped onto DuckDB
        reader expressions; `Pipeline`s are mirrored into an in-memory
        DuckDB source; `Source` objects are used as-is.

        Raises
        ------
        ValueError
            If a string input has an unrecognized extension or if no
            sources could be resolved at all.
        """
        if data is None:
            return
        elif not isinstance(data, list):
            data = [data]
        sources = []
        mirrors, tables = {}, {}
        remote = False
        for src in data:
            if isinstance(src, Source):
                sources.append(src)
            elif isinstance(src, Pipeline):
                mirrors[src.name] = src
            elif isinstance(src, str):
                if src.startswith('http'):
                    remote = True
                if src.endswith(('.parq', '.parquet')):
                    table = f"read_parquet('{src}')"
                elif src.endswith(".csv"):
                    table = f"read_csv('{src}')"
                elif src.endswith(".json"):
                    table = f"read_json_auto('{src}')"
                else:
                    # Fix: original message contained a bare "{}" placeholder
                    # that was never interpolated; include the actual path.
                    raise ValueError(
                        f"Could not determine how to load {src} file."
                    )
                tables[src] = table
        if tables or mirrors:
            # DuckDB needs the httpfs extension to read http(s):// files.
            initializers = ["INSTALL httpfs;", "LOAD httpfs;"] if remote else []
            source = DuckDBSource(tables=tables, mirrors=mirrors, uri=':memory:', initializers=initializers)
            sources.append(source)
        if not sources:
            raise ValueError(
                'Must provide at least one data source.'
            )
        memory['available_sources'] = sources
        # sources is guaranteed non-empty here (we raised above otherwise),
        # so the redundant `if sources:` guard was dropped.
        memory['current_source'] = sources[0]

    def show(self, **kwargs):
        """Launch the application in a standalone server / browser tab."""
        return self._create_view(server=True).show(**kwargs)

    def _create_view(self, server: bool | None = None):
        """
        Build the served template when running inside a Panel server
        session (or when forced via ``server=True``); otherwise defer to
        the default ``Viewer`` view (e.g. for notebook display).
        """
        if (state.curdoc and state.curdoc.session_context) or server is True:
            # Load every extension any configured agent declares it needs.
            panel_extension(
                *{ext for agent in self._assistant.agents for ext in agent._extensions}, template=self.template
            )
            config.template = self.template
            template = state.template
            template.title = self.title
            template.main.append(self._assistant)
            template.sidebar.append(self._assistant.controls())
            return template
        return super()._create_view()

    def servable(self, title: str | None = None, **kwargs):
        """Mark the app as servable; a no-op wrapper inside a live session."""
        if (state.curdoc and state.curdoc.session_context):
            self._create_view()
            return self
        return self._create_view().servable(title, **kwargs)

    def __panel__(self):
        return Row(
            Row(self._assistant.controls(), max_width=300),
            self._assistant
        )

    def _repr_mimebundle_(self, include=None, exclude=None):
        # Fix: original comprehension iterated the Agent objects themselves
        # (`for exts in self._assistant.agents for ext in exts`) instead of
        # each agent's `_extensions` tuple; mirror _create_view's collection.
        panel_extension(
            *{ext for agent in self._assistant.agents for ext in agent._extensions},
            design='material', notifications=True
        )
        return self._create_view()._repr_mimebundle_(include, exclude)
6 changes: 3 additions & 3 deletions lumen/ai/assistant.py
Original file line number Diff line number Diff line change
Expand Up @@ -118,6 +118,9 @@ def download_notebook():
instantiated = []
self._analyses = []
for agent in agents or self.agents:
if not isinstance(agent, Agent):
kwargs = {"llm": llm} if agent.llm is None else {}
agent = agent(interface=interface, **kwargs)
if isinstance(agent, AnalysisAgent):
analyses = "\n".join(
f"- `{analysis.__name__}`: {(analysis.__doc__ or '').strip()}"
Expand All @@ -129,9 +132,6 @@ def download_notebook():
instantiated.append(agent)
break

if not isinstance(agent, Agent):
kwargs = {"llm": llm} if agent.llm is None else {}
agent = agent(interface=interface, **kwargs)
if agent.llm is None:
agent.llm = llm
# must use the same interface or else nothing shows
Expand Down
Loading

0 comments on commit 0f3922f

Please sign in to comment.