From 049a997f07643e9778164531b6142d03713712e8 Mon Sep 17 00:00:00 2001
From: Philipp Rudiger
Date: Tue, 29 Oct 2024 18:19:08 +0100
Subject: [PATCH] Various small fixes and improvements (#737)

---
 lumen/ai/agents.py  |  3 ++-
 lumen/ai/app.py     | 10 +++++++++-
 lumen/ai/llm.py     |  7 ++++++-
 lumen/command/ai.py |  2 +-
 4 files changed, 18 insertions(+), 4 deletions(-)

diff --git a/lumen/ai/agents.py b/lumen/ai/agents.py
index 0937df26..336736e2 100644
--- a/lumen/ai/agents.py
+++ b/lumen/ai/agents.py
@@ -400,7 +400,8 @@ async def answer(self, messages: list | str):
             min_width=350,
             widths={'Table': '90%'},
             disabled=True,
-            page_size=10
+            page_size=10,
+            header_filters=True
         )
         table_list.on_click(self._use_table)
         self.interface.stream(table_list, user="Lumen")
diff --git a/lumen/ai/app.py b/lumen/ai/app.py
index 7a66f89d..8fe1b32e 100644
--- a/lumen/ai/app.py
+++ b/lumen/ai/app.py
@@ -3,6 +3,7 @@
 import param
 
 from panel.config import config, panel_extension
+from panel.io.resources import CSS_URLS
 from panel.io.state import state
 from panel.layout import Row
 from panel.viewable import Viewer
@@ -55,6 +56,9 @@ class LumenAI(Viewer):
     llm = param.ClassSelector(class_=Llm, default=OpenAI(), doc="""
         The LLM provider to be used by default""")
 
+    show_controls = param.Boolean(default=True, doc="""
+        Whether to show assistant controls in the sidebar.""")
+
     template = param.Selector(
         default=config.param.template.names['fast'],
         objects=config.param.template.names, doc="""
@@ -121,6 +125,7 @@ def show(self, **kwargs):
         return self._create_view(server=True).show(**kwargs)
 
     def _create_view(self, server: bool | None = None):
+        config.css_files.append(CSS_URLS['font-awesome'])
         if (state.curdoc and state.curdoc.session_context) or server is True:
             panel_extension(
                 *{ext for agent in self._assistant.agents for ext in agent._extensions}, template=self.template
@@ -129,7 +134,8 @@ def _create_view(self, server: bool | None = None):
             template = state.template
             template.title = self.title
             template.main.append(self._assistant)
-            template.sidebar.append(self._assistant.controls())
+            if self.show_controls:
+                template.sidebar.append(self._assistant.controls())
             return template
         return super()._create_view()
 
@@ -140,6 +146,8 @@ def servable(self, title: str | None = None, **kwargs):
         return self._create_view().servable(title, **kwargs)
 
     def __panel__(self):
+        if not self.show_controls:
+            return self._assistant
         return Row(
             Row(self._assistant.controls(), max_width=300),
             self._assistant
diff --git a/lumen/ai/llm.py b/lumen/ai/llm.py
index 8da21a3e..b3b48068 100644
--- a/lumen/ai/llm.py
+++ b/lumen/ai/llm.py
@@ -29,6 +29,10 @@ class Llm(param.Parameterized):
     # Allows defining a dictionary of default models.
     model_kwargs = param.Dict(default={})
 
+    # Whether the LLM supports streaming of any kind
+    _supports_stream = True
+
+    # Whether the LLM supports streaming of Pydantic model output
     _supports_model_stream = True
 
     __abstract = True
@@ -94,7 +98,8 @@ async def stream(
         model_key: str = "default",
         **kwargs,
     ):
-        if response_model and not self._supports_model_stream:
+        if ((response_model and not self._supports_model_stream) or
+            not self._supports_stream):
             yield await self.invoke(
                 messages,
                 system=system,
diff --git a/lumen/command/ai.py b/lumen/command/ai.py
index 662654d2..07ab09eb 100644
--- a/lumen/command/ai.py
+++ b/lumen/command/ai.py
@@ -13,7 +13,7 @@
 SOURCE_CODE = """
 import lumen.ai as lmai
 
-lmai.LumenAI({tables}).servable()"""
+lmai.LumenAI([{tables}]).servable()"""
 
 VALID_EXTENSIONS = ['.parq', '.parquet', '.csv', '.json']
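
Usage note: the sketch below shows how the new show_controls option added to LumenAI in lumen/ai/app.py could be used when serving an app. It is a minimal example, not part of the patch; the CSV filename is hypothetical.

    # Serve the assistant without the controls column in the template sidebar.
    # 'penguins.csv' is a placeholder table path, not part of this patch.
    import lumen.ai as lmai

    lmai.LumenAI(['penguins.csv'], show_controls=False).servable()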
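Likewise, a minimal sketch of how the new _supports_stream flag on Llm is intended to be consumed: a provider subclass that cannot stream at all sets the flag to False, so Llm.stream() falls back to a single invoke() call as implemented in the lumen/ai/llm.py hunk above. The subclass name is illustrative and not part of the patch.

    # Illustrative sketch: opt a provider out of streaming entirely.
    from lumen.ai.llm import Llm

    class NonStreamingLlm(Llm):
        # A real provider would also implement its client and invoke()
        # plumbing; only the new streaming flags are shown here.
        _supports_stream = False
        _supports_model_stream = False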