From 8719d5bf6f92c4f54275454cef848fe38812cd1f Mon Sep 17 00:00:00 2001
From: Garrick Aden-Buie
Date: Mon, 18 Aug 2025 11:19:25 -0400
Subject: [PATCH 1/3] feat(pkg-r): Stream from chat in ExtendedTask

---
 pkg-r/DESCRIPTION   |  1 +
 pkg-r/R/querychat.R | 21 +++++++++++++++------
 2 files changed, 16 insertions(+), 6 deletions(-)

diff --git a/pkg-r/DESCRIPTION b/pkg-r/DESCRIPTION
index f5471586..3d39b3e6 100644
--- a/pkg-r/DESCRIPTION
+++ b/pkg-r/DESCRIPTION
@@ -21,6 +21,7 @@ Imports:
     ellmer (>= 0.3.0),
     htmltools,
     lifecycle,
+    promises,
     purrr,
     rlang,
     shiny,
diff --git a/pkg-r/R/querychat.R b/pkg-r/R/querychat.R
index cae09e7a..f90327a1 100644
--- a/pkg-r/R/querychat.R
+++ b/pkg-r/R/querychat.R
@@ -285,13 +285,22 @@ querychat_server <- function(id, querychat_config) {
     )
   }
 
-  # Handle user input
+  append_stream_task <- shiny::ExtendedTask$new(
+    function(client, user_input) {
+      stream <- client$stream_async(
+        user_input,
+        stream = "content"
+      )
+
+      p <- promises::promise_resolve(stream)
+      promises::then(p, function(stream) {
+        shinychat::chat_append("chat", stream)
+      })
+    }
+  )
+
   shiny::observeEvent(input$chat_user_input, {
-    # Add user message to the chat history
-    shinychat::chat_append(
-      "chat",
-      chat$stream_async(input$chat_user_input, stream = "content")
-    )
+    append_stream_task$invoke(chat, input$chat_user_input)
   })
 
   list(

From a04751d61cd75f92785b4277f3d58b3ef56c4b37 Mon Sep 17 00:00:00 2001
From: Garrick Aden-Buie
Date: Mon, 18 Aug 2025 11:21:41 -0400
Subject: [PATCH 2/3] docs: Add NEWS item

---
 pkg-r/NEWS.md | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/pkg-r/NEWS.md b/pkg-r/NEWS.md
index d44c08d2..ccf7dbda 100644
--- a/pkg-r/NEWS.md
+++ b/pkg-r/NEWS.md
@@ -17,3 +17,5 @@
 * the `querychat.client` R option, which can be any of the above options,
 * the `QUERYCHAT_CLIENT` environment variable, which should be a provider-model string,
 * or the default model from `ellmer::chat_openai()`.
+
+* `querychat_server()` now uses a `shiny::ExtendedTask` for streaming the chat response, which allows the dashboard to update and remain responsive while the chat response is streaming in. (#63)

From 25c5c4c2195122337023985197078101de66cda2 Mon Sep 17 00:00:00 2001
From: Garrick Aden-Buie
Date: Mon, 18 Aug 2025 11:26:29 -0400
Subject: [PATCH 3/3] draft(pkg-py): Use extended task

---
 pkg-py/src/querychat/querychat.py | 39 ++++++++++++++++++++++---
 1 file changed, 28 insertions(+), 11 deletions(-)

diff --git a/pkg-py/src/querychat/querychat.py b/pkg-py/src/querychat/querychat.py
index c9e70482..16293175 100644
--- a/pkg-py/src/querychat/querychat.py
+++ b/pkg-py/src/querychat/querychat.py
@@ -617,19 +617,23 @@ async def query(query: str):
         chat.register_tool(update_dashboard)
         chat.register_tool(query)
 
-        # Add greeting if provided
-        if greeting and any(len(g) > 0 for g in greeting.split("\n")):
-            # Display greeting in chat UI
-            pass
-        else:
-            # Generate greeting using the chat model
-            pass
+        @reactive.extended_task
+        async def append_stream(chat_obj: chatlas.Chat, user_input: str):
+            stream = await chat_obj.stream_async(user_input, echo="none")
+            await chat_ui.append_message_stream(stream)
 
         # Handle user input
         @chat_ui.on_user_submit
-        async def _(user_input: str):
-            stream = await chat.stream_async(user_input, echo="none")
-            await chat_ui.append_message_stream(stream)
+        def _(user_input: str):
+            append_stream(chat, user_input)
+
+        @reactive.effect
+        async def _():
+            if append_stream.status() == "error":
+                await chat_ui.append_message(
+                    "An error occurred while processing your input. "
+                    f"Error: {append_stream.result()}.",
+                )
 
         @reactive.effect
         async def greet_on_startup():
@@ -642,5 +646,18 @@ async def greet_on_startup():
             )
             await chat_ui.append_message_stream(stream)
 
+        @reactive.calc
+        def current_query_rv():
+            return current_query.get()
+
+        @reactive.calc
+        def current_title_rv():
+            return current_title.get()
+
         # Return the interface for other components to use
-        return QueryChat(chat, current_query.get, current_title.get, filtered_df)
+        return QueryChat(
+            chat,
+            current_query_rv,
+            current_title_rv,
+            filtered_df,
+        )