Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Streaming choice feature #2070

Open
wants to merge 9 commits into
base: main
Choose a base branch
from
42 changes: 36 additions & 6 deletions private_gpt/ui/ui.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,6 +98,8 @@ def __init__(

self._selected_filename = None

self._response_style = True

basicbloke marked this conversation as resolved.
Show resolved Hide resolved
# Initialize system prompt based on default mode
self.mode = MODES[0]
self._system_prompt = self._get_default_system_prompt(self.mode)
Expand Down Expand Up @@ -168,6 +170,12 @@ def build_history() -> list[ChatMessage]:
role=MessageRole.SYSTEM,
),
)
def draw_methods(service_type):
    """Return a mapping from the streaming flag to the service method.

    Keys match the value of ``self._response_style``: ``True`` selects
    the streaming variant, ``False`` the blocking one.

    Args:
        service_type: Either ``'chat'`` or ``'summarize'``.

    Returns:
        dict[bool, Callable]: streaming-flag -> bound service method.

    Raises:
        ValueError: If ``service_type`` is not a known service.
    """
    # Explicit dispatch instead of getattr() reflection (reviewer
    # feedback): it is easier to read and lets static tooling see
    # which service methods are actually used.
    if service_type == 'chat':
        return {
            True: self._chat_service.stream_chat,
            False: self._chat_service.chat,
        }
    if service_type == 'summarize':
        return {
            True: self._summarize_service.stream_summarize,
            False: self._summarize_service.summarize,
        }
    # Original reflection raised AttributeError here; an explicit
    # error message is clearer for an unsupported service name.
    raise ValueError(f"Unknown service type: {service_type!r}")
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I don't think reflection is necessary in this case. It complicates the understanding of the method a lot.

Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Consider using a lambda function or conventional functions and a switch statement

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

My apologies for merging main after the commit

match mode:
case Modes.RAG_MODE:
# Use only the selected file for the query
Expand All @@ -182,18 +190,20 @@ def build_history() -> list[ChatMessage]:
docs_ids.append(ingested_document.doc_id)
context_filter = ContextFilter(docs_ids=docs_ids)

query_stream = self._chat_service.stream_chat(
methods = draw_methods('chat')
query_stream = methods.get(self._response_style, self._chat_service.stream_chat)(
messages=all_messages,
use_context=True,
context_filter=context_filter,
context_filter=context_filter
)
yield from yield_deltas(query_stream)
yield from (yield_deltas(query_stream) if self._response_style else [query_stream.response])
case Modes.BASIC_CHAT_MODE:
llm_stream = self._chat_service.stream_chat(
methods = draw_methods('chat')
llm_stream = methods.get(self._response_style, self._chat_service.stream_chat)(
messages=all_messages,
use_context=False,
use_context=False
)
yield from yield_deltas(llm_stream)
yield from (yield_deltas(llm_stream) if self._response_style else [llm_stream.response])

case Modes.SEARCH_MODE:
response = self._chunks_service.retrieve_relevant(
Expand Down Expand Up @@ -227,6 +237,15 @@ def build_history() -> list[ChatMessage]:
instructions=message,
)
yield from yield_tokens(summary_stream)
'''
methods = draw_methods('summarize')
summary_stream = methods.get(self._response_style, self._summarize_service.stream_summarize)(
use_context=True,
context_filter=context_filter,
instructions=message
)
yield from yield_tokens(summary_stream) if response_style else summary_stream
'''

# On initialization and on mode change, this function set the system prompt
# to the default prompt based on the mode (and user settings).
Expand Down Expand Up @@ -279,6 +298,9 @@ def _set_current_mode(self, mode: Modes) -> Any:
gr.update(value=self._explanation_mode),
]

def _set_response_style(self, response_style: bool) -> None:
    """Store the UI's streaming preference.

    ``response_style`` is emitted by a ``gr.Checkbox``, so it is a
    bool: True streams the answer token-by-token, False returns it
    whole. Annotation corrected from ``str`` to ``bool`` — the value
    is only ever used for truthiness and the checkbox emits a bool.
    """
    self._response_style = response_style

def _list_ingested_files(self) -> list[list[str]]:
files = set()
for ingested_document in self._ingest_service.list_ingested():
Expand Down Expand Up @@ -402,6 +424,14 @@ def _build_ui_blocks(self) -> gr.Blocks:
max_lines=3,
interactive=False,
)
response_style = gr.Checkbox(
label="Response Style: Streaming",
value=self._response_style
)
response_style.input(
self._set_response_style,
inputs=response_style
)
upload_button = gr.components.UploadButton(
"Upload File(s)",
type="filepath",
Expand Down
12 changes: 6 additions & 6 deletions settings.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -26,20 +26,20 @@ ui:
enabled: true
path: /
default_chat_system_prompt: >
You are a helpful, respectful and honest assistant.
You are a helpful, respectful and honest assistant.
Always answer as helpfully as possible and follow ALL given instructions.
Do not speculate or make up information.
Do not reference any given instructions or context.
default_query_system_prompt: >
You can only answer questions about the provided context.
If you know the answer but it is not based in the provided context, don't provide
You can only answer questions about the provided context.
If you know the answer but it is not based in the provided context, don't provide
the answer, just state the answer is not in the context provided.
default_summarization_system_prompt: >
Provide a comprehensive summary of the provided context information.
Provide a comprehensive summary of the provided context information.
The summary should cover all the key points and main ideas presented in
the original text, while also condensing the information into a concise
the original text, while also condensing the information into a concise
and easy-to-understand format. Please ensure that the summary includes
relevant details and examples that support the main ideas, while avoiding
relevant details and examples that support the main ideas, while avoiding
any unnecessary information or repetition.
delete_file_button_enabled: true
delete_all_files_button_enabled: true
Expand Down
Loading