Skip to content

Commit

Permalink
Add awaiting_llm_response property
Browse files Browse the repository at this point in the history
  • Loading branch information
rachaelcodes committed Sep 10, 2024
1 parent 63fba74 commit bc7dd8a
Show file tree
Hide file tree
Showing 4 changed files with 35 additions and 0 deletions.
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
# Generated by Django 5.1.1 on 2024-09-10 10:34

from django.db import migrations, models


class Migration(migrations.Migration):
    """Add the ``awaiting_llm_response`` boolean flag to the ``Chat`` model.

    The flag defaults to ``False`` and is used by the chat views to signal
    that a chat is still waiting on an LLM reply.
    """

    dependencies = [
        ("redbox_core", "0042_chat_chat_backend_chat_chat_map_question_prompt_and_more"),
    ]

    operations = [
        # New nullable-free boolean column; existing rows get the default.
        migrations.AddField(
            model_name="chat",
            name="awaiting_llm_response",
            field=models.BooleanField(default=False),
        ),
    ]
1 change: 1 addition & 0 deletions django_app/redbox_app/redbox_core/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -414,6 +414,7 @@ def get_ordered_by_citation_priority(cls, chat_message_id: uuid.UUID) -> Sequenc
class Chat(UUIDPrimaryKeyBase, TimeStampedModel, AbstractAISettings):
name = models.TextField(max_length=1024, null=False, blank=False)
user = models.ForeignKey(User, on_delete=models.CASCADE)
awaiting_llm_response = models.BooleanField(default=False)

def __str__(self) -> str: # pragma: no cover
return self.name or ""
Expand Down
3 changes: 3 additions & 0 deletions django_app/redbox_app/redbox_core/views/chat_views.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,11 +29,13 @@ def get(self, request: HttpRequest, chat_id: uuid.UUID | None = None) -> HttpRes

messages: Sequence[ChatMessage] = []
current_chat = None
awaiting_llm_response = False
if chat_id:
current_chat = get_object_or_404(Chat, id=chat_id)
if current_chat.user != request.user:
return redirect(reverse("chats"))
messages = ChatMessage.get_messages_ordered_by_citation_priority(chat_id)
awaiting_llm_response = current_chat.awaiting_llm_response
endpoint = URL.build(scheme=settings.WEBSOCKET_SCHEME, host=request.get_host(), path=r"/ws/chat/")

completed_files, processing_files = File.get_completed_and_processing_files(request.user)
Expand Down Expand Up @@ -61,6 +63,7 @@ def get(self, request: HttpRequest, chat_id: uuid.UUID | None = None) -> HttpRes
}
for _, llm in AbstractAISettings.ChatBackend.choices
],
"awaiting_llm_response": awaiting_llm_response,
}

return render(
Expand Down
11 changes: 11 additions & 0 deletions django_app/redbox_app/templates/chats.html
Original file line number Diff line number Diff line change
Expand Up @@ -119,6 +119,17 @@ <h2 class="chat-title__heading govuk-visually-hidden" hidden>Current chat</h2>

{# CSR messages are inserted here #}

{% if awaiting_llm_response %}
<div class="govuk-warning-text">
<span class="govuk-warning-text__icon" aria-hidden="true">!</span>
<strong class="govuk-warning-text__text">
<span class="govuk-visually-hidden">Warning</span>
                Awaiting a response from the LLM; please refresh the page.
                <br>If this continues, <a href="mailto:{{ contact_email }}">contact support</a>.
</strong>
</div>
{% endif %}

</div>

<input type="hidden" name="csrfmiddlewaretoken" value="{{ csrf_token }}"/>
Expand Down

0 comments on commit bc7dd8a

Please sign in to comment.