From f2278b2038ff00a9acc63461b0d4339762fcd157 Mon Sep 17 00:00:00 2001 From: mdr223 Date: Mon, 6 Nov 2023 22:32:51 -0500 Subject: [PATCH] fix timeout error handling on server; try to correct padding on error messages in browser --- A2rchi/interfaces/chat_app/app.py | 27 +++++++++++-------- .../chat_app/static/script.js-template | 5 ++-- A2rchi/interfaces/chat_app/static/style.css | 4 +++ 3 files changed, 23 insertions(+), 13 deletions(-) diff --git a/A2rchi/interfaces/chat_app/app.py b/A2rchi/interfaces/chat_app/app.py index 99361ce6..1ae1c677 100644 --- a/A2rchi/interfaces/chat_app/app.py +++ b/A2rchi/interfaces/chat_app/app.py @@ -225,7 +225,7 @@ def insert_conversation(self, conversation_id, user_message, a2rchi_message, is_ return message_ids - def __call__(self, message: List[str], conversation_id: int, is_refresh: bool, msg_ts: datetime): + def __call__(self, message: List[str], conversation_id: int, is_refresh: bool, msg_ts: datetime, client_msg_ts: float, client_timeout: float): """ Execute the chat functionality. """ @@ -259,14 +259,19 @@ def __call__(self, message: List[str], conversation_id: int, is_refresh: bool, m while history[-1][0] == "A2rchi": _ = history.pop(-1) + # guard call to LLM; if timestamp from message is more than timeout secs in the past, + # return error=True and do not generate response as the client will have timed out + if msg_ts.timestamp() - client_msg_ts > client_timeout: + return None, None, None, True + # run chain to get result; limit users to 1000 queries per conversation; refreshing browser starts new conversation if len(history) < QUERY_LIMIT: full_history = history + [(sender, content)] if not is_refresh else history result = self.chain(full_history) else: - # the case where we have exceeded the QUERY LIMIT (built so that we do not overuse the chain) - output = "Sorry, our service is currently down due to exceptional demand. Please come again later." 
- return output, conversation_id + # for now let's return a timeout error, as returning a different + # error message would require handling new message_ids param properly. + return None, None, None, True # keep track of total number of queries and log this amount self.number_of_queries += 1 @@ -314,7 +319,7 @@ def __call__(self, message: List[str], conversation_id: int, is_refresh: bool, m if self.conn is not None: self.conn.close() - return output, conversation_id, message_ids + return output, conversation_id, message_ids, False class FlaskAppWrapper(object): @@ -377,15 +382,15 @@ def get_chat_response(self): client_msg_ts = request.json.get('client_msg_ts') / 1000 client_timeout = request.json.get('timeout') / 1000 - # if timestamp from message is more than TIMEOUT_SECS in the past; - # do not generate response as the client will have timed out - if msg_ts - client_msg_ts > client_timeout: - return jsonify({'error': 'client timeout'}), 408 - # query the chat and return the results. print(" INFO - Calling the ChatWrapper()") - response, conversation_id, message_ids = self.chat(message, conversation_id, is_refresh, msg_ts) + response, conversation_id, message_ids, error = self.chat(message, conversation_id, is_refresh, msg_ts, client_msg_ts, client_timeout) + + # handle timeout error + if error: + return jsonify({'error': 'client timeout'}), 408 + # otherwise return A2rchi's response to client return jsonify({'response': response, 'conversation_id': conversation_id, 'a2rchi_msg_id': message_ids[-1]}) def index(self): diff --git a/A2rchi/interfaces/chat_app/static/script.js-template b/A2rchi/interfaces/chat_app/static/script.js-template index 1d66b955..a6de7a99 100644 --- a/A2rchi/interfaces/chat_app/static/script.js-template +++ b/A2rchi/interfaces/chat_app/static/script.js-template @@ -100,8 +100,9 @@ const getChatResponse = async (incomingChatDiv, isRefresh=false) => { last_message: conversation.slice(-1), conversation_id: conversation_id, is_refresh: isRefresh, 
+ client_msg_ts: Date.now(), + client_timeout: DEFAULT_TIMEOUT_SECS * 1000 }), - client_msg_ts: Date.now(), timeout: DEFAULT_TIMEOUT_SECS * 1000 } @@ -116,7 +117,7 @@ const getChatResponse = async (incomingChatDiv, isRefresh=false) => { last_response_is_feedback_request = false; } catch (error) { pElement.classList.add("error"); - pElement.textContent = "

Oops! Something went wrong while retrieving the response. Please try again.

"; + pElement.textContent = "Oops! Something went wrong while retrieving the response. Please try again."; } // Remove the typing animation, append the paragraph element and save the chats to local storage diff --git a/A2rchi/interfaces/chat_app/static/style.css b/A2rchi/interfaces/chat_app/static/style.css index 931f370e..3d08d96c 100644 --- a/A2rchi/interfaces/chat_app/static/style.css +++ b/A2rchi/interfaces/chat_app/static/style.css @@ -168,6 +168,10 @@ span.material-symbols-rounded { .chat .chat-details p.error { color: #e55865; } +.chat .chat-details .error { + padding: 0 50px 0 25px; + color: var(--text-color); +} .chat .typing-animation { padding-left: 25px; display: inline-flex;