fix timeout error handling on server; try to correct padding on error messages in browser
mdr223 committed Nov 7, 2023
1 parent 0ef02de commit f2278b2
Showing 3 changed files with 23 additions and 13 deletions.
27 changes: 16 additions & 11 deletions A2rchi/interfaces/chat_app/app.py
@@ -225,7 +225,7 @@ def insert_conversation(self, conversation_id, user_message, a2rchi_message, is_
return message_ids


def __call__(self, message: List[str], conversation_id: int, is_refresh: bool, msg_ts: datetime):
def __call__(self, message: List[str], conversation_id: int, is_refresh: bool, msg_ts: datetime, client_msg_ts: float, client_timeout: float):
"""
Execute the chat functionality.
"""
@@ -259,14 +259,19 @@ def __call__(self, message: List[str], conversation_id: int, is_refresh: bool, m
while history[-1][0] == "A2rchi":
_ = history.pop(-1)

# guard call to LLM; if timestamp from message is more than timeout secs in the past;
# return error=True and do not generate response as the client will have timed out
if msg_ts.timestamp() - client_msg_ts > client_timeout:
return None, None, None, True

# run chain to get result; limit users to 1000 queries per conversation; refreshing browser starts new conversation
if len(history) < QUERY_LIMIT:
full_history = history + [(sender, content)] if not is_refresh else history
result = self.chain(full_history)
else:
# the case where we have exceeded the QUERY LIMIT (built so that we do not overuse the chain)
output = "Sorry, our service is currently down due to exceptional demand. Please come again later."
return output, conversation_id
# for now let's return a timeout error, as returning a different
# error message would require handling new message_ids param. properly
return None, None, None, True

# keep track of total number of queries and log this amount
self.number_of_queries += 1
@@ -314,7 +319,7 @@ def __call__(self, message: List[str], conversation_id: int, is_refresh: bool, m
if self.conn is not None:
self.conn.close()

return output, conversation_id, message_ids
return output, conversation_id, message_ids, False


class FlaskAppWrapper(object):
@@ -377,15 +382,15 @@ def get_chat_response(self):
client_msg_ts = request.json.get('client_msg_ts') / 1000
client_timeout = request.json.get('timeout') / 1000

# if timestamp from message is more than TIMEOUT_SECS in the past;
# do not generate response as the client will have timed out
if msg_ts - client_msg_ts > client_timeout:
return jsonify({'error': 'client timeout'}), 408

# query the chat and return the results.
print(" INFO - Calling the ChatWrapper()")
response, conversation_id, message_ids = self.chat(message, conversation_id, is_refresh, msg_ts)
response, conversation_id, message_ids, error = self.chat(message, conversation_id, is_refresh, msg_ts, client_msg_ts, client_timeout)

# handle timeout error
if error:
return jsonify({'error': 'client timeout'}), 408

# otherwise return A2rchi's response to client
return jsonify({'response': response, 'conversation_id': conversation_id, 'a2rchi_msg_id': message_ids[-1]})

def index(self):
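The guard added to __call__ is a plain timestamp comparison: the browser sends its send time and its timeout (both in milliseconds, converted to seconds in get_chat_response), and the server skips the LLM call once the client's deadline has already passed. Below is a minimal, self-contained sketch of that check; the helper name and the example values are illustrative, not part of the commit.

from datetime import datetime

def client_timed_out(msg_ts: datetime, client_msg_ts: float, client_timeout: float) -> bool:
    # msg_ts is the server's receive time; client_msg_ts and client_timeout are in
    # seconds, already divided by 1000 from the millisecond values the browser sends
    # (Date.now() and DEFAULT_TIMEOUT_SECS * 1000).
    return msg_ts.timestamp() - client_msg_ts > client_timeout

now = datetime.now()
print(client_timed_out(now, now.timestamp() - 45.0, 30.0))  # True: the 30 s budget expired 15 s ago
print(client_timed_out(now, now.timestamp() - 10.0, 30.0))  # False: still within the 30 s budget

When the check fires, __call__ returns (None, None, None, True), and get_chat_response maps that error flag to the same 408 "client timeout" response the endpoint previously produced by doing the comparison itself.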
5 changes: 3 additions & 2 deletions A2rchi/interfaces/chat_app/static/script.js-template
@@ -100,8 +100,9 @@ const getChatResponse = async (incomingChatDiv, isRefresh=false) => {
last_message: conversation.slice(-1),
conversation_id: conversation_id,
is_refresh: isRefresh,
client_msg_ts: Date.now(),
client_timeout: DEFAULT_TIMEOUT_SECS * 1000
}),
client_msg_ts: Date.now(),
timeout: DEFAULT_TIMEOUT_SECS * 1000
}

@@ -116,7 +117,7 @@ const getChatResponse = async (incomingChatDiv, isRefresh=false) => {
last_response_is_feedback_request = false;
} catch (error) {
pElement.classList.add("error");
pElement.textContent = "<p>Oops! Something went wrong while retrieving the response. Please try again.</p>";
pElement.textContent = "Oops! Something went wrong while retrieving the response. Please try again.";
}

// Remove the typing animation, append the paragraph element and save the chats to local storage
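The template change moves client_msg_ts and client_timeout out of the fetch options and into the JSON body, which is where Flask's request.json can actually see them. The following is a rough Python stand-in for the request the updated template sends; the route, port, and payload values are assumptions for illustration, not taken from the repository.

import time
import requests

DEFAULT_TIMEOUT_SECS = 30  # placeholder; the template defines its own constant
conversation = [["User", "What does the timeout guard do?"]]  # illustrative history
conversation_id = 1234  # illustrative existing conversation id

payload = {
    "last_message": conversation[-1:],
    "conversation_id": conversation_id,
    "is_refresh": False,
    "client_msg_ts": time.time() * 1000,            # milliseconds, like Date.now()
    "client_timeout": DEFAULT_TIMEOUT_SECS * 1000,  # milliseconds
}

resp = requests.post("http://localhost:7861/api/get_chat_response",  # assumed URL
                     json=payload, timeout=DEFAULT_TIMEOUT_SECS)
if resp.status_code == 408:
    print("server declined: client already timed out")
else:
    print(resp.json()["response"])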
4 changes: 4 additions & 0 deletions A2rchi/interfaces/chat_app/static/style.css
@@ -168,6 +168,10 @@ span.material-symbols-rounded {
.chat .chat-details p.error {
color: #e55865;
}
.chat .chat-details .error {
padding: 0 50px 0 25px;
color: var(--text-color);
}
.chat .typing-animation {
padding-left: 25px;
display: inline-flex;
