This repository was archived by the owner on Jan 5, 2025. It is now read-only.

Commit 4acc362

Merge pull request #601 from openchatai/feat/parsing
Feat/parsing
2 parents a60d4c1 + 743ebbc commit 4acc362

File tree

4 files changed: +63 -34 lines changed


llm-server/routes/chat/implementation/chain_strategy.py (+34 -20)

@@ -49,20 +49,26 @@ async def handle_request(

        top_documents = select_top_documents(actions + flows + knowledgebase)

-        emit(
-            f"{session_id}_info", "Checking if actionable ... \n"
-        ) if is_streaming else None
+        (
+            emit(f"{session_id}_info", "Checking if actionable ... \n")
+            if is_streaming
+            else None
+        )
        next_step = get_next_response_type(
            user_message=text,
            session_id=session_id,
            chat_history=conversations_history,
            top_documents=top_documents,
        )

-        emit(
-            f"{session_id}_info",
-            f"Is next step actionable: {next_step.actionable}... \n",
-        ) if is_streaming else None
+        (
+            emit(
+                f"{session_id}_info",
+                f"Is next step actionable: {next_step.actionable}... \n",
+            )
+            if is_streaming
+            else None
+        )
        if next_step.actionable and next_step.api:
            # if the LLM given operationID is actually exist, then use it, otherwise fallback to the highest vector space document
            llm_predicted_operation_id = (
@@ -78,9 +84,11 @@ async def handle_request(
                [VectorCollections.actions, VectorCollections.flows],
            )
            # now run it
-            emit(
-                f"{session_id}_info", "Executing the actionable item... \n"
-            ) if is_streaming else None
+            (
+                emit(f"{session_id}_info", "Executing the actionable item... \n")
+                if is_streaming
+                else None
+            )
            response = await run_actionable_item(
                bot_id=bot_id,
                actionable_item=actionable_item,
@@ -92,20 +100,26 @@ async def handle_request(
            )

            response.api_called = True
-            add_action_call(
-                operation_id=actionable_item["actions"][0].document.metadata.get(
-                    "operation_id", ""
-                ),
-                session_id=session_id,
-                bot_id=bot_id,
-            )
+            if (
+                actionable_item
+                and "actions" in actionable_item
+                and actionable_item["actions"]
+            ):
+                action = actionable_item["actions"][0]
+                operation_id = action.document.metadata.get("operation_id", "")
+                add_action_call(
+                    operation_id=operation_id, session_id=session_id, bot_id=bot_id
+                )
            return response
+
        else:
            # it means that the user query is "informative" and can be answered using text only
            # get the top knowledgeable documents (if any)
-            emit(
-                f"{session_id}_info", "Running informative action... \n"
-            ) if is_streaming else None
+            (
+                emit(f"{session_id}_info", "Running informative action... \n")
+                if is_streaming
+                else None
+            )
            response = await run_informative_item(
                informative_item=top_documents,
                base_prompt=base_prompt,
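
Note on the chain_strategy.py change: the old code indexed actionable_item["actions"][0] unconditionally before calling add_action_call, which would raise a KeyError or IndexError when no action documents were selected. A minimal sketch of the guarded pattern, using plain dicts in place of the real document objects (the actual code reads action.document.metadata):

# Minimal sketch, with plain dicts standing in for the real document objects.
def record_action_call(actionable_item):
    if (
        actionable_item
        and "actions" in actionable_item
        and actionable_item["actions"]
    ):
        action = actionable_item["actions"][0]
        return action.get("operation_id", "")
    return None  # nothing to record, so add_action_call would be skipped


print(record_action_call({"actions": [{"operation_id": "listPets"}]}))  # listPets
print(record_action_call({"actions": []}))  # None (the old code raised IndexError here)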

llm-server/routes/flow/utils/run_workflow.py (+1 -1)

@@ -60,5 +60,5 @@ async def run_flow(
        error=output["error"],
        message=output["response"],
        api_called=True,
-        action_ids=flow.get_all_action_ids(),
+        operation_ids=flow.get_all_action_ids(),
    )

llm-server/utils/llm_consts.py (+1 -1)

@@ -80,5 +80,5 @@ def get_mysql_uri():
max_pages_to_crawl = int(os.getenv("MAX_PAGES_TO_CRAWL", "15"))

enable_followup_questions = (
-    True if os.getenv("ENABLE_FOLLOWUP_QUESTIONS", "YES") == "YES" else False
+    True if os.getenv("ENABLE_FOLLOWUP_QUESTIONS", "NO") == "YES" else False
)
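
Note on the llm_consts.py change: the default for ENABLE_FOLLOWUP_QUESTIONS flips from "YES" to "NO", so follow-up questions become opt-in. A quick, hypothetical standalone check of the resulting behaviour:

import os

os.environ.pop("ENABLE_FOLLOWUP_QUESTIONS", None)
print(os.getenv("ENABLE_FOLLOWUP_QUESTIONS", "NO") == "YES")   # False: disabled when unset (new default)
print(os.getenv("ENABLE_FOLLOWUP_QUESTIONS", "YES") == "YES")  # True: the old default enabled it when unset

os.environ["ENABLE_FOLLOWUP_QUESTIONS"] = "YES"
print(os.getenv("ENABLE_FOLLOWUP_QUESTIONS", "NO") == "YES")   # True: explicit opt-in still works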

llm-server/utils/swagger_parser.py (+27 -12)

@@ -18,15 +18,15 @@

class Endpoint:
    def __init__(
-            self,
-            operation_id,
-            endpoint_type,
-            name,
-            description,
-            request_body,
-            parameters,
-            response,
-            path,
+        self,
+        operation_id,
+        endpoint_type,
+        name,
+        description,
+        request_body,
+        parameters,
+        response,
+        path,
    ):
        self.operation_id = operation_id
        self.type = endpoint_type
@@ -233,9 +233,24 @@ def get_all_actions(self, bot_id: str):
                # Process the payload to resolve any $ref references
                processed_payload = self.process_payload(payload)

+                name = method_data.get(
+                    "operation_id",
+                    method_data.get(
+                        "name",
+                        method_data.get("summary", method_data.get("description")),
+                    ),
+                )
+                if name is None:
+                    logger.error(
+                        "operation_id_not_found",
+                        bot_id=bot_id,
+                        path=path,
+                        method=method,
+                    )
+
                action_dto = ActionDTO(
                    api_endpoint=base_uri + path,
-                    name=method_data.get("name", method_data.get("summary", method_data.get('description'))),
+                    name=name,
                    description=method_data.get("description"),
                    request_type=method.upper(),
                    payload=processed_payload,
@@ -263,8 +278,8 @@ def gather_metadata(self, api_data: dict) -> DefaultDict[str, Dict[str, str]]:

        for path, path_item in api_data["paths"].items():
            for http_verb, http_details in path_item.items():
-                summary = http_details.get("summary") or ""
-                description = http_details.get("description") or ""
+                summary = http_details.get("summary", "")
+                description = http_details.get("description", "")
                # inconsistent tag behaviour..
                # tags = (
                #     ", ".join([t["name"] for t in http_details.get("tags", [])])

0 commit comments