This repository was archived by the owner on Jan 5, 2025. It is now read-only.

Commit 89653d2

Merge pull request #193 from openchatai/feat/slack
adding prompts to customise behaviour of application
2 parents a9fc758 + 789bba6; commit 89653d2

5 files changed (+22, -6 lines)
First changed file (a per-app custom prompts module; its path is not shown on this page):

@@ -1,3 +1,3 @@
 flow_generation_prompts = ""
 
-api_generation_prompt = ""
+api_generation_prompt = "Use encoded value for channel"

llm-server/routes/workflow/extractors/extract_body.py (+11, -2)
@@ -6,6 +6,7 @@
 from typing import Any, Optional
 from routes.workflow.extractors.extract_json import extract_json_payload
 from custom_types.t_json import JsonData
+import importlib
 import logging
 
 openai_api_key = os.getenv("OPENAI_API_KEY")
@@ -16,7 +17,7 @@ def gen_body_from_schema(
     body_schema: str,
     text: str,
     prev_api_response: str,
-    example: str,
+    app: Optional[str],
     current_state: Optional[str],
 ) -> Any:
     chat = ChatOpenAI(
@@ -25,6 +26,11 @@ def gen_body_from_schema(
         temperature=0,
     )
 
+    if app:
+        module_name = f"integrations.custom_prompts.{app}"
+        module = importlib.import_module(module_name)
+        api_generation_prompt = getattr(module, "api_generation_prompt")
+
     messages = [
         SystemMessage(
             content="You are an intelligent machine learning model that can produce REST API's body in json format"
@@ -37,10 +43,13 @@ def gen_body_from_schema(
         HumanMessage(content="prev api responses: {}".format(prev_api_response)),
         HumanMessage(content="current_state: {}".format(current_state)),
         HumanMessage(
-            content="Given the provided information, generate the appropriate minified JSON payload to use as body for the API request. Avoid using fields that are not required, and user input doesnot require it."
+            content="Given the provided information, generate the appropriate minified JSON payload to use as body for the API request. If a user doesn't provide a required parameter, use sensible defaults for required params, and leave optional params"
         ),
     ]
 
+    if api_generation_prompt is not None:
+        messages.append(HumanMessage(content="{}".format(api_generation_prompt)))
+
     result = chat(messages)
 
     logging.info("[OpenCopilot] LLM Body Response: {}".format(result.content))
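
One thing worth noting about the hunk above: in the code shown, api_generation_prompt is assigned only inside the `if app:` branch but read unconditionally in the later `if api_generation_prompt is not None:` check, so a call with app=None would raise UnboundLocalError unless the name is defined elsewhere in the file; getattr without a default would likewise raise AttributeError if a prompts module omits the attribute. A more defensive variant of the same dynamic-import pattern could look like the sketch below; load_api_generation_prompt is a hypothetical helper, not part of this commit.

    import importlib
    from typing import Optional

    def load_api_generation_prompt(app: Optional[str]) -> Optional[str]:
        """Fetch api_generation_prompt from integrations.custom_prompts.<app>, if any.

        Returns None when no app is given, the module cannot be imported, or the
        module does not define the attribute, so callers can simply skip the
        extra HumanMessage instead of crashing.
        """
        if not app:
            return None
        try:
            module = importlib.import_module(f"integrations.custom_prompts.{app}")
        except ModuleNotFoundError:
            return None
        return getattr(module, "api_generation_prompt", None)

    # Inside gen_body_from_schema this would replace the inline if-block:
    #     api_generation_prompt = load_api_generation_prompt(app)
    #     if api_generation_prompt:
    #         messages.append(HumanMessage(content=api_generation_prompt))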

llm-server/routes/workflow/generate_openapi_payload.py (+3, -2)
@@ -85,6 +85,7 @@ def generate_openapi_payload(
     text: str,
     _operation_id: str,
     prev_api_response: str,
+    app: Optional[str],
     current_state: Optional[str],
 ) -> ApiInfo:
     (
@@ -119,12 +120,12 @@
         )
 
     if api_info.body_schema:
-        example = gen_ex_from_schema(api_info.body_schema)
+        # example = gen_ex_from_schema(api_info.body_schema)
         api_info.body_schema = gen_body_from_schema(
             json.dumps(api_info.body_schema, separators=(",", ":")),
             text,
             prev_api_response,
-            example,
+            app,
             current_state,
         )

llm-server/routes/workflow/utils/run_openapi_ops.py (+6, -1)
@@ -32,7 +32,12 @@ def run_openapi_operations(
         # refresh state after every api call, we can look into optimizing this later as well
         operation_id = step.get("open_api_operation_id")
         api_payload = generate_openapi_payload(
-            swagger_json, text, operation_id, prev_api_response, current_state
+            swagger_json,
+            text,
+            operation_id,
+            prev_api_response,
+            app,
+            current_state,
         )
 
         api_response = make_api_request(headers=headers, **api_payload.__dict__)
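
Taken together, the changes thread an app identifier from workflow execution down to body generation. A minimal sketch of the resulting call chain, assuming app is something like "slack" and the surrounding variables already exist in run_openapi_operations (values here are illustrative, not from the commit):

    app = "slack"  # example value; selects integrations/custom_prompts/slack.py for extra prompts

    api_payload = generate_openapi_payload(
        swagger_json,       # parsed OpenAPI / Swagger spec
        text,               # the user's natural-language request
        operation_id,       # step.get("open_api_operation_id")
        prev_api_response,  # responses collected from earlier steps
        app,                # new argument, forwarded to gen_body_from_schema
        current_state,      # application state refreshed after each call
    )

    # Inside generate_openapi_payload, app now takes the argument slot that the
    # removed `example` value used to occupy when calling gen_body_from_schema.
    api_response = make_api_request(headers=headers, **api_payload.__dict__)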

llm-server/utils/make_api_call.py (+1)
@@ -35,6 +35,7 @@ def make_api_request(
 ) -> Response:
     try:
         endpoint = replace_url_placeholders(endpoint, path_params)
+        print(f"Endpoint: {endpoint}")
         url = servers[0] + endpoint
         # Create a session and configure it with headers
         session = requests.Session()
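
The added print is a quick debugging aid. Elsewhere in this diff (extract_body.py) the project reports through the logging module, so a quieter equivalent of the same line, offered only as a suggestion, could be:

    import logging

    # Same endpoint information as the added print, routed through the standard
    # logger; the "[OpenCopilot]" prefix mirrors the log line in extract_body.py.
    logging.info("[OpenCopilot] Endpoint: %s", endpoint)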
