powered_functions.py

import openai
import os
import numpy as np
from dotenv import load_dotenv

# These service functions are imported so that the function call generated by the
# model (and executed via exec() below) can resolve them in this module's namespace.
from services.buda_api import get_bitcoin_price, buy_bitcoin
from services.mailer_manager import send_email
from services.weather import get_weather
from services.jokes import get_joke

# Load OPENAI_API_KEY from a local .env file (pre-1.0 openai SDK style).
load_dotenv()
openai.api_key = os.getenv("OPENAI_API_KEY")

def cosine_similarity(data1, data2):
    # Both arguments are embedding objects returned by the OpenAI SDK; the vector
    # itself lives in the .embedding attribute.
    vector1 = data1.embedding
    vector2 = data2.embedding
    return np.dot(vector1, vector2) / (np.linalg.norm(vector1) * np.linalg.norm(vector2))

def get_most_similar_text(text, embeddings, texts):
    # Embed the query, then return the source text whose embedding has the highest
    # cosine similarity to it.
    text_vector = get_embeddings_vector_for_texts([text])[0]
    similarities = []
    for embedding in embeddings:
        similarities.append(cosine_similarity(text_vector, embedding))
    return texts[np.argmax(similarities)]

def get_embeddings_vector_for_texts(texts):
    # Pre-1.0 openai SDK call: response.data is a list of objects, each exposing
    # its vector through the .embedding attribute used by cosine_similarity above.
    response = openai.Embedding.create(
        engine="text-embedding-ada-002",
        input=texts
    )
    return response.data

def create_chat_function_response(messages, model="gpt-4-0613"):
    # Low temperature keeps the model's answers close to deterministic, which
    # matters because its output is executed later.
    response = openai.ChatCompletion.create(
        model=model,
        temperature=0.1,
        messages=messages
    )
    return response.choices[0]

# Read the source of every Python module under services/; these snippets are the
# corpus that the user's question is matched against.
codes = []
codes_path = "services/"
for filename in os.listdir(codes_path):
    if not filename.endswith(".py"):
        continue
    with open(os.path.join(codes_path, filename), 'r') as f:
        codes.append(f.read())

# Embed the service sources once and cache the vectors on disk. The texts are sent
# in batches of 3 to keep each request small.
embeddings_file = "code_embeddings.npy"
if os.path.isfile(embeddings_file):
    embeddings = np.load(embeddings_file, allow_pickle=True)
else:
    embeddings = []
    for i in range(0, len(codes), 3):
        embeddings.extend(get_embeddings_vector_for_texts(codes[i:i+3]))
    np.save(embeddings_file, embeddings)
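
# Note: code_embeddings.npy is a simple cache. It is not invalidated when the files
# under services/ change, so delete it by hand after editing those modules to force
# the vectors to be recomputed.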

def find_and_call_function_through_gpt(message, embeddings, texts):
    # 1. Pick the service module whose code is most similar to the user's message.
    most_similar_text = get_most_similar_text(message, embeddings, texts)
    # 2. Ask the model to rewrite that module's functions as function-calling definitions.
    messages = [
        { 'role': 'system', 'content': 'Transform all the functions of the code into GPT-3.5 function-calling definitions. For example: {"name": "FUNCTION_NAME", "description": "DESCRIPTION", "parameters": {"type": "TYPE", "properties": {PROPERTIES}}}' },
        { 'role': 'user', 'content': most_similar_text } ]
    formatted_code = create_chat_function_response(messages).message.content
    # 3. Ask the model to turn the user's message into a single function call.
    new_messages = [
        { 'role': 'system', 'content': 'Generate a function call according to the user input. The next message is the code. Answer just with the function call.' },
        { 'role': 'user', 'content': formatted_code },
        { 'role': 'user', 'content': message } ]
    response = create_chat_function_response(new_messages)
    function_call = response.message.content
    # Execute whatever call the model produced. exec() runs arbitrary code, so only
    # do this with trusted input and trusted service modules.
    exec(function_call)
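
# A hedged alternative to exec() (not part of the original flow): dispatch through a
# whitelist of the imported service functions instead of executing arbitrary text.
# This sketch assumes the model replies with something shaped like "name('arg1', 'arg2')"
# and does only naive, string-literal argument parsing.
ALLOWED_FUNCTIONS = {
    "get_bitcoin_price": get_bitcoin_price,
    "buy_bitcoin": buy_bitcoin,
    "send_email": send_email,
    "get_weather": get_weather,
    "get_joke": get_joke,
}

def dispatch_function_call(function_call):
    # Split "name(args)" into the function name and the raw argument string.
    name, _, rest = function_call.partition("(")
    name = name.strip()
    if name not in ALLOWED_FUNCTIONS:
        raise ValueError(f"Model requested a non-whitelisted function: {name!r}")
    args_text = rest.rsplit(")", 1)[0].strip()
    # Treat each comma-separated piece as a bare string literal.
    args = [a.strip().strip("'\"") for a in args_text.split(",")] if args_text else []
    return ALLOWED_FUNCTIONS[name](*args)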

user_input = input("Enter your question: ")
find_and_call_function_through_gpt(user_input, embeddings, codes)
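
# Illustrative example (hypothetical; actual behaviour depends on the model and on
# the modules present under services/):
#
#   Enter your question: what's the weather in Santiago?
#
# The model is expected to answer with something like "get_weather('Santiago')",
# which exec() above (or dispatch_function_call) then runs.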