ask.py
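"""Minimal RAG query script: embeds a user question, retrieves the most similar
chunk from a persistent Chroma collection, and asks GPT-4o to answer using only
that retrieved context. (Summary added for readability; behavior unchanged.)"""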
import chromadb
from sentence_transformers import SentenceTransformer
from openai import OpenAI
from dotenv import load_dotenv
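# Load environment variables (e.g. OPENAI_API_KEY for the OpenAI client) from a local .env file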
load_dotenv()
# Set up the embedding model and the persistent Chroma vector store
embedding_model = SentenceTransformer('all-MiniLM-L6-v2')
DATA_PATH = r"data"  # unused in this script
CHROMA_PATH = r"chroma_db"
chroma_client = chromadb.PersistentClient(path=CHROMA_PATH)
collection = chroma_client.get_or_create_collection(name="growing_vegetables")
# Embed the user's question (must match the embedding model used to build the collection)
user_query = input("What do you want to know about growing vegetables?\n\n")
user_query_embedding = embedding_model.encode(user_query).tolist()

# Retrieve the single most similar document chunk (raise n_results for more context)
results = collection.query(
    query_embeddings=[user_query_embedding],
    n_results=1
)
print(results['documents'])
print(results['metadatas'])
client = OpenAI()

# System prompt that restricts the model to the retrieved documents
system_prompt = """
You are a helpful assistant. You answer questions about growing vegetables in Florida.
But you only answer based on the knowledge I'm providing you. You don't use your internal
knowledge and you don't make things up.
If you don't know the answer, just say: I don't know
--------------------
The data:
""" + str(results['documents']) + """
"""
# print(system_prompt)
response = client.chat.completions.create(
    model="gpt-4o",
    messages=[
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": user_query}
    ]
)
print("\n\n---------------------\n\n")
print(response.choices[0].message.content)
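# To run (assumes chroma_db already contains the "growing_vegetables" collection
# and OPENAI_API_KEY is set in .env): python ask.py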