diff --git a/main.py b/main.py
index 3012caeb..f330a166 100644
--- a/main.py
+++ b/main.py
@@ -3,7 +3,7 @@
 from streamlit_chat import message
 import faiss
 from langchain import OpenAI
-from langchain.chains import VectorDBQAWithSourcesChain
+from langchain.chains import RetrievalQAWithSourcesChain
 import pickle
 
 # Load the LangChain.
@@ -13,7 +13,7 @@
     store = pickle.load(f)
 
 store.index = index
-chain = VectorDBQAWithSourcesChain.from_llm(llm=OpenAI(temperature=0), vectorstore=store)
+chain = RetrievalQAWithSourcesChain.from_chain_type(OpenAI(temperature=0), chain_type="stuff", retriever=store.as_retriever())
 
 
 # From here down is all the StreamLit UI.
diff --git a/qa.py b/qa.py
index 9152cc15..63e1614c 100644
--- a/qa.py
+++ b/qa.py
@@ -1,7 +1,7 @@
 """Ask a question to the notion database."""
 import faiss
 from langchain import OpenAI
-from langchain.chains import VectorDBQAWithSourcesChain
+from langchain.chains import RetrievalQAWithSourcesChain
 import pickle
 import argparse
 
@@ -16,7 +16,7 @@
     store = pickle.load(f)
 
 store.index = index
-chain = VectorDBQAWithSourcesChain.from_llm(llm=OpenAI(temperature=0), vectorstore=store)
+chain = RetrievalQAWithSourcesChain.from_chain_type(OpenAI(temperature=0), chain_type="stuff", retriever=store.as_retriever())
 result = chain({"question": args.question})
 print(f"Answer: {result['answer']}")
 print(f"Sources: {result['sources']}")
diff --git a/requirements.txt b/requirements.txt
index c99f9572..21ce5d4e 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,5 +1,6 @@
-langchain==0.0.58
+langchain==0.0.126
 openai
 faiss-cpu
 streamlit
 streamlit-chat
+tiktoken
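Note on the API change: `VectorDBQAWithSourcesChain.from_llm(..., vectorstore=store)` is replaced by `RetrievalQAWithSourcesChain.from_chain_type(...)`, which takes the vector store wrapped as a retriever plus an explicit `chain_type`. Below is a minimal sketch of the new construction pattern, assuming the `docs.index` and `faiss_store.pkl` artifacts produced by the repo's ingest step and a placeholder question string (qa.py reads the question from argparse instead):

```python
"""Sketch of the retriever-based chain construction now used in main.py and qa.py."""
import pickle

import faiss
from langchain import OpenAI
from langchain.chains import RetrievalQAWithSourcesChain

# Rehydrate the FAISS-backed vector store, as both scripts already do.
index = faiss.read_index("docs.index")
with open("faiss_store.pkl", "rb") as f:
    store = pickle.load(f)
store.index = index

# Old (langchain==0.0.58):
#   chain = VectorDBQAWithSourcesChain.from_llm(llm=OpenAI(temperature=0), vectorstore=store)
# New (langchain==0.0.126): the store is wrapped as a retriever and the
# combine-documents strategy is named explicitly via chain_type.
chain = RetrievalQAWithSourcesChain.from_chain_type(
    OpenAI(temperature=0),
    chain_type="stuff",  # "stuff" concatenates all retrieved docs into a single prompt
    retriever=store.as_retriever(),
)

# Placeholder question for illustration only.
result = chain({"question": "What does the Notion database say about onboarding?"})
print(f"Answer: {result['answer']}")
print(f"Sources: {result['sources']}")
```

The `tiktoken` entry is added to requirements.txt presumably because the newer LangChain release relies on it for OpenAI token counting.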