diff --git a/.gitignore b/.gitignore
index 5c4816f..2565c32 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,4 @@
+mynotes.md
 # Byte-compiled / optimized / DLL files
 __pycache__/
 *.py[cod]
diff --git a/bedrock-chat-with-pdf/Admin/admin.py b/bedrock-chat-with-pdf/Admin/admin.py
index 9dc6087..cee199a 100644
--- a/bedrock-chat-with-pdf/Admin/admin.py
+++ b/bedrock-chat-with-pdf/Admin/admin.py
@@ -6,10 +6,11 @@
 ## s3_client
 s3_client = boto3.client("s3")
 BUCKET_NAME = os.getenv("BUCKET_NAME")
+REGION = os.getenv("REGION")
 
 ## Bedrock
-from langchain_community.embeddings import BedrockEmbeddings
-
+#from langchain_community.embeddings import BedrockEmbeddings
+from langchain_aws import BedrockEmbeddings
 
 ## Text Splitter
 from langchain.text_splitter import RecursiveCharacterTextSplitter
@@ -19,8 +20,8 @@
 ## import FAISS
 from langchain_community.vectorstores import FAISS
 
-bedrock_client = boto3.client(service_name="bedrock-runtime")
-bedrock_embeddings = BedrockEmbeddings(model_id="amazon.titan-embed-text-v1", client=bedrock_client)
+bedrock_client = boto3.client(service_name="bedrock-runtime", region_name=REGION)
+bedrock_embeddings = BedrockEmbeddings(model_id="amazon.titan-embed-text-v2:0", client=bedrock_client)
 
 def get_unique_id():
     return str(uuid.uuid4())
diff --git a/bedrock-chat-with-pdf/Admin/requirements.txt b/bedrock-chat-with-pdf/Admin/requirements.txt
index 3637850..0d9975e 100644
--- a/bedrock-chat-with-pdf/Admin/requirements.txt
+++ b/bedrock-chat-with-pdf/Admin/requirements.txt
@@ -1,5 +1,7 @@
 streamlit
 pypdf
 langchain
+langchain_community
+langchain-aws
 faiss-cpu
 boto3
\ No newline at end of file
diff --git a/bedrock-chat-with-pdf/User/app.py b/bedrock-chat-with-pdf/User/app.py
index 26abfee..ac217c5 100644
--- a/bedrock-chat-with-pdf/User/app.py
+++ b/bedrock-chat-with-pdf/User/app.py
@@ -6,11 +6,13 @@
 ## s3_client
 s3_client = boto3.client("s3")
 BUCKET_NAME = os.getenv("BUCKET_NAME")
+REGION = os.getenv("REGION")
 
 ## Bedrock
-from langchain_community.embeddings import BedrockEmbeddings
-from langchain.llms.bedrock import Bedrock
-
+#from langchain_community.embeddings import BedrockEmbeddings
+from langchain_aws import BedrockEmbeddings
+#from langchain.llms.bedrock import Bedrock
+from langchain_aws import ChatBedrock
 ## prompt and chain
 from langchain.prompts import PromptTemplate
 from langchain.chains import RetrievalQA
@@ -24,8 +26,9 @@
 ## import FAISS
 from langchain_community.vectorstores import FAISS
 
-bedrock_client = boto3.client(service_name="bedrock-runtime")
-bedrock_embeddings = BedrockEmbeddings(model_id="amazon.titan-embed-text-v1", client=bedrock_client)
+
+bedrock_client = boto3.client(service_name="bedrock-runtime", region_name=REGION)
+bedrock_embeddings = BedrockEmbeddings(model_id="amazon.titan-embed-text-v2:0", client=bedrock_client)
 
 folder_path="/tmp/"
 
@@ -38,8 +41,14 @@ def load_index():
     s3_client.download_file(Bucket=BUCKET_NAME, Key="my_faiss.pkl", Filename=f"{folder_path}my_faiss.pkl")
 
 def get_llm():
-    llm=Bedrock(model_id="anthropic.claude-v2:1", client=bedrock_client,
-    model_kwargs={'max_tokens_to_sample': 512})
+    llm = ChatBedrock(
+        client=bedrock_client,
+        model_id="anthropic.claude-3-haiku-20240307-v1:0",
+    )
+# llm = BedrockChat(
+#     client=bedrock_client,
+#     model_id="anthropic.claude-3-haiku-20240307-v1:0",
+# )
     return llm
 
 # get_response()
@@ -70,7 +79,8 @@ def get_response(llm,vectorstore, question ):
         return_source_documents=True,
         chain_type_kwargs={"prompt": PROMPT}
     )
-    answer=qa({"query":question})
+    #answer=qa({"query":question})
+    answer = qa.invoke({"query": question})
 
     return answer['result']
 
diff --git a/bedrock-chat-with-pdf/User/requirements.txt b/bedrock-chat-with-pdf/User/requirements.txt
index c4cbbc8..3e877f2 100644
--- a/bedrock-chat-with-pdf/User/requirements.txt
+++ b/bedrock-chat-with-pdf/User/requirements.txt
@@ -1,4 +1,6 @@
 streamlit
 langchain
+langchain_community
+langchain-aws
 faiss-cpu
 boto3
\ No newline at end of file