Commit
Showing 18 changed files with 642 additions and 0 deletions.
12 changes: 12 additions & 0 deletions
...ommunication: The Importance of Monitoring Message History/0_conversation_chain_simple.py
# from langchain import OpenAI, ConversationChain  (legacy import, replaced by the two imports below)
from langchain_openai import OpenAI
from langchain.chains import ConversationChain

# Initialize the completion model and a ConversationChain with the default prompt and no explicit memory
llm = OpenAI(model_name="gpt-3.5-turbo-instruct", temperature=0)
conversation = ConversationChain(llm=llm, verbose=True)

# Send a single message and print the model's reply
output = conversation.predict(input="Hi there!")

print(output)
22 changes: 22 additions & 0 deletions
...portance of Monitoring Message History/1_conversation_chain_concatenating_buffermemory.py
# from langchain import OpenAI, ConversationChain  (legacy import)
from langchain_openai import OpenAI
from langchain.chains import ConversationChain

llm = OpenAI(model_name="gpt-3.5-turbo-instruct", temperature=0)
conversation = ConversationChain(llm=llm, verbose=True)

from langchain.memory import ConversationBufferMemory

# Standalone buffer memory: note that it is NOT attached to the chain above,
# so the chain keeps its own default memory and the two histories stay separate.
memory = ConversationBufferMemory(return_messages=True)
memory.save_context({"input": "hi there!"}, {"output": "Hi there! It's nice to meet you. How can I help you today?"})

print(memory.load_memory_variables({}))

output = conversation.predict(input="In what scenarios should extra memory be used?")
output = conversation.predict(input="There are various types of memory in LangChain. When should each type be used?")
output = conversation.predict(input="Do you remember what our first message was?")

print(output)
print("------------------")
print(memory.load_memory_variables({}))
13 changes: 13 additions & 0 deletions
...e Importance of Monitoring Message History/1_conversation_chain_concatenating_dontwork.py
# This variant deliberately keeps the legacy top-level `langchain` imports (hence the
# "dontwork" file name); in recent LangChain releases these classes are imported as in
# the commented lines below instead.
from langchain import OpenAI, ConversationChain
# from langchain_openai import OpenAI
# from langchain.chains import ConversationChain

llm = OpenAI(model_name="gpt-3.5-turbo-instruct", temperature=0)
conversation = ConversationChain(llm=llm, verbose=True)

output = conversation.predict(input="In what scenarios should extra memory be used?")
output = conversation.predict(input="There are various types of memory in LangChain. When should each type be used?")
output = conversation.predict(input="Do you remember what our first message was?")

print("---- Output ----")
print(output)
39 changes: 39 additions & 0 deletions
...The Importance of Monitoring Message History/1_conversation_chain_concatenating_memory.py
from langchain_openai import OpenAI
from langchain.chains import ConversationChain
from langchain.memory import ConversationBufferMemory

# Initialize the language model
llm = OpenAI(model_name="gpt-3.5-turbo-instruct", temperature=0)

# Initialize the memory
memory = ConversationBufferMemory()

# Initialize the conversation chain with the language model and memory
conversation = ConversationChain(llm=llm, memory=memory, verbose=True)

# Simulate a conversation
output1 = conversation.predict(input="In what scenarios should extra memory be used?")
print("--- Output 1 ---")
print(output1)

# Check memory after the first message
print("--- Memory after Output 1 ---")
print(memory.buffer)

output2 = conversation.predict(input="There are various types of memory in LangChain. When should each type be used?")
print("--- Output 2 ---")
print(output2)

# Check memory after the second message
print("--- Memory after Output 2 ---")
print(memory.buffer)

output3 = conversation.predict(input="Do you remember what our first message was?")
print("--- Output 3 ---")
print(output3)

# Check memory after the third message
print("--- Memory after Output 3 ---")
print(memory.buffer)
39 changes: 39 additions & 0 deletions
...ce of Monitoring Message History/1_conversation_chain_concatenating_memory_alternative.py
from langchain_openai import OpenAI
from langchain.chains import ConversationChain
from langchain.memory import ConversationBufferMemory

# Initialize the language model
llm = OpenAI(model_name="gpt-3.5-turbo-instruct", temperature=0)

# Initialize the memory
memory = ConversationBufferMemory()

# Initialize the conversation chain with the language model and memory
conversation = ConversationChain(llm=llm, memory=memory, verbose=True)

# Simulate a conversation
input_messages = [
    "In what scenarios should extra memory be used?",
    "There are various types of memory in LangChain. When should each type be used?",
    "Do you remember what our first message was?"
]
output_messages = []

# Iterate over input messages
for input_message in input_messages:
    # Predict response
    output = conversation.predict(input=input_message)
    print("--- Output ---")
    print(output)
    output_messages.append(output)

    # NOTE: the chain already records each turn through `memory`; the manual bookkeeping
    # below duplicates that, and `buffer` is a derived property on ConversationBufferMemory,
    # so reassigning or appending to it directly may not behave as intended.
    # Ensure memory buffer is initialized as a list
    if not isinstance(memory.buffer, list):
        memory.buffer = []

    # Add input-output pair to memory buffer
    memory.buffer.append((input_message, output))

# Check memory after the conversation
print("--- Memory ---")
print(memory.buffer)
13 changes: 13 additions & 0 deletions
... Importance of Monitoring Message History/2_conversation_buffer_memory_under_the_hoods.py
from langchain_openai import OpenAI
from langchain.chains import ConversationChain

from langchain.memory import ConversationBufferMemory

# The chain gets its own fresh ConversationBufferMemory here...
llm = OpenAI(model_name="gpt-3.5-turbo-instruct", temperature=0)
conversation = ConversationChain(llm=llm, verbose=True, memory=ConversationBufferMemory())

# ...while this second memory object is populated manually and inspected on its own.
memory = ConversationBufferMemory(return_messages=True)
memory.save_context({"input": "hi there!"}, {"output": "Hi there! It's nice to meet you. How can I help you today?"})

print(memory.load_memory_variables({}))
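As a quick follow-up sketch (assuming the `memory` object above and its default "history" key), the stored turn can be unpacked to show that return_messages=True yields message objects rather than one concatenated string:

# Sketch: each entry should be a HumanMessage or AIMessage with a .content string.
history = memory.load_memory_variables({})["history"]
for message in history:
    print(type(message).__name__, "->", message.content)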
16 changes: 16 additions & 0 deletions
...n: The Importance of Monitoring Message History/3_conversation_chain_with_save_context.py
from langchain.chains import ConversationChain
from langchain_openai import OpenAI
from langchain.memory import ConversationBufferMemory

llm = OpenAI(model_name="gpt-3.5-turbo-instruct", temperature=0)

conversation = ConversationChain(
    llm=llm,
    verbose=True,
    memory=ConversationBufferMemory()
)

output = conversation.predict(input="In what scenarios should extra memory be used?")
output = conversation.predict(input="There are various types of memory in LangChain. When should each type be used?")
output = conversation.predict(input="Do you remember what our first message was?")
21 changes: 21 additions & 0 deletions
...nication: The Importance of Monitoring Message History/4_conversation_chain_full_usage.py
from langchain.chains import ConversationChain
from langchain.memory import ConversationBufferMemory
from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder, SystemMessagePromptTemplate, HumanMessagePromptTemplate
from langchain_openai import OpenAI

llm = OpenAI(model_name="gpt-3.5-turbo-instruct", temperature=0)

prompt = ChatPromptTemplate.from_messages([
    SystemMessagePromptTemplate.from_template("The following is a friendly conversation between a human and an AI."),
    MessagesPlaceholder(variable_name="history"),
    HumanMessagePromptTemplate.from_template("{input}")
])

memory = ConversationBufferMemory(return_messages=True)
conversation = ConversationChain(memory=memory, prompt=prompt, llm=llm, verbose=True)

print(conversation.predict(input="Tell me a joke about elephants"))
print(conversation.predict(input="Who is the author of the Harry Potter series?"))
print(conversation.predict(input="What was the joke you told me earlier?"))
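Because the prompt above is a ChatPromptTemplate, the same memory setup can also be paired with a chat model. This is a hypothetical variant, not part of the original script; the ChatOpenAI import and the gpt-3.5-turbo model name are assumptions:

# Sketch: same prompt and memory pattern, but with a chat model instead of the completion model.
from langchain_openai import ChatOpenAI

chat_llm = ChatOpenAI(model="gpt-3.5-turbo", temperature=0)
chat_conversation = ConversationChain(
    memory=ConversationBufferMemory(return_messages=True),
    prompt=prompt,
    llm=chat_llm,
    verbose=True
)
print(chat_conversation.predict(input="Tell me a joke about elephants"))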
41 changes: 41 additions & 0 deletions
...ication: The Importance of Monitoring Message History/5_conversation_chain_with_memory.py
from langchain.chains import ConversationChain
from langchain.memory import ConversationBufferMemory
from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder, SystemMessagePromptTemplate, HumanMessagePromptTemplate
from langchain_openai import OpenAI

from langchain.load.dump import dumps

llm = OpenAI(model_name="gpt-3.5-turbo-instruct", temperature=0)

prompt = ChatPromptTemplate.from_messages([
    SystemMessagePromptTemplate.from_template("The following is a friendly conversation between a human and an AI."),
    MessagesPlaceholder(variable_name="history"),
    HumanMessagePromptTemplate.from_template("{input}")
])

memory = ConversationBufferMemory(return_messages=True)
conversation = ConversationChain(memory=memory, prompt=prompt, llm=llm, verbose=True)

user_message = "Tell me about the history of the Internet."
response = conversation(user_message)

import pprint
print("------------")
pp = pprint.PrettyPrinter(indent=4)
pp.pprint(response)

# User sends another message
user_message = "Who are some important figures in its development?"
response = conversation(user_message)
pp.pprint(response)  # Chatbot responds with names of important figures, recalling the previous topic

print("------------")

user_message = "What did Tim Berners-Lee contribute?"
response = conversation(user_message)
print(response)
print("--------- ")

print(response['response'])
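The dumps helper imported at the top of this script is never called; here is a minimal sketch of how it could serialize the response dictionary to JSON, yielding data shaped like the chat.json file below (the output file name is hypothetical):

# Sketch: write the serialized response (input, history, response) to disk.
with open("chat_dump.json", "w") as f:
    f.write(dumps(response, pretty=True))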
36 changes: 36 additions & 0 deletions
.../1 Optimizing Your Communication: The Importance of Monitoring Message History/chat.json
{
  "input": "Tell me about the history of the Internet.",
  "history": [
    {
      "lc": 1,
      "type": "constructor",
      "id": [
        "langchain",
        "schema",
        "messages",
        "HumanMessage"
      ],
      "kwargs": {
        "content": "Tell me about the history of the Internet.",
        "type": "human"
      }
    },
    {
      "lc": 1,
      "type": "constructor",
      "id": [
        "langchain",
        "schema",
        "messages",
        "AIMessage"
      ],
      "kwargs": {
        "content": " \n\nAI: The Internet, also known as the World Wide Web, has a rich and complex history. It began as a project in the late 1960s by the United States Department of Defense, with the goal of creating a decentralized network that could withstand a nuclear attack. This project, called ARPANET, was the precursor to the modern Internet.\n\nIn the 1980s, the development of the TCP/IP protocol allowed for multiple networks to be connected, leading to the creation of the Internet as we know it today. The 1990s saw a surge in the popularity of the Internet, with the introduction of the World Wide Web and the first web browser, allowing for easy access to information and communication.\n\nThe early 2000s saw the rise of social media and e-commerce, further expanding the capabilities and reach of the Internet. Today, the Internet is an integral part of our daily lives, connecting people and information from all corners of the globe. Its history is constantly evolving as new technologies and innovations continue to shape its future.",
        "type": "ai",
        "tool_calls": [],
        "invalid_tool_calls": []
      }
    }
  ],
  "response": " \n\nAI: The Internet, also known as the World Wide Web, has a rich and complex history. It began as a project in the late 1960s by the United States Department of Defense, with the goal of creating a decentralized network that could withstand a nuclear attack. This project, called ARPANET, was the precursor to the modern Internet.\n\nIn the 1980s, the development of the TCP/IP protocol allowed for multiple networks to be connected, leading to the creation of the Internet as we know it today. The 1990s saw a surge in the popularity of the Internet, with the introduction of the World Wide Web and the first web browser, allowing for easy access to information and communication.\n\nThe early 2000s saw the rise of social media and e-commerce, further expanding the capabilities and reach of the Internet. Today, the Internet is an integral part of our daily lives, connecting people and information from all corners of the globe. Its history is constantly evolving as new technologies and innovations continue to shape its future."
}
15 changes: 15 additions & 0 deletions
... Langchain: A Comprehensive Guide with Practical Examples/1_conversation_buffer_memory.py
# from langchain import OpenAI, ConversationChain  (legacy import)
from langchain_openai import OpenAI
from langchain.memory import ConversationBufferMemory
from langchain.chains import ConversationChain

# TODO: Set your OpenAI API credentials in environment variables.
llm = OpenAI(model_name="gpt-3.5-turbo-instruct", temperature=0)

conversation = ConversationChain(
    llm=llm,
    verbose=True,
    memory=ConversationBufferMemory()
)
result = conversation.predict(input="Hello!")
print(result)
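One minimal way to handle the TODO above, assuming the key is provided through the standard OPENAI_API_KEY environment variable that langchain_openai reads:

# Sketch: fail fast with a clear message if the API key is missing.
import os

if "OPENAI_API_KEY" not in os.environ:
    raise RuntimeError("Set OPENAI_API_KEY before running these examples, e.g. `export OPENAI_API_KEY=...`")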
37 changes: 37 additions & 0 deletions
...ain: A Comprehensive Guide with Practical Examples/3_conversation_window_buffer_memory.py
from langchain.memory import ConversationBufferWindowMemory
from langchain_openai import OpenAI
from langchain.chains import ConversationChain, LLMChain
from langchain_core.prompts import PromptTemplate

llm = OpenAI(model_name="gpt-3.5-turbo-instruct", temperature=0)

template = """You are ArtVenture, a cutting-edge virtual tour guide for
an art gallery that showcases masterpieces from alternate dimensions and
timelines. Your advanced AI capabilities allow you to perceive and understand
the intricacies of each artwork, as well as their origins and significance in
their respective dimensions. As visitors embark on their journey with you
through the gallery, you weave enthralling tales about the alternate histories
and cultures that gave birth to these otherworldly creations.
{chat_history}
Visitor: {visitor_input}
Tour Guide:"""

prompt = PromptTemplate(
    input_variables=["chat_history", "visitor_input"],
    template=template
)

chat_history = ""

# Note: the custom prompt above is not passed to this chain; it uses the default
# ConversationChain prompt with a sliding window of the last k=3 exchanges.
convo_buffer_win = ConversationChain(
    llm=llm,
    memory=ConversationBufferWindowMemory(k=3, return_messages=True)
)

convo_buffer_win("What is your name?")
convo_buffer_win("What can you do?")
convo_buffer_win("Do you mind giving me a tour? I want to see your gallery.")
convo_buffer_win("What are your working hours?")
result = convo_buffer_win("See you soon.")
print(result)
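As a sketch of how the unused ArtVenture prompt (and the imported LLMChain) could actually be wired together, assuming the window memory's memory_key and input_key are pointed at the template's variable names:

# Sketch: window memory whose keys match the custom prompt variables.
artventure_memory = ConversationBufferWindowMemory(
    k=3,
    memory_key="chat_history",   # fills {chat_history} in the template
    input_key="visitor_input"    # marks {visitor_input} as the user turn
)
artventure_chain = LLMChain(llm=llm, prompt=prompt, memory=artventure_memory, verbose=True)
print(artventure_chain.predict(visitor_input="Do you mind giving me a tour? I want to see your gallery."))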
44 changes: 44 additions & 0 deletions
...Langchain: A Comprehensive Guide with Practical Examples/4_conversation_summary_memory.py
from langchain.memory import ConversationBufferWindowMemory
from langchain_openai import OpenAI
from langchain.chains import ConversationChain, LLMChain
from langchain_core.prompts import PromptTemplate
from langchain.memory import ConversationSummaryMemory

llm = OpenAI(model_name="gpt-3.5-turbo-instruct", temperature=0)

# Create a ConversationChain with ConversationSummaryMemory
conversation_with_summary = ConversationChain(
    llm=llm,
    memory=ConversationSummaryMemory(llm=llm),
    verbose=True
)

# Example conversation
response = conversation_with_summary.predict(input="Hi, what's up?")
print(response)

from langchain.memory import ConversationSummaryBufferMemory

# Note: this prompt is defined but not passed to the chain below, which keeps
# the default ConversationChain prompt.
prompt = PromptTemplate(
    input_variables=["topic"],
    template="The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.\nCurrent conversation:\n{topic}",
)

llm = OpenAI(temperature=0)
conversation_with_summary = ConversationChain(
    llm=llm,
    memory=ConversationSummaryBufferMemory(llm=OpenAI(), max_token_limit=40),
    verbose=True
)
conversation_with_summary.predict(input="Hi, what's up?")
conversation_with_summary.predict(input="Just working on writing some documentation!")
response = conversation_with_summary.predict(input="For LangChain! Have you heard of it?")
print(response)
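A small sketch for inspecting what the summary-buffer memory retains after these turns, assuming it is reachable through the chain's memory attribute:

# With max_token_limit=40, older turns should be condensed into a running summary,
# while only the most recent messages are kept verbatim.
print(conversation_with_summary.memory.load_memory_variables({}))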