forked from gkamradt/twitter-reply-bot
-
Notifications
You must be signed in to change notification settings - Fork 0
/
twitter-reply-bot.py
188 lines (150 loc) · 8.59 KB
/
twitter-reply-bot.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
import tweepy
from airtable import Airtable
from datetime import datetime, timedelta
from langchain.chat_models import ChatOpenAI
from langchain.prompts import ChatPromptTemplate, SystemMessagePromptTemplate, HumanMessagePromptTemplate
import schedule
import time
import os
# Helpful when testing locally
from dotenv import load_dotenv
load_dotenv()
# Load your Twitter and Airtable API keys (preferably from environment variables, config file, or within the railyway app)
# The "YourKey" strings are placeholder fallbacks only — real values must come
# from the environment (or a local .env file) for the bot to work.
TWITTER_API_KEY = os.getenv("TWITTER_API_KEY", "YourKey")
TWITTER_API_SECRET = os.getenv("TWITTER_API_SECRET", "YourKey")
TWITTER_ACCESS_TOKEN = os.getenv("TWITTER_ACCESS_TOKEN", "YourKey")
TWITTER_ACCESS_TOKEN_SECRET = os.getenv("TWITTER_ACCESS_TOKEN_SECRET", "YourKey")
TWITTER_BEARER_TOKEN = os.getenv("TWITTER_BEARER_TOKEN", "YourKey")
# Airtable is used as a lightweight log of which conversations were already answered
AIRTABLE_API_KEY = os.getenv("AIRTABLE_API_KEY", "YourKey")
AIRTABLE_BASE_KEY = os.getenv("AIRTABLE_BASE_KEY", "YourKey")
AIRTABLE_TABLE_NAME = os.getenv("AIRTABLE_TABLE_NAME", "YourKey")
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "YourKey")
# TwitterBot class to help us organize our code and manage shared state
class TwitterBot:
    """Replies to recent Twitter mentions with a short LLM-generated prediction.

    Workflow per run: fetch mentions from the last 20 minutes, look up each
    mention's parent (conversation) tweet, skip conversations already answered
    (tracked in Airtable), generate a reply with GPT-4, post it, and log the
    exchange back to Airtable. The per-run counters are for logging only and
    are not persisted anywhere.
    """

    def __init__(self):
        # Tweepy v2 client. wait_on_rate_limit=True makes calls block until the
        # rate-limit window resets instead of raising.
        self.twitter_api = tweepy.Client(bearer_token=TWITTER_BEARER_TOKEN,
                                         consumer_key=TWITTER_API_KEY,
                                         consumer_secret=TWITTER_API_SECRET,
                                         access_token=TWITTER_ACCESS_TOKEN,
                                         access_token_secret=TWITTER_ACCESS_TOKEN_SECRET,
                                         wait_on_rate_limit=True)
        # Airtable table doubles as the "already replied" log between runs
        self.airtable = Airtable(AIRTABLE_BASE_KEY, AIRTABLE_TABLE_NAME, AIRTABLE_API_KEY)
        self.twitter_me_id = self.get_me_id()
        self.tweet_response_limit = 35  # How many tweets to respond to each time the program wakes up

        # Initialize the language model w/ temperature of .5 to induce some creativity
        self.llm = ChatOpenAI(temperature=.5, openai_api_key=OPENAI_API_KEY, model_name='gpt-4')

        # For stats tracking for each run. This is not persisted anywhere, just logging
        self.mentions_found = 0
        self.mentions_replied = 0
        self.mentions_replied_errors = 0

    # Generate a response using the language model using the template we reviewed in the jupyter notebook (see README)
    def generate_response(self, mentioned_conversation_tweet_text):
        """Return a short, opinionated LLM reply for the given tweet text."""
        # It would be nice to bring in information about the links, pictures, etc. But out of scope for now
        # Edit this prompt for your own personality!
        system_template = """
You are an incredibly wise and smart tech mad scientist from silicon valley.
Your goal is to give a concise prediction in response to a piece of text from the user.
% RESPONSE TONE:
- Your prediction should be given in an active voice and be opinionated
- Your tone should be serious w/ a hint of wit and sarcasm
% RESPONSE FORMAT:
- Respond in under 200 characters
- Respond in two or less short sentences
- Do not respond with emojis
% RESPONSE CONTENT:
- Include specific examples of old tech if they are relevant
- If you don't have an answer, say, "Sorry, my magic 8 ball isn't working right now 🔮"
"""
        system_message_prompt = SystemMessagePromptTemplate.from_template(system_template)
        human_template = "{text}"
        human_message_prompt = HumanMessagePromptTemplate.from_template(human_template)
        chat_prompt = ChatPromptTemplate.from_messages([system_message_prompt, human_message_prompt])
        # Get a chat completion from the formatted messages
        final_prompt = chat_prompt.format_prompt(text=mentioned_conversation_tweet_text).to_messages()
        response = self.llm(final_prompt).content
        return response

    def respond_to_mention(self, mention, mentioned_conversation_tweet):
        """Generate and post a reply to a mention, then log it to Airtable.

        Returns True on success; returns None (after counting the error) if
        posting the tweet failed.
        """
        response_text = self.generate_response(mentioned_conversation_tweet.text)
        # Try and create the response to the tweet. If it fails, log it and move on
        try:
            response_tweet = self.twitter_api.create_tweet(text=response_text,
                                                           in_reply_to_tweet_id=mention.id)
            self.mentions_replied += 1
        except Exception as e:
            print(e)
            self.mentions_replied_errors += 1
            return
        # Log the response in airtable if it was successful, so future runs
        # see this conversation as already handled
        self.airtable.insert({
            'mentioned_conversation_tweet_id': str(mentioned_conversation_tweet.id),
            'mentioned_conversation_tweet_text': mentioned_conversation_tweet.text,
            'tweet_response_id': response_tweet.data['id'],
            'tweet_response_text': response_text,
            'tweet_response_created_at': datetime.utcnow().isoformat(),
            'mentioned_at': mention.created_at.isoformat()
        })
        return True

    # Returns the ID of the authenticated user for tweet creation purposes
    def get_me_id(self):
        return self.twitter_api.get_me()[0].id

    # Returns the parent tweet of a mention if it exists. Otherwise returns None.
    # We use this since we want to respond to the parent tweet, not the mention itself
    def get_mention_conversation_tweet(self, mention):
        # Check to see if mention has a field 'conversation_id' and if it's not null
        if mention.conversation_id is not None:
            # NOTE(review): .data is None when the parent tweet is deleted or
            # not visible; callers must handle a None return
            conversation_tweet = self.twitter_api.get_tweet(mention.conversation_id).data
            return conversation_tweet
        return None

    # Get mentions of the user that's authenticated and running the bot.
    # Uses a 20-minute lookback window to avoid parsing over too many tweets
    def get_mentions(self):
        # If doing this in prod make sure to deal with pagination. There could be a lot of mentions!
        # Get current time in UTC
        now = datetime.utcnow()
        # Subtract 20 minutes to get the start time
        start_time = now - timedelta(minutes=20)
        # Convert to the RFC 3339 string format the Twitter API requires
        start_time_str = start_time.strftime("%Y-%m-%dT%H:%M:%SZ")
        return self.twitter_api.get_users_mentions(id=self.twitter_me_id,
                                                   start_time=start_time_str,
                                                   expansions=['referenced_tweets.id'],
                                                   tweet_fields=['created_at', 'conversation_id']).data

    # Checking to see if we've already responded to a mention with what's logged in airtable
    def check_already_responded(self, mentioned_conversation_tweet_id):
        records = self.airtable.get_all(view='Grid view')
        for record in records:
            if record['fields'].get('mentioned_conversation_tweet_id') == str(mentioned_conversation_tweet_id):
                return True
        return False

    # Run through all mentioned tweets and generate a response
    def respond_to_mentions(self):
        mentions = self.get_mentions()
        # If no mentions, just return
        if not mentions:
            print("No mentions found")
            return
        self.mentions_found = len(mentions)
        for mention in mentions[:self.tweet_response_limit]:
            # Getting the mention's conversation tweet
            mentioned_conversation_tweet = self.get_mention_conversation_tweet(mention)
            # Bug fix: the parent tweet can be None (deleted/protected), which
            # previously raised AttributeError on .id. Skip the mention if the
            # parent is missing, if the mention *is* the conversation root, or
            # if we've already responded.
            if (mentioned_conversation_tweet is not None
                    and mentioned_conversation_tweet.id != mention.id
                    and not self.check_already_responded(mentioned_conversation_tweet.id)):
                self.respond_to_mention(mention, mentioned_conversation_tweet)
        return True

    # The main entry point for the bot with some logging
    def execute_replies(self):
        print(f"Starting Job: {datetime.utcnow().isoformat()}")
        self.respond_to_mentions()
        print(f"Finished Job: {datetime.utcnow().isoformat()}, Found: {self.mentions_found}, Replied: {self.mentions_replied}, Errors: {self.mentions_replied_errors}")
# The job that we'll schedule to run every X minutes
def job():
    """One scheduled pass: log the start time, then build a fresh bot and run it."""
    started_at = datetime.utcnow().isoformat()
    print(f"Job executed at {started_at}")
    TwitterBot().execute_replies()
if __name__ == "__main__":
    # Schedule the job to run every 6 minutes (the old comment said 5, but the
    # code has always scheduled 6). Edit to your liking, but watch out for rate limits
    schedule.every(6).minutes.do(job)
    # Busy-wait loop: schedule only runs jobs when run_pending() is called,
    # so poll it once a second
    while True:
        schedule.run_pending()
        time.sleep(1)