1
1
# --------------- Import -----------------
2
- import os , datetime , time , csv
2
+ import os , datetime , time , csv , pprint
3
3
from selenium import webdriver
4
4
from selenium .webdriver .chrome .options import Options
5
5
from selenium .webdriver .common .by import By
8
8
from sys import argv
9
9
import random
10
10
# --------------- -----------------
11
- print ("\n " * 10 )
11
+ print ("\n " * 50 )
12
12
os .environ ["DEBUSSY" ] = "1"
13
13
# --------------- Configure browser session -----------------
14
14
wd_options = Options ()
@@ -48,17 +48,8 @@ def scroll_to_bottom():
48
48
def scan_friends ():
49
49
print ('Scanning page for friends...' )
50
50
friends = []
51
- # friend_cards = browser.find_elements_by_xpath("//div[@data-sigil='undoable-action']/a")
52
51
friend_cards = browser .find_elements_by_css_selector ("div#root div.timeline div[data-sigil='undoable-action'] a" )
53
52
for friend in friend_cards :
54
- # if friend.get_attribute('data-hovercard') is None:
55
- # print(" %s (INACTIVE)" % friend.text)
56
- # friend_id = friend.get_attribute('ajaxify').split('id=')[1]
57
- # friend_active = 0
58
- # else:
59
- # print(" %s" % friend.text)
60
- # friend_id = friend.get_attribute('data-hovercard').split('id=')[1].split('&')[0]
61
- # friend_active = 1
62
53
friend_name = friend .text
63
54
friend_id = friend .get_attribute ('href' )
64
55
if friend_name :
@@ -87,12 +78,7 @@ def scrape_1st_degrees():
87
78
writer = csv .writer (open (csvOut , 'w' , encoding = "utf-8" ))
88
79
writer .writerow (['id' ,'name' ])
89
80
90
- #Get your unique Facebook ID
91
- # profile_icon = browser.find_element_by_css_selector("[data-click='profile_icon'] > a > span > img")
92
- # myid = profile_icon.get_attribute("id")[19:]
93
- # profile_icon = browser.find_element_by_css_selector("[id='profile_tab_jewel'] > a")
94
- # myid = profile_icon.get_attribute("href")
95
- # myid = 0
81
+ #Get my Facebook id
96
82
time .sleep (1 )
97
83
browser .get ("https://m.facebook.com/home.php" )
98
84
time .sleep (1 )
@@ -113,13 +99,15 @@ def scrape_1st_degrees():
113
99
writer .writerow ([friend ['id' ],str (friend ['name' ])])
114
100
115
101
print ("Successfully saved to %s" % csvOut )
102
+
103
+ return csvOut
116
104
# --------------- Scrape 2nd degree friends. ---------------
117
105
#This can take several days if you have a lot of friends!!
118
106
def scrape_2nd_degrees ():
119
107
#Prep CSV Output File
120
108
csvOut = '2nd-degree_%s.csv' % now .strftime ("%Y-%m-%d_%H%M" )
121
109
writer = csv .writer (open (csvOut , 'w' , encoding = "utf-8" ))
122
- writer .writerow (['A_id' , 'B_id' , 'A_name' ,'B_name' , 'active' ])
110
+ writer .writerow (['A_id' , 'B_id' , 'A_name' ,'B_name' ])
123
111
124
112
#Load friends from CSV Input File
125
113
script , filename = argv
@@ -128,7 +116,7 @@ def scrape_2nd_degrees():
128
116
print ("------------------------------------------" )
129
117
for idx ,friend in enumerate (myfriends ):
130
118
#Load URL of friend's friend page
131
- scrape_url = "https://www .facebook.com/" + friend ['uid' ] + "/friends?source_ref=pb_friends_tl"
119
+ scrape_url = "https://m .facebook.com/" + friend ['uid' ] + "/friends?source_ref=pb_friends_tl"
132
120
browser .get (scrape_url )
133
121
134
122
#Scan your friends' Friends page (2nd-degree friends)
@@ -139,10 +127,34 @@ def scrape_2nd_degrees():
139
127
#Write friends to CSV File
140
128
print ('Writing friends to CSV...' )
141
129
for person in their_friends :
142
- writer .writerow ([friend ['uid' ],person ['id' ],friend ['name' ],person ['name' ],person ['active' ]])
130
+ writer .writerow ([friend ['uid' ],person ['id' ],friend ['name' ],person ['name' ]])
131
+
132
+ print ("friend #%d done" % (idx + 1 ))
133
+
134
+ print ("Successfully saved to %s" % csvOut )
143
135
# --------------- Check Disconnected Friends ---------------
144
136
def who_unfriended_me():
    """Compare an older 1st-degree snapshot CSV against a fresh scrape and
    report (and save) the friends that no longer appear.

    Reads the old snapshot filename from the command line (3rd-arg mode),
    scrapes the current friend list via scrape_1st_degrees(), then writes
    the difference to a timestamped CSV. Prints the result with pprint.
    """
    # Old snapshot comes from the CSV named on the command line; the current
    # list is scraped fresh (scrape_1st_degrees returns the CSV it wrote).
    script, filename, action = argv
    old_friends = load_csv(filename)
    current_friends = load_csv(scrape_1st_degrees())
    # Anyone present in the old snapshot but missing now has disconnected
    # (unfriended, deactivated, or blocked — we can't distinguish here).
    disconnections = [x for x in old_friends if x not in current_friends]

    print("\n" * 10)
    print("=== Who Unfriended Me? ===\n\n")
    pp = pprint.PrettyPrinter(indent=4)
    pp.pprint(disconnections)
    print("\n" * 10)

    # Save the diff to its own timestamped CSV.
    csvOut = '1st-degree-disconnections_%s.csv' % now.strftime("%Y-%m-%d_%H%M")
    # BUG FIX: the header used to be ['name','id'] while rows were written
    # id-first, so the columns were mislabeled. Use ['id','name'] to match
    # the row order (and the 1st-degree CSV format this file produces).
    # Also use a context manager so the file is flushed and closed;
    # newline='' is the csv-module convention for output files.
    with open(csvOut, 'w', encoding="utf-8", newline='') as f:
        writer = csv.writer(f)
        writer.writerow(['id', 'name'])
        for friend in disconnections:
            # load_csv rows expose 'uid'/'name' keys (same shape as used
            # in scrape_2nd_degrees).
            writer.writerow([friend['uid'], friend['name']])

    print("Successfully saved to %s" % csvOut)
146
158
# --------------- Start Scraping ---------------
# BUG FIX: this file does `import datetime` (the module), so the class is
# datetime.datetime — calling datetime.now() on the module raises
# AttributeError. Qualify the call fully.
now = datetime.datetime.now()
configPath = "config.txt"
@@ -153,12 +165,14 @@ def who_unfriended_me():
153
165
password = configObj .get ('credentials' , 'password' )
154
166
else :
155
167
print ('Enter the config path' )
156
- # fb_login(configObj)
168
+ fb_login (configObj )
157
169
158
- if len (argv ) is 1 :
159
- # scrape_1st_degrees()
160
- print (load_csv ("1st-degree_2021-05-26_1554.csv" ))
161
- elif len (argv ) is 2 :
170
# Dispatch on how many CLI arguments were given:
#   script only            -> scrape 1st-degree connections
#   script <csv>           -> scrape 2nd-degree connections from that CSV
#   script <csv> "un"      -> diff that CSV against a fresh scrape
arg_count = len(argv)
if arg_count == 1:
    scrape_1st_degrees()
elif arg_count == 2:
    scrape_2nd_degrees()
elif arg_count == 3:
    script, filename, action = argv
    if action == "un":
        who_unfriended_me()
else:
    print("Invalid # of arguments specified. Use none to scrape your 1st degree connections, or specify the name of the CSV file as the first argument.")
0 commit comments