-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathfetch_socials.py
48 lines (38 loc) · 1.11 KB
/
fetch_socials.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
import json
import os
import cloudscraper
import time
from bs4 import BeautifulSoup
from opensea_rankings_crawler import opensea_rankings_crawler
fast = opensea_rankings_crawler(my_proxy="s",url='cat',use_proxy=True)
fast.do_json()
with open("udata.json", "r") as f:
dict = json.load(f)
stats = []
hcount = 0
ncount = 0
for key in dict:
if dict[key]['hit']:
hcount+=1
else:
ncount+=1
print (hcount,ncount)
scraper = cloudscraper.create_scraper()
county = 0
for key in dict:
if dict[key]['hit']:
continue
url = "https://api.opensea.io/api/v1/collection/" + key + "?force_update=true&format=json"
print (url)
html = scraper.get(url, timeout=25).json()#.text.encode("utf-8") # #.text.encode("utf-8")
#html = BeautifulSoup(html,'lxml')
#print (html)
with open("testing/" + key,"w") as f:
json.dump(html,f)
dict[key]['hit'] = 1
dict[key]['data']['url'] = url
with open("udata.json", "w") as f:
json.dump(dict, f, indent=4)
county+=1
print (county, ' out of ', ncount)
time.sleep(.7)