scrape.py
from selenium.webdriver import Remote, ChromeOptions
from selenium.webdriver.chromium.remote_connection import ChromiumRemoteConnection
from bs4 import BeautifulSoup

SBR_WEBDRIVER = 'https://brd-customer-hl_442f3167-zone-cybersec_scraper:91mdgp2d3d87@brd.superproxy.io:9515'


def scrape_website(website):
    print("Launching Chrome web browser...")
    sbr_connection = ChromiumRemoteConnection(SBR_WEBDRIVER, 'goog', 'chrome')
    with Remote(sbr_connection, options=ChromeOptions()) as driver:
        driver.get(website)
        # CAPTCHA handling: if you're expecting a CAPTCHA on the target page,
        # this checks the status of Scraping Browser's automatic CAPTCHA solver.
        print('Waiting for CAPTCHA to solve...')
        solve_res = driver.execute('executeCdpCommand', {
            'cmd': 'Captcha.waitForSolve',
            'params': {'detectTimeout': 10000},
        })
        print('CAPTCHA solve status:', solve_res['value']['status'])
        print('Taking page screenshot to file page.png')
        driver.get_screenshot_as_file('./page.png')
        print('Navigated! Scraping page content...')
        html = driver.page_source
        return html


def extract_body_content(html_content):
    soup = BeautifulSoup(html_content, 'html.parser')
    body_content = soup.body
    if body_content:
        return str(body_content)
    return ""


def clean_body_content(body_content):
    soup = BeautifulSoup(body_content, "html.parser")
    # Drop script and style tags so only the visible text remains.
    for script_or_style in soup(["script", "style"]):
        script_or_style.extract()
    cleaned_content = soup.get_text(separator="\n")
    cleaned_content = "\n".join(
        line.strip() for line in cleaned_content.splitlines() if line.strip()
    )
    return cleaned_content


def split_dom_content(dom_content, max_length=6000):
    # Split the cleaned text into chunks of at most max_length characters.
    return [
        dom_content[i : i + max_length] for i in range(0, len(dom_content), max_length)
    ]
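

# A minimal usage sketch showing how the helpers above chain together:
# scrape a page, keep only the <body>, strip scripts/styles, then chunk the
# text. The target URL here is only an illustrative assumption, and running
# it requires a valid Scraping Browser endpoint in SBR_WEBDRIVER.
if __name__ == "__main__":
    raw_html = scrape_website("https://example.com")
    body = extract_body_content(raw_html)
    text = clean_body_content(body)
    chunks = split_dom_content(text, max_length=6000)
    print(f"Scraped {len(chunks)} chunk(s) of cleaned text")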