Skip to content

Commit

Permalink
Fix imgur images, add headers, timeout for requests
Browse files Browse the repository at this point in the history
  • Loading branch information
quantrancse committed Jan 26, 2021
1 parent 9c21b91 commit af7393d
Showing 1 changed file with 15 additions and 9 deletions.
24 changes: 15 additions & 9 deletions hako2epub.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,8 @@
from ebooklib import epub
from PIL import Image

# Browser-like User-Agent sent with every request so the novel site and
# imgur serve normal responses instead of blocking/altering script clients.
HEADERS = {
'user-agent': ('Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.47 Safari/537.36')}

class Utils():

Expand All @@ -23,6 +25,11 @@ def re_url(self, ln_url, url):
def format_text(self, text):
    """Return *text* with surrounding whitespace trimmed and all embedded
    newline characters removed."""
    trimmed = text.strip()
    # Splitting on '\n' and rejoining drops every newline, matching a
    # plain replace of '\n' with the empty string.
    return ''.join(trimmed.split('\n'))

def getImage(self, image_url):
    """Download an image and return it as an RGB PIL Image.

    Imgur links sometimes point at a page URL with no file extension;
    appending '.jpg' turns it into a direct-image URL.
    NOTE(review): assumes the imgur asset is reachable as .jpg — confirm.
    """
    if 'imgur.com' in image_url:
        # Heuristic: no '.' within the last 5 characters means the URL
        # lacks a file extension (e.g. 'imgur.com/AbCdEf').
        if '.' not in image_url[-5:]:
            image_url += '.jpg'
    response = requests.get(image_url, headers=HEADERS, stream=True, timeout=10)
    # When streaming, response.raw yields the undecoded wire bytes; if the
    # server compressed the body (gzip/deflate) PIL would fail to parse it.
    # Ask urllib3 to decompress transparently before PIL reads the stream.
    response.raw.decode_content = True
    return Image.open(response.raw).convert('RGB')

class UpdateLN():

Expand Down Expand Up @@ -50,7 +57,7 @@ def checkUpdate(self, ln_url='all', mode=''):
def checkUpdateLN(self, old_ln, mode):
old_ln_url = old_ln.get('ln_url')
try:
request = requests.get(old_ln_url, timeout=5)
request = requests.get(old_ln_url, headers=HEADERS, timeout=10)
soup = BeautifulSoup(request.text, 'html.parser')
new_ln = LNInfo()
new_ln = new_ln.getLNInfo(old_ln_url, soup, 'default')
Expand Down Expand Up @@ -185,8 +192,7 @@ class EpubEngine():
def makeCoverImage(self):
try:
print('Making cover image...')
img = Image.open(requests.get(
self.volume.cover_img, stream=True).raw).convert('RGB')
img = Utils().getImage(self.volume.cover_img)
b = BytesIO()
img.save(b, 'jpeg')
b_img = b.getvalue()
Expand Down Expand Up @@ -254,7 +260,7 @@ def makeChapter(self, i=0):
print('Making chapter contents...')
for i, chapter in enumerate(self.volume.chapter_list.keys(), i):
chapter_url = self.volume.chapter_list[chapter]
request = requests.get(chapter_url, timeout=5)
request = requests.get(chapter_url, headers=HEADERS, timeout=10)
soup = BeautifulSoup(request.text, 'html.parser')

xhtml_file = 'chap_%s.xhtml' % str(i + 1)
Expand Down Expand Up @@ -287,7 +293,7 @@ def makeImage(self, chapter_content, chapter_id):
content = str(chapter_content)
for i, img_url in enumerate(img_urls):
try:
img = Image.open(requests.get(img_url, stream=True).raw).convert('RGB')
img = Utils().getImage(img_url)
b = BytesIO()
img.save(b, 'jpeg')
b_img = b.getvalue()
Expand Down Expand Up @@ -315,7 +321,7 @@ def bindEpubBook(self):

try:
self.book.set_cover('cover.jpeg', requests.get(
self.volume.cover_img, stream=True).content)
self.volume.cover_img, headers=HEADERS, stream=True, timeout=10).content)
except:
print('Error: Can not set cover image!')

Expand Down Expand Up @@ -490,7 +496,7 @@ def setLNVolume(self, soup, mode):
try:
if mode == 'default':
for volume_url in volume_urls:
request = requests.get(volume_url, timeout=5)
request = requests.get(volume_url, headers=HEADERS, timeout=10)
soup = BeautifulSoup(request.text, 'html.parser')

self.volume_list.append(Volume(volume_url, soup))
Expand All @@ -515,7 +521,7 @@ def setLNVolume(self, soup, mode):

if selected_volume in range(len(volume_urls)):
request = requests.get(
volume_urls[selected_volume], timeout=5)
volume_urls[selected_volume], headers=HEADERS, timeout=10)
soup = BeautifulSoup(request.text, 'html.parser')

self.volume_list.append(
Expand Down Expand Up @@ -552,7 +558,7 @@ def start(self, ln_url, mode):
UpdateLN().checkUpdate(ln_url, 'updatevol')

elif self.checkValidUrl(ln_url):
request = requests.get(ln_url, timeout=5)
request = requests.get(ln_url, headers=HEADERS, timeout=10)
soup = BeautifulSoup(request.text, 'html.parser')
if not soup.find('section', 'volume-list'):
print('Invalid url. Please try again.')
Expand Down

0 comments on commit af7393d

Please sign in to comment.