release: v1.60.0
newt-sc committed Aug 24, 2024
1 parent b51db25 commit 5ae60b6
Showing 10 changed files with 93 additions and 55 deletions.
3 changes: 3 additions & 0 deletions CHANGELOG.md
@@ -1,3 +1,6 @@
* [v1.60.0](https://github.com/newt-sc/a4kStreaming/releases/tag/plugin.video.a4kstreaming%2Fplugin.video.a4kstreaming-1.60.0):
* Add generic requests caching to speed up browsing

* [v1.59.0](https://github.com/newt-sc/a4kStreaming/releases/tag/plugin.video.a4kstreaming%2Fplugin.video.a4kstreaming-1.59.0):
* Remove deprecated usages of ListItem methods

6 changes: 6 additions & 0 deletions a4kStreaming/core.py
@@ -21,9 +21,15 @@
     goto
 )

+from .lib.database import db
+
 core = sys.modules[__name__]
 utils.core = core

+removed_bytes = db.cleanup()
+if removed_bytes:
+    logger.notice('Removed %s bytes from cache' % removed_bytes)
+
 api_mode_enabled = True
 url = ''
 handle = None
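
db.cleanup() now runs once at import time, so the on-disk cache is trimmed before any browsing starts. A minimal standalone sketch of that startup step, with the standard logging module standing in for the add-on's own logger:

    # Sketch of the import-time cleanup step (assumption: `db` is the cache
    # facade from lib/database.py; logging stands in for the add-on's logger).
    import logging

    logger = logging.getLogger('a4kStreaming')

    def trim_cache_on_startup(db):
        removed_bytes = db.cleanup()  # bytes freed, or None if the cache was already under the limit
        if removed_bytes:
            logger.info('Removed %s bytes from cache' % removed_bytes)
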
2 changes: 1 addition & 1 deletion a4kStreaming/explorer.py
@@ -2815,7 +2815,7 @@ def delete_magnet():
     item.setArt({ 'poster': provider_params.title.poster })

     info_tag = core.kodi.set_info_tag(item, video_meta)
-    info_tag.set_stream_details('video', { 'codec': result['videocodec'], 'duration': result['ref'].duration })
+    info_tag.add_stream_info('video', { 'codec': result['videocodec'], 'duration': result['ref'].duration })

     core.utils.end_action(core, True, item)
     return link
84 changes: 42 additions & 42 deletions a4kStreaming/lib/database.py
@@ -1,61 +1,61 @@
 # -*- coding: utf-8 -*-

-import hashlib
 from . import kodi
-from .utils import open_file_wrapper, os, re, json, time, provider_temp_dir
+from .utils import open_file_wrapper, os, time, provider_temp_dir

 def __cache_key_path(key):
-    path = ''.join([x if x.isalnum() else '_' for x in key]) + '.json'
+    path = ''.join([x if x.isalnum() else '_' for x in key])
     return os.path.join(provider_temp_dir, path)

 def __cache_save(key, data):
     path = __cache_key_path(key)
-    with open_file_wrapper(path, mode='w')() as f:
-        f.write(json.dumps(data, indent=4))
+    with open_file_wrapper(path, mode='wb')() as f:
+        f.write(data)

 def __cache_get(key):
     path = __cache_key_path(key)
     if not os.path.exists(path):
         return {}
     try:
-        with open_file_wrapper(path)() as f:
-            return json.load(f)
+        with open_file_wrapper(path, mode='rb')() as f:
+            return f.read()
     except:
         return {}

-def __generate_md5(*args):
-    md5_hash = hashlib.md5()
+def __cache_check(key):
+    path = __cache_key_path(key)
+    return os.path.exists(path)
+
+def __cache_cleanup():
     try:
-        [md5_hash.update(str(arg)) for arg in args]
+        # while temp dir bigger than 500MiB, remove files sorted by age (oldest first)
+        max_size = 500 * 1024 * 1024
+        files = []
+        size = 0
+
+        for file in os.listdir(provider_temp_dir):
+            path = os.path.join(provider_temp_dir, file)
+            if os.path.isfile(path):
+                files.append((path, os.path.getmtime(path)))
+                size += os.path.getsize(path)
+
+        if size < max_size:
+            return
+
+        original_size = size
+
+        files.sort(key=lambda x: x[1])
+        for file, _ in files:
+            if size < max_size:
+                break
+            size -= os.path.getsize(file)
+            os.remove(file)
+
+        return original_size - size
     except:
-        [md5_hash.update(str(arg).encode('utf-8')) for arg in args]
-    return str(md5_hash.hexdigest())
-
-def __get_function_name(function_instance):
-    return re.sub(r'.+?\s*method\s*|.+function\s*|\sat\s*?.+|\s*?of\s*?.+', '', repr(function_instance))
-
-def __hash_function(function_instance, *args):
-    return __get_function_name(function_instance) + __generate_md5(args)
-
-def __get_or_add(key, value, fn, duration, *args, **kwargs):
-    key = __hash_function(fn, *args) if not key else key
-    if not value:
-        data = __cache_get(key)
-        if data:
-            if not duration or time.time() - data['t'] < (duration * 60):
-                return data['v']
-
-    if not value and not fn:
-        return None
-
-    value = fn(*args, **kwargs) if not value else value
-    data = { 't': time.time(), 'v': value }
-    __cache_save(key, data)
-    return value
-
-database = lambda: None
-def db_get(fn, duration, *args, **kwargs):
-    return __get_or_add(None, None, fn, duration, *args, **kwargs)
-database.get = db_get
-database.cache_get = lambda key: __get_or_add(key, None, None, None)
-database.cache_insert = lambda key, value: __get_or_add(key, value, None, None)
+        pass
+
+db = lambda: None
+db.set = lambda key, value: __cache_save(key, value)
+db.get = lambda key: __cache_get(key)
+db.check = lambda key: __cache_check(key)
+db.cleanup = lambda: __cache_cleanup()
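
The rewrite drops the MD5/JSON memoization helpers in favor of a plain file-per-key byte cache under provider_temp_dir. A rough usage sketch of the new facade, assuming the package is importable; the key below is an arbitrary example, the add-on derives real keys from a request hash:

    # Rough usage sketch of the new db facade (hypothetical key; real keys are request hashes).
    from a4kStreaming.lib.database import db

    key = 'example_request_hash'
    if not db.check(key):                    # True once a cache file exists for this key
        db.set(key, b'raw response bytes')   # written verbatim as a file under provider_temp_dir

    body = db.get(key)    # bytes on a hit, {} on a miss or read error
    freed = db.cleanup()  # trims the cache dir back under ~500 MiB, oldest files first
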
2 changes: 1 addition & 1 deletion a4kStreaming/lib/kodi_mock.py
@@ -101,7 +101,7 @@ def __get_video_info_tag(*args, **kwargs): return __video_info_tag

 __list_info_tag = lambda: None
 __list_info_tag.set_cast = lambda _: None
-__list_info_tag.set_stream_details = lambda _, __: None
+__list_info_tag.add_stream_info = lambda _, __: None
 def set_info_tag(*args, **kwargs): return __list_info_tag

 def __create_listitem(*args, **kwargs): return __listitem
32 changes: 26 additions & 6 deletions a4kStreaming/lib/request.py
@@ -20,11 +20,10 @@ def __retry_on_503(core, request, response, retry=True):
         request['validate'] = lambda response: __retry_on_503(core, request, response, retry=False)
     return request

-def execute(core, request, session=None):
-    if not session:
-        session = requests.Session()
-        retries = Retry(total=3, backoff_factor=0.1, status_forcelist=[500, 502, 503, 504])
-        session.mount("https://", HTTPAdapter(max_retries=retries, pool_maxsize=100))
+def execute(core, request, cache=True):
+    session = requests.Session()
+    retries = Retry(total=3, backoff_factor=0.1, status_forcelist=[500, 502, 503, 504])
+    session.mount("https://", HTTPAdapter(max_retries=retries, pool_maxsize=100))

     request.setdefault('timeout', 60)
     headers = request.setdefault('headers', {})
@@ -41,7 +40,28 @@ def execute(core, request, session=None):

     logger.debug('%s ^ - %s' % (request['method'], request['url']))
     try:
-        response = session.request(verify=False, **request)
+        # hash request object for checking a cache file
+        request_hash = core.utils.hash({ 'url': request['url'], 'method': request['method'], 'data': request.get('data', '') })
+        if cache and core.db.check(request_hash):
+            core.logger.debug('Cache hit: %s' % request_hash)
+            response = lambda: None
+            response.text = ''
+            response.content = core.db.get(request_hash)
+            response.status_code = 200
+
+            def refresh():
+                response = session.request(verify=False, **request)
+                if response.status_code == 200:
+                    core.logger.debug('Cache refresh: %s' % request_hash)
+                    core.db.set(request_hash, response.content)
+
+            core.threading.Thread(target=refresh).start()
+        else:
+            if cache:
+                core.logger.debug('Cache miss: %s' % request_hash)
+            response = session.request(verify=False, **request)
+            if response.status_code == 200 and cache:
+                core.db.set(request_hash, response.content)
         exc = ''
     except:
         exc = core.traceback.format_exc()
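
A cache hit never blocks on the network: the stored bytes are returned immediately while a background thread re-fetches the URL and overwrites the cache entry, so the next call sees fresh data. A self-contained sketch of that stale-while-revalidate idea, using only the standard library (the cache directory, helper names and fetch logic here are illustrative, not the add-on's API):

    # Illustrative stale-while-revalidate cache around a plain HTTP fetch.
    # Standard library only; paths and names are examples, not the add-on's.
    import hashlib, json, os, threading, urllib.request

    CACHE_DIR = '/tmp/request_cache'
    os.makedirs(CACHE_DIR, exist_ok=True)

    def _cache_path(request):
        key = hashlib.sha1(json.dumps(request, sort_keys=True).encode('utf-8')).hexdigest()
        return os.path.join(CACHE_DIR, key)

    def fetch(request, cache=True):
        path = _cache_path(request)

        def refresh():
            with urllib.request.urlopen(request['url'], timeout=60) as response:
                body = response.read()
            if cache:
                with open(path, 'wb') as f:
                    f.write(body)
            return body

        if cache and os.path.exists(path):
            # Cache hit: answer from disk now, refresh the entry in the background.
            threading.Thread(target=refresh, daemon=True).start()
            with open(path, 'rb') as f:
                return f.read()

        # Cache miss (or caching disabled): fetch synchronously, storing the result if allowed.
        return refresh()

    # Example: the first call fetches and stores; later calls return instantly from disk.
    # page = fetch({ 'url': 'https://example.com', 'method': 'GET', 'data': '' })
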
7 changes: 5 additions & 2 deletions a4kStreaming/lib/utils.py
@@ -75,12 +75,15 @@ def versiontuple(v):
     version_parts = [(int(v_part) if v_part.isdigit() else 0) for v_part in version_parts]
     return tuple(map(int, version_parts))

+def hash(data):
+    return hashlib.sha1(json.dumps(data, sort_keys=True).encode('utf-8')).hexdigest()
+
 def chunk(it, size):
     it = iter(it)
     return iter(lambda: tuple(islice(it, size)), ())

 def open_file_wrapper(file, mode='r', encoding='utf-8'):
-    if py2:
+    if py2 or mode.endswith('b'):
         return lambda: open(file, mode)
     return lambda: open(file, mode, encoding=encoding)

@@ -138,7 +141,7 @@ def download_zip(core, zip_url, zip_name):
         'timeout': 15
     }

-    with core.request.execute(core, request) as r:
+    with core.request.execute(core, request, False) as r:
         with open(filepath, 'wb') as f:
             shutil.copyfileobj(r.raw, f)

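
Serializing with sort_keys=True means two logically identical request dicts always produce the same digest, and therefore map to the same cache file. A quick check of that property (the dict values are arbitrary examples; the helper mirrors the hash() added above):

    import hashlib, json

    def stable_hash(data):  # mirrors the hash() helper added to utils.py
        return hashlib.sha1(json.dumps(data, sort_keys=True).encode('utf-8')).hexdigest()

    a = { 'url': 'https://example.com', 'method': 'GET', 'data': '' }
    b = { 'method': 'GET', 'data': '', 'url': 'https://example.com' }
    assert stable_hash(a) == stable_hash(b)  # same digest despite different key order
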
5 changes: 4 additions & 1 deletion addon.xml
@@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<addon id="plugin.video.a4kstreaming"
name="a4kStreaming"
version="1.59.0"
version="1.60.0"
provider-name="Unknown">
<requires>
<import addon="script.module.requests"/>
@@ -32,6 +32,9 @@ Designed for low-end devices and Estuary skin.
<screenshot>screenshot-06.jpg</screenshot>
</assets>
<news>
[v1.60.0]:
* Add generic requests caching to speed up browsing

[v1.59.0]:
* Remove deprecated usages of ListItem methods

5 changes: 4 additions & 1 deletion packages/addons.xml
@@ -4,7 +4,7 @@
<addons>
<addon id="plugin.video.a4kstreaming"
name="a4kStreaming"
version="1.59.0"
version="1.60.0"
provider-name="Unknown">
<requires>
<import addon="script.module.requests"/>
@@ -35,6 +35,9 @@ Designed for low-end devices and Estuary skin.
<screenshot>screenshot-06.jpg</screenshot>
</assets>
<news>
[v1.60.0]:
* Add generic requests caching to speed up browsing

[v1.59.0]:
* Remove deprecated usages of ListItem methods

2 changes: 1 addition & 1 deletion packages/addons.xml.crc
@@ -1 +1 @@
04b4bc63dac4924a998aefa868552532a8d67443
94ed66018b0d476f20cd1368dd61e6c3180dea6b
