Hopefully Release candidate of 2.3
ukdtom committed Oct 22, 2016
1 parent bc2c623 commit 5f5d3c8
Showing 9 changed files with 134 additions and 62 deletions.
Binary file modified Contents/Code/Docs/webtools-README_DEVS.odt
Binary file not shown.
14 changes: 8 additions & 6 deletions Contents/Code/consts.py
@@ -10,15 +10,15 @@

import io, os, json

DEBUGMODE = False
WT_AUTH = True
VERSION = 'ERROR'
UAS_URL = 'https://github.com/ukdtom/UAS2Res'
UAS_BRANCH = 'master'
DEBUGMODE = False # default for debug mode
WT_AUTH = True # validate password
VERSION = 'ERROR' # version of WebTools
UAS_URL = 'https://github.com/ukdtom/UAS2Res' # UAS2 Repo URL
UAS_BRANCH = 'master' # UAS2 branch to check
PREFIX = '/applications/webtools'
NAME = 'WebTools'
ICON = 'WebTools.png'
JSONTIMESTAMP = 0
JSONTIMESTAMP = 0 # timestamp for json export


class consts(object):
@@ -32,6 +32,7 @@ def __init__(self):
global VERSION
global JSONTIMESTAMP

# Grab version number from the version file
versionFile = Core.storage.join_path(Core.app_support_path, Core.config.bundles_dir_name, NAME + '.bundle', 'VERSION')
with io.open(versionFile, "rb") as version_file:
VERSION = version_file.read().replace('\n','')
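For context, the VERSION lookup above just reads a one-line text file that ships inside the bundle and strips the trailing newline. A minimal standalone sketch of the same idea, with a hypothetical path in place of the Core.storage helpers consts.py uses:

    import io, os

    # Hypothetical plug-in location; consts.py builds this via Core.storage.join_path
    version_file_path = os.path.join('/var/lib/plexmediaserver', 'Plug-ins', 'WebTools.bundle', 'VERSION')

    with io.open(version_file_path, 'r') as version_file:
        VERSION = version_file.read().replace('\n', '')  # e.g. '2.3'
    print(VERSION)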
@@ -64,6 +65,7 @@ def __init__(self):
Log.Debug('UAS_Repo: ' + UAS_URL)
Log.Debug('UAS_RepoBranch: ' + UAS_BRANCH)
Log.Debug('Authenticate: ' + str(WT_AUTH))
Log.Debug('JSON timestamp: ' + str(JSONTIMESTAMP))
Log.Debug('*****************************************************')
else:
DEBUGMODE = False
50 changes: 29 additions & 21 deletions Contents/Code/findMedia.py
100644 → 100755
@@ -16,14 +16,18 @@
from misc import misc

# Consts used here
AmountOfMediasInDatabase = 0 # Int of amount of medias in a database section
mediasFromDB = [] # Files from the database
mediasFromFileSystem = [] # Files from the file system
statusMsg = 'idle' # Response to getStatus
runningState = 0 # Internal tracker of where we are
bAbort = False # Flag to set if user wants to cancel
Extras = ['behindthescenes','deleted','featurette','interview','scene','short','trailer'] # Local extras
KEYS = ['IGNORE_HIDDEN', 'IGNORED_DIRS', 'VALID_EXTENSIONS'] # Valid keys for prefs
AmountOfMediasInDatabase = 0 # Int of amount of medias in a database section
mediasFromDB = [] # Files from the database
mediasFromFileSystem = [] # Files from the file system
statusMsg = 'idle' # Response to getStatus
runningState = 0 # Internal tracker of where we are
bAbort = False # Flag to set if user wants to cancel
Extras = ['behindthescenes','deleted','featurette','interview','scene','short','trailer'] # Local extras
ExtrasDirs = ['behind the scenes', 'deleted scenes', 'featurettes', 'interviews', 'scenes', 'shorts', 'trailers'] # Directories to be ignored
KEYS = ['IGNORE_HIDDEN', 'IGNORED_DIRS', 'VALID_EXTENSIONS'] # Valid keys for prefs
excludeElements='Actor,Collection,Country,Director,Genre,Label,Mood,Producer,Role,Similar,Writer'
excludeFields='summary,tagline'



class findMedia(object):
@@ -48,11 +52,10 @@ def populatePrefs(self):
Dict['findMedia'] = {
'IGNORE_HIDDEN' : True,
'IGNORED_DIRS' : [".@__thumb",".AppleDouble","lost+found"],
'VALID_EXTENSIONS' : ['.m4v', '.3gp', '.nsv', '.ts', '.ty', '.strm', '.rm', '.rmvb', '.m3u',
'.mov', '.qt', '.divx', '.xvid', '.bivx', '.vob', '.nrg', '.img', '.iso',
'.pva', '.wmv', '.asf', '.asx', '.ogm', '.m2v', '.avi', '.bin', '.dat',
'.dvr-ms', '.mpg', '.mpeg', '.mp4', '.mkv', '.avc', '.vp3', '.svq3', '.nuv',
'.viv', '.dv', '.fli', '.flv', '.rar', '.001', '.wpl', '.zip', '.mp3']
'VALID_EXTENSIONS' : ['3g2', '3gp', 'asf', 'asx', 'avc', 'avi', 'avs', 'bivx', 'bup', 'divx', 'dv', 'dvr-ms', 'evo',
'fli', 'flv', 'm2t', 'm2ts', 'm2v', 'm4v', 'mkv', 'mov', 'mp4', 'mpeg', 'mpg', 'mts', 'nsv',
'nuv', 'ogm', 'ogv', 'tp', 'pva', 'qt', 'rm', 'rmvb', 'sdp', 'svq3', 'strm', 'ts', 'ty', 'vdr',
'viv', 'vob', 'vp3', 'wmv', 'wpl', 'wtv', 'xsp', 'xvid', 'webm']
}
Dict.Save()
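The default extension list above now stores extensions lower-case and without the leading dot, which is why the matcher in getFiles (next hunk) lower-cases what os.path.splitext returns and drops its first character. A small sketch of that check, using a hypothetical subset of the list:

    import os

    VALID_EXTENSIONS = ['avi', 'mkv', 'mp4']  # hypothetical subset of the default list above

    def has_valid_extension(filename):
        # os.path.splitext gives e.g. '.MKV'; drop the dot and lower-case it before the lookup
        ext = os.path.splitext(filename)[1].lower()[1:]
        return ext in VALID_EXTENSIONS

    print(has_valid_extension('Movie (2016).MKV'))  # True
    print(has_valid_extension('Movie (2016).nfo'))  # False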

@@ -292,18 +295,23 @@ def getFiles(filePath):
if bAbort:
Log.Info('Aborted in getFiles')
raise ValueError('Aborted')
if os.path.splitext(file)[1] in Dict['findMedia']['VALID_EXTENSIONS']:
if os.path.splitext(file)[1].lower()[1:] in Dict['findMedia']['VALID_EXTENSIONS']:
# File has a valid extension
if file.startswith('.') and Dict['findMedia']['IGNORE_HIDDEN']:
continue
# Filter out local extras
if '-' in file:
if os.path.splitext(os.path.basename(file))[0].rsplit('-', 1)[1] in Extras:
continue
if os.path.splitext(os.path.basename(file))[0].rsplit('-', 1)[1].lower() in Extras:
continue
# Filter out local extras directories
if os.path.basename(os.path.normpath(root)).lower() in ExtrasDirs:
continue
composed_file = misc().Unicodize(Core.storage.join_path(root,file))
if Platform.OS == 'Windows':
# I hate windows
composed_file = composed_file[4:]
pos = composed_file.find(':') -1
#composed_file = composed_file[4:]
composed_file = composed_file[pos:]
mediasFromFileSystem.append(composed_file)
statusMsg = 'Scanning file: ' + file
Log.Debug('***** Finished scanning filesystem *****')
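Two of the additions in this hunk are easy to miss: the local-extras filter now compares both the '-suffix' of the file name and the parent directory case-insensitively, and on Windows the path prefix (presumably the \\?\ long-path form) is stripped by locating the drive-letter colon instead of cutting a fixed four characters. A sketch of the filename/directory side, with the module-level Extras and ExtrasDirs lists copied in so it runs on its own:

    import os

    Extras = ['behindthescenes', 'deleted', 'featurette', 'interview', 'scene', 'short', 'trailer']
    ExtrasDirs = ['behind the scenes', 'deleted scenes', 'featurettes', 'interviews', 'scenes', 'shorts', 'trailers']

    def is_local_extra(root, filename):
        # "Movie-Trailer.mkv" style suffix, compared case-insensitively
        base = os.path.splitext(os.path.basename(filename))[0]
        if '-' in base and base.rsplit('-', 1)[1].lower() in Extras:
            return True
        # File living in a dedicated extras directory, e.g. ".../Featurettes/foo.mkv"
        return os.path.basename(os.path.normpath(root)).lower() in ExtrasDirs

    print(is_local_extra('/media/Movie (2016)', 'Movie (2016)-Trailer.mkv'))  # True
    print(is_local_extra('/media/Movie (2016)/Featurettes', 'foo.mkv'))       # True
    print(is_local_extra('/media/Movie (2016)', 'Movie (2016).mkv'))          # False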
@@ -338,7 +346,7 @@ def scanShowDB(sectionNumber=0):
# So let's walk the library
while True:
# Grab shows
shows = XML.ElementFromURL(self.CoreUrl + sectionNumber + '/all?X-Plex-Container-Start=' + str(iCShow) + '&X-Plex-Container-Size=' + str(self.MediaChuncks)).xpath('//Directory')
shows = XML.ElementFromURL(self.CoreUrl + sectionNumber + '/all?X-Plex-Container-Start=' + str(iCShow) + '&X-Plex-Container-Size=' + str(self.MediaChuncks) + '&excludeElements=' + excludeElements + '&excludeFields=' + excludeFields).xpath('//Directory')
# Grab individual show
for show in shows:
statusShow = show.get('title')
@@ -347,7 +355,7 @@ def scanShowDB(sectionNumber=0):
iCSeason = 0
# Grab seasons
while True:
seasons = XML.ElementFromURL('http://127.0.0.1:32400' + show.get('key') + '?X-Plex-Container-Start=' + str(iCSeason) + '&X-Plex-Container-Size=' + str(self.MediaChuncks)).xpath('//Directory')
seasons = XML.ElementFromURL('http://127.0.0.1:32400' + show.get('key') + '?X-Plex-Container-Start=' + str(iCSeason) + '&X-Plex-Container-Size=' + str(self.MediaChuncks) + '&excludeElements=' + excludeElements + '&excludeFields=' + excludeFields).xpath('//Directory')
# Grab individual season
for season in seasons:
if season.get('title') == 'All episodes':
@@ -360,7 +368,7 @@ def scanShowDB(sectionNumber=0):
iEpisode = 0
iCEpisode = 0
while True:
episodes = XML.ElementFromURL('http://127.0.0.1:32400' + season.get('key') + '?X-Plex-Container-Start=' + str(iCEpisode) + '&X-Plex-Container-Size=' + str(self.MediaChuncks)).xpath('//Part')
episodes = XML.ElementFromURL('http://127.0.0.1:32400' + season.get('key') + '?X-Plex-Container-Start=' + str(iCEpisode) + '&X-Plex-Container-Size=' + str(self.MediaChuncks) + '&excludeElements=' + excludeElements + '&excludeFields=' + excludeFields).xpath('//Part')
for episode in episodes:
if bAbort:
raise ValueError('Aborted')
@@ -417,7 +425,7 @@ def scanMovieDb(sectionNumber=0):
# So let's walk the library
while True:
# Grab a chunk from the server
medias = XML.ElementFromURL(self.CoreUrl + sectionNumber + '/all?X-Plex-Container-Start=' + str(iStart) + '&X-Plex-Container-Size=' + str(self.MediaChuncks)).xpath('//Part')
medias = XML.ElementFromURL(self.CoreUrl + sectionNumber + '/all?X-Plex-Container-Start=' + str(iStart) + '&X-Plex-Container-Size=' + str(self.MediaChuncks) + '&excludeElements=' + excludeElements + '&excludeFields=' + excludeFields).xpath('//Part')
# Walk the chunk
for part in medias:
if bAbort:
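All four database-scan URLs touched above follow the same pattern: page through a section with X-Plex-Container-Start/-Size and, new in this commit, ask PMS to leave out heavy metadata via excludeElements/excludeFields. A sketch of how such a URL is put together, assuming a local server on 127.0.0.1:32400 and hypothetical values for the pieces the class normally supplies:

    # Hypothetical values; the plugin builds these from self.CoreUrl, the section number and self.MediaChuncks
    base_url = 'http://127.0.0.1:32400/library/sections/1/all'
    chunk_size = 40
    excludeElements = 'Actor,Collection,Country,Director,Genre,Label,Mood,Producer,Role,Similar,Writer'
    excludeFields = 'summary,tagline'

    start = 0
    url = (base_url +
           '?X-Plex-Container-Start=' + str(start) +
           '&X-Plex-Container-Size=' + str(chunk_size) +
           '&excludeElements=' + excludeElements +
           '&excludeFields=' + excludeFields)
    # The scanners then call XML.ElementFromURL(url), read the //Part or //Directory nodes,
    # bump start by chunk_size and stop when an empty container comes back.
    print(url)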
73 changes: 61 additions & 12 deletions Contents/Code/git.py
@@ -221,12 +221,20 @@ def getUpdateList(self, req):
for bundle in bundles:
if bundle.startswith('https://github'):
# Going the new detection way with the commitId?
if 'CommitId' in Dict['installed'][bundle]:
updateInfo = self.getAtom_UpdateTime_Id(bundle, Dict['installed'][bundle]['branch'])
if Dict['installed'][bundle]['CommitId'] != updateInfo['commitId']:
gitInfo = Dict['installed'][bundle]
gitInfo['gitHubTime'] = updateInfo['mostRecent']
result[bundle] = gitInfo
if 'CommitId' in Dict['installed'][bundle]:
if 'release' in Dict['installed'][bundle]:
relUrl = 'https://api.github.com/repos' + bundle[18:] + '/releases/latest'
Id = JSON.ObjectFromURL(relUrl)['id']
if Dict['installed'][bundle]['CommitId'] != Id:
gitInfo = Dict['installed'][bundle]
gitInfo['gitHubTime'] = JSON.ObjectFromURL(relUrl)['published_at']
result[bundle] = gitInfo
else:
updateInfo = self.getAtom_UpdateTime_Id(bundle, Dict['installed'][bundle]['branch'])
if Dict['installed'][bundle]['CommitId'] != updateInfo['commitId']:
gitInfo = Dict['installed'][bundle]
gitInfo['gitHubTime'] = updateInfo['mostRecent']
result[bundle] = gitInfo
else:
# Sadly has to use timestamps
Log.Info('Using timestamps to detect avail update for ' + bundle)
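The update check above now distinguishes release-tracked bundles from branch-tracked ones: a release install compares the stored CommitId against the id of the repo's latest GitHub release, while a branch install keeps using the commits atom feed. A rough standalone sketch of the release path, with urllib2/json standing in for the framework's JSON.ObjectFromURL and a hypothetical repo path (the plugin gets the '/owner/repo' part by slicing the bundle URL):

    import json, urllib2

    repo_path = '/ukdtom/WebTools.bundle'  # hypothetical; bundle[18:] in getUpdateList
    installed_id = 1234567                 # Dict['installed'][bundle]['CommitId'] for a release install

    relUrl = 'https://api.github.com/repos' + repo_path + '/releases/latest'
    relInfo = json.load(urllib2.urlopen(relUrl))  # JSON.ObjectFromURL(relUrl) in the plugin

    if installed_id != relInfo['id']:
        # A newer release has been published; the plugin records the publish time for the UI
        print('Update available, published at ' + relInfo['published_at'])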
@@ -565,8 +573,12 @@ def saveInstallInfo(url, bundleName, branch):
# Walk them one by one, so we can handle upper/lower case
for git in gits:
if url.upper() == git['repo'].upper():
# Get the last Commit Id of the branch
Id = HTML.ElementFromURL(url + '/commits/' + branch + '.atom').xpath('//entry')[0].xpath('./id')[0].text.split('/')[-1][:10]
# Need to separate release downloads from branch downloads
if 'RELEASE' in branch.upper():
relUrl = 'https://api.github.com/repos' + url[18:] + '/releases/latest'
Id = JSON.ObjectFromURL(relUrl)['id']
else:
Id = HTML.ElementFromURL(url + '/commits/' + branch + '.atom').xpath('//entry')[0].xpath('./id')[0].text.split('/')[-1][:10]
key = git['repo']
del git['repo']
git['CommitId'] = Id
@@ -603,6 +615,26 @@ def saveInstallInfo(url, bundleName, branch):
Dict.Save()
return

''' Get latest Release version '''
def getLatestRelease(url):
# Get release info if present
try:
relUrl = 'https://api.github.com/repos' + url[18:] + '/releases/latest'
relInfo = JSON.ObjectFromURL(relUrl)
downloadUrl = None
for asset in relInfo['assets']:
if asset['name'].upper() == Dict['PMS-AllBundleInfo'][url]['release'].upper():
downloadUrl = asset['browser_download_url']
continue
if downloadUrl:
return downloadUrl
else:
raise "Download URL not found"
except Exception, ex:
Log.Critical('Release info not found on Github: ' + relUrl)
pass
return
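getLatestRelease above answers one question: which asset of the latest release carries the file name recorded under the bundle's 'release' key, so its browser_download_url can be handed to the zip download. A sketch of that selection step on an already-fetched (and here invented) release object:

    # Invented example of the relevant slice of GitHub's /releases/latest response
    relInfo = {
        'assets': [
            {'name': 'WebTools.bundle.zip',
             'browser_download_url': 'https://github.com/example/example/releases/download/v2.3/WebTools.bundle.zip'},
            {'name': 'Source.zip',
             'browser_download_url': 'https://github.com/example/example/releases/download/v2.3/Source.zip'},
        ]
    }
    wanted = 'webtools.bundle.zip'  # Dict['PMS-AllBundleInfo'][url]['release']; case differences are ignored

    downloadUrl = None
    for asset in relInfo['assets']:
        if asset['name'].upper() == wanted.upper():
            downloadUrl = asset['browser_download_url']
            break  # getLatestRelease uses continue, but with a single match the result is the same

    print(downloadUrl)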

''' Download the bundle '''
def downloadBundle2tmp(url, bundleName, branch):
# Helper function
@@ -627,7 +659,10 @@ def removeEmptyFolders(path, removeRoot=True):
# Get the dict with the installed bundles, and init it if it doesn't exist
if not 'installed' in Dict:
Dict['installed'] = {}
zipPath = url + '/archive/' + branch + '.zip'
if 'RELEASE' in branch.upper():
zipPath = getLatestRelease(url)
else:
zipPath = url + '/archive/' + branch + '.zip'
try:
# Grab file from Github
zipfile = Archive.ZipFromURL(zipPath)
@@ -811,7 +846,16 @@ def removeEmptyFolders(path, removeRoot=True):
Log.Debug('Starting install')
req.clear()
url = req.get_argument('url', 'missing')
# Set branch to the branch argument, or master if missing
branch = req.get_argument('branch', 'master')
# Prefer a recorded release download; if none, fall back to the branch stored in the dict
try:
branch = Dict['PMS-AllBundleInfo'][url]['release']+'_WTRELEASE'
except:
try:
branch = Dict['PMS-AllBundleInfo'][url]['branch']
except:
pass
if url == 'missing':
req.set_status(412)
req.finish("<html><body>Missing url of git</body></html>")
@@ -864,9 +908,14 @@ def getLastUpdateTime(self, req, UAS=False, url=''):
branch = 'master'
# Check for updates
try:
url += '/commits/%s.atom' % branch
Log.Debug('URL is: ' + url)
response = Datetime.ParseDate(HTML.ElementFromURL(url).xpath('//entry')[0].xpath('./updated')[0].text).strftime("%Y-%m-%d %H:%M:%S")
if '_WTRELEASE' in branch:
url = 'https://api.github.com/repos' + url[18:] + '/releases/latest'
Log.Debug('URL is: ' + url)
response = JSON.ObjectFromURL(url)['published_at']
else:
url += '/commits/%s.atom' % branch
Log.Debug('URL is: ' + url)
response = Datetime.ParseDate(HTML.ElementFromURL(url).xpath('//entry')[0].xpath('./updated')[0].text).strftime("%Y-%m-%d %H:%M:%S")
Log.Debug('Last update for: ' + url + ' is: ' + str(response))
if UAS:
return response
14 changes: 7 additions & 7 deletions Contents/Code/jsonExporter.py
@@ -37,7 +37,6 @@ def populatePrefs(self):
Dict['jsonExportTimeStamps'] = {}
Dict.Save()


''' Grab the tornado req, and process it for a POST request'''
def reqprocessPost(self, req):
function = req.get_argument('function', 'missing')
@@ -56,7 +55,10 @@ def export(self, req):
''' Return the type of the section '''
def getSectionType(section):
url = 'http://127.0.0.1:32400/library/sections/' + section + '/all?X-Plex-Container-Start=1&X-Plex-Container-Size=0'
return XML.ElementFromURL(url).xpath('//MediaContainer/@viewGroup')[0]
try:
return XML.ElementFromURL(url).xpath('//MediaContainer/@viewGroup')[0]
except:
return "None"

''' Create a simple entry in the videoDetails tree '''
def makeSimpleEntry(media, videoDetails, el):
@@ -172,7 +174,6 @@ def makeFiles(ratingKey):
except Exception, e:
Log.Exception('Exception happened in generating json file: ' + str(e))


''' Scan a movie section '''
def scanMovieSection(req, sectionNumber):
Log.Debug('Starting scanMovieSection')
@@ -219,8 +220,7 @@ def scanMovieSection(req, sectionNumber):
if bAbort:
raise ValueError('Aborted')
iCount += 1
makeFiles(video.get('ratingKey'))

makeFiles(video.get('ratingKey'))
statusMsg = 'Scanning database: item %s of %s : Working' %(iCount, totalSize)
iStart += self.MediaChuncks
if len(videos) == 0:
@@ -255,10 +255,10 @@ def scanShowSection(req, sectionNumber):
elif getSectionType(section) == 'show':
scanShowSection(req, section)
else:
Log.debug('Unknown section type for section:' + section + ' type: ' + getSectionType(section))
Log.Debug('Unknown section type for section:' + section + ' type: ' + getSectionType(section))
req.clear()
req.set_status(404)
req.finish("Unknown sectiontype")
req.finish("Unknown sectiontype or sectiion")
except Exception, e:
Log.Exception('Exception in json export' + str(e))

29 changes: 17 additions & 12 deletions Contents/Code/pms.py
@@ -89,17 +89,19 @@ def updateInstallDict():
installBranch = ''
# Check if already present, and if an install date also is there
installDate = ""
CommitId = ""
if key in Dict['PMS-AllBundleInfo']:
jsonPMSAllBundleInfo = Dict['PMS-AllBundleInfo'][key]
if 'branch' in jsonPMSAllBundleInfo:
installBranch = Dict['PMS-AllBundleInfo'][key]['branch']
if 'date' in jsonPMSAllBundleInfo:
installDate = Dict['PMS-AllBundleInfo'][key]['date']
if 'CommitId' in jsonPMSAllBundleInfo:
CommitId = Dict['PMS-AllBundleInfo'][key]['CommitId']
del git['repo']
# Add/Update our Dict
Dict['PMS-AllBundleInfo'][key] = git
Dict['PMS-AllBundleInfo'][key]['branch'] = installBranch
Dict['PMS-AllBundleInfo'][key]['date'] = installDate
Dict['PMS-AllBundleInfo'][key]['CommitId'] = CommitId

except Exception, e:
Log.Exception('Critical error in updateAllBundleInfoFromUAS1 while walking the gits: ' + str(e))
Dict.Save()
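updateInstallDict above re-registers every repo coming from the UAS feed, but keeps the locally known branch, install date and, new in this commit, the CommitId, so refreshing the feed does not wipe the data the update check relies on. Roughly, with invented stand-ins for the stored entry and the fresh feed data:

    existing = {'branch': 'master', 'date': '2016-10-22', 'CommitId': 'abcdef1234'}  # hypothetical stored entry
    fresh = {'title': 'Some bundle', 'description': 'From the UAS feed'}             # hypothetical feed data

    merged = dict(fresh)
    for field in ('branch', 'date', 'CommitId'):
        merged[field] = existing.get(field, '')  # carry the locally tracked fields over

    print(merged)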
@@ -476,15 +478,18 @@ def delSub(self, req):
if filePath.startswith('media://'):
# Path to symlink
filePath = filePath.replace('media:/', os.path.join(Core.app_support_path, 'Media', 'localhost'))
# Subtitle name
agent, sub = filePath.split('_')
tmp, agent = agent.split('com.')
# Agent used
agent = 'com.' + agent
filePath2 = filePath.replace('Contents', os.path.join('Contents', 'Subtitle Contributions'))
filePath2, language = filePath2.split('Subtitles')
language = language[1:3]
filePath3 = os.path.join(filePath2[:-1], agent, language, sub)
try:
# Subtitle name
agent, sub = filePath.rsplit('_',1)
tmp, agent = agent.split('com.')
# Agent used
agent = 'com.' + agent
filePath2 = filePath.replace('Contents', os.path.join('Contents', 'Subtitle Contributions'))
filePath2, language = filePath2.split('Subtitles')
language = language[1:3]
filePath3 = os.path.join(filePath2[:-1], agent, language, sub)
except Exception, e:
Log.Exception('Exception in delSub generation file Path: ' + str(e))
subtitlesXMLPath, tmp = filePath.split('Contents')
agentXMLPath = os.path.join(subtitlesXMLPath, 'Contents', 'Subtitle Contributions', agent + '.xml')
subtitlesXMLPath = os.path.join(subtitlesXMLPath, 'Contents', 'Subtitles.xml')
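The path handling above derives three things from a subtitle symlink path: the agent identifier, the two-letter language code and the subtitle file name; the switch to rsplit('_', 1) avoids the unpack error the old split('_') hit on paths with more than one underscore, and the whole block is now wrapped in a try/except. A sketch on a hypothetical path of the shape the splits imply (.../Contents/Subtitles/&lt;language&gt;/&lt;agent&gt;_&lt;file&gt;):

    import os

    filePath = '/plex/Media/localhost/a/bcd.bundle/Contents/Subtitles/en/com.plexapp.agents.opensubtitles_abc123.srt'

    agent, sub = filePath.rsplit('_', 1)  # keep underscores inside the file name intact
    tmp, agent = agent.split('com.')
    agent = 'com.' + agent                # 'com.plexapp.agents.opensubtitles'

    filePath2 = filePath.replace('Contents', os.path.join('Contents', 'Subtitle Contributions'))
    filePath2, language = filePath2.split('Subtitles')
    language = language[1:3]              # 'en'

    filePath3 = os.path.join(filePath2[:-1], agent, language, sub)
    print(filePath3)  # .../Contents/Subtitle Contributions/com.plexapp.agents.opensubtitles/en/abc123.srt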
4 changes: 4 additions & 0 deletions README.md
@@ -1,9 +1,13 @@
WebTools.bundle
===============
[![GitHub issues](https://img.shields.io/github/issues/dagalufh/WebTools.bundle.svg?style=flat)](https://github.com/dagalufh/WebTools.bundle/issues) [![](https://img.shields.io/github/release/dagalufh/WebTools.bundle.svg?style=flat)](https://github.com/dagalufh/WebTools.bundle/releases) [![Download of latest release](https://img.shields.io/github/downloads/dagalufh/WebTools.bundle/latest/total.svg?style=flat)](https://github.com/dagalufh/WebTools.bundle/releases/latest)
[![master](https://img.shields.io/badge/master-stable-green.svg?maxAge=2592000)]()
[![Maintenance](https://img.shields.io/maintenance/yes/2016.svg?maxAge=2592000)]()


Please see the wiki for further information

https://github.com/dagalufh/WebTools.bundle/wiki

To download, go here:
https://github.com/dagalufh/WebTools.bundle/releases/latest
2 changes: 1 addition & 1 deletion VERSION
@@ -1 +1 @@
2.3 DEV
2.3