mirror of https://github.com/SickGear/SickGear.git
synced 2025-01-22 01:23:43 +00:00

Merge pull request #775 from JackDandy/feature/ChangeOptimiseTVDBv1

    Change optimise TheTVDB processes, 40% to 66% saved adding new and ex…

Commit b44b11b3fc
9 changed files with 227 additions and 123 deletions
@@ -159,6 +159,11 @@
 * Change show loaded log message at start up and include info source
 * Change if episode has no airdate then set status to unaired (was skipped)
 * Fix only replace initial quality releases from the upgrade to list
+* Change optimise TheTVDB processes, 40% to 66% saved adding new and existing shows, 40% to 50% saved per show update
+* Change improve shows with more episodes gain largest reductions in time spent processing
+* Change when using "Add new show" reduce search time outs
+* Change always allow incomplete show data
+* Remove redundant config/general/"Allow incomplete show data"
 
 [develop changelog]
 * Change send nzb data to NZBGet for Anizb instead of url
@@ -88,18 +88,6 @@
 </label>
 </div>
 
-#if hasattr($sickbeard, 'ALLOW_INCOMPLETE_SHOWDATA')
-<div class="field-pair">
-    <label for="allow_incomplete_showdata">
-        <span class="component-title">Allow incomplete show data</span>
-        <span class="component-desc">
-            <input type="checkbox" name="allow_incomplete_showdata" id="allow_incomplete_showdata"#echo ('', $checked)[$sickbeard.ALLOW_INCOMPLETE_SHOWDATA]#>
-            <p>add partial show data for future updates to complete</p>
-        </span>
-    </label>
-</div>
-#end if
-
 <div class="field-pair">
     <span class="component-title">Send to trash for actions</span>
     <span class="component-desc">
lib/etreetodict.py (new file, 141 lines)
@@ -0,0 +1,141 @@

try:
    from lxml import ElementTree
except ImportError:
    try:
        import xml.etree.cElementTree as ElementTree
    except ImportError:
        import xml.etree.ElementTree as ElementTree


class XmlDictObject(dict):
    """
    Adds object like functionality to the standard dictionary.
    """

    def __init__(self, initdict=None):
        if initdict is None:
            initdict = {}
        dict.__init__(self, initdict)

    def __getattr__(self, item):
        return self.__getitem__(item)

    def __setattr__(self, item, value):
        self.__setitem__(item, value)

    def __str__(self):
        if self.has_key('_text'):
            return self.__getitem__('_text')
        else:
            return ''

    @staticmethod
    def Wrap(x):
        """
        Static method to wrap a dictionary recursively as an XmlDictObject
        """
        if isinstance(x, dict):
            return XmlDictObject((k, XmlDictObject.Wrap(v)) for (k, v) in x.iteritems())
        elif isinstance(x, list):
            return [XmlDictObject.Wrap(v) for v in x]
        else:
            return x

    @staticmethod
    def _UnWrap(x):
        if isinstance(x, dict):
            return dict((k, XmlDictObject._UnWrap(v)) for (k, v) in x.iteritems())
        elif isinstance(x, list):
            return [XmlDictObject._UnWrap(v) for v in x]
        else:
            return x

    def UnWrap(self):
        """
        Recursively converts an XmlDictObject to a standard dictionary and returns the result.
        """
        return XmlDictObject._UnWrap(self)


def _ConvertDictToXmlRecurse(parent, dictitem):
    assert type(dictitem) is not type([])

    if isinstance(dictitem, dict):
        for (tag, child) in dictitem.iteritems():
            if str(tag) == '_text':
                parent.text = str(child)
            elif type(child) is type([]):
                # iterate through the array and convert
                for listchild in child:
                    elem = ElementTree.Element(tag)
                    parent.append(elem)
                    _ConvertDictToXmlRecurse(elem, listchild)
            else:
                elem = ElementTree.Element(tag)
                parent.append(elem)
                _ConvertDictToXmlRecurse(elem, child)
    else:
        parent.text = str(dictitem)


def ConvertDictToXml(xmldict):
    """
    Converts a dictionary to an XML ElementTree Element
    """
    roottag = xmldict.keys()[0]
    root = ElementTree.Element(roottag)
    _ConvertDictToXmlRecurse(root, xmldict[roottag])
    return root


def _ConvertXmlToDictRecurse(node, dictclass):
    nodedict = dictclass()

    if len(node.items()) > 0:
        # if we have attributes, set them
        nodedict.update(dict(node.items()))

    for child in node:
        # recursively add the element's children
        newitem = _ConvertXmlToDictRecurse(child, dictclass)
        if nodedict.has_key(child.tag):
            # found duplicate tag, force a list
            if type(nodedict[child.tag]) is type([]):
                # append to existing list
                nodedict[child.tag].append(newitem)
            else:
                # convert to list
                nodedict[child.tag] = [nodedict[child.tag], newitem]
        else:
            # only one, directly set the dictionary
            nodedict[child.tag] = newitem

    if node.text is None:
        text = ''
    else:
        text = node.text.strip()

    if len(nodedict) > 0:
        # if we have a dictionary add the text as a dictionary value (if there is any)
        if len(text) > 0:
            nodedict['_text'] = text
    else:
        # if we don't have child nodes or attributes, just set the text
        nodedict = text

    return nodedict


def ConvertXmlToDict(root, dictclass=XmlDictObject):
    """
    Converts an XML String to a dictionary
    """
    if isinstance(root, basestring):
        root = ElementTree.fromstring(root)
    elif not isinstance(root, ElementTree.Element):
        raise TypeError, 'Expected string'

    return dictclass({root.tag: _ConvertXmlToDictRecurse(root, dictclass)})
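For orientation (this example is not part of the commit), a minimal usage sketch of the new module with made-up XML, assuming the SickGear source tree is on the import path. It shows the converter's conventions: repeated sibling tags fold into a list, leaf text becomes the value itself, and XmlDictObject allows attribute-style access:

# sketch: exercising lib/etreetodict.ConvertXmlToDict (Python 2, sample data made up)
from lib.etreetodict import ConvertXmlToDict

xml = ('<Data><Series><id>123</id><SeriesName>Example</SeriesName></Series>'
       '<Episode><id>1</id></Episode><Episode><id>2</id></Episode></Data>')

d = ConvertXmlToDict(xml)
print d['Data']['Series']['SeriesName']        # 'Example'
print d.Data.Series.SeriesName                 # same value, via XmlDictObject.__getattr__
print [e['id'] for e in d['Data']['Episode']]  # ['1', '2'] - duplicate tags forced into a list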
@@ -5,8 +5,8 @@
 # repository:http://github.com/dbr/tvdb_api
 # license:unlicense (http://unlicense.org/)
 
-from functools import wraps
 import traceback
+from functools import wraps
 
 __author__ = 'dbr/Ben'
 __version__ = '1.9'
@@ -21,12 +21,6 @@ import logging
 import zipfile
 import requests
 import requests.exceptions
-import xmltodict
-
-try:
-    import xml.etree.cElementTree as ElementTree
-except ImportError:
-    import xml.etree.ElementTree as ElementTree
 
 try:
     import gzip

@@ -36,6 +30,7 @@ except ImportError:
 from lib.dateutil.parser import parse
 from lib.cachecontrol import CacheControl, caches
 
+from lib.etreetodict import ConvertXmlToDict
 from tvdb_ui import BaseUI, ConsoleUI
 from tvdb_exceptions import (tvdb_error, tvdb_shownotfound,
     tvdb_seasonnotfound, tvdb_episodenotfound, tvdb_attributenotfound)
@@ -565,18 +560,17 @@ class Tvdb:
         except Exception:
             raise tvdb_error('Unknown exception while loading URL %s: %s' % (url, traceback.format_exc()))
 
-        def process(path, key, value):
-            key = key.lower()
-
-            # clean up value and do type changes
-            if value:
-                if 'firstaired' == key:
-                    try:
-                        value = parse(value, fuzzy=True).strftime('%Y-%m-%d')
-                    except:
-                        value = None
-
-            return key, value
+        def process_data(data):
+            te = ConvertXmlToDict(data)
+            if isinstance(te, dict) and 'Data' in te and isinstance(te['Data'], dict) and \
+                    'Series' in te['Data'] and isinstance(te['Data']['Series'], dict) and \
+                    'FirstAired' in te['Data']['Series']:
+                try:
+                    value = parse(te['Data']['Series']['FirstAired'], fuzzy=True).strftime('%Y-%m-%d')
+                except:
+                    value = None
+                te['Data']['Series']['firstaired'] = value
+            return te
 
         if resp.ok:
             if 'application/zip' in resp.headers.get('Content-Type', ''):

@@ -586,12 +580,12 @@ class Tvdb:
                     zipdata = StringIO.StringIO()
                     zipdata.write(resp.content)
                     myzipfile = zipfile.ZipFile(zipdata)
-                    return xmltodict.parse(myzipfile.read('%s.xml' % language), postprocessor=process)
+                    return process_data(myzipfile.read('%s.xml' % language))
                 except zipfile.BadZipfile:
                     raise tvdb_error('Bad zip file received from thetvdb.com, could not read it')
             else:
                 try:
-                    return xmltodict.parse(resp.content.strip(), postprocessor=process)
+                    return process_data(resp.content.strip())
                 except:
                     return dict([(u'data', None)])
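A note on the swap above: the removed code fed raw XML to xmltodict with a postprocessor that lowercased every key and date-normalised 'firstaired' during parsing; the replacement converts the tree once with ConvertXmlToDict, keeps TheTVDB's original tag casing ('Series', 'FirstAired'), rewrites only the one date field, and leaves key lowercasing to the call sites that need it. A hedged sketch of the approximate result, with illustrative values:

# sketch: approximate dict produced by process_data() for a series payload (values made up)
te = {'Data': {'Series': {'id': '123',
                          'SeriesName': 'Example',
                          'FirstAired': '2005-03-26',    # original tag, casing preserved
                          'firstaired': '2005-03-26'},   # normalised copy added by process_data
               'Episode': [{'id': '1'}, {'id': '2'}]}}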
@@ -641,7 +635,7 @@ class Tvdb:
         - Replaces &amp; with &
         - Trailing whitespace
         """
-        return data if data is None else data.strip().replace(u'&amp;', u'&')
+        return data if not isinstance(data, basestring) else data.strip().replace(u'&amp;', u'&')
 
     def search(self, series):
         """This searches TheTVDB.com for the series name

@@ -654,6 +648,7 @@ class Tvdb:
         try:
             series_found = self._getetsrc(self.config['url_get_series'], self.config['params_get_series'])
             if series_found:
+                series_found['Series'] = [{k.lower(): v for k, v in s.iteritems()} for s in series_found['Series']]
                 return series_found.values()[0]
         except:
             pass
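_clean_data now guards on type rather than on None: with ConvertXmlToDict handing it whole structures (lists of episode dicts, nested mappings) rather than only scalar strings, anything that is not a basestring must pass through unchanged, where the old None-only check would have called .strip() on it and crashed. A small sketch of the new behaviour (standalone, not the class method itself):

# sketch: the new guard in isolation (Python 2)
def clean_data(data):
    return data if not isinstance(data, basestring) else data.strip().replace(u'&amp;', u'&')

print clean_data(u' A &amp; B ')  # u'A & B'
print clean_data(None)            # None, passed through as before
print clean_data([{'id': '1'}])   # list passed through; the old None-only check would raise here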
@@ -804,21 +799,21 @@ class Tvdb:
 
         # Parse show information
         log().debug('Getting all series data for %s' % sid)
-        url = self.config['url_epInfo%s' % ('', '_zip')[self.config['useZip']]] % (sid, language)
+        url = (self.config['url_seriesInfo'] % (sid, language), self.config['url_epInfo%s' % ('', '_zip')[self.config['useZip']]] % (sid, language))[get_ep_info]
         show_data = self._getetsrc(url, language=get_show_in_language)
 
         # check and make sure we have data to process and that it contains a series name
-        if not len(show_data) or (isinstance(show_data, dict) and 'seriesname' not in show_data['series']):
+        if not len(show_data) or (isinstance(show_data, dict) and 'SeriesName' not in show_data['Series']):
             return False
 
-        for k, v in show_data['series'].items():
+        for k, v in show_data['Series'].iteritems():
             if None is not v:
                 if k in ['banner', 'fanart', 'poster']:
                     v = self.config['url_artworkPrefix'] % v
                 else:
                     v = self._clean_data(v)
 
-            self._set_show_data(sid, k, v)
+            self._set_show_data(sid, k.lower(), v)
 
         if get_ep_info:
             # Parse banners

@@ -832,24 +827,24 @@ class Tvdb:
             # Parse episode data
             log().debug('Getting all episodes of %s' % sid)
 
-            if 'episode' not in show_data:
+            if 'Episode' not in show_data:
                 return False
 
-            episodes = show_data['episode']
+            episodes = show_data['Episode']
             if not isinstance(episodes, list):
                 episodes = [episodes]
 
             for cur_ep in episodes:
                 if self.config['dvdorder']:
                     log().debug('Using DVD ordering.')
-                    use_dvd = None is not cur_ep['dvd_season'] and None is not cur_ep['dvd_episodenumber']
+                    use_dvd = None is not cur_ep['DVD_season'] and None is not cur_ep['DVD_episodenumber']
                 else:
                     use_dvd = False
 
                 if use_dvd:
-                    elem_seasnum, elem_epno = cur_ep['dvd_season'], cur_ep['dvd_episodenumber']
+                    elem_seasnum, elem_epno = cur_ep['DVD_season'], cur_ep['DVD_episodenumber']
                 else:
-                    elem_seasnum, elem_epno = cur_ep['seasonnumber'], cur_ep['episodenumber']
+                    elem_seasnum, elem_epno = cur_ep['SeasonNumber'], cur_ep['EpisodeNumber']
 
                 if None is elem_seasnum or None is elem_epno:
                     log().warning('An episode has incomplete season/episode number (season: %r, episode: %r)' % (
@@ -895,10 +890,16 @@ class Tvdb:
         """Handles tvdb_instance['seriesname'] calls.
         The dict index should be the show id
         """
+        arg = None
+        if isinstance(key, tuple) and 2 == len(key):
+            key, arg = key
+            if not isinstance(arg, bool):
+                arg = None
+
         if isinstance(key, (int, long)):
             # Item is integer, treat as show id
             if key not in self.shows:
-                self._get_show_data(key, self.config['language'], True)
+                self._get_show_data(key, self.config['language'], (True, arg)[arg is not None])
             return None if key not in self.shows else self.shows[key]
 
         key = str(key).lower()
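The added tuple handling gives callers a way to skip the expensive episode pass: t[12345] behaves as before and loads full episode data, while t[12345, False] fetches series info only. A short sketch of how the key is unpacked, mirroring the lines above (the values are made up):

# sketch: unpacking the (show_id, get_ep_info) tuple key, as in __getitem__ above
key = (12345, False)               # e.g. show_queue.py now calls t[self.indexer_id, False]
arg = None
if isinstance(key, tuple) and 2 == len(key):
    key, arg = key                 # key -> 12345, arg -> False
    if not isinstance(arg, bool):
        arg = None                 # a non-bool second element is ignored
get_ep_info = (True, arg)[arg is not None]  # tuple-index ternary: arg when given, else True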
@@ -657,10 +657,16 @@ class TVRage:
         """Handles tvrage_instance['seriesname'] calls.
         The dict index should be the show id
         """
+        arg = None
+        if isinstance(key, tuple) and 2 == len(key):
+            key, arg = key
+            if not isinstance(arg, bool):
+                arg = None
+
         if isinstance(key, (int, long)):
             # Item is integer, treat as show id
             if key not in self.shows:
-                self._getShowData(key, True)
+                self._getShowData(key, (True, arg)[arg is not None])
             return None if key not in self.shows else self.shows[key]
 
         key = key.lower()
@@ -89,7 +89,6 @@ background_mapping_task = None
 showList = None
 UPDATE_SHOWS_ON_START = False
 SHOW_UPDATE_HOUR = 3
-ALLOW_INCOMPLETE_SHOWDATA = False
 
 providerList = []
 newznabProviderList = []

@@ -514,7 +513,7 @@ def initialize(consoleLogging=True):
         PLEX_UPDATE_LIBRARY, PLEX_SERVER_HOST, PLEX_HOST, PLEX_USERNAME, PLEX_PASSWORD, \
         USE_TRAKT, TRAKT_CONNECTED_ACCOUNT, TRAKT_ACCOUNTS, TRAKT_MRU, TRAKT_VERIFY, TRAKT_REMOVE_WATCHLIST, TRAKT_TIMEOUT, TRAKT_USE_WATCHLIST, TRAKT_METHOD_ADD, TRAKT_START_PAUSED, traktCheckerScheduler, TRAKT_SYNC, TRAKT_DEFAULT_INDEXER, TRAKT_REMOVE_SERIESLIST, TRAKT_UPDATE_COLLECTION, \
         BACKLOG_FREQUENCY, DEFAULT_BACKLOG_FREQUENCY, MIN_BACKLOG_FREQUENCY, MAX_BACKLOG_FREQUENCY, BACKLOG_STARTUP, SKIP_REMOVED_FILES, \
-        showUpdateScheduler, __INITIALIZED__, LAUNCH_BROWSER, TRASH_REMOVE_SHOW, TRASH_ROTATE_LOGS, HOME_SEARCH_FOCUS, SORT_ARTICLE, showList, loadingShowList, UPDATE_SHOWS_ON_START, SHOW_UPDATE_HOUR, ALLOW_INCOMPLETE_SHOWDATA, \
+        showUpdateScheduler, __INITIALIZED__, LAUNCH_BROWSER, TRASH_REMOVE_SHOW, TRASH_ROTATE_LOGS, HOME_SEARCH_FOCUS, SORT_ARTICLE, showList, loadingShowList, UPDATE_SHOWS_ON_START, SHOW_UPDATE_HOUR, \
         NEWZNAB_DATA, INDEXER_DEFAULT, INDEXER_TIMEOUT, USENET_RETENTION, TORRENT_DIR, \
         QUALITY_DEFAULT, FLATTEN_FOLDERS_DEFAULT, SUBTITLES_DEFAULT, STATUS_DEFAULT, WANTED_BEGIN_DEFAULT, WANTED_LATEST_DEFAULT, RECENTSEARCH_STARTUP, \
         GROWL_NOTIFY_ONSNATCH, GROWL_NOTIFY_ONDOWNLOAD, GROWL_NOTIFY_ONSUBTITLEDOWNLOAD, TWITTER_NOTIFY_ONSNATCH, TWITTER_NOTIFY_ONDOWNLOAD, TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD, \

@@ -666,7 +665,6 @@ def initialize(consoleLogging=True):
     UPDATE_SHOWS_ON_START = bool(check_setting_int(CFG, 'General', 'update_shows_on_start', 0))
     SHOW_UPDATE_HOUR = check_setting_int(CFG, 'General', 'show_update_hour', 3)
     SHOW_UPDATE_HOUR = minimax(SHOW_UPDATE_HOUR, 3, 0, 23)
-    ALLOW_INCOMPLETE_SHOWDATA = bool(check_setting_int(CFG, 'General', 'allow_incomplete_showdata', 0))
 
     TRASH_REMOVE_SHOW = bool(check_setting_int(CFG, 'General', 'trash_remove_show', 0))
     TRASH_ROTATE_LOGS = bool(check_setting_int(CFG, 'General', 'trash_rotate_logs', 0))

@@ -1524,7 +1522,6 @@ def save_config():
     new_config['General']['launch_browser'] = int(LAUNCH_BROWSER)
     new_config['General']['update_shows_on_start'] = int(UPDATE_SHOWS_ON_START)
     new_config['General']['show_update_hour'] = int(SHOW_UPDATE_HOUR)
-    new_config['General']['allow_incomplete_showdata'] = int(ALLOW_INCOMPLETE_SHOWDATA)
     new_config['General']['trash_remove_show'] = int(TRASH_REMOVE_SHOW)
     new_config['General']['trash_rotate_logs'] = int(TRASH_ROTATE_LOGS)
     new_config['General']['home_search_focus'] = int(HOME_SEARCH_FOCUS)
@@ -296,7 +296,7 @@ class QueueItemAdd(ShowQueueItem):
             logger.log(u'' + str(sickbeard.indexerApi(self.indexer).name) + ': ' + repr(lINDEXER_API_PARMS))
 
             t = sickbeard.indexerApi(self.indexer).indexer(**lINDEXER_API_PARMS)
-            s = t[self.indexer_id]
+            s = t[self.indexer_id, False]
 
             # this usually only happens if they have an NFO in their show dir which gave us a Indexer ID that has no proper english version of the show
             if getattr(s, 'seriesname', None) is None:

@@ -307,14 +307,6 @@ class QueueItemAdd(ShowQueueItem):
                            (self.showDir, sickbeard.indexerApi(self.indexer).name))
                 self._finishEarly()
                 return
-            # if the show has no episodes/seasons
-            if not sickbeard.ALLOW_INCOMPLETE_SHOWDATA and not s:
-                msg = 'Show %s is on %s but contains no season/episode data. Only the show folder was created.'\
-                      % (s['seriesname'], sickbeard.indexerApi(self.indexer).name)
-                logger.log(msg, logger.ERROR)
-                ui.notifications.error('Unable to add show', msg)
-                self._finishEarly()
-                return
         except Exception as e:
             logger.log('Unable to find show ID:%s on Indexer: %s' % (self.indexer_id, sickbeard.indexerApi(self.indexer).name),
                        logger.ERROR)
@@ -559,6 +559,8 @@ class TVShow(object):
         if self.dvdorder != 0:
             lINDEXER_API_PARMS['dvdorder'] = True
 
+        logger.log('%s: Loading all episodes from %s..' % (self.indexerid, sickbeard.indexerApi(self.indexer).name))
+
         try:
             t = sickbeard.indexerApi(self.indexer).indexer(**lINDEXER_API_PARMS)
             showObj = t[self.indexerid]

@@ -567,8 +569,6 @@ class TVShow(object):
                        (sickbeard.indexerApi(self.indexer).name, sickbeard.indexerApi(self.indexer).name), logger.ERROR)
             return None
 
-        logger.log('%s: Loading all episodes from %s..' % (self.indexerid, sickbeard.indexerApi(self.indexer).name))
-
         scannedEps = {}
 
         sql_l = []

@@ -912,7 +912,7 @@ class TVShow(object):
         else:
             t = tvapi
 
-        myEp = t[self.indexerid]
+        myEp = t[self.indexerid, False]
         if None is myEp:
             logger.log('Show not found (maybe even removed?)', logger.WARNING)
             return False

@@ -931,7 +931,7 @@ class TVShow(object):
         self.imdbid = getattr(myEp, 'imdb_id', '')
 
         if getattr(myEp, 'airs_dayofweek', None) is not None and getattr(myEp, 'airs_time', None) is not None:
-            self.airs = myEp["airs_dayofweek"] + " " + myEp["airs_time"]
+            self.airs = ('%s %s' % (myEp['airs_dayofweek'], myEp['airs_time'])).strip()
 
         if getattr(myEp, 'firstaired', None) is not None:
             self.startyear = int(str(myEp["firstaired"]).split('-')[0])

@@ -1076,15 +1076,15 @@ class TVShow(object):
         for path, dirs, files in ek.ek(os.walk, image_cache_dir):
             for filename in ek.ek(fnmatch.filter, files, '%s.*' % self.indexerid):
                 cache_file = ek.ek(os.path.join, path, filename)
                 logger.log('Attempt to %s cache file %s' % (action, cache_file))
                 try:
                     if sickbeard.TRASH_REMOVE_SHOW:
                         send2trash(cache_file)
                     else:
                         os.remove(cache_file)
 
                 except OSError as e:
                     logger.log('Unable to %s %s: %s / %s' % (action, cache_file, repr(e), str(e)), logger.WARNING)
 
         # remove entire show folder
         if full:
@@ -1771,15 +1771,7 @@ class TVEpisode(object):
                 self.deleteEpisode()
             return
 
-        if not sickbeard.ALLOW_INCOMPLETE_SHOWDATA and None is getattr(myEp, 'episodename', None):
-            logger.log('This episode (%s - %sx%s) has no name on %s' %
-                       (self.show.name, season, episode, sickbeard.indexerApi(self.indexer).name))
-            # if I'm incomplete on TVDB but I once was complete then just delete myself from the DB for now
-            if -1 != self.indexerid:
-                self.deleteEpisode()
-            return False
-
-        if None is getattr(myEp, 'absolute_number', None):
+        if getattr(myEp, 'absolute_number', None) in (None, ''):
             logger.log('This episode (%s - %sx%s) has no absolute number on %s' %
                        (self.show.name, season, episode, sickbeard.indexerApi(self.indexer).name), logger.DEBUG)
         else:

@@ -1827,8 +1819,7 @@ class TVEpisode(object):
         self.indexerid = getattr(myEp, 'id', None)
         if None is self.indexerid:
             logger.log('Failed to retrieve ID from %s' % sickbeard.indexerApi(self.indexer).name, logger.ERROR)
-            if -1 != self.indexerid:
-                self.deleteEpisode()
+            self.deleteEpisode()
             return False
 
         # don't update show status if show dir is missing, unless it's missing on purpose
@@ -2060,42 +2051,26 @@
             logger.log('%s: Not creating SQL queue - record is not dirty' % self.show.indexerid, logger.DEBUG)
             return
 
-        myDB = db.DBConnection()
-        rows = myDB.select(
-            'SELECT episode_id FROM tv_episodes WHERE showid = ? AND indexer=? AND season = ? AND episode = ?',
-            [self.show.indexerid, self.show.indexer, self.season, self.episode])
-
-        epID = None
-        if rows:
-            epID = int(rows[0]['episode_id'])
-
         self.dirty = False
-        if epID:
-            # use a custom update method to get the data into the DB for existing records.
-            return [
-                'UPDATE tv_episodes SET indexerid = ?, indexer = ?, name = ?, description = ?, subtitles = ?, '
-                'subtitles_searchcount = ?, subtitles_lastsearch = ?, airdate = ?, hasnfo = ?, hastbn = ?, status = ?, '
-                'location = ?, file_size = ?, release_name = ?, is_proper = ?, showid = ?, season = ?, episode = ?, '
-                'absolute_number = ?, version = ?, release_group = ? WHERE episode_id = ?',
-                [self.indexerid, self.indexer, self.name, self.description, ",".join([sub for sub in self.subtitles]),
-                 self.subtitles_searchcount, self.subtitles_lastsearch, self.airdate.toordinal(), self.hasnfo,
-                 self.hastbn,
-                 self.status, self.location, self.file_size, self.release_name, self.is_proper, self.show.indexerid,
-                 self.season, self.episode, self.absolute_number, self.version, self.release_group, epID]]
-        else:
-            # use a custom insert method to get the data into the DB.
-            return [
-                'INSERT OR IGNORE INTO tv_episodes (episode_id, indexerid, indexer, name, description, subtitles, '
-                'subtitles_searchcount, subtitles_lastsearch, airdate, hasnfo, hastbn, status, location, file_size, '
-                'release_name, is_proper, showid, season, episode, absolute_number, version, release_group) VALUES '
-                '((SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?)'
-                ',?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);',
-                [self.show.indexerid, self.season, self.episode, self.indexerid, self.indexer, self.name,
-                 self.description,
-                 ",".join([sub for sub in self.subtitles]), self.subtitles_searchcount, self.subtitles_lastsearch,
-                 self.airdate.toordinal(), self.hasnfo, self.hastbn, self.status, self.location, self.file_size,
-                 self.release_name, self.is_proper, self.show.indexerid, self.season, self.episode,
-                 self.absolute_number, self.version, self.release_group]]
+        return [
+            'INSERT OR REPLACE INTO tv_episodes (episode_id, indexerid, indexer, name, description, subtitles, '
+            'subtitles_searchcount, subtitles_lastsearch, airdate, hasnfo, hastbn, status, location, file_size, '
+            'release_name, is_proper, showid, season, episode, absolute_number, version, release_group, '
+            'scene_absolute_number, scene_season, scene_episode) VALUES '
+            '((SELECT episode_id FROM tv_episodes WHERE indexer = ? AND showid = ? AND season = ? AND episode = ?)'
+            ',?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,'
+            '(SELECT scene_absolute_number FROM tv_episodes WHERE indexer = ? AND showid = ? AND season = ? AND episode = ?),'
+            '(SELECT scene_season FROM tv_episodes WHERE indexer = ? AND showid = ? AND season = ? AND episode = ?),'
+            '(SELECT scene_episode FROM tv_episodes WHERE indexer = ? AND showid = ? AND season = ? AND episode = ?));',
+            [self.show.indexer, self.show.indexerid, self.season, self.episode, self.indexerid, self.indexer, self.name,
+             self.description,
+             ",".join([sub for sub in self.subtitles]), self.subtitles_searchcount, self.subtitles_lastsearch,
+             self.airdate.toordinal(), self.hasnfo, self.hastbn, self.status, self.location, self.file_size,
+             self.release_name, self.is_proper, self.show.indexerid, self.season, self.episode,
+             self.absolute_number, self.version, self.release_group,
+             self.show.indexer, self.show.indexerid, self.season, self.episode,
+             self.show.indexer, self.show.indexerid, self.season, self.episode,
+             self.show.indexer, self.show.indexerid, self.season, self.episode]]
 
     def saveToDB(self, forceSave=False):
         """
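The rewrite above folds the old SELECT-then-UPDATE-or-INSERT round trip into one INSERT OR REPLACE: the first sub-select reuses any existing episode_id so the replace keeps the row's primary key, and the three scene_* sub-selects carry the scene numbering over, since OR REPLACE deletes the conflicting row and would otherwise blank columns the VALUES list does not set. A self-contained sketch of the pattern on a toy table (schema simplified, not the real tv_episodes layout):

# sketch: INSERT OR REPLACE preserving the primary key and an unlisted column (Python 2)
import sqlite3

con = sqlite3.connect(':memory:')
con.execute('CREATE TABLE eps (episode_id INTEGER PRIMARY KEY, showid INT, season INT,'
            ' episode INT, name TEXT, scene_season INT)')
con.execute('INSERT INTO eps (showid, season, episode, name, scene_season) VALUES (1, 2, 3, ?, 7)',
            ['old name'])

# upsert: sub-selects pull episode_id and scene_season from the row being replaced, if any
con.execute('INSERT OR REPLACE INTO eps (episode_id, showid, season, episode, name, scene_season)'
            ' VALUES ((SELECT episode_id FROM eps WHERE showid = ? AND season = ? AND episode = ?),'
            ' ?, ?, ?, ?,'
            ' (SELECT scene_season FROM eps WHERE showid = ? AND season = ? AND episode = ?))',
            [1, 2, 3, 1, 2, 3, 'new name', 1, 2, 3])

print con.execute('SELECT episode_id, name, scene_season FROM eps').fetchall()
# [(1, u'new name', 7)]: same row id, updated name, scene numbering preserved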
@@ -2368,11 +2368,11 @@ class NewHomeAddShows(Home):
                     logger.log('Fetching show using id: %s (%s) from tv datasource %s' % (
                         search_id, search_term, sickbeard.indexerApi(indexer).name), logger.DEBUG)
                     results.setdefault('tt' in search_id and 3 or indexer, []).extend(
-                        [{'id': indexer_id, 'seriesname': t[indexer_id]['seriesname'],
-                          'firstaired': t[indexer_id]['firstaired'], 'network': t[indexer_id]['network'],
-                          'overview': t[indexer_id]['overview'],
-                          'genres': '' if not t[indexer_id]['genre'] else
-                          t[indexer_id]['genre'].lower().strip('|').replace('|', ', '),
+                        [{'id': indexer_id, 'seriesname': t[indexer_id, False]['seriesname'],
+                          'firstaired': t[indexer_id, False]['firstaired'], 'network': t[indexer_id, False]['network'],
+                          'overview': t[indexer_id, False]['overview'],
+                          'genres': '' if not t[indexer_id, False]['genre'] else
+                          t[indexer_id, False]['genre'].lower().strip('|').replace('|', ', '),
                           }])
                     break
                 else:
@@ -4305,7 +4305,7 @@ class ConfigGeneral(Config):
         return m.hexdigest()
 
     def saveGeneral(self, log_dir=None, web_port=None, web_log=None, encryption_version=None, web_ipv6=None,
-                    update_shows_on_start=None, show_update_hour=None, allow_incomplete_showdata=None,
+                    update_shows_on_start=None, show_update_hour=None,
                     trash_remove_show=None, trash_rotate_logs=None, update_frequency=None, launch_browser=None, web_username=None,
                     use_api=None, api_key=None, indexer_default=None, timezone_display=None, cpu_preset=None, file_logging_preset=None,
                     web_password=None, version_notify=None, enable_https=None, https_cert=None, https_key=None,

@@ -4327,7 +4327,6 @@
 
         sickbeard.UPDATE_SHOWS_ON_START = config.checkbox_to_value(update_shows_on_start)
         sickbeard.SHOW_UPDATE_HOUR = config.minimax(show_update_hour, 3, 0, 23)
-        sickbeard.ALLOW_INCOMPLETE_SHOWDATA = config.checkbox_to_value(allow_incomplete_showdata)
         sickbeard.TRASH_REMOVE_SHOW = config.checkbox_to_value(trash_remove_show)
        sickbeard.TRASH_ROTATE_LOGS = config.checkbox_to_value(trash_rotate_logs)
         config.change_UPDATE_FREQUENCY(update_frequency)