mirror of
https://github.com/SickGear/SickGear.git
synced 2025-01-05 17:43:37 +00:00
Change optimise TheTVDB processes, 40% to 66% saved adding new and existing shows, 40% to 50% saved per show update.
Change improve processing so shows with more episodes gain the largest reductions in time spent.
Change when using "Add new show" reduce search timeouts.
Change always allow incomplete show data.
Remove redundant config/general/"Allow incomplete show data".
This commit is contained in:
parent 289394ecee
commit 9b3af8b84a
9 changed files with 227 additions and 123 deletions
@@ -159,6 +159,11 @@
 * Change show loaded log message at start up and include info source
 * Change if episode has no airdate then set status to unaired (was skipped)
 * Fix only replace initial quality releases from the upgrade to list
+* Change optimise TheTVDB processes, 40% to 66% saved adding new and existing shows, 40% to 50% saved per show update
+* Change improve processing so shows with more episodes gain the largest reductions in time spent
+* Change when using "Add new show" reduce search timeouts
+* Change always allow incomplete show data
+* Remove redundant config/general/"Allow incomplete show data"
 
 [develop changelog]
 * Change send nzb data to NZBGet for Anizb instead of url
@@ -88,18 +88,6 @@
         </label>
     </div>
 
-    #if hasattr($sickbeard, 'ALLOW_INCOMPLETE_SHOWDATA')
-    <div class="field-pair">
-        <label for="allow_incomplete_showdata">
-            <span class="component-title">Allow incomplete show data</span>
-            <span class="component-desc">
-                <input type="checkbox" name="allow_incomplete_showdata" id="allow_incomplete_showdata"#echo ('', $checked)[$sickbeard.ALLOW_INCOMPLETE_SHOWDATA]#>
-                <p>add partial show data for future updates to complete</p>
-            </span>
-        </label>
-    </div>
-    #end if
-
     <div class="field-pair">
         <span class="component-title">Send to trash for actions</span>
         <span class="component-desc">
141 lib/etreetodict.py Normal file

@@ -0,0 +1,141 @@
try:
    from lxml import ElementTree
except ImportError:
    try:
        import xml.etree.cElementTree as ElementTree
    except ImportError:
        import xml.etree.ElementTree as ElementTree


class XmlDictObject(dict):
    """
    Adds object like functionality to the standard dictionary.
    """

    def __init__(self, initdict=None):
        if initdict is None:
            initdict = {}
        dict.__init__(self, initdict)

    def __getattr__(self, item):
        return self.__getitem__(item)

    def __setattr__(self, item, value):
        self.__setitem__(item, value)

    def __str__(self):
        if self.has_key('_text'):
            return self.__getitem__('_text')
        else:
            return ''

    @staticmethod
    def Wrap(x):
        """
        Static method to wrap a dictionary recursively as an XmlDictObject
        """

        if isinstance(x, dict):
            return XmlDictObject((k, XmlDictObject.Wrap(v)) for (k, v) in x.iteritems())
        elif isinstance(x, list):
            return [XmlDictObject.Wrap(v) for v in x]
        else:
            return x

    @staticmethod
    def _UnWrap(x):
        if isinstance(x, dict):
            return dict((k, XmlDictObject._UnWrap(v)) for (k, v) in x.iteritems())
        elif isinstance(x, list):
            return [XmlDictObject._UnWrap(v) for v in x]
        else:
            return x

    def UnWrap(self):
        """
        Recursively converts an XmlDictObject to a standard dictionary and returns the result.
        """

        return XmlDictObject._UnWrap(self)


def _ConvertDictToXmlRecurse(parent, dictitem):
    assert type(dictitem) is not type([])

    if isinstance(dictitem, dict):
        for (tag, child) in dictitem.iteritems():
            if str(tag) == '_text':
                parent.text = str(child)
            elif type(child) is type([]):
                # iterate through the array and convert
                for listchild in child:
                    elem = ElementTree.Element(tag)
                    parent.append(elem)
                    _ConvertDictToXmlRecurse(elem, listchild)
            else:
                elem = ElementTree.Element(tag)
                parent.append(elem)
                _ConvertDictToXmlRecurse(elem, child)
    else:
        parent.text = str(dictitem)


def ConvertDictToXml(xmldict):
    """
    Converts a dictionary to an XML ElementTree Element
    """

    roottag = xmldict.keys()[0]
    root = ElementTree.Element(roottag)
    _ConvertDictToXmlRecurse(root, xmldict[roottag])
    return root


def _ConvertXmlToDictRecurse(node, dictclass):
    nodedict = dictclass()

    if len(node.items()) > 0:
        # if we have attributes, set them
        nodedict.update(dict(node.items()))

    for child in node:
        # recursively add the element's children
        newitem = _ConvertXmlToDictRecurse(child, dictclass)
        if nodedict.has_key(child.tag):
            # found duplicate tag, force a list
            if type(nodedict[child.tag]) is type([]):
                # append to existing list
                nodedict[child.tag].append(newitem)
            else:
                # convert to list
                nodedict[child.tag] = [nodedict[child.tag], newitem]
        else:
            # only one, directly set the dictionary
            nodedict[child.tag] = newitem

    if node.text is None:
        text = ''
    else:
        text = node.text.strip()

    if len(nodedict) > 0:
        # if we have a dictionary add the text as a dictionary value (if there is any)
        if len(text) > 0:
            nodedict['_text'] = text
    else:
        # if we don't have child nodes or attributes, just set the text
        nodedict = text

    return nodedict


def ConvertXmlToDict(root, dictclass=XmlDictObject):
    """
    Converts an XML String to a dictionary
    """

    if isinstance(root, basestring):
        root = ElementTree.fromstring(root)
    elif not isinstance(root, ElementTree.Element):
        raise TypeError, 'Expected string'

    return dictclass({root.tag: _ConvertXmlToDictRecurse(root, dictclass)})
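For reference, a minimal usage sketch of the new module (the XML snippet and values are invented for illustration; the attribute access comes from XmlDictObject.__getattr__):

    from lib.etreetodict import ConvertXmlToDict

    # a trimmed, TheTVDB-style payload (hypothetical values)
    xml = '<Data><Series><SeriesName>Example</SeriesName>' \
          '<FirstAired>2015-01-02</FirstAired></Series></Data>'
    show = ConvertXmlToDict(xml)
    print(show['Data']['Series']['SeriesName'])  # Example
    print(show.Data.Series.FirstAired)           # 2015-01-02, via attribute access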
@@ -5,8 +5,8 @@
 # repository:http://github.com/dbr/tvdb_api
 # license:unlicense (http://unlicense.org/)
 
-from functools import wraps
 import traceback
+from functools import wraps
 
 __author__ = 'dbr/Ben'
 __version__ = '1.9'
@@ -21,12 +21,6 @@ import logging
 import zipfile
 import requests
 import requests.exceptions
-import xmltodict
 
-try:
-    import xml.etree.cElementTree as ElementTree
-except ImportError:
-    import xml.etree.ElementTree as ElementTree
-
 try:
     import gzip
@@ -36,6 +30,7 @@ except ImportError:
 from lib.dateutil.parser import parse
 from lib.cachecontrol import CacheControl, caches
 
+from lib.etreetodict import ConvertXmlToDict
 from tvdb_ui import BaseUI, ConsoleUI
 from tvdb_exceptions import (tvdb_error, tvdb_shownotfound,
                              tvdb_seasonnotfound, tvdb_episodenotfound, tvdb_attributenotfound)
@@ -565,18 +560,17 @@ class Tvdb:
         except Exception:
             raise tvdb_error('Unknown exception while loading URL %s: %s' % (url, traceback.format_exc()))
 
-        def process(path, key, value):
-            key = key.lower()
-
-            # clean up value and do type changes
-            if value:
-                if 'firstaired' == key:
-                    try:
-                        value = parse(value, fuzzy=True).strftime('%Y-%m-%d')
-                    except:
-                        value = None
-
-            return key, value
+        def process_data(data):
+            te = ConvertXmlToDict(data)
+            if isinstance(te, dict) and 'Data' in te and isinstance(te['Data'], dict) and \
+                    'Series' in te['Data'] and isinstance(te['Data']['Series'], dict) and \
+                    'FirstAired' in te['Data']['Series']:
+                try:
+                    value = parse(te['Data']['Series']['FirstAired'], fuzzy=True).strftime('%Y-%m-%d')
+                except:
+                    value = None
+                te['Data']['Series']['firstaired'] = value
+            return te
 
         if resp.ok:
             if 'application/zip' in resp.headers.get('Content-Type', ''):
@@ -586,12 +580,12 @@ class Tvdb:
                     zipdata = StringIO.StringIO()
                     zipdata.write(resp.content)
                     myzipfile = zipfile.ZipFile(zipdata)
-                    return xmltodict.parse(myzipfile.read('%s.xml' % language), postprocessor=process)
+                    return process_data(myzipfile.read('%s.xml' % language))
                 except zipfile.BadZipfile:
                     raise tvdb_error('Bad zip file received from thetvdb.com, could not read it')
             else:
                 try:
-                    return xmltodict.parse(resp.content.strip(), postprocessor=process)
+                    return process_data(resp.content.strip())
                 except:
                     return dict([(u'data', None)])
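The old xmltodict postprocessor lower-cased every key and date-normalised values while parsing; process_data now parses once with ConvertXmlToDict and fixes up only Data/Series/FirstAired. A standalone sketch of that normalisation (the input value is invented; inside the repo the import path is lib.dateutil.parser):

    from dateutil.parser import parse

    raw = 'Jan 2, 2015'  # hypothetical FirstAired value as served by thetvdb.com
    print(parse(raw, fuzzy=True).strftime('%Y-%m-%d'))  # 2015-01-02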
@@ -641,7 +635,7 @@ class Tvdb:
         - Replaces &amp; with &
         - Trailing whitespace
         """
-        return data if data is None else data.strip().replace(u'&amp;', u'&')
+        return data if not isinstance(data, basestring) else data.strip().replace(u'&amp;', u'&')
 
     def search(self, series):
         """This searches TheTVDB.com for the series name
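The broadened guard means non-string values now pass through _clean_data untouched instead of raising. A minimal standalone sketch (sample values invented):

    def _clean_data(data):
        return data if not isinstance(data, basestring) else data.strip().replace(u'&amp;', u'&')

    print(_clean_data(u' Fox &amp; Friends '))  # u'Fox & Friends'
    print(_clean_data(2015))  # 2015; the old 'data is None' guard would have called int.strip()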
@@ -654,6 +648,7 @@ class Tvdb:
         try:
             series_found = self._getetsrc(self.config['url_get_series'], self.config['params_get_series'])
             if series_found:
+                series_found['Series'] = [{k.lower(): v for k, v in s.iteritems()} for s in series_found['Series']]
                 return series_found.values()[0]
         except:
             pass
@@ -804,21 +799,21 @@ class Tvdb:
 
         # Parse show information
         log().debug('Getting all series data for %s' % sid)
-        url = self.config['url_epInfo%s' % ('', '_zip')[self.config['useZip']]] % (sid, language)
+        url = (self.config['url_seriesInfo'] % (sid, language), self.config['url_epInfo%s' % ('', '_zip')[self.config['useZip']]] % (sid, language))[get_ep_info]
         show_data = self._getetsrc(url, language=get_show_in_language)
 
         # check and make sure we have data to process and that it contains a series name
-        if not len(show_data) or (isinstance(show_data, dict) and 'seriesname' not in show_data['series']):
+        if not len(show_data) or (isinstance(show_data, dict) and 'SeriesName' not in show_data['Series']):
             return False
 
-        for k, v in show_data['series'].items():
+        for k, v in show_data['Series'].iteritems():
             if None is not v:
                 if k in ['banner', 'fanart', 'poster']:
                     v = self.config['url_artworkPrefix'] % v
                 else:
                     v = self._clean_data(v)
 
-            self._set_show_data(sid, k, v)
+            self._set_show_data(sid, k.lower(), v)
 
         if get_ep_info:
             # Parse banners
@@ -832,24 +827,24 @@ class Tvdb:
             # Parse episode data
             log().debug('Getting all episodes of %s' % sid)
 
-            if 'episode' not in show_data:
+            if 'Episode' not in show_data:
                 return False
 
-            episodes = show_data['episode']
+            episodes = show_data['Episode']
             if not isinstance(episodes, list):
                 episodes = [episodes]
 
             for cur_ep in episodes:
                 if self.config['dvdorder']:
                     log().debug('Using DVD ordering.')
-                    use_dvd = None is not cur_ep['dvd_season'] and None is not cur_ep['dvd_episodenumber']
+                    use_dvd = None is not cur_ep['DVD_season'] and None is not cur_ep['DVD_episodenumber']
                 else:
                     use_dvd = False
 
                 if use_dvd:
-                    elem_seasnum, elem_epno = cur_ep['dvd_season'], cur_ep['dvd_episodenumber']
+                    elem_seasnum, elem_epno = cur_ep['DVD_season'], cur_ep['DVD_episodenumber']
                 else:
-                    elem_seasnum, elem_epno = cur_ep['seasonnumber'], cur_ep['episodenumber']
+                    elem_seasnum, elem_epno = cur_ep['SeasonNumber'], cur_ep['EpisodeNumber']
 
                 if None is elem_seasnum or None is elem_epno:
                     log().warning('An episode has incomplete season/episode number (season: %r, episode: %r)' % (
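With keys no longer lower-cased at parse time, episode records are read with the raw TheTVDB casing. A self-contained sketch of the DVD-order selection above (the episode values are invented):

    cur_ep = {'DVD_season': u'1', 'DVD_episodenumber': u'3.0',
              'SeasonNumber': u'1', 'EpisodeNumber': u'4'}
    use_dvd = None is not cur_ep['DVD_season'] and None is not cur_ep['DVD_episodenumber']
    if use_dvd:
        elem_seasnum, elem_epno = cur_ep['DVD_season'], cur_ep['DVD_episodenumber']
    else:
        elem_seasnum, elem_epno = cur_ep['SeasonNumber'], cur_ep['EpisodeNumber']
    print((elem_seasnum, elem_epno))  # (u'1', u'3.0')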
@@ -895,10 +890,16 @@ class Tvdb:
         """Handles tvdb_instance['seriesname'] calls.
         The dict index should be the show id
         """
+        arg = None
+        if isinstance(key, tuple) and 2 == len(key):
+            key, arg = key
+            if not isinstance(arg, bool):
+                arg = None
+
         if isinstance(key, (int, long)):
             # Item is integer, treat as show id
             if key not in self.shows:
-                self._get_show_data(key, self.config['language'], True)
+                self._get_show_data(key, self.config['language'], (True, arg)[arg is not None])
             return None if key not in self.shows else self.shows[key]
 
         key = str(key).lower()
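The indexer instance now also accepts a (show_id, bool) tuple key, with the bool steering whether episode data is fetched; this is what lets callers later in this commit write s = t[self.indexer_id, False] to fetch series info only. The (x, y)[cond] construct is a two-tuple indexed by a bool (False selects 0, True selects 1). A sketch of just that selection logic (the Tvdb instance and show id in the comments are assumed/invented):

    # t = Tvdb(); show = t[80348]         # int key: full fetch, episodes included
    #             info = t[80348, False]  # tuple key: series info only

    for arg in (None, False, True):
        get_ep_info = (True, arg)[arg is not None]  # default to True when no flag given
        print((arg, get_ep_info))  # (None, True), (False, False), (True, True)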
@@ -657,10 +657,16 @@ class TVRage:
         """Handles tvrage_instance['seriesname'] calls.
         The dict index should be the show id
         """
+        arg = None
+        if isinstance(key, tuple) and 2 == len(key):
+            key, arg = key
+            if not isinstance(arg, bool):
+                arg = None
+
         if isinstance(key, (int, long)):
             # Item is integer, treat as show id
             if key not in self.shows:
-                self._getShowData(key, True)
+                self._getShowData(key, (True, arg)[arg is not None])
             return None if key not in self.shows else self.shows[key]
 
         key = key.lower()
@@ -89,7 +89,6 @@ background_mapping_task = None
 showList = None
 UPDATE_SHOWS_ON_START = False
 SHOW_UPDATE_HOUR = 3
-ALLOW_INCOMPLETE_SHOWDATA = False
 
 providerList = []
 newznabProviderList = []
@@ -514,7 +513,7 @@ def initialize(consoleLogging=True):
         PLEX_UPDATE_LIBRARY, PLEX_SERVER_HOST, PLEX_HOST, PLEX_USERNAME, PLEX_PASSWORD, \
         USE_TRAKT, TRAKT_CONNECTED_ACCOUNT, TRAKT_ACCOUNTS, TRAKT_MRU, TRAKT_VERIFY, TRAKT_REMOVE_WATCHLIST, TRAKT_TIMEOUT, TRAKT_USE_WATCHLIST, TRAKT_METHOD_ADD, TRAKT_START_PAUSED, traktCheckerScheduler, TRAKT_SYNC, TRAKT_DEFAULT_INDEXER, TRAKT_REMOVE_SERIESLIST, TRAKT_UPDATE_COLLECTION, \
         BACKLOG_FREQUENCY, DEFAULT_BACKLOG_FREQUENCY, MIN_BACKLOG_FREQUENCY, MAX_BACKLOG_FREQUENCY, BACKLOG_STARTUP, SKIP_REMOVED_FILES, \
-        showUpdateScheduler, __INITIALIZED__, LAUNCH_BROWSER, TRASH_REMOVE_SHOW, TRASH_ROTATE_LOGS, HOME_SEARCH_FOCUS, SORT_ARTICLE, showList, loadingShowList, UPDATE_SHOWS_ON_START, SHOW_UPDATE_HOUR, ALLOW_INCOMPLETE_SHOWDATA, \
+        showUpdateScheduler, __INITIALIZED__, LAUNCH_BROWSER, TRASH_REMOVE_SHOW, TRASH_ROTATE_LOGS, HOME_SEARCH_FOCUS, SORT_ARTICLE, showList, loadingShowList, UPDATE_SHOWS_ON_START, SHOW_UPDATE_HOUR, \
         NEWZNAB_DATA, INDEXER_DEFAULT, INDEXER_TIMEOUT, USENET_RETENTION, TORRENT_DIR, \
         QUALITY_DEFAULT, FLATTEN_FOLDERS_DEFAULT, SUBTITLES_DEFAULT, STATUS_DEFAULT, WANTED_BEGIN_DEFAULT, WANTED_LATEST_DEFAULT, RECENTSEARCH_STARTUP, \
         GROWL_NOTIFY_ONSNATCH, GROWL_NOTIFY_ONDOWNLOAD, GROWL_NOTIFY_ONSUBTITLEDOWNLOAD, TWITTER_NOTIFY_ONSNATCH, TWITTER_NOTIFY_ONDOWNLOAD, TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD, \
@@ -666,7 +665,6 @@ def initialize(consoleLogging=True):
     UPDATE_SHOWS_ON_START = bool(check_setting_int(CFG, 'General', 'update_shows_on_start', 0))
     SHOW_UPDATE_HOUR = check_setting_int(CFG, 'General', 'show_update_hour', 3)
     SHOW_UPDATE_HOUR = minimax(SHOW_UPDATE_HOUR, 3, 0, 23)
-    ALLOW_INCOMPLETE_SHOWDATA = bool(check_setting_int(CFG, 'General', 'allow_incomplete_showdata', 0))
 
     TRASH_REMOVE_SHOW = bool(check_setting_int(CFG, 'General', 'trash_remove_show', 0))
     TRASH_ROTATE_LOGS = bool(check_setting_int(CFG, 'General', 'trash_rotate_logs', 0))
@@ -1524,7 +1522,6 @@ def save_config():
     new_config['General']['launch_browser'] = int(LAUNCH_BROWSER)
     new_config['General']['update_shows_on_start'] = int(UPDATE_SHOWS_ON_START)
     new_config['General']['show_update_hour'] = int(SHOW_UPDATE_HOUR)
-    new_config['General']['allow_incomplete_showdata'] = int(ALLOW_INCOMPLETE_SHOWDATA)
     new_config['General']['trash_remove_show'] = int(TRASH_REMOVE_SHOW)
     new_config['General']['trash_rotate_logs'] = int(TRASH_ROTATE_LOGS)
     new_config['General']['home_search_focus'] = int(HOME_SEARCH_FOCUS)
@@ -296,7 +296,7 @@ class QueueItemAdd(ShowQueueItem):
             logger.log(u'' + str(sickbeard.indexerApi(self.indexer).name) + ': ' + repr(lINDEXER_API_PARMS))
 
             t = sickbeard.indexerApi(self.indexer).indexer(**lINDEXER_API_PARMS)
-            s = t[self.indexer_id]
+            s = t[self.indexer_id, False]
 
             # this usually only happens if they have an NFO in their show dir which gave us a Indexer ID that has no proper english version of the show
             if getattr(s, 'seriesname', None) is None:
@@ -307,14 +307,6 @@ class QueueItemAdd(ShowQueueItem):
                           (self.showDir, sickbeard.indexerApi(self.indexer).name))
                 self._finishEarly()
                 return
-            # if the show has no episodes/seasons
-            if not sickbeard.ALLOW_INCOMPLETE_SHOWDATA and not s:
-                msg = 'Show %s is on %s but contains no season/episode data. Only the show folder was created.'\
-                      % (s['seriesname'], sickbeard.indexerApi(self.indexer).name)
-                logger.log(msg, logger.ERROR)
-                ui.notifications.error('Unable to add show', msg)
-                self._finishEarly()
-                return
         except Exception as e:
             logger.log('Unable to find show ID:%s on Indexer: %s' % (self.indexer_id, sickbeard.indexerApi(self.indexer).name),
                        logger.ERROR)
@@ -559,6 +559,8 @@ class TVShow(object):
         if self.dvdorder != 0:
             lINDEXER_API_PARMS['dvdorder'] = True
 
+        logger.log('%s: Loading all episodes from %s..' % (self.indexerid, sickbeard.indexerApi(self.indexer).name))
+
         try:
             t = sickbeard.indexerApi(self.indexer).indexer(**lINDEXER_API_PARMS)
             showObj = t[self.indexerid]
@@ -567,8 +569,6 @@ class TVShow(object):
                        (sickbeard.indexerApi(self.indexer).name, sickbeard.indexerApi(self.indexer).name), logger.ERROR)
             return None
 
-        logger.log('%s: Loading all episodes from %s..' % (self.indexerid, sickbeard.indexerApi(self.indexer).name))
-
         scannedEps = {}
 
         sql_l = []
@@ -912,7 +912,7 @@ class TVShow(object):
         else:
             t = tvapi
 
-        myEp = t[self.indexerid]
+        myEp = t[self.indexerid, False]
         if None is myEp:
             logger.log('Show not found (maybe even removed?)', logger.WARNING)
             return False
@@ -931,7 +931,7 @@ class TVShow(object):
         self.imdbid = getattr(myEp, 'imdb_id', '')
 
         if getattr(myEp, 'airs_dayofweek', None) is not None and getattr(myEp, 'airs_time', None) is not None:
-            self.airs = myEp["airs_dayofweek"] + " " + myEp["airs_time"]
+            self.airs = ('%s %s' % (myEp['airs_dayofweek'], myEp['airs_time'])).strip()
 
         if getattr(myEp, 'firstaired', None) is not None:
             self.startyear = int(str(myEp["firstaired"]).split('-')[0])
@@ -1076,15 +1076,15 @@ class TVShow(object):
         for path, dirs, files in ek.ek(os.walk, image_cache_dir):
             for filename in ek.ek(fnmatch.filter, files, '%s.*' % self.indexerid):
                 cache_file = ek.ek(os.path.join, path, filename)
-        logger.log('Attempt to %s cache file %s' % (action, cache_file))
-        try:
-            if sickbeard.TRASH_REMOVE_SHOW:
-                send2trash(cache_file)
-            else:
-                os.remove(cache_file)
+                logger.log('Attempt to %s cache file %s' % (action, cache_file))
+                try:
+                    if sickbeard.TRASH_REMOVE_SHOW:
+                        send2trash(cache_file)
+                    else:
+                        os.remove(cache_file)
 
-        except OSError as e:
-            logger.log('Unable to %s %s: %s / %s' % (action, cache_file, repr(e), str(e)), logger.WARNING)
+                except OSError as e:
+                    logger.log('Unable to %s %s: %s / %s' % (action, cache_file, repr(e), str(e)), logger.WARNING)
 
         # remove entire show folder
         if full:
@@ -1771,15 +1771,7 @@ class TVEpisode(object):
                 self.deleteEpisode()
             return
 
-        if not sickbeard.ALLOW_INCOMPLETE_SHOWDATA and None is getattr(myEp, 'episodename', None):
-            logger.log('This episode (%s - %sx%s) has no name on %s' %
-                       (self.show.name, season, episode, sickbeard.indexerApi(self.indexer).name))
-            # if I'm incomplete on TVDB but I once was complete then just delete myself from the DB for now
-            if -1 != self.indexerid:
-                self.deleteEpisode()
-            return False
-
-        if None is getattr(myEp, 'absolute_number', None):
+        if getattr(myEp, 'absolute_number', None) in (None, ''):
             logger.log('This episode (%s - %sx%s) has no absolute number on %s' %
                        (self.show.name, season, episode, sickbeard.indexerApi(self.indexer).name), logger.DEBUG)
         else:
@@ -1827,8 +1819,7 @@ class TVEpisode(object):
         self.indexerid = getattr(myEp, 'id', None)
         if None is self.indexerid:
             logger.log('Failed to retrieve ID from %s' % sickbeard.indexerApi(self.indexer).name, logger.ERROR)
-            if -1 != self.indexerid:
-                self.deleteEpisode()
+            self.deleteEpisode()
             return False
 
         # don't update show status if show dir is missing, unless it's missing on purpose
@@ -2060,42 +2051,26 @@ class TVEpisode(object):
             logger.log('%s: Not creating SQL queue - record is not dirty' % self.show.indexerid, logger.DEBUG)
             return
 
-        myDB = db.DBConnection()
-        rows = myDB.select(
-            'SELECT episode_id FROM tv_episodes WHERE showid = ? AND indexer=? AND season = ? AND episode = ?',
-            [self.show.indexerid, self.show.indexer, self.season, self.episode])
-
-        epID = None
-        if rows:
-            epID = int(rows[0]['episode_id'])
-
         self.dirty = False
-        if epID:
-            # use a custom update method to get the data into the DB for existing records.
-            return [
-                'UPDATE tv_episodes SET indexerid = ?, indexer = ?, name = ?, description = ?, subtitles = ?, '
-                'subtitles_searchcount = ?, subtitles_lastsearch = ?, airdate = ?, hasnfo = ?, hastbn = ?, status = ?, '
-                'location = ?, file_size = ?, release_name = ?, is_proper = ?, showid = ?, season = ?, episode = ?, '
-                'absolute_number = ?, version = ?, release_group = ? WHERE episode_id = ?',
-                [self.indexerid, self.indexer, self.name, self.description, ",".join([sub for sub in self.subtitles]),
-                 self.subtitles_searchcount, self.subtitles_lastsearch, self.airdate.toordinal(), self.hasnfo,
-                 self.hastbn,
-                 self.status, self.location, self.file_size, self.release_name, self.is_proper, self.show.indexerid,
-                 self.season, self.episode, self.absolute_number, self.version, self.release_group, epID]]
-        else:
-            # use a custom insert method to get the data into the DB.
-            return [
-                'INSERT OR IGNORE INTO tv_episodes (episode_id, indexerid, indexer, name, description, subtitles, '
-                'subtitles_searchcount, subtitles_lastsearch, airdate, hasnfo, hastbn, status, location, file_size, '
-                'release_name, is_proper, showid, season, episode, absolute_number, version, release_group) VALUES '
-                '((SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?)'
-                ',?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);',
-                [self.show.indexerid, self.season, self.episode, self.indexerid, self.indexer, self.name,
-                 self.description,
-                 ",".join([sub for sub in self.subtitles]), self.subtitles_searchcount, self.subtitles_lastsearch,
-                 self.airdate.toordinal(), self.hasnfo, self.hastbn, self.status, self.location, self.file_size,
-                 self.release_name, self.is_proper, self.show.indexerid, self.season, self.episode,
-                 self.absolute_number, self.version, self.release_group]]
+        return [
+            'INSERT OR REPLACE INTO tv_episodes (episode_id, indexerid, indexer, name, description, subtitles, '
+            'subtitles_searchcount, subtitles_lastsearch, airdate, hasnfo, hastbn, status, location, file_size, '
+            'release_name, is_proper, showid, season, episode, absolute_number, version, release_group, '
+            'scene_absolute_number, scene_season, scene_episode) VALUES '
+            '((SELECT episode_id FROM tv_episodes WHERE indexer = ? AND showid = ? AND season = ? AND episode = ?)'
+            ',?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,'
+            '(SELECT scene_absolute_number FROM tv_episodes WHERE indexer = ? AND showid = ? AND season = ? AND episode = ?),'
+            '(SELECT scene_season FROM tv_episodes WHERE indexer = ? AND showid = ? AND season = ? AND episode = ?),'
+            '(SELECT scene_episode FROM tv_episodes WHERE indexer = ? AND showid = ? AND season = ? AND episode = ?));',
+            [self.show.indexer, self.show.indexerid, self.season, self.episode, self.indexerid, self.indexer, self.name,
+             self.description,
+             ",".join([sub for sub in self.subtitles]), self.subtitles_searchcount, self.subtitles_lastsearch,
+             self.airdate.toordinal(), self.hasnfo, self.hastbn, self.status, self.location, self.file_size,
+             self.release_name, self.is_proper, self.show.indexerid, self.season, self.episode,
+             self.absolute_number, self.version, self.release_group,
+             self.show.indexer, self.show.indexerid, self.season, self.episode,
+             self.show.indexer, self.show.indexerid, self.season, self.episode,
+             self.show.indexer, self.show.indexerid, self.season, self.episode]]
 
     def saveToDB(self, forceSave=False):
         """
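The single INSERT OR REPLACE collapses the old select-then-update-or-insert logic into one statement. Because REPLACE deletes the conflicting row before inserting, episode_id and the scene_* columns are re-read from the old row via correlated subselects; on a first insert the subselects return NULL, so the INTEGER PRIMARY KEY self-assigns. A minimal sqlite3 sketch of that behaviour, with the table trimmed to a few columns (names and values invented):

    import sqlite3

    db = sqlite3.connect(':memory:')
    db.execute('CREATE TABLE tv_episodes (episode_id INTEGER PRIMARY KEY,'
               ' indexer, showid, season, episode, name, scene_season)')
    db.execute("INSERT INTO tv_episodes VALUES (7, 1, 123, 2, 5, 'old', 3)")

    # subselects in VALUES are evaluated first, so the old row's values survive
    db.execute(
        'INSERT OR REPLACE INTO tv_episodes (episode_id, indexer, showid, season, episode, name, scene_season) VALUES '
        '((SELECT episode_id FROM tv_episodes WHERE indexer = ? AND showid = ? AND season = ? AND episode = ?),'
        '?,?,?,?,?,'
        '(SELECT scene_season FROM tv_episodes WHERE indexer = ? AND showid = ? AND season = ? AND episode = ?))',
        [1, 123, 2, 5, 1, 123, 2, 5, 'new', 1, 123, 2, 5])
    print(db.execute('SELECT * FROM tv_episodes').fetchall())
    # [(7, 1, 123, 2, 5, 'new', 3)] -> episode_id and scene_season are kept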
@@ -2368,11 +2368,11 @@ class NewHomeAddShows(Home):
                 logger.log('Fetching show using id: %s (%s) from tv datasource %s' % (
                     search_id, search_term, sickbeard.indexerApi(indexer).name), logger.DEBUG)
                 results.setdefault('tt' in search_id and 3 or indexer, []).extend(
-                    [{'id': indexer_id, 'seriesname': t[indexer_id]['seriesname'],
-                      'firstaired': t[indexer_id]['firstaired'], 'network': t[indexer_id]['network'],
-                      'overview': t[indexer_id]['overview'],
-                      'genres': '' if not t[indexer_id]['genre'] else
-                      t[indexer_id]['genre'].lower().strip('|').replace('|', ', '),
+                    [{'id': indexer_id, 'seriesname': t[indexer_id, False]['seriesname'],
+                      'firstaired': t[indexer_id, False]['firstaired'], 'network': t[indexer_id, False]['network'],
+                      'overview': t[indexer_id, False]['overview'],
+                      'genres': '' if not t[indexer_id, False]['genre'] else
+                      t[indexer_id, False]['genre'].lower().strip('|').replace('|', ', '),
                      }])
                 break
             else:
@@ -4305,7 +4305,7 @@ class ConfigGeneral(Config):
         return m.hexdigest()
 
     def saveGeneral(self, log_dir=None, web_port=None, web_log=None, encryption_version=None, web_ipv6=None,
-                    update_shows_on_start=None, show_update_hour=None, allow_incomplete_showdata=None,
+                    update_shows_on_start=None, show_update_hour=None,
                     trash_remove_show=None, trash_rotate_logs=None, update_frequency=None, launch_browser=None, web_username=None,
                    use_api=None, api_key=None, indexer_default=None, timezone_display=None, cpu_preset=None, file_logging_preset=None,
                    web_password=None, version_notify=None, enable_https=None, https_cert=None, https_key=None,
@@ -4327,7 +4327,6 @@ class ConfigGeneral(Config):
 
         sickbeard.UPDATE_SHOWS_ON_START = config.checkbox_to_value(update_shows_on_start)
         sickbeard.SHOW_UPDATE_HOUR = config.minimax(show_update_hour, 3, 0, 23)
-        sickbeard.ALLOW_INCOMPLETE_SHOWDATA = config.checkbox_to_value(allow_incomplete_showdata)
         sickbeard.TRASH_REMOVE_SHOW = config.checkbox_to_value(trash_remove_show)
         sickbeard.TRASH_ROTATE_LOGS = config.checkbox_to_value(trash_rotate_logs)
         config.change_UPDATE_FREQUENCY(update_frequency)