Mirror of https://github.com/SickGear/SickGear.git
Synced 2024-12-11 05:33:37 +00:00
Fixed bug in TVRage API that was not properly handling malformed airdates for shows.
Fixed bug in metadata function that was throwing attribute errors when checking for banner images for shows from TVRage.
Converted some list comprehensions to generators to improve speed and lower memory usage of SB.
This commit is contained in:
parent 8742b5825f
commit 6a7906eeb1

5 changed files with 73 additions and 32 deletions
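
The list-comprehension-to-generator conversion mentioned in the message above follows a simple pattern; the sketch below uses hypothetical rows (not data from this repository) to show the before/after shape.

# Hypothetical rows, shaped like the indexer search results handled in this commit.
rows = [{'id': '71663', 'seriesname': 'Show A'},
        {'id': '73739', 'seriesname': 'Show B'}]

# Before: a list comprehension materializes the whole list immediately.
ids_before = [int(row['id']) for row in rows]

# After: a generator expression defers the work; wrap it in list() only
# when a concrete list is actually needed.
ids_after = list(int(row['id']) for row in rows)

assert ids_before == ids_after == [71663, 73739]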

@@ -47,10 +47,6 @@ from tvdb_exceptions import (tvdb_error, tvdb_userabort, tvdb_shownotfound,
 # Cached Session Handler
 from lib.httpcache import CachingHTTPAdapter

-s = requests.Session()
-s.mount('http://', CachingHTTPAdapter())
-
-
 def log():
     return logging.getLogger("tvdb_api")

@@ -518,12 +514,36 @@ class Tvdb:

         return os.path.join(tempfile.gettempdir(), "tvdb_api-%s" % (uid))

+    def retry(ExceptionToCheck, tries=4, delay=3, backoff=2):
+        def deco_retry(f):
+            def f_retry(*args, **kwargs):
+                mtries, mdelay = tries, delay
+                while mtries > 0:
+                    try:
+                        return f(*args, **kwargs)
+                    except ExceptionToCheck, e:
+                        print "%s, Retrying in %d seconds..." % (str(e), mdelay)
+                        time.sleep(mdelay)
+                        mtries -= 1
+                        mdelay *= backoff
+                        lastException = e
+                raise lastException
+
+            return f_retry  # true decorator
+
+        return deco_retry
+
+    @retry(tvdb_error, tries=4)
     def _loadUrl(self, url, params=None, language=None):
         try:
             log().debug("Retrieving URL %s" % url)

             # get response from TVDB
             if self.config['cache_enabled']:
+                s = requests.Session()
+                s.mount('http://', CachingHTTPAdapter())
+
                 resp = s.get(url, params=params)
             else:
                 resp = requests.get(url, params=params)

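The retry decorator added above uses Python 2 syntax (the `except ExceptionToCheck, e:` clause and the `print` statement), matching the rest of the module. For reference, a minimal Python 3 sketch of the same exponential-backoff pattern, as a standalone illustration rather than the committed code, could look like this:

import time
import logging


def retry(exception_to_check, tries=4, delay=3, backoff=2):
    """Retry the wrapped callable on exception_to_check, doubling the delay each time."""
    def deco_retry(func):
        def wrapper(*args, **kwargs):
            mtries, mdelay = tries, delay
            last_exception = None
            while mtries > 0:
                try:
                    return func(*args, **kwargs)
                except exception_to_check as e:
                    logging.warning("%s, retrying in %d seconds...", e, mdelay)
                    time.sleep(mdelay)
                    mtries -= 1
                    mdelay *= backoff
                    last_exception = e
            raise last_exception
        return wrapper
    return deco_retry

Applied as, say, @retry(IOError, tries=3, delay=1), each failed attempt sleeps and doubles the delay before the next try; once the attempts are exhausted the last exception is re-raised.
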
@@ -629,7 +649,7 @@ class Tvdb:
         log().debug("Searching for show %s" % series)
         self.config['params_getSeries']['seriesname'] = series
         seriesEt = self._getetsrc(self.config['url_getSeries'], self.config['params_getSeries'])
-        allSeries = [dict((s.tag.lower(), s.text) for s in x.getchildren()) for x in seriesEt]
+        allSeries = list(dict((s.tag.lower(), s.text) for s in x.getchildren()) for x in seriesEt)

         return allSeries

@@ -869,20 +889,17 @@ class Tvdb:
         already been grabbed), or grabs all episodes and returns
         the correct SID.
         """
-        sid = set()
         if name in self.corrections:
             log().debug('Correcting %s to %s' % (name, self.corrections[name]))
-            sid = self.corrections[name]
+            return self.corrections[name]
         else:
             log().debug('Getting show %s' % (name))
             selected_series = self._getSeries(name)
             if isinstance(selected_series, dict):
                 selected_series = [selected_series]
-            [sid.add(int(x['id'])) for x in selected_series if
-             self._getShowData(int(x['id']), self.config['language'], seriesSearch=True)]
-            [self.corrections.update({x['seriesname']: int(x['id'])}) for x in selected_series]
-
-        return sid
+            sids = list(int(x['id']) for x in selected_series if self._getShowData(int(x['id']), self.config['language'], seriesSearch=True))
+            self.corrections.update(dict((x['seriesname'], int(x['id'])) for x in selected_series))
+            return sids

     def __getitem__(self, key):
         """Handles tvdb_instance['seriesname'] calls.

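The rewritten _nameToSid above caches resolved ids in self.corrections and only keeps ids whose show data loads successfully. A simplified, hypothetical sketch of that memoization pattern (the helper names `search` and `load_show` are illustrative, not part of this module):

corrections = {}  # name -> list of resolved series ids


def name_to_sids(name, search, load_show):
    """search(name) yields dicts with 'id' and 'seriesname';
    load_show(sid) returns a truthy value when the show's data was fetched."""
    if name in corrections:
        return corrections[name]
    found = list(search(name))
    sids = list(int(x['id']) for x in found if load_show(int(x['id'])))
    corrections[name] = sids
    return sids
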
@@ -896,7 +913,7 @@ class Tvdb:

         key = key.lower() # make key lower case
         sids = self._nameToSid(key)
-        return [self.shows[sid] for sid in sids]
+        return list(self.shows[sid] for sid in sids)

     def __repr__(self):
         return str(self.shows)

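With __getitem__ now returning list(self.shows[sid] for sid in sids), indexing the API object by name yields one entry per matched series. A hypothetical usage sketch (the show name and the printed key are illustrative only):

t = Tvdb()
for show in t['scrubs']:          # a list: one show object per resolved series id
    print(show['seriesname'])
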
@@ -29,7 +29,6 @@ try:
 except ImportError:
     import xml.etree.ElementTree as ElementTree

-from collections import defaultdict
 from lib.dateutil.parser import parse
 from lib import requests

@@ -39,8 +38,6 @@ from tvrage_exceptions import (tvrage_error, tvrage_userabort, tvrage_shownotfou
 # Cached Session Handler
 from lib.httpcache import CachingHTTPAdapter

-s = requests.Session()
-s.mount('http://', CachingHTTPAdapter())

 def log():
     return logging.getLogger("tvrage_api")

@@ -343,12 +340,35 @@ class TVRage:

         return os.path.join(tempfile.gettempdir(), "tvrage_api-%s" % (uid))

+    def retry(ExceptionToCheck, tries=4, delay=3, backoff=2):
+        def deco_retry(f):
+            def f_retry(*args, **kwargs):
+                mtries, mdelay = tries, delay
+                while mtries > 0:
+                    try:
+                        return f(*args, **kwargs)
+                    except ExceptionToCheck, e:
+                        print "%s, Retrying in %d seconds..." % (str(e), mdelay)
+                        time.sleep(mdelay)
+                        mtries -= 1
+                        mdelay *= backoff
+                        lastException = e
+                raise lastException
+
+            return f_retry  # true decorator
+
+        return deco_retry
+
+    @retry(tvrage_error, tries=4)
     def _loadUrl(self, url, params=None):
         try:
             log().debug("Retrieving URL %s" % url)

             # get response from TVRage
             if self.config['cache_enabled']:
+                s = requests.Session()
+                s.mount('http://', CachingHTTPAdapter())
+
                 resp = s.get(url, params=params)
             else:
                 resp = requests.get(url, params=params)

@@ -396,7 +416,8 @@ class TVRage:

             if elm.tag in 'firstaired':
                 try:
-                    if elm.text is "0000-00-00": elm.text = str(dt.date.fromordinal(1))
+                    if elm.text in "0000-00-00":
+                        elm.text = str(dt.date.fromordinal(1))
                     elm.text = re.sub("([-]0{2}){1,}", "", elm.text)
                     fixDate = parse(elm.text, fuzzy=True).date()
                     elm.text = fixDate.strftime("%Y-%m-%d")

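The change above splits the one-line placeholder check and then strips empty "-00" components before fuzzy parsing. The same normalization as a standalone sketch, assuming the dateutil package is importable directly (this module imports it from lib.dateutil); the sample input is hypothetical:

import re
import datetime as dt
from dateutil.parser import parse


def normalize_airdate(text):
    """Return an ISO-formatted date string, mapping placeholder dates to 0001-01-01."""
    if not text or text == "0000-00-00":
        return str(dt.date.fromordinal(1))
    cleaned = re.sub("([-]0{2}){1,}", "", text)   # "2013-00-00" -> "2013"
    return parse(cleaned, fuzzy=True).date().strftime("%Y-%m-%d")

print(normalize_airdate("2013-00-00"))  # year preserved; month/day come from dateutil's default
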
@@ -487,7 +508,7 @@ class TVRage:
         log().debug("Searching for show %s" % series)
         self.config['params_getSeries']['show'] = series
         seriesEt = self._getetsrc(self.config['url_getSeries'], self.config['params_getSeries'])
-        allSeries = [dict((s.tag.lower(),s.text) for s in x.getchildren()) for x in seriesEt]
+        allSeries = list(dict((s.tag.lower(),s.text) for s in x.getchildren()) for x in seriesEt)

         return allSeries

@@ -589,19 +610,17 @@ class TVRage:
         already been grabbed), or grabs all episodes and returns
         the correct SID.
         """
-        sid = set()
         if name in self.corrections:
             log().debug('Correcting %s to %s' % (name, self.corrections[name]) )
-            sid = self.corrections[name]
+            return self.corrections[name]
         else:
             log().debug('Getting show %s' % (name))
-            selected_series = self._getSeries( name )
+            selected_series = self._getSeries(name)
             if isinstance(selected_series, dict):
                 selected_series = [selected_series]
-            [sid.add(int(x['id'])) for x in selected_series if self._getShowData(int(x['id']), seriesSearch=True)]
-            [self.corrections.update({x['seriesname']:int(x['id'])}) for x in selected_series]
-
-        return sid
+            sids = list(int(x['id']) for x in selected_series if self._getShowData(int(x['id']), seriesSearch=True))
+            self.corrections.update(dict((x['seriesname'], int(x['id'])) for x in selected_series))
+            return sids

     def __getitem__(self, key):
         """Handles tvrage_instance['seriesname'] calls.

@@ -615,7 +634,7 @@ class TVRage:

         key = key.lower() # make key lower case
         sids = self._nameToSid(key)
-        return [self.shows[sid] for sid in sids]
+        return list(self.shows[sid] for sid in sids)

     def __repr__(self):
         return str(self.shows)

@@ -874,6 +874,10 @@ class GenericMetadata():
                            show_obj.indexer).name + ", not downloading images: " + ex(e), logger.ERROR)
             return result

+        # if we have no season banners then just finish
+        if getattr(indexer_show_obj, '_banners', None) is None:
+            return result
+
         # if we have no season banners then just finish
         if 'season' not in indexer_show_obj['_banners'] or 'seasonwide' not in indexer_show_obj['_banners']['season']:
             return result

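The new guard avoids the AttributeError reported in the commit message when a TVRage show object exposes no _banners attribute at all. The pattern in isolation, with a made-up stub object:

class ShowStub(object):
    """Stand-in for an indexer show object that has no banner data."""
    pass

show = ShowStub()

banners = getattr(show, '_banners', None)
if banners is None:
    print("no banner data at all, nothing to download")
elif 'season' not in banners or 'seasonwide' not in banners['season']:
    print("no season-wide banners, nothing to download")
else:
    print("season banners:", banners['season']['seasonwide'])
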
@@ -1424,13 +1424,13 @@ class TVEpisode(object):
         self.description = getattr(myEp, 'overview', "")

         firstaired = getattr(myEp, 'firstaired', None)
-        if firstaired is None or firstaired is "0000-00-00":
+        if firstaired is None or firstaired in "0000-00-00":
             firstaired = str(datetime.date.fromordinal(1))
         rawAirdate = [int(x) for x in firstaired.split("-")]

         try:
             self.airdate = datetime.date(rawAirdate[0], rawAirdate[1], rawAirdate[2])
-        except ValueError:
+        except (ValueError, IndexError):
             logger.log(u"Malformed air date retrieved from " + sickbeard.indexerApi(
                 self.indexer).name + " (" + self.show.name + " - " + str(season) + "x" + str(episode) + ")",
                 logger.ERROR)

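The widened except (ValueError, IndexError) above covers both non-numeric components and dates with fewer than three parts. A compact sketch of that parsing path with hypothetical inputs:

import datetime


def parse_airdate(firstaired):
    """Parse 'YYYY-MM-DD', treating missing or placeholder values as the 0001-01-01 sentinel."""
    if firstaired is None or firstaired == "0000-00-00":
        firstaired = str(datetime.date.fromordinal(1))
    try:
        raw = [int(x) for x in firstaired.split("-")]
        return datetime.date(raw[0], raw[1], raw[2])
    except (ValueError, IndexError):
        return None   # malformed air date; the caller logs and skips it

print(parse_airdate("2013-04"))       # None: only two components raises IndexError
print(parse_airdate("2013-04-21"))    # 2013-04-21
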
@@ -29,7 +29,7 @@ import random
 import locale
 import logging
 import itertools
-import string
+import operator

 from Cheetah.Template import Template
 import cherrypy.lib

@@ -1991,13 +1991,14 @@ class NewHomeAddShows:
                     search = [search]

                 # add search results
-                results += [[sickbeard.indexerApi(indexer).name, int(sickbeard.indexerApi(indexer).config['id']),
-                             sickbeard.indexerApi(indexer).config["show_url"], int(x['id']), x['seriesname'],
-                             x['firstaired']] for x in search if x['firstaired']]
+                results += list([sickbeard.indexerApi(indexer).name, int(sickbeard.indexerApi(indexer).config['id']),
+                                 sickbeard.indexerApi(indexer).config["show_url"], int(x['id']), x['seriesname'],
+                                 x['firstaired']] for x in search if x['firstaired'])
             except:
                 continue

-        # remove duplicates
+        # remove duplicates and sort by firstaired
+        results = sorted(results, reverse=True, key=operator.itemgetter(5))
         results = list(results for results, _ in itertools.groupby(results))

         lang_id = sickbeard.indexerApi().config['langabbv_to_id'][lang]

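The search-results block above sorts by the firstaired column (index 5) and then drops adjacent duplicates with itertools.groupby, which only collapses equal items that sit next to each other. A self-contained sketch of that dedupe-and-sort step with hypothetical rows:

import itertools
import operator

# Hypothetical rows shaped like the results built above:
# [indexer name, indexer id, show_url, series id, series name, firstaired]
results = [
    ["TVDB", 1, "http://example/", 101, "Show A", "2010-01-01"],
    ["TVDB", 1, "http://example/", 102, "Show B", "2012-05-05"],
    ["TVDB", 1, "http://example/", 101, "Show A", "2010-01-01"],
]

# Sort newest-first by firstaired, then remove adjacent duplicate rows.
results = sorted(results, reverse=True, key=operator.itemgetter(5))
results = list(r for r, _ in itertools.groupby(results))
print(results)   # Show B first, and the duplicate Show A row collapsed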