Mirror of https://github.com/SickGear/SickGear.git
Merge branch 'master' into develop
Commit 17a42dc188
9 changed files with 151 additions and 45 deletions
@ -9,6 +9,14 @@



### 0.16.6 (2018-05-14 01:00:00 UTC)

* Change improve tolerance to parse a release title with a badly placed episode name
* Change improve handling tvdb_api data when adding upcoming shows with unfilled data
* Change search only once per cycle for shows with multiple episodes that air on the same day
* Fix SpeedCD


### 0.16.5 (2018-05-07 21:15:00 UTC)

* Fix HTTP 422 error when using Plex Username and Password
@ -563,6 +563,10 @@ class Tvdb:

return os.path.join(tempfile.gettempdir(), 'tvdb_api-%s' % uid)

def _match_url_pattern(self, pattern, url):
if pattern in self.config:
return re.search('^%s$' % re.escape(self.config[pattern]).replace('\\%s', '[^/]+'), url)

@retry((tvdb_error, tvdb_tokenexpired))
def _load_url(self, url, params=None, language=None):
log().debug('Retrieving URL %s' % url)
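The new `_match_url_pattern` helper escapes a configured URL template, turns each escaped `%s` placeholder into a single-path-segment wildcard and anchors the result, so a request URL can be tested for an exact match against a config entry. A minimal standalone sketch of that idea (the config key and URLs below are illustrative, not taken from the project):

```python
import re

# Illustrative stand-in for the Tvdb config dict used by the method above.
config = {'url_seriesInfo': 'https://api.thetvdb.com/series/%s'}

def match_url_pattern(pattern, url):
    if pattern not in config:
        return None
    escaped = re.escape(config[pattern])
    # The project targets Python 2, where re.escape() also escapes '%' (hence the
    # '\%s' replace in the code above); on Python 3.7+ '%' is left alone, so this
    # sketch covers both spellings.
    escaped = escaped.replace('\\%s', '[^/]+').replace('%s', '[^/]+')
    return re.search('^%s$' % escaped, url)

print(bool(match_url_pattern('url_seriesInfo', 'https://api.thetvdb.com/series/123456')))     # True
print(bool(match_url_pattern('url_seriesInfo', 'https://api.thetvdb.com/series/123/extra')))  # False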
@ -583,7 +587,7 @@ class Tvdb:
session.headers.update({'Accept-Language': language})

resp = None
if re.search(re.escape(self.config['url_seriesInfo']).replace('%s', '.*'), url):
if self._match_url_pattern('url_seriesInfo', url):
self.show_not_found = False
self.not_found = False
try:
@ -595,8 +599,10 @@ class Tvdb:
sickbeard.THETVDB_V2_API_TOKEN = self.get_new_token()
raise tvdb_tokenexpired
elif 404 == e.response.status_code:
if re.search(re.escape(self.config['url_seriesInfo']).replace('%s', '.*'), url):
if self._match_url_pattern('url_seriesInfo', url):
self.show_not_found = True
elif self._match_url_pattern('url_epInfo', url):
resp = {'data': []}
self.not_found = True
elif 404 != e.response.status_code:
raise tvdb_error
@ -647,10 +653,16 @@ class Tvdb:
try:
src = self._load_url(url, params=params, language=language)
if isinstance(src, dict):
data = src['data'] or {}
if None is not src['data']:
data = src['data']
else:
data = {}
# data = src['data'] or {}
if isinstance(data, list):
data = data[0] or {}
if 1 > len(data.keys()):
if 0 < len(data):
data = data[0]
# data = data[0] or {}
if None is data or (isinstance(data, dict) and 1 > len(data.keys())):
raise ValueError
return src
except (KeyError, IndexError, Exception):
@ -912,9 +924,9 @@ class Tvdb:
episode_data = self._getetsrc(self.config['url_epInfo'] % (sid, page), language=language)
if None is episode_data:
raise tvdb_error('Exception retrieving episodes for show')
if None is not episode_data.get('data'):
if not getattr(self, 'not_found', False) and None is not episode_data.get('data'):
episodes.extend(episode_data['data'])
page = episode_data.get('links', {}).get('next')
page = episode_data.get('links', {}).get('next', None)

ep_map_keys = {'absolutenumber': u'absolute_number', 'airedepisodenumber': u'episodenumber',
'airedseason': u'seasonnumber', 'airedseasonid': u'seasonid',
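The guard added above stops episode pages from being accumulated once a 404 has flagged the show as missing, while the surrounding loop keeps following the API's links.next pointer. A simplified standalone sketch of that paging loop (the function names and callables here are placeholders, not the project's API):

```python
def fetch_all_episodes(load_page, is_not_found):
    """Sketch of a links.next paging loop with the added not_found guard.

    load_page(page) is assumed to return one decoded page of .../episodes?page=N,
    is_not_found() reports whether an earlier 404 marked the show as missing.
    """
    episodes, page = [], 1
    while page is not None:
        episode_data = load_page(page)
        if episode_data is None:
            raise ValueError('Exception retrieving episodes for show')
        # Only accumulate when the show exists and the page actually carried data.
        if not is_not_found() and episode_data.get('data') is not None:
            episodes.extend(episode_data['data'])
        page = episode_data.get('links', {}).get('next')
    return episodes
```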
@ -290,17 +290,29 @@ class GenericMetadata():
return None

def create_show_metadata(self, show_obj):
result = False
if self.show_metadata and show_obj and not self._has_show_metadata(show_obj):
logger.log(u"Metadata provider " + self.name + " creating show metadata for " + show_obj.name, logger.DEBUG)
return self.write_show_file(show_obj)
return False
logger.log('Metadata provider %s creating show metadata for %s' % (self.name, show_obj.name), logger.DEBUG)
try:
result = self.write_show_file(show_obj)
except sickbeard.indexer_error as e:
logger.log('Unable to find useful show metadata for %s on %s: %s' % (
self.name, sickbeard.indexerApi(show_obj.indexer).name, ex(e)), logger.WARNING)

return result

def create_episode_metadata(self, ep_obj):
result = False
if self.episode_metadata and ep_obj and not self._has_episode_metadata(ep_obj):
logger.log(u"Metadata provider " + self.name + " creating episode metadata for " + ep_obj.prettyName(),
logger.log('Metadata provider %s creating episode metadata for %s' % (self.name, ep_obj.prettyName()),
logger.DEBUG)
return self.write_ep_file(ep_obj)
return False
try:
result = self.write_ep_file(ep_obj)
except sickbeard.indexer_error as e:
logger.log('Unable to find useful episode metadata for %s on %s: %s' % (
self.name, sickbeard.indexerApi(ep_obj.show.indexer).name, ex(e)), logger.WARNING)

return result

def update_show_indexer_metadata(self, show_obj):
if self.show_metadata and show_obj and self._has_show_metadata(show_obj):
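With the try/except added above, a failing indexer lookup during metadata creation is now downgraded to a warning and the method reports False instead of raising. A hypothetical caller sketch, just to show the effect of the new return value (the loop and names below are illustrative, not code from this commit):

```python
def create_all_metadata(metadata_providers, show_obj, ep_obj):
    # Each provider call returns True/False; an indexer outage in one provider
    # no longer aborts the whole pass, it simply shows up as a False result.
    results = []
    for provider in metadata_providers:
        results.append(provider.create_show_metadata(show_obj))
        results.append(provider.create_episode_metadata(ep_obj))
    return all(results)
```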
@ -804,7 +816,7 @@ class GenericMetadata():
indexer_show_obj = t[show_obj.indexerid, False]
except (sickbeard.indexer_error, IOError) as e:
logger.log(u"Unable to look up show on " + sickbeard.indexerApi(
show_obj.indexer).name + ", not downloading images: " + ex(e), logger.ERROR)
show_obj.indexer).name + ", not downloading images: " + ex(e), logger.WARNING)
return None

if not self._valid_show(indexer_show_obj, show_obj):
@ -893,7 +905,7 @@ class GenericMetadata():
indexer_show_obj = t[show_obj.indexerid]
except (sickbeard.indexer_error, IOError) as e:
logger.log(u'Unable to look up show on ' + sickbeard.indexerApi(
show_obj.indexer).name + ', not downloading images: ' + ex(e), logger.ERROR)
show_obj.indexer).name + ', not downloading images: ' + ex(e), logger.WARNING)
return result

if not self._valid_show(indexer_show_obj, show_obj):
@ -674,10 +674,10 @@ class ParseResult(object):

@staticmethod
def _replace_ep_name_helper(e_i_n_n, n):
ep_regex = r'\W*%s\W*' % re.sub(r' ', r'\W', re.sub(r'[^a-zA-Z0-9 ]', r'\W?',
ep_regex = r'\W*%s(\W*)' % re.sub(r' ', r'\W', re.sub(r'[^a-zA-Z0-9 ]', r'\W?',
re.sub(r'\W+$', '', n.strip())))
if None is regex:
return re.sub(ep_regex, '', e_i_n_n, flags=re.I)
return re.sub(r'^\W+', '', re.sub(ep_regex, r'\1', e_i_n_n, flags=re.I))

er = trunc(len(re.findall(r'\w', ep_regex)) / 5)
try:
@ -685,7 +685,7 @@ class ParseResult(object):
me = min(3, me)
except (StandardError, Exception):
me = 3
return regex.sub(r'(%s){e<=%d}' % (ep_regex, (er, me)[er > me]), '', e_i_n_n, flags=regex.I | regex.B)
return re.sub(r'^\W+', '', regex.sub(r'(?:%s){e<=%d}' % (ep_regex, (er, me)[er > me]), r'\1', e_i_n_n, flags=regex.I | regex.B))

def get_extra_info_no_name(self):
extra_info_no_name = self.extra_info
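This is the change behind the changelog entry about badly placed episode names: the helper loosens the episode title into a tolerant pattern, keeps the trailing separator via a capture group so the surrounding quality tags rejoin cleanly, and, when the third-party regex module is available, allows a few character errors with its {e<=N} fuzzy syntax. A self-contained approximation (the error budget below is simplified from the code above, and the helper name is only for illustration):

```python
import re
try:
    import regex  # third-party 'regex' module with fuzzy-matching support
except ImportError:
    regex = None

def strip_episode_name(extra_info, ep_name):
    """Remove an episode name embedded anywhere in the extra-info part of a release title."""
    loose = re.sub(r'\W+$', '', ep_name.strip())       # drop trailing punctuation
    loose = re.sub(r'[^a-zA-Z0-9 ]', r'\\W?', loose)   # punctuation becomes an optional non-word char
    loose = loose.replace(' ', r'\W')                  # spaces match any separator
    ep_regex = r'\W*%s(\W*)' % loose                   # keep the trailing separator in group 1
    if regex is None:
        return re.sub(r'^\W+', '', re.sub(ep_regex, r'\1', extra_info, flags=re.I))
    # Allow roughly one error per five word characters, capped at three, and take the best match.
    errors = min(3, len(re.findall(r'\w', ep_regex)) // 5)
    return re.sub(r'^\W+', '',
                  regex.sub(r'(?:%s){e<=%d}' % (ep_regex, errors), r'\1', extra_info,
                            flags=regex.I | regex.B))

print(strip_episode_name('REPACK.Episode.302.720p.AMZN.WEBRip.DDP5.1.x264', 'Episode 302'))
# expected output: REPACK.720p.AMZN.WEBRip.DDP5.1.x264
```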
@ -884,9 +884,8 @@ class GenericProvider(object):
return results

searched_scene_season = None
search_list = []
for ep_obj in episodes:
if self.should_skip(log_warning=False):
break
# search cache for episode result
cache_result = self.cache.searchCache(ep_obj, manual_search)
if cache_result:
@ -911,7 +910,16 @@ class GenericProvider(object):
# get single episode search params
search_params = self._episode_strings(ep_obj)

search_list += [search_params]

search_done = []
for search_params in search_list:
if self.should_skip(log_warning=False):
break
for cur_param in search_params:
if cur_param in search_done:
continue
search_done += [cur_param]
item_list += self._search_provider(cur_param, search_mode=search_mode, epcount=len(episodes))
if self.should_skip():
break
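This restructuring (mirrored in the Newznab provider further below) first collects the search parameters for every episode, then runs each distinct parameter set once, which is how shows with several episodes airing on the same day stop triggering duplicate provider searches. A stripped-down sketch of the idea with placeholder callables, not the provider's real signature:

```python
def run_searches(episodes, build_params, search_provider):
    # Phase 1: build the per-episode search parameter lists up front.
    search_list = [build_params(ep_obj) for ep_obj in episodes]

    # Phase 2: run each unique parameter set exactly once.
    search_done, item_list = [], []
    for search_params in search_list:
        for cur_param in search_params:
            if cur_param in search_done:   # same-day episodes often yield identical params
                continue
            search_done.append(cur_param)
            item_list.extend(search_provider(cur_param))
    return item_list
```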
@ -1577,7 +1585,7 @@ class TorrentProvider(GenericProvider):
if not url:
return super(TorrentProvider, self)._authorised()

if getattr(self, 'username', None) and getattr(self, 'password', None):
if getattr(self, 'username', None) and getattr(self, 'password', None) and post_params.pop('login', True):
if not post_params:
post_params = dict(username=self.username, password=self.password)
elif isinstance(post_params, type({})):
@ -563,9 +563,8 @@ class NewznabProvider(generic.NZBProvider):
name_space = {}

searched_scene_season = s_mode = None
search_list = []
for ep_obj in episodes:
if self.should_skip(log_warning=False):
break
# skip if season already searched
if (s_mode or 'sponly' == search_mode) and 1 < len(episodes) \
and searched_scene_season == ep_obj.scene_season:
@ -598,7 +597,16 @@ class NewznabProvider(generic.NZBProvider):
else:
search_params = self._episode_strings(ep_obj)

search_list += [(search_params, needed, max_items)]

search_done = []
for (search_params, needed, max_items) in search_list:
if self.should_skip(log_warning=False):
break
for cur_param in search_params:
if cur_param in search_done:
continue
search_done += [cur_param]
items, n_space = self._search_provider(cur_param, search_mode=search_mode, epcount=len(episodes),
needed=needed, max_items=max_items,
try_all_searches=try_other_searches)
@ -56,11 +56,8 @@ class SpeedCDProvider(generic.TorrentProvider):
self.session.cookies.clear('.speed.cd') is None or True] +
['RSS' in y, 'type="password"' not in y, self.has_all_cookies(['speedian'], 'inSpeed_')] +
[(self.session.cookies.get('inSpeed_' + c) or 'sg!no!pw') in self.digest for c in ['speedian']])),
failed_msg=(lambda y=None: None))
username = self.username
del self.username
failed_msg=(lambda y=None: None), post_params={'login': False})
result = super(SpeedCDProvider, self)._authorised(**params)
setattr(self, 'username', username)

if not result and not self.failure_count:
if self.digest:
@ -580,7 +580,6 @@ class TVShow(object):
myDB = db.DBConnection()
myDB.mass_action(sql_l)


def loadEpisodesFromDB(self, update=False):

logger.log('Loading all episodes for [%s] from the DB' % self.name)
@ -601,7 +600,12 @@ class TVShow(object):

t = sickbeard.indexerApi(self.indexer).indexer(**lINDEXER_API_PARMS)

cachedShow = None
try:
cachedShow = t[self.indexerid]
except sickbeard.indexer_error as e:
logger.log('Unable to find cached seasons from %s: %s' % (
sickbeard.indexerApi(self.indexer).name, ex(e)), logger.WARNING)
if None is cachedShow:
return scannedEps

@ -878,6 +882,7 @@ class TVShow(object):
sqlResults = myDB.select('SELECT * FROM tv_shows WHERE indexer_id = ?', [self.indexerid])

if 1 != len(sqlResults):
if 1 < len(sqlResults):
lINDEXER_API_PARMS = sickbeard.indexerApi(self.indexer).api_params.copy()
if self.lang:
lINDEXER_API_PARMS['language'] = self.lang
@ -887,7 +892,7 @@ class TVShow(object):
if 0 != len(sqlResults):
logger.log('%s: Loading show info%s from database' % vals)
raise exceptions.MultipleDBShowsException()
logger.log('%s: Unable to find the show%s in the database' % vals)
logger.log('%s: Unable to find the show%s in the database' % (self.indexerid, self.name))
return
else:
if not self.indexer:
@ -9,7 +9,7 @@ sys.path.insert(1, os.path.abspath('..'))
sys.path.insert(1, os.path.abspath('../lib'))

from sickbeard.name_parser import parser
from sickbeard import name_cache
from sickbeard import name_cache, tv

import sickbeard

@ -355,6 +355,35 @@ failure_cases = ['7sins-jfcs01e09-720p-bluray-x264']
|

invalid_cases = [('The.Show.Name.111E14.1080p.WEB.x264-GROUP', 'the show name', 11, False)]

extra_info_no_name_tests = [('The Show Name', [('Episode 302', 3, 2)],
'The.Show.Name.S03E02.REPACK.Episode.302.720p.AMZN.WEBRip.DDP5.1.x264-GROUP',
'REPACK.720p.AMZN.WEBRip.DDP5.1.x264'),
('The Show Name', [('Episode 302', 3, 2)],
'The.Show.Name.S03E02.Episode.302.REPACK.720p.AMZN.WEBRip.DDP5.1.x264-GROUP',
'REPACK.720p.AMZN.WEBRip.DDP5.1.x264'),
('The Show Name', [('Episode 302', 3, 2)],
'The.Show.Name.S03E02.Episode.302.REPACK.720p.AMZN.WEBRip.DDP5.1.x264-GROUP',
'REPACK.720p.AMZN.WEBRip.DDP5.1.x264'),
('The Show Name', [('Episode 302', 3, 2)],
'The.Show.Name.S03E02.REPACK.720p.AMZN.WEBRip.DDP5.1.x264-GROUP',
'REPACK.720p.AMZN.WEBRip.DDP5.1.x264'),
('The Show Name', [('Episode 302', 3, 2)],
'The.Show.Name.S03E02.720p.AMZN.WEBRip.DDP5.1.x264-GROUP',
'720p.AMZN.WEBRip.DDP5.1.x264'),
('The Show Name', [('Episode 302', 3, 2), ('Name 2', 3, 3)],
'The.Show.Name.S03E02E03.720p.AMZN.WEBRip.DDP5.1.x264-GROUP',
'720p.AMZN.WEBRip.DDP5.1.x264'),
('The Show Name', [('Episode 302', 3, 2), ('Name 2', 3, 3)],
'The.Show.Name.S03E02E03.Episode.302.Name2.720p.AMZN.WEBRip.DDP5.1.x264-GROUP',
'720p.AMZN.WEBRip.DDP5.1.x264'),
('The Show Name', [('Episode 302', 3, 2), ('Name 2', 3, 3)],
'The.Show.Name.S03E02E03.REPACK.Episode.302.Name2.720p.AMZN.WEBRip.DDP5.1.x264-GROUP',
'REPACK.720p.AMZN.WEBRip.DDP5.1.x264'),
('The Show Name', [('Episode 302', 3, 2), ('Name 2', 3, 3)],
'The.Show.Name.S03E02E03.Episode.302.Name2.REPACK.720p.AMZN.WEBRip.DDP5.1.x264-GROUP',
'REPACK.720p.AMZN.WEBRip.DDP5.1.x264'),
]


class InvalidCases(test.SickbeardTestDBCase):

@ -612,11 +641,35 @@ class BasicTests(test.SickbeardTestDBCase):
self._test_names(np, 'anime_bare')


class TVShow(object):
def __init__(self, is_anime=False, name='', indexerid=0):
self.is_anime = is_anime
self.name = name
self.indexerid = indexerid
class TVShow(tv.TVShow):
def __init__(self, is_anime=False, name='', indexerid=0, indexer=0):
self._anime = is_anime
self._name = name
self._indexerid = indexerid
self._indexer = indexer
self.episodes = {}


class TVEpisode(tv.TVEpisode):
def __init__(self, name=''):
self._name = name


class ExtraInfoNoNameTests(test.SickbeardTestDBCase):
def test_extra_info_no_name(self):
for case in extra_info_no_name_tests:
tvs = TVShow(False, case[0], 2, 1)
for e in case[1]:
tvs.episodes.setdefault(e[1], {}).update({e[2]: TVEpisode(e[0])})

sickbeard.showList = [tvs]
name_cache.nameCache = {}
name_cache.buildNameCache()

np = parser.NameParser()
r = np.parse(case[2])
n_ep = r.extra_info_no_name()
self.assertEqual(n_ep, case[3])


if __name__ == '__main__':
@ -637,3 +690,6 @@ if __name__ == '__main__':

suite = unittest.TestLoader().loadTestsFromTestCase(InvalidCases)
unittest.TextTestRunner(verbosity=2).run(suite)

suite = unittest.TestLoader().loadTestsFromTestCase(ExtraInfoNoNameTests)
unittest.TextTestRunner(verbosity=2).run(suite)