From f782567fd1e65ddfd12b2d0a5ed830bec4993f86 Mon Sep 17 00:00:00 2001 From: JackDandy Date: Sat, 20 Jun 2015 00:34:56 +0100 Subject: [PATCH] Change provider SCC login process to use General Config/Advanced/Proxy host setting. Change refactor SCC to use torrent provider simplification and PEP8. --- CHANGES.md | 2 + gui/slick/images/providers/torrent.png | Bin 0 -> 916 bytes sickbeard/helpers.py | 13 +- sickbeard/providers/generic.py | 255 ++++++++++++++++++++--- sickbeard/providers/newznab.py | 54 +---- sickbeard/providers/scc.py | 277 +++++++------------------ sickbeard/tv.py | 9 +- sickbeard/tvcache.py | 12 +- 8 files changed, 320 insertions(+), 302 deletions(-) create mode 100644 gui/slick/images/providers/torrent.png diff --git a/CHANGES.md b/CHANGES.md index fbab3939..751b9de4 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -16,6 +16,7 @@ * Fix provider SCC stop snatching releases for episodes already completed * Fix provider SCC handle null server responses * Change provider SCC remove 1 of 3 requests per search to save 30% time +* Change provider SCC login process to use General Config/Advanced/Proxy host setting * Change provider IPT only decode unicode search strings * Change provider IPT login process to use General Config/Advanced/Proxy host setting * Change provider TB PEP8 and code convention cleanup @@ -28,6 +29,7 @@ * Change provider KAT to use mediaExtensions from common instead of private list * Change provider KAT provider PEP8 and code convention cleanup * Change refactor and code simplification for torrent providers +* Change refactor SCC to use torrent provider simplification and PEP8 * Change provider SCD PEP8 and code convention cleanup * Remove HDTorrents torrent provider * Remove NextGen torrent provider diff --git a/gui/slick/images/providers/torrent.png b/gui/slick/images/providers/torrent.png new file mode 100644 index 0000000000000000000000000000000000000000..0e3fdfa094b3fafbef85e113978e0c8ebd7a94c9 GIT binary patch literal 916 zcmV;F18e+=P){`O`M5iX$plRL1?*@$xNtSTyC)ZO5zQG!Z^Ticr$KXRUJJMZD-*w1Vp-oT}c zSq{H9g&T2*#5}^#A{OAk>aA+YbBr@AX`_<$PIOERf9>3B!=kTKtlah$WlFREytLGLHi72W{r4o?O`FbTY|M|(UK6!F%Y^-v_8<((U;I+2SyV~FI%aWR(E3U$t qvg5fIjZ{t*%Jcg^9e;afIsadv&8t;bz3Za@0000 1 and 'eponly' != search_mode: + if 'sponly' == search_mode: # get season search results for curString in self._get_season_search_strings(epObj): itemList += self._doSearch(curString, search_mode, len(episodes)) @@ -469,28 +478,107 @@ class GenericProvider: ''' return '' + @staticmethod + def _log_result(mode='cache', count=0, url='url missing'): + """ + Simple function to log the result of a search + :param count: count of successfully processed items + :param url: source url of item(s) + """ + mode = mode.lower() + logger.log(u'%s in response from %s' % (('No %s items' % mode, + '%s %s item%s' % (count, mode, maybe_plural(count)))[0 < count], url)) + class NZBProvider(GenericProvider): + def __init__(self, name, supports_backlog=True, anime_only=False): GenericProvider.__init__(self, name, supports_backlog, anime_only) + self.providerType = GenericProvider.NZB + def imageName(self): + + return GenericProvider.imageName(self, 'newznab') + + def _find_propers(self, search_date=None): + + cache_results = self.cache.listPropers(search_date) + results = [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time']), self.show) for x in + cache_results] + + index = 0 + alt_search = ('nzbs_org' == self.getID()) + term_items_found = False + do_search_alt = False + + search_terms = ['.proper.', 
'.repack.']
+        proper_check = re.compile(r'(?i)\b(?:proper|repack)\b')
+
+        while index < len(search_terms):
+            search_params = {'q': search_terms[index]}
+            if alt_search:
+
+                if do_search_alt:
+                    index += 1
+
+                if term_items_found:
+                    do_search_alt = True
+                    term_items_found = False
+                else:
+                    if do_search_alt:
+                        search_params['t'] = 'search'
+
+                    do_search_alt = (True, False)[do_search_alt]
+
+            else:
+                index += 1
+
+            for item in self._doSearch(search_params, age=4):
+
+                (title, url) = self._get_title_and_url(item)
+
+                if not proper_check.search(title):
+                    continue
+
+                if 'published_parsed' in item and item['published_parsed']:
+                    result_date = item.published_parsed
+                    if result_date:
+                        result_date = datetime.datetime(*result_date[0:6])
+                else:
+                    logger.log(u'Unable to figure out the date for entry %s, skipping it' % title)
+                    continue
+
+                if not search_date or result_date > search_date:
+                    search_result = classes.Proper(title, url, result_date, self.show)
+                    results.append(search_result)
+                    term_items_found = True
+                    do_search_alt = False
+
+            time.sleep(0.2)
+
+        return results
+
 
 class TorrentProvider(GenericProvider):
     def __init__(self, name, supports_backlog=True, anime_only=False):
         GenericProvider.__init__(self, name, supports_backlog, anime_only)
+
         self.providerType = GenericProvider.TORRENT
+
         self._seed_ratio = None
 
-    def get_cache_data(self):
-        search_params = {'RSS': ['']}
-        return self._doSearch(search_params)
+    def imageName(self):
+
+        return GenericProvider.imageName(self, 'torrent')
 
     def seedRatio(self):
+
         return self._seed_ratio
 
     def getQuality(self, item, anime=False):
+
         if isinstance(item, tuple):
             name = item[0]
         elif isinstance(item, dict):
@@ -499,11 +587,98 @@ class TorrentProvider(GenericProvider):
             name = item.title
         return Quality.sceneQuality(name, anime)
 
-    def _find_propers(self, search_date=datetime.datetime.today(), method=None):
+    @staticmethod
+    def _reverse_quality(quality):
+
+        return {
+            Quality.SDTV: 'HDTV x264',
+            Quality.SDDVD: 'DVDRIP',
+            Quality.HDTV: '720p HDTV x264',
+            Quality.FULLHDTV: '1080p HDTV x264',
+            Quality.RAWHDTV: '1080i HDTV mpeg2',
+            Quality.HDWEBDL: '720p WEB-DL h264',
+            Quality.FULLHDWEBDL: '1080p WEB-DL h264',
+            Quality.HDBLURAY: '720p Bluray x264',
+            Quality.FULLHDBLURAY: '1080p Bluray x264'
+        }.get(quality, '')
+
+    def _get_season_search_strings(self, ep_obj, detail_only=False, scene=True):
+
+        if ep_obj.show.air_by_date or ep_obj.show.sports:
+            ep_detail = str(ep_obj.airdate).split('-')[0]
+        elif ep_obj.show.anime:
+            ep_detail = ep_obj.scene_absolute_number
+        else:
+            ep_detail = 'S%02d' % int(ep_obj.scene_season)
+
+        detail = ({}, {'Season_only': [ep_detail]})[detail_only and not self.show.sports and not self.show.anime]
+        return [dict({'Season': self._build_search_strings(ep_detail, scene)}.items() + detail.items())]
+
+    def _get_episode_search_strings(self, ep_obj, add_string='', detail_only=False, scene=True, sep_date=' ', use_or=True):
+
+        if not ep_obj:
+            return []
+
+        if self.show.air_by_date or self.show.sports:
+            ep_detail = str(ep_obj.airdate).replace('-', sep_date)
+            if self.show.sports:
+                month = ep_obj.airdate.strftime('%b')
+                ep_detail = ([ep_detail] + [month], '%s|%s' % (ep_detail, month))[use_or]
+        elif self.show.anime:
+            ep_detail = ep_obj.scene_absolute_number
+        else:
+            ep_detail = sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
+                                                              'episodenumber': ep_obj.scene_episode}
+        append = (add_string, '')[self.show.anime]
+        detail = ({}, {'Episode_only': [ep_detail]})[detail_only and not self.show.sports and not 
self.show.anime] + return [dict({'Episode': self._build_search_strings(ep_detail, scene, append)}.items() + detail.items())] + + def _build_search_strings(self, ep_detail, process_name=True, append=''): + """ + Build a list of search strings for querying a provider + :param ep_detail: String of episode detail or List of episode details + :param process_name: Bool Whether to call sanitizeSceneName() on show name + :param append: String to append to search strings + :return: List of search string parameters + """ + if not isinstance(ep_detail, list): + ep_detail = [ep_detail] + if not isinstance(append, list): + append = [append] + + search_params = [] + crop = re.compile(r'([\.\s])(?:\1)+') + for name in set(allPossibleShowNames(self.show)): + if process_name: + name = helpers.sanitizeSceneName(name) + for detail in ep_detail: + search_params += [crop.sub(r'\1', '%s %s' % (name, detail) + ('', ' ' + x)[any(x)]) for x in append] + return search_params + + def _checkAuth(self): + + if hasattr(self, 'username') and hasattr(self, 'password'): + if self.username and self.password: + return True + setting = 'Password or Username' + elif hasattr(self, 'username') and hasattr(self, 'passkey'): + if self.username and self.passkey: + return True + setting = 'Passkey or Username' + elif hasattr(self, 'api_key'): + if self.api_key: + return True + setting = 'Apikey' + else: + return GenericProvider._checkAuth(self) + + raise AuthException('%s for %s is empty in config provider options' % (setting, self.name)) + + def _find_propers(self, search_date=datetime.datetime.today(), search_terms=None): """ Search for releases of type PROPER :param search_date: Filter search on episodes since this date - :param method: String or list of strings that qualify PROPER release types + :param search_terms: String or list of strings that qualify PROPER release types :return: list of Proper objects """ results = [] @@ -520,8 +695,9 @@ class TorrentProvider(GenericProvider): if not sql_results: return results + clean_term = re.compile(r'(?i)[^a-z\|\.]+') for sqlshow in sql_results: - showid, season, episode = (int(sqlshow['showid']), int(sqlshow['season']), int(sqlshow['episode'])) + showid, season, episode = [int(sqlshow[item]) for item in ('showid', 'season', 'episode')] self.show = helpers.findCertainShow(sickbeard.showList, showid) if not self.show: @@ -529,19 +705,36 @@ class TorrentProvider(GenericProvider): cur_ep = self.show.getEpisode(season, episode) - if not isinstance(method, list): - if None is method: - method = 'PROPER|REPACK' - method = [method] + if None is search_terms: + search_terms = ['proper', 'repack'] + elif not isinstance(search_terms, list): + if '' == search_terms: + search_terms = 'proper|repack' + search_terms = [search_terms] - for proper_string in method: - search_string = self._get_episode_search_strings(cur_ep, add_string=proper_string) + for proper_term in search_terms: + proper_check = re.compile(r'(?i)(?:%s)' % clean_term.sub('', proper_term)) - proper_exp = re.sub(r'(?i)[^a-z\|\.]+', '', proper_string) + search_string = self._get_episode_search_strings(cur_ep, add_string=proper_term) for item in self._doSearch(search_string[0]): title, url = self._get_title_and_url(item) - if not re.search('(?i)(?:%s)' % proper_exp, title): + if not proper_check.search(title): continue results.append(classes.Proper(title, url, datetime.datetime.today(), self.show)) return results + + @staticmethod + def _has_no_results(*html): + return re.search(r'(?i)<(?:h\d|strong)[^>]*>(?:' + + 
'your\ssearch\sdid\snot\smatch|' + + 'nothing\sfound|' + + 'no\storrents\sfound|' + + '.*?there\sare\sno\sresults|' + + '.*?no\shits\.\sTry\sadding' + + ')', html[0]) + + def get_cache_data(self, *args, **kwargs): + + search_params = {'Cache': ['']} + return self._doSearch(search_params) diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py index 428d9a19..b6fa8697 100755 --- a/sickbeard/providers/newznab.py +++ b/sickbeard/providers/newznab.py @@ -18,7 +18,6 @@ import urllib import time -import datetime import os try: @@ -331,58 +330,7 @@ class NewznabProvider(generic.NZBProvider): return results def findPropers(self, search_date=None): - - search_terms = ['.proper.', '.repack.'] - - cache_results = self.cache.listPropers(search_date) - results = [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time']), self.show) for x in - cache_results] - - index = 0 - alt_search = ('nzbs_org' == self.getID()) - term_items_found = False - do_search_alt = False - - while index < len(search_terms): - search_params = {'q': search_terms[index]} - if alt_search: - - if do_search_alt: - index += 1 - - if term_items_found: - do_search_alt = True - term_items_found = False - else: - if do_search_alt: - search_params['t'] = "search" - - do_search_alt = (True, False)[do_search_alt] - - else: - index += 1 - - for item in self._doSearch(search_params, age=4): - - (title, url) = self._get_title_and_url(item) - - if item.has_key('published_parsed') and item['published_parsed']: - result_date = item.published_parsed - if result_date: - result_date = datetime.datetime(*result_date[0:6]) - else: - logger.log(u"Unable to figure out the date for entry " + title + ", skipping it") - continue - - if not search_date or result_date > search_date: - search_result = classes.Proper(title, url, result_date, self.show) - results.append(search_result) - term_items_found = True - do_search_alt = False - - time.sleep(0.2) - - return results + return self._find_propers(search_date) class NewznabCache(tvcache.TVCache): diff --git a/sickbeard/providers/scc.py b/sickbeard/providers/scc.py index c3709472..7a5cc2ee 100644 --- a/sickbeard/providers/scc.py +++ b/sickbeard/providers/scc.py @@ -1,6 +1,4 @@ -# Author: Idan Gutman -# Modified by jkaberg, https://github.com/jkaberg for SceneAccess -# URL: http://code.google.com/p/sickbeard/ +# coding=utf-8 # # This file is part of SickGear. # @@ -18,268 +16,137 @@ # along with SickGear. If not, see . import re -import traceback import datetime +import time +import traceback -import sickbeard -import generic -from sickbeard import logger, tvcache, db, classes, helpers, show_name_helpers -from sickbeard.common import Quality -from sickbeard.exceptions import ex +from . 
import generic +from sickbeard import logger, tvcache, helpers from sickbeard.bs4_parser import BS4Parser -from sickbeard.helpers import sanitizeSceneName -from lib import requests -from lib.requests import exceptions from lib.unidecode import unidecode class SCCProvider(generic.TorrentProvider): - urls = {'base_url': 'https://sceneaccess.eu', - 'login': 'https://sceneaccess.eu/login', - 'detail': 'https://sceneaccess.eu/details?id=%s', - 'search': 'https://sceneaccess.eu/browse?search=%s&method=1&%s', - 'nonscene': 'https://sceneaccess.eu/nonscene?search=%s&method=1&c44=44&c45=44', - 'archive': 'https://sceneaccess.eu/archive?search=%s&method=1&c26=26', - 'download': 'https://sceneaccess.eu/%s'} def __init__(self): - generic.TorrentProvider.__init__(self, 'SceneAccess', True, False) - self.username = None - self.password = None - self.ratio = None - self.minseed = None - self.minleech = None + generic.TorrentProvider.__init__(self, 'SceneAccess') + + self.url_base = 'https://sceneaccess.eu/' + self.urls = {'config_provider_home_uri': self.url_base, + 'login': self.url_base + 'login', + 'search': self.url_base + 'browse?search=%s&method=1&c27=27&c17=17&c11=11', + 'nonscene': self.url_base + 'nonscene?search=%s&method=1&c44=44&c45=44', + 'archive': self.url_base + 'archive?search=%s&method=1&c26=26', + 'get': self.url_base + '%s'} + + self.url = self.urls['config_provider_home_uri'] + + self.username, self.password, self.minseed, self.minleech = 4 * [None] self.cache = SCCCache(self) - self.url = self.urls['base_url'] - self.categories = 'c27=27&c17=17&c11=11' - - def getQuality(self, item, anime=False): - - quality = Quality.sceneQuality(item[0], anime) - return quality def _doLogin(self): - login_params = {'username': self.username, - 'password': self.password, - 'submit': 'come on in'} - - self.session = requests.Session() - - try: - response = self.session.post(self.urls['login'], data=login_params, headers=self.headers, timeout=30, verify=False) - except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError) as e: - logger.log(u'Unable to connect to %s provider: %s' % (self.name, ex(e)), logger.ERROR) - return False - - if re.search('Username or password incorrect', response.text) \ - or re.search('SceneAccess \| Login', response.text) \ - or 401 == response.status_code: - logger.log(u'Your authentication credentials for %s are incorrect, check your config.' 
% self.name, logger.ERROR) - return False - - return True - - def _get_season_search_strings(self, ep_obj): - - search_string = {'Season': []} - if ep_obj.show.air_by_date or ep_obj.show.sports: - ep_string = str(ep_obj.airdate).split('-')[0] - elif ep_obj.show.anime: - ep_string = '%d' % ep_obj.scene_absolute_number - else: - ep_string = 'S%02d' % int(ep_obj.scene_season) # 1) showName SXX - - for show_name in set(show_name_helpers.allPossibleShowNames(self.show)): - search_string['Season'].append('%s %s' % (show_name, ep_string)) - - return [search_string] - - def _get_episode_search_strings(self, ep_obj, add_string=''): - - search_string = {'Episode': []} - - if not ep_obj: - return [] - - airdate = str(ep_obj.airdate).replace('-', '.') - if self.show.air_by_date: - ep_detail = airdate - elif self.show.sports: - ep_detail = '%s|%s' % (airdate, ep_obj.airdate.strftime('%b')) - elif self.show.anime: - ep_detail = ep_obj.scene_absolute_number - else: - ep_detail = sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season, - 'episodenumber': ep_obj.scene_episode} - if add_string and not self.show.anime: - ep_detail += ' ' + add_string - - for show_name in set(show_name_helpers.allPossibleShowNames(self.show)): - search_string['Episode'].append(re.sub('\s+', ' ', '%s %s' % (sanitizeSceneName(show_name), ep_detail))) - - return [search_string] - - def _isSection(self, section, text): - title = '.+? \| %s' % section - if re.search(title, text, re.IGNORECASE): + logged_in = lambda: 'uid' in self.session.cookies and 'pass' in self.session.cookies + if logged_in(): return True - else: - return False + + if self._checkAuth(): + login_params = {'username': self.username, 'password': self.password, 'submit': 'come on in'} + + response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session) + if response and logged_in(): + return True + + logger.log(u'Failed to authenticate with %s, abort provider.' 
% self.name, logger.ERROR)
+
+        return False
 
     def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
 
         results = []
-        items = {'Season': [], 'Episode': [], 'RSS': []}
+        items = {'Season': [], 'Episode': [], 'Cache': []}
 
         if not self._doLogin():
             return results
 
+        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'detail', 'get': 'download'}.items())
         for mode in search_params.keys():
             for search_string in search_params[mode]:
-                search_string, url = self._get_title_and_url([search_string, self.urls['search'], '', '', ''])
+                search_string, void = self._get_title_and_url((search_string, None))
                 if isinstance(search_string, unicode):
                     search_string = unidecode(search_string)
 
-                nonsceneSearchURL = None
                 if 'Season' == mode:
-                    searchURL = self.urls['archive'] % search_string
-                    response = [self.getURL(searchURL)]
+                    searches = [self.urls['archive'] % search_string]
                 else:
-                    searchURL = self.urls['search'] % (search_string, self.categories)
-                    nonsceneSearchURL = self.urls['nonscene'] % search_string
-                    response = [self.getURL(searchURL),
-                                self.getURL(nonsceneSearchURL)]
-                    logger.log(u'Search string: ' + nonsceneSearchURL, logger.DEBUG)
+                    searches = [self.urls['search'] % search_string,
+                                self.urls['nonscene'] % search_string]
 
-                logger.log(u'Search string: ' + searchURL, logger.DEBUG)
+                for search_url in searches:
+                    html = self.getURL(search_url)
 
-                response = [html for html in response if html is not None]
-                if not len(response):
-                    continue
+                    cnt = len(items[mode])
+                    try:
+                        if not html or self._has_no_results(html):
+                            raise generic.HaltParseException
 
-                try:
-                    for markup in response:
-                        with BS4Parser(markup, features=['html5lib', 'permissive']) as soup:
+                        with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
                             torrent_table = soup.find('table', attrs={'id': 'torrents-table'})
-                            torrent_rows = []
-                            if torrent_table:
-                                torrent_rows = torrent_table.find_all('tr')
+                            torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')
 
-                            # Continue only if at least one Release is found
                             if 2 > len(torrent_rows):
-                                if soup.title:
-                                    source = '%s (%s)' % (self.name, soup.title.string)
-                                else:
-                                    source = self.name
-                                logger.log(u'The data returned from %s does not contain any torrents' % source, logger.DEBUG)
-                                continue
-
-                            for result in torrent_table.find_all('tr')[1:]:
+                                raise generic.HaltParseException
 
+                            for tr in torrent_table.find_all('tr')[1:]:
                                 try:
-                                    link = result.find('td', attrs={'class': 'ttr_name'}).find('a')
-                                    all_urls = result.find('td', attrs={'class': 'td_dl'}).find_all('a', limit=2)
-                                    url = all_urls[0]
+                                    seeders, leechers = [int(tr.find('td', attrs={'class': x}).get_text().strip())
+                                                         for x in ('ttr_seeders', 'ttr_leechers')]
+                                    if 'Cache' != mode and (seeders < self.minseed or leechers < self.minleech):
+                                        continue
 
-                                    title = link.string
-                                    if re.search('\.\.\.', title):
-                                        response = self.getURL(self.url + '/' + link['href'])
-                                        if response:
-                                            with BS4Parser(response) as soup_detail:
-                                                title = re.search('(?<=").+(?<!")', soup_detail.title.text).group(0)
-                                    download_url = self.urls['download'] % url['href']
-                                    id = int(link['href'].replace('details?id=', ''))
-                                    seeders = int(result.find('td', attrs={'class': 'ttr_seeders'}).string)
-                                    leechers = int(result.find('td', attrs={'class': 'ttr_leechers'}).string)
-                                except (AttributeError, TypeError):
-                                    continue
-
-                                if mode != 'RSS' and (self.minseed > seeders or self.minleech > leechers):
-                                    continue
+                                    info = tr.find('a', href=rc['info'])
+                                    title = ('title' in info.attrs and info['title']) or info.get_text().strip()
+
+                                    link = str(tr.find('a', href=rc['get'])['href']).lstrip('/')
+                                    download_url = self.urls['get'] % link
+                                except (AttributeError, TypeError, ValueError):
+                                    continue
 
+                                if title and download_url:
+                                    items[mode].append((title, download_url, seeders))
 
-                                if not title or not download_url:
-                                    continue
-
-                                item = title, download_url, id, seeders, leechers
-
-                                if self._isSection('Non-Scene', markup):
-                                    logger.log(u'Found result: %s (%s)' % (title, nonsceneSearchURL), logger.DEBUG)
-                                else:
-                                    logger.log(u'Found result: %s (%s)' % (title, searchURL), logger.DEBUG)
-
-                                items[mode].append(item)
-
-                            except Exception as e:
-                                logger.log(u'Failed parsing %s Traceback: %s' % (self.name, traceback.format_exc()), logger.ERROR)
+                    except generic.HaltParseException:
+                        time.sleep(1.1)
+                    except Exception:
+                        logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
+                    self._log_result(mode, len(items[mode]) - cnt, search_url)
 
             # For each search mode sort all the items by seeders
-            items[mode].sort(key=lambda tup: tup[3], reverse=True)
+            items[mode].sort(key=lambda tup: tup[2], reverse=True)
             results += items[mode]
 
         return results
 
-    def _get_title_and_url(self, item):
-
-        title, url, id, seeders, leechers = item
-
-        if title:
-            title += u''
-            title = re.sub(r'\s+', '.', title)
-
-        if url:
-            url = str(url).replace('&amp;', '&')
-
-        return title, url
-
     def findPropers(self, search_date=datetime.datetime.today()):
-        results = []
+        return self._find_propers(search_date)
 
-        my_db = db.DBConnection()
-        sql_results = my_db.select(
-            'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
-            ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
-            ' WHERE e.airdate >= ' + str(search_date.toordinal()) +
-            ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
-            ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
-        )
+    def _get_episode_search_strings(self, ep_obj, add_string='', **kwargs):
 
-        if not sql_results:
-            return []
-
-        for sqlshow in sql_results:
-            showid, season, episode = (int(sqlshow['showid']), int(sqlshow['season']), int(sqlshow['episode']))
-            self.show = helpers.findCertainShow(sickbeard.showList, showid)
-            if not self.show:
-                continue
-            cur_ep = self.show.getEpisode(season, episode)
-
-            for search in ['.proper.', '.repack.']:
-                search_string = self._get_episode_search_strings(cur_ep, add_string=search)
-
-                for item in self._doSearch(search_string[0]):
-                    title, url = self._get_title_and_url(item)
-                    results.append(classes.Proper(title, url, datetime.datetime.today(), self.show))
-
-        return results
-
-    def seedRatio(self):
-        return self.ratio
+        return generic.TorrentProvider._get_episode_search_strings(self, ep_obj, add_string, sep_date='.', use_or=False)
 
 
 class SCCCache(tvcache.TVCache):
-    def __init__(self, provider):
-        tvcache.TVCache.__init__(self, provider)
+    def __init__(self, this_provider):
+        tvcache.TVCache.__init__(self, this_provider)
 
-        # only poll SCC every 10 minutes max
-        self.minTime = 20
+        self.minTime = 20  # cache update frequency
 
     def _getRSSData(self):
-        search_params = {'RSS': ['']}
-        return self.provider._doSearch(search_params)
+
+        return self.provider.get_cache_data()
 
 
 provider = SCCProvider()
diff --git a/sickbeard/tv.py b/sickbeard/tv.py
index a3138797..65ebea27 100644
--- a/sickbeard/tv.py
+++ b/sickbeard/tv.py
@@ -1241,7 +1241,6 @@ class TVShow(object):
         toReturn += "anime: " + str(self.is_anime) + "\n"
         return toReturn
 
-
     def wantEpisode(self, season, episode, quality, manualSearch=False):
 
         logger.log(u"Checking if found episode " + str(season) + "x" + str(episode) + " is wanted at quality " +
@@ -1250,8 +1249,12 @@ class TVShow(object):
 
         # if the quality isn't one we want under any circumstances then just say no
         initialQualities, archiveQualities = Quality.splitQuality(self.quality)
         allQualities = list(set(initialQualities + archiveQualities))
-        logger.log(u"initial + archive = (" + ",".join([Quality.qualityStrings[qual] for qual in initialQualities]) + ") + (" + ",".join([Quality.qualityStrings[qual] for qual in archiveQualities]) + ") and found " + Quality.qualityStrings[quality],
-                   logger.DEBUG)
+
+        initial = u'= (%s)' % ','.join([Quality.qualityStrings[qual] for qual in initialQualities])
+        if 0 < len(archiveQualities):
+            initial = u'+ upgrade to %s + (%s)'\
+                      % (initial, ','.join([Quality.qualityStrings[qual] for qual in archiveQualities]))
+        logger.log(u'Want initial %s and found %s' % (initial, Quality.qualityStrings[quality]), logger.DEBUG)
 
         if quality not in allQualities:
             logger.log(u"Don't want this quality, ignoring found episode", logger.DEBUG)
diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py
index f8fa3389..0967b74d 100644
--- a/sickbeard/tvcache.py
+++ b/sickbeard/tvcache.py
@@ -27,11 +27,9 @@ from sickbeard import logger
 from sickbeard.common import Quality
 from sickbeard import helpers, show_name_helpers
-from sickbeard.exceptions import MultipleShowObjectsException
-from sickbeard.exceptions import AuthException
+from sickbeard.exceptions import AuthException, ex
 from name_parser.parser import NameParser, InvalidNameException, InvalidShowException
 from sickbeard.rssfeeds import getFeed
-from sickbeard import clients
 import itertools
 
 class CacheDBConnection(db.DBConnection):
@@ -77,7 +75,13 @@ class TVCache():
         return True
 
     def updateCache(self):
-        if self.shouldUpdate() and self._checkAuth():
+        try:
+            self._checkAuth()
+        except AuthException as e:
+            logger.log(u'Authentication error: ' + ex(e), logger.ERROR)
+            return []
+
+        if self.shouldUpdate():
             # as long as the http request worked we count this as an update
             data = self._getRSSData()
             if not data:
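

Reviewer note, not part of the patch: the new SCC `_doLogin` above reuses a single
requests session and treats the presence of the tracker's `uid`/`pass` auth cookies
as proof of a live login, posting credentials only when they are absent. In SickGear
the POST goes through `helpers.getURL`, which is what makes the General
Config/Advanced/Proxy host setting apply. A minimal standalone sketch of the same
pattern follows; the tracker URL and credentials are placeholders, not SCC specifics:

    import requests

    def do_login(session, username, password):
        # an earlier login is still valid while the auth cookies survive
        logged_in = lambda: 'uid' in session.cookies and 'pass' in session.cookies
        if logged_in():
            return True

        login_params = {'username': username, 'password': password, 'submit': 'come on in'}
        # plain requests shown here; the patched provider posts via
        # helpers.getURL so the configured proxy host is honoured
        response = session.post('https://tracker.example/login', data=login_params, timeout=30)
        return response is not None and response.ok and logged_in()

    session = requests.Session()
    if do_login(session, 'user', 'secret'):
        print('logged in; later searches reuse the same cookie jar')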
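
A second note for reviewers: `_build_search_strings` in generic.py collapses runs of
a repeated separator with the pattern `([\.\s])(?:\1)+`, keeping only the first dot
or space of each run before handing the strings to a provider. A quick demonstration
with invented show names:

    import re

    crop = re.compile(r'([\.\s])(?:\1)+')
    for raw in ('Show  Name S01E02', 'Show Name...2015.06.20'):
        # back-reference \1 keeps one separator; the repeats are dropped
        print(crop.sub(r'\1', raw))
    # -> Show Name S01E02
    # -> Show Name.2015.06.20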