From f9cc6ed3309b8b214d5356fcacb3609ed49ed48b Mon Sep 17 00:00:00 2001 From: Prinz23 Date: Thu, 2 Nov 2017 18:30:05 +0000 Subject: [PATCH 1/2] Add provider error table to page Manage/Media Search. Add failure handling, skip provider for x hour(s) depending on count of failures. Add detection of api hit limit reached. Add failure count to omgwtfnzbs. Change improve categories selection (manual search for example). --- .../default/manage_manageSearches.tmpl | 42 +- gui/slick/js/manageSearches.js | 13 +- sickbeard/databases/cache_db.py | 25 +- sickbeard/providers/generic.py | 369 +++++++++++++++++- sickbeard/providers/newznab.py | 71 +++- sickbeard/providers/omgwtfnzbs.py | 14 +- sickbeard/search.py | 5 + sickbeard/webserve.py | 16 + 8 files changed, 532 insertions(+), 23 deletions(-) diff --git a/gui/slick/interfaces/default/manage_manageSearches.tmpl b/gui/slick/interfaces/default/manage_manageSearches.tmpl index dfeb37b7..701d4eb4 100644 --- a/gui/slick/interfaces/default/manage_manageSearches.tmpl +++ b/gui/slick/interfaces/default/manage_manageSearches.tmpl @@ -1,4 +1,5 @@ #import sickbeard +#from sickbeard import sbdatetime ## #set global $title = 'Media Search' #set global $header = 'Media Search' @@ -7,6 +8,7 @@ ## #import os.path #include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_top.tmpl') + @@ -46,9 +48,45 @@ In Progress
#end if
- +#if $provider_errors +

Provider Errors:


+#for $prov in $provider_error_stats
+ #if $len($prov['errors'])
+ $prov['name']
+ #if $prov['next_try']
+ #set nt = $str($prov['next_try']).split('.', 2)
+ Next try in: $nt[0]
+ #end if
+ <table>
+ <thead>
+ <tr>
+ <th>Day</th>
+ <th>http</th>
+ <th>connection</th>
+ <th>connection timeout</th>
+ <th>timeout</th>
+ <th>unknown</th>
+ <th>no data</th>
+ #if $prov['has_limit']
+ <th>Hit Limit</th>
+ #end if
+ </tr>
+ </thead>
+ <tbody>
+ #set $row = 0
+ #for $error in $prov['errors']
+ <tr>
+ <td>$sbdatetime.sbdatetime.sbfdate($error['date'])</td>
+ <td>$error['http'].get('count', 0)</td>
+ <td>$error['connection'].get('count', 0)</td>
+ <td>$error['connection_timeout'].get('count', 0)</td>
+ <td>$error['timeout'].get('count', 0)</td>
+ <td>$error['unknown'].get('count', 0)</td>
+ <td>$error['nodata'].get('count', 0)</td>
+ #if $prov['has_limit']
+ <td>$error.get('limit', {}).get('count', 0)</td>
+ #end if
+ </tr>
+ #end for
+ </tbody>
+ </table>
+ #end if
+#end for


- +#end if

Search Queue:

#if $queue_length['backlog'] or $queue_length['manual'] or $queue_length['failed']
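The backoff added to sickbeard/providers/generic.py (below) maps a provider's consecutive failure count to an escalating wait before the provider is queried again. A minimal standalone sketch of that schedule, assuming the same fail_times table and the default base_limit of 2 used by fail_time_index(); the backoff() name is illustrative, not part of the patch:

    import datetime

    # (hours, minutes) wait periods, mirroring GenericProvider.fail_times
    fail_times = {1: (0, 15), 2: (0, 30), 3: (1, 0), 4: (2, 0),
                  5: (3, 0), 6: (6, 0), 7: (12, 0), 8: (24, 0)}

    def backoff(failure_count, base_limit=2):
        # a provider is only skipped from its third failure (see should_skip);
        # the index into fail_times is capped, so the wait tops out at 24 hours
        i = min(failure_count - base_limit, len(fail_times))
        hours, minutes = fail_times[max(1, i)]
        return datetime.timedelta(hours=hours, minutes=minutes)

    print(backoff(3))   # 0:15:00
    print(backoff(5))   # 1:00:00
    print(backoff(20))  # 1 day, 0:00:00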
diff --git a/gui/slick/js/manageSearches.js b/gui/slick/js/manageSearches.js index 63397285..e4457b87 100644 --- a/gui/slick/js/manageSearches.js +++ b/gui/slick/js/manageSearches.js @@ -1,4 +1,4 @@ -$(document).ready(function() { +$(document).ready(function() { $('#recentsearch,#propersearch').click(function(){ $(this).addClass('disabled'); }) @@ -30,4 +30,15 @@ $(document).ready(function() { $(this).hide(); $(this).nextAll('input:first').show(); }) + $('.prov-retry').click(function () { + $(this).addClass('disabled'); + var match = $(this).attr('id').match(/^(.+)-btn-retry$/); + $.ajax({ + url: sbRoot + '/manage/manageSearches/retryProvider?provider=' + match[1], + type: 'GET', + complete: function () { + window.location.reload(true); + } + }); + }) }); \ No newline at end of file diff --git a/sickbeard/databases/cache_db.py b/sickbeard/databases/cache_db.py index b09a7003..4f769a62 100644 --- a/sickbeard/databases/cache_db.py +++ b/sickbeard/databases/cache_db.py @@ -19,7 +19,7 @@ from sickbeard import db MIN_DB_VERSION = 1 -MAX_DB_VERSION = 3 +MAX_DB_VERSION = 4 # Add new migrations at the bottom of the list; subclass the previous migration. @@ -105,3 +105,26 @@ class AddBacklogParts(ConsolidateProviders): self.connection.action('VACUUM') self.incDBVersion() + + +class AddProviderErrors(AddBacklogParts): + def test(self): + return self.checkDBVersion() > 3 + + def execute(self): + + db.backup_database('cache.db', self.checkDBVersion()) + if not self.hasTable('providererrors'): + self.connection.action('CREATE TABLE providererrors ("prov_name" TEXT, "error_type" INTEGER, ' + '"error_code" INTEGER, "error_time" NUMERIC)') + self.connection.action('CREATE INDEX idx_prov_name_error ON providererrors (prov_name)') + self.connection.action('CREATE UNIQUE INDEX idx_prov_errors ON providererrors (prov_name, error_time)') + + if not self.hasTable('providererrorcount'): + self.connection.action('CREATE TABLE providererrorcount (prov_name TEXT PRIMARY KEY , ' + 'failure_count NUMERIC, failure_time NUMERIC, hit_limit_count NUMERIC, ' + 'hit_limit_time NUMERIC, hit_limit_wait NUMERIC)') + + self.connection.action('VACUUM') + + self.incDBVersion() \ No newline at end of file diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py index bef4274a..c1a4bd32 100644 --- a/sickbeard/providers/generic.py +++ b/sickbeard/providers/generic.py @@ -27,6 +27,7 @@ import re import time import urlparse import threading +import socket from urllib import quote_plus import zlib from base64 import b16encode, b32decode @@ -45,13 +46,124 @@ from sickbeard.exceptions import SickBeardException, AuthException, ex from sickbeard.helpers import maybe_plural, remove_file_failed from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException from sickbeard.show_name_helpers import get_show_names_all_possible - +from sickbeard.sbdatetime import sbdatetime class HaltParseException(SickBeardException): """Something requires the current processing to abort""" -class GenericProvider: +class ProviderErrorTypes: + http = 1 + connection = 2 + connection_timeout = 3 + timeout = 4 + unknown = 5 + limit = 6 + nodata = 7 + + names = {1: 'http', 2: 'connection', 3: 'connection_timeout', 4: 'timeout', 5: 'unknown', 6: 'limit', 7: 'nodata'} + + def __init__(self): + pass + + +class ProviderError(object): + def __init__(self, error_type=ProviderErrorTypes.unknown, code=None, error_time=None): + self.code = code + self.error_type = error_type + self.error_time = (datetime.datetime.now(), 
error_time)[isinstance(error_time, datetime.datetime)] + + +class ProviderErrorList(object): + def __init__(self, provider_name): + self.provider_name = provider_name + self._errors = [] + self.lock = threading.Lock() + self.clear_old() + self.load_list() + self.last_save = datetime.datetime.now() + self.dirty = False + + @property + def errors(self): + return self._errors + + @property + def errors_sorted(self): + error_dict = {} + b_d = {'count': 0, 'code': None} + for e in self._errors: + dd = e.error_time.date() + if ProviderErrorTypes.names[e.error_type] not in error_dict.get(dd, {}): + error_dict.setdefault(dd, + {'date': dd, 'http': b_d.copy(), 'connection': b_d.copy(), + 'connection_timeout': b_d.copy(), 'timeout': b_d.copy(), + 'unknown': b_d.copy(), 'limit': b_d.copy(), + 'nodata': b_d.copy()})[ProviderErrorTypes.names[e.error_type]]['count'] = 1 + else: + error_dict[dd][ProviderErrorTypes.names[e.error_type]]['count'] += 1 + if ProviderErrorTypes.http == e.error_type: + if e.code in error_dict[dd].get(ProviderErrorTypes.names[e.error_type], {}): + error_dict[dd][ProviderErrorTypes.names[e.error_type]][e.code] += 1 + else: + error_dict[dd][ProviderErrorTypes.names[e.error_type]][e.code] = 1 + error_list = sorted([error_dict[k] for k in error_dict.iterkeys()], key=lambda x: x.get('date'), reverse=True) + return error_list + + def add_error(self, error): + if isinstance(error, ProviderError): + with self.lock: + self.dirty = True + self._errors.append(error) + logger.log('Adding error: %s for %s' % + (ProviderErrorTypes.names.get(error.error_type, 'unknown'), self.provider_name()), + logger.DEBUG) + self.save_list() + + def save_list(self): + if self.dirty: + self.clear_old() + with self.lock: + myDB = db.DBConnection('cache.db') + cl = [] + for e in self._errors: + cl.append(['INSERT OR IGNORE INTO providererrors (prov_name, error_type, error_code, error_time) ' + 'VALUES (?,?,?,?)', [self.provider_name(), e.error_type, e.code, + sbdatetime.totimestamp(e.error_time)]]) + self.dirty = False + if cl: + myDB.mass_action(cl) + self.last_save = datetime.datetime.now() + + def load_list(self): + with self.lock: + try: + myDB = db.DBConnection('cache.db') + if myDB.hasTable('providererrors'): + results = myDB.select('SELECT * FROM providererrors WHERE prov_name = ?', [self.provider_name()]) + self._errors = [] + for r in results: + try: + self._errors.append(ProviderError( + error_type=helpers.tryInt(r['error_type']), code=helpers.tryInt(r['error_code']), + error_time=datetime.datetime.fromtimestamp(helpers.tryInt(r['error_time'])))) + except (StandardError, Exception): + continue + except (StandardError, Exception): + pass + + def clear_old(self): + with self.lock: + try: + myDB = db.DBConnection('cache.db') + if myDB.hasTable('providererrors'): + time_limit = sbdatetime.totimestamp(datetime.datetime.now() - datetime.timedelta(days=28)) + myDB.action('DELETE FROM providererrors WHERE error_time < ?', [time_limit]) + except (StandardError, Exception): + pass + + +class GenericProvider(object): NZB = 'nzb' TORRENT = 'torrent' @@ -86,6 +198,226 @@ class GenericProvider: # 'Chrome/32.0.1700.107 Safari/537.36'} 'User-Agent': USER_AGENT} + self._failure_count = 0 + self._failure_time = None + self.errors = ProviderErrorList(self.get_id) + self._hit_limit_count = 0 + self._hit_limit_time = None + self._hit_limit_wait = None + self._last_error_type = None + self.has_limit = False + self.fail_times = {1: (0, 15), 2: (0, 30), 3: (1, 0), 4: (2, 0), 5: (3, 0), 6: (6, 0), 7: (12, 0), 8: (24, 0)} + 
self._load_error_values() + + def _load_error_values(self): + if hasattr(sickbeard, 'DATA_DIR'): + myDB = db.DBConnection('cache.db') + if myDB.hasTable('providererrorcount'): + r = myDB.select('SELECT * FROM providererrorcount WHERE prov_name = ?', [self.get_id()]) + if r: + self._failure_count = helpers.tryInt(r[0]['failure_count'], 0) + if r[0]['failure_time']: + self._failure_time = datetime.datetime.fromtimestamp(r[0]['failure_time']) + else: + self._failure_time = None + self._hit_limit_count = helpers.tryInt(r[0]['hit_limit_count'], 0) + if r[0]['hit_limit_time']: + self._hit_limit_time = datetime.datetime.fromtimestamp(r[0]['hit_limit_time']) + else: + self._hit_limit_time = None + if r[0]['hit_limit_wait']: + self._hit_limit_wait = datetime.timedelta(seconds=helpers.tryInt(r[0]['hit_limit_wait'], 0)) + else: + self._hit_limit_wait = None + self._last_error_type = self.last_error + + def _save_error_value(self, field, value): + myDB = db.DBConnection('cache.db') + if myDB.hasTable('providererrorcount'): + r = myDB.action('UPDATE providererrorcount SET %s = ? WHERE prov_name = ?' % field, [value, self.get_id()]) + if 0 == r.rowcount: + myDB.action('REPLACE INTO providererrorcount (prov_name, %s) VALUES (?,?)' % field, + [self.get_id(), value]) + + @property + def last_error(self): + try: + return sorted(self.errors.errors, key=lambda x: x.error_time, reverse=True)[0].error_type + except (StandardError, Exception): + return None + + @property + def failure_count(self): + return self._failure_count + + @failure_count.setter + def failure_count(self, value): + changed_val = self._failure_count != value + self._failure_count = value + if changed_val: + self._save_error_value('failure_count', value) + + @property + def failure_time(self): + return self._failure_time + + @failure_time.setter + def failure_time(self, value): + if None is value or isinstance(value, datetime.datetime): + changed_val = self._failure_time != value + self._failure_time = value + if None is value: + v = value + else: + v = sbdatetime.totimestamp(value) + if changed_val: + self._save_error_value('failure_time', v) + + @property + def hit_limit_count(self): + return self._hit_limit_count + + @hit_limit_count.setter + def hit_limit_count(self, value): + changed_val = self._hit_limit_count != value + self._hit_limit_count = value + if changed_val: + self._save_error_value('hit_limit_count', value) + + @property + def hit_limit_time(self): + return self._hit_limit_time + + @hit_limit_time.setter + def hit_limit_time(self, value): + if None is value or isinstance(value, datetime.datetime): + changed_val = self._hit_limit_time != value + self._hit_limit_time = value + if None is value: + v = value + else: + v = sbdatetime.totimestamp(value) + if changed_val: + self._save_error_value('hit_limit_time', v) + + @property + def max_index(self): + return len(self.fail_times) + + @property + def hit_limit_wait(self): + return self._hit_limit_wait + + @hit_limit_wait.setter + def hit_limit_wait(self, value): + if isinstance(getattr(self, 'errors', None), ProviderErrorList) and isinstance(value, datetime.timedelta): + self.errors.add_error(ProviderError(error_type=ProviderErrorTypes.limit)) + changed_val = self._hit_limit_wait != value + self._hit_limit_wait = value + if changed_val: + if None is value: + self._save_error_value('hit_limit_wait', value) + elif isinstance(value, datetime.timedelta): + self._save_error_value('hit_limit_wait', value.total_seconds()) + + def fail_time_index(self, base_limit=2): + i = 
self.failure_count - base_limit
+        return (i, self.max_index)[i >= self.max_index]
+
+    def wait_time(self, fc):
+        return datetime.timedelta(hours=self.fail_times[fc][0], minutes=self.fail_times[fc][1])
+
+    @property
+    def get_next_try_time(self):
+        n = None
+        h = datetime.timedelta(seconds=0)
+        f = datetime.timedelta(seconds=0)
+        if isinstance(self.hit_limit_wait, datetime.timedelta) and isinstance(self.hit_limit_time, datetime.datetime):
+            h = self.hit_limit_time + self.hit_limit_wait - datetime.datetime.now()
+        if 3 <= self.failure_count and isinstance(self.failure_time, datetime.datetime):
+            fc = self.fail_time_index()
+            if datetime.datetime.now() - self.failure_time < self.wait_time(fc):
+                f = self.failure_time + self.wait_time(fc) - datetime.datetime.now()
+        if datetime.timedelta(seconds=0) < max((h, f)):
+            n = max((h, f))
+        return n
+
+    def retry_next(self):
+        if isinstance(self.hit_limit_wait, datetime.timedelta) and isinstance(self.hit_limit_time, datetime.datetime):
+            self.hit_limit_time = datetime.datetime.now() - self.hit_limit_wait
+        if 3 <= self.failure_count and isinstance(self.failure_time, datetime.datetime):
+            fc = self.fail_time_index()
+            if datetime.datetime.now() - self.failure_time < self.wait_time(fc):
+                self.failure_time = datetime.datetime.now() - self.wait_time(fc)
+
+    def should_skip(self, log_warning=True):
+        if isinstance(self.hit_limit_wait, datetime.timedelta) and isinstance(self.hit_limit_time, datetime.datetime):
+            time_left = self.hit_limit_time + self.hit_limit_wait - datetime.datetime.now()
+            if time_left > datetime.timedelta(seconds=0):
+                if log_warning:
+                    logger.log('Hit limit reached, waiting for %s' % time_left, logger.WARNING)
+                return True
+            else:
+                self.hit_limit_time = None
+                self.hit_limit_wait = None
+        if 3 <= self.failure_count:
+            if None is self.failure_time:
+                self.failure_time = datetime.datetime.now()
+            fc = self.fail_time_index()
+            if datetime.datetime.now() - self.failure_time < self.wait_time(fc):
+                if log_warning:
+                    time_left = self.wait_time(fc) - (datetime.datetime.now() - self.failure_time)
+                    logger.log('Failed %s times, skipping provider for %s' % (self.failure_count, time_left),
+                               logger.WARNING)
+                return True
+        return False
+
+    def inc_failure_count(self, *args, **kwargs):
+        error_type = ('error_type' in kwargs and kwargs['error_type'].error_type) or \
+            (isinstance(args, tuple) and isinstance(args[0], ProviderError) and args[0].error_type)
+        if not isinstance(self.failure_time, datetime.datetime) or \
+                error_type != self._last_error_type or \
+                datetime.datetime.now() - self.failure_time > datetime.timedelta(seconds=3):
+            self.failure_count += 1
+            self.failure_time = datetime.datetime.now()
+            self._last_error_type = error_type
+            self.errors.add_error(*args, **kwargs)
+        else:
+            logger.log('%s: Not logging same error within 3 seconds' % self.name, logger.DEBUG)
+
+    def getURL(self, *args, **kwargs):
+        data = None
+
+        # check for auth
+        if not self._authorised() or self.should_skip():
+            return data
+
+        kwargs['raise_exceptions'] = True
+        kwargs['raise_status_code'] = True
+
+        try:
+            data = helpers.getURL(*args, **kwargs)
+            if data:
+                if 0 != self.failure_count:
+                    logger.log('Unblocking provider: %s' % self.get_id(), logger.DEBUG)
+                self.failure_count = 0
+                self.failure_time = None
+            else:
+                self.inc_failure_count(ProviderError(error_type=ProviderErrorTypes.nodata))
+        except requests.exceptions.HTTPError as e:
+            self.inc_failure_count(ProviderError(error_type=ProviderErrorTypes.http, code=e.response.status_code))
+        except 
requests.exceptions.ConnectionError as e: + self.inc_failure_count(ProviderError(error_type=ProviderErrorTypes.connection)) + except requests.exceptions.ReadTimeout as e: + self.inc_failure_count(ProviderError(error_type=ProviderErrorTypes.timeout)) + except (requests.exceptions.Timeout, socket.timeout) as e: + self.inc_failure_count(ProviderError(error_type=ProviderErrorTypes.connection_timeout)) + except (StandardError, Exception) as e: + self.inc_failure_count(ProviderError(error_type=ProviderErrorTypes.unknown)) + + self.errors.save_list() + return data + def get_id(self): return GenericProvider.make_id(self.name) @@ -428,9 +760,13 @@ class GenericProvider: results = {} item_list = [] + if self.should_skip(): + return results searched_scene_season = None for ep_obj in episodes: + if self.should_skip(log_warning=False): + break # search cache for episode result cache_result = self.cache.searchCache(ep_obj, manual_search) if cache_result: @@ -457,6 +793,8 @@ class GenericProvider: for cur_param in search_params: item_list += self._search_provider(cur_param, search_mode=search_mode, epcount=len(episodes)) + if self.should_skip(): + break return self.finish_find_search_results(show, episodes, search_mode, manual_search, results, item_list) @@ -649,10 +987,11 @@ class GenericProvider: :param count: count of successfully processed items :param url: source url of item(s) """ - str1, thing, str3 = (('', '%s item' % mode.lower(), ''), (' usable', 'proper', ' found'))['Propers' == mode] - logger.log(u'%s %s in response from %s' % (('No' + str1, count)[0 < count], ( - '%s%s%s%s' % (('', 'freeleech ')[getattr(self, 'freeleech', False)], thing, maybe_plural(count), str3)), - re.sub('(\s)\s+', r'\1', url))) + if not self.should_skip(): + str1, thing, str3 = (('', '%s item' % mode.lower(), ''), (' usable', 'proper', ' found'))['Propers' == mode] + logger.log(u'%s %s in response from %s' % (('No' + str1, count)[0 < count], ( + '%s%s%s%s' % (('', 'freeleech ')[getattr(self, 'freeleech', False)], thing, maybe_plural(count), str3)), + re.sub('(\s)\s+', r'\1', url))) def check_auth_cookie(self): @@ -723,12 +1062,13 @@ class GenericProvider: return -class NZBProvider(object, GenericProvider): +class NZBProvider(GenericProvider): def __init__(self, name, supports_backlog=True, anime_only=False): GenericProvider.__init__(self, name, supports_backlog, anime_only) self.providerType = GenericProvider.NZB + self.has_limit = True def image_name(self): @@ -757,6 +1097,9 @@ class NZBProvider(object, GenericProvider): results = [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time']), self.show) for x in cache_results] + if self.should_skip(): + return results + index = 0 alt_search = ('nzbs_org' == self.get_id()) do_search_alt = False @@ -775,6 +1118,9 @@ class NZBProvider(object, GenericProvider): urls = [] while index < len(search_terms): + if self.should_skip(log_warning=False): + break + search_params = {'q': search_terms[index], 'maxage': sickbeard.BACKLOG_DAYS + 2} if alt_search: @@ -817,7 +1163,7 @@ class NZBProvider(object, GenericProvider): return self._search_provider(search_params=search_params, **kwargs) -class TorrentProvider(object, GenericProvider): +class TorrentProvider(GenericProvider): def __init__(self, name, supports_backlog=True, anime_only=False, cache_update_freq=None, update_freq=None): GenericProvider.__init__(self, name, supports_backlog, anime_only) @@ -1153,6 +1499,8 @@ class TorrentProvider(object, GenericProvider): :return: list of Proper objects """ results = 
[] + if self.should_skip(): + return results search_terms = getattr(self, 'proper_search_terms', ['proper', 'repack', 'real']) if not isinstance(search_terms, list): @@ -1164,9 +1512,14 @@ class TorrentProvider(object, GenericProvider): clean_term = re.compile(r'(?i)[^a-z1-9|.]+') for proper_term in search_terms: + if self.should_skip(log_warning=False): + break proper_check = re.compile(r'(?i)(?:%s)' % clean_term.sub('', proper_term)) for item in items: + if self.should_skip(log_warning=False): + break + title, url = self._title_and_url(item) if proper_check.search(title): results.append(classes.Proper(title, url, datetime.datetime.today(), diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py index 7c3cc1da..2e05c621 100755 --- a/sickbeard/providers/newznab.py +++ b/sickbeard/providers/newznab.py @@ -28,7 +28,7 @@ from math import ceil from sickbeard.sbdatetime import sbdatetime from . import generic from sickbeard import helpers, logger, tvcache, classes, db -from sickbeard.common import neededQualities, Quality +from sickbeard.common import neededQualities, Quality, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED from sickbeard.exceptions import AuthException, MultipleShowObjectsException from sickbeard.indexers.indexer_config import * from io import BytesIO @@ -187,13 +187,13 @@ class NewznabProvider(generic.NZBProvider): if datetime.date.today() - self._caps_need_apikey['date'] > datetime.timedelta(days=30) or \ not self._caps_need_apikey['need']: self._caps_need_apikey['need'] = False - data = self.get_url('%s/api?t=caps' % self.url) + data = self.getURL('%s/api?t=caps' % self.url) if data: xml_caps = helpers.parse_xml(data) if xml_caps is None or not hasattr(xml_caps, 'tag') or xml_caps.tag == 'error' or xml_caps.tag != 'caps': api_key = self.maybe_apikey() if isinstance(api_key, basestring) and api_key not in ('0', ''): - data = self.get_url('%s/api?t=caps&apikey=%s' % (self.url, api_key)) + data = self.getURL('%s/api?t=caps&apikey=%s' % (self.url, api_key)) if data: xml_caps = helpers.parse_xml(data) if xml_caps and hasattr(xml_caps, 'tag') and xml_caps.tag == 'caps': @@ -291,6 +291,11 @@ class NewznabProvider(generic.NZBProvider): return [x for x in cats if x['id'] not in self.excludes] return ','.join(set(cats.split(',')) - self.excludes) + def _check_auth(self, is_required=None): + if self.should_skip(): + return False + return super(NewznabProvider, self)._check_auth(is_required) + def check_auth_from_data(self, data): if data is None or not hasattr(data, 'tag'): @@ -306,6 +311,24 @@ class NewznabProvider(generic.NZBProvider): raise AuthException('Your account on %s has been suspended, contact the admin.' % self.name) elif '102' == code: raise AuthException('Your account isn\'t allowed to use the API on %s, contact the admin.' 
% self.name) + elif '500' == code: + self.hit_limit_time = datetime.datetime.now() + self.hit_limit_count += 1 + retry_time = re.search(r'Retry in (\d+)\W+([a-z]+)', description, flags=re.I) + if retry_time: + if retry_time.group(2) in ('s', 'sec', 'secs', 'seconds', 'second'): + self.hit_limit_wait = datetime.timedelta(seconds=helpers.tryInt(retry_time.group(1))) + elif retry_time.group(2) in ('m', 'min', 'mins', 'minutes', 'minute'): + self.hit_limit_wait = datetime.timedelta(minutes=helpers.tryInt(retry_time.group(1))) + elif retry_time.group(2) in ('h', 'hr', 'hrs', 'hours', 'hour'): + self.hit_limit_wait = datetime.timedelta(hours=helpers.tryInt(retry_time.group(1))) + elif retry_time.group(2) in ('d', 'days', 'day'): + self.hit_limit_wait = datetime.timedelta(days=helpers.tryInt(retry_time.group(1))) + if not self.hit_limit_wait: + fc = self.fail_time_index(base_limit=0) + self.hit_limit_wait = self.wait_time(fc) + logger.log('Request limit reached. Waiting for %s until next retry. Message: %s' % + (self.hit_limit_wait, description), logger.WARNING) elif '910' == code: logger.log( '%s %s, please check with provider.' % @@ -316,6 +339,7 @@ class NewznabProvider(generic.NZBProvider): logger.WARNING) return False + self.hit_limit_count = 0 return True def config_str(self): @@ -530,15 +554,20 @@ class NewznabProvider(generic.NZBProvider): (hits_per_page * 100 // hits_per_page * 2, hits_per_page * int(ceil(rel_limit * 1.5)))[season_search]) def find_search_results(self, show, episodes, search_mode, manual_search=False, try_other_searches=False, **kwargs): - self._check_auth() + check = self._check_auth() + results = {} + if (isinstance(check, bool) and not check) or self.should_skip(): + return results + self.show = show - results = {} item_list = [] name_space = {} searched_scene_season = s_mode = None for ep_obj in episodes: + if self.should_skip(log_warning=False): + break # skip if season already searched if (s_mode or 'sponly' == search_mode) and 1 < len(episodes) \ and searched_scene_season == ep_obj.scene_season: @@ -577,6 +606,8 @@ class NewznabProvider(generic.NZBProvider): try_all_searches=try_other_searches) item_list += items name_space.update(n_space) + if self.should_skip(): + break return self.finish_find_search_results( show, episodes, search_mode, manual_search, results, item_list, name_space=name_space) @@ -617,7 +648,13 @@ class NewznabProvider(generic.NZBProvider): def _search_provider(self, search_params, needed=neededQualities(need_all=True), max_items=400, try_all_searches=False, **kwargs): + results, n_spaces = [], {} + if self.should_skip(): + return results, n_spaces + api_key = self._check_auth() + if isinstance(api_key, bool) and not api_key: + return results, n_spaces base_params = {'t': 'tvsearch', 'maxage': sickbeard.USENET_RETENTION or 0, @@ -644,8 +681,13 @@ class NewznabProvider(generic.NZBProvider): cat_webdl = self.cats.get(NewznabConstants.CAT_WEBDL) for mode in search_params.keys(): + if self.should_skip(log_warning=False): + break for i, params in enumerate(search_params[mode]): + if self.should_skip(log_warning=False): + break + # category ids cat = [] if 'Episode' == mode or 'Season' == mode: @@ -697,7 +739,10 @@ class NewznabProvider(generic.NZBProvider): search_url = '%sapi?%s' % (self.url, urllib.urlencode(request_params)) i and time.sleep(2.1) - data = helpers.getURL(search_url) + data = self.getURL(search_url) + + if self.should_skip(): + break if not data: logger.log('No Data returned from %s' % self.name, logger.WARNING) @@ -794,6 +839,10 @@ 
class NewznabProvider(generic.NZBProvider): results = [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time']), self.show) for x in cache_results] + check = self._check_auth() + if isinstance(check, bool) and not check: + return results + index = 0 alt_search = ('nzbs_org' == self.get_id()) do_search_alt = False @@ -812,6 +861,9 @@ class NewznabProvider(generic.NZBProvider): urls = [] while index < len(search_terms): + if self.should_skip(log_warning=False): + break + search_params = {'q': search_terms[index], 'maxage': sickbeard.BACKLOG_DAYS + 2} if alt_search: @@ -885,8 +937,11 @@ class NewznabCache(tvcache.TVCache): if 4489 != sickbeard.RECENTSEARCH_FREQUENCY or self.should_update(): n_spaces = {} try: - self._checkAuth() - (items, n_spaces) = self.provider.cache_data(needed=needed) + check = self._checkAuth() + if isinstance(check, bool) and not check: + items = None + else: + (items, n_spaces) = self.provider.cache_data(needed=needed) except (StandardError, Exception): items = None diff --git a/sickbeard/providers/omgwtfnzbs.py b/sickbeard/providers/omgwtfnzbs.py index e2bbbc0a..b96ef43b 100644 --- a/sickbeard/providers/omgwtfnzbs.py +++ b/sickbeard/providers/omgwtfnzbs.py @@ -99,7 +99,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider): def get_data(self, url): result = None if url and False is self._init_api(): - data = self.get_url(url, timeout=90) + data = self.getURL(url, timeout=90) if data: if re.search('(?i)limit.*?reached', data): logger.log('Daily Nzb Download limit reached', logger.DEBUG) @@ -138,6 +138,9 @@ class OmgwtfnzbsProvider(generic.NZBProvider): def cache_data(self, needed=neededQualities(need_all=True), **kwargs): + if self.should_skip(): + return [] + api_key = self._init_api() if False is api_key: return self.search_html(needed=needed, **kwargs) @@ -182,7 +185,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider): search_url = self.urls['search'] % urllib.urlencode(params) - data_json = self.get_url(search_url, json=True) + data_json = self.getURL(search_url, json=True) if data_json and self._check_auth_from_data(data_json, is_xml=False): for item in data_json: if 'release' in item and 'getnzb' in item: @@ -210,7 +213,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider): 'cat': 'cat=(?:%s)' % '|'.join(cats)}.items()) mode = ('search', 'cache')['' == search] search_url = self.urls[mode + '_html'] % search - html = self.get_url(search_url) + html = self.getURL(search_url) cnt = len(results) try: if not html: @@ -254,6 +257,8 @@ class OmgwtfnzbsProvider(generic.NZBProvider): search_terms = ['.PROPER.', '.REPACK.', '.REAL.'] results = [] + if self.should_skip(): + return results for term in search_terms: for item in self._search_provider(term, search_mode='Propers', retention=4): @@ -272,6 +277,9 @@ class OmgwtfnzbsProvider(generic.NZBProvider): def _init_api(self): + if self.should_skip(): + return None + try: api_key = self._check_auth() if not api_key.startswith('cookie:'): diff --git a/sickbeard/search.py b/sickbeard/search.py index f6cc0d85..a6f32bd1 100644 --- a/sickbeard/search.py +++ b/sickbeard/search.py @@ -470,6 +470,11 @@ def search_for_needed_episodes(episodes): found_results[cur_ep] = best_result + try: + cur_provider.save_list() + except (StandardError, Exception): + pass + threading.currentThread().name = orig_thread_name if not len(providers): diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py index 04dc2cb6..41bb3445 100644 --- a/sickbeard/webserve.py +++ b/sickbeard/webserve.py @@ -4531,11 +4531,27 @@ class 
ManageSearches(Manage): t.recent_search_status = sickbeard.searchQueueScheduler.action.is_recentsearch_in_progress() t.find_propers_status = sickbeard.searchQueueScheduler.action.is_propersearch_in_progress() t.queue_length = sickbeard.searchQueueScheduler.action.queue_length() + t.provider_error_stats = [{'name': p.name, 'prov_id': p.get_id(), 'errors': p.errors.errors_sorted, + 'hit_limit_time': p.hit_limit_time, 'failure_time': p.failure_time, + 'last_error': p.last_error, + 'next_try': p.get_next_try_time, 'has_limit': getattr(p, 'has_limit', False)} + for p in sickbeard.providerList + sickbeard.newznabProviderList] + t.provider_errors = 0 < len([p for p in t.provider_error_stats if len(p['errors'])]) t.submenu = self.ManageMenu('Search') return t.respond() + def retryProvider(self, provider=None, *args, **kwargs): + if not provider: + return + prov = [p for p in sickbeard.providerList + sickbeard.newznabProviderList if p.get_id() == provider] + if not prov: + return + prov[0].retry_next() + time.sleep(3) + return + def forceVersionCheck(self, *args, **kwargs): # force a check to see if there is a new version if sickbeard.versionCheckScheduler.action.check_for_new_version(force=True): From 0ead7771de9ccc8dff733c333da44c38dd1571da Mon Sep 17 00:00:00 2001 From: JackDandy Date: Mon, 15 Jan 2018 17:54:36 +0000 Subject: [PATCH 2/2] Change improve the manage searches error stats UI and backend functions. Add the improved error handling to torrent providers. Change cache_db to always include db upgrade statements when running SG for the first time. Change split newznab limit logic into a reusable function, hit_limit_update() and use in BTN prov. Change tweak CSS to make things a little neater with button spacings. Add the actual time when current limit will expire to the UI. Change instead of terminology "errors", use "failures". Change improve BTN error handling. Change ensure provider name is output to log at times where it wasn't. Change ensure failed request URLs and POST params are output to log. Add time of last failure + type to should_skip log message. Change code DRY and simplification for improved readability. Change occurrences of "error" to "fail" for consistency. Add tmr limit handling to omg and change log_failure_url level to warning. Change log the failed URL when an API hit limit is reached. Change "hit" to the more universally generic "tmr" - Too Many Requests. 
Change Db columns containing "hit_" are renamed "tmr_" --- CHANGES.md | 3 + gui/slick/css/dark.css | 54 ++ gui/slick/css/light.css | 60 ++- gui/slick/css/style.css | 106 +++- gui/slick/interfaces/default/inc_bottom.tmpl | 10 +- .../default/manage_manageSearches.tmpl | 303 ++++++----- gui/slick/js/manageSearches.js | 46 +- sickbeard/databases/cache_db.py | 141 +++--- sickbeard/db.py | 20 + sickbeard/providers/alpharatio.py | 2 + sickbeard/providers/anizb.py | 2 +- sickbeard/providers/beyondhd.py | 2 + sickbeard/providers/bithdtv.py | 2 + sickbeard/providers/bitmetv.py | 2 + sickbeard/providers/blutopia.py | 2 + sickbeard/providers/btn.py | 56 ++- sickbeard/providers/btscene.py | 4 +- sickbeard/providers/dh.py | 2 + sickbeard/providers/ettv.py | 5 + sickbeard/providers/fano.py | 2 + sickbeard/providers/filelist.py | 2 + sickbeard/providers/funfile.py | 2 + sickbeard/providers/generic.py | 472 +++++++++++------- sickbeard/providers/gftracker.py | 2 + sickbeard/providers/grabtheinfo.py | 2 + sickbeard/providers/hd4free.py | 2 + sickbeard/providers/hdbits.py | 6 +- sickbeard/providers/hdspace.py | 2 + sickbeard/providers/hdtorrents.py | 2 + sickbeard/providers/iptorrents.py | 2 + sickbeard/providers/limetorrents.py | 2 + sickbeard/providers/magnetdl.py | 2 + sickbeard/providers/morethan.py | 3 + sickbeard/providers/ncore.py | 2 + sickbeard/providers/nebulance.py | 6 +- sickbeard/providers/newznab.py | 43 +- sickbeard/providers/nyaa.py | 2 + sickbeard/providers/omgwtfnzbs.py | 17 +- sickbeard/providers/pisexy.py | 2 + sickbeard/providers/potuk.py | 5 + sickbeard/providers/pretome.py | 3 +- sickbeard/providers/privatehd.py | 2 + sickbeard/providers/ptf.py | 5 + sickbeard/providers/rarbg.py | 6 +- sickbeard/providers/revtt.py | 2 + sickbeard/providers/rsstorrent.py | 8 +- sickbeard/providers/scenehd.py | 2 + sickbeard/providers/scenetime.py | 2 + sickbeard/providers/shazbat.py | 11 +- sickbeard/providers/skytorrents.py | 2 + sickbeard/providers/speedcd.py | 2 + sickbeard/providers/thepiratebay.py | 4 +- sickbeard/providers/tokyotoshokan.py | 5 +- sickbeard/providers/torlock.py | 2 + sickbeard/providers/torrentbytes.py | 2 + sickbeard/providers/torrentday.py | 2 + sickbeard/providers/torrenting.py | 2 + sickbeard/providers/torrentleech.py | 2 + sickbeard/providers/torrentz2.py | 2 + sickbeard/providers/tvchaosuk.py | 2 + sickbeard/providers/wop.py | 2 + sickbeard/providers/zooqle.py | 2 + sickbeard/rssfeeds.py | 58 +-- sickbeard/search.py | 4 +- sickbeard/tvcache.py | 2 +- sickbeard/webserve.py | 12 +- 66 files changed, 1003 insertions(+), 547 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 46b48b85..9cecde1b 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -4,6 +4,9 @@ * Change improve media process to parse anime format 'Show Name 123 - 001 - Ep 1 name' * Add free space stat (if obtainable) of parent folder(s) to footer * Add option "Display disk free" to general config/interface page (default enabled) +* Add a provider error table to page Manage/Media Search +* Add failure handling, skip provider for x hour(s) depending on count of failures +* Add detection of Too Many Requests (Supporting providers UC and BTN) [develop changelog] diff --git a/gui/slick/css/dark.css b/gui/slick/css/dark.css index 0f7d7eed..d3304100 100644 --- a/gui/slick/css/dark.css +++ b/gui/slick/css/dark.css @@ -762,6 +762,60 @@ a.whitelink{ } +/* TABLE BACKGROUND color */ +.provider-failures.hover-highlight td:before, +.provider-failures.focus-highlight td:before{ + background:#222 +} + +/* ODD ZEBRA STRIPE color (needs 
zebra widget) */ +.provider-failures.hover-highlight .odd td:before, +.provider-failures.hover-highlight .odd th:before, +.provider-failures.focus-highlight .odd td:before, +.provider-failures.focus-highlight .odd th:before{ + background:#333 +} +/* EVEN ZEBRA STRIPE color (needs zebra widget) */ +.provider-failures.hover-highlight .even td:before, +.provider-failures.hover-highlight .even th:before, +.provider-failures.focus-highlight .even td:before, +.provider-failures.focus-highlight .even th:before{ + background-color:#2e2e2e +} + +/* HOVER ROW highlight colors */ +.provider-failures.hover-highlight tbody > tr:hover > td, /* override tablesorter theme row hover */ +.provider-failures.hover-highlight tbody > tr.odd:hover > td, +.provider-failures.hover-highlight tbody > tr.even:hover > td{ + background-color:#282828 +} +/* HOVER COLUMN highlight colors */ +.provider-failures.hover-highlight tbody tr th:hover::after, +.provider-failures.hover-highlight tbody tr td:hover::after{ + background-color:#282828 +} + +/* FOCUS ROW highlight color (touch devices) */ +.provider-failures.focus-highlight td:focus::before, +.provider-failures.focus-highlight th:focus::before{ + background-color:#181818 +} +/* FOCUS COLUMN highlight color (touch devices) */ +.provider-failures.focus-highlight td:focus::after, +.provider-failures.focus-highlight th:focus::after{ + background-color:#181818 +} +/* FOCUS CELL highlight color */ +.provider-failures.focus-highlight th:focus, +.provider-failures.focus-highlight td:focus, +.provider-failures.focus-highlight .odd th:focus, +.provider-failures.focus-highlight .odd td:focus, +.provider-failures.focus-highlight .even th:focus, +.provider-failures.focus-highlight .even td:focus{ + background-color:#181818; + color:#ddd +} + /* ======================================================================= 404.tmpl ========================================================================== */ diff --git a/gui/slick/css/light.css b/gui/slick/css/light.css index f060f85f..da5e8700 100644 --- a/gui/slick/css/light.css +++ b/gui/slick/css/light.css @@ -742,6 +742,60 @@ a.whitelink{ color:#000 } +/* TABLE BACKGROUND color */ +.provider-failures.hover-highlight td:before, +.provider-failures.focus-highlight td:before{ + background:#fff +} + +/* ODD ZEBRA STRIPE color (needs zebra widget) */ +.provider-failures.hover-highlight .odd th:before, +.provider-failures.hover-highlight .odd td:before, +.provider-failures.focus-highlight .odd th:before, +.provider-failures.focus-highlight .odd td:before{ + background:#f5f1e4 +} +/* EVEN ZEBRA STRIPE color (needs zebra widget) */ +.provider-failures.hover-highlight .even th:before, +.provider-failures.hover-highlight .even td:before, +.provider-failures.focus-highlight .even th:before, +.provider-failures.focus-highlight .even td:before{ + background-color:#dfdacf; +} + +/* HOVER ROW highlight colors */ +.provider-failures.hover-highlight tbody > tr:hover > td, /* override tablesorter theme row hover */ +.provider-failures.hover-highlight tbody > tr.odd:hover > td, +.provider-failures.hover-highlight tbody > tr.even:hover > td{ + background-color:#f4f3c2 +} +/* HOVER COLUMN highlight colors */ +.provider-failures.hover-highlight tbody tr th:hover::after, +.provider-failures.hover-highlight tbody tr td:hover::after{ + background-color:#f4f3c2 +} + +/* FOCUS ROW highlight color (touch devices) */ +.provider-failures.focus-highlight th:focus::before, +.provider-failures.focus-highlight td:focus::before{ + background-color:#dfdead +} +/* FOCUS 
COLUMN highlight color (touch devices) */ +.provider-failures.focus-highlight th:focus::after, +.provider-failures.focus-highlight td:focus::after{ + background-color:#dfdead +} +/* FOCUS CELL highlight color */ +.provider-failures.focus-highlight th:focus, +.provider-failures.focus-highlight td:focus, +.provider-failures.focus-highlight .odd th:focus, +.provider-failures.focus-highlight .odd td:focus, +.provider-failures.focus-highlight .even th:focus, +.provider-failures.focus-highlight .even td:focus{ + background-color:#dfdead; + color:#222 +} + /* ======================================================================= 404.tmpl ========================================================================== */ @@ -1381,8 +1435,8 @@ tablesorter.css } thead.tablesorter-stickyHeader{ - border-top:2px solid #fff; - border-bottom:2px solid #fff + border-top:2px solid #ddd; + border-bottom:2px solid #ddd } /* Zebra Widget - row alternating colors */ @@ -1404,7 +1458,7 @@ thead.tablesorter-stickyHeader{ } .tablesorter tfoot tr{ - color:#fff; + color:#ddd; text-align:center; text-shadow:-1px -1px 0 rgba(0, 0, 0, 0.3); background-color:#333; diff --git a/gui/slick/css/style.css b/gui/slick/css/style.css index c970884d..0ab227d7 100644 --- a/gui/slick/css/style.css +++ b/gui/slick/css/style.css @@ -3191,6 +3191,85 @@ input.get_less_eps{ display:none } +#media-search .section{ + padding-bottom:10px +} +#media-search .btn{ + margin:0 6px 0 0; + min-width:70px +} +#media-search .btn.shows-more, +#media-search .btn.shows-less{ + margin:6px 6px 6px 0; +} +#media-search .btn.provider-retry{ + margin:6px 0 6px 4px; +} +.tablesorter.provider-failures{width:auto;clear:both;margin-bottom:10px} +.tablesorter.provider-failures > tbody > tr.tablesorter-childRow td{display:none} +.tablesorter.provider-failures.tablesorter > tbody > tr{background-color:transparent} + +.provider-failures.hover-highlight th:hover::after, +.provider-failures.hover-highlight td:hover::after, +.provider-failures.focus-highlight th:focus::after, +.provider-failures.focus-highlight td:focus::after{ + content:''; + position:absolute; + width:100%; + height:999em; + left:0; + top:-555em; + z-index:-1 +} +.provider-failures.focus-highlight th:focus::before, +.provider-failures.focus-highlight td:focus::before{ + content:''; + position:absolute; + width:999em; + height:100%; + left:-555em; + top:0; + z-index:-2 +} +/* required styles */ +.provider-failures.hover-highlight, +.provider-failures.focus-highlight{ + overflow:hidden +} +.provider-failures.hover-highlight th, +.provider-failures.hover-highlight td, +.provider-failures.focus-highlight th, +.provider-failures.focus-highlight td{ + position:relative; + outline:0 +} +/* override the tablesorter theme styling */ +.provider-failures.hover-highlight, +.provider-failures.hover-highlight tbody > tr > td, +.provider-failures.focus-highlight, +.provider-failures.focus-highlight tbody > tr > td, +/* override zebra styling */ +.provider-failures.hover-highlight tbody tr.even > th, +.provider-failures.hover-highlight tbody tr.even > td, +.provider-failures.hover-highlight tbody tr.odd > th, +.provider-failures.hover-highlight tbody tr.odd > td, +.provider-failures.focus-highlight tbody tr.even > th, +.provider-failures.focus-highlight tbody tr.even > td, +.provider-failures.focus-highlight tbody tr.odd > th, +.provider-failures.focus-highlight tbody tr.odd > td{ + background:transparent +} +/* table background positioned under the highlight */ +.provider-failures.hover-highlight td:before, 
+.provider-failures.focus-highlight td:before{ + content:''; + position:absolute; + width:100%; + height:100%; + left:0; + top:0; + z-index:-3 +} /* ======================================================================= 404.tmpl ========================================================================== */ @@ -4265,11 +4344,9 @@ tablesorter.css #display-show .tablesorter{ width:100%; margin-right:auto; - margin-left:auto; - color:#000; + margin-left:auto /* text-align:left;*/ - background-color:#ddd/*; - border-spacing:0*/ +/* border-spacing:0*/ } #display-show .tablesorter{ @@ -4317,20 +4394,6 @@ tablesorter.css cursor:default } -thead.tablesorter-stickyHeader{ - border-top:2px solid #ddd; - border-bottom:2px solid #ddd -} - -/* Zebra Widget - row alternating colors */ -.tablesorter tr.odd, .sickbeardTable tr.odd{ - background-color:#f5f1e4 -} - -.tablesorter tr.even, .sickbeardTable tr.even{ - background-color:#dfdacf -} - /* filter widget */ .tablesorter .filtered{ display:none @@ -4346,9 +4409,7 @@ thead.tablesorter-stickyHeader{ .tablesorter tr.tablesorter-filter-row, .tablesorter tr.tablesorter-filter-row td{ - text-align:center; - background:#eee; - border-bottom:1px solid #ddd + text-align:center } /* optional disabled input styling */ @@ -4362,10 +4423,7 @@ thead.tablesorter-stickyHeader{ }*/ .tablesorter tfoot tr{ - color:#ddd; text-align:center; - text-shadow:-1px -1px 0 rgba(0, 0, 0, 0.3); - background-color:#333; border-collapse:collapse } diff --git a/gui/slick/interfaces/default/inc_bottom.tmpl b/gui/slick/interfaces/default/inc_bottom.tmpl index dffd3328..1f366b6a 100644 --- a/gui/slick/interfaces/default/inc_bottom.tmpl +++ b/gui/slick/interfaces/default/inc_bottom.tmpl @@ -65,11 +65,7 @@ except AttributeError: diskfree, min_output = df() if min_output: avail = ', '.join(['%s %s' % (drive, free) for (drive, free) in diskfree]) -%> - +%>#slurp# ## $shows_total shows ($shows_active active) | $ep_downloaded<%= @@ -87,6 +83,10 @@ if min_output:
free space  $avail #else
+ diff --git a/gui/slick/interfaces/default/manage_manageSearches.tmpl b/gui/slick/interfaces/default/manage_manageSearches.tmpl index 701d4eb4..f9dc39e9 100644 --- a/gui/slick/interfaces/default/manage_manageSearches.tmpl +++ b/gui/slick/interfaces/default/manage_manageSearches.tmpl @@ -8,169 +8,230 @@ ## #import os.path #include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_top.tmpl') - + -
+ +
Free space stats for volume/path
- - #if $prov['has_limit'] - - #end if + + + + +
+

Provider Failures:

+#if not $provider_fails +

No current failures. Failure stats display here when appropriate.

+#else +

Some providers can often be down for periods of time; SickGear will back off, then retry connecting at a later time

+ + #for $prov in $provider_fail_stats + #if $len($prov['fails']) + $prov['name'] + #if $prov['active'] + #if $prov['next_try'] + #set nt = $str($prov['next_try']).split('.', 2) + ... is blocked until $sbdatetime.sbdatetime.sbftime($sbdatetime.sbdatetime.now() + $prov['next_try'], markup=True) (in $nt[0]) + #end if + #else + ... is not enabled + #end if +
Dayhttpconnectionconnection timeouttimeoutunknownno dataHit Limit
+ + + + + + + + #if $prov['has_limit'] + + #end if + - #set $row = 0 - #for $error in $prov['errors'] - - - - - - - - - #if $prov['has_limit'] - - #end if - - #end for + #set $day = [] + #for $fail in $prov['fails'] + #set $child = True + #if $fail['date'] not in $day + #set $day += [$fail['date']] + #set $child = False + #end if + #slurp# + + #if $fail['multirow'] + #if not $child + + #else + + #end if + #else + + #end if + #set $blank = '-' + #set $title=None + #if $fail['http']['count'] + #set $title=$fail['http']['code'] + #end if + + + + + #if $prov['has_limit'] + + #end if + + #end for
+ <th>period of 1hr</th>
+ <th>server/timeout</th>
+ <th>network</th>
+ <th>no data</th>
+ <th>other</th>
+ <th>hit limit</th>
- <td>$sbdatetime.sbdatetime.sbfdate($error['date'])</td>
- <td>$error['http'].get('count', 0)</td>
- <td>$error['connection'].get('count', 0)</td>
- <td>$error['connection_timeout'].get('count', 0)</td>
- <td>$error['timeout'].get('count', 0)</td>
- <td>$error['unknown'].get('count', 0)</td>
- <td>$error['nodata'].get('count', 0)</td>
- <td>$error.get('limit', {}).get('count', 0)</td>
+ <td>$sbdatetime.sbdatetime.sbfdate($fail['date_time'])</td>
+ <td>$sbdatetime.sbdatetime.sbftime($fail['date_time'], markup=True)</td>
+ <td>$sbdatetime.sbdatetime.sbfdatetime($fail['date_time'], markup=True)</td>
+ <td>#if $fail['http']['count']#$fail['http']['count']#else#$blank#end if# / #echo $fail['timeout'].get('count', 0) or $blank#</td>
+ <td>#echo ($fail['connection'].get('count', 0) + $fail['connection_timeout'].get('count', 0)) or $blank#</td>
+ <td>#echo $fail['nodata'].get('count', 0) or $blank#</td>
+ <td>#echo $fail['other'].get('count', 0) or $blank#</td>
+ <td>#echo $fail.get('limit', {}).get('count', 0) or $blank#</td>
- #end if -#end for -
-

+ #end if + #end for #end if -

Search Queue:

+
+ + + +
+

Search Queues:

+ #if $queue_length['backlog'] or $queue_length['manual'] or $queue_length['failed']
#end if -
-Recent: $queue_length['recent'] item$sickbeard.helpers.maybe_plural($queue_length['recent'])

-Proper: $queue_length['proper'] item$sickbeard.helpers.maybe_plural($queue_length['proper'])

-Backlog: $len($queue_length['backlog']) item$sickbeard.helpers.maybe_plural($len($queue_length['backlog'])) +
+ Recent: $queue_length['recent'] item$sickbeard.helpers.maybe_plural($queue_length['recent']) +
+ + +
+ Proper: $queue_length['proper'] item$sickbeard.helpers.maybe_plural($queue_length['proper']) +
+ + +
+ Backlog: $len($queue_length['backlog']) item$sickbeard.helpers.maybe_plural($len($queue_length['backlog'])) #if $queue_length['backlog'] -
- - - - #set $row = 0 - #for $cur_item in $queue_length['backlog']: - #set $search_type = 'On Demand' - #if $cur_item['standard_backlog']: - #if $cur_item['forced']: - #set $search_type = 'Forced' - #else - #set $search_type = 'Scheduled' - #end if - #if $cur_item['torrent_only']: - #set $search_type += ', Torrent Only' - #end if - #if $cur_item['limited_backlog']: - #set $search_type += ' (Limited)' - #else - #set $search_type += ' (Full)' - #end if - #end if - - - - - #end for - - -#else -
+
+ + + + #set $row = 0 + #for $cur_item in $queue_length['backlog']: + #set $search_type = 'On Demand' + #if $cur_item['standard_backlog']: + #if $cur_item['forced']: + #set $search_type = 'Forced' + #else + #set $search_type = 'Scheduled' + #end if + #if $cur_item['torrent_only']: + #set $search_type += ', Torrent Only' + #end if + #if $cur_item['limited_backlog']: + #set $search_type += ' (Limited)' + #else + #set $search_type += ' (Full)' + #end if + #end if + + + + + #end for + + #end if -
-Manual: $len($queue_length['manual']) item$sickbeard.helpers.maybe_plural($len($queue_length['manual'])) +
+ + +
+ Manual: $len($queue_length['manual']) item$sickbeard.helpers.maybe_plural($len($queue_length['manual'])) #if $queue_length['manual'] -
- - - - #set $row = 0 - #for $cur_item in $queue_length['manual']: - - - - #end for - - -#else -
+
+ + + + #set $row = 0 + #for $cur_item in $queue_length['manual']: + + + + #end for + + #end if -
-Failed: $len($queue_length['failed']) item$sickbeard.helpers.maybe_plural($len($queue_length['failed'])) +
+ + +
+ Failed: $len($queue_length['failed']) item$sickbeard.helpers.maybe_plural($len($queue_length['failed'])) #if $queue_length['failed'] -
- - - - #set $row = 0 - #for $cur_item in $queue_length['failed']: - - - - #end for - - -#else -
+
+ + + + #set $row = 0 + #for $cur_item in $queue_length['failed']: + + + + #end for + + #end if +
+ #include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_bottom.tmpl') diff --git a/gui/slick/js/manageSearches.js b/gui/slick/js/manageSearches.js index e4457b87..266dc041 100644 --- a/gui/slick/js/manageSearches.js +++ b/gui/slick/js/manageSearches.js @@ -1,36 +1,36 @@ -$(document).ready(function() { +$(function(){ $('#recentsearch,#propersearch').click(function(){ $(this).addClass('disabled'); - }) + }); $('#forcebacklog,#forcefullbacklog').click(function(){ $('#forcebacklog,#forcefullbacklog').addClass('disabled'); $('#pausebacklog').removeClass('disabled'); - }) + }); $('#pausebacklog').click(function(){ $(this).addClass('disabled'); - }) + }); $('.show-all-less').click(function(){ $(this).nextAll('table').hide(); $(this).nextAll('input.shows-more').show(); $(this).nextAll('input.shows-less').hide(); - }) + }); $('.show-all-more').click(function(){ $(this).nextAll('table').show(); $(this).nextAll('input.shows-more').hide(); $(this).nextAll('input.shows-less').show(); - }) + }); $('.shows-less').click(function(){ $(this).nextAll('table:first').hide(); $(this).hide(); $(this).prevAll('input:first').show(); - }) + }); $('.shows-more').click(function(){ $(this).nextAll('table:first').show(); $(this).hide(); $(this).nextAll('input:first').show(); - }) - $('.prov-retry').click(function () { + }); + $('.provider-retry').click(function () { $(this).addClass('disabled'); var match = $(this).attr('id').match(/^(.+)-btn-retry$/); $.ajax({ @@ -38,7 +38,29 @@ $(document).ready(function() { type: 'GET', complete: function () { window.location.reload(true); - } + } }); - }) -}); \ No newline at end of file + }); + + $('.provider-failures').tablesorter({widgets : ['zebra'], + headers : { 0:{sorter:!1}, 1:{sorter:!1}, 2:{sorter:!1}, 3:{sorter:!1}, 4:{sorter:!1}, 5:{sorter:!1} } + }); + + $('.provider-fail-parent-toggle').click(function(){ + $(this).closest('tr').nextUntil('tr:not(.tablesorter-childRow)').find('td').toggle(); + return !1; + }); + + // Make table cell focusable + // http://css-tricks.com/simple-css-row-column-highlighting/ + var focus$ = $('.focus-highlight'); + if (focus$.length){ + focus$.find('td, th') + .attr('tabindex', '1') + // add touch device support + .on('touchstart', function(){ + $(this).focus(); + }); + } + +}); diff --git a/sickbeard/databases/cache_db.py b/sickbeard/databases/cache_db.py index 4f769a62..f9bd2863 100644 --- a/sickbeard/databases/cache_db.py +++ b/sickbeard/databases/cache_db.py @@ -17,6 +17,8 @@ # along with SickGear. If not, see . from sickbeard import db +from collections import OrderedDict +import re MIN_DB_VERSION = 1 MAX_DB_VERSION = 4 @@ -24,107 +26,80 @@ MAX_DB_VERSION = 4 # Add new migrations at the bottom of the list; subclass the previous migration. 
 class InitialSchema(db.SchemaUpgrade):
+    def __init__(self, connection):
+        super(InitialSchema, self).__init__(connection)
+
+        self.queries = OrderedDict([
+            ('base', [
+                'CREATE TABLE lastUpdate(provider TEXT, time NUMERIC)',
+                'CREATE TABLE lastSearch(provider TEXT, time NUMERIC)',
+                'CREATE TABLE db_version(db_version INTEGER)',
+                'INSERT INTO db_version(db_version) VALUES (1)',
+                'CREATE TABLE network_timezones(network_name TEXT PRIMARY KEY, timezone TEXT)'
+            ]),
+            ('consolidate_providers', [
+                'CREATE TABLE provider_cache(provider TEXT, name TEXT, season NUMERIC, episodes TEXT,'
+                ' indexerid NUMERIC, url TEXT UNIQUE, time NUMERIC, quality TEXT, release_group TEXT, version NUMERIC)',
+                'CREATE TABLE network_conversions('
+                'tvdb_network TEXT PRIMARY KEY, tvrage_network TEXT, tvrage_country TEXT)',
+                'CREATE INDEX tvrage_idx ON network_conversions(tvrage_network, tvrage_country)'
+            ]),
+            ('add_backlogparts', [
+                'CREATE TABLE backlogparts('
+                'part NUMERIC NOT NULL, indexer NUMERIC NOT NULL, indexerid NUMERIC NOT NULL)',
+                'CREATE TABLE lastrecentsearch(name TEXT PRIMARY KEY NOT NULL, datetime NUMERIC NOT NULL)'
+            ]),
+            ('add_provider_fails', [
+                'CREATE TABLE provider_fails(prov_name TEXT, fail_type INTEGER, fail_code INTEGER, fail_time NUMERIC)',
+                'CREATE INDEX idx_prov_name_error ON provider_fails (prov_name)',
+                'CREATE UNIQUE INDEX idx_prov_errors ON provider_fails (prov_name, fail_time)',
+                'CREATE TABLE provider_fails_count(prov_name TEXT PRIMARY KEY,'
+                ' failure_count NUMERIC, failure_time NUMERIC,'
+                ' tmr_limit_count NUMERIC, tmr_limit_time NUMERIC, tmr_limit_wait NUMERIC)'
+            ])
+        ])
+
     def test(self):
         return self.hasTable('lastUpdate')

     def execute(self):
-        queries = [
-            'CREATE TABLE lastUpdate (provider TEXT, time NUMERIC)',
-            'CREATE TABLE lastSearch (provider TEXT, time NUMERIC)',
-            'CREATE TABLE db_version (db_version INTEGER)',
-            'INSERT INTO db_version (db_version) VALUES (1)',
-            'CREATE TABLE network_timezones (network_name TEXT PRIMARY KEY, timezone TEXT)',
-            'CREATE TABLE network_conversions ('
-            'tvdb_network TEXT PRIMARY KEY, tvrage_network TEXT, tvrage_country TEXT)',
-            'CREATE INDEX tvrage_idx on network_conversions (tvrage_network, tvrage_country)',
-            'CREATE TABLE provider_cache (provider TEXT ,name TEXT, season NUMERIC, episodes TEXT,'
-            ' indexerid NUMERIC, url TEXT UNIQUE, time NUMERIC, quality TEXT, release_group TEXT, '
-            'version NUMERIC)',
-            'CREATE TABLE IF NOT EXISTS "backlogparts" ("part" NUMERIC NOT NULL ,'
-            ' "indexer" NUMERIC NOT NULL , "indexerid" NUMERIC NOT NULL )',
-            'CREATE TABLE IF NOT EXISTS "lastrecentsearch" ("name" TEXT PRIMARY KEY NOT NULL'
-            ' , "datetime" NUMERIC NOT NULL )',
-        ]
-        for query in queries:
-            self.connection.action(query)
-        self.setDBVersion(3)
+        self.do_query(self.queries.values())
+        self.setDBVersion(MAX_DB_VERSION)
+
+    def backup(self):
+        db.backup_database('cache.db', self.checkDBVersion())


 class ConsolidateProviders(InitialSchema):
     def test(self):
-        return self.checkDBVersion() > 1
+        return 1 < self.checkDBVersion()

     def execute(self):
-
-        db.backup_database('cache.db', self.checkDBVersion())
-        if self.hasTable('provider_cache'):
-            self.connection.action('DROP TABLE provider_cache')
-
-        self.connection.action('CREATE TABLE provider_cache (provider TEXT, name TEXT, season NUMERIC, episodes TEXT, '
-                               'indexerid NUMERIC, url TEXT UNIQUE, time NUMERIC, quality TEXT, release_group TEXT, '
-                               'version NUMERIC)')
-
-        if not self.hasTable('network_conversions'):
-            self.connection.action('CREATE TABLE network_conversions ' +
-                                   '(tvdb_network TEXT PRIMARY KEY, tvrage_network TEXT, tvrage_country TEXT)')
-            self.connection.action('CREATE INDEX tvrage_idx ' +
-                                   'on network_conversions (tvrage_network, tvrage_country)')
-
-        keep_tables = set(['lastUpdate', 'lastSearch', 'db_version',
-                           'network_timezones', 'network_conversions', 'provider_cache'])
-        current_tables = set(self.listTables())
-        remove_tables = list(current_tables - keep_tables)
-        for table in remove_tables:
-            self.connection.action('DROP TABLE [%s]' % table)
-
-        self.incDBVersion()
+        self.backup()
+        keep_tables = {'lastUpdate', 'lastSearch', 'db_version',
+                       'network_timezones', 'network_conversions', 'provider_cache'}
+        # old provider_cache is dropped before re-creation
+        self.do_query(['DROP TABLE [provider_cache]'] + self.queries['consolidate_providers'] +
+                      ['DROP TABLE [%s]' % t for t in (set(self.listTables()) - keep_tables)])
+        self.finish(True)


 class AddBacklogParts(ConsolidateProviders):
     def test(self):
-        return self.checkDBVersion() > 2
+        return 2 < self.checkDBVersion()

     def execute(self):
-
-        db.backup_database('cache.db', self.checkDBVersion())
-        if self.hasTable('scene_names'):
-            self.connection.action('DROP TABLE scene_names')
-
-        if not self.hasTable('backlogparts'):
-            self.connection.action('CREATE TABLE IF NOT EXISTS "backlogparts" ("part" NUMERIC NOT NULL ,'
-                                   ' "indexer" NUMERIC NOT NULL , "indexerid" NUMERIC NOT NULL )')
-
-        if not self.hasTable('lastrecentsearch'):
-            self.connection.action('CREATE TABLE IF NOT EXISTS "lastrecentsearch" ("name" TEXT PRIMARY KEY NOT NULL'
-                                   ' , "datetime" NUMERIC NOT NULL )')
-
-        if self.hasTable('scene_exceptions_refresh'):
-            self.connection.action('DROP TABLE scene_exceptions_refresh')
-        if self.hasTable('scene_exceptions'):
-            self.connection.action('DROP TABLE scene_exceptions')
-        self.connection.action('VACUUM')
-
-        self.incDBVersion()
+        self.backup()
+        self.do_query(self.queries['add_backlogparts'] +
+                      ['DROP TABLE [%s]' % t for t in ('scene_names', 'scene_exceptions_refresh', 'scene_exceptions')])
+        self.finish(True)


-class AddProviderErrors(AddBacklogParts):
+class AddProviderFailureHandling(AddBacklogParts):
     def test(self):
-        return self.checkDBVersion() > 3
+        return 3 < self.checkDBVersion()

     def execute(self):
-
-        db.backup_database('cache.db', self.checkDBVersion())
-        if not self.hasTable('providererrors'):
-            self.connection.action('CREATE TABLE providererrors ("prov_name" TEXT, "error_type" INTEGER, '
-                                   '"error_code" INTEGER, "error_time" NUMERIC)')
-            self.connection.action('CREATE INDEX idx_prov_name_error ON providererrors (prov_name)')
-            self.connection.action('CREATE UNIQUE INDEX idx_prov_errors ON providererrors (prov_name, error_time)')
-
-        if not self.hasTable('providererrorcount'):
-            self.connection.action('CREATE TABLE providererrorcount (prov_name TEXT PRIMARY KEY , '
-                                   'failure_count NUMERIC, failure_time NUMERIC, hit_limit_count NUMERIC, '
-                                   'hit_limit_time NUMERIC, hit_limit_wait NUMERIC)')
-
-        self.connection.action('VACUUM')
-
-        self.incDBVersion()
\ No newline at end of file
+        self.backup()
+        self.do_query(self.queries['add_provider_fails'])
+        self.finish()
diff --git a/sickbeard/db.py b/sickbeard/db.py
index fc9a9637..75495638 100644
--- a/sickbeard/db.py
+++ b/sickbeard/db.py
@@ -432,6 +432,26 @@ class SchemaUpgrade(object):
             tables.append(table[0])
         return tables

+    def do_query(self, queries):
+        if not isinstance(queries, list):
+            queries = list(queries)
+        elif isinstance(queries[0], list):
+            queries = [item for sublist in queries for item in sublist]
+
+        for query in queries:
+            tbl_name = re.findall('(?i)DROP.*?TABLE.*?\[?([^\s\]]+)', query)
+            if tbl_name and not self.hasTable(tbl_name[0]):
+                continue
+            tbl_name = re.findall('(?i)CREATE.*?TABLE.*?\s([^\s(]+)\s*\(', query)
+            if tbl_name and self.hasTable(tbl_name[0]):
+                continue
+            self.connection.action(query)
+
+    def finish(self, tbl_dropped=False):
+        if tbl_dropped:
+            self.connection.action('VACUUM')
+        self.incDBVersion()
+

 def MigrationCode(myDB):
     schema = {
diff --git a/sickbeard/providers/alpharatio.py b/sickbeard/providers/alpharatio.py
index 886dc01f..1d35cd59 100644
--- a/sickbeard/providers/alpharatio.py
+++ b/sickbeard/providers/alpharatio.py
@@ -65,6 +65,8 @@ class AlphaRatioProvider(generic.TorrentProvider):
                 search_url = self.urls['search'] % (search_string, ('&freetorrent=1', '')[not self.freeleech])

                 html = self.get_url(search_url)
+                if self.should_skip():
+                    return results

                 cnt = len(items[mode])
                 try:
diff --git a/sickbeard/providers/anizb.py b/sickbeard/providers/anizb.py
index fc0d4fd7..1797fb79 100644
--- a/sickbeard/providers/anizb.py
+++ b/sickbeard/providers/anizb.py
@@ -38,7 +38,7 @@ class AnizbProvider(generic.NZBProvider):
             for params in search_params[mode]:
                 search_url = '%sapi/%s' % (self.url, params and (('?q=%s', '?q=%(q)s')['q' in params] % params) or '')

-                data = self.cache.getRSSFeed(search_url)
+                data = self.cache.get_rss(search_url)
                 time.sleep(1.1)

                 cnt = len(results)
diff --git a/sickbeard/providers/beyondhd.py b/sickbeard/providers/beyondhd.py
index c06d555e..5259ba1a 100644
--- a/sickbeard/providers/beyondhd.py
+++ b/sickbeard/providers/beyondhd.py
@@ -73,6 +73,8 @@ class BeyondHDProvider(generic.TorrentProvider):
                 search_url += self.urls['search'] % re.sub('[.\s]+', ' ', search_string)

             data_json = self.get_url(search_url, json=True)
+            if self.should_skip():
+                return results

             cnt = len(items[mode])
             if data_json and 'results' in data_json and self._check_auth_from_data(data_json):
diff --git a/sickbeard/providers/bithdtv.py b/sickbeard/providers/bithdtv.py
index 7fa1345f..efc1fedd 100644
--- a/sickbeard/providers/bithdtv.py
+++ b/sickbeard/providers/bithdtv.py
@@ -71,6 +71,8 @@ class BitHDTVProvider(generic.TorrentProvider):
                 search_url = self.urls['search'] % (search_string, self._categories_string(mode))

                 html = self.get_url(search_url, timeout=90)
+                if self.should_skip():
+                    return results

                 cnt = len(items[mode])
                 try:
diff --git a/sickbeard/providers/bitmetv.py b/sickbeard/providers/bitmetv.py
index 20f7cac9..f392b018 100644
--- a/sickbeard/providers/bitmetv.py
+++ b/sickbeard/providers/bitmetv.py
@@ -64,6 +64,8 @@ class BitmetvProvider(generic.TorrentProvider):
                 search_url = self.urls['search'] % (self._categories_string(mode, 'cat=%s'), search_string)

                 html = self.get_url(search_url)
+                if self.should_skip():
+                    return results

                 cnt = len(items[mode])
                 try:
diff --git a/sickbeard/providers/blutopia.py b/sickbeard/providers/blutopia.py
index bd60c9dc..5639c934 100644
--- a/sickbeard/providers/blutopia.py
+++ b/sickbeard/providers/blutopia.py
@@ -105,6 +105,8 @@ class BlutopiaProvider(generic.TorrentProvider):
                     self.token, '+'.join(search_string.split()), self._categories_string(mode, ''), '', '', '')

                 resp = self.get_url(search_url, json=True)
+                if self.should_skip():
+                    return results

                 cnt = len(items[mode])
                 try:
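For reference, the idempotency rules that make do_query() safe to re-run against a partially migrated cache.db reduce to the following rough standalone sketch (plain sqlite3 with hypothetical table data; not SickGear's DBConnection wrapper): DROPs are skipped when the table is absent, CREATEs are skipped when it already exists.

    import re
    import sqlite3

    def has_table(con, name):
        return bool(con.execute(
            "SELECT 1 FROM sqlite_master WHERE type='table' AND name=?", [name]).fetchall())

    def do_query(con, queries):
        # flatten a list of statement lists, as passed from InitialSchema.queries.values()
        if queries and isinstance(queries[0], list):
            queries = [q for sublist in queries for q in sublist]
        for query in queries:
            tbl = re.findall(r'(?i)DROP.*?TABLE.*?\[?([^\s\]]+)', query)
            if tbl and not has_table(con, tbl[0]):
                continue  # nothing to drop
            tbl = re.findall(r'(?i)CREATE.*?TABLE.*?\s([^\s(]+)\s*\(', query)
            if tbl and has_table(con, tbl[0]):
                continue  # already created
            con.execute(query)

    con = sqlite3.connect(':memory:')
    do_query(con, [['CREATE TABLE provider_fails(prov_name TEXT, fail_type INTEGER,'
                    ' fail_code INTEGER, fail_time NUMERIC)'],
                   ['DROP TABLE [missing_table]']])  # silently skipped, table absent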
diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py
index ca37af5a..23efe073 100644
--- a/sickbeard/providers/btn.py
+++ b/sickbeard/providers/btn.py
@@ -56,6 +56,7 @@ class BTNProvider(generic.TorrentProvider):
         self.ua = self.session.headers['User-Agent']
         self.reject_m2ts = False
         self.cache = BTNCache(self)
+        self.has_limit = True

     def _authorised(self, **kwargs):

@@ -67,6 +68,15 @@ class BTNProvider(generic.TorrentProvider):
             raise AuthException('Must set Api key or Username/Password for %s in config provider options' % self.name)
         return True

+    def _check_response(self, data, url, post_data=None, post_json=None):
+        if not self.should_skip(log_warning=False):
+            if data and 'Call Limit' in data:
+                self.tmr_limit_update('1', 'h', '150/hr %s' % data)
+                self.log_failure_url(url, post_data, post_json)
+            else:
+                logger.log(u'Action prematurely ended. %(prov)s server error response = %(desc)s' %
+                           {'prov': self.name, 'desc': data}, logger.WARNING)
+
     def _search_provider(self, search_params, age=0, **kwargs):

         self._authorised()
@@ -93,21 +103,19 @@ class BTNProvider(generic.TorrentProvider):
                 self.api_key, json.dumps(param_dct), items_per_page, offset))

             try:
-                response = None
+                response, error_text = None, None
                 if api_up and self.api_key:
                     self.session.headers['Content-Type'] = 'application/json-rpc'
-                    response = helpers.getURL(
-                        self.url_api, post_data=json_rpc(params), session=self.session, json=True)
-                if not response:
-                    api_up = False
-                    results = self.html(mode, search_string, results)
-                error_text = response['error']['message']
-                logger.log(
-                    ('Call Limit' in error_text
-                     and u'Action aborted because the %(prov)s 150 calls/hr limit was reached'
-                     or u'Action prematurely ended. %(prov)s server error response = %(desc)s') %
-                    {'prov': self.name, 'desc': error_text}, logger.WARNING)
-                return results
+                    response = self.get_url(self.url_api, post_data=json_rpc(params), json=True)
+                    # response = {'error': {'message': 'Call Limit Exceeded Test'}}
+                error_text = response['error']['message']
+                api_up = False
+                if 'Propers' == mode:
+                    return results
+                results = self.html(mode, search_string, results)
+                if not results:
+                    self._check_response(error_text, self.url_api, post_data=json_rpc(params))
+                return results
             except AuthException:
                 logger.log('API looks to be down, add un/pw config detail to be used as a fallback', logger.WARNING)
             except (KeyError, Exception):
@@ -115,7 +123,7 @@ class BTNProvider(generic.TorrentProvider):

             data_json = response and 'result' in response and response['result'] or {}
             if data_json:
-
+                self.tmr_limit_count = 0
                 found_torrents = 'torrents' in data_json and data_json['torrents'] or {}

                 # We got something, we know the API sends max 1000 results at a time.
@@ -134,15 +142,10 @@ class BTNProvider(generic.TorrentProvider):

                 for page in range(1, pages_needed + 1):
                     try:
-                        response = helpers.getURL(
-                            self.url_api, json=True, session=self.session,
-                            post_data=json_rpc(params, results_per_page, page * results_per_page))
+                        post_data = json_rpc(params, results_per_page, page * results_per_page)
+                        response = self.get_url(self.url_api, json=True, post_data=post_data)
                         error_text = response['error']['message']
-                        logger.log(
-                            ('Call Limit' in error_text
-                             and u'Action prematurely ended because the %(prov)s 150 calls/hr limit was reached'
-                             or u'Action prematurely ended. %(prov)s server error response = %(desc)s') %
-                            {'prov': self.name, 'desc': error_text}, logger.WARNING)
+                        self._check_response(error_text, self.url_api, post_data=post_data)
                         return results
                     except (KeyError, Exception):
                         data_json = response and 'result' in response and response['result'] or {}
@@ -150,6 +153,7 @@ class BTNProvider(generic.TorrentProvider):

                     # Note that these are individual requests and might time out individually. This would result in 'gaps' in the results. There is no way to fix this though.
                     if 'torrents' in data_json:
+                        self.tmr_limit_count = 0
                         found_torrents.update(data_json['torrents'])

                 cnt = len(results)
@@ -176,7 +180,8 @@ class BTNProvider(generic.TorrentProvider):

         if self.username and self.password:
             return super(BTNProvider, self)._authorised(
-                post_params={'login': 'Log In!'}, logged_in=(lambda y='': 'casThe' in y[0:4096]))
+                post_params={'login': 'Log In!'},
+                logged_in=(lambda y='': 'casThe' in y[0:512] and 'Index' in y[0:512]))
         raise AuthException('Password or Username for %s is empty in config provider options' % self.name)

     def html(self, mode, search_string, results):
@@ -197,7 +202,10 @@ class BTNProvider(generic.TorrentProvider):
                 search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
                 search_url = self.urls['search'] % (search_string, self._categories_string(mode, 'filter_cat[%s]=1'))

-                html = helpers.getURL(search_url, session=self.session)
+                html = self.get_url(search_url, use_tmr_limit=False)
+                if self.should_skip(log_warning=False, use_tmr_limit=False):
+                    return results
+
                 cnt = len(results)
                 try:
                     if not html or self._has_no_results(html):
diff --git a/sickbeard/providers/btscene.py b/sickbeard/providers/btscene.py
index 9f25ab16..1a840a0b 100644
--- a/sickbeard/providers/btscene.py
+++ b/sickbeard/providers/btscene.py
@@ -64,7 +64,7 @@ class BTSceneProvider(generic.TorrentProvider):

         url = self.url
         response = self.get_url(url)
-        if not response:
+        if self.should_skip():
             return results

         form = re.findall('(?is)(<form[^>]+)', response)
@@ -84,6 +84,8 @@ class BTSceneProvider(generic.TorrentProvider):
                     else url + self.urls['search'] % (urllib.quote_plus(search_string))

                 html = self.get_url(search_url)
+                if self.should_skip():
+                    return results

                 cnt = len(items[mode])
                 try:
diff --git a/sickbeard/providers/dh.py b/sickbeard/providers/dh.py
index 51d15934..b6ab4861 100644
--- a/sickbeard/providers/dh.py
+++ b/sickbeard/providers/dh.py
@@ -65,6 +65,8 @@ class DHProvider(generic.TorrentProvider):

                 html = self.get_url(self.urls['search'] % (
                     '+'.join(search_string.split()), self._categories_string(mode), ('3', '0')[not self.freeleech]))
+                if self.should_skip():
+                    return results

                 cnt = len(items[mode])
                 try:
diff --git a/sickbeard/providers/ettv.py b/sickbeard/providers/ettv.py
index c83aa971..292d61e1 100644
--- a/sickbeard/providers/ettv.py
+++ b/sickbeard/providers/ettv.py
@@ -62,6 +62,8 @@ class ETTVProvider(generic.TorrentProvider):
                     self._categories_string(mode), ('%2B ', '')['Cache' == mode] + '.'.join(search_string.split()))

                 html = self.get_url(search_url)
+                if self.should_skip():
+                    return results

                 cnt = len(items[mode])
                 try:
@@ -110,6 +112,9 @@ class ETTVProvider(generic.TorrentProvider):
     def get_data(self, url):
         result = None
         html = self.get_url(url, timeout=90)
+        if self.should_skip():
+            return result
+
         try:
             result = re.findall('(?i)"(magnet:[^"]+?)">', html)[0]
         except IndexError:
diff --git a/sickbeard/providers/fano.py b/sickbeard/providers/fano.py
index d56eb4d8..a1e9489e 100644
--- a/sickbeard/providers/fano.py
+++ b/sickbeard/providers/fano.py
@@ -83,6 +83,8 @@ class FanoProvider(generic.TorrentProvider):
                 search_url = self.urls['search'] % (search_string, self._categories_string(mode))

                 html = self.get_url(search_url)
+                if self.should_skip():
+                    return results

                 cnt = len(items[mode])
                 try:
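Every search method above gains the same two-line guard after its fetch. The division of labour it relies on, condensed into a rough standalone sketch (hypothetical class and URL; the real logic lives in generic.py further below): get_url() records failures, while should_skip() converts the failure count into an escalating cool-down so the search bails out instead of hammering a provider that just failed.

    import datetime

    class BackoffSketch(object):
        # escalating cool-down table as used by generic.py: count -> (hours, minutes)
        fail_times = {1: (0, 15), 2: (0, 30), 3: (1, 0), 4: (2, 0),
                      5: (3, 0), 6: (6, 0), 7: (12, 0), 8: (24, 0)}

        def __init__(self):
            self.failure_count, self.failure_time = 0, None

        def wait_time(self):
            # clamp the index into the table; back-off only starts at the third failure
            index = max(1, min(self.failure_count - 2, len(self.fail_times)))
            hours, minutes = self.fail_times[index]
            return datetime.timedelta(hours=hours, minutes=minutes)

        def should_skip(self):
            if 3 <= self.failure_count and self.failure_time:
                return datetime.datetime.now() - self.failure_time < self.wait_time()
            return False

        def get_url(self, url):
            data = None  # a real fetch would happen here
            if not data:  # no data counts as a failure
                self.failure_count += 1
                self.failure_time = datetime.datetime.now()
            return data

    provider = BackoffSketch()
    for attempt in range(5):
        html = provider.get_url('http://example.com/search')
        if provider.should_skip():  # mirrors the `if self.should_skip(): return results` guard
            break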
diff --git a/sickbeard/providers/filelist.py b/sickbeard/providers/filelist.py
index 4e7e66ea..536bd209 100644
--- a/sickbeard/providers/filelist.py
+++ b/sickbeard/providers/filelist.py
@@ -62,6 +62,8 @@ class FLProvider(generic.TorrentProvider):

                 html = self.get_url(self.urls['search'] % ('+'.join(search_string.split()),
                                                            self._categories_string(mode, template='cats[]=%s')))
+                if self.should_skip():
+                    return results

                 cnt = len(items[mode])
                 try:
diff --git a/sickbeard/providers/funfile.py b/sickbeard/providers/funfile.py
index d57bc5cd..240fa7fa 100644
--- a/sickbeard/providers/funfile.py
+++ b/sickbeard/providers/funfile.py
@@ -66,6 +66,8 @@ class FunFileProvider(generic.TorrentProvider):
                 search_url = self.urls['search'] % (self._categories_string(mode), search_string)

                 html = self.get_url(search_url, timeout=self.url_timeout)
+                if self.should_skip():
+                    return results

                 cnt = len(items[mode])
                 try:
diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py
index c1a4bd32..1a20c138 100644
--- a/sickbeard/providers/generic.py
+++ b/sickbeard/providers/generic.py
@@ -48,36 +48,39 @@ from sickbeard.name_parser.parser import NameParser, InvalidNameException, Inval
 from sickbeard.show_name_helpers import get_show_names_all_possible
 from sickbeard.sbdatetime import sbdatetime

+
 class HaltParseException(SickBeardException):
     """Something requires the current processing to abort"""


-class ProviderErrorTypes:
+class ProviderFailTypes:
     http = 1
     connection = 2
     connection_timeout = 3
     timeout = 4
-    unknown = 5
+    other = 5
     limit = 6
     nodata = 7

-    names = {1: 'http', 2: 'connection', 3: 'connection_timeout', 4: 'timeout', 5: 'unknown', 6: 'limit', 7: 'nodata'}
+    names = {http: 'http', timeout: 'timeout',
+             connection: 'connection', connection_timeout: 'connection_timeout',
+             nodata: 'nodata', other: 'other', limit: 'limit'}

     def __init__(self):
         pass


-class ProviderError(object):
-    def __init__(self, error_type=ProviderErrorTypes.unknown, code=None, error_time=None):
+class ProviderFail(object):
+    def __init__(self, fail_type=ProviderFailTypes.other, code=None, fail_time=None):
         self.code = code
-        self.error_type = error_type
-        self.error_time = (datetime.datetime.now(), error_time)[isinstance(error_time, datetime.datetime)]
+        self.fail_type = fail_type
+        self.fail_time = (datetime.datetime.now(), fail_time)[isinstance(fail_time, datetime.datetime)]


-class ProviderErrorList(object):
+class ProviderFailList(object):
     def __init__(self, provider_name):
         self.provider_name = provider_name
-        self._errors = []
+        self._fails = []
         self.lock = threading.Lock()
         self.clear_old()
         self.load_list()
@@ -85,38 +88,68 @@ class ProviderErrorList(object):
         self.dirty = False

     @property
-    def errors(self):
-        return self._errors
+    def fails(self):
+        return self._fails

     @property
-    def errors_sorted(self):
-        error_dict = {}
-        b_d = {'count': 0, 'code': None}
-        for e in self._errors:
-            dd = e.error_time.date()
-            if ProviderErrorTypes.names[e.error_type] not in error_dict.get(dd, {}):
-                error_dict.setdefault(dd,
-                                      {'date': dd, 'http': b_d.copy(), 'connection': b_d.copy(),
-                                       'connection_timeout': b_d.copy(), 'timeout': b_d.copy(),
-                                       'unknown': b_d.copy(), 'limit': b_d.copy(),
-                                       'nodata': b_d.copy()})[ProviderErrorTypes.names[e.error_type]]['count'] = 1
+    def fails_sorted(self):
+        fail_dict = {}
+        b_d = {'count': 0}
+        for e in self._fails:
+            fail_date = e.fail_time.date()
+            fail_hour = e.fail_time.time().hour
+            date_time = datetime.datetime.combine(fail_date, datetime.time(hour=fail_hour))
+            if ProviderFailTypes.names[e.fail_type] not in fail_dict.get(date_time, {}):
+                default = {'date': str(fail_date), 'date_time': date_time, 'multirow': False}
+                for et in ProviderFailTypes.names.itervalues():
+                    default[et] = b_d.copy()
+                fail_dict.setdefault(date_time, default)[ProviderFailTypes.names[e.fail_type]]['count'] = 1
             else:
-                error_dict[dd][ProviderErrorTypes.names[e.error_type]]['count'] += 1
-            if ProviderErrorTypes.http == e.error_type:
-                if e.code in error_dict[dd].get(ProviderErrorTypes.names[e.error_type], {}):
-                    error_dict[dd][ProviderErrorTypes.names[e.error_type]][e.code] += 1
+                fail_dict[date_time][ProviderFailTypes.names[e.fail_type]]['count'] += 1
+            if ProviderFailTypes.http == e.fail_type:
+                if e.code in fail_dict[date_time].get(ProviderFailTypes.names[e.fail_type],
+                                                      {'code': {}}).get('code', {}):
+                    fail_dict[date_time][ProviderFailTypes.names[e.fail_type]]['code'][e.code] += 1
                 else:
-                    error_dict[dd][ProviderErrorTypes.names[e.error_type]][e.code] = 1
-        error_list = sorted([error_dict[k] for k in error_dict.iterkeys()], key=lambda x: x.get('date'), reverse=True)
-        return error_list
+                    fail_dict[date_time][ProviderFailTypes.names[e.fail_type]].setdefault('code', {})[e.code] = 1

-    def add_error(self, error):
-        if isinstance(error, ProviderError):
+        row_count = {}
+        for (k, v) in fail_dict.iteritems():
+            row_count.setdefault(v.get('date'), 0)
+            if v.get('date') in row_count:
+                row_count[v.get('date')] += 1
+        for (k, v) in fail_dict.iteritems():
+            if 1 < row_count.get(v.get('date')):
+                fail_dict[k]['multirow'] = True
+
+        fail_list = sorted([fail_dict[k] for k in fail_dict.iterkeys()], key=lambda y: y.get('date_time'), reverse=True)
+
+        totals = {}
+        for fail_date in set([fail.get('date') for fail in fail_list]):
+            daytotals = {}
+            for et in ProviderFailTypes.names.itervalues():
+                daytotals.update({et: sum([x.get(et).get('count') for x in fail_list if fail_date == x.get('date')])})
+            totals.update({fail_date: daytotals})
+        for (fail_date, total) in totals.iteritems():
+            for i, item in enumerate(fail_list):
+                if fail_date == item.get('date'):
+                    if item.get('multirow'):
+                        fail_list[i:i] = [item.copy()]
+                        for et in ProviderFailTypes.names.itervalues():
+                            fail_list[i][et] = {'count': total[et]}
+                            if et == ProviderFailTypes.names[ProviderFailTypes.http]:
+                                fail_list[i][et]['code'] = {}
+                    break
+
+        return fail_list
+
+    def add_fail(self, fail):
+        if isinstance(fail, ProviderFail):
             with self.lock:
                 self.dirty = True
-                self._errors.append(error)
-                logger.log('Adding error: %s for %s' %
-                           (ProviderErrorTypes.names.get(error.error_type, 'unknown'), self.provider_name()),
+                self._fails.append(fail)
+                logger.log('Adding fail.%s for %s' % (ProviderFailTypes.names.get(
+                    fail.fail_type, ProviderFailTypes.names[ProviderFailTypes.other]), self.provider_name()),
                            logger.DEBUG)
             self.save_list()
@@ -124,29 +157,29 @@ class ProviderErrorList(object):
         if self.dirty:
             self.clear_old()
             with self.lock:
-                myDB = db.DBConnection('cache.db')
+                my_db = db.DBConnection('cache.db')
                 cl = []
-                for e in self._errors:
-                    cl.append(['INSERT OR IGNORE INTO providererrors (prov_name, error_type, error_code, error_time) '
-                               'VALUES (?,?,?,?)', [self.provider_name(), e.error_type, e.code,
-                                                    sbdatetime.totimestamp(e.error_time)]])
+                for f in self._fails:
+                    cl.append(['INSERT OR IGNORE INTO provider_fails (prov_name, fail_type, fail_code, fail_time) '
+                               'VALUES (?,?,?,?)', [self.provider_name(), f.fail_type, f.code,
+                                                    sbdatetime.totimestamp(f.fail_time)]])
                 self.dirty = False
                 if cl:
-                    myDB.mass_action(cl)
+                    my_db.mass_action(cl)
                 self.last_save = datetime.datetime.now()

     def load_list(self):
         with self.lock:
             try:
-                myDB = db.DBConnection('cache.db')
-                if myDB.hasTable('providererrors'):
-                    results = myDB.select('SELECT * FROM providererrors WHERE prov_name = ?', [self.provider_name()])
-                    self._errors = []
+                my_db = db.DBConnection('cache.db')
+                if my_db.hasTable('provider_fails'):
+                    results = my_db.select('SELECT * FROM provider_fails WHERE prov_name = ?', [self.provider_name()])
+                    self._fails = []
                     for r in results:
                         try:
-                            self._errors.append(ProviderError(
-                                error_type=helpers.tryInt(r['error_type']), code=helpers.tryInt(r['error_code']),
-                                error_time=datetime.datetime.fromtimestamp(helpers.tryInt(r['error_time']))))
+                            self._fails.append(ProviderFail(
+                                fail_type=helpers.tryInt(r['fail_type']), code=helpers.tryInt(r['fail_code']),
+                                fail_time=datetime.datetime.fromtimestamp(helpers.tryInt(r['fail_time']))))
                         except (StandardError, Exception):
                             continue
             except (StandardError, Exception):
@@ -155,10 +188,10 @@ class ProviderErrorList(object):
     def clear_old(self):
         with self.lock:
             try:
-                myDB = db.DBConnection('cache.db')
-                if myDB.hasTable('providererrors'):
+                my_db = db.DBConnection('cache.db')
+                if my_db.hasTable('provider_fails'):
                     time_limit = sbdatetime.totimestamp(datetime.datetime.now() - datetime.timedelta(days=28))
-                    myDB.action('DELETE FROM providererrors WHERE error_time < ?', [time_limit])
+                    my_db.action('DELETE FROM provider_fails WHERE fail_time < ?', [time_limit])
             except (StandardError, Exception):
                 pass
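The save/load/prune cycle above boils down to a few SQL statements; a compact standalone sqlite3 sketch (illustrative provider name, not the DBConnection wrapper) of how the unique (prov_name, fail_time) index de-duplicates fails and the 28-day window prunes them:

    import datetime
    import sqlite3
    import time

    con = sqlite3.connect(':memory:')
    con.execute('CREATE TABLE provider_fails(prov_name TEXT, fail_type INTEGER,'
                ' fail_code INTEGER, fail_time NUMERIC)')
    con.execute('CREATE UNIQUE INDEX idx_prov_errors ON provider_fails (prov_name, fail_time)')

    now = time.time()
    for ts in (now, now, now - 40 * 24 * 3600):  # a duplicate plus a stale entry
        con.execute('INSERT OR IGNORE INTO provider_fails VALUES (?,?,?,?)',
                    ('demo_provider', 1, 429, ts))

    # prune anything older than the 28-day retention window
    limit = time.time() - datetime.timedelta(days=28).total_seconds()
    con.execute('DELETE FROM provider_fails WHERE fail_time < ?', [limit])
    print(con.execute('SELECT COUNT(*) FROM provider_fails').fetchone()[0])  # -> 1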
@@ -200,49 +233,50 @@ class GenericProvider(object):

         self._failure_count = 0
         self._failure_time = None
-        self.errors = ProviderErrorList(self.get_id)
-        self._hit_limit_count = 0
-        self._hit_limit_time = None
-        self._hit_limit_wait = None
-        self._last_error_type = None
+        self.fails = ProviderFailList(self.get_id)
+        self._tmr_limit_count = 0
+        self._tmr_limit_time = None
+        self._tmr_limit_wait = None
+        self._last_fail_type = None
         self.has_limit = False
         self.fail_times = {1: (0, 15), 2: (0, 30), 3: (1, 0), 4: (2, 0), 5: (3, 0), 6: (6, 0), 7: (12, 0), 8: (24, 0)}
-        self._load_error_values()
+        self._load_fail_values()

-    def _load_error_values(self):
+    def _load_fail_values(self):
         if hasattr(sickbeard, 'DATA_DIR'):
-            myDB = db.DBConnection('cache.db')
-            if myDB.hasTable('providererrorcount'):
-                r = myDB.select('SELECT * FROM providererrorcount WHERE prov_name = ?', [self.get_id()])
+            my_db = db.DBConnection('cache.db')
+            if my_db.hasTable('provider_fails_count'):
+                r = my_db.select('SELECT * FROM provider_fails_count WHERE prov_name = ?', [self.get_id()])
                 if r:
                     self._failure_count = helpers.tryInt(r[0]['failure_count'], 0)
                     if r[0]['failure_time']:
                         self._failure_time = datetime.datetime.fromtimestamp(r[0]['failure_time'])
                     else:
                         self._failure_time = None
-                    self._hit_limit_count = helpers.tryInt(r[0]['hit_limit_count'], 0)
-                    if r[0]['hit_limit_time']:
-                        self._hit_limit_time = datetime.datetime.fromtimestamp(r[0]['hit_limit_time'])
+                    self._tmr_limit_count = helpers.tryInt(r[0]['tmr_limit_count'], 0)
+                    if r[0]['tmr_limit_time']:
+                        self._tmr_limit_time = datetime.datetime.fromtimestamp(r[0]['tmr_limit_time'])
                     else:
-                        self._hit_limit_time = None
-                    if r[0]['hit_limit_wait']:
-                        self._hit_limit_wait = datetime.timedelta(seconds=helpers.tryInt(r[0]['hit_limit_wait'], 0))
+                        self._tmr_limit_time = None
+                    if r[0]['tmr_limit_wait']:
+                        self._tmr_limit_wait = datetime.timedelta(seconds=helpers.tryInt(r[0]['tmr_limit_wait'], 0))
                     else:
-                        self._hit_limit_wait = None
-            self._last_error_type = self.last_error
+                        self._tmr_limit_wait = None
+            self._last_fail_type = self.last_fail

-    def _save_error_value(self, field, value):
-        myDB = db.DBConnection('cache.db')
-        if myDB.hasTable('providererrorcount'):
-            r = myDB.action('UPDATE providererrorcount SET %s = ? WHERE prov_name = ?' % field, [value, self.get_id()])
+    def _save_fail_value(self, field, value):
+        my_db = db.DBConnection('cache.db')
+        if my_db.hasTable('provider_fails_count'):
+            r = my_db.action('UPDATE provider_fails_count SET %s = ? WHERE prov_name = ?' % field,
+                             [value, self.get_id()])
             if 0 == r.rowcount:
-                myDB.action('REPLACE INTO providererrorcount (prov_name, %s) VALUES (?,?)' % field,
-                            [self.get_id(), value])
+                my_db.action('REPLACE INTO provider_fails_count (prov_name, %s) VALUES (?,?)' % field,
+                             [self.get_id(), value])

     @property
-    def last_error(self):
+    def last_fail(self):
         try:
-            return sorted(self.errors.errors, key=lambda x: x.error_time, reverse=True)[0].error_type
+            return sorted(self.fails.fails, key=lambda x: x.fail_time, reverse=True)[0].fail_type
         except (StandardError, Exception):
             return None

@@ -255,7 +289,7 @@ class GenericProvider(object):
             changed_val = self._failure_count != value
             self._failure_count = value
             if changed_val:
-                self._save_error_value('failure_count', value)
+                self._save_fail_value('failure_count', value)

     @property
     def failure_time(self):
@@ -266,158 +300,252 @@ class GenericProvider(object):
         if None is value or isinstance(value, datetime.datetime):
             changed_val = self._failure_time != value
             self._failure_time = value
-            if None is value:
-                v = value
-            else:
-                v = sbdatetime.totimestamp(value)
             if changed_val:
-                self._save_error_value('failure_time', v)
+                self._save_fail_value('failure_time', (sbdatetime.totimestamp(value), value)[None is value])

     @property
-    def hit_limit_count(self):
-        return self._hit_limit_count
+    def tmr_limit_count(self):
+        return self._tmr_limit_count

-    @hit_limit_count.setter
-    def hit_limit_count(self, value):
-        changed_val = self._hit_limit_count != value
-        self._hit_limit_count = value
+    @tmr_limit_count.setter
+    def tmr_limit_count(self, value):
+        changed_val = self._tmr_limit_count != value
+        self._tmr_limit_count = value
         if changed_val:
-            self._save_error_value('hit_limit_count', value)
+            self._save_fail_value('tmr_limit_count', value)

     @property
-    def hit_limit_time(self):
-        return self._hit_limit_time
+    def tmr_limit_time(self):
+        return self._tmr_limit_time

-    @hit_limit_time.setter
-    def hit_limit_time(self, value):
+    @tmr_limit_time.setter
+    def tmr_limit_time(self, value):
         if None is value or isinstance(value, datetime.datetime):
-            changed_val = self._hit_limit_time != value
-            self._hit_limit_time = value
-            if None is value:
-                v = value
-            else:
-                v = sbdatetime.totimestamp(value)
+            changed_val = self._tmr_limit_time != value
+            self._tmr_limit_time = value
             if changed_val:
-                self._save_error_value('hit_limit_time', v)
+                self._save_fail_value('tmr_limit_time', (sbdatetime.totimestamp(value), value)[None is value])

     @property
     def max_index(self):
         return len(self.fail_times)

     @property
-    def hit_limit_wait(self):
-        return self._hit_limit_wait
+    def tmr_limit_wait(self):
+        return self._tmr_limit_wait

-    @hit_limit_wait.setter
-    def hit_limit_wait(self, value):
-        if isinstance(getattr(self, 'errors', None), ProviderErrorList) and isinstance(value, datetime.timedelta):
-            self.errors.add_error(ProviderError(error_type=ProviderErrorTypes.limit))
-        changed_val = self._hit_limit_wait != value
-        self._hit_limit_wait = value
+    @tmr_limit_wait.setter
+    def tmr_limit_wait(self, value):
+        if isinstance(getattr(self, 'fails', None), ProviderFailList) and isinstance(value, datetime.timedelta):
+            self.fails.add_fail(ProviderFail(fail_type=ProviderFailTypes.limit))
+        changed_val = self._tmr_limit_wait != value
+        self._tmr_limit_wait = value
         if changed_val:
             if None is value:
-                self._save_error_value('hit_limit_wait', value)
+                self._save_fail_value('tmr_limit_wait', value)
             elif isinstance(value, datetime.timedelta):
-                self._save_error_value('hit_limit_wait', value.total_seconds())
+                self._save_fail_value('tmr_limit_wait', value.total_seconds())

     def fail_time_index(self, base_limit=2):
         i = self.failure_count - base_limit
         return (i, self.max_index)[i >= self.max_index]

-    def wait_time(self, fc):
-        return datetime.timedelta(hours=self.fail_times[fc][0], minutes=self.fail_times[fc][1])
+    def tmr_limit_update(self, period, unit, desc):
+        self.tmr_limit_time = datetime.datetime.now()
+        self.tmr_limit_count += 1
+        limit_set = False
+        if None not in (period, unit):
+            limit_set = True
+            if unit in ('s', 'sec', 'secs', 'seconds', 'second'):
+                self.tmr_limit_wait = datetime.timedelta(seconds=helpers.tryInt(period))
+            elif unit in ('m', 'min', 'mins', 'minutes', 'minute'):
+                self.tmr_limit_wait = datetime.timedelta(minutes=helpers.tryInt(period))
+            elif unit in ('h', 'hr', 'hrs', 'hours', 'hour'):
+                self.tmr_limit_wait = datetime.timedelta(hours=helpers.tryInt(period))
+            elif unit in ('d', 'days', 'day'):
+                self.tmr_limit_wait = datetime.timedelta(days=helpers.tryInt(period))
+            else:
+                limit_set = False
+        if not limit_set:
+            time_index = self.fail_time_index(base_limit=0)
+            self.tmr_limit_wait = self.wait_time(time_index)
+        logger.log('Request limit reached. Waiting for %s until next retry. Message: %s' %
+                   (self.tmr_limit_wait, desc or 'none found'), logger.WARNING)
+
+    def wait_time(self, time_index=None):
+        """
+        Return a suitable wait time, selected by parameter, or based on the current failure count
+
+        :param time_index: A key value index into the fail_times dict, or selects using failure count if None
+        :type time_index: Integer
+        :return: Time
+        :rtype: Timedelta
+        """
+        if None is time_index:
+            time_index = self.fail_time_index()
+        return datetime.timedelta(hours=self.fail_times[time_index][0], minutes=self.fail_times[time_index][1])
+
+    def fail_newest_delta(self):
+        """
+        Return how long since the most recent failure
+        :return: Period since most recent failure on record
+        :rtype: timedelta
+        """
+        return datetime.datetime.now() - self.failure_time
+
+    def is_waiting(self):
+        return self.fail_newest_delta() < self.wait_time()
+
+    def valid_tmr_time(self):
+        return isinstance(self.tmr_limit_wait, datetime.timedelta) and \
+            isinstance(self.tmr_limit_time, datetime.datetime)

     @property
     def get_next_try_time(self):
         n = None
         h = datetime.timedelta(seconds=0)
         f = datetime.timedelta(seconds=0)
-        if isinstance(self.hit_limit_wait, datetime.timedelta) and isinstance(self.hit_limit_time, datetime.datetime):
-            h = self.hit_limit_time + self.hit_limit_wait - datetime.datetime.now()
-        if 3 <= self.failure_count and isinstance(self.failure_time, datetime.datetime):
-            fc = self.fail_time_index()
-            if datetime.datetime.now() - self.failure_time < self.wait_time(fc):
-                h = self.failure_time + self.wait_time(fc) - datetime.datetime.now()
+        if self.valid_tmr_time():
+            h = self.tmr_limit_time + self.tmr_limit_wait - datetime.datetime.now()
+        if 3 <= self.failure_count and isinstance(self.failure_time, datetime.datetime) and self.is_waiting():
+            h = self.failure_time + self.wait_time() - datetime.datetime.now()
         if datetime.timedelta(seconds=0) < max((h, f)):
             n = max((h, f))
         return n

     def retry_next(self):
-        if isinstance(self.hit_limit_wait, datetime.timedelta) and isinstance(self.hit_limit_time, datetime.datetime):
-            self.hit_limit_time = datetime.datetime.now() - self.hit_limit_wait
-        if 3 <= self.failure_count and isinstance(self.failure_time, datetime.datetime):
-            fc = self.fail_time_index()
-            if datetime.datetime.now() - self.failure_time < self.wait_time(fc):
-                self.failure_time = datetime.datetime.now() - self.wait_time(fc)
+        if self.valid_tmr_time():
+            self.tmr_limit_time = datetime.datetime.now() - self.tmr_limit_wait
+        if 3 <= self.failure_count and isinstance(self.failure_time, datetime.datetime) and self.is_waiting():
+            self.failure_time = datetime.datetime.now() - self.wait_time()

-    def should_skip(self, log_warning=True):
-        if isinstance(self.hit_limit_wait, datetime.timedelta) and isinstance(self.hit_limit_time, datetime.datetime):
-            time_left = self.hit_limit_time + self.hit_limit_wait - datetime.datetime.now()
+    @staticmethod
+    def fmt_delta(delta):
+        return str(delta).rsplit('.')[0]
+
+    def should_skip(self, log_warning=True, use_tmr_limit=True):
+        """
+        Determine if a subsequent server request should be skipped. The result of this logic is based on the most
+        recent server connection activity, including exhausted request limits and the count of connection failures,
+        which determine a "cool down" period before reconnection attempts are recommended.
+        :param log_warning: Output to log if True (default), otherwise set False for no output.
+        :type log_warning: Boolean
+        :param use_tmr_limit: Setting this to False will ignore a tmr limit being reached and will instead return False.
+        :type use_tmr_limit: Boolean
+        :return: True for any known issue that would prevent a subsequent server connection, otherwise False.
+        :rtype: Boolean
+        """
+        if self.valid_tmr_time():
+            time_left = self.tmr_limit_time + self.tmr_limit_wait - datetime.datetime.now()
             if time_left > datetime.timedelta(seconds=0):
                 if log_warning:
-                    logger.log('Hit limited reached, waiting for %s' % time_left, logger.WARNING)
-                return True
+                    # Ensure provider name output (e.g. when displaying config/provs) instead of e.g. thread "Tornado"
+                    prepend = ('[%s] :: ' % self.name, '')[any([x.name in threading.currentThread().getName()
+                                                                for x in sickbeard.providers.sortedProviderList()])]
+                    logger.log('%sToo many requests reached at %s, waiting for %s' % (
+                        prepend, self.fmt_delta(self.tmr_limit_time), self.fmt_delta(time_left)), logger.WARNING)
+                return use_tmr_limit
             else:
-                self.hit_limit_time = None
-                self.hit_limit_wait = None
+                self.tmr_limit_time = None
+                self.tmr_limit_wait = None
         if 3 <= self.failure_count:
             if None is self.failure_time:
                 self.failure_time = datetime.datetime.now()
-            fc = self.fail_time_index()
-            if datetime.datetime.now() - self.failure_time < self.wait_time(fc):
+            if self.is_waiting():
                 if log_warning:
-                    time_left = self.wait_time(fc) - (datetime.datetime.now() - self.failure_time)
-                    logger.log('Failed %s times, skipping provider for %s' % (self.failure_count, time_left),
-                               logger.WARNING)
+                    time_left = self.wait_time() - self.fail_newest_delta()
+                    logger.log('Failed %s times, skipping provider for %s, last failure at %s with fail type: %s' % (
+                        self.failure_count, self.fmt_delta(time_left), self.fmt_delta(self.failure_time),
+                        ProviderFailTypes.names.get(
+                            self.last_fail, ProviderFailTypes.names[ProviderFailTypes.other])), logger.WARNING)
                 return True
         return False

     def inc_failure_count(self, *args, **kwargs):
-        error_type = ('error_type' in kwargs and kwargs['error_type'].error_type) or \
-            (isinstance(args, tuple) and isinstance(args[0], ProviderError) and args[0].error_type)
+        fail_type = ('fail_type' in kwargs and kwargs['fail_type'].fail_type) or \
+            (isinstance(args, tuple) and isinstance(args[0], ProviderFail) and args[0].fail_type)
         if not isinstance(self.failure_time, datetime.datetime) or \
-                error_type != self._last_error_type or \
-                datetime.datetime.now() - self.failure_time > datetime.timedelta(seconds=3):
+                fail_type != self._last_fail_type or \
+                self.fail_newest_delta() > datetime.timedelta(seconds=3):
             self.failure_count += 1
             self.failure_time = datetime.datetime.now()
-            self._last_error_type = error_type
-            self.errors.add_error(*args, **kwargs)
+            self._last_fail_type = fail_type
+            self.fails.add_fail(*args, **kwargs)
         else:
-            logger.log('%s: Not logging same error within 3 seconds' % self.name, logger.DEBUG)
+            logger.log('%s: Not logging same failure within 3 seconds' % self.name, logger.DEBUG)

-    def getURL(self, *args, **kwargs):
+    def get_url(self, url, skip_auth=False, use_tmr_limit=True, *args, **kwargs):
+        """
+        Return data from a URI with a possible check for authentication prior to the data fetch.
+        Raised errors and no data in responses are tracked for making future logic decisions.
+
+        :param url: Address where to fetch data from
+        :type url: String
+        :param skip_auth: Skip authentication check of provider if True
+        :type skip_auth: Boolean
+        :param use_tmr_limit: An API limit can be positive before a fetch but unwanted; set False to short-circuit should_skip
+        :type use_tmr_limit: Boolean
+        :param args: params to pass through to getURL
+        :type args:
+        :param kwargs: keyword params to pass through to getURL
+        :type kwargs:
+        :return: None or data fetched from URL
+        :rtype: String or NoneType
+        """
         data = None

         # check for auth
-        if not self._authorised() or self.should_skip():
-            return data
+        if (not skip_auth and not (self.is_public_access()
+                                   and type(self).__name__ not in ['TorrentRssProvider']) and not self._authorised()) \
+                or self.should_skip(use_tmr_limit=use_tmr_limit):
+            return

         kwargs['raise_exceptions'] = True
         kwargs['raise_status_code'] = True
+        for k, v in dict(headers=self.headers, hooks=dict(response=self.cb_response), session=self.session).items():
+            kwargs.setdefault(k, v)
+        post_data = kwargs.get('post_data')
+        post_json = kwargs.get('post_json')

+        # noinspection PyUnusedLocal
+        log_failure_url = False
         try:
-            data = helpers.getURL(*args, **kwargs)
+            data = helpers.getURL(url, *args, **kwargs)
             if data:
                 if 0 != self.failure_count:
                     logger.log('Unblocking provider: %s' % self.get_id(), logger.DEBUG)
                 self.failure_count = 0
                 self.failure_time = None
             else:
-                self.inc_failure_count(ProviderError(error_type=ProviderErrorTypes.nodata))
+                self.inc_failure_count(ProviderFail(fail_type=ProviderFailTypes.nodata))
+                log_failure_url = True
         except requests.exceptions.HTTPError as e:
-            self.inc_failure_count(ProviderError(error_type=ProviderErrorTypes.http, code=e.response.status_code))
-        except requests.exceptions.ConnectionError as e:
-            self.inc_failure_count(ProviderError(error_type=ProviderErrorTypes.connection))
-        except requests.exceptions.ReadTimeout as e:
-            self.inc_failure_count(ProviderError(error_type=ProviderErrorTypes.timeout))
-        except (requests.exceptions.Timeout, socket.timeout) as e:
-            self.inc_failure_count(ProviderError(error_type=ProviderErrorTypes.connection_timeout))
+            self.inc_failure_count(ProviderFail(fail_type=ProviderFailTypes.http, code=e.response.status_code))
+        except requests.exceptions.ConnectionError:
+            self.inc_failure_count(ProviderFail(fail_type=ProviderFailTypes.connection))
+        except requests.exceptions.ReadTimeout:
+            self.inc_failure_count(ProviderFail(fail_type=ProviderFailTypes.timeout))
+        except (requests.exceptions.Timeout, socket.timeout):
+            self.inc_failure_count(ProviderFail(fail_type=ProviderFailTypes.connection_timeout))
         except (StandardError, Exception) as e:
-            self.inc_failure_count(ProviderError(error_type=ProviderErrorTypes.unknown))
+            log_failure_url = True
+            self.inc_failure_count(ProviderFail(fail_type=ProviderFailTypes.other))

-        self.errors.save_list()
+        self.fails.save_list()
+        if log_failure_url:
+            self.log_failure_url(url, post_data, post_json)
         return data

+    def log_failure_url(self, url, post_data=None, post_json=None):
+        if self.should_skip(log_warning=False):
+            post = []
+            if post_data:
+                post += [' .. Post params: [%s]' % '&'.join([post_data])]
+            if post_json:
+                post += [' .. Json params: [%s]' % '&'.join([post_json])]
+            logger.log('Failure URL: %s%s' % (url, ''.join(post)), logger.WARNING)
+
     def get_id(self):
         return GenericProvider.make_id(self.name)

@@ -484,19 +612,6 @@ class GenericProvider(object):
             self.session.response = dict(url=r.url, status_code=r.status_code, elapsed=r.elapsed, from_cache=r.from_cache)
         return r

-    def get_url(self, url, post_data=None, params=None, timeout=30, json=False):
-        """
-        By default this is just a simple urlopen call but this method should be overridden
-        for providers with special URL requirements (like cookies)
-        """
-
-        # check for auth
-        if not self._authorised():
-            return
-
-        return helpers.getURL(url, post_data=post_data, params=params, headers=self.headers, timeout=timeout,
-                              session=self.session, json=json, hooks=dict(response=self.cb_response))
-
     def download_result(self, result):
         """
         Save the result to disk.
@@ -1341,8 +1456,9 @@ class TorrentProvider(GenericProvider):
                 return None

             if 10 < len(cur_url) and ((expire and (expire > int(time.time()))) or
-                                      self._has_signature(helpers.getURL(cur_url, session=self.session))):
-
+                                      self._has_signature(self.get_url(cur_url, skip_auth=True))):
+                if self.should_skip():
+                    return None
                 for k, v in getattr(self, 'url_tmpl', {}).items():
                     self.urls[k] = v % {'home': cur_url, 'vars': getattr(self, 'url_vars', {}).get(k, '')}

@@ -1402,15 +1518,17 @@ class TorrentProvider(GenericProvider):

         if isinstance(url, type([])):
             for i in range(0, len(url)):
-                helpers.getURL(url.pop(), session=self.session)
+                self.get_url(url.pop(), skip_auth=True)
+                if self.should_skip():
+                    return False

         passfield, userfield = None, None
         if not url:
             if hasattr(self, 'urls'):
                 url = self.urls.get('login_action')
                 if url:
-                    response = helpers.getURL(url, session=self.session)
-                    if None is response:
+                    response = self.get_url(url, skip_auth=True)
+                    if self.should_skip() or None is response:
                         return False
                     try:
                         post_params = isinstance(post_params, type({})) and post_params or {}
@@ -1450,8 +1568,8 @@ class TorrentProvider(GenericProvider):
         if self.password not in post_params.values():
             post_params[(passfield, 'password')[not passfield]] = self.password

-        response = helpers.getURL(url, post_data=post_params, session=self.session, timeout=timeout)
-        if response:
+        response = self.get_url(url, skip_auth=True, post_data=post_params, timeout=timeout)
+        if not self.should_skip() and response:
             if logged_in(response):
                 return True
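For reference, the period/unit handling inside tmr_limit_update() above reduces to this standalone sketch (hypothetical helper name, assuming integer periods); when no unit matches, the caller falls back to the fail_times table:

    import datetime

    def parse_wait(period, unit):
        # map unit aliases to timedelta keyword arguments
        units = {('s', 'sec', 'secs', 'seconds', 'second'): 'seconds',
                 ('m', 'min', 'mins', 'minutes', 'minute'): 'minutes',
                 ('h', 'hr', 'hrs', 'hours', 'hour'): 'hours',
                 ('d', 'days', 'day'): 'days'}
        for aliases, kwarg in units.items():
            if unit in aliases:
                return datetime.timedelta(**{kwarg: int(period)})
        return None  # unknown unit: caller uses the escalating default wait

    assert parse_wait('300', 'secs') == datetime.timedelta(seconds=300)
    assert parse_wait('1', 'h') == datetime.timedelta(hours=1)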
diff --git a/sickbeard/providers/gftracker.py b/sickbeard/providers/gftracker.py
index 88f44284..dae9aa19 100644
--- a/sickbeard/providers/gftracker.py
+++ b/sickbeard/providers/gftracker.py
@@ -66,6 +66,8 @@ class GFTrackerProvider(generic.TorrentProvider):
                     (self.urls['search'] % search_string, '')['Cache' == mode])

                 html = self.get_url(search_url)
+                if self.should_skip():
+                    return results

                 cnt = len(items[mode])
                 try:
diff --git a/sickbeard/providers/grabtheinfo.py b/sickbeard/providers/grabtheinfo.py
index f838dd6b..6e8f7aa3 100644
--- a/sickbeard/providers/grabtheinfo.py
+++ b/sickbeard/providers/grabtheinfo.py
@@ -70,6 +70,8 @@ class GrabTheInfoProvider(generic.TorrentProvider):
                     (self.urls['search'] % search_string, '')['Cache' == mode])

                 html = self.get_url(search_url)
+                if self.should_skip():
+                    return results

                 cnt = len(items[mode])
                 try:
diff --git a/sickbeard/providers/hd4free.py b/sickbeard/providers/hd4free.py
index 0f86d0da..18b30b26 100644
--- a/sickbeard/providers/hd4free.py
+++ b/sickbeard/providers/hd4free.py
@@ -105,6 +105,8 @@ class HD4FreeProvider(generic.TorrentProvider):
                     self.token, '+'.join(search_string.split()), self._categories_string(mode, ''), '', '', '')

                 resp = self.get_url(search_url, json=True)
+                if self.should_skip():
+                    return results

                 cnt = len(items[mode])
                 try:
diff --git a/sickbeard/providers/hdbits.py b/sickbeard/providers/hdbits.py
index 6986c84d..6b873324 100644
--- a/sickbeard/providers/hdbits.py
+++ b/sickbeard/providers/hdbits.py
@@ -48,7 +48,7 @@ class HDBitsProvider(generic.TorrentProvider):

         self.username, self.passkey, self.freeleech, self.minseed, self.minleech = 5 * [None]

-    def check_auth_from_data(self, parsed_json):
+    def _check_auth_from_data(self, parsed_json):

         if 'status' in parsed_json and 5 == parsed_json.get('status') and 'message' in parsed_json:
             logger.log(u'Incorrect username or password for %s: %s' % (self.name, parsed_json['message']), logger.DEBUG)
@@ -112,9 +112,11 @@ class HDBitsProvider(generic.TorrentProvider):

         search_url = self.urls['search']
         json_resp = self.get_url(search_url, post_data=post_data, json=True)
+        if self.should_skip():
+            return results

         try:
-            if not (json_resp and self.check_auth_from_data(json_resp) and 'data' in json_resp):
+            if not (json_resp and self._check_auth_from_data(json_resp) and 'data' in json_resp):
                 logger.log(u'Response from %s does not contain any json data, abort' % self.name, logger.ERROR)
                 return results
         except AuthException as e:
diff --git a/sickbeard/providers/hdspace.py b/sickbeard/providers/hdspace.py
index 12e76099..f6e63ff9 100644
--- a/sickbeard/providers/hdspace.py
+++ b/sickbeard/providers/hdspace.py
@@ -83,6 +83,8 @@ class HDSpaceProvider(generic.TorrentProvider):
                     search_url += self.urls['search'] % rc['nodots'].sub(' ', search_string)

                 html = self.get_url(search_url)
+                if self.should_skip():
+                    return results

                 cnt = len(items[mode])
                 try:
diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py
index f079e7d8..ec8049d1 100644
--- a/sickbeard/providers/hdtorrents.py
+++ b/sickbeard/providers/hdtorrents.py
@@ -86,6 +86,8 @@ class HDTorrentsProvider(generic.TorrentProvider):
                     self._categories_string(mode, template='category[]=%s')
                     .replace('&category[]=Animation', ('&genre[]=Animation', '')[mode in ['Cache', 'Propers']]))
                 html = self.get_url(search_url)
+                if self.should_skip():
+                    return results

                 cnt = len(items[mode])
                 try:
diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py
index 5600698a..e5c9048d 100644
--- a/sickbeard/providers/iptorrents.py
+++ b/sickbeard/providers/iptorrents.py
@@ -88,6 +88,8 @@ class IPTorrentsProvider(generic.TorrentProvider):
                     (';free', '')[not self.freeleech], (';o=seeders', '')['Cache' == mode])

                 html = self.get_url(search_url)
+                if self.should_skip():
+                    return results

                 cnt = len(items[mode])
                 try:
diff --git a/sickbeard/providers/limetorrents.py b/sickbeard/providers/limetorrents.py
index 61ce72e2..4b0fd6b5 100644
--- a/sickbeard/providers/limetorrents.py
+++ b/sickbeard/providers/limetorrents.py
@@ -67,6 +67,8 @@ class LimeTorrentsProvider(generic.TorrentProvider):
                     else self.urls['search'] % (urllib.quote_plus(search_string))

                 html = self.get_url(search_url)
+                if self.should_skip():
+                    return results

                 cnt = len(items[mode])
                 try:
diff --git a/sickbeard/providers/magnetdl.py b/sickbeard/providers/magnetdl.py
index cdd762a4..2342614a 100644
--- a/sickbeard/providers/magnetdl.py
+++ b/sickbeard/providers/magnetdl.py
@@ -54,6 +54,8 @@ class MagnetDLProvider(generic.TorrentProvider):
                 search_url = self.urls['search'] % re.sub('[.\s]+', ' ', search_string)

                 html = self.get_url(search_url)
+                if self.should_skip():
+                    return results

                 cnt = len(items[mode])
                 try:
diff --git a/sickbeard/providers/morethan.py b/sickbeard/providers/morethan.py
index 2e3833b6..bffc35de 100644
--- a/sickbeard/providers/morethan.py
+++ b/sickbeard/providers/morethan.py
@@ -65,6 +65,9 @@ class MoreThanProvider(generic.TorrentProvider):
                 # fetches 15 results by default, and up to 100 if allowed in user profile
                 html = self.get_url(search_url)
+                if self.should_skip():
+                    return results
+
                 cnt = len(items[mode])
                 try:
                     if not html or self._has_no_results(html):
diff --git a/sickbeard/providers/ncore.py b/sickbeard/providers/ncore.py
index 11226e95..e777fac0 100644
--- a/sickbeard/providers/ncore.py
+++ b/sickbeard/providers/ncore.py
@@ -68,6 +68,8 @@ class NcoreProvider(generic.TorrentProvider):
                 # fetches 15 results by default, and up to 100 if allowed in user profile
                 html = self.get_url(search_url)
+                if self.should_skip():
+                    return results

                 cnt = len(items[mode])
                 try:
diff --git a/sickbeard/providers/nebulance.py b/sickbeard/providers/nebulance.py
index b6abcd87..0118d9e8 100644
--- a/sickbeard/providers/nebulance.py
+++ b/sickbeard/providers/nebulance.py
@@ -51,7 +51,9 @@ class NebulanceProvider(generic.TorrentProvider):
                 post_params={'keeplogged': '1', 'form_tmpl': True}):
             return False
         if not self.user_authkey:
-            response = helpers.getURL(self.urls['user'], session=self.session, json=True)
+            response = self.get_url(self.urls['user'], skip_auth=True, json=True)
+            if self.should_skip():
+                return False
             if 'response' in response:
                 self.user_authkey, self.user_passkey = [response['response'].get(v) for v in 'authkey', 'passkey']
         return self.user_authkey
@@ -74,6 +76,8 @@ class NebulanceProvider(generic.TorrentProvider):
                 search_url += self.urls['search'] % rc['nodots'].sub('+', search_string)

                 data_json = self.get_url(search_url, json=True)
+                if self.should_skip():
+                    return results

                 cnt = len(items[mode])
                 try:
diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py
index 2e05c621..d1e18f1d 100755
--- a/sickbeard/providers/newznab.py
+++ b/sickbeard/providers/newznab.py
@@ -187,13 +187,13 @@ class NewznabProvider(generic.NZBProvider):
         if datetime.date.today() - self._caps_need_apikey['date'] > datetime.timedelta(days=30) or \
                 not self._caps_need_apikey['need']:
             self._caps_need_apikey['need'] = False
-            data = self.getURL('%s/api?t=caps' % self.url)
+            data = self.get_url('%s/api?t=caps' % self.url)
             if data:
                 xml_caps = helpers.parse_xml(data)
         if xml_caps is None or not hasattr(xml_caps, 'tag') or xml_caps.tag == 'error' or xml_caps.tag != 'caps':
             api_key = self.maybe_apikey()
             if isinstance(api_key, basestring) and api_key not in ('0', ''):
-                data = self.getURL('%s/api?t=caps&apikey=%s' % (self.url, api_key))
+                data = self.get_url('%s/api?t=caps&apikey=%s' % (self.url, api_key))
                 if data:
                     xml_caps = helpers.parse_xml(data)
         if xml_caps and hasattr(xml_caps, 'tag') and xml_caps.tag == 'caps':
@@ -296,7 +296,7 @@ class NewznabProvider(generic.NZBProvider):
                 return False
         return super(NewznabProvider, self)._check_auth(is_required)

-    def check_auth_from_data(self, data):
+    def _check_auth_from_data(self, data, url):

         if data is None or not hasattr(data, 'tag'):
             return False
@@ -312,23 +312,12 @@ class NewznabProvider(generic.NZBProvider):
             elif '102' == code:
                 raise AuthException('Your account isn\'t allowed to use the API on %s, contact the admin.'
                                     % self.name)
             elif '500' == code:
-                self.hit_limit_time = datetime.datetime.now()
-                self.hit_limit_count += 1
-                retry_time = re.search(r'Retry in (\d+)\W+([a-z]+)', description, flags=re.I)
-                if retry_time:
-                    if retry_time.group(2) in ('s', 'sec', 'secs', 'seconds', 'second'):
-                        self.hit_limit_wait = datetime.timedelta(seconds=helpers.tryInt(retry_time.group(1)))
-                    elif retry_time.group(2) in ('m', 'min', 'mins', 'minutes', 'minute'):
-                        self.hit_limit_wait = datetime.timedelta(minutes=helpers.tryInt(retry_time.group(1)))
-                    elif retry_time.group(2) in ('h', 'hr', 'hrs', 'hours', 'hour'):
-                        self.hit_limit_wait = datetime.timedelta(hours=helpers.tryInt(retry_time.group(1)))
-                    elif retry_time.group(2) in ('d', 'days', 'day'):
-                        self.hit_limit_wait = datetime.timedelta(days=helpers.tryInt(retry_time.group(1)))
-                if not self.hit_limit_wait:
-                    fc = self.fail_time_index(base_limit=0)
-                    self.hit_limit_wait = self.wait_time(fc)
-                logger.log('Request limit reached. Waiting for %s until next retry. Message: %s' %
-                           (self.hit_limit_wait, description), logger.WARNING)
+                try:
+                    retry_time, unit = re.findall(r'Retry in (\d+)\W+([a-z]+)', description, flags=re.I)[0]
+                except IndexError:
+                    retry_time, unit = None, None
+                self.tmr_limit_update(retry_time, unit, description)
+                self.log_failure_url(url)
             elif '910' == code:
                 logger.log(
                     '%s %s, please check with provider.' %
@@ -339,7 +328,7 @@ class NewznabProvider(generic.NZBProvider):
                     logger.WARNING)
                 return False

-        self.hit_limit_count = 0
+        self.tmr_limit_count = 0
         return True

     def config_str(self):
@@ -739,17 +728,13 @@ class NewznabProvider(generic.NZBProvider):
             search_url = '%sapi?%s' % (self.url, urllib.urlencode(request_params))
             i and time.sleep(2.1)

-            data = self.getURL(search_url)
+            data = self.get_url(search_url)

-            if self.should_skip():
-                break
-
-            if not data:
-                logger.log('No Data returned from %s' % self.name, logger.WARNING)
+            if self.should_skip() or not data:
                 break

             # hack this in until it's fixed server side
-            if data and not data.startswith('<?xml'):
+            if not data.startswith('<?xml'):
                 data = '<?xml version="1.0" encoding="ISO-8859-1" ?>%s' % data

             try:
@@ -759,7 +744,7 @@ class NewznabProvider(generic.NZBProvider):
                 logger.log('Error trying to load %s RSS feed' % self.name, logger.WARNING)
                 break

-            if not self.check_auth_from_data(parsed_xml):
+            if not self._check_auth_from_data(parsed_xml, search_url):
                 break

             if 'rss' != parsed_xml.tag:
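The rewritten error-500 branch above extracts any server-supplied "Retry in <n> <unit>" hint and defers to the shared tmr_limit_update(); the extraction in isolation (hypothetical helper name), where (None, None) triggers the default escalating back-off:

    import re

    def parse_retry(description):
        # pull the period and unit from e.g. 'Request limit reached. Retry in 47 minutes'
        try:
            return re.findall(r'Retry in (\d+)\W+([a-z]+)', description, flags=re.I)[0]
        except IndexError:
            return None, None

    assert parse_retry('Request limit reached. Retry in 47 minutes') == ('47', 'minutes')
    assert parse_retry('Request limit reached.') == (None, None)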
diff --git a/sickbeard/providers/nyaa.py b/sickbeard/providers/nyaa.py
index aae8184d..3b965a48 100644
--- a/sickbeard/providers/nyaa.py
+++ b/sickbeard/providers/nyaa.py
@@ -53,6 +53,8 @@ class NyaaProvider(generic.TorrentProvider):
                 search_url = self.urls['search'] % ((0, 2)[self.confirmed], search_string)

                 html = self.get_url(search_url)
+                if self.should_skip():
+                    return results

                 cnt = len(items[mode])
                 try:
diff --git a/sickbeard/providers/omgwtfnzbs.py b/sickbeard/providers/omgwtfnzbs.py
index b96ef43b..747e5c3e 100644
--- a/sickbeard/providers/omgwtfnzbs.py
+++ b/sickbeard/providers/omgwtfnzbs.py
@@ -99,10 +99,13 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
     def get_data(self, url):
         result = None
         if url and False is self._init_api():
-            data = self.getURL(url, timeout=90)
+            data = self.get_url(url, timeout=90)
+            if self.should_skip():
+                return result
             if data:
                 if re.search('(?i)limit.*?reached', data):
-                    logger.log('Daily Nzb Download limit reached', logger.DEBUG)
+                    self.tmr_limit_update('1', 'h', 'Your 24 hour limit of 10 NZBs has been reached')
+                    self.log_failure_url(url)
                 elif '</nzb>' not in data or 'seem to be logged in' in data:
                     logger.log('Failed nzb data response: %s' % data, logger.DEBUG)
                 else:
@@ -156,6 +159,8 @@ class OmgwtfnzbsProvider(generic.NZBProvider):

             url = self.urls['cache'] % urllib.urlencode(params)
             response = self.get_url(url)
+            if self.should_skip():
+                return results

             data = feedparser.parse(response.replace('<xml', '<?xml').replace('>\n<info>', '?>\n<feed>\n<info>')
                                     .replace('<search_req>\n', '').replace('</search_req>\n', '')
@@ -185,7 +190,9 @@ class OmgwtfnzbsProvider(generic.NZBProvider):

             search_url = self.urls['search'] % urllib.urlencode(params)

-            data_json = self.getURL(search_url, json=True)
+            data_json = self.get_url(search_url, json=True)
+            if self.should_skip():
+                return results
             if data_json and self._check_auth_from_data(data_json, is_xml=False):
                 for item in data_json:
                     if 'release' in item and 'getnzb' in item:
@@ -213,7 +220,9 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
                 'cat': 'cat=(?:%s)' % '|'.join(cats)}.items())
             mode = ('search', 'cache')['' == search]
             search_url = self.urls[mode + '_html'] % search
-            html = self.getURL(search_url)
+            html = self.get_url(search_url)
+            if self.should_skip():
+                return results
             cnt = len(results)
             try:
                 if not html:
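A tiny sketch of the limit detection that now feeds tmr_limit_update() in omgwtfnzbs above (illustrative message text; the real handler also records the failure URL):

    import re

    def hit_limit(data):
        # a response body mentioning a reached limit marks the provider as rate limited
        return bool(data and re.search('(?i)limit.*?reached', data))

    assert hit_limit('Your daily NZB limit has been reached')
    assert not hit_limit('<nzb>...</nzb>')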
import generic -from sickbeard.rssfeeds import RSSFeeds from lib.unidecode import unidecode @@ -52,7 +51,7 @@ class PreToMeProvider(generic.TorrentProvider): search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string search_url = url + (self.urls['search'] % search_string, '')['Cache' == mode] - xml_data = RSSFeeds(self).get_feed(search_url) + xml_data = self.cache.get_rss(search_url) cnt = len(items[mode]) if xml_data and 'entries' in xml_data: diff --git a/sickbeard/providers/privatehd.py b/sickbeard/providers/privatehd.py index 57961c51..5360b0c6 100644 --- a/sickbeard/providers/privatehd.py +++ b/sickbeard/providers/privatehd.py @@ -97,6 +97,8 @@ class PrivateHDProvider(generic.TorrentProvider): '+'.join(search_string.split()), self._categories_string(mode, '')) html = self.get_url(search_url) + if self.should_skip(): + return results cnt = len(items[mode]) try: diff --git a/sickbeard/providers/ptf.py b/sickbeard/providers/ptf.py index fe741ffc..9a76c189 100644 --- a/sickbeard/providers/ptf.py +++ b/sickbeard/providers/ptf.py @@ -85,11 +85,16 @@ class PTFProvider(generic.TorrentProvider): search_url = self.urls['search'] % ('+'.join(search_string.split()), self._categories_string(mode)) html = self.get_url(search_url) + if self.should_skip(): + return results + time.sleep(2) if not self.has_all_cookies(['session_key']): if not self._authorised(): return results html = self.get_url(search_url) + if self.should_skip(): + return results cnt = len(items[mode]) try: diff --git a/sickbeard/providers/rarbg.py b/sickbeard/providers/rarbg.py index c3d98a47..41464507 100644 --- a/sickbeard/providers/rarbg.py +++ b/sickbeard/providers/rarbg.py @@ -58,8 +58,8 @@ class RarbgProvider(generic.TorrentProvider): return True for r in range(0, 3): - response = helpers.getURL(self.urls['api_token'], session=self.session, json=True) - if response and 'token' in response: + response = self.get_url(self.urls['api_token'], json=True) + if not self.should_skip() and response and 'token' in response: self.token = response['token'] self.token_expiry = datetime.datetime.now() + datetime.timedelta(minutes=14) return True @@ -125,6 +125,8 @@ class RarbgProvider(generic.TorrentProvider): searched_url = search_url % {'r': int(self.confirmed), 't': self.token} data_json = self.get_url(searched_url, json=True) + if self.should_skip(): + return results self.token_expiry = datetime.datetime.now() + datetime.timedelta(minutes=14) self.request_throttle = datetime.datetime.now() + datetime.timedelta(seconds=3) diff --git a/sickbeard/providers/revtt.py b/sickbeard/providers/revtt.py index e6d86c5a..a8741491 100644 --- a/sickbeard/providers/revtt.py +++ b/sickbeard/providers/revtt.py @@ -63,6 +63,8 @@ class RevTTProvider(generic.TorrentProvider): html = self.get_url(self.urls['search'] % ('+'.join(search_string.split()), self._categories_string(mode))) + if self.should_skip(): + return results cnt = len(items[mode]) try: diff --git a/sickbeard/providers/rsstorrent.py b/sickbeard/providers/rsstorrent.py index 6eace767..d43eab9b 100644 --- a/sickbeard/providers/rsstorrent.py +++ b/sickbeard/providers/rsstorrent.py @@ -21,7 +21,6 @@ from . 
import generic from sickbeard import logger, tvcache from sickbeard.helpers import tryInt from sickbeard.exceptions import ex -from sickbeard.rssfeeds import RSSFeeds from lib.bencode import bdecode @@ -41,8 +40,6 @@ class TorrentRssProvider(generic.TorrentProvider): self.search_mode = search_mode self.search_fallback = bool(tryInt(search_fallback)) - self.feeder = RSSFeeds(self) - def image_name(self): return generic.GenericProvider.image_name(self, 'torrentrss') @@ -102,6 +99,9 @@ class TorrentRssProvider(generic.TorrentProvider): break else: torrent_file = self.get_url(url) + if self.should_skip(): + break + try: bdecode(torrent_file) break @@ -120,7 +120,7 @@ class TorrentRssProvider(generic.TorrentProvider): result = [] for mode in search_params.keys(): - data = self.feeder.get_feed(self.url) + data = self.cache.get_rss(self.url) result += (data and 'entries' in data) and data.entries or [] diff --git a/sickbeard/providers/scenehd.py b/sickbeard/providers/scenehd.py index 960dfb9c..2aba52d3 100644 --- a/sickbeard/providers/scenehd.py +++ b/sickbeard/providers/scenehd.py @@ -61,6 +61,8 @@ class SceneHDProvider(generic.TorrentProvider): search_url = self.urls['search'] % (search_string, self._categories_string(mode, '%s', ',')) html = self.get_url(search_url, timeout=90) + if self.should_skip(): + return results cnt = len(items[mode]) try: diff --git a/sickbeard/providers/scenetime.py b/sickbeard/providers/scenetime.py index 1be278a8..ccf6e431 100644 --- a/sickbeard/providers/scenetime.py +++ b/sickbeard/providers/scenetime.py @@ -80,6 +80,8 @@ class SceneTimeProvider(generic.TorrentProvider): self.session.headers.update({'Referer': self.url + 'browse.php', 'X-Requested-With': 'XMLHttpRequest'}) html = self.get_url(self.urls['browse'], post_data=post_data) + if self.should_skip(): + return results cnt = len(items[mode]) try: diff --git a/sickbeard/providers/shazbat.py b/sickbeard/providers/shazbat.py index e235b330..62a0173c 100644 --- a/sickbeard/providers/shazbat.py +++ b/sickbeard/providers/shazbat.py @@ -49,8 +49,8 @@ class ShazbatProvider(generic.TorrentProvider): def _authorised(self, **kwargs): return super(ShazbatProvider, self)._authorised( - logged_in=(lambda y=None: '<input type="password"' not in helpers.getURL( - self.urls['feeds'], session=self.session)), post_params={'tv_login': self.username, 'form_tmpl': True}) + logged_in=(lambda y=None: '<input type="password"' not in self.get_url(self.urls['feeds'], skip_auth=True)), + post_params={'tv_login': self.username, 'form_tmpl': True}) def _search_provider(self, search_params, **kwargs): @@ -70,11 +70,16 @@ class ShazbatProvider(generic.TorrentProvider): if 'Cache' == mode: search_url = self.urls['browse'] html = self.get_url(search_url) + if self.should_skip(): + return results else: search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string search_string = search_string.replace(show_detail, '').strip() search_url = self.urls['search'] % search_string html = self.get_url(search_url) + if self.should_skip(): + return results + shows = rc['show_id'].findall(html) if not any(shows): continue @@ -85,6 +90,8 @@ class ShazbatProvider(generic.TorrentProvider): continue html and time.sleep(1.1) html += self.get_url(self.urls['show'] % sid) + if self.should_skip(): + return results cnt = len(items[mode]) try: diff --git a/sickbeard/providers/skytorrents.py b/sickbeard/providers/skytorrents.py index 984acf81..59d60933 100644 --- a/sickbeard/providers/skytorrents.py +++ 
diff --git a/sickbeard/providers/skytorrents.py b/sickbeard/providers/skytorrents.py
index 984acf81..59d60933 100644
--- a/sickbeard/providers/skytorrents.py
+++ b/sickbeard/providers/skytorrents.py
@@ -56,6 +56,8 @@ class SkytorrentsProvider(generic.TorrentProvider):
             search_url = self.urls['search'] % search_string
 
             html = self.get_url(search_url)
+            if self.should_skip():
+                return results
 
             cnt = len(items[mode])
             try:
diff --git a/sickbeard/providers/speedcd.py b/sickbeard/providers/speedcd.py
index c6f4d501..36620bb7 100644
--- a/sickbeard/providers/speedcd.py
+++ b/sickbeard/providers/speedcd.py
@@ -67,6 +67,8 @@ class SpeedCDProvider(generic.TorrentProvider):
                 jxt=2, jxw='b', freeleech=('on', None)[not self.freeleech])
 
             data_json = self.get_url(self.urls['search'], post_data=post_data, json=True)
+            if self.should_skip():
+                return results
 
             cnt = len(items[mode])
             try:
diff --git a/sickbeard/providers/thepiratebay.py b/sickbeard/providers/thepiratebay.py
index 2fb31373..dc93eb8d 100644
--- a/sickbeard/providers/thepiratebay.py
+++ b/sickbeard/providers/thepiratebay.py
@@ -106,7 +106,7 @@ class ThePirateBayProvider(generic.TorrentProvider):
         quality = Quality.UNKNOWN
         file_name = None
         data = self.get_url('%sajax_details_filelist.php?id=%s' % (self.url, torrent_id))
-        if not data:
+        if self.should_skip() or not data:
             return None
 
         files_list = re.findall('<td.+>(.*?)</td>', data)
@@ -193,6 +193,8 @@ class ThePirateBayProvider(generic.TorrentProvider):
             search_url = self.urls['browse'] if 'Cache' == mode \
                 else self.urls['search'] % (urllib.quote(search_string))
             html = self.get_url(search_url)
+            if self.should_skip():
+                return results
 
             cnt = len(items[mode])
             try:
diff --git a/sickbeard/providers/tokyotoshokan.py b/sickbeard/providers/tokyotoshokan.py
index 9dbbba70..9bd8476e 100644
--- a/sickbeard/providers/tokyotoshokan.py
+++ b/sickbeard/providers/tokyotoshokan.py
@@ -49,6 +49,9 @@ class TokyoToshokanProvider(generic.TorrentProvider):
                 'stats': 'S:\s*?(\d)+\s*L:\s*(\d+)', 'size': 'size:\s*(\d+[.,]\d+\w+)'}.iteritems())
 
             html = self.get_url(search_url)
+            if self.should_skip():
+                return self._sort_seeding(mode, results)
+
             if html:
                 try:
                     with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
@@ -103,7 +106,7 @@ class TokyoToshokanCache(tvcache.TVCache):
 
         mode = 'Cache'
         search_url = '%srss.php?%s' % (self.provider.url, urllib.urlencode({'filter': '1'}))
-        data = self.getRSSFeed(search_url)
+        data = self.get_rss(search_url)
 
         results = []
         if data and 'entries' in data:
diff --git a/sickbeard/providers/torlock.py b/sickbeard/providers/torlock.py
index 9d8c2e75..3541529c 100644
--- a/sickbeard/providers/torlock.py
+++ b/sickbeard/providers/torlock.py
@@ -74,6 +74,8 @@ class TorLockProvider(generic.TorrentProvider):
                 else self.urls['search'] % (urllib.quote_plus(search_string).replace('+', '-'))
 
             html = self.get_url(search_url)
+            if self.should_skip():
+                return results
 
             cnt = len(items[mode])
             try:
diff --git a/sickbeard/providers/torrentbytes.py b/sickbeard/providers/torrentbytes.py
index 44da04a0..f0009a9a 100644
--- a/sickbeard/providers/torrentbytes.py
+++ b/sickbeard/providers/torrentbytes.py
@@ -61,6 +61,8 @@ class TorrentBytesProvider(generic.TorrentProvider):
             search_url = self.urls['search'] % (search_string, self._categories_string(mode))
 
             html = self.get_url(search_url, timeout=90)
+            if self.should_skip():
+                return results
 
             cnt = len(items[mode])
             try:
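Note: several search_url lines in these hunks, such as torrentday's ('&sort=7&type=desc', '')['Cache' == mode] just below, use the tuple-indexing conditional common in this codebase. A worked example of how it selects values:

    # bool indexes the pair: False == 0 picks the first item, True == 1 the second.
    mode = 'Cache'
    assert '' == ('&sort=7&type=desc', '')['Cache' == mode]

    mode = 'Season'
    assert '&sort=7&type=desc' == ('&sort=7&type=desc', '')['Cache' == mode]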
diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py
index 7cda7d27..defa8e9a 100644
--- a/sickbeard/providers/torrentday.py
+++ b/sickbeard/providers/torrentday.py
@@ -86,6 +86,8 @@ class TorrentDayProvider(generic.TorrentProvider):
                 search_string, ('&sort=7&type=desc', '')['Cache' == mode])
 
             html = self.get_url(search_url)
+            if self.should_skip():
+                return results
 
             cnt = len(items[mode])
             try:
diff --git a/sickbeard/providers/torrenting.py b/sickbeard/providers/torrenting.py
index c6d83130..2c24ec80 100644
--- a/sickbeard/providers/torrenting.py
+++ b/sickbeard/providers/torrenting.py
@@ -69,6 +69,8 @@ class TorrentingProvider(generic.TorrentProvider):
             search_url = self.urls['search'] % (self._categories_string(), search_string)
 
             html = self.get_url(search_url)
+            if self.should_skip():
+                return results
 
             cnt = len(items[mode])
             try:
diff --git a/sickbeard/providers/torrentleech.py b/sickbeard/providers/torrentleech.py
index fb4a31ba..2c1adb65 100644
--- a/sickbeard/providers/torrentleech.py
+++ b/sickbeard/providers/torrentleech.py
@@ -62,6 +62,8 @@ class TorrentLeechProvider(generic.TorrentProvider):
                 'query': isinstance(search_string, unicode) and unidecode(search_string) or search_string}
 
             html = self.get_url(search_url)
+            if self.should_skip():
+                return results
 
             cnt = len(items[mode])
             try:
diff --git a/sickbeard/providers/torrentz2.py b/sickbeard/providers/torrentz2.py
index a4902c57..a3986d4c 100644
--- a/sickbeard/providers/torrentz2.py
+++ b/sickbeard/providers/torrentz2.py
@@ -93,6 +93,8 @@ class Torrentz2Provider(generic.TorrentProvider):
                 'tv%s' % ('+' + quote_plus(search_string), '')['Cache' == mode])
 
             html = self.get_url(search_url)
+            if self.should_skip():
+                return results
 
             cnt = len(items[mode])
             try:
diff --git a/sickbeard/providers/tvchaosuk.py b/sickbeard/providers/tvchaosuk.py
index 146ad31a..4bec9082 100644
--- a/sickbeard/providers/tvchaosuk.py
+++ b/sickbeard/providers/tvchaosuk.py
@@ -66,6 +66,8 @@ class TVChaosUKProvider(generic.TorrentProvider):
                 'order': 'desc', 'daysprune': '-1'})
 
             html = self.get_url(self.urls['search'], **kwargs)
+            if self.should_skip():
+                return results
 
             cnt = len(items[mode])
             try:
diff --git a/sickbeard/providers/wop.py b/sickbeard/providers/wop.py
index 0647cd92..e593380b 100644
--- a/sickbeard/providers/wop.py
+++ b/sickbeard/providers/wop.py
@@ -70,6 +70,8 @@ class WOPProvider(generic.TorrentProvider):
             search_url = self.urls['search'] % (search_string, self._categories_string(mode, 'cats2[]=%s'))
 
             html = self.get_url(search_url, timeout=90)
+            if self.should_skip():
+                return results
 
             cnt = len(items[mode])
             try:
diff --git a/sickbeard/providers/zooqle.py b/sickbeard/providers/zooqle.py
index bfd81253..41b27991 100644
--- a/sickbeard/providers/zooqle.py
+++ b/sickbeard/providers/zooqle.py
@@ -58,6 +58,8 @@ class ZooqleProvider(generic.TorrentProvider):
             search_url = self.urls['search'] % (search_string, self._categories_string(mode, '', ','))
 
             html = self.get_url(search_url)
+            if self.should_skip():
+                return results
 
             cnt = len(items[mode])
             try:
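Note: the rewritten rssfeeds.py below leans on two feedparser behaviours: parse() always returns a dict-like result carrying an 'entries' list, and some indexers surface an error object on the .feed attribute (the 'error' code/description pair the new get_feed logs at DEBUG). A small check of the first assumption:

    import feedparser

    # Even a feed with no items yields an 'entries' key, so the
    # "if data and 'entries' in data" test really asks "did parsing succeed".
    data = feedparser.parse('<rss version="2.0"><channel><title>t</title></channel></rss>')
    assert 'entries' in data
    assert [] == data.entries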
diff --git a/sickbeard/rssfeeds.py b/sickbeard/rssfeeds.py
index 04226f12..5b72dbd6 100644
--- a/sickbeard/rssfeeds.py
+++ b/sickbeard/rssfeeds.py
@@ -5,54 +5,32 @@
 import feedparser
 
-from sickbeard import helpers, logger
+from sickbeard import logger
 from sickbeard.exceptions import ex
 
 
 class RSSFeeds:
 
     def __init__(self, provider=None):
-
         self.provider = provider
-        self.response = None
 
-    def _check_auth_cookie(self):
+    def get_feed(self, url, **kwargs):
 
-        if self.provider:
-            return self.provider.check_auth_cookie()
-        return True
+        if self.provider and self.provider.check_auth_cookie():
+            response = self.provider.get_url(url, **kwargs)
+            if not self.provider.should_skip() and response:
+                try:
+                    data = feedparser.parse(response)
+                    data['rq_response'] = self.provider.session.response
+                    if data and 'entries' in data:
+                        return data
 
-    # noinspection PyUnusedLocal
-    def cb_response(self, r, *args, **kwargs):
-        self.response = dict(url=r.url, elapsed=r.elapsed, from_cache=r.from_cache)
-        return r
+                    if data and 'error' in data.feed:
+                        err_code = data.feed['error']['code']
+                        err_desc = data.feed['error']['description']
+                        logger.log(u'RSS error:[%s] code:[%s]' % (err_desc, err_code), logger.DEBUG)
+                    else:
+                        logger.log(u'RSS error loading url: ' + url, logger.DEBUG)
 
-    def get_feed(self, url, request_headers=None, **kwargs):
-
-        if not self._check_auth_cookie():
-            return
-
-        session = None
-        if self.provider and hasattr(self.provider, 'session'):
-            session = self.provider.session
-
-        response = helpers.getURL(url, headers=request_headers, session=session,
-                                  hooks=dict(response=self.cb_response), **kwargs)
-        if not response:
-            return
-
-        try:
-            feed = feedparser.parse(response)
-            feed['rq_response'] = self.response
-            if feed and 'entries' in feed:
-                return feed
-
-            if feed and 'error' in feed.feed:
-                err_code = feed.feed['error']['code']
-                err_desc = feed.feed['error']['description']
-                logger.log(u'RSS ERROR:[%s] CODE:[%s]' % (err_desc, err_code), logger.DEBUG)
-            else:
-                logger.log(u'RSS error loading url: ' + url, logger.DEBUG)
-
-        except Exception as e:
-            logger.log(u'RSS error: ' + ex(e), logger.DEBUG)
+                except Exception as e:
+                    logger.log(u'RSS error: ' + ex(e), logger.DEBUG)
diff --git a/sickbeard/search.py b/sickbeard/search.py
index a6f32bd1..67e166f4 100644
--- a/sickbeard/search.py
+++ b/sickbeard/search.py
@@ -143,7 +143,7 @@ def snatch_episode(result, end_status=SNATCHED):
         # make sure we have the torrent file content
         if not result.content and not result.url.startswith('magnet'):
             result.content = result.provider.get_url(result.url)
-            if not result.content:
+            if result.provider.should_skip() or not result.content:
                 logger.log(u'Torrent content failed to download from %s' % result.url, logger.ERROR)
                 return False
         # Snatches torrent with client
@@ -465,6 +465,8 @@ def search_for_needed_episodes(episodes):
             best_result.content = None
             if not best_result.url.startswith('magnet'):
                 best_result.content = best_result.provider.get_url(best_result.url)
+                if best_result.provider.should_skip():
+                    break
 
                 if not best_result.content:
                     continue
diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py
index d7ec58f6..d69e4bad 100644
--- a/sickbeard/tvcache.py
+++ b/sickbeard/tvcache.py
@@ -107,7 +107,7 @@ class TVCache:
 
         return []
 
-    def getRSSFeed(self, url, **kwargs):
+    def get_rss(self, url, **kwargs):
         return RSSFeeds(self.provider).get_feed(url, **kwargs)
 
     def _translateTitle(self, title):
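Note: the two search.py hunks above deliberately differ. snatch_episode treats a skip like any failed download (log and return False), while search_for_needed_episodes uses break rather than continue: once should_skip() trips, every further fetch from that provider inside the loop would be skipped anyway. A sketch of the distinction with a stub provider:

    class StubProvider(object):
        # Pretend the first fetch trips the provider's back-off.
        def __init__(self):
            self.tripped = False

        def get_url(self, url):
            self.tripped = True
            return None

        def should_skip(self):
            return self.tripped

    provider = StubProvider()
    fetched = []
    for url in ('r1', 'r2', 'r3'):
        content = provider.get_url(url)
        if provider.should_skip():
            break           # provider is down: abandon its remaining results
        if not content:
            continue        # one bad item: move on to the next result
        fetched.append(content)

    assert [] == fetched    # the loop stopped after the first attempt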
diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py
index 41bb3445..e5ec3857 100644
--- a/sickbeard/webserve.py
+++ b/sickbeard/webserve.py
@@ -4531,12 +4531,12 @@ class ManageSearches(Manage):
         t.recent_search_status = sickbeard.searchQueueScheduler.action.is_recentsearch_in_progress()
         t.find_propers_status = sickbeard.searchQueueScheduler.action.is_propersearch_in_progress()
         t.queue_length = sickbeard.searchQueueScheduler.action.queue_length()
-        t.provider_error_stats = [{'name': p.name, 'prov_id': p.get_id(), 'errors': p.errors.errors_sorted,
-                                   'hit_limit_time': p.hit_limit_time, 'failure_time': p.failure_time,
-                                   'last_error': p.last_error,
-                                   'next_try': p.get_next_try_time, 'has_limit': getattr(p, 'has_limit', False)}
-                                  for p in sickbeard.providerList + sickbeard.newznabProviderList]
-        t.provider_errors = 0 < len([p for p in t.provider_error_stats if len(p['errors'])])
+        t.provider_fail_stats = filter(lambda stat: len(stat['fails']), [{
+            'active': p.is_active(), 'name': p.name, 'prov_id': p.get_id(), 'prov_img': p.image_name(),
+            'fails': p.fails.fails_sorted, 'tmr_limit_time': p.tmr_limit_time,
+            'next_try': p.get_next_try_time, 'has_limit': getattr(p, 'has_limit', False)}
+            for p in sickbeard.providerList + sickbeard.newznabProviderList])
+        t.provider_fails = 0 < len(t.provider_fail_stats)
 
         t.submenu = self.ManageMenu('Search')
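Note: the filtered t.provider_fail_stats list is what manage_manageSearches.tmpl iterates to build the per-provider failure table, and since the filter() already drops zero-fail providers, t.provider_fails can simply test that the list is non-empty. The shape of one entry, with illustrative values (field names follow the hunk above; the 'fails' row layout is assumed from the day-bucketed counts the template renders):

    # Hypothetical entry; values invented for the example.
    stat = {
        'active': True,                 # p.is_active()
        'name': 'omgwtfnzbs',
        'prov_id': 'omgwtfnzbs',
        'prov_img': 'omgwtfnzbs.png',   # p.image_name()
        'fails': [{'date': '2017-11-02', 'http': {'count': 1}}],  # p.fails.fails_sorted
        'tmr_limit_time': None,         # set while an API hit limit is in force
        'next_try': None,               # countdown to the next allowed attempt
        'has_limit': True,              # providers with a hit limit get an extra column
    }
    assert len(stat['fails'])           # zero-fail providers were filtered out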