Merge pull request #451 from JackDandy/feature/ChangeProvNN

Change refactor and code simplification for newznab providers.
This commit is contained in:
JackDandy 2015-07-03 17:35:29 +01:00
commit 01689080e2
6 changed files with 156 additions and 198 deletions

View file

@ -37,7 +37,7 @@
* Change provider KAT remove dead url * Change provider KAT remove dead url
* Change provider KAT to use mediaExtensions from common instead of private list * Change provider KAT to use mediaExtensions from common instead of private list
* Change provider KAT provider PEP8 and code convention cleanup * Change provider KAT provider PEP8 and code convention cleanup
* Change refactor and code simplification for torrent providers * Change refactor and code simplification for torrent and newznab providers
* Change refactor SCC to use torrent provider simplification and PEP8 * Change refactor SCC to use torrent provider simplification and PEP8
* Change refactor SCD to use torrent provider simplification * Change refactor SCD to use torrent provider simplification
* Change refactor TB to use torrent provider simplification and PEP8 * Change refactor TB to use torrent provider simplification and PEP8

View file

@ -39,7 +39,7 @@
#for $curNewznabProvider in $sickbeard.newznabProviderList: #for $curNewznabProvider in $sickbeard.newznabProviderList:
\$(this).addProvider('$curNewznabProvider.getID()', '$curNewznabProvider.name', '$curNewznabProvider.url', '<%= starify(curNewznabProvider.key) %>', '$curNewznabProvider.catIDs', $int($curNewznabProvider.default), show_nzb_providers); \$(this).addProvider('$curNewznabProvider.getID()', '$curNewznabProvider.name', '$curNewznabProvider.url', '<%= starify(curNewznabProvider.key) %>', '$curNewznabProvider.cat_ids', $int($curNewznabProvider.default), show_nzb_providers);
#end for #end for

View file

@ -436,7 +436,7 @@ class ConfigMigrator():
self.migration_names = {1: 'Custom naming', self.migration_names = {1: 'Custom naming',
2: 'Sync backup number with version number', 2: 'Sync backup number with version number',
3: 'Rename omgwtfnzb variables', 3: 'Rename omgwtfnzb variables',
4: 'Add newznab catIDs', 4: 'Add newznab cat_ids',
5: 'Metadata update', 5: 'Metadata update',
6: 'Rename daily search to recent search', 6: 'Rename daily search to recent search',
7: 'Rename coming episodes to episode view', 7: 'Rename coming episodes to episode view',
@ -602,7 +602,7 @@ class ConfigMigrator():
sickbeard.OMGWTFNZBS_USERNAME = check_setting_str(self.config_obj, 'omgwtfnzbs', 'omgwtfnzbs_uid', '') sickbeard.OMGWTFNZBS_USERNAME = check_setting_str(self.config_obj, 'omgwtfnzbs', 'omgwtfnzbs_uid', '')
sickbeard.OMGWTFNZBS_APIKEY = check_setting_str(self.config_obj, 'omgwtfnzbs', 'omgwtfnzbs_key', '') sickbeard.OMGWTFNZBS_APIKEY = check_setting_str(self.config_obj, 'omgwtfnzbs', 'omgwtfnzbs_key', '')
# Migration v4: Add default newznab catIDs # Migration v4: Add default newznab cat_ids
def _migrate_v4(self): def _migrate_v4(self):
""" Update newznab providers so that the category IDs can be set independently via the config """ """ Update newznab providers so that the category IDs can be set independently via the config """
@ -624,11 +624,11 @@ class ConfigMigrator():
key = '0' key = '0'
if name == 'NZBs.org': if name == 'NZBs.org':
catIDs = '5030,5040,5060,5070,5090' cat_ids = '5030,5040,5060,5070,5090'
else: else:
catIDs = '5030,5040,5060' cat_ids = '5030,5040,5060'
cur_provider_data_list = [name, url, key, catIDs, enabled] cur_provider_data_list = [name, url, key, cat_ids, enabled]
new_newznab_data.append('|'.join(cur_provider_data_list)) new_newznab_data.append('|'.join(cur_provider_data_list))
sickbeard.NEWZNAB_DATA = '!!!'.join(new_newznab_data) sickbeard.NEWZNAB_DATA = '!!!'.join(new_newznab_data)

View file

@ -114,12 +114,12 @@ def makeNewznabProvider(configString):
try: try:
values = configString.split('|') values = configString.split('|')
if len(values) == 9: if len(values) == 9:
name, url, key, catIDs, enabled, search_mode, search_fallback, enable_recentsearch, enable_backlog = values name, url, key, cat_ids, enabled, search_mode, search_fallback, enable_recentsearch, enable_backlog = values
else: else:
name = values[0] name = values[0]
url = values[1] url = values[1]
key = values[2] key = values[2]
catIDs = values[3] cat_ids = values[3]
enabled = values[4] enabled = values[4]
except ValueError: except ValueError:
logger.log(u"Skipping Newznab provider string: '" + configString + "', incorrect format", logger.ERROR) logger.log(u"Skipping Newznab provider string: '" + configString + "', incorrect format", logger.ERROR)
@ -127,7 +127,7 @@ def makeNewznabProvider(configString):
newznab = sys.modules['sickbeard.providers.newznab'] newznab = sys.modules['sickbeard.providers.newznab']
newProvider = newznab.NewznabProvider(name, url, key=key, catIDs=catIDs, search_mode=search_mode, newProvider = newznab.NewznabProvider(name, url, key=key, cat_ids=cat_ids, search_mode=search_mode,
search_fallback=search_fallback, enable_recentsearch=enable_recentsearch, search_fallback=search_fallback, enable_recentsearch=enable_recentsearch,
enable_backlog=enable_backlog) enable_backlog=enable_backlog)
newProvider.enabled = enabled == '1' newProvider.enabled = enabled == '1'

View file

@ -18,71 +18,63 @@
import urllib import urllib
import time import time
import os
try:
import xml.etree.cElementTree as etree
except ImportError:
import elementtree.ElementTree as etree
import sickbeard import sickbeard
import generic import generic
from sickbeard import classes,helpers,scene_exceptions,logger,tvcache from sickbeard import helpers, scene_exceptions, logger, tvcache
from sickbeard import encodingKludge as ek
from sickbeard.exceptions import AuthException from sickbeard.exceptions import AuthException
class NewznabProvider(generic.NZBProvider): class NewznabProvider(generic.NZBProvider):
def __init__(self, name, url, key='', catIDs='5030,5040', search_mode='eponly', search_fallback=False,
enable_recentsearch=False, enable_backlog=False): def __init__(self, name, url, key='', cat_ids='5030,5040', search_mode='eponly',
search_fallback=False, enable_recentsearch=False, enable_backlog=False):
generic.NZBProvider.__init__(self, name, True, False) generic.NZBProvider.__init__(self, name, True, False)
self.cache = NewznabCache(self)
self.url = url self.url = url
self.key = key self.key = key
self.cat_ids = cat_ids
self.search_mode = search_mode self.search_mode = search_mode
self.search_fallback = search_fallback self.search_fallback = search_fallback
self.enable_recentsearch = enable_recentsearch self.enable_recentsearch = enable_recentsearch
self.enable_backlog = enable_backlog self.enable_backlog = enable_backlog
self.needs_auth = '0' != self.key.strip() # '0' in the key setting indicates that api_key is not needed
# a 0 in the key spot indicates that no key is needed
if self.key == '0':
self.needs_auth = False
else:
self.needs_auth = True
if catIDs:
self.catIDs = catIDs
else:
self.catIDs = '5030,5040'
self.default = False self.default = False
self.cache = NewznabCache(self)
def configStr(self): def _checkAuth(self):
return self.name + '|' + self.url + '|' + self.key + '|' + self.catIDs + '|' + str(
int(self.enabled)) + '|' + self.search_mode + '|' + str(int(self.search_fallback)) + '|' + str(
int(self.enable_recentsearch)) + '|' + str(int(self.enable_backlog))
def imageName(self): if self.needs_auth and not self.key:
if ek.ek(os.path.isfile, logger.log(u'Incorrect authentication credentials for %s : API key is missing' % self.name, logger.DEBUG)
ek.ek(os.path.join, sickbeard.PROG_DIR, 'gui', sickbeard.GUI_NAME, 'images', 'providers', raise AuthException('Your authentication credentials for %s are missing, check your config.' % self.name)
self.getID() + '.png')):
return self.getID() + '.png'
return 'newznab.png'
def _getURL(self, url, post_data=None, params=None, timeout=30, json=False): return True
"""
By default this is just a simple urlopen call but this method should be overridden
for providers with special URL requirements (like cookies)
Not really changed much from the superclass, can be used in future.
"""
# check for auth def check_auth_from_data(self, data):
if not self._doLogin():
return if data is None:
return self._checkAuth()
if 'error' in data.feed:
code = data.feed['error']['code']
if '100' == code:
raise AuthException('Your API key for %s is incorrect, check your config.' % self.name)
elif '101' == code:
raise AuthException('Your account on %s has been suspended, contact the admin.' % self.name)
elif '102' == code:
raise AuthException('Your account isn\'t allowed to use the API on %s, contact the admin.' % self.name)
elif '910' == code:
logger.log(u'%s currently has their API disabled, please check with provider.' % self.name,
logger.WARNING)
else:
logger.log(u'Unknown error given from %s: %s' % (self.name, data.feed['error']['description']),
logger.ERROR)
return False
return True
return helpers.getURL(url, post_data=post_data, params=params, headers=self.headers, timeout=timeout,
session=self.session, json=json)
def get_newznab_categories(self): def get_newznab_categories(self):
""" """
Uses the newznab provider url and apikey to get the capabilities. Uses the newznab provider url and apikey to get the capabilities.
@ -91,39 +83,42 @@ class NewznabProvider(generic.NZBProvider):
{"id": "5080", "name": "Documentary"}, {"id": "5020", "name": "Foreign"}...etc}], error message) {"id": "5080", "name": "Documentary"}, {"id": "5020", "name": "Foreign"}...etc}], error message)
""" """
return_categories = [] return_categories = []
self._checkAuth() self._checkAuth()
params = {"t": "caps"} params = {'t': 'caps'}
if self.needs_auth and self.key: if self.needs_auth and self.key:
params['apikey'] = self.key params['apikey'] = self.key
try: try:
categories = self.getURL("%s/api" % (self.url), params=params, timeout=10) categories = self.getURL('%s/api' % self.url, params=params, timeout=10)
except: except:
logger.log(u"Error getting html for [%s]" % logger.log(u'Error getting html for [%s]' %
("%s/api?%s" % (self.url, '&'.join("%s=%s" % (x,y) for x,y in params.items())) ), logger.DEBUG) ('%s/api?%s' % (self.url, '&'.join('%s=%s' % (x, y) for x, y in params.items()))), logger.DEBUG)
return (False, return_categories, "Error getting html for [%s]" % return (False, return_categories, 'Error getting html for [%s]' %
("%s/api?%s" % (self.url, '&'.join("%s=%s" % (x,y) for x,y in params.items()) ))) ('%s/api?%s' % (self.url, '&'.join('%s=%s' % (x, y) for x, y in params.items()))))
xml_categories = helpers.parse_xml(categories) xml_categories = helpers.parse_xml(categories)
if not xml_categories: if not xml_categories:
logger.log(u"Error parsing xml for [%s]" % (self.name), logger.log(u'Error parsing xml for [%s]' % self.name, logger.DEBUG)
logger.DEBUG) return False, return_categories, 'Error parsing xml for [%s]' % self.name
return (False, return_categories, "Error parsing xml for [%s]" % (self.name))
try: try:
for category in xml_categories.iter('category'): for category in xml_categories.iter('category'):
if category.get('name') == 'TV': if 'TV' == category.get('name'):
for subcat in category.findall('subcat'): for subcat in category.findall('subcat'):
return_categories.append(subcat.attrib) return_categories.append(subcat.attrib)
except: except:
logger.log(u"Error parsing result for [%s]" % (self.name), logger.log(u'Error parsing result for [%s]' % self.name, logger.DEBUG)
logger.DEBUG) return False, return_categories, 'Error parsing result for [%s]' % self.name
return (False, return_categories, "Error parsing result for [%s]" % (self.name))
return True, return_categories, ''
return (True, return_categories, "")
def config_str(self):
return '%s|%s|%s|%s|%i|%s|%i|%i|%i' \
% (self.name or '', self.url or '', self.key or '', self.cat_ids or '', self.enabled,
self.search_mode or '', self.search_fallback, self.enable_recentsearch, self.enable_backlog)
def _get_season_search_strings(self, ep_obj): def _get_season_search_strings(self, ep_obj):
@ -136,7 +131,7 @@ class NewznabProvider(generic.NZBProvider):
cur_params['season'] = date_str cur_params['season'] = date_str
cur_params['q'] = date_str.replace('-', '.') cur_params['q'] = date_str.replace('-', '.')
elif ep_obj.show.is_anime: elif ep_obj.show.is_anime:
cur_params['season'] = "%d" % ep_obj.scene_absolute_number cur_params['season'] = '%d' % ep_obj.scene_absolute_number
else: else:
cur_params['season'] = str(ep_obj.scene_season) cur_params['season'] = str(ep_obj.scene_season)
@ -149,7 +144,8 @@ class NewznabProvider(generic.NZBProvider):
# add new query strings for exceptions # add new query strings for exceptions
name_exceptions = list( name_exceptions = list(
set([helpers.sanitizeSceneName(a) for a in scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]])) set([helpers.sanitizeSceneName(a) for a in
scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]]))
for cur_exception in name_exceptions: for cur_exception in name_exceptions:
cur_return = cur_params.copy() cur_return = cur_params.copy()
if 'q' in cur_return: if 'q' in cur_return:
@ -170,7 +166,8 @@ class NewznabProvider(generic.NZBProvider):
params['season'] = date_str.partition('-')[0] params['season'] = date_str.partition('-')[0]
params['ep'] = date_str.partition('-')[2].replace('-', '/') params['ep'] = date_str.partition('-')[2].replace('-', '/')
elif ep_obj.show.anime: elif ep_obj.show.anime:
params['ep'] = "%i" % int(ep_obj.scene_absolute_number if int(ep_obj.scene_absolute_number) > 0 else ep_obj.scene_episode) params['ep'] = '%i' % int(
ep_obj.scene_absolute_number if int(ep_obj.scene_absolute_number) > 0 else ep_obj.scene_episode)
else: else:
params['season'] = ep_obj.scene_season params['season'] = ep_obj.scene_season
params['ep'] = ep_obj.scene_episode params['ep'] = ep_obj.scene_episode
@ -184,180 +181,141 @@ class NewznabProvider(generic.NZBProvider):
# add new query strings for exceptions # add new query strings for exceptions
name_exceptions = list( name_exceptions = list(
set([helpers.sanitizeSceneName(a) for a in scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]])) set([helpers.sanitizeSceneName(a) for a in
scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]]))
for cur_exception in name_exceptions: for cur_exception in name_exceptions:
cur_return = params.copy() cur_return = params.copy()
cur_return['q'] = cur_exception cur_return['q'] = cur_exception
to_return.append(cur_return) to_return.append(cur_return)
if ep_obj.show.anime: if ep_obj.show.anime:
# Experimental, add a searchstring without search explicitly for the episode! # Experimental, add a searchstring without search explicitly for the episode!
# Remove the ?ep=e46 parameter and add the episode number to the query parameter. # Remove the ?ep=e46 parameter and add the episode number to the query parameter.
# Can be useful for newznab indexers that do not have the episodes 100% parsed. # Can be useful for newznab indexers that do not have the episodes 100% parsed.
# Start with only applying the searchstring to anime shows # Start with only applying the searchstring to anime shows
params['q'] = cur_exception params['q'] = cur_exception
paramsNoEp = params.copy() params_no_ep = params.copy()
paramsNoEp['q'] = '%s.%02d' % (paramsNoEp['q'], int(paramsNoEp['ep'])) params_no_ep['q'] = '%s.%02d' % (params_no_ep['q'], int(params_no_ep['ep']))
if "ep" in paramsNoEp: if 'ep' in params_no_ep:
paramsNoEp.pop("ep") params_no_ep.pop('ep')
to_return.append(paramsNoEp) to_return.append(params_no_ep)
return to_return return to_return
def _doGeneralSearch(self, search_string):
return self._doSearch({'q': search_string})
def _checkAuth(self):
if self.needs_auth and not self.key:
logger.log(u"Incorrect authentication credentials for " + self.name + " : " + "API key is missing",
logger.DEBUG)
raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.")
return True
def _checkAuthFromData(self, data):
if data is None:
return self._checkAuth()
if 'error' in data.feed:
code = data.feed['error']['code']
if code == '100':
raise AuthException("Your API key for " + self.name + " is incorrect, check your config.")
elif code == '101':
raise AuthException("Your account on " + self.name + " has been suspended, contact the administrator.")
elif code == '102':
raise AuthException(
"Your account isn't allowed to use the API on " + self.name + ", contact the administrator")
elif code == '910':
logger.log(u"" + self.name + " currently has their API disabled, please check with provider.", logger.WARNING)
return False
else:
logger.log(u"Unknown error given from " + self.name + ": " + data.feed['error']['description'],
logger.ERROR)
return False
return True
def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0): def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
self._checkAuth() self._checkAuth()
params = {"t": "tvsearch", if 'rid' not in search_params and 'q' not in search_params:
"maxage": sickbeard.USENET_RETENTION, logger.log('Error no rid or search term given.')
"limit": 100, return []
"attrs": "rageid",
"offset": 0} params = {'t': 'tvsearch',
'maxage': sickbeard.USENET_RETENTION,
'limit': 100,
'attrs': 'rageid',
'offset': 0}
# category ids # category ids
if self.show and self.show.is_sports: cat = []
params['cat'] = self.catIDs + ',5060' if self.show:
elif self.show and self.show.is_anime: if self.show.is_sports:
params['cat'] = self.catIDs + ',5070' cat = ['5060']
else: elif self.show.is_anime:
params['cat'] = self.catIDs cat = ['5070']
params['cat'] = ','.join([self.cat_ids] + cat)
# if max_age is set, use it, don't allow it to be missing # if max_age is set, use it, don't allow it to be missing
if age or not params['maxage']: if not params['maxage'] or age:
params['maxage'] = age params['maxage'] = age
if search_params: if search_params:
params.update(search_params) params.update(search_params)
if 'rid' not in search_params and 'q' not in search_params:
logger.log("Error no rid or search term given. Report to forums with a full debug log")
return []
if self.needs_auth and self.key: if self.needs_auth and self.key:
params['apikey'] = self.key params['apikey'] = self.key
results = [] results = []
offset = total = 0 offset = total = 0
while (total >= offset) and (offset < 1000): # hardcoded to stop after a max of 4 hits (400 items) per query
search_url = self.url + 'api?' + urllib.urlencode(params) while (offset <= total) and (offset < 400):
logger.log(u"Search url: " + search_url, logger.DEBUG) search_url = '%sapi?%s' % (self.url, urllib.urlencode(params))
data = self.cache.getRSSFeed(search_url) logger.log(u'Search url: ' + search_url, logger.DEBUG)
if not data or not self._checkAuthFromData(data): data = self.cache.getRSSFeed(search_url)
time.sleep(1.1)
if not data or not self.check_auth_from_data(data):
break break
for item in data.entries: for item in data.entries:
(title, url) = self._get_title_and_url(item) title, url = self._get_title_and_url(item)
if title and url: if title and url:
results.append(item) results.append(item)
else: else:
logger.log( logger.log(u'The data returned from %s is incomplete, this result is unusable' % self.name,
u"The data returned from " + self.name + " is incomplete, this result is unusable", logger.DEBUG)
logger.DEBUG)
# get total and offset attribs # get total and offset attribs
try: try:
if total == 0: if 0 == total:
total = int(data.feed.newznab_response['total'] or 0) total = int(data.feed.newznab_response['total'] or 0)
hits = (total / 100 + int(0 < (total % 100)))
hits += int(0 == hits)
offset = int(data.feed.newznab_response['offset'] or 0) offset = int(data.feed.newznab_response['offset'] or 0)
except AttributeError: except AttributeError:
break break
# No items found, prevent from doing another search # No items found, prevent from doing another search
if total == 0: if 0 == total:
break break
if offset != params['offset']: if offset != params['offset']:
logger.log("Tell your newznab provider to fix their bloody newznab responses") logger.log('Tell your newznab provider to fix their bloody newznab responses')
break break
params['offset'] += params['limit'] params['offset'] += params['limit']
if (total > int(params['offset'])): if total <= params['offset']:
offset = int(params['offset']) logger.log('%s item%s found that will be used for episode matching' % (total, helpers.maybe_plural(total)),
# if there are more items available then the amount given in one call, grab some more logger.DEBUG)
logger.log(str(
total - int(params['offset'])) + " more items to be fetched from provider. Fetching another " + str(
params['limit']) + " items.", logger.DEBUG)
else:
logger.log(str(
total - int(params['offset'])) + " No more searches needed, couldn't find anything I was looking for! " + str(
params['limit']) + " items.", logger.DEBUG)
break break
time.sleep(0.2) # there are more items available than the amount given in one call, grab some more
items = total - params['offset']
return results logger.log('%s more item%s to fetch from a batch of up to %s items.'
% (items, helpers.maybe_plural(items), params['limit']), logger.DEBUG)
def findPropers(self, search_date=None): def findPropers(self, search_date=None):
return self._find_propers(search_date) return self._find_propers(search_date)
class NewznabCache(tvcache.TVCache): class NewznabCache(tvcache.TVCache):
def __init__(self, provider):
def __init__(self, provider):
tvcache.TVCache.__init__(self, provider) tvcache.TVCache.__init__(self, provider)
# only poll newznab providers every 15 minutes max self.minTime = 15 # cache update frequency
self.minTime = 15
def _getRSSData(self): def _getRSSData(self):
params = {"t": "tvsearch", params = {'t': 'tvsearch',
"cat": self.provider.catIDs + ',5060,5070', 'cat': self.provider.cat_ids + ',5060,5070',
"attrs": "rageid"} 'attrs': 'rageid'}
if self.provider.needs_auth and self.provider.key: if self.provider.needs_auth and self.provider.key:
params['apikey'] = self.provider.key params['apikey'] = self.provider.key
rss_url = self.provider.url + 'api?' + urllib.urlencode(params) rss_url = '%sapi?%s' % (self.provider.url, urllib.urlencode(params))
logger.log(self.provider.name + " cache update URL: " + rss_url, logger.DEBUG) logger.log(self.provider.name + ' cache update URL: ' + rss_url, logger.DEBUG)
return self.getRSSFeed(rss_url) return self.getRSSFeed(rss_url)
def _checkAuth(self, data): def _checkAuth(self, *data):
return self.provider._checkAuthFromData(data)
return self.provider.check_auth_from_data(data[0])
def updateCache(self): def updateCache(self):
@ -381,40 +339,40 @@ class NewznabCache(tvcache.TVCache):
if ci is not None: if ci is not None:
cl.append(ci) cl.append(ci)
if len(cl) > 0: if 0 < len(cl):
myDB = self._getDB() my_db = self._getDB()
myDB.mass_action(cl) my_db.mass_action(cl)
else: else:
raise AuthException( raise AuthException(
u"Your authentication credentials for " + self.provider.name + " are incorrect, check your config") u'Your authentication credentials for ' + self.provider.name + ' are incorrect, check your config')
return [] return []
# overridden method that parses the rageid from the newznab feed # overridden method that parses the rageid from the newznab feed
def _parseItem(self, item): def _parseItem(self, *item):
title = item.title
url = item.link
attrs = item.newznab_attr title = item[0].title
url = item[0].link
attrs = item[0].newznab_attr
if not isinstance(attrs, list): if not isinstance(attrs, list):
attrs = [item.newznab_attr] attrs = [item[0].newznab_attr]
tvrageid = 0 tvrageid = 0
for attr in attrs: for attr in attrs:
if attr['name'] == 'tvrageid': if 'tvrageid' == attr['name']:
tvrageid = int(attr['value']) tvrageid = int(attr['value'])
break break
self._checkItemAuth(title, url) self._checkItemAuth(title, url)
if not title or not url: if not title or not url:
logger.log( logger.log(u'The data returned from the %s feed is incomplete, this result is unusable'
u"The data returned from the " + self.provider.name + " feed is incomplete, this result is unusable", % self.provider.name, logger.DEBUG)
logger.DEBUG)
return None return None
url = self._translateLinkURL(url) url = self._translateLinkURL(url)
logger.log(u"Attempting to add item from RSS to cache: " + title, logger.DEBUG) logger.log(u'Attempting to add item from RSS to cache: ' + title, logger.DEBUG)
return self._addCacheEntry(title, url, indexer_id=tvrageid) return self._addCacheEntry(title, url, indexer_id=tvrageid)

View file

@ -3950,12 +3950,12 @@ class ConfigProviders(Config):
else: else:
providerDict[name].needs_auth = True providerDict[name].needs_auth = True
return providerDict[name].getID() + '|' + providerDict[name].configStr() return providerDict[name].getID() + '|' + providerDict[name].config_str()
else: else:
newProvider = newznab.NewznabProvider(name, url, key=key) newProvider = newznab.NewznabProvider(name, url, key=key)
sickbeard.newznabProviderList.append(newProvider) sickbeard.newznabProviderList.append(newProvider)
return newProvider.getID() + '|' + newProvider.configStr() return newProvider.getID() + '|' + newProvider.config_str()
def getNewznabCategories(self, name, url, key): def getNewznabCategories(self, name, url, key):
''' '''
@ -4090,7 +4090,7 @@ class ConfigProviders(Config):
newznabProviderDict[cur_id].url = cur_url newznabProviderDict[cur_id].url = cur_url
if cur_key: if cur_key:
newznabProviderDict[cur_id].key = cur_key newznabProviderDict[cur_id].key = cur_key
newznabProviderDict[cur_id].catIDs = cur_cat newznabProviderDict[cur_id].cat_ids = cur_cat
# a 0 in the key spot indicates that no key is needed # a 0 in the key spot indicates that no key is needed
if cur_key == '0': if cur_key == '0':
newznabProviderDict[cur_id].needs_auth = False newznabProviderDict[cur_id].needs_auth = False
@ -4345,7 +4345,7 @@ class ConfigProviders(Config):
except: except:
curNzbProvider.enable_backlog = 0 # these exceptions are actually catching unselected checkboxes curNzbProvider.enable_backlog = 0 # these exceptions are actually catching unselected checkboxes
sickbeard.NEWZNAB_DATA = '!!!'.join([x.configStr() for x in sickbeard.newznabProviderList]) sickbeard.NEWZNAB_DATA = '!!!'.join([x.config_str() for x in sickbeard.newznabProviderList])
sickbeard.PROVIDER_ORDER = provider_list sickbeard.PROVIDER_ORDER = provider_list
helpers.clear_unused_providers() helpers.clear_unused_providers()