Merge pull request #451 from JackDandy/feature/ChangeProvNN

Change refactor and code simplification for newznab providers.
This commit is contained in:
JackDandy 2015-07-03 17:35:29 +01:00
commit 01689080e2
6 changed files with 156 additions and 198 deletions

View file

@ -37,7 +37,7 @@
* Change provider KAT remove dead url
* Change provider KAT to use mediaExtensions from common instead of private list
* Change provider KAT provider PEP8 and code convention cleanup
* Change refactor and code simplification for torrent providers
* Change refactor and code simplification for torrent and newznab providers
* Change refactor SCC to use torrent provider simplification and PEP8
* Change refactor SCD to use torrent provider simplification
* Change refactor TB to use torrent provider simplification and PEP8

View file

@ -39,7 +39,7 @@
#for $curNewznabProvider in $sickbeard.newznabProviderList:
\$(this).addProvider('$curNewznabProvider.getID()', '$curNewznabProvider.name', '$curNewznabProvider.url', '<%= starify(curNewznabProvider.key) %>', '$curNewznabProvider.catIDs', $int($curNewznabProvider.default), show_nzb_providers);
\$(this).addProvider('$curNewznabProvider.getID()', '$curNewznabProvider.name', '$curNewznabProvider.url', '<%= starify(curNewznabProvider.key) %>', '$curNewznabProvider.cat_ids', $int($curNewznabProvider.default), show_nzb_providers);
#end for

View file

@ -436,7 +436,7 @@ class ConfigMigrator():
self.migration_names = {1: 'Custom naming',
2: 'Sync backup number with version number',
3: 'Rename omgwtfnzb variables',
4: 'Add newznab catIDs',
4: 'Add newznab cat_ids',
5: 'Metadata update',
6: 'Rename daily search to recent search',
7: 'Rename coming episodes to episode view',
@ -602,7 +602,7 @@ class ConfigMigrator():
sickbeard.OMGWTFNZBS_USERNAME = check_setting_str(self.config_obj, 'omgwtfnzbs', 'omgwtfnzbs_uid', '')
sickbeard.OMGWTFNZBS_APIKEY = check_setting_str(self.config_obj, 'omgwtfnzbs', 'omgwtfnzbs_key', '')
# Migration v4: Add default newznab catIDs
# Migration v4: Add default newznab cat_ids
def _migrate_v4(self):
""" Update newznab providers so that the category IDs can be set independently via the config """
@ -624,11 +624,11 @@ class ConfigMigrator():
key = '0'
if name == 'NZBs.org':
catIDs = '5030,5040,5060,5070,5090'
cat_ids = '5030,5040,5060,5070,5090'
else:
catIDs = '5030,5040,5060'
cat_ids = '5030,5040,5060'
cur_provider_data_list = [name, url, key, catIDs, enabled]
cur_provider_data_list = [name, url, key, cat_ids, enabled]
new_newznab_data.append('|'.join(cur_provider_data_list))
sickbeard.NEWZNAB_DATA = '!!!'.join(new_newznab_data)

View file

@ -114,12 +114,12 @@ def makeNewznabProvider(configString):
try:
values = configString.split('|')
if len(values) == 9:
name, url, key, catIDs, enabled, search_mode, search_fallback, enable_recentsearch, enable_backlog = values
name, url, key, cat_ids, enabled, search_mode, search_fallback, enable_recentsearch, enable_backlog = values
else:
name = values[0]
url = values[1]
key = values[2]
catIDs = values[3]
cat_ids = values[3]
enabled = values[4]
except ValueError:
logger.log(u"Skipping Newznab provider string: '" + configString + "', incorrect format", logger.ERROR)
@ -127,7 +127,7 @@ def makeNewznabProvider(configString):
newznab = sys.modules['sickbeard.providers.newznab']
newProvider = newznab.NewznabProvider(name, url, key=key, catIDs=catIDs, search_mode=search_mode,
newProvider = newznab.NewznabProvider(name, url, key=key, cat_ids=cat_ids, search_mode=search_mode,
search_fallback=search_fallback, enable_recentsearch=enable_recentsearch,
enable_backlog=enable_backlog)
newProvider.enabled = enabled == '1'

View file

@ -18,70 +18,62 @@
import urllib
import time
import os
try:
import xml.etree.cElementTree as etree
except ImportError:
import elementtree.ElementTree as etree
import sickbeard
import generic
from sickbeard import classes,helpers,scene_exceptions,logger,tvcache
from sickbeard import encodingKludge as ek
from sickbeard import helpers, scene_exceptions, logger, tvcache
from sickbeard.exceptions import AuthException
class NewznabProvider(generic.NZBProvider):
def __init__(self, name, url, key='', catIDs='5030,5040', search_mode='eponly', search_fallback=False,
enable_recentsearch=False, enable_backlog=False):
def __init__(self, name, url, key='', cat_ids='5030,5040', search_mode='eponly',
search_fallback=False, enable_recentsearch=False, enable_backlog=False):
generic.NZBProvider.__init__(self, name, True, False)
self.cache = NewznabCache(self)
self.url = url
self.key = key
self.cat_ids = cat_ids
self.search_mode = search_mode
self.search_fallback = search_fallback
self.enable_recentsearch = enable_recentsearch
self.enable_backlog = enable_backlog
# a 0 in the key spot indicates that no key is needed
if self.key == '0':
self.needs_auth = False
else:
self.needs_auth = True
if catIDs:
self.catIDs = catIDs
else:
self.catIDs = '5030,5040'
self.needs_auth = '0' != self.key.strip() # '0' in the key setting indicates that api_key is not needed
self.default = False
self.cache = NewznabCache(self)
def configStr(self):
return self.name + '|' + self.url + '|' + self.key + '|' + self.catIDs + '|' + str(
int(self.enabled)) + '|' + self.search_mode + '|' + str(int(self.search_fallback)) + '|' + str(
int(self.enable_recentsearch)) + '|' + str(int(self.enable_backlog))
def _checkAuth(self):
def imageName(self):
if ek.ek(os.path.isfile,
ek.ek(os.path.join, sickbeard.PROG_DIR, 'gui', sickbeard.GUI_NAME, 'images', 'providers',
self.getID() + '.png')):
return self.getID() + '.png'
return 'newznab.png'
if self.needs_auth and not self.key:
logger.log(u'Incorrect authentication credentials for %s : API key is missing' % self.name, logger.DEBUG)
raise AuthException('Your authentication credentials for %s are missing, check your config.' % self.name)
def _getURL(self, url, post_data=None, params=None, timeout=30, json=False):
"""
By default this is just a simple urlopen call but this method should be overridden
for providers with special URL requirements (like cookies)
Not really changed much from the superclass, can be used in future.
"""
return True
# check for auth
if not self._doLogin():
return
def check_auth_from_data(self, data):
return helpers.getURL(url, post_data=post_data, params=params, headers=self.headers, timeout=timeout,
session=self.session, json=json)
if data is None:
return self._checkAuth()
if 'error' in data.feed:
code = data.feed['error']['code']
if '100' == code:
raise AuthException('Your API key for %s is incorrect, check your config.' % self.name)
elif '101' == code:
raise AuthException('Your account on %s has been suspended, contact the admin.' % self.name)
elif '102' == code:
raise AuthException('Your account isn\'t allowed to use the API on %s, contact the admin.' % self.name)
elif '910' == code:
logger.log(u'%s currently has their API disabled, please check with provider.' % self.name,
logger.WARNING)
else:
logger.log(u'Unknown error given from %s: %s' % (self.name, data.feed['error']['description']),
logger.ERROR)
return False
return True
def get_newznab_categories(self):
"""
@ -94,36 +86,39 @@ class NewznabProvider(generic.NZBProvider):
self._checkAuth()
params = {"t": "caps"}
params = {'t': 'caps'}
if self.needs_auth and self.key:
params['apikey'] = self.key
try:
categories = self.getURL("%s/api" % (self.url), params=params, timeout=10)
categories = self.getURL('%s/api' % self.url, params=params, timeout=10)
except:
logger.log(u"Error getting html for [%s]" %
("%s/api?%s" % (self.url, '&'.join("%s=%s" % (x,y) for x,y in params.items())) ), logger.DEBUG)
return (False, return_categories, "Error getting html for [%s]" %
("%s/api?%s" % (self.url, '&'.join("%s=%s" % (x,y) for x,y in params.items()) )))
logger.log(u'Error getting html for [%s]' %
('%s/api?%s' % (self.url, '&'.join('%s=%s' % (x, y) for x, y in params.items()))), logger.DEBUG)
return (False, return_categories, 'Error getting html for [%s]' %
('%s/api?%s' % (self.url, '&'.join('%s=%s' % (x, y) for x, y in params.items()))))
xml_categories = helpers.parse_xml(categories)
if not xml_categories:
logger.log(u"Error parsing xml for [%s]" % (self.name),
logger.DEBUG)
return (False, return_categories, "Error parsing xml for [%s]" % (self.name))
logger.log(u'Error parsing xml for [%s]' % self.name, logger.DEBUG)
return False, return_categories, 'Error parsing xml for [%s]' % self.name
try:
for category in xml_categories.iter('category'):
if category.get('name') == 'TV':
for subcat in category.findall('subcat'):
return_categories.append(subcat.attrib)
if 'TV' == category.get('name'):
for subcat in category.findall('subcat'):
return_categories.append(subcat.attrib)
except:
logger.log(u"Error parsing result for [%s]" % (self.name),
logger.DEBUG)
return (False, return_categories, "Error parsing result for [%s]" % (self.name))
logger.log(u'Error parsing result for [%s]' % self.name, logger.DEBUG)
return False, return_categories, 'Error parsing result for [%s]' % self.name
return (True, return_categories, "")
return True, return_categories, ''
def config_str(self):
return '%s|%s|%s|%s|%i|%s|%i|%i|%i' \
% (self.name or '', self.url or '', self.key or '', self.cat_ids or '', self.enabled,
self.search_mode or '', self.search_fallback, self.enable_recentsearch, self.enable_backlog)
def _get_season_search_strings(self, ep_obj):
@ -136,7 +131,7 @@ class NewznabProvider(generic.NZBProvider):
cur_params['season'] = date_str
cur_params['q'] = date_str.replace('-', '.')
elif ep_obj.show.is_anime:
cur_params['season'] = "%d" % ep_obj.scene_absolute_number
cur_params['season'] = '%d' % ep_obj.scene_absolute_number
else:
cur_params['season'] = str(ep_obj.scene_season)
@ -149,7 +144,8 @@ class NewznabProvider(generic.NZBProvider):
# add new query strings for exceptions
name_exceptions = list(
set([helpers.sanitizeSceneName(a) for a in scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]]))
set([helpers.sanitizeSceneName(a) for a in
scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]]))
for cur_exception in name_exceptions:
cur_return = cur_params.copy()
if 'q' in cur_return:
@ -170,7 +166,8 @@ class NewznabProvider(generic.NZBProvider):
params['season'] = date_str.partition('-')[0]
params['ep'] = date_str.partition('-')[2].replace('-', '/')
elif ep_obj.show.anime:
params['ep'] = "%i" % int(ep_obj.scene_absolute_number if int(ep_obj.scene_absolute_number) > 0 else ep_obj.scene_episode)
params['ep'] = '%i' % int(
ep_obj.scene_absolute_number if int(ep_obj.scene_absolute_number) > 0 else ep_obj.scene_episode)
else:
params['season'] = ep_obj.scene_season
params['ep'] = ep_obj.scene_episode
@ -184,7 +181,8 @@ class NewznabProvider(generic.NZBProvider):
# add new query strings for exceptions
name_exceptions = list(
set([helpers.sanitizeSceneName(a) for a in scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]]))
set([helpers.sanitizeSceneName(a) for a in
scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]]))
for cur_exception in name_exceptions:
cur_return = params.copy()
cur_return['q'] = cur_exception
@ -196,168 +194,128 @@ class NewznabProvider(generic.NZBProvider):
# Can be useful for newznab indexers that do not have the episodes 100% parsed.
# Start with only applying the search string to anime shows
params['q'] = cur_exception
paramsNoEp = params.copy()
params_no_ep = params.copy()
paramsNoEp['q'] = '%s.%02d' % (paramsNoEp['q'], int(paramsNoEp['ep']))
if "ep" in paramsNoEp:
paramsNoEp.pop("ep")
to_return.append(paramsNoEp)
params_no_ep['q'] = '%s.%02d' % (params_no_ep['q'], int(params_no_ep['ep']))
if 'ep' in params_no_ep:
params_no_ep.pop('ep')
to_return.append(params_no_ep)
return to_return
def _doGeneralSearch(self, search_string):
return self._doSearch({'q': search_string})
def _checkAuth(self):
if self.needs_auth and not self.key:
logger.log(u"Incorrect authentication credentials for " + self.name + " : " + "API key is missing",
logger.DEBUG)
raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.")
return True
def _checkAuthFromData(self, data):
if data is None:
return self._checkAuth()
if 'error' in data.feed:
code = data.feed['error']['code']
if code == '100':
raise AuthException("Your API key for " + self.name + " is incorrect, check your config.")
elif code == '101':
raise AuthException("Your account on " + self.name + " has been suspended, contact the administrator.")
elif code == '102':
raise AuthException(
"Your account isn't allowed to use the API on " + self.name + ", contact the administrator")
elif code == '910':
logger.log(u"" + self.name + " currently has their API disabled, please check with provider.", logger.WARNING)
return False
else:
logger.log(u"Unknown error given from " + self.name + ": " + data.feed['error']['description'],
logger.ERROR)
return False
return True
def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
self._checkAuth()
params = {"t": "tvsearch",
"maxage": sickbeard.USENET_RETENTION,
"limit": 100,
"attrs": "rageid",
"offset": 0}
if 'rid' not in search_params and 'q' not in search_params:
logger.log('Error no rid or search term given.')
return []
params = {'t': 'tvsearch',
'maxage': sickbeard.USENET_RETENTION,
'limit': 100,
'attrs': 'rageid',
'offset': 0}
# category ids
if self.show and self.show.is_sports:
params['cat'] = self.catIDs + ',5060'
elif self.show and self.show.is_anime:
params['cat'] = self.catIDs + ',5070'
else:
params['cat'] = self.catIDs
cat = []
if self.show:
if self.show.is_sports:
cat = ['5060']
elif self.show.is_anime:
cat = ['5070']
params['cat'] = ','.join([self.cat_ids] + cat)
# if max_age is set, use it, don't allow it to be missing
if age or not params['maxage']:
if not params['maxage'] or age:
params['maxage'] = age
if search_params:
params.update(search_params)
if 'rid' not in search_params and 'q' not in search_params:
logger.log("Error no rid or search term given. Report to forums with a full debug log")
return []
if self.needs_auth and self.key:
params['apikey'] = self.key
results = []
offset = total = 0
while (total >= offset) and (offset < 1000):
search_url = self.url + 'api?' + urllib.urlencode(params)
logger.log(u"Search url: " + search_url, logger.DEBUG)
data = self.cache.getRSSFeed(search_url)
# hardcoded to stop after a max of 4 hits (400 items) per query
while (offset <= total) and (offset < 400):
search_url = '%sapi?%s' % (self.url, urllib.urlencode(params))
logger.log(u'Search url: ' + search_url, logger.DEBUG)
if not data or not self._checkAuthFromData(data):
data = self.cache.getRSSFeed(search_url)
time.sleep(1.1)
if not data or not self.check_auth_from_data(data):
break
for item in data.entries:
(title, url) = self._get_title_and_url(item)
title, url = self._get_title_and_url(item)
if title and url:
results.append(item)
else:
logger.log(
u"The data returned from " + self.name + " is incomplete, this result is unusable",
logger.DEBUG)
logger.log(u'The data returned from %s is incomplete, this result is unusable' % self.name,
logger.DEBUG)
# get total and offset attribs
try:
if total == 0:
if 0 == total:
total = int(data.feed.newznab_response['total'] or 0)
hits = (total / 100 + int(0 < (total % 100)))
hits += int(0 == hits)
offset = int(data.feed.newznab_response['offset'] or 0)
except AttributeError:
break
# No items found, prevent from doing another search
if total == 0:
if 0 == total:
break
if offset != params['offset']:
logger.log("Tell your newznab provider to fix their bloody newznab responses")
logger.log('Tell your newznab provider to fix their bloody newznab responses')
break
params['offset'] += params['limit']
if (total > int(params['offset'])):
offset = int(params['offset'])
# if there are more items available than the amount given in one call, grab some more
logger.log(str(
total - int(params['offset'])) + " more items to be fetched from provider. Fetching another " + str(
params['limit']) + " items.", logger.DEBUG)
else:
logger.log(str(
total - int(params['offset'])) + " No more searches needed, couldn't find anything I was looking for! " + str(
params['limit']) + " items.", logger.DEBUG)
if total <= params['offset']:
logger.log('%s item%s found that will be used for episode matching' % (total, helpers.maybe_plural(total)),
logger.DEBUG)
break
time.sleep(0.2)
return results
# there are more items available than the amount given in one call, grab some more
items = total - params['offset']
logger.log('%s more item%s to fetch from a batch of up to %s items.'
% (items, helpers.maybe_plural(items), params['limit']), logger.DEBUG)
def findPropers(self, search_date=None):
return self._find_propers(search_date)
class NewznabCache(tvcache.TVCache):
def __init__(self, provider):
def __init__(self, provider):
tvcache.TVCache.__init__(self, provider)
# only poll newznab providers every 15 minutes max
self.minTime = 15
self.minTime = 15 # cache update frequency
def _getRSSData(self):
params = {"t": "tvsearch",
"cat": self.provider.catIDs + ',5060,5070',
"attrs": "rageid"}
params = {'t': 'tvsearch',
'cat': self.provider.cat_ids + ',5060,5070',
'attrs': 'rageid'}
if self.provider.needs_auth and self.provider.key:
params['apikey'] = self.provider.key
rss_url = self.provider.url + 'api?' + urllib.urlencode(params)
rss_url = '%sapi?%s' % (self.provider.url, urllib.urlencode(params))
logger.log(self.provider.name + " cache update URL: " + rss_url, logger.DEBUG)
logger.log(self.provider.name + ' cache update URL: ' + rss_url, logger.DEBUG)
return self.getRSSFeed(rss_url)
def _checkAuth(self, data):
return self.provider._checkAuthFromData(data)
def _checkAuth(self, *data):
return self.provider.check_auth_from_data(data[0])
def updateCache(self):
@ -381,40 +339,40 @@ class NewznabCache(tvcache.TVCache):
if ci is not None:
cl.append(ci)
if len(cl) > 0:
myDB = self._getDB()
myDB.mass_action(cl)
if 0 < len(cl):
my_db = self._getDB()
my_db.mass_action(cl)
else:
raise AuthException(
u"Your authentication credentials for " + self.provider.name + " are incorrect, check your config")
u'Your authentication credentials for ' + self.provider.name + ' are incorrect, check your config')
return []
# override method that parses the rageid from the newznab feed
def _parseItem(self, item):
title = item.title
url = item.link
def _parseItem(self, *item):
attrs = item.newznab_attr
title = item[0].title
url = item[0].link
attrs = item[0].newznab_attr
if not isinstance(attrs, list):
attrs = [item.newznab_attr]
attrs = [item[0].newznab_attr]
tvrageid = 0
for attr in attrs:
if attr['name'] == 'tvrageid':
if 'tvrageid' == attr['name']:
tvrageid = int(attr['value'])
break
self._checkItemAuth(title, url)
if not title or not url:
logger.log(
u"The data returned from the " + self.provider.name + " feed is incomplete, this result is unusable",
logger.DEBUG)
logger.log(u'The data returned from the %s feed is incomplete, this result is unusable'
% self.provider.name, logger.DEBUG)
return None
url = self._translateLinkURL(url)
logger.log(u"Attempting to add item from RSS to cache: " + title, logger.DEBUG)
logger.log(u'Attempting to add item from RSS to cache: ' + title, logger.DEBUG)
return self._addCacheEntry(title, url, indexer_id=tvrageid)

View file

@ -3950,12 +3950,12 @@ class ConfigProviders(Config):
else:
providerDict[name].needs_auth = True
return providerDict[name].getID() + '|' + providerDict[name].configStr()
return providerDict[name].getID() + '|' + providerDict[name].config_str()
else:
newProvider = newznab.NewznabProvider(name, url, key=key)
sickbeard.newznabProviderList.append(newProvider)
return newProvider.getID() + '|' + newProvider.configStr()
return newProvider.getID() + '|' + newProvider.config_str()
def getNewznabCategories(self, name, url, key):
'''
@ -4090,7 +4090,7 @@ class ConfigProviders(Config):
newznabProviderDict[cur_id].url = cur_url
if cur_key:
newznabProviderDict[cur_id].key = cur_key
newznabProviderDict[cur_id].catIDs = cur_cat
newznabProviderDict[cur_id].cat_ids = cur_cat
# a 0 in the key spot indicates that no key is needed
if cur_key == '0':
newznabProviderDict[cur_id].needs_auth = False
@ -4345,7 +4345,7 @@ class ConfigProviders(Config):
except:
curNzbProvider.enable_backlog = 0 # these exceptions are actually catching unselected checkboxes
sickbeard.NEWZNAB_DATA = '!!!'.join([x.configStr() for x in sickbeard.newznabProviderList])
sickbeard.NEWZNAB_DATA = '!!!'.join([x.config_str() for x in sickbeard.newznabProviderList])
sickbeard.PROVIDER_ORDER = provider_list
helpers.clear_unused_providers()