# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.

import urllib
import time

import sickbeard
import generic

from sickbeard import helpers, scene_exceptions, logger, tvcache
from sickbeard.exceptions import AuthException


class NewznabProvider(generic.NZBProvider):

    def __init__(self, name, url, key='', cat_ids='5030,5040', search_mode='eponly',
                 search_fallback=False, enable_recentsearch=False, enable_backlog=False):
        generic.NZBProvider.__init__(self, name, True, False)

        self.url = url
        self.key = key
        self.cat_ids = cat_ids
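        # the default cat_ids '5030,5040' are the standard newznab TV/SD and TV/HD categories;
        # sport (5060) and anime (5070) ids are appended at search time when a show needs them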
        self.search_mode = search_mode
        self.search_fallback = search_fallback
        self.enable_recentsearch = enable_recentsearch
        self.enable_backlog = enable_backlog
        self.needs_auth = '0' != self.key.strip()  # '0' in the key setting indicates that api_key is not needed
        self.default = False
        self.cache = NewznabCache(self)
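
        # a minimal construction sketch (hypothetical values; SickGear normally builds
        # providers from each saved config_str):
        #   provider = NewznabProvider('SomeIndexer', 'https://indexer.example/', key='abc123')
        #   provider.cache.updateCache()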

    def _checkAuth(self):

        if self.needs_auth and not self.key:
            logger.log(u'Incorrect authentication credentials for %s : API key is missing' % self.name, logger.DEBUG)
            raise AuthException('Your authentication credentials for %s are missing, check your config.' % self.name)

        return True

    def check_auth_from_data(self, data):

        if data is None:
            return self._checkAuth()

        if 'error' in data.feed:
            code = data.feed['error']['code']

            if '100' == code:
                raise AuthException('Your API key for %s is incorrect, check your config.' % self.name)
            elif '101' == code:
                raise AuthException('Your account on %s has been suspended, contact the admin.' % self.name)
            elif '102' == code:
                raise AuthException('Your account isn\'t allowed to use the API on %s, contact the admin.' % self.name)
            elif '910' == code:
                logger.log(u'%s currently has their API disabled, please check with provider.' % self.name,
                           logger.WARNING)
            else:
                logger.log(u'Unknown error given from %s: %s' % (self.name, data.feed['error']['description']),
                           logger.ERROR)
            return False

        return True

    def get_newznab_categories(self):
        """
        Uses the newznab provider url and apikey to get the capabilities.
        Makes use of the default newznab caps param, e.g. http://yournewznab/api?t=caps&apikey=skdfiw7823sdkdsfjsfk
        Returns a tuple of (success or not, array of dicts such as [{"id": "5070", "name": "Anime"},
        {"id": "5080", "name": "Documentary"}, {"id": "5020", "name": "Foreign"}, ...etc], error message)
        """
        return_categories = []
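
        # Expected (abridged) caps XML per the newznab spec -- an assumed shape, matching
        # what the parsing below looks for:
        #   <caps>
        #     <categories>
        #       <category id="5000" name="TV">
        #         <subcat id="5070" name="Anime"/>
        #       </category>
        #     </categories>
        #   </caps>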

        self._checkAuth()

        params = {'t': 'caps'}
        if self.needs_auth and self.key:
            params['apikey'] = self.key

        try:
            categories = self.getURL('%s/api' % self.url, params=params, timeout=10)
        except Exception:
            logger.log(u'Error getting html for [%s]' %
                       ('%s/api?%s' % (self.url, '&'.join('%s=%s' % (x, y) for x, y in params.items()))), logger.DEBUG)
            return (False, return_categories, 'Error getting html for [%s]' %
                    ('%s/api?%s' % (self.url, '&'.join('%s=%s' % (x, y) for x, y in params.items()))))

        xml_categories = helpers.parse_xml(categories)

        if not xml_categories:
            logger.log(u'Error parsing xml for [%s]' % self.name, logger.DEBUG)
            return False, return_categories, 'Error parsing xml for [%s]' % self.name

        try:
            for category in xml_categories.iter('category'):
                if 'TV' == category.get('name'):
                    for subcat in category.findall('subcat'):
                        return_categories.append(subcat.attrib)
        except Exception:
            logger.log(u'Error parsing result for [%s]' % self.name, logger.DEBUG)
            return False, return_categories, 'Error parsing result for [%s]' % self.name

        return True, return_categories, ''

    def config_str(self):
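        # serialized provider settings, e.g. (hypothetical values):
        #   'SomeIndexer|https://indexer.example/|abc123|5030,5040|1|eponly|0|1|1'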
        return '%s|%s|%s|%s|%i|%s|%i|%i|%i' \
               % (self.name or '', self.url or '', self.key or '', self.cat_ids or '', self.enabled,
                  self.search_mode or '', self.search_fallback, self.enable_recentsearch, self.enable_backlog)

    def _get_season_search_strings(self, ep_obj):

        to_return = []
        cur_params = {}

        # season
        if ep_obj.show.air_by_date or ep_obj.show.sports:
            date_str = str(ep_obj.airdate).split('-')[0]
            cur_params['season'] = date_str
            cur_params['q'] = date_str.replace('-', '.')
        elif ep_obj.show.is_anime:
            cur_params['season'] = '%d' % ep_obj.scene_absolute_number
        else:
            cur_params['season'] = str(ep_obj.scene_season)
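        # e.g. a regular show in scene season 4 yields {'season': '4'}; an air-by-date
        # show airing in 2015 yields {'season': '2015', 'q': '2015'}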

        # search
        rid = helpers.mapIndexersToShow(ep_obj.show)[2]
        if rid:
            cur_return = cur_params.copy()
            cur_return['rid'] = rid
            to_return.append(cur_return)

        # add new query strings for exceptions
        name_exceptions = list(
            set([helpers.sanitizeSceneName(a) for a in
                 scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]]))
        for cur_exception in name_exceptions:
            cur_return = cur_params.copy()
            if 'q' in cur_return:
                cur_return['q'] = cur_exception + '.' + cur_return['q']
            to_return.append(cur_return)

        return to_return

    def _get_episode_search_strings(self, ep_obj, add_string=''):

        to_return = []
        params = {}

        if not ep_obj:
            return [params]

        if ep_obj.show.air_by_date or ep_obj.show.sports:
            date_str = str(ep_obj.airdate)
            params['season'] = date_str.partition('-')[0]
            params['ep'] = date_str.partition('-')[2].replace('-', '/')
        elif ep_obj.show.anime:
            params['ep'] = '%i' % int(
                ep_obj.scene_absolute_number if int(ep_obj.scene_absolute_number) > 0 else ep_obj.scene_episode)
        else:
            params['season'] = ep_obj.scene_season
            params['ep'] = ep_obj.scene_episode
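        # e.g. S04E03 of a regular show yields {'season': 4, 'ep': 3}; an air-by-date
        # episode aired 2015-06-19 yields {'season': '2015', 'ep': '06/19'}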

        # search
        rid = helpers.mapIndexersToShow(ep_obj.show)[2]
        if rid:
            cur_return = params.copy()
            cur_return['rid'] = rid
            to_return.append(cur_return)

        # add new query strings for exceptions
        name_exceptions = list(
            set([helpers.sanitizeSceneName(a) for a in
                 scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]]))
        for cur_exception in name_exceptions:
            cur_return = params.copy()
            cur_return['q'] = cur_exception
            to_return.append(cur_return)

        if ep_obj.show.anime:
            # Experimental: add a search string that does not search explicitly for the episode.
            # Remove the ?ep=e46 parameter and add the episode number to the query parameter instead.
            # Can be useful for newznab indexers that do not have the episodes 100% parsed.
            # Start with only applying the search string to anime shows.
            params['q'] = cur_exception
            params_no_ep = params.copy()

            params_no_ep['q'] = '%s.%02d' % (params_no_ep['q'], int(params_no_ep['ep']))
            if 'ep' in params_no_ep:
                params_no_ep.pop('ep')
            to_return.append(params_no_ep)
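            # note: cur_exception is the last name left over from the exceptions loop above;
            # e.g. {'q': 'Show.Name', 'ep': 7} becomes {'q': 'Show.Name.07'} with 'ep' removed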

        return to_return

    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):

        self._checkAuth()

        if 'rid' not in search_params and 'q' not in search_params:
            logger.log('Error: no rid or search term given.')
            return []

        params = {'t': 'tvsearch',
                  'maxage': sickbeard.USENET_RETENTION,
                  'limit': 100,
                  'attrs': 'rageid',
                  'offset': 0}
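        # t=tvsearch is the newznab TV-search function; attrs=rageid asks the indexer
        # to include the TVRage id with each result as an extended attribute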

        # category ids
        cat = []
        if self.show:
            if self.show.is_sports:
                cat = ['5060']
            elif self.show.is_anime:
                cat = ['5070']
        params['cat'] = ','.join([self.cat_ids] + cat)

        # if an age was supplied, or no retention is configured, use the given age
        if not params['maxage'] or age:
            params['maxage'] = age

        if search_params:
            params.update(search_params)

        if self.needs_auth and self.key:
            params['apikey'] = self.key

        results = []
        offset = total = 0

        # hardcoded to stop after a max of 4 fetches of 100 items (400 items) per query
        while (offset <= total) and (offset < 400):
            search_url = '%sapi?%s' % (self.url, urllib.urlencode(params))
            logger.log(u'Search url: ' + search_url, logger.DEBUG)

            data = self.cache.getRSSFeed(search_url)
            time.sleep(1.1)
            if not data or not self.check_auth_from_data(data):
                break

            for item in data.entries:
                title, url = self._get_title_and_url(item)
                if title and url:
                    results.append(item)
                else:
                    logger.log(u'The data returned from %s is incomplete, this result is unusable' % self.name,
                               logger.DEBUG)

            # get total and offset attribs
            try:
                if 0 == total:
                    total = int(data.feed.newznab_response['total'] or 0)
                    hits = (total / 100 + int(0 < (total % 100)))
                    hits += int(0 == hits)
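                    # hits is the number of 100-item pages implied by total
                    # (ceiling division under Python 2), clamped to at least one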
                offset = int(data.feed.newznab_response['offset'] or 0)
            except AttributeError:
                break

            # No items found, prevent doing another search
            if 0 == total:
                break

            if offset != params['offset']:
                logger.log('Tell your newznab provider to fix their bloody newznab responses')
                break

            params['offset'] += params['limit']
            if total <= params['offset']:
                logger.log('%s item%s found that will be used for episode matching' % (total, helpers.maybe_plural(total)),
                           logger.DEBUG)
                break

            # there are more items available than the amount given in one call, grab some more
            items = total - params['offset']
            logger.log('%s more item%s to fetch from a batch of up to %s items.'
                       % (items, helpers.maybe_plural(items), params['limit']), logger.DEBUG)

        return results

    def findPropers(self, search_date=None):
        return self._find_propers(search_date)


class NewznabCache(tvcache.TVCache):

    def __init__(self, provider):
        tvcache.TVCache.__init__(self, provider)

        self.minTime = 15  # cache update frequency

    def _getRSSData(self):

        params = {'t': 'tvsearch',
                  'cat': self.provider.cat_ids + ',5060,5070',
                  'attrs': 'rageid'}

        if self.provider.needs_auth and self.provider.key:
            params['apikey'] = self.provider.key

        rss_url = '%sapi?%s' % (self.provider.url, urllib.urlencode(params))
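        # e.g. (hypothetical) https://indexer.example/api?t=tvsearch&cat=5030,5040,5060,5070&attrs=rageid&apikey=abc123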

        logger.log(self.provider.name + ' cache update URL: ' + rss_url, logger.DEBUG)

        return self.getRSSFeed(rss_url)

    def _checkAuth(self, *data):

        return self.provider.check_auth_from_data(data[0])

    def updateCache(self):

        if self.shouldUpdate() and self._checkAuth(None):
            data = self._getRSSData()

            # as long as the http request worked we count this as an update
            if not data:
                return []

            # clear cache
            self._clearCache()

            self.setLastUpdate()

            if self._checkAuth(data):
                items = data.entries
                cl = []
                for item in items:
                    ci = self._parseItem(item)
                    if ci is not None:
                        cl.append(ci)

                if 0 < len(cl):
                    my_db = self._getDB()
                    my_db.mass_action(cl)

            else:
                raise AuthException(
                    u'Your authentication credentials for ' + self.provider.name + ' are incorrect, check your config')

        return []

    # overridden method that parses the rageid from the newznab feed
    def _parseItem(self, *item):

        title = item[0].title
        url = item[0].link

        attrs = item[0].newznab_attr
        if not isinstance(attrs, list):
            attrs = [item[0].newznab_attr]

        tvrageid = 0
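        # newznab feeds expose extended metadata as <newznab:attr name="..." value="..."/>
        # elements; pick out the TVRage id if the indexer supplied one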
        for attr in attrs:
            if 'tvrageid' == attr['name']:
                tvrageid = int(attr['value'])
                break

        self._checkItemAuth(title, url)

        if not title or not url:
            logger.log(u'The data returned from the %s feed is incomplete, this result is unusable'
                       % self.provider.name, logger.DEBUG)
            return None

        url = self._translateLinkURL(url)

        logger.log(u'Attempting to add item from RSS to cache: ' + title, logger.DEBUG)
        return self._addCacheEntry(title, url, indexer_id=tvrageid)