# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear.  If not, see <http://www.gnu.org/licenses/>.

from __future__ import division

from collections import OrderedDict
from math import ceil

import datetime
import re
import time
import urllib

import sickbeard

from io import BytesIO
from lib.dateutil import parser
from . import generic
from sickbeard import classes, db, helpers, logger, tvcache
from sickbeard.common import neededQualities, Quality, DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST
from sickbeard.exceptions import AuthException, MultipleShowObjectsException
from sickbeard.helpers import tryInt
from sickbeard.indexers.indexer_config import *
from sickbeard.network_timezones import sb_timezone
from sickbeard.sbdatetime import sbdatetime
from sickbeard.search import get_aired_in_season, get_wanted_qualities
from sickbeard.show_name_helpers import get_show_names

try:
    from lxml import etree
except ImportError:
    try:
        import xml.etree.cElementTree as etree
    except ImportError:
        import xml.etree.ElementTree as etree

class NewznabConstants:
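    """Internal constants used to map newznab server capabilities.

    Values are negative sentinels so they can never collide with the
    positive category ids (e.g. 5030, 5040) that newznab servers report.
    """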

    SEARCH_TEXT = -100
    SEARCH_SEASON = -101
    SEARCH_EPISODE = -102

    CAT_SD = -200
    CAT_HD = -201
    CAT_UHD = -202
    CAT_HEVC = -203
    CAT_ANIME = -204
    CAT_SPORT = -205
    CAT_WEBDL = -206

    catSearchStrings = {r'^Anime$': CAT_ANIME,
                        r'^Sport$': CAT_SPORT,
                        r'^SD$': CAT_SD,
                        r'^BoxSD$': CAT_SD,
                        r'^HD$': CAT_HD,
                        r'^BoxHD$': CAT_HD,
                        r'^UHD$': CAT_UHD,
                        r'^4K$': CAT_UHD,
                        # r'^HEVC$': CAT_HEVC,
                        r'^WEB.?DL$': CAT_WEBDL}

    providerToIndexerMapping = {'tvdbid': INDEXER_TVDB,
                                'rageid': INDEXER_TVRAGE,
                                'tvmazeid': INDEXER_TVMAZE,
                                'imdbid': INDEXER_IMDB,
                                'tmdbid': INDEXER_TMDB,
                                'traktid': INDEXER_TRAKT}

    indexer_priority_list = [INDEXER_TVDB, INDEXER_TVMAZE, INDEXER_TVRAGE, INDEXER_TRAKT, INDEXER_TMDB]

    searchTypes = {'rid': INDEXER_TVRAGE,
                   'tvdbid': INDEXER_TVDB,
                   'tvmazeid': INDEXER_TVMAZE,
                   'imdbid': INDEXER_IMDB,
                   'tmdbid': INDEXER_TMDB,
                   'traktid': INDEXER_TRAKT,
                   'q': SEARCH_TEXT,
                   'season': SEARCH_SEASON,
                   'ep': SEARCH_EPISODE}
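
    # searchTypes keys are the tokens a server advertises in its caps response,
    # e.g. (illustrative only): <tv-search supportedParams="q,rid,tvdbid,season,ep"/>
    # get_caps() splits supportedParams on ',' and maps each token through this
    # dict to decide which id-based and text-based search params may be used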

    def __init__(self):
        pass


class NewznabProvider(generic.NZBProvider):

    def __init__(self, name, url, key='', cat_ids=None, search_mode=None, search_fallback=False,
                 enable_recentsearch=False, enable_backlog=False, enable_scheduled_backlog=False):
        generic.NZBProvider.__init__(self, name, True, False)

        self.url = url
        self.key = key
        self._exclude = set()
        self.cat_ids = cat_ids or ''
        self._cat_ids = None
        self.search_mode = search_mode or 'eponly'
        self.search_fallback = bool(tryInt(search_fallback))
        self.enable_recentsearch = bool(tryInt(enable_recentsearch))
        self.enable_backlog = bool(tryInt(enable_backlog))
        self.enable_scheduled_backlog = bool(tryInt(enable_scheduled_backlog, 1))
        self.needs_auth = '0' != self.key.strip()  # '0' in the key setting indicates that api_key is not needed
        self.default = False
        self._caps = {}
        self._caps_cats = {}
        self._caps_all_cats = []
        self._caps_need_apikey = {'need': False, 'date': datetime.date.fromordinal(1)}
        self._limits = 100
        self._last_recent_search = None
        self._caps_last_updated = datetime.datetime.fromordinal(1)
        self.cache = NewznabCache(self)

        # filters
        if super(NewznabProvider, self).get_id() in ('nzbs_org',):
            self.filter = []
            if 'nzbs_org' == super(NewznabProvider, self).get_id():
                self.may_filter = OrderedDict([
                    ('so', ('scene only', False)), ('snn', ('scene not nuked', False))])

    @property
    def cat_ids(self):
        return self._cat_ids

    @cat_ids.setter
    def cat_ids(self, cats):
        self._cat_ids = self.clean_newznab_categories(cats)

    @property
    def caps(self):
        self.check_cap_update()
        return self._caps

    @property
    def cats(self):
        self.check_cap_update()
        return self._caps_cats

    @property
    def excludes(self):
        self.check_cap_update()
        return self._exclude

    @property
    def all_cats(self):
        self.check_cap_update()
        return self._caps_all_cats

    @property
    def limits(self):
        self.check_cap_update()
        return self._limits
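
    # note: every read of caps/cats/excludes/all_cats/limits goes through
    # check_cap_update(), so provider capabilities are refreshed lazily,
    # at most once per day (see the timedelta(days=1) check below)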

    @property
    def last_recent_search(self):
        if not self._last_recent_search:
            try:
                my_db = db.DBConnection('cache.db')
                res = my_db.select('SELECT' + ' "datetime" FROM "lastrecentsearch" WHERE "name"=?', [self.get_id()])
                if res:
                    self._last_recent_search = datetime.datetime.fromtimestamp(int(res[0]['datetime']))
            except (StandardError, Exception):
                pass
        return self._last_recent_search

    @last_recent_search.setter
    def last_recent_search(self, value):
        try:
            my_db = db.DBConnection('cache.db')
            my_db.action('INSERT OR REPLACE INTO "lastrecentsearch" (name, datetime) VALUES (?,?)',
                         [self.get_id(), sbdatetime.totimestamp(value, default=0)])
        except (StandardError, Exception):
            pass
        self._last_recent_search = value

    def check_cap_update(self):
        if self.enabled and \
                (not self._caps or (datetime.datetime.now() - self._caps_last_updated) >= datetime.timedelta(days=1)):
            self.get_caps()
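
    # _get_caps_data first probes /api?t=caps without an api key; only if that
    # fails does it retry with the key, and a successful keyed fetch is
    # remembered in _caps_need_apikey for 30 days to skip the anonymous probe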

    def _get_caps_data(self):
        xml_caps = None
        if self.enabled:
            if datetime.date.today() - self._caps_need_apikey['date'] > datetime.timedelta(days=30) or \
                    not self._caps_need_apikey['need']:
                self._caps_need_apikey['need'] = False
                data = self.get_url('%s/api?t=caps' % self.url)
                if data:
                    xml_caps = helpers.parse_xml(data)
            if xml_caps is None or not hasattr(xml_caps, 'tag') or xml_caps.tag != 'caps':
                api_key = self.maybe_apikey()
                if isinstance(api_key, basestring) and api_key not in ('0', ''):
                    data = self.get_url('%s/api?t=caps&apikey=%s' % (self.url, api_key))
                    if data:
                        xml_caps = helpers.parse_xml(data)
                        if xml_caps and hasattr(xml_caps, 'tag') and xml_caps.tag == 'caps':
                            self._caps_need_apikey = {'need': True, 'date': datetime.date.today()}
        return xml_caps

    def _check_excludes(self, cats):
        if isinstance(cats, dict):
            c = []
            for v in cats.itervalues():
                c.extend(v)
            self._exclude = set(c)
        else:
            self._exclude = set(v for v in cats)

    def get_caps(self):
        caps = {}
        cats = {}
        all_cats = []
        xml_caps = self._get_caps_data()
        if None is not xml_caps:
            tv_search = xml_caps.find('.//tv-search')
            if None is not tv_search:
                for c in [i for i in tv_search.get('supportedParams', '').split(',')]:
                    k = NewznabConstants.searchTypes.get(c)
                    if k:
                        caps[k] = c

            limit = xml_caps.find('.//limits')
            if None is not limit:
                lim = helpers.tryInt(limit.get('max'), 100)
                self._limits = (100, lim)[lim >= 100]

            try:
                for category in xml_caps.iter('category'):
                    if 'TV' == category.get('name'):
                        for subcat in category.findall('subcat'):
                            try:
                                cat_name = subcat.attrib['name']
                                cat_id = subcat.attrib['id']
                                all_cats.append({'id': cat_id, 'name': cat_name})
                                for s, v in NewznabConstants.catSearchStrings.iteritems():
                                    if None is not re.search(s, cat_name, re.IGNORECASE):
                                        cats.setdefault(v, []).append(cat_id)
                            except (StandardError, Exception):
                                continue
                    elif category.get('name', '').upper() in ['XXX', 'OTHER', 'MISC']:
                        for subcat in category.findall('subcat'):
                            try:
                                if None is not re.search(r'^Anime$', subcat.attrib['name'], re.IGNORECASE):
                                    cats.setdefault(NewznabConstants.CAT_ANIME, []).append(subcat.attrib['id'])
                                    break
                            except (StandardError, Exception):
                                continue
            except (StandardError, Exception):
                logger.log('Error parsing result for [%s]' % self.name, logger.DEBUG)

        if not caps and self._caps and not all_cats and self._caps_all_cats and not cats and self._caps_cats:
            self._check_excludes(cats)
            return

        if self.enabled:
            self._caps_last_updated = datetime.datetime.now()

        if not caps and self.get_id() not in ['sick_beard_index']:
            caps[INDEXER_TVDB] = 'tvdbid'
        if NewznabConstants.SEARCH_TEXT not in caps or not caps.get(NewznabConstants.SEARCH_TEXT):
            caps[NewznabConstants.SEARCH_TEXT] = 'q'
        if NewznabConstants.SEARCH_SEASON not in caps or not caps.get(NewznabConstants.SEARCH_SEASON):
            caps[NewznabConstants.SEARCH_SEASON] = 'season'
        if NewznabConstants.SEARCH_EPISODE not in caps or not caps.get(NewznabConstants.SEARCH_EPISODE):
            caps[NewznabConstants.SEARCH_EPISODE] = 'ep'
        if (INDEXER_TVRAGE not in caps or not caps.get(INDEXER_TVRAGE)) and self.get_id() not in ['sick_beard_index']:
            caps[INDEXER_TVRAGE] = 'rid'

        if NewznabConstants.CAT_HD not in cats or not cats.get(NewznabConstants.CAT_HD):
            cats[NewznabConstants.CAT_HD] = (['5040'], ['5040', '5090'])['nzbs_org' == self.get_id()]
        if NewznabConstants.CAT_SD not in cats or not cats.get(NewznabConstants.CAT_SD):
            cats[NewznabConstants.CAT_SD] = (['5030'], ['5030', '5070'])['nzbs_org' == self.get_id()]
        if NewznabConstants.CAT_ANIME not in cats or not cats.get(NewznabConstants.CAT_ANIME):
            cats[NewznabConstants.CAT_ANIME] = (['5070'], ['6070', '7040'])['nzbs_org' == self.get_id()]
        if NewznabConstants.CAT_SPORT not in cats or not cats.get(NewznabConstants.CAT_SPORT):
            cats[NewznabConstants.CAT_SPORT] = ['5060']

        self._check_excludes(cats)
        self._caps = caps
        self._caps_cats = cats
        self._caps_all_cats = all_cats

    def clean_newznab_categories(self, cats):
        """
        Removes automatically mapped categories from the list
        """
        if isinstance(cats, list):
            return [x for x in cats if x['id'] not in self.excludes]
        return ','.join(set(cats.split(',')) - self.excludes)
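
    # e.g. (hypothetical values) with excludes == {'5070'},
    # clean_newznab_categories('5030,5040,5070') -> '5030,5040' (order not preserved);
    # a list input of {'id': ..., 'name': ...} dicts is filtered on the 'id' key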

    def _check_auth(self, is_required=None):
        if self.should_skip():
            return False
        return super(NewznabProvider, self)._check_auth(is_required)

    def _check_auth_from_data(self, data, url):

        if data is None or not hasattr(data, 'tag'):
            return False

        if 'error' == data.tag:
            code = data.get('code', '')
            description = data.get('description', '')

            if '100' == code:
                raise AuthException('Your API key for %s is incorrect, check your config.' % self.name)
            elif '101' == code:
                raise AuthException('Your account on %s has been suspended, contact the admin.' % self.name)
            elif '102' == code:
                raise AuthException('Your account isn\'t allowed to use the API on %s, contact the admin.' % self.name)
            elif '500' == code:
                try:
                    retry_time, unit = re.findall(r'Retry in (\d+)\W+([a-z]+)', description, flags=re.I)[0]
                except IndexError:
                    retry_time, unit = None, None
                self.tmr_limit_update(retry_time, unit, description)
                self.log_failure_url(url)
            elif '910' == code:
                logger.log(
                    '%s %s, please check with provider.' %
                    (self.name, ('currently has their API disabled', description)[description not in (None, '')]),
                    logger.WARNING)
            else:
                logger.log('Unknown error given from %s: %s' % (self.name, data.get('description', '')),
                           logger.WARNING)
            return False

        self.tmr_limit_count = 0
        return True

    def config_str(self):
        return '%s|%s|%s|%s|%i|%s|%i|%i|%i|%i' \
               % (self.name or '', self.url or '', self.maybe_apikey() or '', self.cat_ids or '', self.enabled,
                  self.search_mode or '', self.search_fallback, self.enable_recentsearch, self.enable_backlog,
                  self.enable_scheduled_backlog)
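
    # a serialised provider therefore looks like (hypothetical values):
    # 'Example|https://example.provider/|apikey|5030,5040|1|eponly|0|1|1|1'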

    def _season_strings(self, ep_obj):

        search_params = []
        base_params = {}

        # season
        ep_detail = None
        if ep_obj.show.air_by_date or ep_obj.show.is_sports:
            airdate = str(ep_obj.airdate).split('-')[0]
            base_params['season'] = airdate
            base_params['q'] = airdate
            if ep_obj.show.air_by_date:
                ep_detail = '+"%s"' % airdate
        elif ep_obj.show.is_anime:
            base_params['season'] = '%d' % ep_obj.scene_absolute_number
        else:
            base_params['season'] = str((ep_obj.season, ep_obj.scene_season)[bool(ep_obj.show.is_scene)])
            ep_detail = 'S%02d' % helpers.tryInt(base_params['season'], 1)

        # id search
        params = base_params.copy()
        use_id = False
        for i in sickbeard.indexerApi().all_indexers:
            if i in ep_obj.show.ids and 0 < ep_obj.show.ids[i]['id'] and i in self.caps:
                params[self.caps[i]] = ep_obj.show.ids[i]['id']
                use_id = True
        use_id and search_params.append(params)

        spacer = 'nzbgeek.info' in self.url.lower() and ' ' or '.'
        # query search and exceptions
        name_exceptions = get_show_names(ep_obj, spacer)
        for cur_exception in name_exceptions:
            params = base_params.copy()
            if 'q' in params:
                params['q'] = '%s%s%s' % (cur_exception, spacer, params['q'])
                search_params.append(params)

            if ep_detail:
                params = base_params.copy()
                params['q'] = '%s%s%s' % (cur_exception, spacer, ep_detail)
                'season' in params and params.pop('season')
                'ep' in params and params.pop('ep')
                search_params.append(params)

        return [{'Season': search_params}]
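
    # returned shape, e.g. (hypothetical values):
    # [{'Season': [{'season': '2015', 'q': '2015', 'tvdbid': 123456},
    #              {'season': '2015', 'q': 'Show.Name.2015'}, ...]}]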

    def _episode_strings(self, ep_obj):

        search_params = []
        base_params = {}

        if not ep_obj:
            return [base_params]

        ep_detail = None
        if ep_obj.show.air_by_date or ep_obj.show.is_sports:
            airdate = str(ep_obj.airdate).split('-')
            base_params['season'] = airdate[0]
            if ep_obj.show.air_by_date:
                base_params['ep'] = '/'.join(airdate[1:])
                ep_detail = '+"%s.%s"' % (base_params['season'], '.'.join(airdate[1:]))
        elif ep_obj.show.is_anime:
            base_params['ep'] = '%i' % (helpers.tryInt(ep_obj.scene_absolute_number) or
                                        helpers.tryInt(ep_obj.scene_episode))
            ep_detail = '%02d' % helpers.tryInt(base_params['ep'])
        else:
            base_params['season'], base_params['ep'] = (
                (ep_obj.season, ep_obj.episode), (ep_obj.scene_season, ep_obj.scene_episode))[ep_obj.show.is_scene]
            ep_detail = sickbeard.config.naming_ep_type[2] % {
                'seasonnumber': helpers.tryInt(base_params['season'], 1),
                'episodenumber': helpers.tryInt(base_params['ep'], 1)}

        # id search
        params = base_params.copy()
        use_id = False
        for i in sickbeard.indexerApi().all_indexers:
            if i in ep_obj.show.ids and 0 < ep_obj.show.ids[i]['id'] and i in self.caps:
                params[self.caps[i]] = ep_obj.show.ids[i]['id']
                use_id = True
        use_id and search_params.append(params)

        spacer = 'nzbgeek.info' in self.url.lower() and ' ' or '.'
        # query search and exceptions
        name_exceptions = get_show_names(ep_obj, spacer)
        if sickbeard.scene_exceptions.has_abs_episodes(ep_obj):
            search_params.append({'q': '%s%s%s' % (ep_obj.show.name, spacer, base_params['ep'])})
        for cur_exception in name_exceptions:
            params = base_params.copy()
            params['q'] = cur_exception
            search_params.append(params)

            if ep_detail:
                params = base_params.copy()
                params['q'] = '%s%s%s' % (cur_exception, spacer, ep_detail)
                'season' in params and params.pop('season')
                'ep' in params and params.pop('ep')
                search_params.append(params)

        return [{'Episode': search_params}]

    def supports_tvdbid(self):

        return self.get_id() not in ['sick_beard_index']

    def _title_and_url(self, item):
        title, url = None, None
        try:
            title = ('%s' % item.findtext('title')).strip()
            title = re.sub(r'\s+', '.', title)
            # remove indexer specific release name parts
            r_found = True
            while r_found:
                r_found = False
                for pattern, repl in ((r'(?i)-Scrambled$', ''), (r'(?i)-BUYMORE$', ''), (r'(?i)-Obfuscated$', ''),
                                      (r'(?i)-postbot$', ''), (r'(?i)[-.]English$', '')):
                    if re.search(pattern, title):
                        r_found = True
                        title = re.sub(pattern, repl, title)
            url = str(item.findtext('link')).replace('&amp;', '&')
        except (StandardError, Exception):
            pass

        return title, url

    def get_size_uid(self, item, **kwargs):
        size = -1
        uid = None
        if 'name_space' in kwargs and 'newznab' in kwargs['name_space']:
            size, uid = self._parse_size_uid(item, kwargs['name_space'])
        return size, uid

    def get_show(self, item, **kwargs):
        show_obj = None
        if 'name_space' in kwargs and 'newznab' in kwargs['name_space']:
            ids = self.cache.parse_ids(item, kwargs['name_space'])

            if ids:
                try:
                    show_obj = helpers.find_show_by_id(sickbeard.showList, id_dict=ids, no_mapped_ids=False)
                except MultipleShowObjectsException:
                    return None
        return show_obj

    def choose_search_mode(self, episodes, ep_obj, hits_per_page=100):
        searches = [e for e in episodes if (not ep_obj.show.is_scene and e.season == ep_obj.season) or
                    (ep_obj.show.is_scene and e.scene_season == ep_obj.scene_season)]

        needed = neededQualities()
        needed.check_needed_types(ep_obj.show)
        for s in searches:
            if needed.all_qualities_needed:
                break
            if not s.show.is_anime and not s.show.is_sports:
                if not getattr(s, 'wantedQuality', None):
                    # this should not happen, the creation is missing for the search in this case
                    logger.log('wantedQuality property was missing for search, creating it', logger.WARNING)
                    ep_status, ep_quality = Quality.splitCompositeStatus(ep_obj.status)
                    s.wantedQuality = get_wanted_qualities(ep_obj, ep_status, ep_quality, unaired=True)
                needed.check_needed_qualities(s.wantedQuality)

        if not hasattr(ep_obj, 'eps_aired_in_season'):
            # this should not happen, the creation is missing for the search in this case
            logger.log('eps_aired_in_season property was missing for search, creating it', logger.WARNING)
            ep_count, ep_count_scene = get_aired_in_season(ep_obj.show)
            ep_obj.eps_aired_in_season = ep_count.get(ep_obj.season, 0)
            ep_obj.eps_aired_in_scene_season = ep_count_scene.get(ep_obj.scene_season, 0) if ep_obj.show.is_scene \
                else ep_obj.eps_aired_in_season

        per_ep, limit_per_ep = 0, 0
        if needed.need_sd and not needed.need_hd:
            per_ep, limit_per_ep = 10, 25
        if needed.need_hd:
            if not needed.need_sd:
                per_ep, limit_per_ep = 30, 90
            else:
                per_ep, limit_per_ep = 40, 120
        if needed.need_uhd or (needed.need_hd and not self.cats.get(NewznabConstants.CAT_UHD)):
            per_ep += 4
            limit_per_ep += 10
        if ep_obj.show.is_anime or ep_obj.show.is_sports or ep_obj.show.air_by_date:
            rel_per_ep, limit_per_ep = 5, 10
        else:
            rel_per_ep = per_ep
        rel = max(1, int(ceil((ep_obj.eps_aired_in_scene_season if ep_obj.show.is_scene else
                               ep_obj.eps_aired_in_season) * rel_per_ep / hits_per_page)))
        rel_limit = max(1, int(ceil((ep_obj.eps_aired_in_scene_season if ep_obj.show.is_scene else
                                     ep_obj.eps_aired_in_season) * limit_per_ep / hits_per_page)))
        season_search = rel < (len(searches) * 100 // hits_per_page)
        if not season_search:
            needed = neededQualities()
            needed.check_needed_types(ep_obj.show)
            if not ep_obj.show.is_anime and not ep_obj.show.is_sports:
                if not getattr(ep_obj, 'wantedQuality', None):
                    ep_status, ep_quality = Quality.splitCompositeStatus(ep_obj.status)
                    ep_obj.wantedQuality = get_wanted_qualities(ep_obj, ep_status, ep_quality, unaired=True)
                needed.check_needed_qualities(ep_obj.wantedQuality)
        else:
            if not ep_obj.show.is_anime and not ep_obj.show.is_sports:
                for ep in episodes:
                    if not getattr(ep, 'wantedQuality', None):
                        ep_status, ep_quality = Quality.splitCompositeStatus(ep.status)
                        ep.wantedQuality = get_wanted_qualities(ep, ep_status, ep_quality, unaired=True)
                    needed.check_needed_qualities(ep.wantedQuality)
        return (season_search, needed,
                (hits_per_page * 100 // hits_per_page * 2, hits_per_page * int(ceil(rel_limit * 1.5)))[season_search])
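
    # the heuristic above estimates how many result pages a whole season is
    # likely to need (episodes aired x expected releases per episode, in pages
    # of hits_per_page); a single season query is chosen when that is cheaper
    # than issuing one query per wanted episode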

    def find_search_results(self, show, episodes, search_mode, manual_search=False, try_other_searches=False, **kwargs):
        check = self._check_auth()
        results = {}
        if (isinstance(check, bool) and not check) or self.should_skip():
            return results

        self.show = show

        item_list = []
        name_space = {}

        searched_scene_season = s_mode = None
        for ep_obj in episodes:
            if self.should_skip(log_warning=False):
                break
            # skip if season already searched
            if (s_mode or 'sponly' == search_mode) and 1 < len(episodes) \
                    and searched_scene_season == ep_obj.scene_season:
                continue

            # search cache for episode result
            cache_result = self.cache.searchCache(ep_obj, manual_search)
            if cache_result:
                if ep_obj.episode not in results:
                    results[ep_obj.episode] = cache_result
                else:
                    results[ep_obj.episode].extend(cache_result)

                # found result, search next episode
                continue

            s_mode, needed, max_items = self.choose_search_mode(episodes, ep_obj, hits_per_page=self.limits)
            needed.check_needed_types(self.show)

            if 'sponly' == search_mode:
                searched_scene_season = ep_obj.scene_season

                # get season search params
                search_params = self._season_strings(ep_obj)
            else:
                # get single episode search params
                if s_mode and 1 < len(episodes):
                    searched_scene_season = ep_obj.scene_season
                    search_params = self._season_strings(ep_obj)
                else:
                    search_params = self._episode_strings(ep_obj)

            for cur_param in search_params:
                items, n_space = self._search_provider(cur_param, search_mode=search_mode, epcount=len(episodes),
                                                       needed=needed, max_items=max_items,
                                                       try_all_searches=try_other_searches)
                item_list += items
                name_space.update(n_space)
                if self.should_skip():
                    break

        return self.finish_find_search_results(
            show, episodes, search_mode, manual_search, results, item_list, name_space=name_space)

    @staticmethod
    def _parse_pub_date(item, default=None):
        parsed_date = default
        try:
            p = item.findtext('pubDate')
            if p:
                p = parser.parse(p, fuzzy=True)
                try:
                    p = p.astimezone(sb_timezone)
                except (StandardError, Exception):
                    pass
                if isinstance(p, datetime.datetime):
                    parsed_date = p.replace(tzinfo=None)
        except (StandardError, Exception):
            pass

        return parsed_date

    @staticmethod
    def _parse_size_uid(item, ns, default=-1):
        parsed_size = default
        uid = None
        try:
            if ns and 'newznab' in ns:
                for attr in item.findall('%sattr' % ns['newznab']):
                    if 'size' == attr.get('name', ''):
                        parsed_size = helpers.tryInt(attr.get('value'), -1)
                    elif 'guid' == attr.get('name', ''):
                        uid = attr.get('value')
        except (StandardError, Exception):
            pass
        return parsed_size, uid
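
    # feed items carry provider metadata as namespaced attr elements, e.g.
    # (illustrative only): <newznab:attr name="size" value="1569835865"/>
    # <newznab:attr name="guid" value="..."/> which the loop above reads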

    def _search_provider(self, search_params, needed=neededQualities(need_all=True), max_items=400,
                         try_all_searches=False, **kwargs):

        results, n_spaces = [], {}
        if self.should_skip():
            return results, n_spaces

        api_key = self._check_auth()
        if isinstance(api_key, bool) and not api_key:
            return results, n_spaces

        base_params = {'t': 'tvsearch',
                       'maxage': sickbeard.USENET_RETENTION or 0,
                       'limit': self.limits,
                       'attrs': ','.join([k for k, v in NewznabConstants.providerToIndexerMapping.iteritems()
                                          if v in self.caps]),
                       'offset': 0}

        uc_only = all([re.search('(?i)usenet_crawler', self.get_id())])
        base_params_uc = {'num': self.limits, 'dl': '1', 'i': '64660'}

        if isinstance(api_key, basestring) and api_key not in ('0', ''):
            base_params['apikey'] = api_key
            base_params_uc['r'] = api_key

        total, cnt, search_url, exit_log = 0, len(results), '', True

        cat_sport = self.cats.get(NewznabConstants.CAT_SPORT, ['5060'])
        cat_anime = self.cats.get(NewznabConstants.CAT_ANIME, ['5070'])
        cat_hd = self.cats.get(NewznabConstants.CAT_HD, ['5040'])
        cat_sd = self.cats.get(NewznabConstants.CAT_SD, ['5030'])
        cat_uhd = self.cats.get(NewznabConstants.CAT_UHD)
        cat_webdl = self.cats.get(NewznabConstants.CAT_WEBDL)

        for mode in search_params.keys():
            if self.should_skip(log_warning=False):
                break
            for i, params in enumerate(search_params[mode]):

                if self.should_skip(log_warning=False):
                    break

                # category ids
                cat = []
                if 'Episode' == mode or 'Season' == mode:
                    if not (any(x in params for x in [v for c, v in self.caps.iteritems()
                                                      if c not in [NewznabConstants.SEARCH_EPISODE,
                                                                   NewznabConstants.SEARCH_SEASON]])
                            or not self.supports_tvdbid()):
                        logger.log('Show is missing either an id or search term for search')
                        continue

                if needed.need_anime:
                    cat.extend(cat_anime)
                if needed.need_sports:
                    cat.extend(cat_sport)

                if needed.need_hd:
                    cat.extend(cat_hd)
                if needed.need_sd:
                    cat.extend(cat_sd)
                if needed.need_uhd and cat_uhd is not None:
                    cat.extend(cat_uhd)
                if needed.need_webdl and cat_webdl is not None:
                    cat.extend(cat_webdl)

                if self.cat_ids or len(cat):
                    base_params['cat'] = ','.join(sorted(set((self.cat_ids.split(',') if self.cat_ids else []) + cat)))
                    base_params_uc['t'] = base_params['cat']

                request_params = base_params.copy()
                if ('Propers' == mode or 'nzbs_org' == self.get_id()) \
                        and 'q' in params and not (any(x in params for x in ['season', 'ep'])):
                    request_params['t'] = 'search'
                request_params.update(params)

                if hasattr(self, 'filter'):
                    if 'nzbs_org' == self.get_id():
                        request_params['rls'] = ((0, 1)['so' in self.filter], 2)['snn' in self.filter]

                # workaround a strange glitch
                if sum(ord(i) for i in self.get_id()) in [383] and 5 == 14 - request_params['maxage']:
                    request_params['maxage'] += 1

                offset = 0
                batch_count = not 0
                first_date = last_date = None
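
                # page through the api: each loop requests one batch of up to
                # 'limit' items, advancing 'offset' until the server's reported
                # total, the max_items budget, or an empty batch stops it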

                # hardcoded to stop after a max of 4 hits (400 items) per query
                while (offset <= total) and (offset < max_items) and batch_count:
                    cnt = len(results)

                    if 'Cache' == mode and uc_only:
                        search_url = '%srss?%s' % (self.url, urllib.urlencode(base_params_uc))
                    else:
                        search_url = '%sapi?%s' % (self.url, urllib.urlencode(request_params))
                    i and time.sleep(2.1)

                    data = self.get_url(search_url)

                    if self.should_skip() or not data:
                        break

                    # hack this in until it's fixed server side
                    if not data.startswith('<?xml'):
                        data = '<?xml version="1.0" encoding="ISO-8859-1" ?>%s' % data

                    try:
                        parsed_xml, n_spaces = self.cache.parse_and_get_ns(data)
                        items = parsed_xml.findall('channel/item')
                    except (StandardError, Exception):
                        logger.log('Error trying to load %s RSS feed' % self.name, logger.WARNING)
                        break

                    if not self._check_auth_from_data(parsed_xml, search_url):
                        break

                    if 'rss' != parsed_xml.tag:
                        logger.log('Resulting XML from %s isn\'t RSS, not parsing it' % self.name, logger.WARNING)
                        break

                    i and time.sleep(2.1)

                    for item in items:

                        title, url = self._title_and_url(item)
                        if title and url:
                            results.append(item)
                        else:
                            logger.log('The data returned from %s is incomplete, this result is unusable' % self.name,
                                       logger.DEBUG)

                    # get total and offset attributes
                    try:
                        if 0 == total:
                            total = (helpers.tryInt(parsed_xml.find(
                                './/%sresponse' % n_spaces['newznab']).get('total', 0)), 1000)['Cache' == mode]
                            hits = (total // self.limits + int(0 < (total % self.limits)))
                            hits += int(0 == hits)
                        offset = helpers.tryInt(parsed_xml.find('.//%sresponse' % n_spaces['newznab']).get('offset', 0))
                    except (AttributeError, KeyError):
                        if not uc_only:
                            break
                        total = len(items)

                    # No items found, prevent from doing another search
                    if 0 == total:
                        break

                    # Cache mode, prevent from doing another search
                    if 'Cache' == mode:
                        if items and len(items):
                            if not first_date:
                                first_date = self._parse_pub_date(items[0])
                            last_date = self._parse_pub_date(items[-1])
                        if not first_date or not last_date or not self._last_recent_search or \
                                last_date <= self.last_recent_search or uc_only:
                            break

                    if offset != request_params['offset']:
                        logger.log('Ask your newznab provider to fix their newznab responses')
                        break

                    request_params['offset'] += request_params['limit']
                    if total <= request_params['offset']:
                        break

                    # there are more items available than the amount given in one call, grab some more
                    items = total - request_params['offset']
                    logger.log('%s more item%s to fetch from a batch of up to %s items.'
                               % (items, helpers.maybe_plural(items), request_params['limit']), logger.DEBUG)

                    batch_count = self._log_result(results, mode, cnt, search_url)
                    exit_log = False

                if 'Cache' == mode and first_date:
                    self.last_recent_search = first_date

                if exit_log:
                    self._log_search(mode, total, search_url)

                if not try_all_searches and any(x in request_params for x in [
                        v for c, v in self.caps.iteritems()
                        if c not in [NewznabConstants.SEARCH_EPISODE, NewznabConstants.SEARCH_SEASON,
                                     NewznabConstants.SEARCH_TEXT]]) and len(results):
                    break

        return results, n_spaces
|
|
|
|
|
|
|
|
|
|
def find_propers(self, search_date=None, shows=None, anime=None, **kwargs):
|
|
|
|
|
cache_results = self.cache.listPropers(search_date)
|
|
|
|
|
results = [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time']), self.show) for x in
|
|
|
|
|
cache_results]
|
|
|
|
|
|
2017-11-02 18:30:05 +00:00
|
|
|
|
check = self._check_auth()
|
|
|
|
|
if isinstance(check, bool) and not check:
|
|
|
|
|
return results
|
|
|
|
|
|
2016-09-04 20:00:44 +00:00
|
|
|
|
index = 0
|
|
|
|
|
alt_search = ('nzbs_org' == self.get_id())
|
|
|
|
|
do_search_alt = False

        search_terms = []
        regex = []
        if shows:
            search_terms += ['.proper.', '.repack.', '.real.']
            regex += ['proper|repack', Quality.real_check]
            proper_check = re.compile(r'(?i)(\b%s\b)' % '|'.join(regex))
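        # anime propers are released as version bumps (v2..v5) rather than PROPER/REPACK tags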
        if anime:
            terms = 'v1|v2|v3|v4|v5'
            search_terms += [terms]
            regex += [terms]
            proper_check = re.compile(r'(?i)(%s)' % '|'.join(regex))

        urls = []
        while index < len(search_terms):
            if self.should_skip(log_warning=False):
                break

            search_params = {'q': search_terms[index], 'maxage': sickbeard.BACKLOG_DAYS + 2}
            if alt_search:

                if do_search_alt:
                    search_params['t'] = 'search'
                    index += 1

                do_search_alt = not do_search_alt

            else:
                index += 1

            items, n_space = self._search_provider({'Propers': [search_params]})

            for item in items:

                (title, url) = self._title_and_url(item)

                if not proper_check.search(title) or url in urls:
                    continue
                urls.append(url)

                result_date = self._parse_pub_date(item)
                if not result_date:
                    logger.log(u'Unable to figure out the date for entry %s, skipping it' % title)
                    continue

                result_size, result_uid = self._parse_size_uid(item, ns=n_space)
                if not search_date or search_date < result_date:
                    show_obj = self.get_show(item, name_space=n_space)
                    search_result = classes.Proper(title, url, result_date, self.show, parsed_show=show_obj,
                                                   size=result_size, puid=result_uid)
                    results.append(search_result)
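
            # pause briefly between queries to stay polite to the provider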
            time.sleep(0.5)

        return results
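
    # return the number of results the last batch added (difference from pre-batch count cnt)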
    def _log_result(self, results, mode, cnt, url):
        count = len(results) - cnt
        if count:
            self._log_search(mode, count, url)
        return count


class NewznabCache(tvcache.TVCache):

    def __init__(self, provider):
        tvcache.TVCache.__init__(self, provider)

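        # throttle between cache refreshes (tvcache treats update_freq as minutes)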
        self.update_freq = 5

    # helper method to read the namespaces from xml
    @staticmethod
    def parse_and_get_ns(data):
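        # iterparse emits ('start-ns', (prefix, uri)) events; a newznab feed typically yields
        # ns['newznab'] == '{http://www.newznab.com/DTD/2010/feeds/attributes/}'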
        events = 'start', 'start-ns'
        root = None
        ns = {}
        for event, elem in etree.iterparse(BytesIO(data.encode('utf-8')), events):
            if 'start-ns' == event:
                ns[elem[0]] = '{%s}' % elem[1]
            elif 'start' == event:
                if None is root:
                    root = elem
        return root, ns

    def updateCache(self, needed=neededQualities(need_all=True), **kwargs):

        result = []

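        # 4489 is an experimental "Recent search frequency" value that defers to each
        # provider's own frequency instead of one fixed interval for all providers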
        if 4489 != sickbeard.RECENTSEARCH_FREQUENCY or self.should_update():
            n_spaces = {}
            try:
                check = self._checkAuth()
                if isinstance(check, bool) and not check:
                    items = None
                else:
                    (items, n_spaces) = self.provider.cache_data(needed=needed)
            except (StandardError, Exception):
                items = None

            if items:
                self._clearCache()

                # parse data
                cl = []
                for item in items:
                    ci = self._parseItem(n_spaces, item)
                    if ci is not None:
                        cl.append(ci)
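
                # commit the queued cache entries in a single db transaction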
                if 0 < len(cl):
                    my_db = self.get_db()
                    my_db.mass_action(cl)

            # set updated as the time this fetch attempt was made
            self.setLastUpdate()

        return result

    @staticmethod
    def parse_ids(item, ns):
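        # indexer ids arrive as newznab attr elements,
        # e.g. <newznab:attr name="tvdbid" value="73739"/> -> {INDEXER_TVDB: 73739}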
        ids = {}
        if 'newznab' in ns:
            for attr in item.findall('%sattr' % ns['newznab']):
                if attr.get('name', '') in NewznabConstants.providerToIndexerMapping:
                    v = helpers.tryInt(attr.get('value'))
                    if v > 0:
                        ids[NewznabConstants.providerToIndexerMapping[attr.get('name')]] = v
        return ids

    # overwrite parent method to parse indexer ids from the newznab feed
    def _parseItem(self, ns, item):

        title, url = self._title_and_url(item)

        ids = self.parse_ids(item, ns)

        if title and url:
            return self.add_cache_entry(title, url, id_dict=ids)

        logger.log('Data returned from the %s feed is incomplete, this result is unusable' % self.provider.name,
                   logger.DEBUG)