Change provider SCC login process to use General Config/Advanced/Proxy host setting.

Change refactor SCC to use torrent provider simplification and PEP8.
JackDandy 2015-06-20 00:34:56 +01:00
parent 2f767b28c3
commit f782567fd1
8 changed files with 320 additions and 302 deletions

@@ -16,6 +16,7 @@
* Fix provider SCC stop snatching releases for episodes already completed
* Fix provider SCC handle null server responses
* Change provider SCC remove 1 of 3 requests per search to save 30% time
* Change provider SCC login process to use General Config/Advanced/Proxy host setting
* Change provider IPT only decode unicode search strings
* Change provider IPT login process to use General Config/Advanced/Proxy host setting
* Change provider TB PEP8 and code convention cleanup
@@ -28,6 +29,7 @@
* Change provider KAT to use mediaExtensions from common instead of private list
* Change provider KAT provider PEP8 and code convention cleanup
* Change refactor and code simplification for torrent providers
* Change refactor SCC to use torrent provider simplification and PEP8
* Change provider SCD PEP8 and code convention cleanup
* Remove HDTorrents torrent provider
* Remove NextGen torrent provider

Binary file not shown (image, 916 B).

@@ -808,9 +808,10 @@ def starify(text, verify=False):
If verify is true, return true if text is a star block created text else return false.
"""
return ((('%s%s' % (text[:len(text) / 2], '*' * (len(text) / 2))),
('%s%s%s' % (text[:4], '*' * (len(text) - 8), text[-4:])))[12 <= len(text)],
set('*') == set((text[len(text) / 2:], text[4:-4])[12 <= len(text)]))[verify]
return '' if not text\
else ((('%s%s' % (text[:len(text) / 2], '*' * (len(text) / 2))),
('%s%s%s' % (text[:4], '*' * (len(text) - 8), text[-4:])))[12 <= len(text)],
set('*') == set((text[len(text) / 2:], text[4:-4])[12 <= len(text)]))[verify]
"""
@@ -1162,12 +1163,12 @@ def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=N
except requests.exceptions.ConnectionError as e:
logger.log(u"Connection error " + str(e.message) + " while loading URL " + url, logger.WARNING)
return
except requests.exceptions.Timeout as e:
logger.log(u"Connection timed out " + str(e.message) + " while loading URL " + url, logger.WARNING)
return
except requests.exceptions.ReadTimeout as e:
logger.log(u'Read timed out ' + str(e.message) + ' while loading URL ' + url, logger.WARNING)
return
except requests.exceptions.Timeout as e:
logger.log(u"Connection timed out " + str(e.message) + " while loading URL " + url, logger.WARNING)
return
except Exception:
logger.log(u"Unknown exception while loading URL " + url + ": " + traceback.format_exc(), logger.WARNING)
return
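The reorder above matters because except clauses are tested top to bottom and requests.exceptions.ReadTimeout subclasses requests.exceptions.Timeout, so the broader handler would otherwise shadow the specific one. A small sketch:

import requests

try:
    raise requests.exceptions.ReadTimeout('read timed out')
except requests.exceptions.ReadTimeout:
    print('read timeout')      # reached now that the subclass is listed first
except requests.exceptions.Timeout:
    print('generic timeout')   # would swallow ReadTimeout if it came first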

@@ -19,6 +19,7 @@
from __future__ import with_statement
import time
import datetime
import os
import re
@@ -27,15 +28,19 @@ from base64 import b16encode, b32decode
import sickbeard
import requests
from sickbeard import helpers, classes, logger, db, tvcache
from sickbeard.common import MULTI_EP_RESULT, SEASON_RESULT, USER_AGENT
from sickbeard import encodingKludge as ek
from sickbeard.exceptions import ex
from sickbeard import helpers, classes, logger, db, tvcache, encodingKludge as ek
from sickbeard.common import Quality, MULTI_EP_RESULT, SEASON_RESULT, USER_AGENT
from sickbeard.exceptions import SickBeardException, AuthException, ex
from sickbeard.helpers import maybe_plural
from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
from sickbeard.common import Quality
from sickbeard.show_name_helpers import allPossibleShowNames
from hachoir_parser import createParser
class HaltParseException(SickBeardException):
"""Something requires the current processing to abort"""
class GenericProvider:
NZB = "nzb"
TORRENT = "torrent"
@@ -73,8 +78,14 @@ class GenericProvider:
def makeID(name):
return re.sub("[^\w\d_]", "_", name.strip().lower())
def imageName(self):
return self.getID() + '.png'
def imageName(self, *default_name):
for name in ['%s.%s' % (self.getID(), image_ext) for image_ext in ['png', 'gif', 'jpg']]:
if ek.ek(os.path.isfile,
ek.ek(os.path.join, sickbeard.PROG_DIR, 'gui', sickbeard.GUI_NAME, 'images', 'providers', name)):
return name
return '%s.png' % ('newznab', default_name[0])[any(default_name)]
def _checkAuth(self):
return True
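A note on the (false_value, true_value)[condition] idiom in the new fallback line of imageName: unlike a real conditional expression, both tuple elements are evaluated eagerly, so the fallback assumes a matching image exists on disk or a default is passed (NZBProvider and TorrentProvider below pass 'newznab' and 'torrent'). Illustratively:

default_name = ('torrent',)
print('%s.png' % ('newznab', default_name[0])[any(default_name)])  # torrent.png

default_name = ()
# ('newznab', default_name[0]) would raise IndexError here before the
# [False] index is applied; 'x if cond else y' avoids that pitfall
print('%s.png' % ('newznab', 'unused')[any(default_name)])         # newznab.png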
@@ -136,20 +147,17 @@ class GenericProvider:
if self.providerType == GenericProvider.TORRENT:
try:
torrent_hash = re.findall('urn:btih:([\w]{32,40})', result.url)[0].upper()
torrent_hash = re.findall('urn:btih:([0-9a-f]{32,40})', result.url)[0].upper()
if len(torrent_hash) == 32:
if 32 == len(torrent_hash):
torrent_hash = b16encode(b32decode(torrent_hash)).lower()
if not torrent_hash:
logger.log("Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR)
return False
urls = [
'http://torcache.net/torrent/' + torrent_hash + '.torrent',
'http://torrage.com/torrent/' + torrent_hash + '.torrent',
'http://zoink.it/torrent/' + torrent_hash + '.torrent',
]
urls = ['https://%s/%s.torrent' % (u, torrent_hash)
for u in ('torcache.net/torrent', 'torrage.com/torrent', 'getstrike.net/torrents/api/download')]
except:
urls = [result.url]
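The hash handling above normalises a magnet infohash: 'urn:btih:' may carry either 40 hex characters or a 32-character base32 digest, and both decode to the same 20 bytes. A sketch with a made-up digest:

from base64 import b16encode, b32decode

btih = 'MFRGGZDFMZTWQ2LKNNWG23TPOBYXE43U'      # hypothetical 32-char base32 digest
if 32 == len(btih):
    btih = b16encode(b32decode(btih)).lower()  # 20 bytes -> 40 hex chars
assert 40 == len(btih)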
@@ -174,6 +182,8 @@ class GenericProvider:
if self._verify_download(filename):
return True
elif ek.ek(os.path.isfile, filename):
ek.ek(os.remove, filename)
logger.log(u"Failed to download result", logger.ERROR)
return False
@@ -233,8 +243,7 @@ class GenericProvider:
Returns: A tuple containing two strings representing title and URL respectively
"""
title = None
url = None
title, url = None, None
try:
if isinstance(item, tuple):
@@ -285,7 +294,7 @@ class GenericProvider:
# mark season searched for season pack searches so we can skip later on
searched_scene_season = epObj.scene_season
if len(episodes) > 1 and 'eponly' != search_mode:
if 'sponly' == search_mode:
# get season search results
for curString in self._get_season_search_strings(epObj):
itemList += self._doSearch(curString, search_mode, len(episodes))
@@ -469,28 +478,107 @@ class GenericProvider:
'''
return ''
@staticmethod
def _log_result(mode='cache', count=0, url='url missing'):
"""
Simple function to log the result of a search
:param count: count of successfully processed items
:param url: source url of item(s)
"""
mode = mode.lower()
logger.log(u'%s in response from %s' % (('No %s items' % mode,
'%s %s item%s' % (count, mode, maybe_plural(count)))[0 < count], url))
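Assuming maybe_plural returns 's' for counts other than one and '' otherwise, the helper produces lines such as:

# _log_result('cache', 0, url)   -> 'No cache items in response from <url>'
# _log_result('episode', 1, url) -> '1 episode item in response from <url>'
# _log_result('season', 3, url)  -> '3 season items in response from <url>'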
class NZBProvider(GenericProvider):
def __init__(self, name, supports_backlog=True, anime_only=False):
GenericProvider.__init__(self, name, supports_backlog, anime_only)
self.providerType = GenericProvider.NZB
def imageName(self):
return GenericProvider.imageName(self, 'newznab')
def _find_propers(self, search_date=None):
cache_results = self.cache.listPropers(search_date)
results = [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time']), self.show) for x in
cache_results]
index = 0
alt_search = ('nzbs_org' == self.getID())
term_items_found = False
do_search_alt = False
search_terms = ['.proper.', '.repack.']
proper_check = re.compile(r'(?i)\b(proper)|(repack)\b')
while index < len(search_terms):
search_params = {'q': search_terms[index]}
if alt_search:
if do_search_alt:
index += 1
if term_items_found:
do_search_alt = True
term_items_found = False
else:
if do_search_alt:
search_params['t'] = 'search'
do_search_alt = (True, False)[do_search_alt]
else:
index += 1
for item in self._doSearch(search_params, age=4):
(title, url) = self._get_title_and_url(item)
if not proper_check.search(title):
continue
if 'published_parsed' in item and item['published_parsed']:
result_date = item.published_parsed
if result_date:
result_date = datetime.datetime(*result_date[0:6])
else:
logger.log(u'Unable to figure out the date for entry %s, skipping it' % title)
continue
if not search_date or result_date > search_date:
search_result = classes.Proper(title, url, result_date, self.show)
results.append(search_result)
term_items_found = True
do_search_alt = False
time.sleep(0.2)
return results
class TorrentProvider(GenericProvider):
def __init__(self, name, supports_backlog=True, anime_only=False):
GenericProvider.__init__(self, name, supports_backlog, anime_only)
self.providerType = GenericProvider.TORRENT
self._seed_ratio = None
def get_cache_data(self):
search_params = {'RSS': ['']}
return self._doSearch(search_params)
def imageName(self):
return GenericProvider.imageName(self, 'torrent')
def seedRatio(self):
return self._seed_ratio
def getQuality(self, item, anime=False):
if isinstance(item, tuple):
name = item[0]
elif isinstance(item, dict):
@@ -499,11 +587,98 @@ class TorrentProvider(GenericProvider):
name = item.title
return Quality.sceneQuality(name, anime)
def _find_propers(self, search_date=datetime.datetime.today(), method=None):
@staticmethod
def _reverse_quality(quality):
return {
Quality.SDTV: 'HDTV x264',
Quality.SDDVD: 'DVDRIP',
Quality.HDTV: '720p HDTV x264',
Quality.FULLHDTV: '1080p HDTV x264',
Quality.RAWHDTV: '1080i HDTV mpeg2',
Quality.HDWEBDL: '720p WEB-DL h264',
Quality.FULLHDWEBDL: '1080p WEB-DL h264',
Quality.HDBLURAY: '720p Bluray x264',
Quality.FULLHDBLURAY: '1080p Bluray x264'
}.get(quality, '')
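Usage is a plain dict lookup that maps an internal quality constant back to a typical release-name fragment for building search strings; unknown values fall back to ''. For example:

# within this module, Quality is already imported from sickbeard.common
print(TorrentProvider._reverse_quality(Quality.FULLHDTV))  # '1080p HDTV x264'
print(TorrentProvider._reverse_quality(-1))                # ''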
def _get_season_search_strings(self, ep_obj, detail_only=False, scene=True):
if ep_obj.show.air_by_date or ep_obj.show.sports:
ep_detail = str(ep_obj.airdate).split('-')[0]
elif ep_obj.show.anime:
ep_detail = ep_obj.scene_absolute_number
else:
ep_detail = 'S%02d' % int(ep_obj.scene_season)
detail = ({}, {'Season_only': [ep_detail]})[detail_only and not self.show.sports and not self.show.anime]
return [dict({'Season': self._build_search_strings(ep_detail, scene)}.items() + detail.items())]
def _get_episode_search_strings(self, ep_obj, add_string='', detail_only=False, scene=True, sep_date=' ', use_or=True):
if not ep_obj:
return []
if self.show.air_by_date or self.show.sports:
ep_detail = str(ep_obj.airdate).replace('-', sep_date)
if self.show.sports:
month = ep_obj.airdate.strftime('%b')
ep_detail = ([ep_detail] + [month], '%s|%s' % (ep_detail, month))[use_or]
elif self.show.anime:
ep_detail = ep_obj.scene_absolute_number
else:
ep_detail = sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
'episodenumber': ep_obj.scene_episode}
append = (add_string, '')[self.show.anime]
detail = ({}, {'Episode_only': [ep_detail]})[detail_only and not self.show.sports and not self.show.anime]
return [dict({'Episode': self._build_search_strings(ep_detail, scene, append)}.items() + detail.items())]
def _build_search_strings(self, ep_detail, process_name=True, append=''):
"""
Build a list of search strings for querying a provider
:param ep_detail: String of episode detail or List of episode details
:param process_name: Bool Whether to call sanitizeSceneName() on show name
:param append: String to append to search strings
:return: List of search string parameters
"""
if not isinstance(ep_detail, list):
ep_detail = [ep_detail]
if not isinstance(append, list):
append = [append]
search_params = []
crop = re.compile(r'([\.\s])(?:\1)+')
for name in set(allPossibleShowNames(self.show)):
if process_name:
name = helpers.sanitizeSceneName(name)
for detail in ep_detail:
search_params += [crop.sub(r'\1', '%s %s' % (name, detail) + ('', ' ' + x)[any(x)]) for x in append]
return search_params
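The crop regex collapses runs of a repeated separator that appear when a name, detail and appended string are glued together. A quick sketch with placeholder values:

import re

crop = re.compile(r'([\.\s])(?:\1)+')
print(crop.sub(r'\1', 'Show Name S05' + ' ' + 'PROPER'))  # 'Show Name S05 PROPER'
print(crop.sub(r'\1', 'Show  Name..  S05'))               # 'Show Name. S05'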
def _checkAuth(self):
if hasattr(self, 'username') and hasattr(self, 'password'):
if self.username and self.password:
return True
setting = 'Password or Username'
elif hasattr(self, 'username') and hasattr(self, 'passkey'):
if self.username and self.passkey:
return True
setting = 'Passkey or Username'
elif hasattr(self, 'api_key'):
if self.api_key:
return True
setting = 'Apikey'
else:
return GenericProvider._checkAuth(self)
raise AuthException('%s for %s is empty in config provider options' % (setting, self.name))
def _find_propers(self, search_date=datetime.datetime.today(), search_terms=None):
"""
Search for releases of type PROPER
:param search_date: Filter search on episodes since this date
:param method: String or list of strings that qualify PROPER release types
:param search_terms: String or list of strings that qualify PROPER release types
:return: list of Proper objects
"""
results = []
@@ -520,8 +695,9 @@ class TorrentProvider(GenericProvider):
if not sql_results:
return results
clean_term = re.compile(r'(?i)[^a-z\|\.]+')
for sqlshow in sql_results:
showid, season, episode = (int(sqlshow['showid']), int(sqlshow['season']), int(sqlshow['episode']))
showid, season, episode = [int(sqlshow[item]) for item in ('showid', 'season', 'episode')]
self.show = helpers.findCertainShow(sickbeard.showList, showid)
if not self.show:
@@ -529,19 +705,36 @@ class TorrentProvider(GenericProvider):
cur_ep = self.show.getEpisode(season, episode)
if not isinstance(method, list):
if None is method:
method = 'PROPER|REPACK'
method = [method]
if None is search_terms:
search_terms = ['proper', 'repack']
elif not isinstance(search_terms, list):
if '' == search_terms:
search_terms = 'proper|repack'
search_terms = [search_terms]
for proper_string in method:
search_string = self._get_episode_search_strings(cur_ep, add_string=proper_string)
for proper_term in search_terms:
proper_check = re.compile(r'(?i)(?:%s)' % clean_term.sub('', proper_term))
proper_exp = re.sub(r'(?i)[^a-z\|\.]+', '', proper_string)
search_string = self._get_episode_search_strings(cur_ep, add_string=proper_term)
for item in self._doSearch(search_string[0]):
title, url = self._get_title_and_url(item)
if not re.search('(?i)(?:%s)' % proper_exp, title):
if not proper_check.search(title):
continue
results.append(classes.Proper(title, url, datetime.datetime.today(), self.show))
return results
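The two regexes cooperate: clean_term strips anything that is not a letter, pipe or dot from a search term, and the resulting proper_check then filters result titles. Roughly:

import re

clean_term = re.compile(r'(?i)[^a-z\|\.]+')
proper_check = re.compile(r'(?i)(?:%s)' % clean_term.sub('', 'proper|repack'))

print(bool(proper_check.search('Show.S01E01.PROPER.720p-GRP')))  # True
print(bool(proper_check.search('Show.S01E01.720p-GRP')))         # False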
@staticmethod
def _has_no_results(*html):
return re.search(r'(?i)<(?:h\d|strong)[^>]*>(?:'
+ 'your\ssearch\sdid\snot\smatch|'
+ 'nothing\sfound|'
+ 'no\storrents\sfound|'
+ '.*?there\sare\sno\sresults|'
+ '.*?no\shits\.\sTry\sadding'
+ ')', html[0])
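Since _has_no_results is a staticmethod taking the returned HTML, it can be exercised directly; the pattern fires on the common 'empty result' phrasings:

print(bool(TorrentProvider._has_no_results('<h2>Your search did not match anything</h2>')))  # True
print(bool(TorrentProvider._has_no_results('<table id="torrents-table">...</table>')))       # False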
def get_cache_data(self, *args, **kwargs):
search_params = {'Cache': ['']}
return self._doSearch(search_params)

@@ -18,7 +18,6 @@
import urllib
import time
import datetime
import os
try:
@@ -331,58 +330,7 @@ class NewznabProvider(generic.NZBProvider):
return results
def findPropers(self, search_date=None):
search_terms = ['.proper.', '.repack.']
cache_results = self.cache.listPropers(search_date)
results = [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time']), self.show) for x in
cache_results]
index = 0
alt_search = ('nzbs_org' == self.getID())
term_items_found = False
do_search_alt = False
while index < len(search_terms):
search_params = {'q': search_terms[index]}
if alt_search:
if do_search_alt:
index += 1
if term_items_found:
do_search_alt = True
term_items_found = False
else:
if do_search_alt:
search_params['t'] = "search"
do_search_alt = (True, False)[do_search_alt]
else:
index += 1
for item in self._doSearch(search_params, age=4):
(title, url) = self._get_title_and_url(item)
if item.has_key('published_parsed') and item['published_parsed']:
result_date = item.published_parsed
if result_date:
result_date = datetime.datetime(*result_date[0:6])
else:
logger.log(u"Unable to figure out the date for entry " + title + ", skipping it")
continue
if not search_date or result_date > search_date:
search_result = classes.Proper(title, url, result_date, self.show)
results.append(search_result)
term_items_found = True
do_search_alt = False
time.sleep(0.2)
return results
return self._find_propers(search_date)
class NewznabCache(tvcache.TVCache):

@@ -1,6 +1,4 @@
# Author: Idan Gutman
# Modified by jkaberg, https://github.com/jkaberg for SceneAccess
# URL: http://code.google.com/p/sickbeard/
# coding=utf-8
#
# This file is part of SickGear.
#
@@ -18,268 +16,137 @@
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
import re
import traceback
import datetime
import time
import traceback
import sickbeard
import generic
from sickbeard import logger, tvcache, db, classes, helpers, show_name_helpers
from sickbeard.common import Quality
from sickbeard.exceptions import ex
from . import generic
from sickbeard import logger, tvcache, helpers
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import sanitizeSceneName
from lib import requests
from lib.requests import exceptions
from lib.unidecode import unidecode
class SCCProvider(generic.TorrentProvider):
urls = {'base_url': 'https://sceneaccess.eu',
'login': 'https://sceneaccess.eu/login',
'detail': 'https://sceneaccess.eu/details?id=%s',
'search': 'https://sceneaccess.eu/browse?search=%s&method=1&%s',
'nonscene': 'https://sceneaccess.eu/nonscene?search=%s&method=1&c44=44&c45=44',
'archive': 'https://sceneaccess.eu/archive?search=%s&method=1&c26=26',
'download': 'https://sceneaccess.eu/%s'}
def __init__(self):
generic.TorrentProvider.__init__(self, 'SceneAccess', True, False)
self.username = None
self.password = None
self.ratio = None
self.minseed = None
self.minleech = None
generic.TorrentProvider.__init__(self, 'SceneAccess')
self.url_base = 'https://sceneaccess.eu/'
self.urls = {'config_provider_home_uri': self.url_base,
'login': self.url_base + 'login',
'search': self.url_base + 'browse?search=%s&method=1&c27=27&c17=17&c11=11',
'nonscene': self.url_base + 'nonscene?search=%s&method=1&c44=44&c45=44',
'archive': self.url_base + 'archive?search=%s&method=1&c26=26',
'get': self.url_base + '%s'}
self.url = self.urls['config_provider_home_uri']
self.username, self.password, self.minseed, self.minleech = 4 * [None]
self.cache = SCCCache(self)
self.url = self.urls['base_url']
self.categories = 'c27=27&c17=17&c11=11'
def getQuality(self, item, anime=False):
quality = Quality.sceneQuality(item[0], anime)
return quality
def _doLogin(self):
login_params = {'username': self.username,
'password': self.password,
'submit': 'come on in'}
self.session = requests.Session()
try:
response = self.session.post(self.urls['login'], data=login_params, headers=self.headers, timeout=30, verify=False)
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError) as e:
logger.log(u'Unable to connect to %s provider: %s' % (self.name, ex(e)), logger.ERROR)
return False
if re.search('Username or password incorrect', response.text) \
or re.search('<title>SceneAccess \| Login</title>', response.text) \
or 401 == response.status_code:
logger.log(u'Your authentication credentials for %s are incorrect, check your config.' % self.name, logger.ERROR)
return False
return True
def _get_season_search_strings(self, ep_obj):
search_string = {'Season': []}
if ep_obj.show.air_by_date or ep_obj.show.sports:
ep_string = str(ep_obj.airdate).split('-')[0]
elif ep_obj.show.anime:
ep_string = '%d' % ep_obj.scene_absolute_number
else:
ep_string = 'S%02d' % int(ep_obj.scene_season) # 1) showName SXX
for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
search_string['Season'].append('%s %s' % (show_name, ep_string))
return [search_string]
def _get_episode_search_strings(self, ep_obj, add_string=''):
search_string = {'Episode': []}
if not ep_obj:
return []
airdate = str(ep_obj.airdate).replace('-', '.')
if self.show.air_by_date:
ep_detail = airdate
elif self.show.sports:
ep_detail = '%s|%s' % (airdate, ep_obj.airdate.strftime('%b'))
elif self.show.anime:
ep_detail = ep_obj.scene_absolute_number
else:
ep_detail = sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
'episodenumber': ep_obj.scene_episode}
if add_string and not self.show.anime:
ep_detail += ' ' + add_string
for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
search_string['Episode'].append(re.sub('\s+', ' ', '%s %s' % (sanitizeSceneName(show_name), ep_detail)))
return [search_string]
def _isSection(self, section, text):
title = '<title>.+? \| %s</title>' % section
if re.search(title, text, re.IGNORECASE):
logged_in = lambda: 'uid' in self.session.cookies and 'pass' in self.session.cookies
if logged_in():
return True
else:
return False
if self._checkAuth():
login_params = {'username': self.username, 'password': self.password, 'submit': 'come on in'}
response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session)
if response and logged_in():
return True
logger.log(u'Failed to authenticate with %s, abort provider.' % self.name, logger.ERROR)
return False
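Condensed, the new login flow is: trust the session cookies when present, otherwise POST the form (via helpers.getURL in the real code, which honours the General Config/Advanced/Proxy host setting named in the changelog) and re-check. A sketch with placeholder credentials and a direct post for brevity:

import requests

session = requests.Session()

def logged_in():
    # SceneAccess marks an authenticated session with 'uid' and 'pass' cookies
    return 'uid' in session.cookies and 'pass' in session.cookies

if not logged_in():
    session.post('https://sceneaccess.eu/login',
                 data={'username': 'user', 'password': 'secret',  # placeholders
                       'submit': 'come on in'})
    print(logged_in())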
def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
results = []
items = {'Season': [], 'Episode': [], 'RSS': []}
items = {'Season': [], 'Episode': [], 'Cache': []}
if not self._doLogin():
return results
rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'detail', 'get': 'download'}.items())
for mode in search_params.keys():
for search_string in search_params[mode]:
search_string, url = self._get_title_and_url([search_string, self.urls['search'], '', '', ''])
search_string, void = self._get_title_and_url((search_string, None))
if isinstance(search_string, unicode):
search_string = unidecode(search_string)
nonsceneSearchURL = None
if 'Season' == mode:
searchURL = self.urls['archive'] % search_string
response = [self.getURL(searchURL)]
searches = [self.urls['archive'] % search_string]
else:
searchURL = self.urls['search'] % (search_string, self.categories)
nonsceneSearchURL = self.urls['nonscene'] % search_string
response = [self.getURL(searchURL),
self.getURL(nonsceneSearchURL)]
logger.log(u'Search string: ' + nonsceneSearchURL, logger.DEBUG)
searches = [self.urls['search'] % search_string,
self.urls['nonscene'] % search_string]
logger.log(u'Search string: ' + searchURL, logger.DEBUG)
for search_url in searches:
html = self.getURL(search_url)
response = [html for html in response if html is not None]
if not len(response):
continue
cnt = len(items[mode])
try:
if not html or self._has_no_results(html):
raise generic.HaltParseException
try:
for markup in response:
with BS4Parser(markup, features=['html5lib', 'permissive']) as soup:
with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
torrent_table = soup.find('table', attrs={'id': 'torrents-table'})
torrent_rows = []
if torrent_table:
torrent_rows = torrent_table.find_all('tr')
torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')
# Continue only if at least one Release is found
if 2 > len(torrent_rows):
if soup.title:
source = '%s (%s)' % (self.name, soup.title.string)
else:
source = self.name
logger.log(u'The data returned from %s does not contain any torrents' % source, logger.DEBUG)
continue
for result in torrent_table.find_all('tr')[1:]:
raise generic.HaltParseException
for tr in torrent_table.find_all('tr')[1:]:
try:
link = result.find('td', attrs={'class': 'ttr_name'}).find('a')
all_urls = result.find('td', attrs={'class': 'td_dl'}).find_all('a', limit=2)
url = all_urls[0]
seeders, leechers = [int(tr.find('td', attrs={'class': x}).get_text().strip())
for x in ('ttr_seeders', 'ttr_leechers')]
if 'Cache' != mode and (seeders < self.minseed or leechers < self.minleech):
continue
title = link.string
if re.search('\.\.\.', title):
response = self.getURL(self.url + '/' + link['href'])
if response:
with BS4Parser(response) as soup_detail:
title = re.search('(?<=").+(?<!")', soup_detail.title.string).group(0)
download_url = self.urls['download'] % url['href']
id = int(link['href'].replace('details?id=', ''))
seeders = int(result.find('td', attrs={'class': 'ttr_seeders'}).string)
leechers = int(result.find('td', attrs={'class': 'ttr_leechers'}).string)
info = tr.find('a', href=rc['info'])
title = ('title' in info.attrs and info['title']) or info.get_text().strip()
link = str(tr.find('a', href=rc['get'])['href']).lstrip('/')
download_url = self.urls['get'] % link
except (AttributeError, TypeError):
continue
if 'RSS' != mode and (self.minseed > seeders or self.minleech > leechers):
continue
if title and download_url:
items[mode].append((title, download_url, seeders))
if not title or not download_url:
continue
item = title, download_url, id, seeders, leechers
if self._isSection('Non-Scene', markup):
logger.log(u'Found result: %s (%s)' % (title, nonsceneSearchURL), logger.DEBUG)
else:
logger.log(u'Found result: %s (%s)' % (title, searchURL), logger.DEBUG)
items[mode].append(item)
except Exception as e:
logger.log(u'Failed parsing %s Traceback: %s' % (self.name, traceback.format_exc()), logger.ERROR)
except generic.HaltParseException:
time.sleep(1.1)
except Exception:
logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
self._log_result(mode, len(items[mode]) - cnt, search_url)
# For each search mode sort all the items by seeders
items[mode].sort(key=lambda tup: tup[3], reverse=True)
items[mode].sort(key=lambda tup: tup[2], reverse=True)
results += items[mode]
return results
def _get_title_and_url(self, item):
title, url, id, seeders, leechers = item
if title:
title += u''
title = re.sub(r'\s+', '.', title)
if url:
url = str(url).replace('&amp;', '&')
return title, url
def findPropers(self, search_date=datetime.datetime.today()):
results = []
return self._find_propers(search_date)
my_db = db.DBConnection()
sql_results = my_db.select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
def _get_episode_search_strings(self, ep_obj, add_string='', **kwargs):
if not sql_results:
return []
for sqlshow in sql_results:
showid, season, episode = (int(sqlshow['showid']), int(sqlshow['season']), int(sqlshow['episode']))
self.show = helpers.findCertainShow(sickbeard.showList, showid)
if not self.show:
continue
cur_ep = self.show.getEpisode(season, episode)
for search in ['.proper.', '.repack.']:
search_string = self._get_episode_search_strings(cur_ep, add_string=search)
for item in self._doSearch(search_string[0]):
title, url = self._get_title_and_url(item)
results.append(classes.Proper(title, url, datetime.datetime.today(), self.show))
return results
def seedRatio(self):
return self.ratio
return generic.TorrentProvider._get_episode_search_strings(self, ep_obj, add_string, sep_date='.', use_or=False)
class SCCCache(tvcache.TVCache):
def __init__(self, provider):
tvcache.TVCache.__init__(self, provider)
def __init__(self, this_provider):
tvcache.TVCache.__init__(self, this_provider)
# only poll SCC every 10 minutes max
self.minTime = 20
self.minTime = 20 # cache update frequency
def _getRSSData(self):
search_params = {'RSS': ['']}
return self.provider._doSearch(search_params)
return self.provider.get_cache_data()
provider = SCCProvider()

@@ -1241,7 +1241,6 @@ class TVShow(object):
toReturn += "anime: " + str(self.is_anime) + "\n"
return toReturn
def wantEpisode(self, season, episode, quality, manualSearch=False):
logger.log(u"Checking if found episode " + str(season) + "x" + str(episode) + " is wanted at quality " +
@@ -1250,8 +1249,12 @@ class TVShow(object):
# if the quality isn't one we want under any circumstances then just say no
initialQualities, archiveQualities = Quality.splitQuality(self.quality)
allQualities = list(set(initialQualities + archiveQualities))
logger.log(u"initial + archive = (" + ",".join([Quality.qualityStrings[qual] for qual in initialQualities]) + ") + (" + ",".join([Quality.qualityStrings[qual] for qual in archiveQualities]) + ") and found " + Quality.qualityStrings[quality],
logger.DEBUG)
initial = u'= (%s)' % ','.join([Quality.qualityStrings[qual] for qual in initialQualities])
if 0 < len(archiveQualities):
initial = u'+ upgrade to %s + (%s)'\
% (initial, ','.join([Quality.qualityStrings[qual] for qual in archiveQualities]))
logger.log(u'Want initial %s and found %s' % (initial, Quality.qualityStrings[quality]), logger.DEBUG)
if quality not in allQualities:
logger.log(u"Don't want this quality, ignoring found episode", logger.DEBUG)

@@ -27,11 +27,9 @@ from sickbeard import logger
from sickbeard.common import Quality
from sickbeard import helpers, show_name_helpers
from sickbeard.exceptions import MultipleShowObjectsException
from sickbeard.exceptions import AuthException
from sickbeard.exceptions import AuthException, ex
from name_parser.parser import NameParser, InvalidNameException, InvalidShowException
from sickbeard.rssfeeds import getFeed
from sickbeard import clients
import itertools
class CacheDBConnection(db.DBConnection):
@@ -77,7 +75,13 @@ class TVCache():
return True
def updateCache(self):
if self.shouldUpdate() and self._checkAuth():
try:
self._checkAuth()
except AuthException as e:
logger.log(u'Authentication error: ' + ex(e), logger.ERROR)
return []
if self.shouldUpdate():
# as long as the http request worked we count this as an update
data = self._getRSSData()
if not data: