2014-04-22 08:02:43 +00:00
|
|
|
# coding=utf-8
|
2014-03-10 05:18:05 +00:00
|
|
|
# Author: Nic Wolfe <nic@wolfeden.ca>
|
|
|
|
# URL: http://code.google.com/p/sickbeard/
|
|
|
|
#
|
2014-11-12 16:43:14 +00:00
|
|
|
# This file is part of SickGear.
|
2014-03-10 05:18:05 +00:00
|
|
|
#
|
2014-11-12 16:43:14 +00:00
|
|
|
# SickGear is free software: you can redistribute it and/or modify
|
2014-03-10 05:18:05 +00:00
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation, either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
|
|
|
#
|
2014-11-12 16:43:14 +00:00
|
|
|
# SickGear is distributed in the hope that it will be useful,
|
2014-03-10 05:18:05 +00:00
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
2014-11-12 16:43:14 +00:00
|
|
|
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
|
2014-03-10 05:18:05 +00:00
|
|
|
|
|
|
|
from __future__ import with_statement
|
|
|
|
|
|
|
|
import datetime
|
2015-09-18 00:06:34 +00:00
|
|
|
import itertools
|
|
|
|
import math
|
2014-03-10 05:18:05 +00:00
|
|
|
import os
|
|
|
|
import re
|
2015-09-18 00:06:34 +00:00
|
|
|
import time
|
Change validate and improve specific Torrent provider connections, IPT, KAT, SCC, TPB, TB, TD, TT.
Change refactor cache for torrent providers to reduce code.
Change improve search category selection BMTV, FSH, FF, TB.
Change identify more SD release qualities.
Change update SpeedCD, MoreThan, TVChaosuk.
Add torrent provider HD4Free.
Remove torrent provider BitSoup.
Change only create threads for providers needing a recent search instead of for all enabled.
Add 4489 as experimental value to "Recent search frequency" to use provider freqs instead of fixed width for all.
Fix searching nzb season packs.
Change remove some logging cruft.
2016-03-24 18:24:14 +00:00
|
|
|
import urlparse
|
2016-09-07 20:24:10 +00:00
|
|
|
from urllib import quote_plus
|
Change validate and improve specific Torrent provider connections, IPT, KAT, SCC, TPB, TB, TD, TT.
Change refactor cache for torrent providers to reduce code.
Change improve search category selection BMTV, FSH, FF, TB.
Change identify more SD release qualities.
Change update SpeedCD, MoreThan, TVChaosuk.
Add torrent provider HD4Free.
Remove torrent provider BitSoup.
Change only create threads for providers needing a recent search instead of for all enabled.
Add 4489 as experimental value to "Recent search frequency" to use provider freqs instead of fixed width for all.
Fix searching nzb season packs.
Change remove some logging cruft.
2016-03-24 18:24:14 +00:00
|
|
|
import zlib
|
2015-04-18 04:55:04 +00:00
|
|
|
from base64 import b16encode, b32decode
|
|
|
|
|
2014-03-10 05:18:05 +00:00
|
|
|
import sickbeard
|
2014-07-24 04:44:11 +00:00
|
|
|
import requests
|
2015-07-13 09:39:20 +00:00
|
|
|
import requests.cookies
|
2017-02-17 03:16:51 +00:00
|
|
|
from cfscrape import CloudflareScraper
|
2015-09-18 00:06:34 +00:00
|
|
|
from hachoir_parser import guessParser
|
2016-08-26 23:36:01 +00:00
|
|
|
from hachoir_core.error import HachoirError
|
2015-09-18 00:06:34 +00:00
|
|
|
from hachoir_core.stream import FileInputStream
|
|
|
|
|
2015-06-19 23:34:56 +00:00
|
|
|
from sickbeard import helpers, classes, logger, db, tvcache, encodingKludge as ek
|
|
|
|
from sickbeard.common import Quality, MULTI_EP_RESULT, SEASON_RESULT, USER_AGENT
|
|
|
|
from sickbeard.exceptions import SickBeardException, AuthException, ex
|
2016-11-14 21:33:15 +00:00
|
|
|
from sickbeard.helpers import maybe_plural, remove_file_failed
|
2014-07-06 13:11:04 +00:00
|
|
|
from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
|
2015-06-19 23:34:56 +00:00
|
|
|
from sickbeard.show_name_helpers import allPossibleShowNames
|
2015-04-18 04:55:04 +00:00
|
|
|
|
2014-09-07 05:25:01 +00:00
|
|
|
|
2015-06-19 23:34:56 +00:00
|
|
|
class HaltParseException(SickBeardException):
    """Something requires the current processing to abort"""
|
|
|
|
|
|
|
|
|
2014-03-25 05:57:24 +00:00
|
|
|
class GenericProvider:
    # provider type constants; subclasses assign one of these to self.providerType
    NZB = 'nzb'
    TORRENT = 'torrent'
|
2014-03-10 05:18:05 +00:00
|
|
|
|
2015-05-27 00:00:01 +00:00
|
|
|
    def __init__(self, name, supports_backlog=False, anime_only=False):
        """Base search provider.

        :param name: display name of the provider
        :param supports_backlog: True when the provider can be used for backlog searches
        :param anime_only: True when the provider only indexes anime
        """
        # these need to be set in the subclass
        self.providerType = None  # GenericProvider.NZB or GenericProvider.TORRENT
        self.name = name

        self.supports_backlog = supports_backlog
        self.anime_only = anime_only
        if anime_only:
            # anime releases are versioned (v1..v5) instead of PROPER/REPACK tagged
            self.proper_search_terms = 'v1|v2|v3|v4|v5'
        self.url = ''

        # current show being searched, set by find_search_results()
        self.show = None

        self.search_mode = None
        self.search_fallback = False
        self.enabled = False
        self.enable_recentsearch = False
        self.enable_backlog = False
        self.enable_scheduled_backlog = True
        self.categories = None

        self.cache = tvcache.TVCache(self)

        # session that can transparently solve Cloudflare challenges
        self.session = CloudflareScraper.create_scraper()

        self.headers = {
            # Using USER_AGENT instead of Mozilla to keep same user agent along authentication and download phases,
            # otherwise session might be broken and download fail, asking again for authentication
            # 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) ' +
            # 'Chrome/32.0.1700.107 Safari/537.36'}
            'User-Agent': USER_AGENT}
|
2014-04-28 09:15:29 +00:00
|
|
|
|
2015-07-13 09:39:20 +00:00
|
|
|
    def get_id(self):
        """Return this provider's identifier: its name normalised by make_id()."""
        return GenericProvider.make_id(self.name)
|
2014-03-10 05:18:05 +00:00
|
|
|
|
|
|
|
@staticmethod
|
2015-07-13 09:39:20 +00:00
|
|
|
def make_id(name):
|
|
|
|
return re.sub('[^\w\d_]', '_', name.strip().lower())
|
2014-03-10 05:18:05 +00:00
|
|
|
|
2015-07-13 09:39:20 +00:00
|
|
|
def image_name(self, *default_name):
|
2015-06-19 23:34:56 +00:00
|
|
|
|
2015-07-13 09:39:20 +00:00
|
|
|
for name in ['%s.%s' % (self.get_id(), image_ext) for image_ext in ['png', 'gif', 'jpg']]:
|
2015-06-19 23:34:56 +00:00
|
|
|
if ek.ek(os.path.isfile,
|
|
|
|
ek.ek(os.path.join, sickbeard.PROG_DIR, 'gui', sickbeard.GUI_NAME, 'images', 'providers', name)):
|
|
|
|
return name
|
|
|
|
|
|
|
|
return '%s.png' % ('newznab', default_name[0])[any(default_name)]
|
2014-03-10 05:18:05 +00:00
|
|
|
|
2015-09-18 00:06:34 +00:00
|
|
|
    def _authorised(self):
        """Hook for providers that need a login; the base implementation is always authorised."""
        return True
|
2014-03-10 05:18:05 +00:00
|
|
|
|
2016-08-26 23:36:01 +00:00
|
|
|
    def _check_auth(self, is_required=None):
        """Hook to validate auth configuration; the base implementation never fails.

        :param is_required: unused here; subclasses may raise AuthException
            (see is_public_access, which catches it) when credentials are missing
        """
        return True
|
|
|
|
|
2016-08-26 23:36:01 +00:00
|
|
|
    def is_public_access(self):
        """Return True when this provider needs no credentials to use.

        Known public providers are matched by name; otherwise a provider is
        deemed non-public when it defines its own _authorised, carries a
        'digest' attribute, or its _check_auth(is_required=True) succeeds.
        """
        try:
            return bool(re.search('(?i)rarbg|sick|anizb', self.name)) \
                or False is bool(('_authorised' in self.__class__.__dict__ or hasattr(self, 'digest')
                                  or self._check_auth(is_required=True)))
        except AuthException:
            # _check_auth signalled that credentials are required but missing/invalid
            return False
|
|
|
|
|
2015-07-13 09:39:20 +00:00
|
|
|
def is_active(self):
|
|
|
|
if GenericProvider.NZB == self.providerType and sickbeard.USE_NZBS:
|
|
|
|
return self.is_enabled()
|
|
|
|
elif GenericProvider.TORRENT == self.providerType and sickbeard.USE_TORRENTS:
|
|
|
|
return self.is_enabled()
|
2014-03-10 05:18:05 +00:00
|
|
|
else:
|
|
|
|
return False
|
|
|
|
|
2015-07-13 09:39:20 +00:00
|
|
|
    def is_enabled(self):
        """
        This should be overridden and should return the config setting eg. sickbeard.MYPROVIDER
        """
        return self.enabled
|
2014-03-10 05:18:05 +00:00
|
|
|
|
2015-07-13 09:39:20 +00:00
|
|
|
def get_result(self, episodes, url):
|
2014-03-10 05:18:05 +00:00
|
|
|
"""
|
|
|
|
Returns a result of the correct type for this provider
|
|
|
|
"""
|
|
|
|
|
2015-07-13 09:39:20 +00:00
|
|
|
if GenericProvider.NZB == self.providerType:
|
2014-03-10 05:18:05 +00:00
|
|
|
result = classes.NZBSearchResult(episodes)
|
2015-07-13 09:39:20 +00:00
|
|
|
elif GenericProvider.TORRENT == self.providerType:
|
2014-03-10 05:18:05 +00:00
|
|
|
result = classes.TorrentSearchResult(episodes)
|
|
|
|
else:
|
|
|
|
result = classes.SearchResult(episodes)
|
2014-03-25 05:57:24 +00:00
|
|
|
|
|
|
|
result.provider = self
|
2015-07-13 09:39:20 +00:00
|
|
|
result.url = url
|
2014-03-25 05:57:24 +00:00
|
|
|
|
2014-03-10 05:18:05 +00:00
|
|
|
return result
|
|
|
|
|
2016-02-26 01:07:39 +00:00
|
|
|
    # noinspection PyUnusedLocal
    def cb_response(self, r, *args, **kwargs):
        """Requests response hook: record a summary of the last response on the session.

        :param r: requests Response object
        :return: the unmodified response
        """
        # NOTE(review): r.from_cache presumes a cache-enabled session (e.g. requests-cache) — confirm
        self.session.response = dict(url=r.url, status_code=r.status_code, elapsed=r.elapsed, from_cache=r.from_cache)
        return r
|
|
|
|
|
2015-07-13 09:39:20 +00:00
|
|
|
    def get_url(self, url, post_data=None, params=None, timeout=30, json=False):
        """
        By default this is just a simple urlopen call but this method should be overridden
        for providers with special URL requirements (like cookies)

        :param url: address to fetch
        :param post_data: optional POST body
        :param params: optional query string params
        :param timeout: request timeout in seconds
        :param json: True to decode the response as JSON
        :return: response data, or None when not authorised
        """

        # check for auth
        if not self._authorised():
            return

        return helpers.getURL(url, post_data=post_data, params=params, headers=self.headers, timeout=timeout,
                              session=self.session, json=json, hooks=dict(response=self.cb_response))
|
2014-03-10 05:18:05 +00:00
|
|
|
|
2015-07-13 09:39:20 +00:00
|
|
|
    def download_result(self, result):
        """
        Save the result to disk.

        For torrents: extract the btih hash from the result url and try a list
        of torrent cache servers; fall back to writing the raw magnet link to a
        file when no cache server delivers. For nzbs: download the url directly.

        :param result: search result with .url, .name and optionally .cache_file
        :return: True when data was saved, False (or None for unknown type) otherwise
        """

        # check for auth
        if not self._authorised():
            return False

        if GenericProvider.TORRENT == self.providerType:
            final_dir = sickbeard.TORRENT_DIR
            link_type = 'magnet'
            try:
                btih = None
                try:
                    btih = re.findall('urn:btih:([\w]{32,40})', result.url)[0]
                    if 32 == len(btih):
                        # base32-encoded hash: convert to the canonical hex form
                        from base64 import b16encode, b32decode
                        btih = b16encode(b32decode(btih))
                except (StandardError, Exception):
                    pass

                if not btih or not re.search('(?i)[0-9a-f]{32,40}', btih):
                    logger.log('Unable to extract torrent hash from link: ' + ex(result.url), logger.ERROR)
                    return False

                # candidate .torrent urls built from cache server (scheme-suffix, host) pairs
                urls = ['http%s://%s/torrent/%s.torrent' % (u + (btih.upper(),))
                        for u in (('s', 'itorrents.org'), ('s', 'torrage.info'), ('', 'reflektor.karmorra.info'),
                                  ('s', 'torrentproject.se'), ('', 'thetorrent.org'), ('s', 'torcache.to'))]
            except (StandardError, Exception):
                # not a magnet link; treat the url as a direct .torrent download
                link_type = 'torrent'
                urls = [result.url]

        elif GenericProvider.NZB == self.providerType:
            final_dir = sickbeard.NZB_DIR
            link_type = 'nzb'
            urls = [result.url]

        else:
            return

        # remember any existing Referer header so it can be restored afterwards
        ref_state = 'Referer' in self.session.headers and self.session.headers['Referer']
        saved = False
        for url in urls:
            cache_dir = sickbeard.CACHE_DIR or helpers._getTempDir()
            base_name = '%s.%s' % (helpers.sanitizeFileName(result.name), self.providerType)
            final_file = ek.ek(os.path.join, final_dir, base_name)
            # a previously cached download can be reused instead of re-fetching
            cached = getattr(result, 'cache_file', None)
            if cached and ek.ek(os.path.isfile, cached):
                base_name = ek.ek(os.path.basename, cached)
            cache_file = ek.ek(os.path.join, cache_dir, base_name)

            self.session.headers['Referer'] = url
            if cached or helpers.download_file(url, cache_file, session=self.session):

                if self._verify_download(cache_file):
                    logger.log(u'Downloaded %s result from %s' % (self.name, url))
                    try:
                        helpers.moveFile(cache_file, final_file)
                        msg = 'moved'
                    except (OSError, Exception):
                        msg = 'copied cached file'
                    logger.log(u'Saved .%s data and %s to %s' % (
                        (link_type, 'torrent cache')['magnet' == link_type], msg, final_file))
                    saved = True
                    break

                # verification failed: discard the bad download and try the next url
                remove_file_failed(cache_file)

        # restore (or remove) the Referer header changed above
        if 'Referer' in self.session.headers:
            if ref_state:
                self.session.headers['Referer'] = ref_state
            else:
                del(self.session.headers['Referer'])

        if not saved and 'magnet' == link_type:
            logger.log(u'All torrent cache servers failed to return a downloadable result', logger.DEBUG)
            final_file = ek.ek(os.path.join, final_dir, '%s.%s' % (helpers.sanitizeFileName(result.name), link_type))
            try:
                # last resort: write the magnet link itself, some clients can load it
                with open(final_file, 'wb') as fp:
                    fp.write(result.url)
                    fp.flush()
                    os.fsync(fp.fileno())
                logger.log(u'Saved magnet link to file as some clients (or plugins) support this, %s' % final_file)
                if 'blackhole' == sickbeard.TORRENT_METHOD:
                    logger.log('Tip: If your client fails to load magnet in files, ' +
                               'change blackhole to a client connection method in search settings')
            except (StandardError, Exception):
                logger.log(u'Failed to save magnet link to file, %s' % final_file)
        elif not saved:
            logger.log(u'Server failed to return anything useful', logger.ERROR)

        return saved
|
2014-03-10 05:18:05 +00:00
|
|
|
|
|
|
|
    def _verify_download(self, file_name=None):
        """
        Checks the saved file to see if it was actually valid, if not then consider the download a failure.

        :param file_name: path of the downloaded file
        :return: truthy when the file looks valid for this provider type
        """
        result = True
        # primitive verification of torrents, just make sure we didn't get a text file or something
        if GenericProvider.TORRENT == self.providerType:
            parser = stream = None
            try:
                stream = FileInputStream(file_name)
                parser = guessParser(stream)
            except (HachoirError, Exception):
                pass
            # valid only when hachoir recognises bittorrent content
            result = parser and 'application/x-bittorrent' == parser.mime_type

            try:
                # release the file handle opened by FileInputStream
                stream._input.close()
            except (HachoirError, Exception):
                pass

        return result
|
2014-03-10 05:18:05 +00:00
|
|
|
|
2015-07-13 09:39:20 +00:00
|
|
|
    def search_rss(self, episodes):
        """Return needed episode results found in this provider's cache."""
        return self.cache.findNeededEpisodes(episodes)
|
2014-05-15 04:16:46 +00:00
|
|
|
|
2015-07-13 09:39:20 +00:00
|
|
|
def get_quality(self, item, anime=False):
|
2014-03-10 05:18:05 +00:00
|
|
|
"""
|
|
|
|
Figures out the quality of the given RSS item node
|
2015-09-18 00:06:34 +00:00
|
|
|
|
2014-03-10 05:18:05 +00:00
|
|
|
item: An elementtree.ElementTree element representing the <item> tag of the RSS feed
|
2015-09-18 00:06:34 +00:00
|
|
|
|
|
|
|
Returns a Quality value obtained from the node's data
|
2014-03-10 05:18:05 +00:00
|
|
|
"""
|
2015-09-18 00:06:34 +00:00
|
|
|
(title, url) = self._title_and_url(item) # @UnusedVariable
|
2014-05-26 06:29:22 +00:00
|
|
|
quality = Quality.sceneQuality(title, anime)
|
2014-03-10 05:18:05 +00:00
|
|
|
return quality
|
|
|
|
|
2016-09-04 20:00:44 +00:00
|
|
|
    def _search_provider(self, search_params, search_mode='eponly', epcount=0, age=0, **kwargs):
        """Hook: query the provider and return raw result items; base returns an empty list."""
        return []
|
|
|
|
|
2015-09-18 00:06:34 +00:00
|
|
|
    def _season_strings(self, episode):
        """Hook: return season pack search strings for an episode; base returns an empty list."""
        return []
|
|
|
|
|
2015-09-18 00:06:34 +00:00
|
|
|
    def _episode_strings(self, *args, **kwargs):
        """Hook: return single episode search strings; base returns an empty list."""
        return []
|
2014-03-25 05:57:24 +00:00
|
|
|
|
2015-09-18 00:06:34 +00:00
|
|
|
def _title_and_url(self, item):
|
2014-03-10 05:18:05 +00:00
|
|
|
"""
|
2015-05-27 00:00:01 +00:00
|
|
|
Retrieves the title and URL data from the item
|
2014-03-10 05:18:05 +00:00
|
|
|
|
2015-05-27 00:00:01 +00:00
|
|
|
item: An elementtree.ElementTree element representing the <item> tag of the RSS feed, or a two part tup
|
2014-03-10 05:18:05 +00:00
|
|
|
|
|
|
|
Returns: A tuple containing two strings representing title and URL respectively
|
|
|
|
"""
|
2014-05-05 13:26:02 +00:00
|
|
|
|
2015-06-19 23:34:56 +00:00
|
|
|
title, url = None, None
|
2015-05-27 00:00:01 +00:00
|
|
|
try:
|
2015-09-18 00:06:34 +00:00
|
|
|
title, url = isinstance(item, tuple) and (item[0], item[1]) or \
|
|
|
|
(item.get('title', None), item.get('link', None))
|
2016-08-26 23:36:01 +00:00
|
|
|
except (StandardError, Exception):
|
2015-05-27 00:00:01 +00:00
|
|
|
pass
|
2014-03-10 05:18:05 +00:00
|
|
|
|
2015-09-18 00:06:34 +00:00
|
|
|
title = title and re.sub(r'\s+', '.', u'%s' % title)
|
|
|
|
url = url and str(url).replace('&', '&')
|
2014-03-25 05:57:24 +00:00
|
|
|
|
2014-08-23 21:39:10 +00:00
|
|
|
return title, url
|
2014-03-25 05:57:24 +00:00
|
|
|
|
2016-08-26 23:36:01 +00:00
|
|
|
def _link(self, url, url_tmpl=None):
|
|
|
|
|
|
|
|
url = url and str(url).strip().replace('&', '&') or ''
|
|
|
|
try:
|
|
|
|
url_tmpl = url_tmpl or self.urls['get']
|
|
|
|
except (StandardError, Exception):
|
|
|
|
url_tmpl = '%s'
|
2017-02-18 20:36:42 +00:00
|
|
|
return url if re.match('(?i)(https?://|magnet:)', url) else (url_tmpl % url.lstrip('/'))
|
2016-08-26 23:36:01 +00:00
|
|
|
|
2016-11-01 18:13:51 +00:00
|
|
|
    def _header_row(self, table_row, custom_match=None, header_strip=''):
        """
        :param table_row: Soup element of a row inside the table whose header is to be mapped
        :param custom_match: Dict key/values to override one or more default regexes
        :param custom_match: Dict key/values to override one or more default regexes
        :param header_strip: String regex of ambiguities to remove from headers
        :return: dict column indices or None for leech, seeds, and size
        """
        results = {}
        # NOTE: dict(...).items() + dict(...).items() concatenation and iteritems below are Python 2 idioms
        rc = dict((k, re.compile('(?i)' + r)) for (k, r) in dict(
            {'seed': r'(?:seed|s/l)', 'leech': r'(?:leech|peers)', 'size': r'(?:size)'}.items()
            + ({}, custom_match)[any([custom_match])].items()).items())
        table = table_row.find_parent('table')
        header_row = table.tr or table.thead.tr or table.tbody.tr
        # flatten multi-valued class attributes so they can be regex-searched as one string
        for y in [x for x in header_row(True) if x.attrs.get('class')]:
            y['class'] = '..'.join(y['class'])
        all_cells = header_row.find_all('th')
        all_cells = all_cells if any(all_cells) else header_row.find_all('td')

        # derive a label for each header cell, trying in order: visible text matching a
        # known regex, then id/title attributes, then attributes of nested img/i/abbr/a
        # elements, finally falling back to the raw cell text
        headers = [re.sub(
            r'[\s]+', '',
            ((any([cell.get_text()]) and any([rc[x].search(cell.get_text()) for x in rc.keys()]) and cell.get_text())
             or (cell.attrs.get('id') and any([rc[x].search(cell['id']) for x in rc.keys()]) and cell['id'])
             or (cell.attrs.get('title') and any([rc[x].search(cell['title']) for x in rc.keys()]) and cell['title'])
             or next(iter(set(filter(lambda z: any([z]), [
                next(iter(set(filter(lambda y: any([y]), [
                    cell.find(tag, **p) for p in [{attr: rc[x]} for x in rc.keys()]]))), {}).get(attr)
                for (tag, attr) in [
                    ('img', 'title'), ('img', 'src'), ('i', 'title'), ('i', 'class'),
                    ('abbr', 'title'), ('a', 'title'), ('a', 'href')]]))), '')
             or cell.get_text()
             )).strip() for cell in all_cells]
        headers = [re.sub(header_strip, '', x) for x in headers]
        all_headers = headers
        # expand colspan'd header cells so indices line up with the data columns
        colspans = [int(cell.attrs.get('colspan', 0)) for cell in all_cells]
        if any(colspans):
            all_headers = []
            for i, width in enumerate(colspans):
                all_headers += [headers[i]] + ([''] * (width - 1))

        # map each wanted key to a negative column index (offset from the row end)
        for k, r in rc.iteritems():
            if k not in results:
                for name in filter(lambda v: any([v]) and r.search(v), all_headers[::-1]):
                    results[k] = all_headers.index(name) - len(all_headers)
                    break

        # keys with no matching column are explicitly None
        for missing in set(rc.keys()) - set(results.keys()):
            results[missing] = None

        return results
|
|
|
|
|
2016-09-07 20:24:10 +00:00
|
|
|
@staticmethod
|
|
|
|
def _dhtless_magnet(btih, name=None):
|
|
|
|
"""
|
|
|
|
:param btih: torrent hash
|
|
|
|
:param name: torrent name
|
|
|
|
:return: a magnet loaded with default trackers for clients without enabled DHT or None if bad hash
|
|
|
|
"""
|
|
|
|
try:
|
|
|
|
btih = btih.lstrip('/').upper()
|
|
|
|
if 32 == len(btih):
|
|
|
|
btih = b16encode(b32decode(btih)).lower()
|
|
|
|
btih = re.search('(?i)[0-9a-f]{32,40}', btih) and btih or None
|
|
|
|
except (StandardError, Exception):
|
|
|
|
btih = None
|
|
|
|
return (btih and 'magnet:?xt=urn:btih:%s&dn=%s&tr=%s' % (btih, quote_plus(name or btih), '&tr='.join(
|
|
|
|
[quote_plus(tr) for tr in
|
|
|
|
'http://atrack.pow7.com/announce', 'http://mgtracker.org:2710/announce',
|
|
|
|
'http://pow7.com/announce', 'http://t1.pow7.com/announce',
|
|
|
|
'http://tracker.tfile.me/announce', 'udp://9.rarbg.com:2710/announce',
|
|
|
|
'udp://9.rarbg.me:2710/announce', 'udp://9.rarbg.to:2710/announce',
|
|
|
|
'udp://eddie4.nl:6969/announce', 'udp://explodie.org:6969/announce',
|
|
|
|
'udp://inferno.demonoid.pw:3395/announce', 'udp://inferno.subdemon.com:3395/announce',
|
|
|
|
'udp://ipv4.tracker.harry.lu:80/announce', 'udp://p4p.arenabg.ch:1337/announce',
|
|
|
|
'udp://shadowshq.yi.org:6969/announce', 'udp://tracker.aletorrenty.pl:2710/announce',
|
|
|
|
'udp://tracker.coppersurfer.tk:6969', 'udp://tracker.coppersurfer.tk:6969/announce',
|
|
|
|
'udp://tracker.internetwarriors.net:1337', 'udp://tracker.internetwarriors.net:1337/announce',
|
|
|
|
'udp://tracker.leechers-paradise.org:6969', 'udp://tracker.leechers-paradise.org:6969/announce',
|
|
|
|
'udp://tracker.opentrackr.org:1337/announce', 'udp://tracker.torrent.eu.org:451/announce',
|
Add fanart to Episodes View, Display Show, Edit Show, and Media Renamer page.
Add "Maximum fanart image files per show to cache" to config General/Interface.
Add populate images when the daily show updater is run with a default maximum 3 images per show.
Change force full update in a show will replace existing images with new.
Add fanart livepanel to lower right of Episodes View and Display Show page.
Add highlight panel red until button is clicked a few times.
Add flick through multiple background images on Episodes View and Display Show page.
Add persistent move poster image to right hand side or hide on Display Show page (multi-click the eye).
Add persistent translucency of background images on Episodes View and Display Show page.
Add persistent fanart rating to avoid art completely, random display, random from a group, or display fave always.
Add persistent views of the show detail on Display Show page.
Add persistent views on Episodes View.
Add persistent button to collapse and expand card images on Episode View/Layout daybyday.
Add non persistent "Open gear" and "Full fanart" image views to Episodes View and Display Show page.
Add "smart" selection of fanart image to display on Episode view.
Change insert [!] and change text shade of ended shows in drop down show list on Display Show page.
Change button graphic for next and previous show of show list on Display Show page.
Add logic to hide some livepanel buttons until artwork becomes available or in other circumstances.
Add "(Ended)" where appropriate to show title on Display Show page.
Add links to fanart.tv where appropriate on Display Show page.
Change use tense for label "Airs" or "Aired" depending on if show ended.
Change display "No files" instead of "0 files" and "Upgrade once" instead of "End upgrade on first match".
Add persistent button to newest season to "Show all" episodes.
Add persistent button to all shown seasons to "Hide most" episodes.
Add button to older seasons to toggle "Show Season n" or "Show Specials" with "Hide..." episodes.
Add season level status counts next to each season header on display show page
Add sorting to season table headers on display show page
Add filename and size to quality badge on display show page, removed its redundant "downloaded" text
Remove redundant "Add show" buttons
Change combine the NFO and TBN columns into a single Meta column
Change reduce screen estate used by episode numbers columns
Change improve clarity of text on Add Show page.
Add "Reset fanart ratings" to show Edit/Other tab.
Add fanart usage to show Edit/Other tab.
Add fanart keys guide to show Edit/Other tab.
Change add placeholder tip to "Alternative release name(s)" on show Edit.
Change add placeholder tip to search box on shows Search.
Change hide Anime tips on show Edit when selecting its mutually exclusive options.
Change label "End upgrade on first match" to "Upgrade once" on show Edit.
Change improve performance rendering displayShow.
Add total episodes to start of show description (excludes specials if those are hidden).
Add "Add show" actions i.e. "Search", "Trakt cards", "IMDb cards", and "Anime" to Shows menu.
Add "Import (existing)" action to Tools menu.
Change SD quality from red to dark green, 2160p UHD 4K is red.
Change relocate the functions of Logs & Errors to the right side Tools menu -> View Log File.
Add warning indicator to the Tools menu in different colour depending on error count (green through red).
Change View Log error item output from reversed to natural order.
Change View Log add a typeface and some colour to improve readability.
Change View Log/Errors only display "Clear Errors" button when there are errors to clear.
Change improve performance of View Log File.
2016-02-28 23:43:40 +00:00
|
|
|
'udp://tracker.trackerfix.com:80/announce', 'udp://tracker.zer0day.to:1337/announce'])) or None)
|
2016-09-07 20:24:10 +00:00
|
|
|
|
2016-09-04 20:00:44 +00:00
|
|
|
    def get_show(self, item, **kwargs):
        """Hook: return the show object a result item belongs to; base returns None."""
        return None
|
|
|
|
|
|
|
|
    def find_search_results(self, show, episodes, search_mode, manual_search=False, **kwargs):
        """Search this provider for the given episodes of a show.

        Cache hits are collected per episode; remaining episodes are searched
        via season or single-episode search strings depending on search_mode.

        :param show: show object being searched
        :param episodes: list of episode objects to find
        :param search_mode: 'sponly' for season packs, otherwise single episodes
        :param manual_search: True when triggered by a user-initiated search
        :return: dict of episode number -> list of results
        """

        self._check_auth()
        self.show = show

        results = {}
        item_list = []

        searched_scene_season = None
        for ep_obj in episodes:
            # search cache for episode result
            cache_result = self.cache.searchCache(ep_obj, manual_search)
            if cache_result:
                if ep_obj.episode not in results:
                    results[ep_obj.episode] = cache_result
                else:
                    results[ep_obj.episode].extend(cache_result)

                # found result, search next episode
                continue

            if 'sponly' == search_mode:
                # skip if season already searched
                if 1 < len(episodes) and searched_scene_season == ep_obj.scene_season:
                    continue

                searched_scene_season = ep_obj.scene_season

                # get season search params
                search_params = self._season_strings(ep_obj)
            else:
                # get single episode search params
                search_params = self._episode_strings(ep_obj)

            for cur_param in search_params:
                item_list += self._search_provider(cur_param, search_mode=search_mode, epcount=len(episodes))

        return self.finish_find_search_results(show, episodes, search_mode, manual_search, results, item_list)
|
|
|
|
|
|
|
|
def finish_find_search_results(self, show, episodes, search_mode, manual_search, results, item_list, **kwargs):
    """
    Sort, parse and filter raw provider items into usable search results.

    Items are sorted best-quality first, name-parsed, validated against the
    requested episodes, and either turned into result objects keyed by
    episode number (or SEASON_RESULT/MULTI_EP_RESULT markers) or written to
    the provider cache for later use.

    :param show: show object the episodes belong to
    :param episodes: list of episode objects that were searched for
    :param search_mode: 'sponly' for season-pack searches, otherwise per-episode
    :param manual_search: True when triggered by a user-initiated search
    :param results: dict of results already satisfied from cache
    :param item_list: raw items returned by the provider search
    :return: dict mapping episode number (or season/multi-ep markers) to result lists
    """

    # if we found what we needed already from cache then return results and exit
    if len(results) == len(episodes):
        return results

    # sort list by quality
    if len(item_list):
        items = {}
        items_unknown = []
        for item in item_list:
            quality = self.get_quality(item, anime=show.is_anime)
            if Quality.UNKNOWN == quality:
                items_unknown += [item]
            else:
                if quality not in items:
                    items[quality] = [item]
                else:
                    items[quality].append(item)

        # highest quality bucket first, unknown-quality items appended last
        item_list = list(itertools.chain(*[v for (k, v) in sorted(items.items(), reverse=True)]))
        item_list += items_unknown if items_unknown else []

    # filter results
    cl = []  # batch of cache entries to mass-insert at the end
    for item in item_list:
        (title, url) = self._title_and_url(item)

        parser = NameParser(False, showObj=self.get_show(item, **kwargs), convert=True)
        # parse the file name
        try:
            parse_result = parser.parse(title)
        except InvalidNameException:
            logger.log(u'Unable to parse the filename %s into a valid episode' % title, logger.DEBUG)
            continue
        except InvalidShowException:
            logger.log(u'No match for search criteria in the parsed filename ' + title, logger.DEBUG)
            continue

        show_obj = parse_result.show
        quality = parse_result.quality
        release_group = parse_result.release_group
        version = parse_result.version

        # when True, the item does not match this search but is still cached
        # so a later search can pick it up without hitting the provider again
        add_cache_entry = False
        if not (show_obj.air_by_date or show_obj.is_sports):
            if 'sponly' == search_mode:
                if len(parse_result.episode_numbers):
                    # season-pack search must not return single episodes
                    logger.log(u'This is supposed to be a season pack search but the result ' + title +
                               u' is not a valid season pack, skipping it', logger.DEBUG)
                    add_cache_entry = True
                if len(parse_result.episode_numbers)\
                        and (parse_result.season_number not in set([ep.season for ep in episodes]) or not [
                            ep for ep in episodes if ep.scene_episode in parse_result.episode_numbers]):
                    logger.log(u'The result ' + title + u' doesn\'t seem to be a valid episode that we are trying' +
                               u' to snatch, ignoring', logger.DEBUG)
                    add_cache_entry = True
            else:
                # NOTE(review): with no episode numbers parsed, the list
                # comprehension below is always empty, so this reduces to a
                # season-number sanity check — confirm intended behaviour
                if not len(parse_result.episode_numbers)\
                        and parse_result.season_number\
                        and not [ep for ep in episodes
                                 if ep.season == parse_result.season_number and
                                 ep.episode in parse_result.episode_numbers]:
                    logger.log(u'The result ' + title + u' doesn\'t seem to be a valid season that we are trying' +
                               u' to snatch, ignoring', logger.DEBUG)
                    add_cache_entry = True
                elif len(parse_result.episode_numbers)\
                        and not [ep for ep in episodes
                                 if ep.season == parse_result.season_number and
                                 ep.episode in parse_result.episode_numbers]:
                    logger.log(u'The result ' + title + ' doesn\'t seem to be a valid episode that we are trying' +
                               u' to snatch, ignoring', logger.DEBUG)
                    add_cache_entry = True

            if not add_cache_entry:
                # we just use the existing info for normal searches
                actual_season = parse_result.season_number
                actual_episodes = parse_result.episode_numbers
        else:
            if not parse_result.is_air_by_date:
                logger.log(u'This is supposed to be a date search but the result ' + title +
                           u' didn\'t parse as one, skipping it', logger.DEBUG)
                add_cache_entry = True
            else:
                # map the parsed air date back to a season/episode via the db
                airdate = parse_result.air_date.toordinal()
                my_db = db.DBConnection()
                sql_results = my_db.select('SELECT season, episode FROM tv_episodes ' +
                                           'WHERE showid = ? AND airdate = ?', [show_obj.indexerid, airdate])

                if 1 != len(sql_results):
                    logger.log(u'Tried to look up the date for the episode ' + title + ' but the database didn\'t' +
                               u' give proper results, skipping it', logger.WARNING)
                    add_cache_entry = True

            if not add_cache_entry:
                actual_season = int(sql_results[0]['season'])
                actual_episodes = [int(sql_results[0]['episode'])]

        # add parsed result to cache for usage later on
        if add_cache_entry:
            logger.log(u'Adding item from search to cache: ' + title, logger.DEBUG)
            ci = self.cache.add_cache_entry(title, url, parse_result=parse_result)
            if None is not ci:
                cl.append(ci)
            continue

        # make sure we want the episode
        want_ep = True
        for epNo in actual_episodes:
            if not show_obj.wantEpisode(actual_season, epNo, quality, manual_search):
                want_ep = False
                break

        if not want_ep:
            logger.log(u'Ignoring result %s because we don\'t want an episode that is %s'
                       % (title, Quality.qualityStrings[quality]), logger.DEBUG)
            continue

        logger.log(u'Found result %s at %s' % (title, url), logger.DEBUG)

        # make a result object
        ep_obj = []
        for curEp in actual_episodes:
            ep_obj.append(show_obj.getEpisode(actual_season, curEp))

        result = self.get_result(ep_obj, url)
        if None is result:
            continue
        result.show = show_obj
        result.name = title
        result.quality = quality
        result.release_group = release_group
        result.content = None
        result.version = version

        # key the result under the episode number, or a marker for
        # multi-episode / full-season results handled later
        if 1 == len(ep_obj):
            ep_num = ep_obj[0].episode
            logger.log(u'Single episode result.', logger.DEBUG)
        elif 1 < len(ep_obj):
            ep_num = MULTI_EP_RESULT
            logger.log(u'Separating multi-episode result to check for later - result contains episodes: ' +
                       str(parse_result.episode_numbers), logger.DEBUG)
        elif 0 == len(ep_obj):
            ep_num = SEASON_RESULT
            logger.log(u'Separating full season result to check for later', logger.DEBUG)

        if ep_num not in results:
            results[ep_num] = [result]
        else:
            results[ep_num].append(result)

    # check if we have items to add to cache
    if 0 < len(cl):
        my_db = self.cache.get_db()
        my_db.mass_action(cl)

    return results
|
2014-03-10 05:18:05 +00:00
|
|
|
|
2015-09-18 00:06:34 +00:00
|
|
|
def find_propers(self, search_date=None, **kwargs):
    """
    Build Proper objects from cached provider items newer than search_date.

    :param search_date: datetime lower bound passed to the cache lookup
    :return: list of classes.Proper instances
    """
    propers = []
    for entry in self.cache.listPropers(search_date):
        found = datetime.datetime.fromtimestamp(entry['time'])
        propers.append(classes.Proper(entry['name'], entry['url'], found, self.show))
    return propers
|
2014-03-10 05:18:05 +00:00
|
|
|
|
2015-07-13 09:39:20 +00:00
|
|
|
def seed_ratio(self):
    """
    Return the provider's custom seed ratio.

    Providers with a configurable seed ratio override this method;
    the base implementation reports no ratio at all.
    """
    return ''
|
|
|
|
|
2015-09-18 00:06:34 +00:00
|
|
|
def _log_search(self, mode='Cache', count=0, url='url missing'):
|
|
|
|
"""
|
|
|
|
Simple function to log the result of a search types except propers
|
|
|
|
:param count: count of successfully processed items
|
|
|
|
:param url: source url of item(s)
|
|
|
|
"""
|
|
|
|
if 'Propers' != mode:
|
|
|
|
self.log_result(mode, count, url)
|
|
|
|
|
|
|
|
def log_result(self, mode='Cache', count=0, url='url missing'):
    """
    Simple function to log the result of any search
    :param mode: string that this log relates to
    :param count: count of successfully processed items
    :param url: source url of item(s)
    """
    # proper searches use different phrasing than the regular item counts
    str1, thing, str3 = (('', '%s item' % mode.lower(), ''), (' usable', 'proper', ' found'))['Propers' == mode]
    # fix: regex is now a raw string — '(\s)\s+' relied on undefined string
    # escapes (a DeprecationWarning on py3); behaviour is unchanged
    logger.log(u'%s %s in response from %s' % (('No' + str1, count)[0 < count], (
        '%s%s%s%s' % (('', 'freeleech ')[getattr(self, 'freeleech', False)], thing, maybe_plural(count), str3)),
        re.sub(r'(\s)\s+', r'\1', url)))
|
2015-06-19 23:34:56 +00:00
|
|
|
|
2015-07-13 09:39:20 +00:00
|
|
|
def check_auth_cookie(self):
    """
    Validate the provider's configured `cookies` string and load it into the session.

    Returns False when the string does not parse as `key=value` pair(s),
    True otherwise (including when the provider has no `cookies` attribute).
    """

    if hasattr(self, 'cookies'):
        cookies = self.cookies

        # must be one or more key=value pairs separated by ';' or whitespace
        if not (cookies and re.match('^(?:\w+=[^;\s]+[;\s]*)+$', cookies)):
            return False

        # NOTE(review): the trailing comma below makes `cj` a one-element
        # tuple containing the cookiejar, so the loop iterates the jar object
        # itself (passing the isinstance check) rather than its cookies —
        # confirm this is intentional before changing
        cj = requests.utils.add_dict_to_cookiejar(self.session.cookies,
                                                  dict([x.strip().split('=') for x in cookies.split(';')
                                                        if x != ''])),
        for item in cj:
            if not isinstance(item, requests.cookies.RequestsCookieJar):
                return False

    return True
|
|
|
|
|
|
|
|
def _check_cookie(self):
|
|
|
|
|
|
|
|
if self.check_auth_cookie():
|
|
|
|
return True, None
|
|
|
|
|
|
|
|
return False, 'Cookies not correctly formatted key=value pairs e.g. uid=xx;pass=yy)'
|
|
|
|
|
2015-09-18 00:06:34 +00:00
|
|
|
def has_all_cookies(self, cookies=None, pre=''):
    """
    Check the session cookie jar holds every named cookie.

    :param cookies: cookie name or list of names (defaults to ['uid', 'pass'])
    :param pre: optional prefix applied to each cookie name before lookup
    :return: True when all (prefixed) names are present in the session cookies
    """
    if not cookies:
        names = ['uid', 'pass']
    elif isinstance(cookies, list):
        names = cookies
    else:
        names = [cookies]
    return all('%s%s' % (pre, name) in self.session.cookies for name in names)
|
2015-09-18 00:06:34 +00:00
|
|
|
|
|
|
|
def _categories_string(self, mode='Cache', template='c%s=1', delimiter='&'):
    """
    Build the category portion of a provider query string.

    Uses the provider's 'shows' category list when defined, otherwise the
    per-mode list ('Propers' maps to 'Episode'), optionally extended with
    anime categories when anime content is relevant for this search.

    :param mode: search mode selecting which category list to use
    :param template: format applied to each category id ('' disables it)
    :param delimiter: string joining the formatted categories
    :return: joined category string
    """
    # an empty template falls back to plain '%s' via any(template)
    return delimiter.join([('%s', template)[any(template)] % c for c in sorted(
        'shows' in self.categories and (isinstance(self.categories['shows'], type([])) and
                                        self.categories['shows'] or [self.categories['shows']]) or
        self.categories[(mode, 'Episode')['Propers' == mode]] +
        ([], self.categories.get('anime') or [])[
            (mode in ['Cache', 'Propers'] and helpers.has_anime()) or
            ((mode in ['Season', 'Episode']) and self.show and self.show.is_anime)])])
|
2015-09-18 00:06:34 +00:00
|
|
|
|
|
|
|
@staticmethod
def _bytesizer(size_dim=''):
    """
    Convert a human readable size string (e.g. '1.5 GB') to bytes.

    :param size_dim: size string containing a number and optional unit
    :return: byte count as a long, the input unchanged when it is not a
             string, or None when no number can be found in it
    """

    try:
        value = float('.'.join(re.findall('(?i)(\d+)(?:[.,](\d+))?', size_dim)[0]))
    except TypeError:
        # non-string input (e.g. already numeric) is passed through untouched
        return size_dim
    except IndexError:
        # no digits found in the string
        return None
    try:
        # scale by unit: b=1024**0, k=1024**1, m, g, t; missing unit means bytes
        value *= 1024 ** ['b', 'k', 'm', 'g', 't'].index(re.findall('(t|g|m|k)[i]?b', size_dim.lower())[0])
    except IndexError:
        pass
    return long(math.ceil(value))
|
2015-09-18 00:06:34 +00:00
|
|
|
|
2014-03-10 05:18:05 +00:00
|
|
|
|
2015-09-18 00:06:34 +00:00
|
|
|
class NZBProvider(object, GenericProvider):
    """Base class for NZB (usenet) providers."""

    def __init__(self, name, supports_backlog=True, anime_only=False):
        GenericProvider.__init__(self, name, supports_backlog, anime_only)

        self.providerType = GenericProvider.NZB

    def image_name(self):
        # nzb providers share the generic 'newznab' fallback image
        return GenericProvider.image_name(self, 'newznab')

    def maybe_apikey(self):
        """
        Return the configured api key for providers that need auth.

        :return: key string when set, None when auth is needed but the key
                 is empty, or False when the provider needs no auth
        """
        if getattr(self, 'needs_auth', None):
            return (getattr(self, 'key', '') and self.key) or (getattr(self, 'api_key', '') and self.api_key) or None
        return False

    def _check_auth(self, is_required=None):
        """
        Validate provider credentials, raising AuthException on an empty key.
        """
        has_key = self.maybe_apikey()
        if has_key:
            return has_key
        if None is has_key:
            raise AuthException('%s for %s is empty in Media Providers/Options'
                                % ('API key' + ('', ' and/or Username')[hasattr(self, 'username')], self.name))

        return GenericProvider._check_auth(self)

    def find_propers(self, search_date=None, shows=None, anime=None, **kwargs):
        """
        Search the provider for proper/repack (and anime re-release) results.

        Combines cached propers with fresh searches for each proper term,
        de-duplicated by url and filtered to items newer than search_date.

        :param search_date: datetime lower bound for results
        :param shows: truthy to search '.proper.'/'.repack.' terms
        :param anime: truthy to search anime version terms (v1..v5)
        :return: list of classes.Proper instances
        """
        cache_results = self.cache.listPropers(search_date)
        results = [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time']), self.show) for x in
                   cache_results]

        index = 0
        # nzbs_org alternates between its default and 't=search' queries
        alt_search = ('nzbs_org' == self.get_id())
        do_search_alt = False

        # NOTE(review): proper_check is only bound when shows or anime is
        # truthy; a call with neither would raise NameError in the loop below
        search_terms = []
        regex = []
        if shows:
            search_terms += ['.proper.', '.repack.']
            regex += ['proper|repack']
            proper_check = re.compile(r'(?i)(\b%s\b)' % '|'.join(regex))
        if anime:
            terms = 'v1|v2|v3|v4|v5'
            search_terms += [terms]
            regex += [terms]
            proper_check = re.compile(r'(?i)(%s)' % '|'.join(regex))

        urls = []
        while index < len(search_terms):
            search_params = {'q': search_terms[index], 'maxage': sickbeard.BACKLOG_DAYS + 2}
            if alt_search:

                # run each term twice: once default, once with t=search
                if do_search_alt:
                    search_params['t'] = 'search'
                    index += 1

                do_search_alt = not do_search_alt

            else:
                index += 1

            for item in self._search_provider({'Propers': [search_params]}):

                (title, url) = self._title_and_url(item)

                # skip non-proper titles and urls already seen
                if not proper_check.search(title) or url in urls:
                    continue
                urls.append(url)

                if 'published_parsed' in item and item['published_parsed']:
                    result_date = item.published_parsed
                    if result_date:
                        result_date = datetime.datetime(*result_date[0:6])
                else:
                    logger.log(u'Unable to figure out the date for entry %s, skipping it' % title)
                    continue

                if not search_date or search_date < result_date:
                    search_result = classes.Proper(title, url, result_date, self.show)
                    results.append(search_result)

            # be gentle with the provider between requests
            time.sleep(0.5)

        return results

    def cache_data(self, *args, **kwargs):
        """Run an unfiltered provider search used to populate the cache."""
        search_params = {'Cache': [{}]}
        return self._search_provider(search_params=search_params, **kwargs)
|
2015-09-18 00:06:34 +00:00
|
|
|
|
2014-07-15 02:00:53 +00:00
|
|
|
|
2015-09-18 00:06:34 +00:00
|
|
|
class TorrentProvider(object, GenericProvider):
|
2015-05-27 00:00:01 +00:00
|
|
|
|
Change validate and improve specific Torrent provider connections, IPT, KAT, SCC, TPB, TB, TD, TT.
Change refactor cache for torrent providers to reduce code.
Change improve search category selection BMTV, FSH, FF, TB.
Change identify more SD release qualities.
Change update SpeedCD, MoreThan, TVChaosuk.
Add torrent provider HD4Free.
Remove torrent provider BitSoup.
Change only create threads for providers needing a recent search instead of for all enabled.
Add 4489 as experimental value to "Recent search frequency" to use provider freqs instead of fixed width for all.
Fix searching nzb season packs.
Change remove some logging cruft.
2016-03-24 18:24:14 +00:00
|
|
|
def __init__(self, name, supports_backlog=True, anime_only=False, cache_update_freq=None):
    """
    :param name: provider display name
    :param supports_backlog: whether the provider can serve backlog searches
    :param anime_only: whether the provider only serves anime content
    :param cache_update_freq: optional override for the cache recent-search frequency
    """
    GenericProvider.__init__(self, name, supports_backlog, anime_only)

    self.providerType = GenericProvider.TORRENT

    self._seed_ratio = None   # provider-specific seed ratio, None = unset
    self.seed_time = None     # provider-specific minimum seed time
    self._url = None          # validated home url, lazily resolved (see url property)
    self.urls = {}            # concrete endpoint urls built from the home url
    # wire this provider's cache-search implementation into its cache object
    self.cache._cache_data = self._cache_data
    if cache_update_freq:
        self.cache.update_freq = cache_update_freq
|
|
|
|
|
|
|
|
@property
def url(self):
    # (re)resolve the provider home url when none is cached yet, or when url
    # templates exist but no concrete urls have been built from them
    if None is self._url or (hasattr(self, 'url_tmpl') and not self.urls):
        self._url = self._valid_home(False)
        self._valid_url()
    return self._url
|
|
|
|
|
|
|
|
@url.setter
def url(self, value=None):
    # allow callers to set or reset the cached home url directly
    self._url = value
|
|
|
|
|
|
|
|
def _valid_url(self):
|
|
|
|
return True
|
2015-05-27 00:00:01 +00:00
|
|
|
|
2015-07-13 09:39:20 +00:00
|
|
|
def image_name(self):
    # torrent providers share the generic 'torrent' fallback image
    return GenericProvider.image_name(self, 'torrent')
|
2015-05-27 00:00:01 +00:00
|
|
|
|
2015-07-13 09:39:20 +00:00
|
|
|
def seed_ratio(self):
    """Return the seed ratio configured for this torrent provider."""
    return self._seed_ratio
|
|
|
|
|
2015-09-18 00:06:34 +00:00
|
|
|
@staticmethod
|
|
|
|
def _sort_seeders(mode, items):
|
2016-08-26 23:36:01 +00:00
|
|
|
""" legacy function used by a custom provider, do not remove """
|
2015-09-18 00:06:34 +00:00
|
|
|
mode in ['Season', 'Episode'] and items[mode].sort(key=lambda tup: tup[2], reverse=True)
|
|
|
|
|
2016-08-26 23:36:01 +00:00
|
|
|
@staticmethod
|
|
|
|
def _sort_seeding(mode, items):
|
|
|
|
|
|
|
|
if mode in ['Season', 'Episode']:
|
|
|
|
return sorted(set(items), key=lambda tup: tup[2], reverse=True)
|
|
|
|
return items
|
|
|
|
|
2015-09-18 00:06:34 +00:00
|
|
|
def _peers_fail(self, mode, seeders=0, leechers=0):
|
|
|
|
|
|
|
|
return 'Cache' != mode and (seeders < getattr(self, 'minseed', 0) or leechers < getattr(self, 'minleech', 0))
|
|
|
|
|
2015-07-13 09:39:20 +00:00
|
|
|
def get_quality(self, item, anime=False):
    """
    Determine the release quality of a search item from its name.

    Accepts the three item shapes providers produce: a (name, ...) tuple,
    a feed dict, or an object with a .title attribute.

    :param item: search result item
    :param anime: whether to apply anime quality parsing rules
    :return: a Quality value
    """
    if isinstance(item, tuple):
        name = item[0]
    elif isinstance(item, dict):
        name, url = self._title_and_url(item)
    else:
        name = item.title
    return Quality.sceneQuality(name, anime)
|
|
|
|
|
2015-06-19 23:34:56 +00:00
|
|
|
@staticmethod
def _reverse_quality(quality):
    """
    Map a Quality constant back to a representative release-name fragment.

    :param quality: a Quality value
    :return: release-name string, or '' for unmapped qualities
    """
    quality_names = {
        Quality.SDTV: 'HDTV x264',
        Quality.SDDVD: 'DVDRIP',
        Quality.HDTV: '720p HDTV x264',
        Quality.FULLHDTV: '1080p HDTV x264',
        Quality.RAWHDTV: '1080i HDTV mpeg2',
        Quality.HDWEBDL: '720p WEB-DL h264',
        Quality.FULLHDWEBDL: '1080p WEB-DL h264',
        Quality.HDBLURAY: '720p Bluray x264',
        Quality.FULLHDBLURAY: '1080p Bluray x264',
    }
    return quality_names.get(quality, '')
|
|
|
|
|
2015-09-18 00:06:34 +00:00
|
|
|
def _season_strings(self, ep_obj, detail_only=False, scene=True, prefix='', **kwargs):
    """
    Build season search parameter dicts for a torrent provider.

    :param ep_obj: episode object representing the season to search
    :param detail_only: also emit a 'Season_only' detail list for regular shows
    :param scene: whether to sanitize show names to scene form
    :param prefix: string (or list) inserted before each search detail
    :param kwargs: optional 'sp_detail' callable to customise the season token
    :return: list containing one search parameter dict
    """

    if not ep_obj:
        return []

    show = ep_obj.show
    ep_dict = self._ep_dict(ep_obj)
    # pick the season token: air year for dated/sports shows, absolute number
    # for anime, a caller-supplied formatter, or the default 'SNN' form
    sp_detail = (show.air_by_date or show.is_sports) and str(ep_obj.airdate).split('-')[0] or \
        (show.is_anime and ep_obj.scene_absolute_number or
         ('sp_detail' in kwargs.keys() and kwargs['sp_detail'](ep_dict)) or 'S%(seasonnumber)02d' % ep_dict)
    sp_detail = ([sp_detail], sp_detail)[isinstance(sp_detail, list)]
    detail = ({}, {'Season_only': sp_detail})[detail_only and not self.show.is_sports and not self.show.is_anime]
    return [dict({'Season': self._build_search_strings(sp_detail, scene, prefix)}.items() + detail.items())]
|
2015-06-19 23:34:56 +00:00
|
|
|
|
2015-09-18 00:06:34 +00:00
|
|
|
def _episode_strings(self, ep_obj, detail_only=False, scene=True, prefix='', sep_date=' ', date_or=False, **kwargs):
    """
    Build episode search parameter dicts for a torrent provider.

    :param ep_obj: episode object to search for
    :param detail_only: also emit an 'Episode_only' detail list for regular shows
    :param scene: whether to sanitize show names to scene form
    :param prefix: string (or list) inserted before each search detail
    :param sep_date: separator used in the air date token for dated shows
    :param date_or: sports only - OR the month into each date instead of appending
    :param kwargs: optional 'date_detail'/'ep_detail_anime'/'ep_detail'
                   callables to customise the episode token
    :return: list containing one search parameter dict
    """

    if not ep_obj:
        return []

    show = ep_obj.show
    if show.air_by_date or show.is_sports:
        # dated shows search on the air date; sports also mix in the month
        ep_detail = [str(ep_obj.airdate).replace('-', sep_date)]\
            if 'date_detail' not in kwargs.keys() else kwargs['date_detail'](ep_obj.airdate)
        if show.is_sports:
            month = ep_obj.airdate.strftime('%b')
            ep_detail = (ep_detail + [month], ['%s|%s' % (x, month) for x in ep_detail])[date_or]
    elif show.is_anime:
        ep_detail = ep_obj.scene_absolute_number \
            if 'ep_detail_anime' not in kwargs.keys() else kwargs['ep_detail_anime'](ep_obj.scene_absolute_number)
    else:
        ep_dict = self._ep_dict(ep_obj)
        ep_detail = sickbeard.config.naming_ep_type[2] % ep_dict \
            if 'ep_detail' not in kwargs.keys() else kwargs['ep_detail'](ep_dict)
        # shows with absolute numbered scene exceptions also search on the
        # bare episode number
        if sickbeard.scene_exceptions.has_abs_episodes(ep_obj):
            ep_detail = ([ep_detail], ep_detail)[isinstance(ep_detail, list)] + ['%d' % ep_dict['episodenumber']]
    ep_detail = ([ep_detail], ep_detail)[isinstance(ep_detail, list)]
    detail = ({}, {'Episode_only': ep_detail})[detail_only and not show.is_sports and not show.is_anime]
    return [dict({'Episode': self._build_search_strings(ep_detail, scene, prefix)}.items() + detail.items())]
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def _ep_dict(ep_obj):
|
|
|
|
season, episode = ((ep_obj.season, ep_obj.episode),
|
|
|
|
(ep_obj.scene_season, ep_obj.scene_episode))[bool(ep_obj.show.is_scene)]
|
|
|
|
return {'seasonnumber': season, 'episodenumber': episode}
|
|
|
|
|
|
|
|
def _build_search_strings(self, ep_detail, process_name=True, prefix=''):
    """
    Build a list of search strings for querying a provider
    :param ep_detail: String of episode detail or List of episode details
    :param process_name: Bool Whether to call sanitizeSceneName() on show name
    :param prefix: String to insert to search strings
    :return: List of search string parameters
    """
    ep_detail = ([ep_detail], ep_detail)[isinstance(ep_detail, list)]
    prefix = ([prefix], prefix)[isinstance(prefix, list)]

    search_params = []
    # collapse runs of repeated separators ('..' or doubled spaces) to one
    crop = re.compile(r'([.\s])(?:\1)+')
    for name in set(allPossibleShowNames(self.show)):
        if process_name:
            name = helpers.sanitizeSceneName(name)
        for detail in ep_detail:
            search_params += [crop.sub(r'\1', '%s %s%s' % (name, x, detail)) for x in prefix]
    return search_params
|
|
|
|
|
Change validate and improve specific Torrent provider connections, IPT, KAT, SCC, TPB, TB, TD, TT.
Change refactor cache for torrent providers to reduce code.
Change improve search category selection BMTV, FSH, FF, TB.
Change identify more SD release qualities.
Change update SpeedCD, MoreThan, TVChaosuk.
Add torrent provider HD4Free.
Remove torrent provider BitSoup.
Change only create threads for providers needing a recent search instead of for all enabled.
Add 4489 as experimental value to "Recent search frequency" to use provider freqs instead of fixed width for all.
Fix searching nzb season packs.
Change remove some logging cruft.
2016-03-24 18:24:14 +00:00
|
|
|
@staticmethod
|
|
|
|
def _has_signature(data=None):
|
|
|
|
return data and re.search(r'(?sim)<input[^<]+name="password"', data) and \
|
|
|
|
re.search(r'(?sim)<input[^<]+name="username"', data)
|
|
|
|
|
2016-09-24 11:23:22 +00:00
|
|
|
def _valid_home(self, attempt_fetch=True):
    """
    Test known mirror urls for this provider and return the first that serves a login page.

    Results are cached in sickbeard.PROVIDER_HOMES for one hour; a total failure is cached
    as 'site down' for five minutes to avoid hammering an unreachable site.

    :param attempt_fetch: Bool, when True the failure log message includes remedial suggestions
    :return: signature verified home url else None if validation fail
    """
    # a provider with a single fixed url needs no mirror validation
    url_base = getattr(self, 'url_base', None)
    if url_base:
        return url_base

    url_list = getattr(self, 'url_home', None)
    # NOTE(review): `and` binds tighter than `or`, so when url_list is None/empty and
    # url_edit is unset, max() iterates over None and raises TypeError — confirm intended
    if not url_list and getattr(self, 'url_edit', None) or 10 > max([len(x) for x in url_list]):
        return None

    # normalise every candidate to a single trailing slash
    url_list = ['%s/' % x.rstrip('/') for x in url_list]
    last_url, expire = sickbeard.PROVIDER_HOMES.get(self.get_id(), ('', None))
    if 'site down' == last_url:
        # site previously marked down; honour the back-off window (or stay quiet when disabled)
        if expire and (expire > int(time.time())) or not self.enabled:
            return None
    elif last_url:
        last_url = last_url.replace('getrss.php', '/')  # correct develop typo after a network outage (0.11>0.12)
        # move the most recently validated url to the front of the candidate list
        last_url in url_list and url_list.remove(last_url)
        url_list.insert(0, last_url)

    if not self.enabled:
        return last_url

    for cur_url in url_list:
        if not self.is_valid_mod(cur_url):
            return None

        # trust a cached, unexpired url without refetching; otherwise fetch the page
        # and require the login form signature to be present
        if 10 < len(cur_url) and ((expire and (expire > int(time.time()))) or
                                  self._has_signature(helpers.getURL(cur_url, session=self.session))):

            # expand the provider's url templates against the validated home url
            for k, v in getattr(self, 'url_tmpl', {}).items():
                self.urls[k] = v % {'home': cur_url, 'vars': getattr(self, 'url_vars', {}).get(k, '')}

            # persist the validated url for an hour when it changed or the cache expired
            if last_url != cur_url or (expire and not (expire > int(time.time()))):
                sickbeard.PROVIDER_HOMES[self.get_id()] = (cur_url, int(time.time()) + (60*60))
                sickbeard.save_config()
            return cur_url

    # no mirror produced a recognisable page; log and mark the site down for 5 minutes
    logger.log('Failed to identify a "%s" page with %s %s (local network issue, site down, or ISP blocked) ' %
               (self.name, len(url_list), ('URL', 'different URLs')[1 < len(url_list)]) +
               (attempt_fetch and ('Suggest; 1) Disable "%s" 2) Use a proxy/VPN' % self.get_id()) or ''),
               (logger.WARNING, logger.ERROR)[self.enabled])
    self.urls = {}
    sickbeard.PROVIDER_HOMES[self.get_id()] = ('site down', int(time.time()) + (5 * 60))
    sickbeard.save_config()
    return None
|
|
|
|
|
|
|
|
def is_valid_mod(self, url):
    """
    Integrity check for specific provider modules, selected by domain checksum.

    :param url: Home url of the provider being validated
    :return: True when no check applies or the module file matches a known checksum, else False
    """
    # s is an offset applied to crc32 results so the literals below are not raw crc values
    parsed, s, is_valid = urlparse.urlparse(url), 70000700, True
    # checksum of the second-level domain label decides whether this module needs verifying
    if 2012691328 == s + zlib.crc32(('.%s' % (parsed.netloc or parsed.path)).split('.')[-2]):
        is_valid = False
        # locate this provider's own source file on disk
        file_name = '%s.py' % os.path.join(sickbeard.PROG_DIR, *self.__module__.split('.'))
        if ek.ek(os.path.isfile, file_name):
            with open(file_name, 'rb') as file_hd:
                # module content must hash to one of the known-good checksums
                is_valid = s + zlib.crc32(file_hd.read()) in (1661931498, 472149389)
    return is_valid
|
|
|
|
|
2015-09-18 00:06:34 +00:00
|
|
|
def _authorised(self, logged_in=None, post_params=None, failed_msg=None, url=None, timeout=30):
    """
    Log in to the provider site, scraping form fields from the login page when needed.

    :param logged_in: Callable taking a response text, truthy when logged in; defaults to a
        check that all expected session cookies are present
    :param post_params: Dict of form fields to post. Optional key 'form_tmpl' (True or a string)
        selects a specific <form> on the login page to scrape fields from
    :param failed_msg: Callable taking a response text and returning a failure message template
        ('%s' is substituted with the provider name); defaults to pattern-matched messages
    :param url: Login action url, or a list of urls each fetched beforehand with responses
        discarded (presumably to prime session state — confirm against callers)
    :param timeout: Seconds allowed for the login POST
    :return: True when authenticated, otherwise False
    """

    # matches responses warning that only a few ([1-3]) login attempts remain
    maxed_out = (lambda y: re.search(r'(?i)[1-3]((<[^>]+>)|\W)*' +
                                     '(attempts|tries|remain)[\W\w]{,40}?(remain|left|attempt)', y))
    # substitute defaults for any callables the caller did not supply
    logged_in, failed_msg = [None is not a and a or b for (a, b) in (
        (logged_in, (lambda y=None: self.has_all_cookies())),
        (failed_msg, (lambda y='': maxed_out(y) and u'Urgent abort, running low on login attempts. ' +
                      u'Password flushed to prevent service disruption to %s.' or
                      (re.search(r'(?i)(username|password)((<[^>]+>)|\W)*' +
                                 '(or|and|/|\s)((<[^>]+>)|\W)*(password|incorrect)', y) and
                       u'Invalid username or password for %s. Check settings' or
                       u'Failed to authenticate or parse a response from %s, abort provider')))
    )]

    # already logged in (and the urls map, when present, is populated)
    if logged_in() and (not hasattr(self, 'urls') or bool(len(getattr(self, 'urls')))):
        return True

    if not self._valid_home():
        return False

    if hasattr(self, 'digest'):
        # cookie digest auth: normalise the user-pasted cookie string, then validate it
        self.cookies = re.sub(r'(?i)([\s\']+|cookie\s*:)', '', self.digest)
        success, msg = self._check_cookie()
        if not success:
            self.cookies = None
            logger.log(u'%s: [%s]' % (msg, self.cookies), logger.WARNING)
            return False
    elif not self._check_auth():
        return False

    # fetch any pre-login urls; responses are discarded, only side effects are wanted
    if isinstance(url, type([])):
        for i in range(0, len(url)):
            helpers.getURL(url.pop(), session=self.session)

    # NOTE(review): userfield is assigned but never read — candidate for removal
    passfield, userfield = None, None
    if not url:
        if hasattr(self, 'urls'):
            url = self.urls.get('login_action')
            if url:
                # scrape the login page to discover the form action url and its input fields
                response = helpers.getURL(url, session=self.session)
                if None is response:
                    return False
                try:
                    post_params = isinstance(post_params, type({})) and post_params or {}
                    form = 'form_tmpl' in post_params and post_params.pop('form_tmpl')
                    if form:
                        # narrow scraping to the wanted <form> (True selects the 'login' form)
                        form = re.findall(
                            '(?is)(<form[^>]+%s.*?</form>)' % (True is form and 'login' or form), response)
                        response = form and form[0] or response

                    # resolve the form action into an absolute login url
                    action = re.findall('<form[^>]+action=[\'"]([^\'"]*)', response)[0]
                    url = action if action.startswith('http') else \
                        url if not action else \
                        (url + action) if action.startswith('?') else \
                        (self.urls.get('login_base') or self.urls['config_provider_home_uri']) + action.lstrip('/')

                    # collect named <input> fields; default-in every field that is not the
                    # username/password pair so site tokens are posted back unchanged
                    tags = re.findall(r'(?is)(<input[^>]*?name=[\'"][^\'"]+[^>]*)', response)
                    attrs = [[(re.findall(r'(?is)%s=[\'"]([^\'"]+)' % attr, x) or [''])[0]
                              for attr in ['type', 'name', 'value']] for x in tags]
                    for itype, name, value in attrs:
                        if 'password' in [itype, name]:
                            passfield = name
                        if name not in ('username', 'password') and 'password' != itype:
                            post_params.setdefault(name, value)
                except KeyError:
                    # expected form markup absent; fall back to the base implementation
                    return super(TorrentProvider, self)._authorised()
            else:
                url = self.urls.get('login')
        if not url:
            return super(TorrentProvider, self)._authorised()

    if hasattr(self, 'username') and hasattr(self, 'password'):
        if not post_params:
            post_params = dict(username=self.username, password=self.password)
        elif isinstance(post_params, type({})):
            # inject credentials unless the caller already supplied them as values;
            # the password goes into the scraped password field name when one was found
            if self.username not in post_params.values():
                post_params['username'] = self.username
            if self.password not in post_params.values():
                post_params[(passfield, 'password')[not passfield]] = self.password

    response = helpers.getURL(url, post_data=post_params, session=self.session, timeout=timeout)
    if response:
        if logged_in(response):
            return True

        # flush the stored password when the site threatens an imminent lockout
        if maxed_out(response) and hasattr(self, 'password'):
            self.password = None
            sickbeard.save_config()
        logger.log(failed_msg(response) % self.name, logger.ERROR)

    return False
|
|
|
|
|
2016-08-26 23:36:01 +00:00
|
|
|
def _check_auth(self, is_required=False):
|
2015-06-19 23:34:56 +00:00
|
|
|
|
|
|
|
if hasattr(self, 'username') and hasattr(self, 'password'):
|
|
|
|
if self.username and self.password:
|
|
|
|
return True
|
|
|
|
setting = 'Password or Username'
|
Change validate and improve specific Torrent provider connections, IPT, KAT, SCC, TPB, TB, TD, TT.
Change refactor cache for torrent providers to reduce code.
Change improve search category selection BMTV, FSH, FF, TB.
Change identify more SD release qualities.
Change update SpeedCD, MoreThan, TVChaosuk.
Add torrent provider HD4Free.
Remove torrent provider BitSoup.
Change only create threads for providers needing a recent search instead of for all enabled.
Add 4489 as experimental value to "Recent search frequency" to use provider freqs instead of fixed width for all.
Fix searching nzb season packs.
Change remove some logging cruft.
2016-03-24 18:24:14 +00:00
|
|
|
elif hasattr(self, 'username') and hasattr(self, 'api_key'):
|
|
|
|
if self.username and self.api_key:
|
|
|
|
return True
|
2017-02-17 14:58:04 +00:00
|
|
|
setting = 'Api key or Username'
|
2015-06-19 23:34:56 +00:00
|
|
|
elif hasattr(self, 'username') and hasattr(self, 'passkey'):
|
|
|
|
if self.username and self.passkey:
|
|
|
|
return True
|
|
|
|
setting = 'Passkey or Username'
|
Change validate and improve specific Torrent provider connections, IPT, KAT, SCC, TPB, TB, TD, TT.
Change refactor cache for torrent providers to reduce code.
Change improve search category selection BMTV, FSH, FF, TB.
Change identify more SD release qualities.
Change update SpeedCD, MoreThan, TVChaosuk.
Add torrent provider HD4Free.
Remove torrent provider BitSoup.
Change only create threads for providers needing a recent search instead of for all enabled.
Add 4489 as experimental value to "Recent search frequency" to use provider freqs instead of fixed width for all.
Fix searching nzb season packs.
Change remove some logging cruft.
2016-03-24 18:24:14 +00:00
|
|
|
elif hasattr(self, 'uid') and hasattr(self, 'passkey'):
|
|
|
|
if self.uid and self.passkey:
|
|
|
|
return True
|
|
|
|
setting = 'Passkey or uid'
|
2015-06-19 23:34:56 +00:00
|
|
|
elif hasattr(self, 'api_key'):
|
|
|
|
if self.api_key:
|
|
|
|
return True
|
2017-02-17 14:58:04 +00:00
|
|
|
setting = 'Api key'
|
2015-07-06 11:14:37 +00:00
|
|
|
elif hasattr(self, 'passkey'):
|
|
|
|
if self.passkey:
|
|
|
|
return True
|
|
|
|
setting = 'Passkey'
|
2015-06-19 23:34:56 +00:00
|
|
|
else:
|
2016-08-26 23:36:01 +00:00
|
|
|
return not is_required and GenericProvider._check_auth(self)
|
2015-06-19 23:34:56 +00:00
|
|
|
|
2017-01-02 18:44:35 +00:00
|
|
|
raise AuthException('%s for %s is empty in Media Providers/Options' % (setting, self.name))
|
2015-06-19 23:34:56 +00:00
|
|
|
|
2015-09-18 00:06:34 +00:00
|
|
|
def find_propers(self, **kwargs):
    """
    Search for releases of type PROPER

    :return: list of Proper objects
    """
    terms = getattr(self, 'proper_search_terms', ['proper', 'repack'])
    if not isinstance(terms, list):
        # None means use the combined pattern; otherwise wrap the single term
        terms = ['proper|repack' if None is terms else terms]

    found_items = self._search_provider({'Propers': terms})

    # characters outside this set are stripped before a term becomes a title pattern
    strip_invalid = re.compile(r'(?i)[^a-z1-9|.]+')
    proper_results = []
    for term in terms:
        title_check = re.compile(r'(?i)(?:%s)' % strip_invalid.sub('', term))
        for entry in found_items:
            title, url = self._title_and_url(entry)
            if not title_check.search(title):
                continue
            proper_results.append(classes.Proper(title, url, datetime.datetime.today(),
                                                 helpers.findCertainShow(sickbeard.showList, None)))
    return proper_results
|
2015-06-19 23:34:56 +00:00
|
|
|
|
|
|
|
@staticmethod
|
2016-11-01 18:13:51 +00:00
|
|
|
def _has_no_results(html):
|
2016-09-30 22:20:28 +00:00
|
|
|
return re.search(r'(?i)<(?:b|div|h\d|p|span|strong|td)[^>]*>\s*(?:' +
|
2016-08-26 23:36:01 +00:00
|
|
|
'your\ssearch.*?did\snot\smatch|' +
|
|
|
|
'(?:nothing|0</b>\s+torrents)\sfound|' +
|
2016-11-01 18:13:51 +00:00
|
|
|
'(?:sorry,\s)?no\s(?:results|torrents)\s(found|here|match)|' +
|
|
|
|
'no\s(?:match|results|torrents)!*|'
|
|
|
|
'[^<]*?there\sare\sno\sresults|' +
|
|
|
|
'[^<]*?no\shits\.\sTry\sadding' +
|
|
|
|
')', html)
|
2015-06-19 23:34:56 +00:00
|
|
|
|
Change validate and improve specific Torrent provider connections, IPT, KAT, SCC, TPB, TB, TD, TT.
Change refactor cache for torrent providers to reduce code.
Change improve search category selection BMTV, FSH, FF, TB.
Change identify more SD release qualities.
Change update SpeedCD, MoreThan, TVChaosuk.
Add torrent provider HD4Free.
Remove torrent provider BitSoup.
Change only create threads for providers needing a recent search instead of for all enabled.
Add 4489 as experimental value to "Recent search frequency" to use provider freqs instead of fixed width for all.
Fix searching nzb season packs.
Change remove some logging cruft.
2016-03-24 18:24:14 +00:00
|
|
|
def _cache_data(self):
|
2015-06-19 23:34:56 +00:00
|
|
|
|
Change validate and improve specific Torrent provider connections, IPT, KAT, SCC, TPB, TB, TD, TT.
Change refactor cache for torrent providers to reduce code.
Change improve search category selection BMTV, FSH, FF, TB.
Change identify more SD release qualities.
Change update SpeedCD, MoreThan, TVChaosuk.
Add torrent provider HD4Free.
Remove torrent provider BitSoup.
Change only create threads for providers needing a recent search instead of for all enabled.
Add 4489 as experimental value to "Recent search frequency" to use provider freqs instead of fixed width for all.
Fix searching nzb season packs.
Change remove some logging cruft.
2016-03-24 18:24:14 +00:00
|
|
|
return self._search_provider({'Cache': ['']})
|