Change various providers.

Change accept theTVDB Url in addshow search field.
Change NZBs.org usenet provider add config scene only/nuked.
Change SpeedCD torrent provider improve copy/paste cookie support.
Change BTScene, LimeTorrents, SkyTorrents, Torlock, Torrentz, TPB torrent providers.
Add AlphaReign, EZTV torrent providers.
This commit is contained in:
JackDandy 2018-04-13 12:11:21 +01:00
parent 0bf9165a94
commit 12363d01d5
17 changed files with 456 additions and 101 deletions

View file

@ -1,4 +1,12 @@
### 0.15.9 (2018-04-07 20:45:00 UTC)
### 0.15.10 (2018-04-13 12:10:00 UTC)
* Change accept theTVDB Url in addshow search field
* Change NZBs.org usenet provider add config scene only/nuked
* Change SpeedCD torrent provider improve copy/paste cookie support
* Change BTScene, LimeTorrents, SkyTorrents, Torlock, Torrentz, TPB torrent providers
* Add AlphaReign, EZTV torrent providers
### 0.15.9 (2018-04-07 20:45:00 UTC)
* Fix metadata show not found
* Change when adding a show, display show title instead of '[]'

Binary file not shown.

After

Width:  |  Height:  |  Size: 500 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 409 B

View file

@ -251,6 +251,23 @@
</label>
</div>
#end if
#if $hasattr($cur_newznab_provider, 'may_filter'):
<div class="field-pair">
<span class="component-title">Allow releases that are</span>
<span class="component-desc">
#for $cur_fval, $filter in $cur_newznab_provider.may_filter.iteritems()
#set $cur_fname, $cur_is_default = $filter[0], $filter[1]
#set $filter_id = '%s_filter_%s' % ($cur_newznab_provider.get_id(), $cur_fval)
<label class="space-right">
<input type="checkbox" name="$filter_id" id="$filter_id" #echo ('', $html_checked)[$cur_fval in $cur_newznab_provider.filter]#/>
<span>$cur_fname</span>
</label>
#end for
<span>(see site for meaning)</span>
<p>nothing selected allows everything (no filter, default)</p>
</span>
</div>
#end if
#if $hasattr($cur_newznab_provider, 'search_mode') and $cur_newznab_provider.supports_backlog:
<div class="field-pair">
<span class="component-title">Episode search mode</span>

View file

@ -73,7 +73,7 @@
</style>
<input type="hidden" id="providedIndexer" value="$provided_indexer">
#end if
<input type="text" id="nameToSearch" value="$default_show_name" placeholder="Enter a show name, TVDB ID, IMDb Url, or IMDb ID" class="form-control form-control-inline input-sm input350">
<input type="text" id="nameToSearch" value="$default_show_name" placeholder="Enter a show name, TVDB ID/Url, IMDb ID/Url" class="form-control form-control-inline input-sm input350">
&nbsp;
<span style="float:right">
<select name="indexerLang" id="indexerLangSelect" class="form-control form-control-inline input-sm">

View file

@ -26,7 +26,7 @@ from sickbeard import logger, encodingKludge as ek
# usenet
from . import newznab, omgwtfnzbs
# torrent
from . import alpharatio, beyondhd, bithdtv, bitmetv, blutopia, btn, btscene, dh, ettv, \
from . import alpharatio, alphareign, beyondhd, bithdtv, bitmetv, blutopia, btn, btscene, dh, ettv, eztv, \
fano, filelist, funfile, grabtheinfo, hdbits, hdspace, hdtorrents, \
iptorrents, limetorrents, magnetdl, morethan, nebulance, ncore, nyaa, pisexy, potuk, pretome, privatehd, ptf, \
rarbg, revtt, scenehd, scenetime, shazbat, showrss, skytorrents, speedcd, \
@ -42,6 +42,7 @@ except (StandardError, Exception):
__all__ = ['omgwtfnzbs',
'alpharatio',
'alphareign',
'anizb',
'beyondhd',
'bithdtv',
@ -52,6 +53,7 @@ __all__ = ['omgwtfnzbs',
'custom01',
'dh',
'ettv',
'eztv',
'fano',
'filelist',
'funfile',

View file

@ -0,0 +1,108 @@
# coding=utf-8
#
# Author: SickGear
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
import re
import traceback
from . import generic
from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode
class AlphaReignProvider(generic.TorrentProvider):
    # Torrent provider that scrapes search results from https://alphareign.lol/
    # (results are <div class="result"> rows inside <div id="results">, with
    # seeders/leechers/size exposed as data-* attributes).
    # NOTE(review): Python 2 era code (`unicode`, `StandardError`, tuple without
    # parentheses inside the comprehension) — consistent with the codebase at
    # this commit; do not modernise in isolation.

    def __init__(self):
        generic.TorrentProvider.__init__(self, 'AlphaReign')

        self.url_base = 'https://alphareign.lol/'

        # 'search' template takes (query, page); non-cache modes leave page empty,
        # cache mode passes ',<page>' strings that are split into the two slots
        self.urls = {'config_provider_home_uri': self.url_base,
                     'search': self.url_base + '?category=show&sort=created&query=%s&page=%s'}

        # user-configurable minimum peer counts (None = no filtering)
        self.minseed, self.minleech = 2 * [None]

    def _search_provider(self, search_params, **kwargs):
        """Search the site for each mode and return result tuples.

        :param search_params: dict mapping mode name ('Cache'/'Season'/'Episode'/
            'Propers') to a list of search strings
        :return: list of (title, download_url, seeders, size_bytes) tuples,
            seed-sorted by the base class helper
        """
        results = []

        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

        # case-insensitive matcher used to pick out magnet download links
        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'get': 'magnet:'}.items())
        for mode in search_params.keys():
            for search_string in search_params[mode]:
                # normalise unicode search terms to ascii for the query string
                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
                if 'Cache' == mode:
                    # cache strings are ',<page>' -> (query='', page=n)
                    search_url = self.urls['search'] % tuple(search_string.split(','))
                else:
                    search_url = self.urls['search'] % (search_string, '')

                html = self.get_url(search_url)
                if self.should_skip():
                    return results

                # remember count so only newly parsed items are logged below
                cnt = len(items[mode])
                try:
                    # the <h3>Result ...</h3> marker indicates an empty result page
                    if not html or self._has_no_results(html) or re.search('<h3>Result.*?&quot;.*?&quot;</h3>', html):
                        raise generic.HaltParseException

                    with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
                        torrent_table = soup.find(id='results')
                        torrent_rows = [] if not torrent_table else torrent_table.find_all('div', class_='result')

                        for tr in torrent_rows:
                            try:
                                # stats come from data-seeders/data-leechers/data-size attributes
                                seeders, leechers, size = [tryInt(n, n) for n in [
                                    tr['data-%s' % x].strip() for x in 'seeders', 'leechers', 'size']]
                                if self._peers_fail(mode, seeders, leechers):
                                    continue

                                title = tr['data-name'].strip()
                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                            except (AttributeError, TypeError, ValueError):
                                # malformed row; skip it rather than abort the page
                                continue

                            if title and download_url:
                                items[mode].append((title, download_url, seeders, self._bytesizer(size)))

                except generic.HaltParseException:
                    # expected control flow for "no results" pages; not an error
                    pass
                except (StandardError, Exception):
                    logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

                self._log_search(mode, len(items[mode]) - cnt, search_url)

            results = self._sort_seeding(mode, results + items[mode])

        return results

    def _season_strings(self, ep_obj, **kwargs):
        # scene=False: this site indexes by actual (not scene) numbering
        return generic.TorrentProvider._season_strings(self, ep_obj, scene=False, **kwargs)

    def _episode_strings(self, ep_obj, **kwargs):
        # scene=False: this site indexes by actual (not scene) numbering
        return generic.TorrentProvider._episode_strings(self, ep_obj, scene=False, **kwargs)

    def _cache_data(self, **kwargs):
        # fetch the five most recent pages of the show category for the cache
        return self._search_provider({'Cache': [',', ',2', ',3', ',4', ',5']})
# module-level singleton discovered by sickbeard.providers via __all__
provider = AlphaReignProvider()

View file

@ -32,12 +32,38 @@ class BTSceneProvider(generic.TorrentProvider):
def __init__(self):
generic.TorrentProvider.__init__(self, 'BTScene')
self.url_home = ['http://btsone.cc/', 'http://diriri.xyz/'] + \
['https://%s/' % base64.b64decode(x) for x in [''.join(x) for x in [
[re.sub('[L\sT]+', '', x[::-1]) for x in [
'zTRnTY', 'uVT 2Y', '15LSTZ', 's JmLb', 'rTNL2b', 'uQW LZ', '=LLMmd']],
[re.sub('[j\sq]+', '', x[::-1]) for x in [
'zRn qY', 'l52j b', '1j5S M', 'sq Jmb', 'r Nq2b', 'ujQWqZ', 's9jGqb']],
self.url_home = ['https://%s/' % base64.b64decode(x) for x in [''.join(x) for x in [
[re.sub('[O\sx]+', '', x[::-1]) for x in [
'zRnx Y', 'ux V2Y', '15 S Z', 'sJ Omb', 'r N 2b', 'uxQxWZ', '=MOm d']],
[re.sub('[L\sq]+', '', x[::-1]) for x in [
'zLRn Y', 'uVqq2Y', '15SqLZ', 'sqJLmb', 'rN L2b', 'uqLQWZ', '=qgX b']],
[re.sub('[Q\s0]+', '', x[::-1]) for x in [
'zRn Y', 'uQ V2Y', '1Q5QSZ', 'sJ0mQb', 'rQ0N2b', 'uIX QZ', 'ul200d']],
[re.sub('[T\s ]+', '', x[::-1]) for x in [
'zR nTY', 'uTVT2Y', '15 STZ', 'sTTJmb', 'r N2 b', 'uTTIXZ', '=TTM2Y']],
[re.sub('[i\sw]+', '', x[::-1]) for x in [
'zR nY', 'li52ib', '15i SM', 's Jmwb', 'rN2 b', 'uwQW Z', 's9i Gb']],
[re.sub('[X\sV]+', '', x[::-1]) for x in [
'z Rn Y', 'lXV52b', '1 5 SM', 'sJ mXb', 'rN2XVb', 'uVQWVZ', 'mRX3Vd']],
[re.sub('[p\sF]+', '', x[::-1]) for x in [
'zFRFnY', 'l5 F2b', '15SF M', 'sFJmpb', 'rN 2pb', 'upQWpZ', '=MFFXb']],
[re.sub('[Q\sp]+', '', x[::-1]) for x in [
'z RpnY', 'u V2 Y', 'i5QSQZ', 'hBpQXe', 'lN 3Qc', 'vQ 5CZ', '=cpmpc']],
[re.sub('[o\sG]+', '', x[::-1]) for x in [
'zo RnY', 'u GV2Y', 'i 5S Z', 'hGBX e', 'loNG3c', 'lG5CGZ', '== Qod']],
[re.sub('[q\sW]+', '', x[::-1]) for x in [
'zR nqY', 'u V2qY', 'iq5 SZ', 'h BXqe', 'lN3 c', 'i5C WZ', '==gq e']],
[re.sub('[q\sg]+', '', x[::-1]) for x in [
'c gtQnY', 'mbg lN2', 'M 2Y tU', 'vgJHqcu', 'cz5C qe', 'QqgZjFG', '= g=']],
[re.sub('[H\sF]+', '', x[::-1]) for x in [
'2YzFRFnY', '0H5SZHuV', 'WZyFFJ3b', 'p1me 0 5', 'iHHcvJnc', '=cFmc v5']],
[re.sub('[w\si]+', '', x[::-1]) for x in [
'RwnwY', '2 wYz', 'Z u V', 'sii5S', 'RXi Y', 'nL wv', '3i B']],
[re.sub('[k\sh]+', '', x[::-1]) for x in [
'zRnkhY', 'uV 2Y', '65hSkZ', '2Nk Ge', 'phdn L', '=kk=gb']],
[re.sub('[q\sP]+', '', x[::-1]) for x in [
'mPblqN2ctQnY', 'vlWduM2 YPtU', 'nYoRXahZPm L', '15PSZuV2 YzR', 'WYrN 2PbsJmb',
'==wZ y9mL sx']],
]]]
self.url_vars = {'search': '?q=%s&category=series&order=1', 'browse': 'lastdaycat/type/Series/',
'get': 'torrentdownload.php?id=%s'}

142
sickbeard/providers/eztv.py Normal file
View file

@ -0,0 +1,142 @@
# coding=utf-8
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
import base64
import re
import traceback
from . import generic
from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode
class EztvProvider(generic.TorrentProvider):
    # Torrent provider that scrapes the EZTV listing/search pages.
    # Mirror hostnames are base64 strings reversed and salted with junk chars;
    # the re.sub/[::-1]/b64decode dance below reconstitutes them at runtime
    # (light obfuscation to keep plain URLs out of the source).
    # NOTE(review): Python 2 era code (`unicode`, `StandardError`) — consistent
    # with the codebase at this commit; do not modernise in isolation.

    def __init__(self):
        generic.TorrentProvider.__init__(self, 'EZTV')

        self.url_home = ['https://eztv.ag/'] + \
            ['https://%s/' % base64.b64decode(x) for x in [''.join(x) for x in [
                [re.sub('[f\sQ]+', '', x[::-1]) for x in [
                    '0pfQXZ', 'uQEQjd', 'if5fWd', 'j9ffGb', 'kQV2fa', '0fdnQL', '=f=g Z']],
                [re.sub('[x\sg]+', '', x[::-1]) for x in [
                    '0xp XZ', 'uxEjxd', 'i x5Wd', 'jg9Ggb', 'kV2 xa', 'zx1gmL']],
                [re.sub('[R\sK]+', '', x[::-1]) for x in [
                    '0pX KZ', 'i5i d', 'hBXKRe', 'lN3K c', 'v5KCKZ', '=cR mc']],
                [re.sub('[f\sR]+', '', x[::-1]) for x in [
                    '0pfX Z', 'iff5id', 'hBXRfe', 'l N3 c', 'l5ffCZ', '==RQ d']],
                [re.sub('[S\sx]+', '', x[::-1]) for x in [
                    '0 p XZ', 'iS5i d', 'h BXe', 'lNSx3c', 'i5SC Z', '==gxSe']],
                [re.sub('[O\su]+', '', x[::-1]) for x in [
                    '0OpX Z', 'hu1Oid', 'w5yuuZ', '49u mc', 'w N nL', 'lONW Y']],
                [re.sub('[O\sf]+', '', x[::-1]) for x in [
                    'd0pOXfZ', '3bff05i', '5W ZfyJ', 'p1fmOe0', 'cvJn c', 'mcOvf5i', '= Oc']],
                [re.sub('[w\sl]+', '', x[::-1]) for x in [
                    'XwlZ', '0wlp', 'il d', 's l5', 'X wY', 'v lR', 'nl L', '3 B']],
                [re.sub('[M\sQ]+', '', x[::-1]) for x in [
                    'pQMXZ', 'iQd 0', 'e 6M5', '2 NQG', 'dQMnL', 'gMQbp', '= Q=']],
                [re.sub('[o\sS]+', '', x[::-1]) for x in [
                    'h1 id0opXZ', 'u8Wa15y Z', 'lhGdpFSmoZ', 'uVnL2 RnSe', 'ht2oY vxmY', 'nJ3obuwSGb']],
            ]]]

        # 'browse' is the paged recent-releases listing used for cache updates
        self.url_vars = {'search': 'search/%s', 'browse': 'page_%s'}
        # NOTE(review): 'search' template is '%(vars)s' only (no '%(home)s') —
        # looks intentional here but verify against generic.TorrentProvider.urls
        self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'search': '%(vars)s', 'browse': '%(home)s%(vars)s'}

        # user-configurable minimum seeders (None = no filtering)
        self.minseed = None

    @staticmethod
    def _has_signature(data=None):
        # identify a genuine EZTV page (vs. a parked mirror) by its title text
        return data and re.search(r'(?i)(?:EZTV\s[-]\sTV\sTorrents)', data[0:300])

    def _search_provider(self, search_params, **kwargs):
        """Search the site for each mode and return result tuples.

        :param search_params: dict mapping mode name ('Cache'/'Season'/'Episode'/
            'Propers') to a list of search strings (page numbers for 'Cache')
        :return: list of (title, download_url, seeders, size_bytes) tuples,
            seed-sorted by the base class helper
        """
        results = []
        if not self.url:
            # no working mirror resolved; nothing to search
            return results

        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

        # case-insensitive matcher used to pick out magnet download links
        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'get': '^magnet:'}.items())
        for mode in search_params.keys():
            for search_string in search_params[mode]:
                # normalise unicode search terms to ascii for the query string
                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
                # cache mode browses the paged listing; other modes use search
                search_url = self.urls[('search', 'browse')['Cache' == mode]] % search_string

                html = self.get_url(search_url)
                if self.should_skip():
                    return results

                # remember count so only newly parsed items are logged below
                cnt = len(items[mode])
                try:
                    if not html or self._has_no_results(html):
                        raise generic.HaltParseException

                    with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
                        # the last matching table on the page holds the results
                        torrent_table = soup.findAll('table', attrs={'class': ['table', 'forum_header_border']})[-1]
                        torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')
                        # drop decorative/header rows that have fewer than 5 cells,
                        # then re-read the remaining rows from the pruned tree
                        for tr in torrent_rows:
                            if 5 > len(tr.find_all('td')):
                                tr.decompose()
                        torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')

                        if 2 > len(torrent_rows):
                            raise generic.HaltParseException

                        head = None
                        for tr in torrent_rows[1:]:
                            cells = tr.find_all('td')
                            try:
                                # map column names to indices once, from the header row
                                head = head if None is not head else self._header_row(tr)
                                seeders = tryInt(cells[head['seed']].get_text().strip())
                                if self._peers_fail(mode, seeders):
                                    continue

                                title = tr.select('a.epinfo')[0].get_text().strip()
                                size = cells[head['size']].get_text().strip()
                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                            except (AttributeError, TypeError, ValueError, KeyError):
                                # malformed row; skip it rather than abort the page
                                continue

                            if title and download_url:
                                items[mode].append((title, download_url, seeders, self._bytesizer(size)))

                except generic.HaltParseException:
                    # expected control flow for "no results" pages; not an error
                    pass
                except (StandardError, Exception):
                    logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
                self._log_search(mode, len(items[mode]) - cnt, search_url)

            results = self._sort_seeding(mode, results + items[mode])

        return results

    def _season_strings(self, ep_obj, **kwargs):
        # scene=False: this site indexes by actual (not scene) numbering
        return generic.TorrentProvider._season_strings(self, ep_obj, scene=False, **kwargs)

    def _episode_strings(self, ep_obj, **kwargs):
        # scene=False: this site indexes by actual (not scene) numbering
        return generic.TorrentProvider._episode_strings(self, ep_obj, scene=False, **kwargs)

    def _cache_data(self, **kwargs):
        # fetch the first two listing pages for the recent-release cache
        return self._search_provider({'Cache': [0, 1]})
# module-level singleton discovered by sickbeard.providers via __all__
provider = EztvProvider()

View file

@ -34,8 +34,36 @@ class LimeTorrentsProvider(generic.TorrentProvider):
self.url_home = ['https://www.limetorrents.cc/'] + \
['https://%s/' % base64.b64decode(x) for x in [''.join(x) for x in [
[re.sub('[ \sF]+', '', x[::-1]) for x in [
'X ZtlFGb', 'lJnc vR', 'n LzR nb', 'vxmYuF V', 'CFZltF2Y', '==wYF2F5']],
[re.sub('[ \sg]+', '', x[::-1]) for x in [
'XZtlg Gb', 'lJngcv R', 'nLz R nb', 'v xmYu V', 'CZl t2 Y', '==gwY2g5']],
[re.sub('[S\si]+', '', x[::-1]) for x in [
'X SZtlGb', 'lJi ncvR', 'nSSLzRnb', 'vxmSYu V', 'CSZilt2Y', '=S= Aet5']],
[re.sub('[x\s0]+', '', x[::-1]) for x in [
'tlGx b', 'u0ExTZ', 'i5xW d', 'j9 Gxb', 'kV020a', 'zx1m L']],
[re.sub('[Y\so]+', '', x[::-1]) for x in [
'to lGb', 'uoEYTZ', 'i 5WYd', 'jo 9Gb', 'ko V2a', '0 dnL', '==Y gZ']],
[re.sub('[r\sp]+', '', x[::-1]) for x in [
'XZt rlGb', 'lJpncpvR', 'n LzRnb', 'vxmYu V', 'ic ltp2Y', '=4Wa r35']],
[re.sub('[F\so]+', '', x[::-1]) for x in [
'lJncvRoX ZtlGb', 'vxFmYuVnLzoRnb', 'pxo2FYj5iclt2Y', '05W ZyJ3b0 VWb',
'j9G buVFnct5yc', '=o0WYloJHdz5ya']],
[re.sub('[F\sK]+', '', x[::-1]) for x in [
'XKZtlGFb', 'lKJncFvR', 'mLzKKRnb', 's 5WFdy1', 'mLrNF2Fb', '=F 8mZul']],
[re.sub('[r\sS]+', '', x[::-1]) for x in [
'RXZStSlGb', 'nblJ nrcv', 'cvRn LrzR', '6RnSblJ n', '9mScylW b', 'wZyr9mSLy', '=Sr=']],
[re.sub('[1\sy]+', '', x[::-1]) for x in [
'tylyGb', 'v11RXZ', 'lyJ1nc', 'zRnyyb', 'hxy1mL', 'u8G d', '=1c Hc']],
[re.sub('[w\sy]+', '', x[::-1]) for x in [
't wlGb', 'v RXZ', 'lJ nc', 'zRnywb', '4pw nL', 'uY3 wY', 'ul2 yd']],
[re.sub('[f\s0]+', '', x[::-1]) for x in [
'XZtlG0 b', 'lJn fcvR', 'mL0zRn0b', 'zF Gc5fJ', 'mL kV2 c', '= =w0Zy9']],
[re.sub('[f\sy]+', '', x[::-1]) for x in [
'ZtylGyb', 'ncvyRyX', 'RnbylyJ', '5Jm fLz', 'cy zFGc', 'mLk V2', '1fyV']],
[re.sub('[u\sQ]+', '', x[::-1]) for x in [
'ZtlGQub', 'nc vRX', 'R nb lJ', '5 JQmLz', 'czQuFGc', 'muLkVQ2', '6QuJ']],
[re.sub('[p\sk]+', '', x[::-1]) for x in [
'XZtlkGpb', 'lJncvkkR', 'nLkzRnpb', 'vxm Y uV', 'Gbhppt2Y', 'n pJ3buw']],
]]]
self.url_vars = {'search': 'search/tv/%s/', 'browse': 'browse-torrents/TV-shows/'}

View file

@ -46,6 +46,11 @@ except ImportError:
except ImportError:
import xml.etree.ElementTree as etree
try:
from collections import OrderedDict
except ImportError:
from requests.compat import OrderedDict
class NewznabConstants:
SEARCH_TEXT = -100
@ -120,6 +125,12 @@ class NewznabProvider(generic.NZBProvider):
self._last_recent_search = None
self._caps_last_updated = datetime.datetime.fromordinal(1)
self.cache = NewznabCache(self)
# filters
if super(NewznabProvider, self).get_id() in ('nzbs_org',):
self.filter = []
if 'nzbs_org' == super(NewznabProvider, self).get_id():
self.may_filter = OrderedDict([
('so', ('scene only', False)), ('snn', ('scene not nuked', False))])
@property
def cat_ids(self):
@ -711,6 +722,10 @@ class NewznabProvider(generic.NZBProvider):
request_params['t'] = 'search'
request_params.update(params)
if hasattr(self, 'filter'):
if 'nzbs_org' == self.get_id():
request_params['rls'] = ((0, 1)['so' in self.filter], 2)['snn' in self.filter]
# workaround a strange glitch
if sum(ord(i) for i in self.get_id()) in [383] and 5 == 14 - request_params['maxage']:
request_params['maxage'] += 1

View file

@ -29,32 +29,32 @@ class SkytorrentsProvider(generic.TorrentProvider):
def __init__(self):
generic.TorrentProvider.__init__(self, 'Skytorrents', cache_update_freq=6)
generic.TorrentProvider.__init__(self, 'Skytorrents')
self.url_base = 'https://www.skytorrents.in/'
self.url_base = 'https://skytorrents.lol/'
self.urls = {'config_provider_home_uri': self.url_base,
'search': self.url_base + 'search/all/ad/1/%s?l=en-us'}
'search': self.url_base + '?category=show&sort=created&query=%s&page=%s'}
self.minseed, self.minleech = 2 * [None]
self.confirmed = False
def _search_provider(self, search_params, **kwargs):
results = []
if not self._authorised():
return results
items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {
'info': '^/info/', 'get': '^(/file/|magnet:)', 'verified': 'Verified'}.items())
'info': '^torrent/', 'get': '^magnet:'}.items())
for mode in search_params.keys():
for search_string in search_params[mode]:
search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
search_url = self.urls['search'] % search_string
if 'Cache' == mode:
search_url = self.urls['search'] % tuple(search_string.split(','))
else:
search_url = self.urls['search'] % (search_string, '')
html = self.get_url(search_url)
if self.should_skip():
return results
@ -80,9 +80,7 @@ class SkytorrentsProvider(generic.TorrentProvider):
head = head if None is not head else self._header_row(tr)
seeders, leechers, size = [tryInt(n, n) for n in [
cells[head[x]].get_text().strip() for x in 'seed', 'leech', 'size']]
if (self.confirmed and
not (tr.find('img', src=rc['verified']) or tr.find('img', title=rc['verified']))) \
or self._peers_fail(mode, seeders, leechers):
if self._peers_fail(mode, seeders, leechers):
continue
info = tr.find('a', href=rc['info'])
@ -105,5 +103,15 @@ class SkytorrentsProvider(generic.TorrentProvider):
return results
def _season_strings(self, ep_obj, **kwargs):
return generic.TorrentProvider._season_strings(self, ep_obj, scene=False, **kwargs)
def _episode_strings(self, ep_obj, **kwargs):
return generic.TorrentProvider._episode_strings(self, ep_obj, scene=False, **kwargs)
def _cache_data(self, **kwargs):
return self._search_provider({'Cache': ['x264,', 'x264,2', 'x264,3', 'x264,4', 'x264,5']})
provider = SkytorrentsProvider()

View file

@ -17,6 +17,7 @@
import re
import time
from urllib import quote, unquote
from . import generic
from sickbeard.bs4_parser import BS4Parser
@ -41,14 +42,15 @@ class SpeedCDProvider(generic.TorrentProvider):
self.digest, self.freeleech, self.minseed, self.minleech = 4 * [None]
def _authorised(self, **kwargs):
digest = [x[::-1] for x in self.digest[::-1].rpartition('=')]
self.digest = digest[2] + digest[1] + quote(unquote(digest[0]))
return super(SpeedCDProvider, self)._authorised(
logged_in=(lambda y='': all(
[self.session.cookies.get_dict(domain='.speed.cd') and
self.session.cookies.clear('.speed.cd') is None or True] +
['RSS' in y, 'type="password"' not in y, self.has_all_cookies(['speedian'], 'inSpeed_')] +
[(self.session.cookies.get('inSpeed_' + x) or 'sg!no!pw') in self.digest for x in ['speedian']])),
failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings'))
failed_msg=(lambda y=None: u'Invalid cookie details for %s. Perhaps the cookie expired? Check settings'))
def _search_provider(self, search_params, **kwargs):
@ -123,7 +125,9 @@ class SpeedCDProvider(generic.TorrentProvider):
@staticmethod
def ui_string(key):
return 'speedcd_digest' == key and 'use... \'inSpeed_speedian=yy\'' or ''
return 'speedcd_digest' == key and \
'use... \'inSpeed_speedian=yy\' - warning: SpeedCD cookies expire minutes after inactivity, ' \
'so keep SG running. If you get auth failures, grab another browser cookie' or ''
provider = SpeedCDProvider()

View file

@ -35,53 +35,34 @@ from lib.unidecode import unidecode
class ThePirateBayProvider(generic.TorrentProvider):
def __init__(self):
generic.TorrentProvider.__init__(self, 'The Pirate Bay', cache_update_freq=20)
generic.TorrentProvider.__init__(self, 'The Pirate Bay')
self.url_home = ['https://thepiratebay.%s/' % u for u in 'se', 'org'] + \
['https://%s/' % base64.b64decode(x) for x in [''.join(x) for x in [
[re.sub('[t\sG]+', '', x[::-1]) for x in [
'mGGY', '5tGF', 'HGtc', 'vGGJ', 'Htte', 'uG k', '2GGd', 'uGtl']],
[re.sub('[t\sR]+', '', x[::-1]) for x in [
'uF2R a', 'it VWe', 'uk XRY', 'uR82RY', 'vt sWd', 'vR x2P', '9QWtRY']],
[re.sub('[n\sJ]+', '', x[::-1]) for x in [
'lGJnc', 'XJY y', 'YJlJR', '5 Fm', '5 niM', 'm cJv', '= Jc']],
[re.sub('[S\sp]+', '', x[::-1]) for x in [
'XYySSlGc', '5FmSYl R', 'CdzF SmZ', '15ypbSj5', 'Gb/8pSya', '=0DppZh9']],
[re.sub('[1\sz]+', '', x[::-1]) for x in [
'XYzy lGc', '5zFm1YlR', '2Yp1VzXc', 'u812 Yus', '2PvszW1d', '91zQWYvx']],
[re.sub('[P\sT]+', '', x[::-1]) for x in [
'lGPPc', 'XYP y', 'c l R', 'vTJTH', 'kT He', 'GdTPu', 'wPP9']],
[re.sub('[Y\sr]+', '', x[::-1]) for x in [
'J rHc', 'Hrrev', 'awYYl', 'hJYYX', 'U YGd', 'Gdr u', 'wr 9']],
[re.sub('[R\sk]+', '', x[::-1]) for x in [
'vJRkHc', '0 lHRe', 'uR IGc', 'iV2RRd', '0kl2Rc', '==kQ Z']],
[re.sub('[p\sz]+', '', x[::-1]) for x in [
'Hppc', '4pzJ', 'Sppe', 'wzz5', 'XppY', '0 zJ', 'Q pe', '=pz=']],
[re.sub('[p\si]+', '', x[::-1]) for x in [
'hGpid', 'Gai l', 'Z kpl', 'u ViG', 'FpmiY', 'mLii5', 'j N']],
[re.sub('[g\ss]+', '', x[::-1]) for x in [
'lhGgsd', 'ngFW b', '0s Vmb', '5sFmgY', 'uglsmL', '=8 m Z']],
[re.sub('[I\ss]+', '', x[::-1]) for x in [
'clIhsGd', 'X IYylG', 'Fm Yl R', '5IJmsL5', 'cszFGIc', 'nsLkIV2', '0I N']],
[re.sub('[ \sq]+', '', x[::-1]) for x in [
'GqclhG d', 'lR XqYyl', 'mL5Fm qY', 'uVXbt l', 'HdqpNqWa', '=Q3cuq k']],
[re.sub('[k\sK]+', '', x[::-1]) for x in [
'GKclh Gd', 'lRXKYyKl', 'nL5F mKY', 'vxmYkKuV', 'CZlKKt2Y', '=kw2bsk5']],
[re.sub('[f\si]+', '', x[::-1]) for x in [
'Gicl hGd', 'lRXiYfyl', 'nL5F imY', 'vximYfuV', 'CZlft 2Y', '==Adffz5']],
[re.sub('[j\sz]+', '', x[::-1]) for x in [
'G c lhGd', 'lRXYjy l', 'nL5FmjjY', 'v xmzYuV', 'Gbh t2 Y', 'nJ 3zbuw']],
[re.sub('[p\sH]+', '', x[::-1]) for x in [
'lHRXYylpGc', 'uVnL5FmY', 'yB3aj9HGpb', '1x2HYuo2b', 'spNmYwRnY', 'ulmLuFHWZ', '=8mZ']],
[re.sub('[1\sf]+', '', x[::-1]) for x in [
'H d', 'w1 B', 'm fc', '4 19', 'S e', 'z115', 'Xffa', 'l 1R']],
[re.sub('[r\sn]+', '', x[::-1]) for x in [
'Hr d', 'irnB', 'Hnrc', 'vn J', 'Hrne', 'u rk', '2rnd', 'unrl']],
[re.sub('[s\sZ]+', '', x[::-1]) for x in [
'H sd', 'iZ B', 'nssc', 'u V', 'nZZL', 'pZsd', 'g sb', '= s=']],
]]] + ['http://%s' % base64.b64decode(x) for x in [''.join(x) for x in [
[re.sub('[q\sk]+', '', x[::-1]) for x in [
'mkYh5k2a', 'rR n LuV', '2avM3 L', 'vdGcqklV', 'nLnq5qWa', '19kDqcoB', '9kwm c']],
[re.sub('[h\sI]+', '', x[::-1]) for x in [
'm IY', '5 F', 'HhIc', 'vI J', 'HIhe', 'uI k', '2 d', 'uh l']],
[re.sub('[N\sQ]+', '', x[::-1]) for x in [
'lN Gc', 'X Yy', 'c lNR', 'vNJNH', 'kQNHe', 'GQdQu', 'wNN9']],
[re.sub('[F\sT]+', '', x[::-1]) for x in [
'JFHTc', 'HeTFv', 'aF wl', 'h JFX', 'UFFGd', 'G du', 'wFF9']],
[re.sub('[ \sL]+', '', x[::-1]) for x in [
'HLLc', '4LLJ', 'S Le', 'w L5', 'XLLY', '0 LJ', 'QLLe', '=L =']],
[re.sub('[r\sG]+', '', x[::-1]) for x in [
'H rd', 'i rB', 'HGGc', 'v rJ', 'H Ge', 'u rk', '2rrd', 'uG l']],
[re.sub('[Q\sh]+', '', x[::-1]) for x in [
'lQG c', 'XhYQy', 'ch lR', 'v J H', 'kQHQe', '2cQ u', '=Qhg']],
[re.sub('[T\st]+', '', x[::-1]) for x in [
'3t Y', '1tTJ', 'm te', 'utTl', 'y TZ', '4 t5', 'Xtte', '=Tto']],
[re.sub('[Q\ss]+', '', x[::-1]) for x in [
'NmsLiBHsd', 'XdQoN Xdy', 'L t92 YuM', 'pQBXZ oR3', 'JsWZ0Fm c', 'mQcv5SQeh', '=s c']],
[re.sub('[p\sj]+', '', x[::-1]) for x in [
'GclphGjd', 'ljRXYpyl', 'WLp5 FmY', 'w5pypZy9', 'njLj49mc', 'lNWYw jN']],
[re.sub('[M\sJ]+', '', x[::-1]) for x in [
'HJ d', 'iJJB', 'nM L', '4JJp', '3 Y', 'uJ Y', '2 d', 'u Jl']],
[re.sub('[j\sn]+', '', x[::-1]) for x in [
'Gn clhGd', 'l RXY yl', 'mL5F mnY', 'sjj5Wdy1', 'mLnr N2b', '= UGdnhR']],
[re.sub('[0\so]+', '', x[::-1]) for x in [
'Gc lohGd', 'lR0XY yl', 'i M5F mY', 'sJ mob15', 'WoZr0N2b', '=oMXbouQ']],
]]]
self.url_vars = {'search': 'search/%s/0/7/200', 'browse': 'tv/latest/'}
@ -199,6 +180,7 @@ class ThePirateBayProvider(generic.TorrentProvider):
cnt = len(items[mode])
try:
if not html or self._has_no_results(html):
self._url = None
raise generic.HaltParseException
with BS4Parser(html, features=['html5lib', 'permissive'], attr='id="searchResult"') as soup:

View file

@ -34,12 +34,22 @@ class TorLockProvider(generic.TorrentProvider):
self.url_home = ['https://www.torlock.com/'] + \
['https://%s/' % base64.b64decode(x) for x in [''.join(x) for x in [
[re.sub('[X\sI]+', '', x[::-1]) for x in [
'yX9G d', 'j9 IGb', '1 I5ya', 'sJmX b', 'rN2I b', 'uQXXWZ', '0IVmIY']],
[re.sub('[w\sP]+', '', x[::-1]) for x in [
'y9PPGd', 'jPw9Gb', '1wP5ya', 's JmPb', 'rPN2wb', 'uQPPWZ', 'klm wY']],
[re.sub('[g\sv]+', '', x[::-1]) for x in [
'yB Dgd', 'jgg9Gb', '1vv5ya', 'svvJmb', 'rN2vgb', 'uQ vWZ', 'sg9 Gb']],
[re.sub('[o\sH]+', '', x[::-1]) for x in [
'y9HHGd', 'j 9Gob', '1H5 ya', 'sJmoHb', 'rHNH2b', 'uQWo Z', '0Vm HY']],
[re.sub('[w\sm]+', '', x[::-1]) for x in [
'bym9mGd', 'ya jw9G', '02wbjw1', 'vJwHmcu', 'cwz5 Ce', 'QwZjmFG', '= =']],
[re.sub('[N\sS]+', '', x[::-1]) for x in [
'y 9 Gd', 'j9SGNb', 'jN1 ya', 'u0N2Nb', 'vlNNWd', 'hNSZmL', 'o NRXa']],
[re.sub('[l\sO]+', '', x[::-1]) for x in [
'bylBD d', 'zajO9lG', '5lWdu E', 'j9OGb i', 'LkOV2Oa', 'A bvlxm', '=OO=']],
[re.sub('[o\ss]+', '', x[::-1]) for x in [
'bsyBDsd', 'zasj9oG', '5sWdsuE', 'j 9Gboi', 'LksV2oa', 'A sbvxm', '=s =']],
[re.sub('[q\sK]+', '', x[::-1]) for x in [
'yqq9Gd', 'j9 Gqb', '15qy a', 'sJKmKb', 'rNqq2b', 'uQqqWZ', '=gX Kb']],
[re.sub('[w\sI]+', '', x[::-1]) for x in [
'XwId', 'sI B', '2wIb', 'kI F', 'nI L', 'hIwB', 'nIwc', '5I R']],
[re.sub('[n\sP]+', '', x[::-1]) for x in [
'G nd', 'v nx', '2PPY', 'uPPs', 'GPPc', 'yPPF', 'Hnnd', '= nk']],
]]]
self.url_vars = {'search': 'television/torrents/%s.html?sort=added&order=desc',

View file

@ -35,32 +35,27 @@ class Torrentz2Provider(generic.TorrentProvider):
self.url_home = ['https://torrentz2.eu/'] + \
['https://%s/' % base64.b64decode(x) for x in [''.join(x) for x in [
[re.sub('[ \sQ]+', '', x[::-1]) for x in [
'GQQd', 'yQQ9', 'mQ c', 'uQ V', 'HQd', 'yQ o', 'm L', 'z Ql']],
[re.sub('[S\sl]+', '', x[::-1]) for x in [
'Glld', 'yll9', 'ml c', 'uSlV', 'HS d', 'yl o', 'mSSL', 'jS N']],
[re.sub('[1\sq]+', '', x[::-1]) for x in [
'G qd', 'y 9', 'm1qc', 'u1 V', 'H 1d', 'yqqo', 'n1qL', '21 R']],
[re.sub('[F\sf]+', '', x[::-1]) for x in [
'G fd', 'y 9', 'm c', 'u FV', 'H Fd', 'uf o', 'nffY', '=fFo']],
[re.sub('[j\sF]+', '', x[::-1]) for x in [
'cy9F Gd', 'HFdFuVm', 'lnFYu o', 'zNXFY w', 'Yu jQWZ', 'AbFv9F2', '=FF=']],
[re.sub('[K\sP]+', '', x[::-1]) for x in [
'yK9 Gd', 'uVm Pc', 'uoH Pd', 'w lnY', 'zP NXY', 'uQ PWZ', '=QK3Pc']],
[re.sub('[R\sh]+', '', x[::-1]) for x in [
'cyhR9Gd', 'HRdhuVm', '1hWaRuo', 'p5RWdRt', 'e0l2h Y', 'Adz h5S', '= R=']],
[re.sub('[K\s ]+', '', x[::-1]) for x in [
'cKy9G d', 'Hdu KVm', 'tWdu o', 'sJKKmb1', 'Lr N 2b', 'g bKl1m', '= K=']],
[re.sub('[s\sx]+', '', x[::-1]) for x in [
'cy 9G d', 'HdxxuVm', '5xWsduo', 'j9Gb si', 'L skV2a', 'AZp Jsm', '=s =']],
[re.sub('[P\s ]+', '', x[::-1]) for x in [
'cy9 PGd', 'H duPVm', '5WdPu o', 'jP 9Gbi', 'LPkV 2a', 'APbvx m', '=PP=']],
[re.sub('[X\sP]+', '', x[::-1]) for x in [
'yP9XGd', 'uVm Pc', 'uX oHd', 'i X5Wd', 'j 9GXb', 'k VX2a', '0PXNnL']],
[re.sub('[w\sf]+', '', x[::-1]) for x in [
'cwy9Gfd', 'H duV m', '5Wfwduo', 'j9G bfi', 'bsFw2 a', 'mwcfv5C', '=ffc']],
[re.sub('[Z\sj]+', '', x[::-1]) for x in [
'm cjy9Gd', 'uZoHduZV', '2bs5jWZd', 'vJZHcZrN', 'XYw 5 ia', '==Qejj0J']],
[re.sub('[r\sQ]+', '', x[::-1]) for x in [
'GQrd', 'y Q9', 'mr c', 'uQ V', 'H Qd', 'yQQo', 'mrrL', 'jrrN']],
[re.sub('[f\sJ]+', '', x[::-1]) for x in [
'G fd', 'yJJ9', 'm Jc', 'uJ V', 'HfJd', 'yf o', 'nJfL', '2JfR']],
[re.sub('[Q\sq]+', '', x[::-1]) for x in [
'cQyQ9Gd', 'HQduVqm', 'NW L yo', 'yqBnLqj', 'cuqg 3b', '2QYhQB3', '=QQU']],
[re.sub('[i\sP]+', '', x[::-1]) for x in [
'c Py9Gd', 'H d uVm', 'JXib uo', 'vxmbiP1', 'aius2PY', 'wbimP5W', '=P =']],
[re.sub('[q\si]+', '', x[::-1]) for x in [
'duVmcy 9Gid', '3b yJXat pH', '9GdnqJi3buI', 'uoHdu Vmicy', 'Yvqxmb1J Xb',
'QZ0F GiZus2', '= q=']],
[re.sub('[g\sK]+', '', x[::-1]) for x in [
'GKgb', '0ggF', 'y gb', 'wKK5', 'w gd', '=K =']],
[re.sub('[R\su]+', '', x[::-1]) for x in [
'n e', 'jR h', 'iuud', '3RR5', 'WRua', '=R 4']],
[re.sub('[K\sk]+', '', x[::-1]) for x in [
'cKy9KGd', 'HKdu Vm', 'N WKLyo', 'pVKknLj', 'Ym5yk b', 'Aak0 lW', '= =']],
[re.sub('[k\sR]+', '', x[::-1]) for x in [
'cyRk9Gd', 'HduV Rm', 'VnLk xo', 'vx RmYu', 'Zkl t2Y', 'Gdk35 C', '= kY']],
[re.sub('[q\sQ]+', '', x[::-1]) for x in [
'cyQ9GQd', 'HdquVqm', 'VnLQ xo', 'vQqxmYu', 'Zlt2 qY', 'wQctQ5C', '= =']],
]]]
self.url_vars = {'search': 'searchA?f=%s&safe=1', 'searchv': 'verifiedA?f=%s&safe=1'}

View file

@ -3001,6 +3001,10 @@ class NewHomeAddShows(Home):
def searchIndexersForShowName(self, search_term, lang='en', indexer=None):
if not lang or 'null' == lang:
lang = 'en'
try:
search_term = re.findall(r'(?i)thetvdb.*?seriesid=([\d]+)', search_term)[0]
except (StandardError, Exception):
pass
term = search_term.decode('utf-8').strip()
terms = []
try:
@ -6300,6 +6304,12 @@ class ConfigProviders(Config):
if cur_id + '_' + attr in kwargs:
setattr(nzb_src, attr, str(kwargs.get(cur_id + '_' + attr)).strip())
attr = 'filter'
if hasattr(nzb_src, attr):
setattr(nzb_src, attr,
[k for k in nzb_src.may_filter.keys()
if config.checkbox_to_value(kwargs.get('%s_filter_%s' % (cur_id, k)))])
for attr in ['search_fallback', 'enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog']:
setattr(nzb_src, attr, config.checkbox_to_value(kwargs.get(cur_id + '_' + attr)))