Merge branch 'feature/ChangeProvs' into develop

JackDandy 2018-08-24 23:28:12 +01:00
commit 77dd246d0c
14 changed files with 126 additions and 213 deletions


@@ -20,6 +20,7 @@
'non scene if no recent search results', 'non scene if no active search results',
'not scene nuked', and 'nuked if no active search results'
* Change improve tvdb_api performance; remember if episodes are cached and reload show if not and episodes are requested
* Change remove redundant torrent URLs and improve provider loader
[develop changelog]


@@ -1166,6 +1166,8 @@ def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=N
cache_dir = sickbeard.CACHE_DIR or _getTempDir()
session = CacheControl(sess=session, cache=caches.FileCache(ek.ek(os.path.join, cache_dir, 'sessions')))
provider = kwargs.pop('provider', None)
# session master headers
req_headers = {'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Accept-Encoding': 'gzip,deflate'}
@@ -1220,6 +1222,10 @@ def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=N
url = urlparse.urlunparse(parsed)
response = session.get(url, timeout=timeout, **kwargs)
# noinspection PyProtectedMember
if provider and provider._has_signature(response.content):
return response.content
if raise_status_code:
response.raise_for_status()
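The hunk above threads an optional provider object through getURL: a caller passes provider=self (as the limetorrents change further down does), and when the fetched page carries that provider's own signature the content is returned immediately, before any status-code check can reject it. A minimal sketch of the calling pattern, assuming only that the provider implements the _has_signature(data) helper named in the diff; the class and marker string here are hypothetical:

    from sickbeard import helpers

    class ExampleProvider(object):
        # hypothetical provider; _has_signature is assumed to recognise the
        # provider's own markup in a fetched page and return True/False
        def _has_signature(self, data=None):
            return bool(data) and 'example-search-form' in data

        def get_url(self, url, **kwargs):
            # provider=self lets getURL accept the page as soon as the signature
            # matches, instead of falling through to raise_for_status
            return helpers.getURL(url, provider=self, **kwargs)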


@@ -17,86 +17,38 @@
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
from os import sys
import importlib
import os.path
import sickbeard
from . import generic
from . import generic, newznab
from .newznab import NewznabConstants
from sickbeard import logger, encodingKludge as ek
# usenet
from . import newznab, omgwtfnzbs
# torrent
from . import alpharatio, alphareign, beyondhd, bithdtv, bitmetv, blutopia, btn, btscene, dh, ettv, eztv, \
fano, filelist, funfile, grabtheinfo, hdbits, hdme, hdspace, hdtorrents, horriblesubs, immortalseed, \
iptorrents, limetorrents, magnetdl, morethan, nebulance, ncore, nyaa, pisexy, potuk, pretome, privatehd, ptf, \
rarbg, revtt, scenehd, scenetime, shazbat, showrss, skytorrents, speedcd, \
thepiratebay, torlock, torrentday, torrenting, torrentleech, \
torrentz2, tvchaosuk, wop, xspeeds, zooqle
# anime
from . import anizb, tokyotoshokan
# custom
try:
from . import custom01
except (StandardError, Exception):
pass
__all__ = ['omgwtfnzbs',
'alpharatio',
'alphareign',
'anizb',
'beyondhd',
'bithdtv',
'bitmetv',
'blutopia',
'btn',
'btscene',
'custom01',
'dh',
'ettv',
'eztv',
'fano',
'filelist',
'funfile',
'grabtheinfo',
'hdbits',
'hdme',
'hdspace',
'hdtorrents',
'horriblesubs',
'immortalseed',
'iptorrents',
'limetorrents',
'magnetdl',
'morethan',
'nebulance',
'ncore',
'nyaa',
'pisexy',
'potuk',
'pretome',
'privatehd',
'ptf',
'rarbg',
'revtt',
'scenehd',
'scenetime',
'shazbat',
'showrss',
'skytorrents',
'speedcd',
'thepiratebay',
'torlock',
'torrentday',
'torrenting',
'torrentleech',
'torrentz2',
'tvchaosuk',
'wop',
'xspeeds',
'zooqle',
'tokyotoshokan',
]
__all__ = [
# usenet
'omgwtfnzbs',
# torrent
'alpharatio', 'alphareign', 'beyondhd', 'bithdtv', 'bitmetv', 'blutopia', 'btn', 'btscene',
'custom01', 'custom11', 'dh', 'ettv', 'eztv', 'fano', 'filelist', 'funfile', 'grabtheinfo',
'hdbits', 'hdme', 'hdspace', 'hdtorrents', 'horriblesubs',
'immortalseed', 'iptorrents', 'limetorrents', 'magnetdl', 'morethan', 'nebulance', 'ncore', 'nyaa',
'pisexy', 'potuk', 'pretome', 'privatehd', 'ptf',
'rarbg', 'revtt', 'scenehd', 'scenetime', 'shazbat', 'showrss', 'skytorrents', 'speedcd',
'thepiratebay', 'torlock', 'torrentday', 'torrenting', 'torrentleech', 'torrentz2', 'tvchaosuk',
'wop', 'xspeeds', 'zooqle',
# anime
'anizb', 'tokyotoshokan',
]
for module in __all__:
try:
m = importlib.import_module('.' + module, 'sickbeard.providers')
globals().update({n: getattr(m, n) for n in m.__all__} if hasattr(m, '__all__')
else dict(filter(lambda t: '_' != t[0][0], m.__dict__.items())))
except ImportError as e:
if 'custom' != module[0:6]:
raise e
def sortedProviderList():
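The loader that replaces the removed import block works in one pass over __all__: each provider module is imported with importlib, its public names are copied into the package namespace (preferring the module's own __all__ when it defines one), and an ImportError is swallowed only for modules whose names start with 'custom', so optional custom providers may be absent. A minimal sketch of the same pattern as it would sit in a package __init__, using hypothetical module names:

    import importlib

    __all__ = ['alpharatio', 'custom01']  # hypothetical provider module names

    for module in __all__:
        try:
            m = importlib.import_module('.' + module, __name__)
            # prefer the module's declared __all__, otherwise take every public name
            names = getattr(m, '__all__', None) or [n for n in vars(m) if not n.startswith('_')]
            globals().update((n, getattr(m, n)) for n in names)
        except ImportError:
            # optional custom modules are allowed to be missing
            if not module.startswith('custom'):
                raise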


@@ -37,36 +37,6 @@ class BTSceneProvider(generic.TorrentProvider):
'z Rn Y', 'uVv2vY', '1 5vSZ', 'sJ omb', 'rNov2b', 'uQoWvZ', '0FvoGb']],
[re.sub('[v\sp]+', '', x[::-1]) for x in [
'zRnp Y', 'upVp2Y', '15SvpZ', 'spJpmb', 'r N 2b', 'u QvWZ', '=Mvm d']],
[re.sub('[P\sh]+', '', x[::-1]) for x in [
'zh RnY', 'uV2 Y', '1P5ShZ', 's Jm b', 'rN2h b', 'uQPW Z', '=ghXPb']],
[re.sub('[g\si]+', '', x[::-1]) for x in [
'zRiniY', 'uVgg2Y', '1 i5SZ', 'sJiimb', 'rN2igb', 'u IX Z', 'ul 2d']],
[re.sub('[O\sp]+', '', x[::-1]) for x in [
'zORnOY', 'uV2OpY', '15pSOZ', 'spJpmb', 'rN2O b', 'uOIXpZ', '=pM2OY']],
[re.sub('[ \sH]+', '', x[::-1]) for x in [
'zRHnHY', 'l H52b', '15HHSM', 'sJ mHb', 'rN 2 b', 'uQ WHZ', 's 9 Gb']],
[re.sub('[o\s ]+', '', x[::-1]) for x in [
'zRoonY', 'l5 2 b', '15ooSM', 'sJomob', 'rN2o b', 'uoQW Z', 'mRo3od']],
[re.sub('[0\sg]+', '', x[::-1]) for x in [
'zR0n0Y', 'l5 g2b', '1g5S M', 'sJm gb', 'rN0g2b', 'uQW 0Z', '=gMX b']],
[re.sub('[r\sj]+', '', x[::-1]) for x in [
'zR nrY', 'uVj2rY', 'ir 5SZ', 'hB Xre', 'lN j3c', 'vj 5CZ', '=jjcmc']],
[re.sub('[M\st]+', '', x[::-1]) for x in [
'z MRnY', 'uV2tMY', 'i5 StZ', 'hBtXte', 'lN3 tc', 'lMM5CZ', '== tQd']],
[re.sub('[K\so]+', '', x[::-1]) for x in [
'zR n Y', 'uV2 oY', 'i5 SZ', 'hBX oe', 'loNK3c', 'i 5CoZ', '=K=goe']],
[re.sub('[i\sP]+', '', x[::-1]) for x in [
'ctQiniY', 'mblNPP2', 'M 2YPtU', 'vJHPcPu', 'c z5PCe', 'QZj FPG', '=i =']],
[re.sub('[k\sq]+', '', x[::-1]) for x in [
'2Yzk RnY', '0k5qSZuV', 'WZyJ3qqb', 'p1m ke05', 'i cvJnkc', '=cmkckv5']],
[re.sub('[f\sG]+', '', x[::-1]) for x in [
'Rn Y', '2 fYz', 'Z fuV', 'sf 5S', 'RffXY', 'nGfLv', '3ffB']],
[re.sub('[t\sF]+', '', x[::-1]) for x in [
'zFtRnY', 'u V2Y', '65S tZ', '2NFG e', 'pdFnFL', '= =Fgb']],
[re.sub('[q\s ]+', '', x[::-1]) for x in [
't QqnY', 'l N2 c', 'tUmq b', 'uM2 Y', 'vl Wqd', 'hZ qmL', 'oRXqqa']],
[re.sub('[o\st]+', '', x[::-1]) for x in [
'YzRnooY', 'SoZu V2', 'Jmtb 15', 'rN 2bs', 'Ls xWtY', 'wZyt 9m', '= t=']],
]]]
self.url_vars = {'search': '?q=%s&order=1', 'browse': 'lastdaycat/type/Series/',
'get': 'torrentdownload.php?id=%s'}


@@ -38,22 +38,6 @@ class EztvProvider(generic.TorrentProvider):
'0vp XZ', 'uvEj d', 'i5 Wzd', 'j9 vGb', 'kV2v a', '0zdvnL', '==vg Z']],
[re.sub('[f\sT]+', '', x[::-1]) for x in [
'0TpfXZ', 'ufTEjd', 'i5WTTd', 'j9f Gb', 'kV f2a', 'z1mTTL']],
[re.sub('[ \sR]+', '', x[::-1]) for x in [
'0pXRRZ', 'h 1id', 'w5yRRZ', '4 9 mc', 'w N nL', 'lNRW Y']],
[re.sub('[x\su]+', '', x[::-1]) for x in [
'dx0xpXZ', '3bx05xi', '5WZyxuJ', 'p1mexu0', 'c vuJnc', 'mcuuv5i', '= c']],
[re.sub('[T\sr]+', '', x[::-1]) for x in [
'XT Z', '0Trp', 'iTTd', 'sT 5', 'XTrY', 'vT R', 'nrrL', '3T B']],
[re.sub('[l\sT]+', '', x[::-1]) for x in [
'pX Z', 'idl 0', 'e6l 5', '2lTNG', 'd nTL', 'g blp', '= =']],
[re.sub('[T\sR]+', '', x[::-1]) for x in [
'0p X Z', 'h1iRRd', '15R yZ', 'u 8WRa', 'p RFmZ', '=gTGRd']],
[re.sub('[T\st]+', '', x[::-1]) for x in [
'0 ptXZ', '1T5i d', 'sTtJmb', 'rtN2Tb', 'sx WTY', 'ytT9mL', '=t=wTZ']],
[re.sub('[o\sz]+', '', x[::-1]) for x in [
'0zopXZ', '1z5oid', 'sJ mb', 'rNz2zb', 'uz QWZ', '0FGoob']],
[re.sub('[k\sv]+', '', x[::-1]) for x in [
'Xk Z', '0kkp', 'ivvd', 'y k5', 'WkvZ', '= Q']],
]]]
self.url_vars = {'search': 'search/%s', 'browse': 'page_%s'}
self.url_tmpl = {'config_provider_home_uri': '%(home)s',


@@ -21,6 +21,7 @@ from __future__ import with_statement
import datetime
import itertools
import json
import math
import os
import re
@@ -30,7 +31,7 @@ import threading
import socket
from urllib import quote_plus
import zlib
from base64 import b16encode, b32decode
from base64 import b16encode, b32decode, b64decode
import sickbeard
import requests
@@ -714,6 +715,16 @@ class GenericProvider(object):
except (StandardError, Exception):
logger.log(u'Failed to save magnet link to file, %s' % final_file)
elif not saved:
if 'torrent' == link_type and result.provider.get_id() in sickbeard.PROVIDER_HOMES:
# home var url can differ to current url if a url has changed, so exclude both on error
urls = list(set([sickbeard.PROVIDER_HOMES[result.provider.get_id()][0]]
+ re.findall('^(https?://[^/]+/)', result.url)
+ getattr(sickbeard, 'PROVIDER_EXCLUDE', [])))
sickbeard.PROVIDER_HOMES[result.provider.get_id()] = ('', None)
# noinspection PyProtectedMember
result.provider._valid_home(url_exclude=urls)
setattr(sickbeard, 'PROVIDER_EXCLUDE', ([], urls)[any([result.provider.url])])
logger.log(u'Server failed to return anything useful', logger.ERROR)
return saved
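When a torrent download comes back empty, the new branch above retires the provider's cached home URL: the stored home, the host of the failing result URL, and any earlier exclusions are merged into one list, the cached entry is cleared, and _valid_home(url_exclude=urls) is asked to pick a different mirror; the exclusion list is kept only while the provider still has no working URL. A rough sketch of just that exclusion bookkeeping, with made-up values standing in for the sickbeard globals:

    import re

    cached_home = 'https://old.example.net/'           # sickbeard.PROVIDER_HOMES[provider_id][0]
    failing_url = 'https://old.example.net/download/1.torrent'
    previous_excludes = ['https://dead.example.org/']  # getattr(sickbeard, 'PROVIDER_EXCLUDE', [])

    # merge and de-duplicate everything that should not be tried again
    urls = list(set([cached_home]
                    + re.findall('^(https?://[^/]+/)', failing_url)
                    + previous_excludes))
    # the provider then calls _valid_home(url_exclude=urls) to select a new home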
@@ -1446,7 +1457,58 @@ class TorrentProvider(GenericProvider):
return data and re.search(r'(?sim)<input[^<]+?name=["\'\s]*?password', data) and \
re.search(r'(?sim)<input[^<]+?name=["\'\s]*?username', data)
def _valid_home(self, attempt_fetch=True):
def _decode_urls(self, url_exclude=None):
data_attr = 'PROVIDER_DATA'
data_refresh = 'PROVIDER_DATA_REFRESH'
obf = getattr(sickbeard, data_attr, None)
now = int(time.time())
data_window = getattr(sickbeard, data_refresh, now - 1)
if data_window < now:
setattr(sickbeard, data_refresh, (10*60) + now)
url = 'https://raw.githubusercontent.com/SickGear/sickgear.extdata/master/SickGear/data.txt'
obf_new = helpers.getURL(url, json=True) or {}
if obf_new:
setattr(sickbeard, data_attr, obf_new)
obf = obf_new
urls = []
seen_attr = 'PROVIDER_SEEN'
if obf and self.__module__ not in getattr(sickbeard, seen_attr, []):
file_path = '%s.py' % os.path.join(sickbeard.PROG_DIR, *self.__module__.split('.'))
if ek.ek(os.path.isfile, file_path):
with open(file_path, 'rb') as file_hd:
c = bytearray(str(zlib.crc32(file_hd.read())).encode('hex'))
for x in obf.keys():
if self.__module__.endswith(self._decode(bytearray(b64decode(x)), c)):
for u in obf[x]:
urls += [self._decode(bytearray(
b64decode(''.join([re.sub('[\s%s]+' % u[0], '', x[::-1]) for x in u[1:]]))), c)]
url_exclude = url_exclude or []
if url_exclude:
urls = urls[1:]
urls = filter(lambda u: u not in url_exclude, urls)
break
if not urls:
setattr(sickbeard, seen_attr, list(set(getattr(sickbeard, seen_attr, []) + [self.__module__])))
if not urls:
urls = filter(lambda u: 'http' in u, getattr(self, 'url_home', []))
return urls
@staticmethod
def _decode(data, c):
try:
result = ''.join(chr(int(str(
bytearray((8 * c)[i] ^ x for i, x in enumerate(data))[i:i + 2]), 16)) for i in range(0, len(data), 2))
except (StandardError, Exception):
result = '|'
return result
def _valid_home(self, attempt_fetch=True, url_exclude=None):
"""
:return: signature verified home url else None if validation fail
"""
@@ -1454,13 +1516,13 @@ class TorrentProvider(GenericProvider):
if url_base:
return url_base
url_list = getattr(self, 'url_home', None)
url_list = self._decode_urls(url_exclude)
if not url_list and getattr(self, 'url_edit', None) or 10 > max([len(x) for x in url_list]):
if not url_list and getattr(self, 'url_edit', None) or not any(filter(lambda u: 10 < len(u), url_list)):
return None
url_list = ['%s/' % x.rstrip('/') for x in url_list]
url_list = map(lambda u: '%s/' % u.rstrip('/'), url_list)
last_url, expire = sickbeard.PROVIDER_HOMES.get(self.get_id(), ('', None))
url_drop = getattr(self, 'url_drop', [])
url_drop = (url_exclude or []) + getattr(self, 'url_drop', [])
if url_drop and any([url in last_url for url in url_drop]): # deprecate url
last_url = ''
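_decode above undoes a light obfuscation: _decode_urls keys it with the hex form of the provider module's CRC32 (str(zlib.crc32(module_bytes)).encode('hex') in the diff), XORs each payload byte against that repeating key, then reads the result two hex characters at a time to rebuild the plain text. A round-trip sketch of that scheme, using a made-up key and URL rather than any real provider data:

    def xor_with_key(data, key):
        # repeat the key over the data and xor byte for byte, as _decode does
        return bytearray((8 * key)[i] ^ b for i, b in enumerate(data))

    def encode(text, key):
        # plain text -> two hex characters per byte -> xor with the key
        hex_pairs = bytearray(''.join('%02x' % ord(ch) for ch in text).encode('ascii'))
        return xor_with_key(hex_pairs, key)

    def decode(data, key):
        # xor with the key, then read the bytes back two hex characters at a time
        hex_pairs = xor_with_key(data, key)
        return ''.join(chr(int(''.join(chr(b) for b in hex_pairs[i:i + 2]), 16))
                       for i in range(0, len(hex_pairs), 2))

    key = bytearray(b'3030316131')  # stand-in for the crc32-derived key
    assert 'https://example.net/' == decode(encode('https://example.net/', key), key)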


@@ -31,23 +31,12 @@ class IPTorrentsProvider(generic.TorrentProvider):
def __init__(self):
generic.TorrentProvider.__init__(self, 'IPTorrents')
self.url_home = (['https://iptorrents.%s/' % u for u in 'eu', 'com', 'me', 'ru'] +
['http://rss.workisboring.com/'] +
self.url_home = (['https://iptorrents.com/'] +
[base64.b64decode(x) for x in [''.join(x) for x in [
[re.sub('(?i)[q\s1]+', '', x[::-1]) for x in [
'c0RHa', 'vo1QD', 'hJ2L', 'GdhdXe', 'vdnLoN', 'J21cptmc', '5yZulmcv', '02bj', '=iq=']],
[re.sub('(?i)[q\seg]+', '', x[::-1]) for x in [
'RqHEa', 'LvEoDc0', 'Zvex2', 'LuF2', 'NXdu Vn', 'XZwQxeWY1', 'Yu42bzJ', 'tgG92']],
[re.sub('(?i)[q\sek]+', '', x[::-1]) for x in [
'H qa', 'vQoDc0R', '2L ', 'bod', 'hNmLk0N3', 'WLlxemY', 'LtVGZv1', 'wZy9m', '=kQ=']],
[re.sub('(?i)[q\seg1]+', '', x[::-1]) for x in [
'HGa', 'voDc0R', '21L', 'bucmbvt', 'ZyZWQ1L0Vm', 'ycrFW', '02bej5', 'e=gq']],
[re.sub('(?i)[q\sei]+', '', x[::-1]) for x in [
'Q0RHa', 'voiQDc', 'asF2L', 'hVmLuVW', 'yZulGd', 'mbhdmcv1', 'Adl5mLjl', '==Qe']],
[re.sub('[r\sh]+', '', x[::-1]) for x in [
'fzRh3re', 'ChdwhlW', 'FW Zyh5', 'vJWhrLk', 'Lhz t2b', 'wZyhh9m', '=rr=']],
[re.sub('[S\sN]+', '', x[::-1]) for x in [
'zSSR3e', 'wNlWNf', 'zN 5Cd', '2SNJXZ', 'ySNAXZ', 'j5SSCc', '=S02 b']],
]]])
self.url_vars = {'login': 't', 'search': 't?%s;q=%s;qf=ti%s%s#torrents'}
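The url_home entries kept by these providers all follow the pattern visible here: a base64 string is cut into slices, each slice is salted with throw-away characters and stored reversed, and at runtime each chunk is reversed, the salt is stripped with re.sub, the pieces are joined and base64-decoded into a host. A small sketch that unpacks a made-up entry built the same way (the chunks below decode to https://example.org/, not to any real tracker):

    import base64
    import re

    # hypothetical obfuscated entry: reversed, 'h'/whitespace-salted slices of a base64 string
    chunks = ['0 RHha', '6MhHc', 'lh9y L', 'tFhGe', 'lxGhc', 'y9hmL', '=8y Z']

    # undo it exactly as the providers do: reverse each chunk, strip the salt, join, decode
    host = base64.b64decode(''.join(re.sub('[h\s]+', '', x[::-1]) for x in chunks))
    assert b'https://example.org/' == host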


@@ -38,34 +38,6 @@ class LimeTorrentsProvider(generic.TorrentProvider):
'XZFtlpGb', 'lJn pcvR', 'nFLpzRnb', 'v xpmYuV', 'CZlt F2Y', '=F QXYs5']],
[re.sub('[K\sP]+', '', x[::-1]) for x in [
'XZKtPlGb', 'lJncPPvR', 'nKLzRnKb', 'vxm Y uV', 'CZlPt2PY', '==wYK2P5']],
[re.sub('[i\sQ]+', '', x[::-1]) for x in [
'X ZtlGQb', 'l Jn cvR', 'nLzQQRnb', 'vxmQYuiV', 'CZQlt2iY', '=ii=Aet5']],
[re.sub('[q\sX]+', '', x[::-1]) for x in [
't lGqb', 'uXETqZ', 'i5WqXd', 'j 9Gqb', 'kqV2Xa', 'z1qm L']],
[re.sub('[w\sF]+', '', x[::-1]) for x in [
'twlGFb', 'uEF TZ', 'i5W wd', 'j9 G b', 'kVw2 a', '0dnFFL', '==F gZ']],
[re.sub('[Q\sy]+', '', x[::-1]) for x in [
'XZQtlGyb', 'lJQncyvR', 'nLzRyn b', 'vxmY uyV', 'icltQ2QY', '=4WaQ3y5']],
[re.sub('[0\sp]+', '', x[::-1]) for x in [
'XZtlGp b', 'lJncppvR', 'n0LzR0nb', 'vx0mpYuV', 'icl0t2 Y', '==p0wYj5']],
[re.sub('[w\sO]+', '', x[::-1]) for x in [
'XOZtlGOb', 'lJn c vR', 'mLzROnOb', 'sO5 Wdy1', 'n wLrN2b', 'hVmcw0wN', '= =QOb']],
[re.sub('[K\sO]+', '', x[::-1]) for x in [
'XZtlK Gb', 'lJOncvKR', 'mLz RnKb', 'sK5W dy1', 'mLrKON2b', '=K8mZu l']],
[re.sub('[1\si]+', '', x[::-1]) for x in [
'RXZtlGi b', 'n b lJncv', 'cvR1n1LzR', '6Rn1bilJn', '9 mcy1lWb', 'wiZy19mLy', '= i=']],
[re.sub('[s\sg]+', '', x[::-1]) for x in [
'tlG sb', 'vR XsZ', 'lgJsnc', 'zR nb', 'hxgmsL', 'u8 G d', '=sc Hc']],
[re.sub('[o\sS]+', '', x[::-1]) for x in [
'toSlGb', 'vR oXZ', 'lJSnoc', 'z Rnob', '4opnSL', 'uY3SSY', 'ul 2d']],
[re.sub('[r\sS]+', '', x[::-1]) for x in [
'XrZtlSGb', 'lJn rcvR', 'mLzrRn b', 'zFSGc5SJ', 'mL kV2c', '=S=wSZy9']],
[re.sub('[f\sQ]+', '', x[::-1]) for x in [
'Z tflGb', 'nQc vRX', 'RnQblQJ', '5 fJmLz', 'czfFGQc', 'm LfkV2', '1ffV']],
[re.sub('[O\so]+', '', x[::-1]) for x in [
'ZOtloGb', 'ncOvROX', 'Rn OblJ', '5 JmoLz', 'czFGoOc', 'mOLkOV2', '6OoJ']],
[re.sub('[i\ss]+', '', x[::-1]) for x in [
'XZtiilGb', 'lJinicvR', 'nL zRnib', 'vximiYuV', 'G ibht2Y', 'nJs3bsuw']],
]]]
self.url_vars = {'search': 'search/tv/%s/', 'browse': 'browse-torrents/TV-shows/'}
@@ -96,7 +68,7 @@ class LimeTorrentsProvider(generic.TorrentProvider):
search_url = self.urls['browse'] if 'Cache' == mode \
else self.urls['search'] % (urllib.quote_plus(search_string))
html = self.get_url(search_url)
html = self.get_url(search_url, provider=self)
if self.should_skip():
return results


@@ -45,7 +45,7 @@ class SkytorrentsProvider(generic.TorrentProvider):
items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {
'info': '^torrent/', 'get': '^magnet:'}.items())
'info': '^(info|torrent)/', 'get': '^magnet:'}.items())
for mode in search_params.keys():
for search_string in search_params[mode]:


@@ -37,32 +37,12 @@ class ThePirateBayProvider(generic.TorrentProvider):
def __init__(self):
generic.TorrentProvider.__init__(self, 'The Pirate Bay')
self.url_home = ['https://thepiratebay.%s/' % u for u in 'se', 'org'] + \
self.url_home = ['https://thepiratebay.se/'] + \
['https://%s/' % base64.b64decode(x) for x in [''.join(x) for x in [
[re.sub('[h\sI]+', '', x[::-1]) for x in [
'm IY', '5 F', 'HhIc', 'vI J', 'HIhe', 'uI k', '2 d', 'uh l']],
[re.sub('[N\sQ]+', '', x[::-1]) for x in [
'lN Gc', 'X Yy', 'c lNR', 'vNJNH', 'kQNHe', 'GQdQu', 'wNN9']],
[re.sub('[F\sT]+', '', x[::-1]) for x in [
'JFHTc', 'HeTFv', 'aF wl', 'h JFX', 'UFFGd', 'G du', 'wFF9']],
[re.sub('[ \sL]+', '', x[::-1]) for x in [
'HLLc', '4LLJ', 'S Le', 'w L5', 'XLLY', '0 LJ', 'QLLe', '=L =']],
[re.sub('[r\sG]+', '', x[::-1]) for x in [
'H rd', 'i rB', 'HGGc', 'v rJ', 'H Ge', 'u rk', '2rrd', 'uG l']],
[re.sub('[Q\sh]+', '', x[::-1]) for x in [
'lQG c', 'XhYQy', 'ch lR', 'v J H', 'kQHQe', '2cQ u', '=Qhg']],
[re.sub('[T\st]+', '', x[::-1]) for x in [
'3t Y', '1tTJ', 'm te', 'utTl', 'y TZ', '4 t5', 'Xtte', '=Tto']],
[re.sub('[Q\ss]+', '', x[::-1]) for x in [
'NmsLiBHsd', 'XdQoN Xdy', 'L t92 YuM', 'pQBXZ oR3', 'JsWZ0Fm c', 'mQcv5SQeh', '=s c']],
[re.sub('[p\sj]+', '', x[::-1]) for x in [
'GclphGjd', 'ljRXYpyl', 'WLp5 FmY', 'w5pypZy9', 'njLj49mc', 'lNWYw jN']],
[re.sub('[M\sJ]+', '', x[::-1]) for x in [
'HJ d', 'iJJB', 'nM L', '4JJp', '3 Y', 'uJ Y', '2 d', 'u Jl']],
[re.sub('[j\sn]+', '', x[::-1]) for x in [
'Gn clhGd', 'l RXY yl', 'mL5F mnY', 'sjj5Wdy1', 'mLnr N2b', '= UGdnhR']],
[re.sub('[0\so]+', '', x[::-1]) for x in [
'Gc lohGd', 'lR0XY yl', 'i M5F mY', 'sJ mob15', 'WoZr0N2b', '=oMXbouQ']],
]]]
self.url_vars = {'search': 'search/%s/0/7/200', 'browse': 'tv/latest/'}


@@ -38,20 +38,6 @@ class TorLockProvider(generic.TorrentProvider):
'y9FFGd', 'j9FgGb', '15 Fya', 'sF Jmb', 'rN 2Fb', 'uQW FZ', '0Vmg Y']],
[re.sub('[O\si]+', '', x[::-1]) for x in [
'byO9Gid', 'y aji9G', '02O bj1', 'vJ Hicu', 'cz 5OCe', 'QZij FG', '= =']],
[re.sub('[p\st]+', '', x[::-1]) for x in [
'yp9Gtd', 'j9p Gb', 'j1ypta', 'u0p2tb', 'vltWpd', 'hZmp L', 'opRXta']],
[re.sub('[T\sN]+', '', x[::-1]) for x in [
'by BDd', 'zTTaj9G', '5W duTE', 'jN9TGbi', 'LkVTT2a', 'AbvT xm', '= =']],
[re.sub('[h\st]+', '', x[::-1]) for x in [
'bytBD d', 'zajh9 G', '5hWd uE', 'j9Ghhbi', 'Lk V2ta', 'Abvtxhm', '=tt=']],
[re.sub('[ \sx]+', '', x[::-1]) for x in [
'y 9Gxd', 'j 9Gb', '15y xa', 'sxJmxb', 'rN 2xb', 'u QWxZ', '0 F Gb']],
[re.sub('[V\sI]+', '', x[::-1]) for x in [
'y 9IGd', 'j 9GIb', '1VI5ya', 'sJmIIb', 'rN2VIb', 'u QW Z', '=VgXIb']],
[re.sub('[j\so]+', '', x[::-1]) for x in [
'X jd', 'so B', '2oob', 'k oF', 'njoL', 'hjjB', 'nj c', '5 jR']],
[re.sub('[O\sw]+', '', x[::-1]) for x in [
'GwOd', 'v wx', '2wwY', 'uw s', 'Gw c', 'y OF', 'HOOd', '=OOk']],
]]]
self.url_vars = {'search': 'television/torrents/%s.html?sort=added&order=desc', self.url_vars = {'search': 'television/torrents/%s.html?sort=added&order=desc',


@@ -29,8 +29,13 @@ class TorrentDayProvider(generic.TorrentProvider):
def __init__(self):
generic.TorrentProvider.__init__(self, 'TorrentDay')
self.url_home = ['https://%s/' % u for u in 'torrentday.eu', 'secure.torrentday.com', 'tdonline.org',
'torrentday.it', 'www.td.af', 'www.torrentday.com']
self.url_home = ['https://www.torrentday.com/'] + \
['http://td.%s/' % base64.b64decode(x) for x in [''.join(x) for x in [
[re.sub('(?i)[I\s1]+', '', x[::-1]) for x in [
'y92d', 'zl12a', 'y9mY', 'n5 Wa', 'vNmIL', '=i1=Qb']],
[re.sub('(?i)[T\sq]+', '', x[::-1]) for x in [
'15TWd', 'hV 3c', 'lBHb', 'vNncq', 'j5ib', '=qQ02b']],
]]]
self.url_vars = {'login': 'rss.php', 'search': 't?%s%s&qf=&q=%s'}
self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'login': '%(home)s%(vars)s',


@@ -39,14 +39,6 @@ class Torrentz2Provider(generic.TorrentProvider):
'G d', 'yr 9', 'm jc', 'urrV', 'Hr d', 'y ro', 'n rL', '2j R']],
[re.sub('[q\sP]+', '', x[::-1]) for x in [
'cy 9PGd', 'Hdq uVm', 'VnLqxqo', 'vqPxmYu', 'Zlt q2Y', 'G Pd35C', '= Y']],
[re.sub('[F\sJ]+', '', x[::-1]) for x in [
'c y9 Gd', 'HduJFVm', 'VnL Fxo', 'vJFxmYu', 'Zl Ft2Y', 'wJct 5C', '=JJ=']],
[re.sub('[P\sQ]+', '', x[::-1]) for x in [
'y9 GPd', 'uQVmPc', 'yQoHQd', '5PPJmL', 'zFPGQc', 'k QV2c', '6PJmPL']],
[re.sub('[N\sg]+', '', x[::-1]) for x in [
'y9NGgd', 'uV mNc', 'yoNHgd', '5 JgmL', 'zFGg c', 'kV 2c', '1VgNmL']],
[re.sub('[t\sj]+', '', x[::-1]) for x in [
'cy 9G d', 'HdtuVtm', 'JtmLyjo', 'zFG ct5', 'LkVt2jc', 'wjZjy9m', '=tj=']],
]]]
self.url_vars = {'search': 'searchA?f=%s&safe=1', 'searchv': 'verifiedA?f=%s&safe=1'}


@@ -136,8 +136,22 @@ def snatch_episode(result, end_status=SNATCHED):
result.get_data_func = None # consume only once
if not result.url:
return False
if not result.content and result.url.startswith('magnet-'):
if sickbeard.TORRENT_DIR:
filepath = ek.ek(os.path.join, sickbeard.TORRENT_DIR, 'files.txt')
try:
with open(filepath, 'a') as fh:
result.url = result.url[7:]
fh.write('"%s"\t"%s"\n' % (result.url, sickbeard.TV_DOWNLOAD_DIR))
dl_result = True
except IOError:
logger.log(u'Failed to write to %s' % filepath, logger.ERROR)
return False
else:
logger.log(u'Need to set a torrent blackhole folder', logger.ERROR)
return False
# torrents are saved to disk when blackhole mode
if 'blackhole' == sickbeard.TORRENT_METHOD:
elif 'blackhole' == sickbeard.TORRENT_METHOD:
dl_result = _download_result(result)
else:
# make sure we have the torrent file content
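The new branch above handles results that have no downloaded content and whose url starts with 'magnet-': the marker prefix is stripped and the magnet is appended to files.txt inside the configured torrent blackhole folder as a quoted, tab-separated pair of magnet URI and TV download directory, presumably for an external tool to act on. A small sketch of the resulting line format, with hypothetical values standing in for the sickbeard settings:

    # hypothetical values; the real ones come from sickbeard.TORRENT_DIR and sickbeard.TV_DOWNLOAD_DIR
    result_url = 'magnet-magnet:?xt=urn:btih:0123456789abcdef0123456789abcdef01234567'
    tv_download_dir = '/downloads/tv'

    url = result_url[7:]  # drop the 'magnet-' marker
    line = '"%s"\t"%s"\n' % (url, tv_download_dir)
    # appended to <TORRENT_DIR>/files.txt as:  "<magnet uri>"<TAB>"/downloads/tv"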