From c9745bcfb3645a432b0d7b82eb43fff135c32c90 Mon Sep 17 00:00:00 2001 From: JackDandy Date: Fri, 27 Apr 2018 11:33:26 +0100 Subject: [PATCH 01/20] Prepare develop. --- CHANGES.md | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index ab91ee49..5f4ac222 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,4 +1,12 @@ -### 0.16.0 (2018-04-26 17:10:00 UTC) +### 0.17.0 (2018-xx-xx xx:xx:xx UTC) + + + +[develop changelog] + + + +### 0.16.0 (2018-04-26 17:10:00 UTC) * Change search show result 'exists in db' text into a link to display show page * Change increase namecache size and fix deleting items from it when at capacity From 65923c94ef32de72320f031d954b335eb74d540c Mon Sep 17 00:00:00 2001 From: JackDandy Date: Sat, 28 Apr 2018 15:57:04 +0100 Subject: [PATCH 02/20] =?UTF-8?q?Update=20Tornado=20Web=20Server=205.0.1?= =?UTF-8?q?=20(35a538f)=20=E2=86=92=205.0.1=20(2b2a220a).?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- CHANGES.md | 1 + lib/tornado/ioloop.py | 18 +++++++++++++++++- lib/tornado/web.py | 1 + lib/tornado/websocket.py | 18 +++++------------- 4 files changed, 24 insertions(+), 14 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 5f4ac222..faf1a9d2 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,5 +1,6 @@ ### 0.17.0 (2018-xx-xx xx:xx:xx UTC) +* Update Tornado Web Server 5.0.1 (35a538f) to 5.0.1 (2b2a220a) [develop changelog] diff --git a/lib/tornado/ioloop.py b/lib/tornado/ioloop.py index 48700139..123f2ba5 100644 --- a/lib/tornado/ioloop.py +++ b/lib/tornado/ioloop.py @@ -47,6 +47,7 @@ import threading import time import traceback import math +import random from tornado.concurrent import Future, is_future, chain_future, future_set_exc_info, future_add_done_callback # noqa: E501 from tornado.log import app_log, gen_log @@ -1161,6 +1162,14 @@ class PeriodicCallback(object): Note that the timeout is given in milliseconds, while most other time-related functions in Tornado use seconds. + If ``jitter`` is specified, each callback time will be randomly selected + within a window of ``jitter * callback_time`` milliseconds. + Jitter can be used to reduce alignment of events with similar periods. + A jitter of 0.1 means allowing a 10% variation in callback time. + The window is centered on ``callback_time`` so the total number of calls + within a given interval should not be significantly affected by adding + jitter. + If the callback runs for longer than ``callback_time`` milliseconds, subsequent invocations will be skipped to get back on schedule. @@ -1168,12 +1177,16 @@ class PeriodicCallback(object): .. versionchanged:: 5.0 The ``io_loop`` argument (deprecated since version 4.1) has been removed. + + .. versionchanged:: 5.1 + The ``jitter`` argument is added. """ - def __init__(self, callback, callback_time): + def __init__(self, callback, callback_time, jitter=0): self.callback = callback if callback_time <= 0: raise ValueError("Periodic callback must have a positive callback_time") self.callback_time = callback_time + self.jitter = jitter self._running = False self._timeout = None @@ -1218,6 +1231,9 @@ class PeriodicCallback(object): def _update_next(self, current_time): callback_time_sec = self.callback_time / 1000.0 + if self.jitter: + # apply jitter fraction + callback_time_sec *= 1 + (self.jitter * (random.random() - 0.5)) if self._next_timeout <= current_time: # The period should be measured from the start of one call # to the start of the next. 
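To illustrate the `jitter` behaviour documented above, here is a minimal usage sketch (not part of the patch), assuming the Tornado 5.1 `PeriodicCallback(callback, callback_time, jitter=0)` signature introduced by this change:

```python
# Minimal sketch, assuming the Tornado 5.1 PeriodicCallback jitter API above.
from tornado.ioloop import IOLoop, PeriodicCallback

def heartbeat():
    print('tick')

# callback_time is in milliseconds. With jitter=0.1 each interval is drawn
# from a +/-5% window centred on 10s, i.e.
# callback_time * (1 + 0.1 * (random() - 0.5)), which de-synchronises
# callbacks that share a period without changing the long-run call rate.
pc = PeriodicCallback(heartbeat, callback_time=10 * 1000, jitter=0.1)
pc.start()

if __name__ == '__main__':
    IOLoop.current().start()
```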
If one call takes too long, diff --git a/lib/tornado/web.py b/lib/tornado/web.py index 8a9a8053..4f427729 100644 --- a/lib/tornado/web.py +++ b/lib/tornado/web.py @@ -2825,6 +2825,7 @@ class FallbackHandler(RequestHandler): def prepare(self): self.fallback(self.request) self._finished = True + self.on_finish() class OutputTransform(object): diff --git a/lib/tornado/websocket.py b/lib/tornado/websocket.py index 91c5f1ff..738a9ccb 100644 --- a/lib/tornado/websocket.py +++ b/lib/tornado/websocket.py @@ -19,7 +19,6 @@ the protocol (known as "draft 76") and are not compatible with this module. from __future__ import absolute_import, division, print_function import base64 -import collections import hashlib import os import struct @@ -34,6 +33,7 @@ from tornado.ioloop import IOLoop, PeriodicCallback from tornado.iostream import StreamClosedError from tornado.log import gen_log, app_log from tornado import simple_httpclient +from tornado.queues import Queue from tornado.tcpclient import TCPClient from tornado.util import _websocket_mask, PY3 @@ -1096,8 +1096,7 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection): self.compression_options = compression_options self.connect_future = Future() self.protocol = None - self.read_future = None - self.read_queue = collections.deque() + self.read_queue = Queue(1) self.key = base64.b64encode(os.urandom(16)) self._on_message_callback = on_message_callback self.close_code = self.close_reason = None @@ -1207,12 +1206,8 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection): is given it will be called with the future when it is ready. """ - assert self.read_future is None - future = Future() - if self.read_queue: - future_set_result_unless_cancelled(future, self.read_queue.popleft()) - else: - self.read_future = future + + future = self.read_queue.get() if callback is not None: self.io_loop.add_future(future, callback) return future @@ -1220,11 +1215,8 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection): def on_message(self, message): if self._on_message_callback: self._on_message_callback(message) - elif self.read_future is not None: - future_set_result_unless_cancelled(self.read_future, message) - self.read_future = None else: - self.read_queue.append(message) + return self.read_queue.put(message) def ping(self, data=b''): """Send ping frame to the remote end. From 78c9c7dc5ccfaaab0cf1bf00e732a1c4feb62f8e Mon Sep 17 00:00:00 2001 From: JackDandy Date: Sat, 28 Apr 2018 16:06:30 +0100 Subject: [PATCH 03/20] Add Xspeeds torrent provider. --- CHANGES.md | 1 + sickbeard/providers/__init__.py | 3 +- sickbeard/providers/generic.py | 4 +- sickbeard/providers/xspeeds.py | 209 ++++++++++++++++++++++++++++++++ 4 files changed, 214 insertions(+), 3 deletions(-) create mode 100644 sickbeard/providers/xspeeds.py diff --git a/CHANGES.md b/CHANGES.md index faf1a9d2..d905a383 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,6 +1,7 @@ ### 0.17.0 (2018-xx-xx xx:xx:xx UTC) * Update Tornado Web Server 5.0.1 (35a538f) to 5.0.1 (2b2a220a) +* Add Xspeeds torrent provider [develop changelog] diff --git a/sickbeard/providers/__init__.py b/sickbeard/providers/__init__.py index 111f693b..f7f0858c 100755 --- a/sickbeard/providers/__init__.py +++ b/sickbeard/providers/__init__.py @@ -31,7 +31,7 @@ from . 
import alpharatio, alphareign, beyondhd, bithdtv, bitmetv, blutopia, btn, iptorrents, limetorrents, magnetdl, morethan, nebulance, ncore, nyaa, pisexy, potuk, pretome, privatehd, ptf, \ rarbg, revtt, scenehd, scenetime, shazbat, showrss, skytorrents, speedcd, \ thepiratebay, torlock, torrentday, torrenting, torrentleech, \ - torrentz2, tvchaosuk, wop, zooqle + torrentz2, tvchaosuk, wop, xspeeds, zooqle # anime from . import anizb, tokyotoshokan # custom @@ -89,6 +89,7 @@ __all__ = ['omgwtfnzbs', 'torrentz2', 'tvchaosuk', 'wop', + 'xspeeds', 'zooqle', 'tokyotoshokan', ] diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py index de2b9051..f2dc431e 100644 --- a/sickbeard/providers/generic.py +++ b/sickbeard/providers/generic.py @@ -1109,9 +1109,9 @@ class GenericProvider(object): """ if not self.should_skip(): str1, thing, str3 = (('', '%s item' % mode.lower(), ''), (' usable', 'proper', ' found'))['Propers' == mode] - logger.log(u'%s %s in response from %s' % (('No' + str1, count)[0 < count], ( + logger.log((u'%s %s in response from %s' % (('No' + str1, count)[0 < count], ( '%s%s%s%s' % (('', 'freeleech ')[getattr(self, 'freeleech', False)], thing, maybe_plural(count), str3)), - re.sub('(\s)\s+', r'\1', url))) + re.sub('(\s)\s+', r'\1', url))).replace('%%', '%')) def check_auth_cookie(self): diff --git a/sickbeard/providers/xspeeds.py b/sickbeard/providers/xspeeds.py new file mode 100644 index 00000000..90481b7e --- /dev/null +++ b/sickbeard/providers/xspeeds.py @@ -0,0 +1,209 @@ +# coding=utf-8 +# +# Author: SickGear +# +# This file is part of SickGear. +# +# SickGear is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# SickGear is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with SickGear. If not, see . + +import re +import traceback + +from . 
import generic +from sickbeard import logger +from sickbeard.bs4_parser import BS4Parser +from sickbeard.helpers import tryInt, has_anime +from lib.unidecode import unidecode + + +class XspeedsProvider(generic.TorrentProvider): + + def __init__(self): + + generic.TorrentProvider.__init__(self, 'Xspeeds') + + self.url_base = 'https://www.xspeeds.eu/' + self.urls = {'config_provider_home_uri': self.url_base, + 'login_action': self.url_base + 'login.php', + 'edit': self.url_base + 'usercp.php?act=edit_details', + 'search': self.url_base + 'browse.php'} + + self.categories = {'Season': [94, 21], 'Episode': [91, 74, 54, 20, 47, 16], 'anime': [70]} + self.categories['Cache'] = self.categories['Season'] + self.categories['Episode'] + + self.url = self.urls['config_provider_home_uri'] + + self.username, self.password, self.freeleech, self.minseed, self.minleech = 5 * [None] + + def _authorised(self, **kwargs): + + return super(XspeedsProvider, self)._authorised( + logged_in=(lambda y=None: self.has_all_cookies(pre='c_secure_')), post_params={'form_tmpl': True}) + + def _search_provider(self, search_params, **kwargs): + + results = [] + if not self._authorised(): + return results + + items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []} + + rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'detail', 'get': 'download', 'fl': 'free'}.items()) + for mode in search_params.keys(): + save_url, restore = self._set_categories(mode) + if self.should_skip(): + return results + for search_string in search_params[mode]: + search_string = search_string.replace(u'£', '%') + search_string = re.sub('[\s\.]+', '%', search_string) + search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string + + kwargs = dict(post_data={'keywords': search_string, 'do': 'quick_sort', 'page': '0', + 'category': '0', 'search_type': 't_name', 'sort': 'added', + 'order': 'desc', 'daysprune': '-1'}) + + html = self.get_url(self.urls['search'], **kwargs) + if self.should_skip(): + return results + + cnt = len(items[mode]) + try: + if not html or self._has_no_results(html): + raise generic.HaltParseException + + with BS4Parser(html, 'html.parser') as soup: + torrent_table = soup.find('table', id='sortabletable') + torrent_rows = [] if not torrent_table else torrent_table.find_all('tr') + get_detail = True + + if 2 > len(torrent_rows): + raise generic.HaltParseException + + head = None + for tr in torrent_rows[1:]: + cells = tr.find_all('td') + if 6 > len(cells): + continue + try: + head = head if None is not head else self._header_row(tr) + seeders, leechers, size = [tryInt(n, n) for n in [ + cells[head[x]].get_text().strip() for x in 'seed', 'leech', 'size']] + if self._peers_fail(mode, seeders, leechers) \ + or self.freeleech and None is cells[1].find('img', title=rc['fl']): + continue + + info = tr.find('a', href=rc['info']) + title = (tr.find('div', class_='tooltip-content').get_text() or info.get_text()).strip() + title = re.findall('(?m)(^[^\r\n]+)', title)[0] + download_url = self._link(tr.find('a', href=rc['get'])['href']) + except (StandardError, Exception): + continue + + if get_detail and title.endswith('...'): + try: + with BS4Parser(self.get_url('%s%s' % ( + self.urls['config_provider_home_uri'], info['href'].lstrip('/').replace( + self.urls['config_provider_home_uri'], ''))), + 'html.parser') as soup_detail: + title = soup_detail.find( + 'td', class_='thead', attrs={'colspan': '3'}).get_text().strip() + title = re.findall('(?m)(^[^\r\n]+)', title)[0] + except 
IndexError: + continue + except (StandardError, Exception): + get_detail = False + + title = self.regulate_title(title) + if download_url and title: + items[mode].append((title, download_url, seeders, self._bytesizer(size))) + + except generic.HaltParseException: + pass + except (StandardError, Exception): + logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) + + self._log_search(mode, len(items[mode]) - cnt, + ('search string: ' + search_string.replace('%', '%%'), self.name)['Cache' == mode]) + + if mode in 'Season' and len(items[mode]): + break + + if save_url: + self.get_url(save_url, post_data=restore) + + results = self._sort_seeding(mode, results + items[mode]) + + return results + + def _set_categories(self, mode): + # set up categories + html = self.get_url(self.urls['edit']) + if self.should_skip(): + return None, None + try: + form = re.findall('(?is).*()', html)[0] + save_url = self._link(re.findall('(?i)action="([^"]+?)"', form)[0]) + tags = re.findall(r'(?is)(]*?name=[\'"][^\'"]+[^>]*)', form) + except (StandardError, Exception): + return None, None + + cats, params = [], {} + attrs = [[(re.findall(r'(?is)%s=[\'"]([^\'"]+)' % attr, c) or [''])[0] + for attr in ['type', 'name', 'value', 'checked']] for c in tags] + for itype, name, value, checked in attrs: + if 'cat' == name[0:3] and 'checkbox' == itype.lower(): + if any(checked): + try: + cats += [re.findall('(\d+)[^\d]*$', name)[0]] + except IndexError: + pass + elif 'hidden' == itype.lower() or 'nothing' in name or \ + (itype.lower() in ['checkbox', 'radio'] and any(checked)): + params[name] = value + selects = re.findall('(?is)()', form) + for select in selects: + name, values, index = None, None, 0 + try: + name = re.findall('(?is) Date: Tue, 8 May 2018 18:40:05 +0100 Subject: [PATCH 04/20] Add HDME torrent provider. --- CHANGES.md | 1 + gui/slick/images/providers/hdme.png | Bin 0 -> 528 bytes sickbeard/providers/__init__.py | 3 +- sickbeard/providers/generic.py | 2 +- sickbeard/providers/hdme.py | 118 ++++++++++++++++++++++++++++ 5 files changed, 122 insertions(+), 2 deletions(-) create mode 100644 gui/slick/images/providers/hdme.png create mode 100644 sickbeard/providers/hdme.py diff --git a/CHANGES.md b/CHANGES.md index 14ce99ba..8b779d7a 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,6 +1,7 @@ ### 0.17.0 (2018-xx-xx xx:xx:xx UTC) * Update Tornado Web Server 5.0.1 (35a538f) to 5.0.1 (2b2a220a) +* Add HDME torrent provider * Add Xspeeds torrent provider diff --git a/gui/slick/images/providers/hdme.png b/gui/slick/images/providers/hdme.png new file mode 100644 index 0000000000000000000000000000000000000000..b5e23ee6734a3682bb30d5c3c21a31162946e16e GIT binary patch literal 528 zcmV+r0`L8aP)Px#1ZP1_K>z@;j|==^1poj532;bRa{vGi!T-w1< zdmn~j$bZ0q_xml|?N-+7H90VyPUUz!%HePz2jIc%^?I>t!1MW(csx#k0~U)#n$0Ew zj+xzV7mvpytJR7g4Y=R$8t`~L2rM-k4FVk6>2zX%>C=GQ?Iy8UOl&qAYcv|=QoUX$ zz^~UUeP$1?0hh~#OK!JYYPA{x({~OJP%IYtp!50s6@XWZL?Q$@fTe1+DwRrw9NKU= z)ng;wGhI|5?>-tZN S1lK(P0000. + +import re +import traceback + +from . 
import generic +from sickbeard import logger +from sickbeard.bs4_parser import BS4Parser +from sickbeard.helpers import tryInt +from lib.unidecode import unidecode + + +class HDMEProvider(generic.TorrentProvider): + + def __init__(self): + generic.TorrentProvider.__init__(self, 'HDME') + + self.url_home = ['https://www.hdme.eu'] + + self.url_vars = {'login_action': 'login.php', 'search': 'browse.php?search=%s&%s&incldead=%s'} + self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'login_action': '%(home)s%(vars)s', + 'search': '%(home)s%(vars)s'} + + self.categories = {'Season': [34], 'Episode': [38, 39]} + self.categories['Cache'] = self.categories['Season'] + self.categories['Episode'] + + self.username, self.password, self.freeleech, self.minseed, self.minleech = 5 * [None] + + def _authorised(self, **kwargs): + + return super(HDMEProvider, self)._authorised(post_params={'form_tmpl': True}) + + def _search_provider(self, search_params, **kwargs): + + results = [] + if not self._authorised(): + return results + + items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []} + + rc = dict((k, re.compile('(?i)' + v)) for (k, v) in { + 'info': 'detail', 'get': 'download', 'fl': '\(Freeleech\)'}.items()) + for mode in search_params.keys(): + for search_string in search_params[mode]: + search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string + search_url = self.urls['search'] % (search_string, self._categories_string(mode), + ('3', '0')[not self.freeleech]) + + html = self.get_url(search_url, timeout=90) + if self.should_skip(): + return results + + cnt = len(items[mode]) + try: + if not html or self._has_no_results(html): + raise generic.HaltParseException + + html = html.replace('', '') + html = re.sub(r'(?s)(.*)(]*?950[^>]*>.*)()', r'\1\3', html) + html = re.sub(r'(?s)]+font[^>]+>', '
', html) + html = re.sub(r'(?s)(]+>(?!<[ab]).*?)(?:(?:)+)', r'\1', html) + html = re.sub(r'(?m)^
', r'', html) + with BS4Parser(html, features=['html5lib', 'permissive'], attr='id="parse"') as soup: + torrent_table = soup.find('table', id='parse') + torrent_rows = [] if not torrent_table else torrent_table.find_all('tr') + + if 2 > len(torrent_rows): + raise generic.HaltParseException + + head = None + for tr in torrent_rows[1:]: + cells = tr.find_all('td') + if 5 > len(cells): + continue + try: + head = head if None is not head else self._header_row(tr) + seeders, leechers, size = [tryInt(n, n) for n in [ + cells[head[x]].get_text().strip() for x in 'seed', 'leech', 'size']] + if self._peers_fail(mode, seeders, leechers): + continue + + info = tr.find('a', href=rc['info']) + title = (info.attrs.get('title') or info.get_text().split()[0]).strip() + download_url = self._link(tr.find('a', href=rc['get'])['href']) + except (AttributeError, TypeError, ValueError, KeyError): + continue + + if title and download_url: + items[mode].append((title, download_url, seeders, self._bytesizer(size))) + + except generic.HaltParseException: + pass + except (StandardError, Exception): + logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) + + self._log_search(mode, len(items[mode]) - cnt, search_url) + + results = self._sort_seeding(mode, results + items[mode]) + + return results + + +provider = HDMEProvider() From 36251ec71fbb83800f79ddba3b73b9a58490b997 Mon Sep 17 00:00:00 2001 From: JackDandy Date: Thu, 17 May 2018 01:07:25 +0100 Subject: [PATCH 05/20] Add ImmortalSeed torrent provider. --- CHANGES.md | 1 + gui/slick/images/providers/immortalseed.png | Bin 0 -> 466 bytes .../interfaces/default/config_providers.tmpl | 12 +- sickbeard/providers/__init__.py | 3 +- sickbeard/providers/immortalseed.py | 113 ++++++++++++++++++ 5 files changed, 125 insertions(+), 4 deletions(-) create mode 100644 gui/slick/images/providers/immortalseed.png create mode 100644 sickbeard/providers/immortalseed.py diff --git a/CHANGES.md b/CHANGES.md index 4af98b75..4447628a 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -2,6 +2,7 @@ * Update Tornado Web Server 5.0.1 (35a538f) to 5.0.1 (2b2a220a) * Add HDME torrent provider +* Add ImmortalSeed torrent provider * Add Xspeeds torrent provider diff --git a/gui/slick/images/providers/immortalseed.png b/gui/slick/images/providers/immortalseed.png new file mode 100644 index 0000000000000000000000000000000000000000..5c7ec08a37b97244a03dbece8ab49c09c8be1954 GIT binary patch literal 466 zcmV;@0WJQCP)MwjyJU zkLAatQmR8BfvmN9=>{Yi)@{qBLYT&(R_oGK%?C2a$SOjB^yP$x;reU-0ZFo&m0}Vl zm{A6q4l-{J0KN6Pf)d4x6$-6isj literal 0 HcmV?d00001 diff --git a/gui/slick/interfaces/default/config_providers.tmpl b/gui/slick/interfaces/default/config_providers.tmpl index 7a1f89cc..d52ebd20 100644 --- a/gui/slick/interfaces/default/config_providers.tmpl +++ b/gui/slick/interfaces/default/config_providers.tmpl @@ -451,11 +451,17 @@ #end if #if $hasattr($cur_torrent_provider, 'api_key'): + #set $field_name = cur_torrent_provider.get_id() + '_api_key'
-
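Both providers added above (Xspeeds and HDME) follow the same row-parsing pattern: map column positions from the header row, coerce seeders/leechers/size with a tolerant integer conversion, and drop rows that fail the minimum-peer check. A rough standalone sketch of that pattern follows; the BeautifulSoup calls are real, but `try_int`, the hard-coded column indices and the thresholds are illustrative stand-ins for the SickGear helpers (`tryInt`, `_header_row`, `_peers_fail`):

```python
# Illustrative sketch of the row-parsing pattern shared by the new providers.
# try_int, the column indices and thresholds are stand-ins, not SickGear APIs.
from bs4 import BeautifulSoup

def try_int(value, default=0):
    """Coerce to int, falling back to a default (mirrors the tryInt(n, n) idiom)."""
    try:
        return int(value)
    except (TypeError, ValueError):
        return default

def parse_rows(html, min_seed=1, min_leech=0):
    results = []
    soup = BeautifulSoup(html, 'html.parser')
    table = soup.find('table', id='sortabletable')
    rows = table.find_all('tr') if table else []
    for tr in rows[1:]:                      # first row is the header
        cells = tr.find_all('td')
        if len(cells) < 8:
            continue
        # assumed column order: size, seeders, leechers (the providers map
        # these positions from the header row instead of hard-coding them)
        size = cells[5].get_text().strip()
        seeders = try_int(cells[6].get_text().strip())
        leechers = try_int(cells[7].get_text().strip())
        if seeders < min_seed or leechers < min_leech:
            continue                         # same idea as _peers_fail()
        link = tr.find('a', href=True)
        if link:
            results.append((link['href'], seeders, leechers, size))
    return results
```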
diff --git a/sickbeard/providers/__init__.py b/sickbeard/providers/__init__.py index 58e7c991..89398b5b 100755 --- a/sickbeard/providers/__init__.py +++ b/sickbeard/providers/__init__.py @@ -27,7 +27,7 @@ from sickbeard import logger, encodingKludge as ek from . import newznab, omgwtfnzbs # torrent from . import alpharatio, alphareign, beyondhd, bithdtv, bitmetv, blutopia, btn, btscene, dh, ettv, eztv, \ - fano, filelist, funfile, grabtheinfo, hdbits, hdme, hdspace, hdtorrents, \ + fano, filelist, funfile, grabtheinfo, hdbits, hdme, hdspace, hdtorrents, immortalseed, \ iptorrents, limetorrents, magnetdl, morethan, nebulance, ncore, nyaa, pisexy, potuk, pretome, privatehd, ptf, \ rarbg, revtt, scenehd, scenetime, shazbat, showrss, skytorrents, speedcd, \ thepiratebay, torlock, torrentday, torrenting, torrentleech, \ @@ -62,6 +62,7 @@ __all__ = ['omgwtfnzbs', 'hdme', 'hdspace', 'hdtorrents', + 'immortalseed', 'iptorrents', 'limetorrents', 'magnetdl', diff --git a/sickbeard/providers/immortalseed.py b/sickbeard/providers/immortalseed.py new file mode 100644 index 00000000..619e703b --- /dev/null +++ b/sickbeard/providers/immortalseed.py @@ -0,0 +1,113 @@ +# coding=utf-8 +# +# Author: SickGear +# +# This file is part of SickGear. +# +# SickGear is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# SickGear is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with SickGear. If not, see . + +import re +import time + +from . 
import generic +from sickbeard.helpers import tryInt +from lib.unidecode import unidecode +import feedparser +import sickbeard + + +class ImmortalSeedProvider(generic.TorrentProvider): + + def __init__(self): + + generic.TorrentProvider.__init__(self, 'ImmortalSeed') + + self.url_base = 'https://immortalseed.me/' + self.urls = {'config_provider_home_uri': self.url_base, + 'search': self.url_base + 'rss.php?feedtype=download&timezone=0&showrows=100' + '&%s&categories=%s&incl=%s'} + + self.categories = {'Season': [6, 4], 'Episode': [8, 48, 9], 'anime': [32]} + self.categories['Cache'] = self.categories['Season'] + self.categories['Episode'] + + self.url = self.urls['config_provider_home_uri'] + + self.api_key, self.minseed, self.minleech = 3 * [None] + + def _check_auth(self, **kwargs): + try: + secret_key = 'secret_key=' + re.split('secret_key\s*=\s*([0-9a-zA-Z]+)', self.api_key)[1] + except (StandardError, Exception): + raise sickbeard.exceptions.AuthException('Invalid secret key for %s in Media Providers/Options' % self.name) + + if secret_key != self.api_key: + self.api_key = secret_key + sickbeard.save_config() + + return True + + def _search_provider(self, search_params, **kwargs): + + results = [] + + items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []} + + rc = dict((k, re.compile('(?i)' + v)) for (k, v) in { + 'seed': 'seed[^\d/]+([\d]+)', 'leech': 'leech[^\d/]+([\d]+)', + 'size': 'size[^\d/]+([^/]+)', 'get': '(.*download.*)', 'title': 'NUKED\b\.(.*)$'}.items()) + for mode in search_params.keys(): + for search_string in search_params[mode]: + search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string + search_string = search_string.replace(' ', '.') + + search_url = self.urls['search'] % ( + self.api_key, self._categories_string(mode, template='%s', delimiter=','), search_string) + + resp = self.get_url(search_url) + if self.should_skip(): + return results + + data = feedparser.parse(resp) + tr = data and data.get('entries', []) or [] + + cnt = len(items[mode]) + for item in tr: + try: + seeders, leechers, size = [tryInt(n, n) for n in [ + rc[x].findall(item.summary)[0].strip() for x in 'seed', 'leech', 'size']] + if self._peers_fail(mode, seeders, leechers): + continue + title = rc['title'].sub(r'\1', item.title.strip()) + download_url = self._link(rc['get'].findall(getattr(item, 'link', ''))[0]) + except (StandardError, Exception): + continue + + if download_url and title: + items[mode].append((title, download_url, seeders, self._bytesizer(size))) + + time.sleep(1.1) + self._log_search(mode, len(items[mode]) - cnt, search_url) + + results = self._sort_seeding(mode, results + items[mode]) + + return results + + def ui_string(self, key): + return ('%s_api_key' % self.get_id()) == key and 'Secret key' or \ + ('%s_api_key_tip' % self.get_id()) == key and \ + '\'secret_key=\' from the generated RSS link at %s' % \ + (self.url_base, self.name) or '' + + +provider = ImmortalSeedProvider() From aab67a45f74d668dd75d445f076da2c7748351b1 Mon Sep 17 00:00:00 2001 From: JackDandy Date: Mon, 14 May 2018 03:21:08 +0100 Subject: [PATCH 06/20] Change consolidate provider filters into 'Only allow releases that are'. Add provider filters, Only allow releases that are ... 'scene releases (srrDB/predb listed)', 'or contain' text or regex, 'non scene if no recent search results', 'non scene if no active search results', 'not scene nuked', and 'nuked if no active search results'. Add scene release checking to PROPER search task. 
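The ImmortalSeed provider above works entirely from its RSS feed: `_check_auth` normalises whatever the user pastes down to a `secret_key=<key>` value, and `_search_provider` hands the RSS URL to feedparser, regex-matching seeders and leechers out of each entry's summary. A condensed sketch of those two steps follows; the URL template and summary layout are taken from the provider code above and are otherwise unverified assumptions (ValueError stands in for SickGear's AuthException):

```python
# Condensed sketch of the ImmortalSeed flow shown above. URL template and
# summary field layout are assumptions taken from the provider code.
import re
import feedparser

def normalise_secret_key(pasted_value):
    """Reduce a pasted RSS link to 'secret_key=<key>' (mirrors _check_auth above)."""
    match = re.search(r'secret_key\s*=\s*([0-9a-zA-Z]+)', pasted_value)
    if not match:
        raise ValueError('Invalid secret key')
    return 'secret_key=' + match.group(1)

def fetch_items(secret_key, categories='6,4,8,48,9', search='show.name.s01e01'):
    url = ('https://immortalseed.me/rss.php?feedtype=download&timezone=0&showrows=100'
           '&%s&categories=%s&incl=%s' % (secret_key, categories, search))
    feed = feedparser.parse(url)
    stats = dict((x, re.compile(r'(?i)%s[^\d/]+(\d+)' % x)) for x in ('seed', 'leech'))
    items = []
    for entry in feed.get('entries', []):
        summary = getattr(entry, 'summary', '')
        seeders = stats['seed'].findall(summary)
        leechers = stats['leech'].findall(summary)
        if seeders and leechers:
            items.append((entry.title, entry.link, seeders[0], leechers[0]))
    return items
```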
Change refactor core Proper functions. Pep8 common tests. --- CHANGES.md | 5 + gui/slick/interfaces/default/config.tmpl | 6 +- .../interfaces/default/config_providers.tmpl | 250 +- lib/cfscrape.py | 4 +- readme.md | 2 + sickbeard/__init__.py | 148 +- sickbeard/properFinder.py | 461 +- sickbeard/providers/generic.py | 7 + sickbeard/providers/newznab.py | 18 +- sickbeard/providers/scenehd.py | 2 +- sickbeard/search.py | 149 +- sickbeard/webserve.py | 62 +- tests/common_tests.py | 4365 +++++++++++------ 13 files changed, 3532 insertions(+), 1947 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 4447628a..246272c8 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -4,6 +4,11 @@ * Add HDME torrent provider * Add ImmortalSeed torrent provider * Add Xspeeds torrent provider +* Change consolidate provider filters into 'Only allow releases that are' +* Add provider filters, Only allow releases that are ... + 'scene releases (srrDB/predb listed)', 'or contain' text or regex, + 'non scene if no recent search results', 'non scene if no active search results', + 'not scene nuked', and 'nuked if no active search results' [develop changelog] diff --git a/gui/slick/interfaces/default/config.tmpl b/gui/slick/interfaces/default/config.tmpl index 9526de0c..d264d67e 100644 --- a/gui/slick/interfaces/default/config.tmpl +++ b/gui/slick/interfaces/default/config.tmpl @@ -38,8 +38,10 @@ Homepagehttps://github.com/SickGear/SickGear/wiki Sourcehttps://github.com/SickGear/SickGear/ Internet Relay Chat#SickGear on irc.freenode.net - Powered byPython, HTML5, jQuery, SQLite, TheTVDB, Trakt.tv, Fanart.tv, TMDb, GitHub -  This project uses the TMDb API but is not endorsed or certified by TMDb. + Powered byPython, HTML5, jQuery, SQLite, Regex, CSS, Javascript, Tornado webserver +  Huge thanks to Jetbrains for PyCharm IDE, trust them with your development project + Credits toAlso; TheTVDB, Trakt.tv, TVMaze, Fanart.tv, IMDb, TheXem, srrDB, Predb, and GitHub +  This project uses the TMDb API but is not endorsed or certified by TMDb diff --git a/gui/slick/interfaces/default/config_providers.tmpl b/gui/slick/interfaces/default/config_providers.tmpl index d52ebd20..3b794898 100644 --- a/gui/slick/interfaces/default/config_providers.tmpl +++ b/gui/slick/interfaces/default/config_providers.tmpl @@ -28,32 +28,20 @@ #if not $sickbeard.USE_TORRENTS $methods_notused.append('Torrent') #end if - +#slurp #if $sickbeard.USE_NZBS or $sickbeard.USE_TORRENTS
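The new "or contain" filter described in this change accepts either plain text or a regex matched against the release name. Purely to illustrate the concept (this is not the SickGear implementation, and the `regex:` prefix convention here is invented for the example), a standalone allow-filter could look like:

```python
# Illustration only: a stand-alone "only allow releases that ... or contain"
# style filter. A term starting with 'regex:' is treated as a pattern,
# otherwise as plain text; this mirrors the idea, not SickGear's code.
import re

def release_allowed(release_name, allow_terms):
    """Return True if the release name matches any allowed text/regex term."""
    for term in allow_terms:
        if term.startswith('regex:'):
            if re.search(term[len('regex:'):], release_name, flags=re.I):
                return True
        elif term.lower() in release_name.lower():
            return True
    return False

# Example: keep proper/repack releases or anything from a trusted group.
print(release_allowed('Show.Name.S01E01.PROPER.720p.HDTV.x264-GRP',
                      ['regex:\\b(proper|repack)\\b', '-GRP']))  # True
```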