From c4771327103dd1e92b185f945f3c5d4290c757d4 Mon Sep 17 00:00:00 2001
From: JackDandy
Date: Fri, 25 Sep 2015 16:09:31 +0100
Subject: [PATCH] Enable Alpha Ratio again now that the secure login page over
 https is fixed.

---
 CHANGES.md                                |   4 +-
 gui/slick/images/providers/alpharatio.png | Bin 0 -> 664 bytes
 sickbeard/__init__.py                     |   2 +-
 sickbeard/providers/__init__.py           |   1 +
 sickbeard/providers/alpharatio.py         | 147 ++++++++++++++++++++++
 5 files changed, 152 insertions(+), 2 deletions(-)
 create mode 100644 gui/slick/images/providers/alpharatio.png
 create mode 100644 sickbeard/providers/alpharatio.py

diff --git a/CHANGES.md b/CHANGES.md
index 3e5a73ab..e3e46edc 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -39,11 +39,13 @@
 * Change default de-referrer url to blank
 * Change javascript urls in templates to allow proper caching
 * Change downloads to prevent cache misfiring with "Result is not a valid torrent file"
-* Change remove AlphaRatio until their secure login page over https is fixed
 * Add Torrenting provider
 * Add FunFile torrent provider
 * Remove unnecessary call to indexers during nameparsing
+
+[develop changelog]
+Enable Alpha Ratio again now that the secure login page over https is fixed
 
 ### 0.10.0 (2015-08-06 11:05:00 UTC)
 
 * Remove EZRSS provider

diff --git a/gui/slick/images/providers/alpharatio.png b/gui/slick/images/providers/alpharatio.png
new file mode 100644
index 0000000000000000000000000000000000000000..1b483588011cef4d77d440b8626c63c3a14db1f7
GIT binary patch
literal 664
[664 bytes of base85-encoded PNG data omitted]

diff --git a/sickbeard/providers/alpharatio.py b/sickbeard/providers/alpharatio.py
new file mode 100644

import re
import datetime
import traceback

from . import generic
from sickbeard import logger, tvcache, helpers
from sickbeard.bs4_parser import BS4Parser
from lib.unidecode import unidecode


class AlphaRatioProvider(generic.TorrentProvider):

    def __init__(self):

        generic.TorrentProvider.__init__(self, 'AlphaRatio')

        self.url_base = 'https://alpharatio.cc/'
        self.urls = {'config_provider_home_uri': self.url_base,
                     'login': self.url_base + 'login.php',
                     'search': self.url_base + 'torrents.php?searchstr=%s'
                               + '&tags_type=1&order_by=time&order_way=desc'
                               + '&filter_cat[1]=1&filter_cat[2]=1&filter_cat[3]=1&filter_cat[4]=1&filter_cat[5]=1'
                               + '&action=basic&searchsubmit=1',
                     'get': self.url_base + '%s'}

        self.url = self.urls['config_provider_home_uri']

        self.username, self.password, self.minseed, self.minleech = 4 * [None]
        self.cache = AlphaRatioCache(self)

    def _do_login(self):

        # a 'session' cookie indicates an already authenticated session
        logged_in = lambda: 'session' in self.session.cookies
        if logged_in():
            return True

        if self._check_auth():
            login_params = {'username': self.username, 'password': self.password, 'keeplogged': '1', 'login': 'Login'}
            response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session)
            if response and logged_in():
                return True

            msg = u'Failed to authenticate with %s, abort provider'
            if response and 'Invalid Username/password' in response:
                msg = u'Invalid username or password for %s. Check settings'
            logger.log(msg % self.name, logger.ERROR)

        return False

    def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):

        results = []
        if not self._do_login():
            return results

        items = {'Season': [], 'Episode': [], 'Cache': []}

        # case-insensitive patterns used to find the detail ('view') and download links in each row
        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'view', 'get': 'download'}.items())
        for mode in search_params.keys():
            for search_string in search_params[mode]:

                if isinstance(search_string, unicode):
                    search_string = unidecode(search_string)

                search_url = self.urls['search'] % search_string
                html = self.get_url(search_url)

                cnt = len(items[mode])
                try:
                    if not html or self._has_no_results(html):
                        raise generic.HaltParseException

                    with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
                        torrent_table = soup.find('table', attrs={'id': 'torrent_table'})
                        torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')

                        if 2 > len(torrent_rows):
                            raise generic.HaltParseException

                        for tr in torrent_rows[1:]:
                            try:
                                # last two cells hold the seed and leech counts
                                seeders, leechers = [int(tr.find_all('td')[x].get_text().strip()) for x in (-2, -1)]
                                if mode != 'Cache' and (seeders < self.minseed or leechers < self.minleech):
                                    continue

                                title = tr.find('a', title=rc['info']).get_text().strip()

                                # hrefs are HTML-escaped, so decode '&amp;' before building the download url
                                link = str(tr.find('a', title=rc['get'])['href']).replace('&amp;', '&').lstrip('/')
                                download_url = self.urls['get'] % link
                            except (AttributeError, TypeError):
                                continue

                            if title and download_url:
                                items[mode].append((title, download_url, seeders))

                except generic.HaltParseException:
                    pass
                except Exception:
                    logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
                self._log_result(mode, len(items[mode]) - cnt, search_url)

            items[mode].sort(key=lambda tup: tup[2], reverse=True)

            results += items[mode]

        return results

    def find_propers(self, search_date=datetime.datetime.today()):

        return self._find_propers(search_date)

    def _get_episode_search_strings(self, ep_obj, add_string='', **kwargs):

        return generic.TorrentProvider._get_episode_search_strings(self, ep_obj, add_string, use_or=False)


class AlphaRatioCache(tvcache.TVCache):

    def __init__(self, this_provider):
        tvcache.TVCache.__init__(self, this_provider)

        self.minTime = 20  # cache update frequency

    def _getRSSData(self):

        return self.provider.get_cache_data()


provider = AlphaRatioProvider()
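
For illustration only, not part of the patch: a minimal standalone sketch of how the 'search' URL template defined in AlphaRatioProvider above expands for a given search string. The show name used below is a made-up example.

    # Standalone sketch of the provider's 'search' template; the show name is hypothetical.
    url_base = 'https://alpharatio.cc/'
    search_template = (url_base + 'torrents.php?searchstr=%s'
                       '&tags_type=1&order_by=time&order_way=desc'
                       '&filter_cat[1]=1&filter_cat[2]=1&filter_cat[3]=1&filter_cat[4]=1&filter_cat[5]=1'
                       '&action=basic&searchsubmit=1')

    # prints the full AlphaRatio search url with the show name substituted for %s
    print(search_template % 'Example.Show.S01E01')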