From e3d7f9fb23149e4843878f155870b350ae86c42c Mon Sep 17 00:00:00 2001
From: JackDandy
Date: Fri, 8 Apr 2016 17:09:58 +0100
Subject: [PATCH] Add Fano torrent provider.

---
 CHANGES.md                          |   1 +
 gui/slick/images/providers/fano.png | Bin 0 -> 868 bytes
 sickbeard/providers/__init__.py     |   3 +-
 sickbeard/providers/fano.py         | 113 ++++++++++++++++++++++++++++
 4 files changed, 116 insertions(+), 1 deletion(-)
 create mode 100644 gui/slick/images/providers/fano.png
 create mode 100644 sickbeard/providers/fano.py

diff --git a/CHANGES.md b/CHANGES.md
index 34a78aeb..6704d314 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -86,6 +86,7 @@
 * Add RevTT torrent provider
 * Add PTF torrent provider
 * Add ILT torrent provider
+* Add Fano torrent provider
 
 
 ### 0.11.11 (2016-04-05 19:20:00 UTC)

diff --git a/gui/slick/images/providers/fano.png b/gui/slick/images/providers/fano.png
new file mode 100644
index 0000000000000000000000000000000000000000..8465f929893a6def1da96e7f3f73d55446ea19d2
GIT binary patch
literal 868
(binary image data omitted)

diff --git a/sickbeard/providers/fano.py b/sickbeard/providers/fano.py
new file mode 100644

import re
import traceback

from . import generic
from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode


class FanoProvider(generic.TorrentProvider):

    def __init__(self):
        generic.TorrentProvider.__init__(self, 'Fano')

        self.url_base = 'https://www.fano.in/'
        self.urls = {'config_provider_home_uri': self.url_base,
                     'login_action': self.url_base + 'login.php',
                     'search': self.url_base + 'browse_old.php?search=%s&%s&incldead=0%s',
                     'get': self.url_base + '%s'}

        self.categories = {'Season': [49], 'Episode': [6, 23, 32, 35], 'anime': [27]}
        self.categories['Cache'] = self.categories['Season'] + self.categories['Episode']

        self.url = self.urls['config_provider_home_uri']

        self.username, self.password, self.freeleech, self.minseed, self.minleech = 5 * [None]

    def _authorised(self, **kwargs):

        return super(FanoProvider, self)._authorised(logged_in=lambda x=None: self.has_all_cookies(['uid', 'pass']))

    def _search_provider(self, search_params, **kwargs):

        results = []
        if not self._authorised():
            return results

        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

        rc = dict((k, re.compile('(?i)' + v))
                  for (k, v) in {'abd': '(\d{4}(?:[.]\d{2}){2})', 'info': 'details', 'get': 'download'}.items())
        for mode in search_params.keys():
            rc['cats'] = re.compile('(?i)cat=(?:%s)' % self._categories_string(mode, template='', delimiter='|'))
            for search_string in search_params[mode]:
                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
                search_string = '+'.join(rc['abd'].sub(r'%22\1%22', search_string).split())
                search_url = self.urls['search'] % (search_string, self._categories_string(mode),
                                                    ('&sgold=on', '')[not self.freeleech])

                html = self.get_url(search_url)

                cnt = len(items[mode])
                try:
                    if not html or self._has_no_results(html):
                        raise generic.HaltParseException

                    with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
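                        # Assumption: the first row of the results table (id='line') is a header
                        # row, so torrent data is read from the second row onward below.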
                        torrent_table = soup.find('table', id='line')
                        torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')

                        if 2 > len(torrent_rows):
                            raise generic.HaltParseException

                        for tr in torrent_rows[1:]:
                            try:
                                seeders, leechers, size = [tryInt(n, n) for n in [
                                    (tr.find_all('td')[x].get_text().strip()) for x in (-2, -1, -4)]]
                                if self._peers_fail(mode, seeders, leechers) or not tr.find('a', href=rc['cats']):
                                    continue

                                title = tr.find('a', href=rc['info']).get_text().strip()

                                download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')

                            except (AttributeError, TypeError, ValueError, IndexError):
                                continue

                            if title and download_url:
                                items[mode].append((title, download_url, seeders, self._bytesizer(size)))

                except generic.HaltParseException:
                    pass
                except Exception:
                    logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

                self._log_search(mode, len(items[mode]) - cnt, search_url)

            self._sort_seeders(mode, items)

            results = list(set(results + items[mode]))

        return results


provider = FanoProvider()
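
Below is a minimal, hypothetical usage sketch (not part of the patch) showing how the new provider could be exercised directly; the credentials and search string are placeholder values, and a configured SickGear environment with a valid fano.in account is assumed.

# Hypothetical sketch only: call the Fano provider outside the normal
# SickGear search flow. Credentials and the search string are placeholders.
from sickbeard.providers import fano

fano_provider = fano.provider
fano_provider.username, fano_provider.password = 'example_user', 'example_pass'
fano_provider.freeleech = False  # set True to append '&sgold=on' to search URLs

# _search_provider() takes a dict mapping a mode name to a list of search strings
results = fano_provider._search_provider({'Episode': ['Example Show S01E02']})

# each result is a (title, download_url, seeders, size_in_bytes) tuple
for title, download_url, seeders, size in results:
    print('%s  seeders=%s  size=%s' % (title, seeders, size))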