# coding=utf-8
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.

import re
import time
import traceback

from . import generic
from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode


class GFTrackerProvider(generic.TorrentProvider):

    def __init__(self):
        generic.TorrentProvider.__init__(self, 'GFTracker', cache_update_freq=17)

        self.url_base = 'https://thegft.org/'
        self.urls = {'config_provider_home_uri': self.url_base,
                     'login_init': self.url_base + 'login.php',
                     'login': self.url_base + 'loginsite.php',
                     'browse': self.url_base + 'browse.php?view=0&%s&searchtype=1%s',
                     'search': '&search=%s'}

        self.categories = {'shows': [4, 17, 19, 26, 37, 47], 'anime': [16]}

        self.url = self.urls['config_provider_home_uri']

        self.username, self.password, self.minseed, self.minleech = 4 * [None]

    def _authorised(self, **kwargs):

        return super(GFTrackerProvider, self)._authorised(
            logged_in=(lambda y=None: self.has_all_cookies(pre='gft_')),
            url=[self.urls['login_init']])

    def _search_provider(self, search_params, **kwargs):

        results = []
        if not self._authorised():
            return results

        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

        # link matchers, plus patterns to split the combined seeders/leechers cell
        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {
            'info': 'details', 'get': 'download',
            'seeders': r'(^\d+)', 'leechers': r'(\d+)$'}.items())
        for mode in search_params.keys():
            for search_string in search_params[mode]:
                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
                search_url = self.urls['browse'] % (self._categories_string(mode),
                                                    (self.urls['search'] % search_string, '')['Cache' == mode])

                html = self.get_url(search_url)

                cnt = len(items[mode])
                try:
                    if not html or self._has_no_results(html):
                        # no results; pause briefly before halting this parse
                        time.sleep(1.1)
                        raise generic.HaltParseException

                    with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
                        torrent_table = soup.find('div', id='torrentBrowse').find('table')
                        torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')

                        if 2 > len(torrent_rows):
                            raise generic.HaltParseException

                        head = None
                        for tr in torrent_rows[1:]:
                            cells = tr.find_all('td')
                            if 3 > len(cells):
                                continue
                            try:
                                # resolve the column header map once per table
                                head = head if None is not head else self._header_row(tr)
                                # seeders and leechers share one cell; split the text with the regexes above
                                seeders, leechers = 2 * [cells[head['seed']].get_text().strip()]
                                seeders, leechers = [tryInt(n) for n in [
                                    rc['seeders'].findall(seeders)[0], rc['leechers'].findall(leechers)[0]]]
                                if self._peers_fail(mode, seeders, leechers):
                                    continue

                                info = tr.find('a', href=rc['info'])
                                title = (info.attrs.get('title') or info.get_text()).strip()
                                size = cells[head['size']].get_text().strip()
                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                            except (AttributeError, TypeError, ValueError):
                                continue

                            if title and download_url:
                                items[mode].append((title, download_url, seeders, self._bytesizer(size)))

                except generic.HaltParseException:
                    pass
                except (StandardError, Exception):
                    logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

                self._log_search(mode, len(items[mode]) - cnt, search_url)

            results = self._sort_seeding(mode, results + items[mode])

        return results

    def _season_strings(self, ep_obj, **kwargs):

        return generic.TorrentProvider._season_strings(self, ep_obj, scene=False)

    def _episode_strings(self, ep_obj, **kwargs):

        return generic.TorrentProvider._episode_strings(self, ep_obj, scene=False, **kwargs)


provider = GFTrackerProvider()
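
# --- illustrative only -------------------------------------------------------
# A minimal manual smoke test sketch, not part of SickGear's runtime. The
# credential values are hypothetical, and the {'Cache': ['']} parameter shape
# is an assumption mirroring how _search_provider consumes its search params
# above (a mode name mapped to a list of search strings, empty for 'Cache').
if '__main__' == __name__:
    provider.username, provider.password = 'user', 'pass'  # hypothetical login
    for item in provider._search_provider({'Cache': ['']}):
        print(item)  # (title, download_url, seeders, size) tuples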