Mirror of https://github.com/SickGear/SickGear.git

Merge branch 'master' into develop

Commit d1e3e29668: 4 changed files with 6 additions and 121 deletions
CHANGES.md
@@ -36,6 +36,11 @@
 * Change add xsrf protection support to media processing scripts


+### 0.15.5 (2018-04-04 21:10:00 UTC)
+
+* Remove GFT torrent provider
+
+
 ### 0.15.4 (2018-04-03 16:10:00 UTC)

 * Fix Torrentleech provider
Binary file not shown (removed; size before: 886 B).
sickbeard/providers/__init__.py
@@ -27,7 +27,7 @@ from sickbeard import logger, encodingKludge as ek
 from . import newznab, omgwtfnzbs
 # torrent
 from . import alpharatio, beyondhd, bithdtv, bitmetv, blutopia, btn, btscene, dh, ettv, \
-    fano, filelist, funfile, gftracker, grabtheinfo, hdbits, hdspace, hdtorrents, \
+    fano, filelist, funfile, grabtheinfo, hdbits, hdspace, hdtorrents, \
     iptorrents, limetorrents, magnetdl, morethan, nebulance, ncore, nyaa, pisexy, potuk, pretome, privatehd, ptf, \
     rarbg, revtt, scenehd, scenetime, shazbat, showrss, skytorrents, speedcd, \
     thepiratebay, torlock, torrentbytes, torrentday, torrenting, torrentleech, \
@@ -55,7 +55,6 @@ __all__ = ['omgwtfnzbs',
            'fano',
            'filelist',
            'funfile',
-           'gftracker',
            'grabtheinfo',
            'hdbits',
            'hdspace',
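A note on why both hunks above change together: the providers package exposes one module per tracker, and the closing line of the removed file below (`provider = GFTrackerProvider()`) suggests the active provider list is built by walking `__all__` and collecting each module's `provider` instance. The sketch that follows illustrates that pattern; it is an assumption for context, not SickGear's actual implementation, and the helper name `_enumerate_providers` is hypothetical.

# Minimal sketch of the registration pattern implied by this diff (hypothetical helper,
# not SickGear's code): each module named in __all__ is imported and its module-level
# `provider` instance collected. Removing 'gftracker' from the import line and from
# __all__ therefore drops the tracker from the generated provider list.
import importlib


def _enumerate_providers(package_name, module_names):
    providers = []
    for name in module_names:
        module = importlib.import_module('%s.%s' % (package_name, name))
        instance = getattr(module, 'provider', None)  # e.g. provider = GFTrackerProvider()
        if instance is not None:
            providers.append(instance)
    return providers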
sickbeard/providers/gftracker.py (entire file deleted)
@@ -1,119 +0,0 @@
# coding=utf-8
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.

import re
import time
import traceback

from . import generic
from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode


class GFTrackerProvider(generic.TorrentProvider):

    def __init__(self):
        generic.TorrentProvider.__init__(self, 'GFTracker', cache_update_freq=17)

        self.url_base = 'https://thegft.org/'
        self.urls = {'config_provider_home_uri': self.url_base,
                     'login_init': self.url_base + 'login.php',
                     'login': self.url_base + 'loginsite.php',
                     'browse': self.url_base + 'browse.php?view=0&%s&searchtype=1%s',
                     'search': '&search=%s'}

        self.categories = {'shows': [4, 17, 19, 26, 37, 47], 'anime': [16]}

        self.url = self.urls['config_provider_home_uri']

        self.username, self.password, self.scene, self.minseed, self.minleech = 5 * [None]

    def _authorised(self, **kwargs):

        return super(GFTrackerProvider, self)._authorised(logged_in=(lambda y=None: self.has_all_cookies(pre='gft_')),
                                                          url=[self.urls['login_init']])

    def _search_provider(self, search_params, **kwargs):

        results = []
        if not self._authorised():
            return results

        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'details', 'get': 'download',
                                                             'seeders': r'(^\d+)', 'leechers': r'(\d+)$'}.items())
        for mode in search_params.keys():
            for search_string in search_params[mode]:
                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
                search_url = self.urls['browse'] % (self._categories_string(mode),
                                                    (self.urls['search'] % search_string, '')['Cache' == mode])

                html = self.get_url(search_url)
                if self.should_skip():
                    return results

                cnt = len(items[mode])
                try:
                    if not html or self._has_no_results(html):
                        time.sleep(1.1)
                        raise generic.HaltParseException

                    with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
                        torrent_table = soup.find('div', id='torrentBrowse').find('table')
                        torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')

                        if 2 > len(torrent_rows):
                            raise generic.HaltParseException

                        head = None
                        for tr in torrent_rows[1:]:
                            cells = tr.find_all('td')
                            if 3 > len(cells):
                                continue
                            try:
                                head = head if None is not head else self._header_row(tr)
                                seeders, leechers = 2 * [cells[head['seed']].get_text().strip()]
                                seeders, leechers = [tryInt(n) for n in [
                                    rc['seeders'].findall(seeders)[0], rc['leechers'].findall(leechers)[0]]]
                                if self._peers_fail(mode, seeders, leechers):
                                    continue

                                info = tr.find('a', href=rc['info'])
                                title = (info.attrs.get('title') or info.get_text()).strip()
                                size = cells[head['size']].get_text().strip()
                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                            except (AttributeError, TypeError, ValueError):
                                continue

                            if title and download_url:
                                items[mode].append((title, download_url, seeders, self._bytesizer(size)))

                except generic.HaltParseException:
                    pass
                except (StandardError, Exception):
                    logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
                self._log_search(mode, len(items[mode]) - cnt, search_url)

            results = self._sort_seeding(mode, results + items[mode])

        return results


provider = GFTrackerProvider()
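Two idioms in the removed parser are worth a note for anyone porting it: `(a, b)[condition]` selects `b` when the condition is true (used to drop the search query in 'Cache' mode), and `seeders, leechers = 2 * [cell_text]` duplicates one cell's text so separate regexes can pull the leading and trailing numbers. A small standalone illustration follows; it is not SickGear code, and the sample cell text is made up.

# Standalone illustration of two idioms from the removed file (not SickGear code).
import re

# 1) Tuple indexing by a boolean: picks the second element when the condition is True.
mode = 'Cache'
query_fragment = ('&search=%s' % 'Show+S01E01', '')['Cache' == mode]
assert '' == query_fragment   # cache browsing sends no search term

# 2) Duplicating one cell's text so two regexes can read it from both ends.
cell_text = '12 3'                                   # e.g. seeders and leechers in one cell
seeders, leechers = 2 * [cell_text]
seeders = int(re.findall(r'(^\d+)', seeders)[0])     # leading number -> 12
leechers = int(re.findall(r'(\d+)$', leechers)[0])   # trailing number -> 3
assert (12, 3) == (seeders, leechers)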