Mirror of https://github.com/SickGear/SickGear.git
Merge pull request #698 from JackDandy/feature/ChangeTD
Change TorrentDay to use its 2.x interface.
Commit a12a99fcb2
2 changed files with 42 additions and 24 deletions
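In short, the 2.x interface drops the old JSON endpoint (V3/API/API.php) and username/password login in favour of plain HTML pages: the provider now authenticates with a user-supplied cookie digest of the form 'uid=xx; pass=yy' (checked against rss.php) and scrapes results out of the torrentTable markup. The standalone sketch below illustrates that flow; the cookie names, the 'RSS URL' login check, and the table/column ids are taken from the diff further down, while the requests/BeautifulSoup wiring, the helper names, and the simplified search path are illustrative assumptions rather than SickGear's actual code.

# Illustrative sketch only, not SickGear code. Assumes a TorrentDay mirror URL
# and a cookie digest of the form "uid=xx; pass=yy" (what ui_string() asks for).
import re
import requests
from bs4 import BeautifulSoup


def session_from_digest(base_url, digest):
    """Build a requests session carrying the uid/pass cookies taken from the digest."""
    session = requests.Session()
    for pair in digest.split(';'):
        name, _, value = pair.strip().partition('=')
        if name in ('uid', 'pass'):
            session.cookies.set(name, value)
    # The provider treats the session as logged in when rss.php mentions 'RSS URL'
    # and both cookies are present; this mimics that check.
    response = session.get(base_url + 'rss.php', timeout=30)
    if 'RSS URL' not in response.text:
        raise RuntimeError('Invalid cookie details. Check settings')
    return session


def search(session, base_url, query):
    """Fetch one search page and scrape the torrentTable rows, as the 2.x provider does."""
    # Simplified path: the real provider also inserts category ids and a ';free' flag.
    html = session.get(base_url + 't?q=%s;o=seeders' % '+'.join(query.split()), timeout=30).text
    soup = BeautifulSoup(html, 'html.parser')
    table = soup.find('table', id='torrentTable')
    results = []
    for row in (table.find_all('tr')[1:] if table else []):
        try:
            seeders, leechers = [int(row.find('td', attrs={'class': x}).get_text().strip())
                                 for x in ('seedersInfo', 'leechersInfo')]
            title = row.find('a', href=re.compile('(?i)detail')).get_text().strip()
            link = row.find('a', href=re.compile('(?i)download'))['href'].lstrip('/')
        except (AttributeError, TypeError, ValueError):
            continue
        results.append((title, base_url + link, seeders, leechers))
    return results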
@@ -76,6 +76,7 @@
 * Fix post processing "Force already processed" processing only the first of multiple files
 * Add FileList torrent provider
 * Add provider Anizb
+* Change TorrentDay to use its 2.x interface


 ### 0.11.11 (2016-04-05 19:20:00 UTC)
@@ -19,7 +19,8 @@ import re
 import time

 from . import generic
-from sickbeard.helpers import tryInt
+from sickbeard.bs4_parser import BS4Parser
+from sickbeard.helpers import tryInt, anon_url


 class TorrentDayProvider(generic.TorrentProvider):
@@ -30,7 +31,7 @@ class TorrentDayProvider(generic.TorrentProvider):
         self.url_home = ['https://%s/' % u for u in 'torrentday.eu', 'secure.torrentday.com', 'tdonline.org',
                          'torrentday.it', 'www.td.af', 'www.torrentday.com']

-        self.url_vars = {'login': 'torrents/', 'search': 'V3/API/API.php', 'get': 'download.php/%s/%s'}
+        self.url_vars = {'login': 'rss.php', 'search': 't?%s%s;q=%s%s', 'get': '%s'}
         self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'login': '%(home)s%(vars)s',
                          'search': '%(home)s%(vars)s', 'get': '%(home)s%(vars)s'}

@@ -39,17 +40,14 @@ class TorrentDayProvider(generic.TorrentProvider):

         self.proper_search_terms = None

-        self.username, self.password, self.freeleech, self.minseed, self.minleech = 5 * [None]
+        self.digest, self.freeleech, self.minseed, self.minleech = 4 * [None]

     def _authorised(self, **kwargs):

         return super(TorrentDayProvider, self)._authorised(
-            post_params={'submit.x': 0, 'submit.y': 0},
-            failed_msg=(lambda x=None: re.search(r'(?i)tried((<[^>]+>)|\W)*too((<[^>]+>)|\W)*often', x) and
-                        u'Abort %s, Too many login attempts. Settings must be checked' or (
-                            re.search(r'(?i)username((<[^>]+>)|\W)*or((<[^>]+>)|\W)*password', x) and
-                            u'Invalid username or password for %s. Check settings' or
-                            u'Failed to authenticate with %s, abort provider')))
+            logged_in=(lambda x=None: (None is x or 'RSS URL' in x) and self.has_all_cookies() and
+                       self.session.cookies['uid'] in self.digest and self.session.cookies['pass'] in self.digest),
+            failed_msg=(lambda x=None: u'Invalid cookie details for %s. Check settings'))

     def _search_provider(self, search_params, **kwargs):

@@ -59,35 +57,48 @@ class TorrentDayProvider(generic.TorrentProvider):

         items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

+        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'detail', 'get': 'download'}.items())
         for mode in search_params.keys():
             for search_string in search_params[mode]:
                 search_string = '+'.join(search_string.split())
-                post_data = dict((x.split('=') for x in self._categories_string(mode).split('&')),
-                                 search=search_string, cata='yes', jxt=8, jxw='b')
-
-                if self.freeleech:
-                    post_data.update({'free': 'on'})
+                search_url = self.urls['search'] % (
+                    self._categories_string(mode, '%s', ';'), (';free', '')[not self.freeleech],
+                    search_string, (';o=seeders', '')['Cache' == mode])

-                data_json = self.get_url(self.urls['search'], post_data=post_data, json=True)
+                html = self.get_url(search_url)

                 cnt = len(items[mode])
                 try:
-                    if not data_json:
+                    if not html or self._has_no_results(html):
                         raise generic.HaltParseException
-                    torrents = data_json.get('Fs')[0].get('Cn').get('torrents')

-                    for item in torrents:
-                        seeders, leechers, size = [tryInt(n, n) for n in [item.get(x) for x in 'seed', 'leech', 'size']]
+                    with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
+                        torrent_table = soup.find('table', id='torrentTable')
+                        torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')
+
+                        if 2 > len(torrent_rows):
+                            raise generic.HaltParseException
+
+                        for tr in torrent_rows[1:]:
+                            try:
+                                seeders, leechers = [tryInt(tr.find('td', attrs={'class': x}).get_text().strip())
+                                                     for x in ('seedersInfo', 'leechersInfo')]
                                 if self._peers_fail(mode, seeders, leechers):
                                     continue

-                        title = re.sub(r'\[.*=.*\].*\[/.*\]', '', item['name'])
+                                title = tr.find('a', href=rc['info']).get_text().strip()
+                                size = tr.find_all('td')[-3].get_text().strip()

-                        download_url = self.urls['get'] % (item['id'], item['fname'])
+                                download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
+                            except (AttributeError, TypeError, ValueError):
+                                continue

                             if title and download_url:
                                 items[mode].append((title, download_url, seeders, self._bytesizer(size)))

                 except generic.HaltParseException:
                     pass
                 except Exception:
                     time.sleep(1.1)
@@ -104,5 +115,11 @@ class TorrentDayProvider(generic.TorrentProvider):

         return generic.TorrentProvider._episode_strings(self, ep_obj, sep_date='.', date_or=True, **kwargs)

+    def ui_string(self, key):
+        current_url = self.urls['config_provider_home_uri']
+        return ('torrentday_digest' == key and
+                ('use... \'uid=xx; pass=yy\' from a session logged in at <a target="_blank" href="%s">%s</a>' %
+                 (anon_url(current_url), current_url.strip('/'))) or '')


 provider = TorrentDayProvider()
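As a closing note on the new 't?%s%s;q=%s%s' search template, the four substitutions are the category string, an optional freeleech flag, the query, and an optional seeders sort. A hypothetical expansion, assuming example category ids (the real values come from _categories_string(), which is outside this diff):

# Hypothetical expansion of the 2.x search template from the diff above.
# '7;26' is a placeholder category string, not taken from SickGear.
search_tmpl = 't?%s%s;q=%s%s'
categories = '7;26'            # from _categories_string(mode, '%s', ';'), illustrative
free = ';free'                 # included only when the freeleech option is enabled
query = 'Show+Name+S01E02'     # search string with spaces joined by '+'
sort = ';o=seeders'            # appended for every mode except 'Cache'
print(search_tmpl % (categories, free, query, sort))
# -> t?7;26;free;q=Show+Name+S01E02;o=seeders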