SickGear/sickbeard/providers/torrentday.py

# coding=utf-8
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
import re
import datetime
import time

from . import generic
from sickbeard import logger, tvcache, helpers


class TorrentDayProvider(generic.TorrentProvider):

    def __init__(self):
        generic.TorrentProvider.__init__(self, 'TorrentDay')

        self.url_base = 'https://torrentday.eu/'
        self.urls = {'config_provider_home_uri': self.url_base,
                     'login': self.url_base + 'torrents/',
                     'search': self.url_base + 'V3/API/API.php',
                     'get': self.url_base + 'download.php/%s/%s'}

        # numeric site category flags merged into the query for each search mode
        self.categories = {'Season': {'c14': 1},
                           'Episode': {'c2': 1, 'c26': 1, 'c7': 1, 'c24': 1},
                           'Cache': {'c2': 1, 'c26': 1, 'c7': 1, 'c24': 1, 'c14': 1}}

        self.url = self.urls['config_provider_home_uri']

        self.username, self.password, self.minseed, self.minleech = 4 * [None]
        self.freeleech = False
        self.cache = TorrentDayCache(self)
    def _do_login(self):

        # treat the session as authenticated if the site's auth cookies are present
        logged_in = lambda: 'uid' in self.session.cookies and 'pass' in self.session.cookies
        if logged_in():
            return True

        if self._check_auth():
            login_params = {'username': self.username, 'password': self.password, 'submit.x': 0, 'submit.y': 0}
            response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session)
            if response and logged_in():
                return True

            msg = u'Failed to authenticate'
            if response and 'tried too often' in response:
                msg = u'Too many login attempts'
            logger.log(u'%s, abort provider %s' % (msg, self.name), logger.ERROR)

        return False
    def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):

        results = []
        if not self._do_login():
            return results

        items = {'Season': [], 'Episode': [], 'Cache': []}

        for mode in search_params.keys():
            for search_string in search_params[mode]:
                search_string = '+'.join(search_string.split())

                # build the JSON API query, merging in the category flags for this mode
                post_data = dict({'/browse.php?': None, 'cata': 'yes', 'jxt': 8, 'jxw': 'b', 'search': search_string},
                                 **self.categories[mode])
                if self.freeleech:
                    post_data.update({'free': 'on'})

                data_json = self.get_url(self.urls['search'], post_data=post_data, json=True)

                cnt = len(items[mode])
                try:
                    if not data_json:
                        raise generic.HaltParseException

                    torrents = data_json.get('Fs', [])[0].get('Cn', {}).get('torrents', [])
                    for torrent in torrents:
                        seeders, leechers = int(torrent['seed']), int(torrent['leech'])
                        if 'Cache' != mode and (seeders < self.minseed or leechers < self.minleech):
                            continue

                        # strip BBCode-style markup (e.g. [color=...]...[/color]) from the torrent name
                        title = re.sub(r'\[.*=.*\].*\[/.*\]', '', torrent['name'])
                        download_url = self.urls['get'] % (torrent['id'], torrent['fname'])
                        if title and download_url:
                            items[mode].append((title, download_url, seeders))
                except Exception:
                    # swallow parse failures (including empty responses), pausing briefly
                    time.sleep(1.1)

                # log the provider name for cache runs, otherwise the search string used
                self._log_result(mode, len(items[mode]) - cnt,
                                 ('search string: ' + search_string, self.name)['Cache' == mode])

            # For each search mode sort all the items by seeders
            items[mode].sort(key=lambda tup: tup[2], reverse=True)
            results += items[mode]

        return results
    def find_propers(self, search_date=None):

        # resolve the date at call time; a datetime.datetime.today() default
        # argument would be evaluated only once, at import, and grow stale
        return self._find_propers(search_date or datetime.datetime.today(), '')
    def _get_episode_search_strings(self, ep_obj, add_string='', **kwargs):

        return generic.TorrentProvider._get_episode_search_strings(self, ep_obj, add_string, sep_date='.')
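
    # TorrentDayCache below calls provider.get_cache_data(), a method not shown
    # in this excerpt. A minimal sketch, assuming it simply runs a 'Cache' mode
    # search with an empty search string:
    def get_cache_data(self):

        return self._do_search({'Cache': ['']})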

class TorrentDayCache(tvcache.TVCache):

    def __init__(self, this_provider):
        tvcache.TVCache.__init__(self, this_provider)

    def _getRSSData(self):

        return self.provider.get_cache_data()


provider = TorrentDayProvider()