# coding=utf-8
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.

import datetime
import time
import math
import socket

from . import generic
from sickbeard import classes, scene_exceptions, logger, tvcache
from sickbeard.helpers import sanitizeSceneName
from sickbeard.exceptions import ex, AuthException
from lib import jsonrpclib


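# Provider for broadcasthe.net: all searching goes through the site's JSON-RPC
# API (getTorrents) via the bundled jsonrpclib client rather than HTML scraping;
# BTNCache below drives the periodic "recent torrents" run.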
class BTNProvider(generic.TorrentProvider):

    def __init__(self):
        generic.TorrentProvider.__init__(self, 'BTN')

        self.url_base = 'https://broadcasthe.net'
        self.url_api = 'http://api.btnapps.net'

        self.url = self.url_base

        self.api_key = None
        self.cache = BTNCache(self)

    def _check_auth_from_data(self, data_json):

        if data_json is None:
            return self._check_auth()

        if 'api-error' not in data_json:
            return True

        logger.log(u'Incorrect authentication credentials for %s : %s' % (self.name, data_json['api-error']),
                   logger.DEBUG)
        raise AuthException('Your authentication credentials for %s are incorrect, check your config.' % self.name)

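    # _do_search is fed parameter dicts built by the _get_*_search_strings helpers
    # below, e.g. {'category': 'Episode', 'tvdb': 12345, 'name': 'S01E01'}
    # (the tvdb value here is illustrative only).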
    def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):

        self._check_auth()

        params = {}

        if search_params:
            params.update(search_params)

        if age:
            params['age'] = '<=%i' % age  # age in seconds

        results = []

        data_json = self._api_call(params)
        if not (data_json and self._check_auth_from_data(data_json)):
            self._log_result('rpc search', 0, self.name)
        else:

            found_torrents = {} if 'torrents' not in data_json else data_json['torrents']

            # We got something, and we know the API sends at most 1000 results at a time.
            # If there are more than 1000 results for our query, keep requesting pages
            # until we have everything.
            # The API allows at most 150 requests per hour, so cap paging at that.
            # Scan every 15 minutes. 60 / 15 = 4.
            max_pages = 150
            results_per_page = 1000

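            # Worked example (Python 2 integer division): with 2300 total results the
            # call above already returned the first 1000; 2300 / 1000 = 2, so the loop
            # below fetches two more pages at offsets 1000 and 2000. The truncation
            # never skips results because the first page was already fetched.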
            if 'results' in data_json and int(data_json['results']) >= results_per_page:
                pages_needed = int(math.ceil(int(data_json['results']) / results_per_page))
                if pages_needed > max_pages:
                    pages_needed = max_pages

                # +1 because range(1, 4) = 1, 2, 3
                for page in range(1, pages_needed + 1):
                    data_json = self._api_call(params, results_per_page, page * results_per_page)
                    # Note that these are individual requests and may time out individually,
                    # which would leave 'gaps' in the results. There is no way around that.
                    if 'torrents' in data_json:
                        found_torrents.update(data_json['torrents'])

            cnt = len(results)
            for torrentid, torrent_info in found_torrents.iteritems():
                title, url = self._get_title_and_url(torrent_info)
                if title and url:
                    results.append(torrent_info)
            self._log_result('search', len(results) - cnt, self.name + ' JSON-RPC API')

        return results

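    # _api_call wraps a single getTorrents JSON-RPC request; it returns the parsed
    # response, an empty dict on timeouts/socket errors, or {'api-error': ...} on
    # protocol errors, which _check_auth_from_data treats as an auth failure.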
    def _api_call(self, params=None, results_per_page=1000, offset=0):

        if None is params:
            params = {}

        logger.log(u'Searching with parameters: ' + str(params), logger.DEBUG)

        parsed_json = {}
        server = jsonrpclib.Server(self.url_api)
        try:
            parsed_json = server.getTorrents(self.api_key, params, int(results_per_page), int(offset))

        except jsonrpclib.jsonrpc.ProtocolError as error:
            if 'Call Limit' in error.message:
                logger.log(u'Request ignored because the %s 150 calls/hr limit was reached' % self.name, logger.WARNING)
            else:
                logger.log(u'JSON-RPC protocol error while accessing %s: %s' % (self.name, ex(error)), logger.ERROR)
            return {'api-error': ex(error)}

        except socket.timeout:
            logger.log(u'Timeout while accessing ' + self.name, logger.WARNING)

        except socket.error as error:
            # timeouts are sometimes thrown as socket errors
            logger.log(u'Socket error while accessing %s: %s' % (self.name, error[1]), logger.ERROR)

        except Exception as error:
            errorstring = str(error)
            if errorstring.startswith('<') and errorstring.endswith('>'):
                errorstring = errorstring[1:-1]
            logger.log(u'Error while accessing %s: %s' % (self.name, errorstring), logger.ERROR)

        return parsed_json

    def _get_title_and_url(self, data_json):

        # The BTN API gives a lot of information in its response,
        # however SickGear is built mostly around Scene or
        # release names, which is why we are using them here.

        if 'ReleaseName' in data_json and data_json['ReleaseName']:
            title = data_json['ReleaseName']

        else:
            # If we don't have a release name we need to get creative
            title = u''
            keys = ['Series', 'GroupName', 'Resolution', 'Source', 'Codec']
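            # ('', '.')[any(title)] evaluates to '.' once title is non-empty, so the
            # available fields are joined with dots without a leading separator.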
            for key in keys:
                if key in data_json:
                    title += ('', '.')[any(title)] + data_json[key]

            if title:
                title = title.replace(' ', '.')

        url = None
        if 'DownloadURL' in data_json:
            url = data_json['DownloadURL']
            if url:
                # unescaped / is valid in JSON, but it can be escaped
                url = url.replace('\\/', '/')

        return title, url

    def _get_season_search_strings(self, ep_obj, **kwargs):

        search_params = []
        current_params = {'category': 'Season'}

        # Search for entire seasons: no need to do special things for air by date or sports shows
        if ep_obj.show.air_by_date or ep_obj.show.is_sports:
            # Search for the year of the air by date show
            current_params['name'] = str(ep_obj.airdate).split('-')[0]
        elif ep_obj.show.is_anime:
            current_params['name'] = '%s' % ep_obj.scene_absolute_number
        else:
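            # (a, b)[bool(flag)] picks b when the flag is true: scene-numbered shows
            # search with the scene season, otherwise the regular season number is used.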
            current_params['name'] = 'Season %s' % (ep_obj.season, ep_obj.scene_season)[bool(ep_obj.show.is_scene)]

        # search
        if 1 == ep_obj.show.indexer:
            current_params['tvdb'] = ep_obj.show.indexerid
            search_params.append(current_params)
        elif 2 == ep_obj.show.indexer:
            current_params['tvrage'] = ep_obj.show.indexerid
            search_params.append(current_params)
        else:
            name_exceptions = list(
                set([sanitizeSceneName(a) for a in scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]]))
            for name in name_exceptions:
                # Search by name if we don't have tvdb or tvrage id
                cur_return = current_params.copy()
                cur_return['series'] = name
                search_params.append(cur_return)

        return search_params

    def _get_episode_search_strings(self, ep_obj, add_string='', **kwargs):

        if not ep_obj:
            return [{}]

        to_return = []
        search_params = {'category': 'Episode'}

        # episode
        if ep_obj.show.air_by_date or ep_obj.show.is_sports:
            date_str = str(ep_obj.airdate)

            # BTN uses dots in dates; we just search for the date since that,
            # combined with the series identifier, should match only one episode
            search_params['name'] = date_str.replace('-', '.')
        elif ep_obj.show.is_anime:
            search_params['name'] = '%s' % ep_obj.scene_absolute_number
        else:
            # Do a general name search for the episode, formatted like SXXEYY
            season, episode = ((ep_obj.season, ep_obj.episode),
                               (ep_obj.scene_season, ep_obj.scene_episode))[bool(ep_obj.show.is_scene)]
            search_params['name'] = 'S%02dE%02d' % (season, episode)

        # search
        if 1 == ep_obj.show.indexer:
            search_params['tvdb'] = ep_obj.show.indexerid
            to_return.append(search_params)
        elif 2 == ep_obj.show.indexer:
            search_params['tvrage'] = ep_obj.show.indexerid
            to_return.append(search_params)
        else:
            # add a new query string for every exception
            name_exceptions = list(
                set([sanitizeSceneName(a) for a in scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]]))
            for cur_exception in name_exceptions:
                cur_return = search_params.copy()
                cur_return['series'] = cur_exception
                to_return.append(cur_return)

        return to_return

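    # Proper/repack search: the '%' characters in the terms below look like SQL-style
    # wildcards for the API's release-name matching; that interpretation is an
    # assumption, the terms are simply passed through as the 'release' parameter.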
    def find_propers(self, search_date=None):

        results = []

        search_terms = ['%.proper.%', '%.repack.%']

        for term in search_terms:
            for item in self._do_search({'release': term}, age=4 * 24 * 60 * 60):
                if item['Time']:
                    try:
                        result_date = datetime.datetime.fromtimestamp(float(item['Time']))
                    except TypeError:
                        continue

                    if not search_date or result_date > search_date:
                        title, url = self._get_title_and_url(item)
                        results.append(classes.Proper(title, url, result_date, self.show))

        return results

    def get_cache_data(self, **kwargs):
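        # kwargs['age'] is the struct_time of the last successful cache update (see
        # BTNCache._getRSSData below) and kwargs['min_time'] is expressed in minutes.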

        # Get the torrents uploaded since the last check.
        seconds_since_last_update = int(math.ceil(time.time() - time.mktime(kwargs['age'])))

        # enforce a floor of min_time minutes (15 by default)
        seconds_min_time = kwargs['min_time'] * 60
        if seconds_min_time > seconds_since_last_update:
            seconds_since_last_update = seconds_min_time

        # Set maximum to 24 hours (24 * 60 * 60 = 86400 seconds) of "RSS" data search,
        # older items will be done through backlog
        if 86400 < seconds_since_last_update:
            logger.log(u'Only trying to fetch the last 24 hours even though the last known successful update on %s was over 24 hours ago'
                       % self.name, logger.WARNING)
            seconds_since_last_update = 86400

        return self._do_search(search_params=None, age=seconds_since_last_update)


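# BTNCache drives the periodic "recent torrents" run: every minTime minutes it asks
# the provider for everything uploaded since the last successful update.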
class BTNCache(tvcache.TVCache):

    def __init__(self, this_provider):
        tvcache.TVCache.__init__(self, this_provider)

        self.minTime = 15  # cache update frequency in minutes

    def _getRSSData(self):

        return self.provider.get_cache_data(age=self._getLastUpdate().timetuple(), min_time=self.minTime)


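# Module-level instance; SickGear's provider framework is assumed to pick this up on import.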
provider = BTNProvider()