2014-04-22 08:02:43 +00:00
|
|
|
# coding=utf-8
|
2014-03-10 05:18:05 +00:00
|
|
|
# Author: Nic Wolfe <nic@wolfeden.ca>
|
|
|
|
# URL: http://code.google.com/p/sickbeard/
|
|
|
|
#
|
2014-11-12 16:43:14 +00:00
|
|
|
# This file is part of SickGear.
|
2014-03-10 05:18:05 +00:00
|
|
|
#
|
2014-11-12 16:43:14 +00:00
|
|
|
# SickGear is free software: you can redistribute it and/or modify
|
2014-03-10 05:18:05 +00:00
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation, either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
|
|
|
#
|
2014-11-12 16:43:14 +00:00
|
|
|
# SickGear is distributed in the hope that it will be useful,
|
2014-03-10 05:18:05 +00:00
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
2014-11-12 16:43:14 +00:00
|
|
|
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
|
2014-03-10 05:18:05 +00:00
|
|
|
|
|
|
|
from __future__ import with_statement
|
|
|
|
|
|
|
|
import datetime
|
2015-09-18 00:06:34 +00:00
|
|
|
import itertools
|
|
|
|
import math
|
2014-03-10 05:18:05 +00:00
|
|
|
import os
|
|
|
|
import re
|
2015-09-18 00:06:34 +00:00
|
|
|
import time
|
Change validate and improve specific Torrent provider connections, IPT, KAT, SCC, TPB, TB, TD, TT.
Change refactor cache for torrent providers to reduce code.
Change improve search category selection BMTV, FSH, FF, TB.
Change identify more SD release qualities.
Change update SpeedCD, MoreThan, TVChaosuk.
Add torrent provider HD4Free.
Remove torrent provider BitSoup.
Change only create threads for providers needing a recent search instead of for all enabled.
Add 4489 as experimental value to "Recent search frequency" to use provider freqs instead of fixed width for all.
Fix searching nzb season packs.
Change remove some logging cruft.
2016-03-24 18:24:14 +00:00
|
|
|
import urlparse
|
2017-08-21 19:53:26 +00:00
|
|
|
import threading
|
2017-11-02 18:30:05 +00:00
|
|
|
import socket
|
2016-09-07 20:24:10 +00:00
|
|
|
from urllib import quote_plus
|
Change validate and improve specific Torrent provider connections, IPT, KAT, SCC, TPB, TB, TD, TT.
Change refactor cache for torrent providers to reduce code.
Change improve search category selection BMTV, FSH, FF, TB.
Change identify more SD release qualities.
Change update SpeedCD, MoreThan, TVChaosuk.
Add torrent provider HD4Free.
Remove torrent provider BitSoup.
Change only create threads for providers needing a recent search instead of for all enabled.
Add 4489 as experimental value to "Recent search frequency" to use provider freqs instead of fixed width for all.
Fix searching nzb season packs.
Change remove some logging cruft.
2016-03-24 18:24:14 +00:00
|
|
|
import zlib
|
2015-04-18 04:55:04 +00:00
|
|
|
from base64 import b16encode, b32decode
|
|
|
|
|
2014-03-10 05:18:05 +00:00
|
|
|
import sickbeard
|
2014-07-24 04:44:11 +00:00
|
|
|
import requests
|
2015-07-13 09:39:20 +00:00
|
|
|
import requests.cookies
|
2017-02-17 03:16:51 +00:00
|
|
|
from cfscrape import CloudflareScraper
|
2015-09-18 00:06:34 +00:00
|
|
|
from hachoir_parser import guessParser
|
2016-08-26 23:36:01 +00:00
|
|
|
from hachoir_core.error import HachoirError
|
2015-09-18 00:06:34 +00:00
|
|
|
from hachoir_core.stream import FileInputStream
|
|
|
|
|
2018-01-24 02:24:00 +00:00
|
|
|
from sickbeard import helpers, classes, logger, db, tvcache, encodingKludge as ek
|
2015-06-19 23:34:56 +00:00
|
|
|
from sickbeard.common import Quality, MULTI_EP_RESULT, SEASON_RESULT, USER_AGENT
|
|
|
|
from sickbeard.exceptions import SickBeardException, AuthException, ex
|
2016-11-14 21:33:15 +00:00
|
|
|
from sickbeard.helpers import maybe_plural, remove_file_failed
|
2014-07-06 13:11:04 +00:00
|
|
|
from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
|
2018-01-24 02:24:00 +00:00
|
|
|
from sickbeard.show_name_helpers import get_show_names_all_possible
|
2017-11-02 18:30:05 +00:00
|
|
|
from sickbeard.sbdatetime import sbdatetime
|
2014-09-07 05:25:01 +00:00
|
|
|
|
2018-01-15 17:54:36 +00:00
|
|
|
|
2015-06-19 23:34:56 +00:00
|
|
|
class HaltParseException(SickBeardException):
    """Something requires the current processing to abort"""
|
|
|
|
|
|
|
|
|
2018-01-15 17:54:36 +00:00
|
|
|
class ProviderFailTypes:
    """Namespace enum of provider failure categories.

    `names` maps each numeric category to the display/DB string used in logs
    and in the provider-fails UI.
    """
    http = 1
    connection = 2
    connection_timeout = 3
    timeout = 4
    other = 5
    limit = 6
    nodata = 7

    names = {http: 'http', timeout: 'timeout',
             connection: 'connection', connection_timeout: 'connection_timeout',
             nodata: 'nodata', other: 'other', limit: 'limit'}

    # note: the previous no-op `def __init__(self): pass` was dead code; this
    # class is used purely as a constant namespace and is never instantiated
|
|
|
|
|
|
|
|
|
2018-01-15 17:54:36 +00:00
|
|
|
class ProviderFail(object):
    """One provider failure event: category, optional HTTP status code, timestamp."""

    def __init__(self, fail_type=ProviderFailTypes.other, code=None, fail_time=None):
        self.code = code
        self.fail_type = fail_type
        # default the event time to 'now' unless a datetime is supplied
        if isinstance(fail_time, datetime.datetime):
            self.fail_time = fail_time
        else:
            self.fail_time = datetime.datetime.now()
|
2017-11-02 18:30:05 +00:00
|
|
|
|
|
|
|
|
2018-01-15 17:54:36 +00:00
|
|
|
class ProviderFailList(object):
    """Thread-safe, db-persisted list of ProviderFail events for one provider."""

    def __init__(self, provider_name):
        # provider_name is a callable returning the provider id string
        # (GenericProvider passes its get_id bound method, not a plain string)
        self.provider_name = provider_name
        self._fails = []
        self.lock = threading.Lock()
        self.clear_old()
        self.load_list()
        self.last_save = datetime.datetime.now()
        self.dirty = False

    @property
    def fails(self):
        # raw, unsorted list of ProviderFail events
        return self._fails

    @property
    def fails_sorted(self):
        """Return fail events aggregated per hour for UI display.

        Builds one row per (date, hour) with a count per fail type (plus a
        per-HTTP-code breakdown), marks dates spanning multiple rows, then
        prepends a daily summary row for each multi-row date. Sorted newest
        first.
        """
        fail_dict = {}
        b_d = {'count': 0}  # template counter copied per fail type
        for e in self._fails:
            fail_date = e.fail_time.date()
            fail_hour = e.fail_time.time().hour
            # bucket events into hour-granularity rows
            date_time = datetime.datetime.combine(fail_date, datetime.time(hour=fail_hour))
            if ProviderFailTypes.names[e.fail_type] not in fail_dict.get(date_time, {}):
                default = {'date': str(fail_date), 'date_time': date_time,
                           'timestamp': helpers.tryInt(sbdatetime.totimestamp(e.fail_time)), 'multirow': False}
                for et in ProviderFailTypes.names.itervalues():
                    default[et] = b_d.copy()
                fail_dict.setdefault(date_time, default)[ProviderFailTypes.names[e.fail_type]]['count'] = 1
            else:
                fail_dict[date_time][ProviderFailTypes.names[e.fail_type]]['count'] += 1
            # http fails additionally tally per status code
            if ProviderFailTypes.http == e.fail_type:
                if e.code in fail_dict[date_time].get(ProviderFailTypes.names[e.fail_type],
                                                      {'code': {}}).get('code', {}):
                    fail_dict[date_time][ProviderFailTypes.names[e.fail_type]]['code'][e.code] += 1
                else:
                    fail_dict[date_time][ProviderFailTypes.names[e.fail_type]].setdefault('code', {})[e.code] = 1

        # flag dates that produced more than one hourly row
        row_count = {}
        for (k, v) in fail_dict.iteritems():
            row_count.setdefault(v.get('date'), 0)
            if v.get('date') in row_count:
                row_count[v.get('date')] += 1
        for (k, v) in fail_dict.iteritems():
            if 1 < row_count.get(v.get('date')):
                fail_dict[k]['multirow'] = True

        fail_list = sorted([fail_dict[k] for k in fail_dict.iterkeys()], key=lambda y: y.get('date_time'), reverse=True)

        # compute per-day totals across all hourly rows
        totals = {}
        for fail_date in set([fail.get('date') for fail in fail_list]):
            daytotals = {}
            for et in ProviderFailTypes.names.itervalues():
                daytotals.update({et: sum([x.get(et).get('count') for x in fail_list if fail_date == x.get('date')])})
            totals.update({fail_date: daytotals})
        # insert a summary row before the first (newest) row of each multi-row day
        for (fail_date, total) in totals.iteritems():
            for i, item in enumerate(fail_list):
                if fail_date == item.get('date'):
                    if item.get('multirow'):
                        fail_list[i:i] = [item.copy()]
                        for et in ProviderFailTypes.names.itervalues():
                            fail_list[i][et] = {'count': total[et]}
                            if et == ProviderFailTypes.names[ProviderFailTypes.http]:
                                fail_list[i][et]['code'] = {}
                    break

        return fail_list

    def add_fail(self, fail):
        """Append a ProviderFail under lock, then persist the list."""
        if isinstance(fail, ProviderFail):
            with self.lock:
                self.dirty = True
                self._fails.append(fail)
                logger.log('Adding fail.%s for %s' % (ProviderFailTypes.names.get(
                    fail.fail_type, ProviderFailTypes.names[ProviderFailTypes.other]), self.provider_name()),
                    logger.DEBUG)
            # save outside the lock: save_list re-acquires self.lock
            self.save_list()

    def save_list(self):
        """Write pending fail events to cache.db (INSERT OR IGNORE dedupes)."""
        if self.dirty:
            self.clear_old()
            with self.lock:
                my_db = db.DBConnection('cache.db')
                cl = []
                for f in self._fails:
                    cl.append(['INSERT OR IGNORE INTO provider_fails (prov_name, fail_type, fail_code, fail_time) '
                               'VALUES (?,?,?,?)', [self.provider_name(), f.fail_type, f.code,
                                                    sbdatetime.totimestamp(f.fail_time)]])
                self.dirty = False
                if cl:
                    my_db.mass_action(cl)
        self.last_save = datetime.datetime.now()

    def load_list(self):
        """Reload this provider's fail events from cache.db (best-effort)."""
        with self.lock:
            try:
                my_db = db.DBConnection('cache.db')
                if my_db.hasTable('provider_fails'):
                    results = my_db.select('SELECT * FROM provider_fails WHERE prov_name = ?', [self.provider_name()])
                    self._fails = []
                    for r in results:
                        try:
                            self._fails.append(ProviderFail(
                                fail_type=helpers.tryInt(r['fail_type']), code=helpers.tryInt(r['fail_code']),
                                fail_time=datetime.datetime.fromtimestamp(helpers.tryInt(r['fail_time']))))
                        except (StandardError, Exception):
                            # skip unparseable rows rather than abort the load
                            continue
            except (StandardError, Exception):
                pass

    def clear_old(self):
        """Delete fail events older than 28 days from cache.db (best-effort)."""
        with self.lock:
            try:
                my_db = db.DBConnection('cache.db')
                if my_db.hasTable('provider_fails'):
                    time_limit = sbdatetime.totimestamp(datetime.datetime.now() - datetime.timedelta(days=28))
                    my_db.action('DELETE FROM provider_fails WHERE fail_time < ?', [time_limit])
            except (StandardError, Exception):
                pass
|
|
|
|
|
|
|
|
|
|
|
|
class GenericProvider(object):
|
2015-07-13 09:39:20 +00:00
|
|
|
NZB = 'nzb'
|
|
|
|
TORRENT = 'torrent'
|
2014-03-10 05:18:05 +00:00
|
|
|
|
2015-05-27 00:00:01 +00:00
|
|
|
    def __init__(self, name, supports_backlog=False, anime_only=False):
        """Base provider.

        :param name: display name of the provider
        :param supports_backlog: True if the provider can be used for backlog searches
        :param anime_only: True if the provider indexes anime exclusively
        """
        # these need to be set in the subclass
        self.providerType = None
        self.name = name
        self.supports_backlog = supports_backlog
        self.anime_only = anime_only
        if anime_only:
            # anime propers are versioned releases rather than 'PROPER/REPACK'
            self.proper_search_terms = 'v1|v2|v3|v4|v5'
        self.url = ''

        self.show = None

        self.search_mode = None
        self.search_fallback = False
        self.enabled = False
        self.enable_recentsearch = False
        self.enable_backlog = False
        self.enable_scheduled_backlog = True
        self.categories = None

        self.cache = tvcache.TVCache(self)

        # scraper session transparently solves Cloudflare anti-bot challenges
        self.session = CloudflareScraper.create_scraper()

        self.headers = {
            # Using USER_AGENT instead of Mozilla to keep same user agent along authentication and download phases,
            # otherwise session might be broken and download fail, asking again for authentication
            # 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) ' +
            # 'Chrome/32.0.1700.107 Safari/537.36'}
            'User-Agent': USER_AGENT}

        # failure tracking state; underscore attrs are persisted via property setters
        self._failure_count = 0
        self._failure_time = None
        self.fails = ProviderFailList(self.get_id)
        self._tmr_limit_count = 0
        self._tmr_limit_time = None
        self._tmr_limit_wait = None
        self._last_fail_type = None
        self.has_limit = False
        # back-off schedule: failure-count index -> (hours, minutes) to wait
        self.fail_times = {1: (0, 15), 2: (0, 30), 3: (1, 0), 4: (2, 0), 5: (3, 0), 6: (6, 0), 7: (12, 0), 8: (24, 0)}
        self._load_fail_values()
|
2017-11-02 18:30:05 +00:00
|
|
|
|
2018-01-15 17:54:36 +00:00
|
|
|
    def _load_fail_values(self):
        """Restore persisted failure counters/timestamps for this provider from cache.db."""
        # DATA_DIR is only set once the app is fully initialised; skip during tests/early import
        if hasattr(sickbeard, 'DATA_DIR'):
            my_db = db.DBConnection('cache.db')
            if my_db.hasTable('provider_fails_count'):
                r = my_db.select('SELECT * FROM provider_fails_count WHERE prov_name = ?', [self.get_id()])
                if r:
                    self._failure_count = helpers.tryInt(r[0]['failure_count'], 0)
                    if r[0]['failure_time']:
                        self._failure_time = datetime.datetime.fromtimestamp(r[0]['failure_time'])
                    else:
                        self._failure_time = None
                    self._tmr_limit_count = helpers.tryInt(r[0]['tmr_limit_count'], 0)
                    if r[0]['tmr_limit_time']:
                        self._tmr_limit_time = datetime.datetime.fromtimestamp(r[0]['tmr_limit_time'])
                    else:
                        self._tmr_limit_time = None
                    if r[0]['tmr_limit_wait']:
                        self._tmr_limit_wait = datetime.timedelta(seconds=helpers.tryInt(r[0]['tmr_limit_wait'], 0))
                    else:
                        self._tmr_limit_wait = None
            # seed the dedupe state from the most recent fail on record
            self._last_fail_type = self.last_fail
|
|
|
|
|
|
|
|
def _save_fail_value(self, field, value):
|
|
|
|
my_db = db.DBConnection('cache.db')
|
|
|
|
if my_db.hasTable('provider_fails_count'):
|
|
|
|
r = my_db.action('UPDATE provider_fails_count SET %s = ? WHERE prov_name = ?' % field,
|
|
|
|
[value, self.get_id()])
|
2017-11-02 18:30:05 +00:00
|
|
|
if 0 == r.rowcount:
|
2018-01-15 17:54:36 +00:00
|
|
|
my_db.action('REPLACE INTO provider_fails_count (prov_name, %s) VALUES (?,?)' % field,
|
|
|
|
[self.get_id(), value])
|
2017-11-02 18:30:05 +00:00
|
|
|
|
|
|
|
@property
|
2018-01-15 17:54:36 +00:00
|
|
|
def last_fail(self):
|
2017-11-02 18:30:05 +00:00
|
|
|
try:
|
2018-01-15 17:54:36 +00:00
|
|
|
return sorted(self.fails.fails, key=lambda x: x.fail_time, reverse=True)[0].fail_type
|
2017-11-02 18:30:05 +00:00
|
|
|
except (StandardError, Exception):
|
|
|
|
return None
|
|
|
|
|
|
|
|
    @property
    def failure_count(self):
        # consecutive failure tally; persisted to db via the setter
        return self._failure_count
|
|
|
|
|
|
|
|
@failure_count.setter
|
|
|
|
def failure_count(self, value):
|
|
|
|
changed_val = self._failure_count != value
|
|
|
|
self._failure_count = value
|
|
|
|
if changed_val:
|
2018-01-15 17:54:36 +00:00
|
|
|
self._save_fail_value('failure_count', value)
|
2017-11-02 18:30:05 +00:00
|
|
|
|
|
|
|
    @property
    def failure_time(self):
        # datetime of the most recent failure, or None; persisted via the setter
        return self._failure_time
|
|
|
|
|
|
|
|
@failure_time.setter
|
|
|
|
def failure_time(self, value):
|
|
|
|
if None is value or isinstance(value, datetime.datetime):
|
|
|
|
changed_val = self._failure_time != value
|
|
|
|
self._failure_time = value
|
|
|
|
if changed_val:
|
2018-01-15 17:54:36 +00:00
|
|
|
self._save_fail_value('failure_time', (sbdatetime.totimestamp(value), value)[None is value])
|
2017-11-02 18:30:05 +00:00
|
|
|
|
|
|
|
    @property
    def tmr_limit_count(self):
        # tally of 'too many requests' responses; persisted via the setter
        return self._tmr_limit_count
|
2017-11-02 18:30:05 +00:00
|
|
|
|
2018-01-15 17:54:36 +00:00
|
|
|
@tmr_limit_count.setter
|
|
|
|
def tmr_limit_count(self, value):
|
|
|
|
changed_val = self._tmr_limit_count != value
|
|
|
|
self._tmr_limit_count = value
|
2017-11-02 18:30:05 +00:00
|
|
|
if changed_val:
|
2018-01-15 17:54:36 +00:00
|
|
|
self._save_fail_value('tmr_limit_count', value)
|
2017-11-02 18:30:05 +00:00
|
|
|
|
|
|
|
    @property
    def tmr_limit_time(self):
        # datetime when the current request limit was imposed, or None
        return self._tmr_limit_time
|
2017-11-02 18:30:05 +00:00
|
|
|
|
2018-01-15 17:54:36 +00:00
|
|
|
@tmr_limit_time.setter
|
|
|
|
def tmr_limit_time(self, value):
|
2017-11-02 18:30:05 +00:00
|
|
|
if None is value or isinstance(value, datetime.datetime):
|
2018-01-15 17:54:36 +00:00
|
|
|
changed_val = self._tmr_limit_time != value
|
|
|
|
self._tmr_limit_time = value
|
2017-11-02 18:30:05 +00:00
|
|
|
if changed_val:
|
2018-01-15 17:54:36 +00:00
|
|
|
self._save_fail_value('tmr_limit_time', (sbdatetime.totimestamp(value), value)[None is value])
|
2017-11-02 18:30:05 +00:00
|
|
|
|
|
|
|
    @property
    def max_index(self):
        # highest key of the fail_times back-off table (keys are 1..n)
        return len(self.fail_times)
|
|
|
|
|
|
|
|
    @property
    def tmr_limit_wait(self):
        # timedelta to wait out the current request limit, or None
        return self._tmr_limit_wait
|
|
|
|
|
|
|
|
@tmr_limit_wait.setter
|
|
|
|
def tmr_limit_wait(self, value):
|
|
|
|
if isinstance(getattr(self, 'fails', None), ProviderFailList) and isinstance(value, datetime.timedelta):
|
|
|
|
self.fails.add_fail(ProviderFail(fail_type=ProviderFailTypes.limit))
|
|
|
|
changed_val = self._tmr_limit_wait != value
|
|
|
|
self._tmr_limit_wait = value
|
2017-11-02 18:30:05 +00:00
|
|
|
if changed_val:
|
|
|
|
if None is value:
|
2018-01-15 17:54:36 +00:00
|
|
|
self._save_fail_value('tmr_limit_wait', value)
|
2017-11-02 18:30:05 +00:00
|
|
|
elif isinstance(value, datetime.timedelta):
|
2018-01-15 17:54:36 +00:00
|
|
|
self._save_fail_value('tmr_limit_wait', value.total_seconds())
|
2017-11-02 18:30:05 +00:00
|
|
|
|
|
|
|
def fail_time_index(self, base_limit=2):
|
|
|
|
i = self.failure_count - base_limit
|
|
|
|
return (i, self.max_index)[i >= self.max_index]
|
|
|
|
|
2018-01-15 17:54:36 +00:00
|
|
|
def tmr_limit_update(self, period, unit, desc):
|
|
|
|
self.tmr_limit_time = datetime.datetime.now()
|
|
|
|
self.tmr_limit_count += 1
|
|
|
|
limit_set = False
|
|
|
|
if None not in (period, unit):
|
|
|
|
limit_set = True
|
|
|
|
if unit in ('s', 'sec', 'secs', 'seconds', 'second'):
|
|
|
|
self.tmr_limit_wait = datetime.timedelta(seconds=helpers.tryInt(period))
|
|
|
|
elif unit in ('m', 'min', 'mins', 'minutes', 'minute'):
|
|
|
|
self.tmr_limit_wait = datetime.timedelta(minutes=helpers.tryInt(period))
|
|
|
|
elif unit in ('h', 'hr', 'hrs', 'hours', 'hour'):
|
|
|
|
self.tmr_limit_wait = datetime.timedelta(hours=helpers.tryInt(period))
|
|
|
|
elif unit in ('d', 'days', 'day'):
|
|
|
|
self.tmr_limit_wait = datetime.timedelta(days=helpers.tryInt(period))
|
|
|
|
else:
|
|
|
|
limit_set = False
|
|
|
|
if not limit_set:
|
|
|
|
time_index = self.fail_time_index(base_limit=0)
|
|
|
|
self.tmr_limit_wait = self.wait_time(time_index)
|
|
|
|
logger.log('Request limit reached. Waiting for %s until next retry. Message: %s' %
|
|
|
|
(self.tmr_limit_wait, desc or 'none found'), logger.WARNING)
|
|
|
|
|
|
|
|
def wait_time(self, time_index=None):
|
|
|
|
"""
|
|
|
|
Return a suitable wait time, selected by parameter, or based on the current failure count
|
|
|
|
|
|
|
|
:param time_index: A key value index into the fail_times dict, or selects using failure count if None
|
|
|
|
:type time_index: Integer
|
|
|
|
:return: Time
|
|
|
|
:rtype: Timedelta
|
|
|
|
"""
|
|
|
|
if None is time_index:
|
|
|
|
time_index = self.fail_time_index()
|
|
|
|
return datetime.timedelta(hours=self.fail_times[time_index][0], minutes=self.fail_times[time_index][1])
|
|
|
|
|
|
|
|
def fail_newest_delta(self):
|
|
|
|
"""
|
|
|
|
Return how long since most recent failure
|
|
|
|
:return: Period since most recent failure on record
|
|
|
|
:rtype: timedelta
|
|
|
|
"""
|
|
|
|
return datetime.datetime.now() - self.failure_time
|
|
|
|
|
|
|
|
def is_waiting(self):
|
|
|
|
return self.fail_newest_delta() < self.wait_time()
|
|
|
|
|
|
|
|
def valid_tmr_time(self):
|
|
|
|
return isinstance(self.tmr_limit_wait, datetime.timedelta) and \
|
|
|
|
isinstance(self.tmr_limit_time, datetime.datetime)
|
2017-11-02 18:30:05 +00:00
|
|
|
|
|
|
|
@property
|
|
|
|
def get_next_try_time(self):
|
|
|
|
n = None
|
|
|
|
h = datetime.timedelta(seconds=0)
|
|
|
|
f = datetime.timedelta(seconds=0)
|
2018-01-15 17:54:36 +00:00
|
|
|
if self.valid_tmr_time():
|
|
|
|
h = self.tmr_limit_time + self.tmr_limit_wait - datetime.datetime.now()
|
|
|
|
if 3 <= self.failure_count and isinstance(self.failure_time, datetime.datetime) and self.is_waiting():
|
|
|
|
h = self.failure_time + self.wait_time() - datetime.datetime.now()
|
2017-11-02 18:30:05 +00:00
|
|
|
if datetime.timedelta(seconds=0) < max((h, f)):
|
|
|
|
n = max((h, f))
|
|
|
|
return n
|
|
|
|
|
|
|
|
    def retry_next(self):
        # Rewind wait-state timestamps so the next attempt is permitted immediately.
        if self.valid_tmr_time():
            self.tmr_limit_time = datetime.datetime.now() - self.tmr_limit_wait
        if 3 <= self.failure_count and isinstance(self.failure_time, datetime.datetime) and self.is_waiting():
            self.failure_time = datetime.datetime.now() - self.wait_time()
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def fmt_delta(delta):
|
|
|
|
return str(delta).rsplit('.')[0]
|
|
|
|
|
|
|
|
    def should_skip(self, log_warning=True, use_tmr_limit=True):
        """
        Determine if a subsequent server request should be skipped. The result of this logic is based on most recent
        server connection activity including, exhausted request limits, and counting connect failures to determine a
        "cool down" period before recommending reconnection attempts; by returning False.

        :param log_warning: Output to log if True (default) otherwise set False for no output.
        :type log_warning: Boolean
        :param use_tmr_limit: Setting this to False will ignore a tmr limit being reached and will instead return False.
        :type use_tmr_limit: Boolean
        :return: True for any known issue that would prevent a subsequent server connection, otherwise False.
        :rtype: Boolean
        """
        # 1) honour an active 'too many requests' server limit
        if self.valid_tmr_time():
            time_left = self.tmr_limit_time + self.tmr_limit_wait - datetime.datetime.now()
            if time_left > datetime.timedelta(seconds=0):
                if log_warning:
                    # Ensure provider name output (e.g. when displaying config/provs) instead of e.g. thread "Tornado"
                    prepend = ('[%s] :: ' % self.name, '')[any([x.name in threading.currentThread().getName()
                                                                for x in sickbeard.providers.sortedProviderList()])]
                    logger.log('%sToo many requests reached at %s, waiting for %s' % (
                        prepend, self.fmt_delta(self.tmr_limit_time), self.fmt_delta(time_left)), logger.WARNING)
                return use_tmr_limit
            else:
                # limit period elapsed; clear it
                self.tmr_limit_time = None
                self.tmr_limit_wait = None
        # 2) apply the consecutive-failure back-off after 3+ failures
        if 3 <= self.failure_count:
            if None is self.failure_time:
                self.failure_time = datetime.datetime.now()
            if self.is_waiting():
                if log_warning:
                    time_left = self.wait_time() - self.fail_newest_delta()
                    logger.log('Failed %s times, skipping provider for %s, last failure at %s with fail type: %s' % (
                        self.failure_count, self.fmt_delta(time_left), self.fmt_delta(self.failure_time),
                        ProviderFailTypes.names.get(
                            self.last_fail, ProviderFailTypes.names[ProviderFailTypes.other])), logger.WARNING)
                return True
        return False
|
|
|
|
|
|
|
|
def inc_failure_count(self, *args, **kwargs):
|
2018-01-15 17:54:36 +00:00
|
|
|
fail_type = ('fail_type' in kwargs and kwargs['fail_type'].fail_type) or \
|
|
|
|
(isinstance(args, tuple) and isinstance(args[0], ProviderFail) and args[0].fail_type)
|
2017-11-02 18:30:05 +00:00
|
|
|
if not isinstance(self.failure_time, datetime.datetime) or \
|
2018-01-15 17:54:36 +00:00
|
|
|
fail_type != self._last_fail_type or \
|
|
|
|
self.fail_newest_delta() > datetime.timedelta(seconds=3):
|
2017-11-02 18:30:05 +00:00
|
|
|
self.failure_count += 1
|
|
|
|
self.failure_time = datetime.datetime.now()
|
2018-01-15 17:54:36 +00:00
|
|
|
self._last_fail_type = fail_type
|
|
|
|
self.fails.add_fail(*args, **kwargs)
|
2017-11-02 18:30:05 +00:00
|
|
|
else:
|
2018-01-15 17:54:36 +00:00
|
|
|
logger.log('%s: Not logging same failure within 3 seconds' % self.name, logger.DEBUG)
|
2017-11-02 18:30:05 +00:00
|
|
|
|
2018-01-15 17:54:36 +00:00
|
|
|
    def get_url(self, url, skip_auth=False, use_tmr_limit=True, *args, **kwargs):
        """
        Return data from a URI with a possible check for authentication prior to the data fetch.
        Raised errors and no data in responses are tracked for making future logic decisions.

        :param url: Address where to fetch data from
        :type url: String
        :param skip_auth: Skip authentication check of provider if True
        :type skip_auth: Boolean
        :param use_tmr_limit: An API limit can be +ve before a fetch, but unwanted, set False to short should_skip
        :type use_tmr_limit: Boolean
        :param args: params to pass-through to getURL
        :type args:
        :param kwargs: keyword params to pass-through to getURL
        :type kwargs:
        :return: None or data fetched from URL
        :rtype: String or Nonetype
        """
        data = None

        # check for auth
        if (not skip_auth and not (self.is_public_access()
                                   and type(self).__name__ not in ['TorrentRssProvider']) and not self._authorised()) \
                or self.should_skip(use_tmr_limit=use_tmr_limit):
            return

        # force getURL to raise so each failure mode can be classified below
        kwargs['raise_exceptions'] = True
        kwargs['raise_status_code'] = True
        for k, v in dict(headers=self.headers, hooks=dict(response=self.cb_response)).items():
            kwargs.setdefault(k, v)
        if 'nzbs.in' not in url:  # this provider returns 503's 3 out of 4 requests with the persistent session system
            kwargs.setdefault('session', self.session)

        post_data = kwargs.get('post_data')
        post_json = kwargs.get('post_json')

        # noinspection PyUnusedLocal
        log_failure_url = False
        try:
            data = helpers.getURL(url, *args, **kwargs)
            if data:
                # a good response clears any accumulated failure state
                if 0 != self.failure_count:
                    logger.log('Unblocking provider: %s' % self.get_id(), logger.DEBUG)
                self.failure_count = 0
                self.failure_time = None
            else:
                self.inc_failure_count(ProviderFail(fail_type=ProviderFailTypes.nodata))
                log_failure_url = True
        except requests.exceptions.HTTPError as e:
            self.inc_failure_count(ProviderFail(fail_type=ProviderFailTypes.http, code=e.response.status_code))
        except requests.exceptions.ConnectionError:
            self.inc_failure_count(ProviderFail(fail_type=ProviderFailTypes.connection))
        except requests.exceptions.ReadTimeout:
            self.inc_failure_count(ProviderFail(fail_type=ProviderFailTypes.timeout))
        except (requests.exceptions.Timeout, socket.timeout):
            self.inc_failure_count(ProviderFail(fail_type=ProviderFailTypes.connection_timeout))
        except (StandardError, Exception) as e:
            log_failure_url = True
            self.inc_failure_count(ProviderFail(fail_type=ProviderFailTypes.other))

        self.fails.save_list()
        if log_failure_url:
            self.log_failure_url(url, post_data, post_json)
        return data
|
|
|
|
|
2018-01-15 17:54:36 +00:00
|
|
|
def log_failure_url(self, url, post_data=None, post_json=None):
    """Log the url (and any post params) of a request made while the provider is in a failure state."""
    if self.should_skip(log_warning=False):
        params = []
        if post_data:
            params.append(' .. Post params: [%s]' % '&'.join([post_data]))
        if post_json:
            params.append(' .. Json params: [%s]' % '&'.join([post_json]))
        logger.log('Failure URL: %s%s' % (url, ''.join(params)), logger.WARNING)
|
|
|
|
|
2015-07-13 09:39:20 +00:00
|
|
|
def get_id(self):
    # Stable, filesystem/config-safe identifier derived from the provider's
    # display name (see make_id); used e.g. to locate the provider image file.
    return GenericProvider.make_id(self.name)
|
2014-03-10 05:18:05 +00:00
|
|
|
|
|
|
|
@staticmethod
def make_id(name):
    """Return a safe identifier for a provider name.

    Lowercases and strips the name, then replaces every character that is
    not a word character with an underscore.

    :param name: provider display name
    :return: lowercased identifier containing only [a-z0-9_]
    """
    # Fix: use a raw string for the regex; the original non-raw '[^\w\d_]'
    # relies on invalid escape sequences (SyntaxWarning on modern Python).
    # \d and _ are already inside \w, so the class reduces to [^\w].
    return re.sub(r'[^\w]', '_', name.strip().lower())
|
2014-03-10 05:18:05 +00:00
|
|
|
|
2015-07-13 09:39:20 +00:00
|
|
|
def image_name(self, *default_name):
    """Return the filename of this provider's image under gui/<name>/images/providers.

    Tries <provider_id>.png/.gif/.jpg in that order; falls back to the first
    optional default name, or 'newznab', with a .png extension.
    """
    for name in ['%s.%s' % (self.get_id(), image_ext) for image_ext in ['png', 'gif', 'jpg']]:
        if ek.ek(os.path.isfile,
                 ek.ek(os.path.join, sickbeard.PROG_DIR, 'gui', sickbeard.GUI_NAME, 'images', 'providers', name)):
            return name

    # tuple-index idiom: pick default_name[0] when a default was supplied, else 'newznab'
    return '%s.png' % ('newznab', default_name[0])[any(default_name)]
|
2014-03-10 05:18:05 +00:00
|
|
|
|
2015-09-18 00:06:34 +00:00
|
|
|
def _authorised(self):
    # Base implementation: no login required. Providers that need to
    # authenticate a session override this.
    return True
|
2014-03-10 05:18:05 +00:00
|
|
|
|
2016-08-26 23:36:01 +00:00
|
|
|
def _check_auth(self, is_required=None):
    # Base implementation: no credentials to validate. Providers with api
    # keys or username/password pairs override this (may raise AuthException).
    return True
|
|
|
|
|
2016-08-26 23:36:01 +00:00
|
|
|
def is_public_access(self):
    # True when the provider requires no credentials: either a known public
    # provider matched by name, or no auth hook is defined (no overridden
    # _authorised, no digest attribute) and the base _check_auth call passes.
    try:
        return bool(re.search('(?i)rarbg|sick|anizb', self.name)) \
            or False is bool(('_authorised' in self.__class__.__dict__ or hasattr(self, 'digest')
                              or self._check_auth(is_required=True)))
    except AuthException:
        # a provider that raises on auth check is, by definition, not public
        return False
|
|
|
|
|
2015-07-13 09:39:20 +00:00
|
|
|
def is_active(self):
    """Return True when the global switch for this provider type is on and the provider itself is enabled."""
    type_is_usable = (
        (GenericProvider.NZB == self.providerType and sickbeard.USE_NZBS)
        or (GenericProvider.TORRENT == self.providerType and sickbeard.USE_TORRENTS))
    if type_is_usable:
        return self.is_enabled()
    return False
|
|
|
|
|
2015-07-13 09:39:20 +00:00
|
|
|
def is_enabled(self):
    """
    This should be overridden and should return the config setting eg. sickbeard.MYPROVIDER
    """
    # default: the per-provider enabled flag
    return self.enabled
|
2014-03-10 05:18:05 +00:00
|
|
|
|
2015-07-13 09:39:20 +00:00
|
|
|
def get_result(self, episodes, url):
    """
    Returns a result of the correct type for this provider
    """
    # choose the search-result class by provider type, defaulting to the base class
    result_class = {
        GenericProvider.NZB: classes.NZBSearchResult,
        GenericProvider.TORRENT: classes.TorrentSearchResult,
    }.get(self.providerType, classes.SearchResult)

    result = result_class(episodes)
    result.provider = self
    result.url = url
    return result
|
|
|
|
|
2016-02-26 01:07:39 +00:00
|
|
|
# noinspection PyUnusedLocal
def cb_response(self, r, *args, **kwargs):
    """Requests response hook: record summary details of the latest response on the session."""
    self.session.response = {'url': r.url, 'status_code': r.status_code,
                            'elapsed': r.elapsed, 'from_cache': r.from_cache}
    return r
|
|
|
|
|
2015-07-13 09:39:20 +00:00
|
|
|
def download_result(self, result):
    """
    Save the result to disk.

    For torrents, tries a list of public torrent-cache mirrors derived from
    the info hash; falls back to writing the raw magnet link to a file.
    Returns True when something usable was saved.
    """

    # check for auth
    if not self._authorised():
        return False

    if GenericProvider.TORRENT == self.providerType:
        final_dir = sickbeard.TORRENT_DIR
        link_type = 'magnet'
        try:
            btih = None
            try:
                # extract the info hash; a 32 char hash is base32 and must be
                # converted to the hex (base16) form
                btih = re.findall('urn:btih:([\w]{32,40})', result.url)[0]
                if 32 == len(btih):
                    from base64 import b16encode, b32decode
                    btih = b16encode(b32decode(btih))
            except (StandardError, Exception):
                pass

            if not btih or not re.search('(?i)[0-9a-f]{32,40}', btih):
                logger.log('Unable to extract torrent hash from link: ' + ex(result.url), logger.ERROR)
                return False

            # candidate .torrent urls on public cache servers, tried in order
            urls = ['http%s://%s/torrent/%s.torrent' % (u + (btih.upper(),))
                    for u in (('s', 'itorrents.org'), ('s', 'torrage.info'), ('', 'reflektor.karmorra.info'),
                              ('', 'thetorrent.org'))]
        except (StandardError, Exception):
            # not a magnet link; download the given url directly
            link_type = 'torrent'
            urls = [result.url]

    elif GenericProvider.NZB == self.providerType:
        final_dir = sickbeard.NZB_DIR
        link_type = 'nzb'
        urls = [result.url]

    else:
        return

    # remember any existing Referer header so it can be restored afterwards
    ref_state = 'Referer' in self.session.headers and self.session.headers['Referer']
    saved = False
    for url in urls:
        cache_dir = sickbeard.CACHE_DIR or helpers._getTempDir()
        base_name = '%s.%s' % (helpers.sanitizeFileName(result.name), self.providerType)
        final_file = ek.ek(os.path.join, final_dir, base_name)
        # reuse a previously cached file for this result if one exists
        cached = getattr(result, 'cache_file', None)
        if cached and ek.ek(os.path.isfile, cached):
            base_name = ek.ek(os.path.basename, cached)
        cache_file = ek.ek(os.path.join, cache_dir, base_name)

        self.session.headers['Referer'] = url
        if cached or helpers.download_file(url, cache_file, session=self.session, allow_redirects='/it' not in url):

            if self._verify_download(cache_file):
                logger.log(u'Downloaded %s result from %s' % (self.name, url))
                try:
                    helpers.moveFile(cache_file, final_file)
                    msg = 'moved'
                except (OSError, Exception):
                    msg = 'copied cached file'
                logger.log(u'Saved .%s data and %s to %s' % (
                    (link_type, 'torrent cache')['magnet' == link_type], msg, final_file))
                saved = True
                break

            # verification failed; discard and try the next mirror
            remove_file_failed(cache_file)

    # restore (or remove) the Referer header changed above
    if 'Referer' in self.session.headers:
        if ref_state:
            self.session.headers['Referer'] = ref_state
        else:
            del(self.session.headers['Referer'])

    if not saved and 'magnet' == link_type:
        # last resort: write the magnet uri itself; some clients accept it
        logger.log(u'All torrent cache servers failed to return a downloadable result', logger.DEBUG)
        final_file = ek.ek(os.path.join, final_dir, '%s.%s' % (helpers.sanitizeFileName(result.name), link_type))
        try:
            with open(final_file, 'wb') as fp:
                fp.write(result.url)
                fp.flush()
                os.fsync(fp.fileno())
            saved = True
            logger.log(u'Saved magnet link to file as some clients (or plugins) support this, %s' % final_file)
            if 'blackhole' == sickbeard.TORRENT_METHOD:
                logger.log('Tip: If your client fails to load magnet in files, ' +
                           'change blackhole to a client connection method in search settings')
        except (StandardError, Exception):
            logger.log(u'Failed to save magnet link to file, %s' % final_file)
    elif not saved:
        logger.log(u'Server failed to return anything useful', logger.ERROR)

    return saved
|
2014-03-10 05:18:05 +00:00
|
|
|
|
|
|
|
def _verify_download(self, file_name=None):
    """
    Checks the saved file to see if it was actually valid, if not then consider the download a failure.
    """
    outcome = True
    # primitive verification of torrents, just make sure we didn't get a text file or something
    if GenericProvider.TORRENT == self.providerType:
        stream, parser = None, None
        try:
            stream = FileInputStream(file_name)
            parser = guessParser(stream)
        except (HachoirError, Exception):
            pass
        # keep the short-circuit form: may yield None when no parser was found
        outcome = parser and 'application/x-bittorrent' == parser.mime_type

        try:
            stream._input.close()
        except (HachoirError, Exception):
            pass

    return outcome
|
2014-03-10 05:18:05 +00:00
|
|
|
|
2015-07-13 09:39:20 +00:00
|
|
|
def search_rss(self, episodes):
    # Recent/RSS searching is delegated to the provider cache.
    return self.cache.findNeededEpisodes(episodes)
|
2014-05-15 04:16:46 +00:00
|
|
|
|
2015-07-13 09:39:20 +00:00
|
|
|
def get_quality(self, item, anime=False):
    """
    Figures out the quality of the given RSS item node

    item: An elementtree.ElementTree element representing the <item> tag of the RSS feed

    Returns a Quality value obtained from the node's data
    """
    name = self._title_and_url(item)[0]
    return Quality.sceneQuality(name, anime)
|
|
|
|
|
2016-09-04 20:00:44 +00:00
|
|
|
def _search_provider(self, search_params, search_mode='eponly', epcount=0, age=0, **kwargs):
    # Base stub: concrete providers override this to run the actual search
    # and return a list of result items; parameters are kept for subclasses.
    return []
|
|
|
|
|
2015-09-18 00:06:34 +00:00
|
|
|
def _season_strings(self, episode):
    # Base stub: providers override to build season-pack search strings.
    return []
|
|
|
|
|
2015-09-18 00:06:34 +00:00
|
|
|
def _episode_strings(self, *args, **kwargs):
    # Base stub: providers override to build single-episode search strings.
    return []
|
2014-03-25 05:57:24 +00:00
|
|
|
|
2015-09-18 00:06:34 +00:00
|
|
|
def _title_and_url(self, item):
|
2014-03-10 05:18:05 +00:00
|
|
|
"""
|
2015-05-27 00:00:01 +00:00
|
|
|
Retrieves the title and URL data from the item
|
2014-03-10 05:18:05 +00:00
|
|
|
|
2015-05-27 00:00:01 +00:00
|
|
|
item: An elementtree.ElementTree element representing the <item> tag of the RSS feed, or a two part tup
|
2014-03-10 05:18:05 +00:00
|
|
|
|
|
|
|
Returns: A tuple containing two strings representing title and URL respectively
|
|
|
|
"""
|
2014-05-05 13:26:02 +00:00
|
|
|
|
2015-06-19 23:34:56 +00:00
|
|
|
title, url = None, None
|
2015-05-27 00:00:01 +00:00
|
|
|
try:
|
2015-09-18 00:06:34 +00:00
|
|
|
title, url = isinstance(item, tuple) and (item[0], item[1]) or \
|
|
|
|
(item.get('title', None), item.get('link', None))
|
2016-08-26 23:36:01 +00:00
|
|
|
except (StandardError, Exception):
|
2015-05-27 00:00:01 +00:00
|
|
|
pass
|
2014-03-10 05:18:05 +00:00
|
|
|
|
2015-09-18 00:06:34 +00:00
|
|
|
title = title and re.sub(r'\s+', '.', u'%s' % title)
|
|
|
|
url = url and str(url).replace('&', '&')
|
2014-03-25 05:57:24 +00:00
|
|
|
|
2014-08-23 21:39:10 +00:00
|
|
|
return title, url
|
2014-03-25 05:57:24 +00:00
|
|
|
|
2016-08-26 23:36:01 +00:00
|
|
|
def _link(self, url, url_tmpl=None):
|
|
|
|
|
|
|
|
url = url and str(url).strip().replace('&', '&') or ''
|
2017-10-15 00:19:10 +00:00
|
|
|
return url if re.match('(?i)(https?://|magnet:)', url) \
|
|
|
|
else (url_tmpl or self.urls.get('get', (getattr(self, 'url', '') or
|
|
|
|
getattr(self, 'url_base')) + '%s')) % url.lstrip('/')
|
2016-08-26 23:36:01 +00:00
|
|
|
|
2017-08-24 22:55:02 +00:00
|
|
|
@staticmethod
def _header_row(table_row, custom_match=None, custom_tags=None, header_strip=''):
    """
    :param table_row: Soup resultset of table header row
    :param custom_match: Dict key/values to override one or more default regexes
    :param custom_tags: List of tuples with tag and attribute
    :param header_strip: String regex of ambiguities to remove from headers
    :return: dict column indices or None for leech, seeds, and size
    """
    results = {}
    # compiled case-insensitive matchers; custom_match entries override defaults
    rc = dict((k, re.compile('(?i)' + r)) for (k, r) in dict(
        {'seed': r'(?:seed|s/l)', 'leech': r'(?:leech|peers)', 'size': r'(?:size)'}.items()
        + ({}, custom_match)[any([custom_match])].items()).items())
    table = table_row.find_parent('table')
    header_row = table.tr or table.thead.tr or table.tbody.tr
    # flatten multi-valued class attributes so they behave as plain strings
    for y in [x for x in header_row(True) if x.attrs.get('class')]:
        y['class'] = '..'.join(y['class'])
    all_cells = header_row.find_all('th')
    all_cells = all_cells if any(all_cells) else header_row.find_all('td')

    # derive one header token per cell, in priority order: visible text that
    # matches, then id/title attributes, then attributes of common child tags
    # (img/i/abbr/a plus custom_tags), finally the raw cell text
    headers = [re.sub(
        r'[\s]+', '',
        ((any([cell.get_text()]) and any([rc[x].search(cell.get_text()) for x in rc.keys()]) and cell.get_text())
         or (cell.attrs.get('id') and any([rc[x].search(cell['id']) for x in rc.keys()]) and cell['id'])
         or (cell.attrs.get('title') and any([rc[x].search(cell['title']) for x in rc.keys()]) and cell['title'])
         or next(iter(set(filter(lambda z: any([z]), [
            next(iter(set(filter(lambda y: any([y]), [
                cell.find(tag, **p) for p in [{attr: rc[x]} for x in rc.keys()]]))), {}).get(attr)
            for (tag, attr) in [
                ('img', 'title'), ('img', 'src'), ('i', 'title'), ('i', 'class'),
                ('abbr', 'title'), ('a', 'title'), ('a', 'href')] + (custom_tags or [])]))), '')
         or cell.get_text()
         )).strip() for cell in all_cells]
    headers = [re.sub(header_strip, '', x) for x in headers]
    all_headers = headers
    # expand colspan'd cells so header positions line up with data columns
    colspans = [int(cell.attrs.get('colspan', 0)) for cell in all_cells]
    if any(colspans):
        all_headers = []
        for i, width in enumerate(colspans):
            all_headers += [headers[i]] + ([''] * (width - 1))

    # record a negative (from-the-right) index for each matched header
    for k, r in rc.iteritems():
        if k not in results:
            for name in filter(lambda v: any([v]) and r.search(v), all_headers[::-1]):
                results[k] = all_headers.index(name) - len(all_headers)
                break

    # unmatched keys are reported as None so callers can test for presence
    for missing in set(rc.keys()) - set(results.keys()):
        results[missing] = None

    return results
|
|
|
|
|
2016-09-07 20:24:10 +00:00
|
|
|
@staticmethod
|
|
|
|
def _dhtless_magnet(btih, name=None):
|
|
|
|
"""
|
|
|
|
:param btih: torrent hash
|
|
|
|
:param name: torrent name
|
|
|
|
:return: a magnet loaded with default trackers for clients without enabled DHT or None if bad hash
|
|
|
|
"""
|
|
|
|
try:
|
|
|
|
btih = btih.lstrip('/').upper()
|
|
|
|
if 32 == len(btih):
|
|
|
|
btih = b16encode(b32decode(btih)).lower()
|
|
|
|
btih = re.search('(?i)[0-9a-f]{32,40}', btih) and btih or None
|
|
|
|
except (StandardError, Exception):
|
|
|
|
btih = None
|
|
|
|
return (btih and 'magnet:?xt=urn:btih:%s&dn=%s&tr=%s' % (btih, quote_plus(name or btih), '&tr='.join(
|
|
|
|
[quote_plus(tr) for tr in
|
|
|
|
'http://atrack.pow7.com/announce', 'http://mgtracker.org:2710/announce',
|
|
|
|
'http://pow7.com/announce', 'http://t1.pow7.com/announce',
|
|
|
|
'http://tracker.tfile.me/announce', 'udp://9.rarbg.com:2710/announce',
|
|
|
|
'udp://9.rarbg.me:2710/announce', 'udp://9.rarbg.to:2710/announce',
|
|
|
|
'udp://eddie4.nl:6969/announce', 'udp://explodie.org:6969/announce',
|
|
|
|
'udp://inferno.demonoid.pw:3395/announce', 'udp://inferno.subdemon.com:3395/announce',
|
|
|
|
'udp://ipv4.tracker.harry.lu:80/announce', 'udp://p4p.arenabg.ch:1337/announce',
|
|
|
|
'udp://shadowshq.yi.org:6969/announce', 'udp://tracker.aletorrenty.pl:2710/announce',
|
|
|
|
'udp://tracker.coppersurfer.tk:6969', 'udp://tracker.coppersurfer.tk:6969/announce',
|
|
|
|
'udp://tracker.internetwarriors.net:1337', 'udp://tracker.internetwarriors.net:1337/announce',
|
|
|
|
'udp://tracker.leechers-paradise.org:6969', 'udp://tracker.leechers-paradise.org:6969/announce',
|
|
|
|
'udp://tracker.opentrackr.org:1337/announce', 'udp://tracker.torrent.eu.org:451/announce',
|
Add fanart to Episodes View, Display Show, Edit Show, and Media Renamer page.
Add "Maximum fanart image files per show to cache" to config General/Interface.
Add populate images when the daily show updater is run with a default maximum 3 images per show.
Change force full update in a show will replace existing images with new.
Add fanart livepanel to lower right of Episodes View and Display Show page.
Add highlight panel red until button is clicked a few times.
Add flick through multiple background images on Episodes View and Display Show page.
Add persistent move poster image to right hand side or hide on Display Show page (multi-click the eye).
Add persistent translucency of background images on Episodes View and Display Show page.
Add persistent fanart rating to avoid art completely, random display, random from a group, or display fave always.
Add persistent views of the show detail on Display Show page.
Add persistent views on Episodes View.
Add persistent button to collapse and expand card images on Episode View/Layout daybyday.
Add non persistent "Open gear" and "Full fanart" image views to Episodes View and Display Show page.
Add "smart" selection of fanart image to display on Episode view.
Change insert [!] and change text shade of ended shows in drop down show list on Display Show page.
Change button graphic for next and previous show of show list on Display Show page.
Add logic to hide some livepanel buttons until artwork becomes available or in other circumstances.
Add "(Ended)" where appropriate to show title on Display Show page.
Add links to fanart.tv where appropriate on Display Show page.
Change use tense for label "Airs" or "Aired" depending on if show ended.
Change display "No files" instead of "0 files" and "Upgrade once" instead of "End upgrade on first match".
Add persistent button to newest season to "Show all" episodes.
Add persistent button to all shown seasons to "Hide most" episodes.
Add button to older seasons to toggle "Show Season n" or "Show Specials" with "Hide..." episodes.
Add season level status counts next to each season header on display show page
Add sorting to season table headers on display show page
Add filename and size to quality badge on display show page, removed its redundant "downloaded" text
Remove redundant "Add show" buttons
Change combine the NFO and TBN columns into a single Meta column
Change reduce screen estate used by episode numbers columns
Change improve clarity of text on Add Show page.
Add "Reset fanart ratings" to show Edit/Other tab.
Add fanart usage to show Edit/Other tab.
Add fanart keys guide to show Edit/Other tab.
Change add placeholder tip to "Alternative release name(s)" on show Edit.
Change add placeholder tip to search box on shows Search.
Change hide Anime tips on show Edit when selecting its mutually exclusive options.
Change label "End upgrade on first match" to "Upgrade once" on show Edit.
Change improve performance rendering displayShow.
Add total episodes to start of show description (excludes specials if those are hidden).
Add "Add show" actions i.e. "Search", "Trakt cards", "IMDb cards", and "Anime" to Shows menu.
Add "Import (existing)" action to Tools menu.
Change SD quality from red to dark green, 2160p UHD 4K is red.
Change relocate the functions of Logs & Errors to the right side Tools menu -> View Log File.
Add warning indicator to the Tools menu in different colour depending on error count (green through red).
Change View Log error item output from reversed to natural order.
Change View Log add a typeface and some colour to improve readability.
Change View Log/Errors only display "Clear Errors" button when there are errors to clear.
Change improve performance of View Log File.
2016-02-28 23:43:40 +00:00
|
|
|
'udp://tracker.trackerfix.com:80/announce', 'udp://tracker.zer0day.to:1337/announce'])) or None)
|
2016-09-07 20:24:10 +00:00
|
|
|
|
2016-09-04 20:00:44 +00:00
|
|
|
def get_show(self, item, **kwargs):
    # Base stub: providers that can map a result item to a show object
    # override this; None lets the name parser resolve the show itself.
    return None
|
|
|
|
|
2017-09-13 17:18:59 +00:00
|
|
|
def get_size_uid(self, item, **kwargs):
    # Base stub: return (size, unique id) of a result item;
    # (-1, None) means unknown.
    return -1, None
|
|
|
|
|
2016-09-04 20:00:44 +00:00
|
|
|
def find_search_results(self, show, episodes, search_mode, manual_search=False, **kwargs):
    """Search this provider for the given episodes.

    Checks the provider cache first, then builds season or episode search
    strings depending on search_mode and queries the provider. Returns a
    dict mapping episode number to a list of results (delegating final
    filtering to finish_find_search_results).
    """
    self._check_auth()
    self.show = show

    results = {}
    item_list = []
    # provider in a failure back-off window? bail out early
    if self.should_skip():
        return results

    searched_scene_season = None
    for ep_obj in episodes:
        if self.should_skip(log_warning=False):
            break
        # search cache for episode result
        cache_result = self.cache.searchCache(ep_obj, manual_search)
        if cache_result:
            if ep_obj.episode not in results:
                results[ep_obj.episode] = cache_result
            else:
                results[ep_obj.episode].extend(cache_result)

            # found result, search next episode
            continue

        if 'sponly' == search_mode:
            # skip if season already searched
            if 1 < len(episodes) and searched_scene_season == ep_obj.scene_season:
                continue

            searched_scene_season = ep_obj.scene_season

            # get season search params
            search_params = self._season_strings(ep_obj)
        else:
            # get single episode search params
            search_params = self._episode_strings(ep_obj)

        for cur_param in search_params:
            item_list += self._search_provider(cur_param, search_mode=search_mode, epcount=len(episodes))
            if self.should_skip():
                break

    return self.finish_find_search_results(show, episodes, search_mode, manual_search, results, item_list)
|
|
|
|
|
|
|
|
def finish_find_search_results(self, show, episodes, search_mode, manual_search, results, item_list, **kwargs):
    """Validate raw provider items and fold them into the results dict.

    Sorts items by quality, parses each title, rejects items that don't
    match the requested episodes (caching them for later use), applies the
    wantEpisode quality filter, and groups surviving results by episode
    number (or MULTI_EP_RESULT / SEASON_RESULT).

    :param results: dict of episode number -> list of results (from cache)
    :param item_list: raw items returned by _search_provider
    :return: the updated results dict
    """

    # if we found what we needed already from cache then return results and exit
    if len(results) == len(episodes):
        return results

    # sort list by quality
    if len(item_list):
        items = {}
        items_unknown = []
        for item in item_list:
            quality = self.get_quality(item, anime=show.is_anime)
            if Quality.UNKNOWN == quality:
                items_unknown += [item]
            else:
                if quality not in items:
                    items[quality] = [item]
                else:
                    items[quality].append(item)

        # highest quality first, unknown quality items last
        item_list = list(itertools.chain(*[v for (k, v) in sorted(items.items(), reverse=True)]))
        item_list += items_unknown if items_unknown else []

    # filter results
    cl = []
    for item in item_list:
        (title, url) = self._title_and_url(item)

        parser = NameParser(False, showObj=self.get_show(item, **kwargs), convert=True, indexer_lookup=False)
        # parse the file name
        try:
            parse_result = parser.parse(title)
        except InvalidNameException:
            logger.log(u'Unable to parse the filename %s into a valid episode' % title, logger.DEBUG)
            continue
        except InvalidShowException:
            logger.log(u'No match for search criteria in the parsed filename ' + title, logger.DEBUG)
            continue

        show_obj = parse_result.show
        quality = parse_result.quality
        release_group = parse_result.release_group
        version = parse_result.version

        add_cache_entry = False
        if not (show_obj.air_by_date or show_obj.is_sports):
            if 'sponly' == search_mode:
                if len(parse_result.episode_numbers):
                    logger.log(u'This is supposed to be a season pack search but the result ' + title +
                               u' is not a valid season pack, skipping it', logger.DEBUG)
                    add_cache_entry = True
                if len(parse_result.episode_numbers)\
                        and (parse_result.season_number not in set([ep.season for ep in episodes]) or not [
                            ep for ep in episodes if ep.scene_episode in parse_result.episode_numbers]):
                    logger.log(u'The result ' + title + u' doesn\'t seem to be a valid episode that we are trying' +
                               u' to snatch, ignoring', logger.DEBUG)
                    add_cache_entry = True
            else:
                if not len(parse_result.episode_numbers)\
                        and parse_result.season_number\
                        and not [ep for ep in episodes
                                 if ep.season == parse_result.season_number and
                                 ep.episode in parse_result.episode_numbers]:
                    logger.log(u'The result ' + title + u' doesn\'t seem to be a valid season that we are trying' +
                               u' to snatch, ignoring', logger.DEBUG)
                    add_cache_entry = True
                elif len(parse_result.episode_numbers) and not [
                        ep for ep in episodes if ep.season == parse_result.season_number and
                        ep.episode in parse_result.episode_numbers]:
                    logger.log(u'The result ' + title + ' doesn\'t seem to be a valid episode that we are trying' +
                               u' to snatch, ignoring', logger.DEBUG)
                    add_cache_entry = True

            if not add_cache_entry:
                # we just use the existing info for normal searches
                actual_season = parse_result.season_number
                actual_episodes = parse_result.episode_numbers
        else:
            if not parse_result.is_air_by_date:
                logger.log(u'This is supposed to be a date search but the result ' + title +
                           u' didn\'t parse as one, skipping it', logger.DEBUG)
                add_cache_entry = True
            else:
                actual_season = parse_result.season_number
                actual_episodes = parse_result.episode_numbers

                if not actual_episodes or \
                        not [ep for ep in episodes if ep.season == actual_season and ep.episode in actual_episodes]:
                    logger.log(u'The result ' + title + ' doesn\'t seem to be a valid episode that we are trying' +
                               u' to snatch, ignoring', logger.DEBUG)
                    add_cache_entry = True

        # add parsed result to cache for usage later on
        if add_cache_entry:
            logger.log(u'Adding item from search to cache: ' + title, logger.DEBUG)
            ci = self.cache.add_cache_entry(title, url, parse_result=parse_result)
            if None is not ci:
                cl.append(ci)
            continue

        # make sure we want the episode
        want_ep = True
        multi_ep = False
        for epNo in actual_episodes:
            want_ep = show_obj.wantEpisode(actual_season, epNo, quality, manual_search, multi_ep)
            if not want_ep:
                break
            # after initial single ep perspective, prepare multi ep for subsequent iterations.
            # fix: compare against the list length; the original `1 < actual_episodes`
            # compared an int to a list (always True on py2, TypeError on py3)
            multi_ep = 1 < len(actual_episodes)

        if not want_ep:
            logger.log(u'Ignoring result %s because we don\'t want an episode that is %s'
                       % (title, Quality.qualityStrings[quality]), logger.DEBUG)
            continue

        logger.log(u'Found result %s at %s' % (title, url), logger.DEBUG)

        # make a result object
        ep_obj = []
        for curEp in actual_episodes:
            ep_obj.append(show_obj.getEpisode(actual_season, curEp))

        result = self.get_result(ep_obj, url)
        if None is result:
            continue
        result.show = show_obj
        result.name = title
        result.quality = quality
        result.release_group = release_group
        result.content = None
        result.version = version
        result.size, result.puid = self.get_size_uid(item, **kwargs)
        result.is_repack, result.properlevel = Quality.get_proper_level(parse_result.extra_info_no_name(),
                                                                        parse_result.version, show_obj.is_anime,
                                                                        check_is_repack=True)

        if 1 == len(ep_obj):
            ep_num = ep_obj[0].episode
            logger.log(u'Single episode result.', logger.DEBUG)
        elif 1 < len(ep_obj):
            ep_num = MULTI_EP_RESULT
            logger.log(u'Separating multi-episode result to check for later - result contains episodes: ' +
                       str(parse_result.episode_numbers), logger.DEBUG)
        elif 0 == len(ep_obj):
            ep_num = SEASON_RESULT
            logger.log(u'Separating full season result to check for later', logger.DEBUG)

        if ep_num not in results:
            results[ep_num] = [result]
        else:
            results[ep_num].append(result)

    # check if we have items to add to cache
    if 0 < len(cl):
        my_db = self.cache.get_db()
        my_db.mass_action(cl)

    return results
|
2014-03-10 05:18:05 +00:00
|
|
|
|
2015-09-18 00:06:34 +00:00
|
|
|
def find_propers(self, search_date=None, **kwargs):
|
2014-03-10 05:18:05 +00:00
|
|
|
|
|
|
|
results = self.cache.listPropers(search_date)
|
|
|
|
|
2014-07-15 02:00:53 +00:00
|
|
|
return [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time']), self.show) for x in
|
|
|
|
results]
|
2014-03-10 05:18:05 +00:00
|
|
|
|
2015-07-13 09:39:20 +00:00
|
|
|
def seed_ratio(self):
|
|
|
|
"""
|
2014-05-08 22:28:28 +00:00
|
|
|
Provider should override this value if custom seed ratio enabled
|
|
|
|
It should return the value of the provider seed ratio
|
2015-07-13 09:39:20 +00:00
|
|
|
"""
|
2014-05-08 22:28:28 +00:00
|
|
|
return ''
|
|
|
|
|
2015-09-18 00:06:34 +00:00
|
|
|
def _log_search(self, mode='Cache', count=0, url='url missing'):
|
|
|
|
"""
|
|
|
|
Simple function to log the result of a search types except propers
|
|
|
|
:param count: count of successfully processed items
|
|
|
|
:param url: source url of item(s)
|
|
|
|
"""
|
|
|
|
if 'Propers' != mode:
|
|
|
|
self.log_result(mode, count, url)
|
|
|
|
|
|
|
|
    def log_result(self, mode='Cache', count=0, url='url missing'):
        """
        Simple function to log the result of any search
        :param mode: string that this log relates to
        :param count: count of successfully processed items
        :param url: source url of item(s)
        """
        if not self.should_skip():
            # pick phrase fragments: normal searches say "<mode> item(s)",
            # proper searches say "usable proper(s) found"
            str1, thing, str3 = (('', '%s item' % mode.lower(), ''), (' usable', 'proper', ' found'))['Propers' == mode]
            # e.g. "No usable propers found in response from <url>" / "3 cache items in response from <url>";
            # the re.sub collapses runs of whitespace in the url for tidy log output
            logger.log(u'%s %s in response from %s' % (('No' + str1, count)[0 < count], (
                '%s%s%s%s' % (('', 'freeleech ')[getattr(self, 'freeleech', False)], thing, maybe_plural(count), str3)),
                re.sub('(\s)\s+', r'\1', url)))
|
2015-06-19 23:34:56 +00:00
|
|
|
|
2015-07-13 09:39:20 +00:00
|
|
|
def check_auth_cookie(self):
|
|
|
|
|
|
|
|
if hasattr(self, 'cookies'):
|
|
|
|
cookies = self.cookies
|
|
|
|
|
2017-03-17 01:57:08 +00:00
|
|
|
if not (cookies and re.match('^(?:\w+=[^;\s]+[;\s]*)+$', cookies)):
|
2015-07-13 09:39:20 +00:00
|
|
|
return False
|
|
|
|
|
|
|
|
cj = requests.utils.add_dict_to_cookiejar(self.session.cookies,
|
|
|
|
dict([x.strip().split('=') for x in cookies.split(';')
|
|
|
|
if x != ''])),
|
|
|
|
for item in cj:
|
|
|
|
if not isinstance(item, requests.cookies.RequestsCookieJar):
|
|
|
|
return False
|
|
|
|
|
|
|
|
return True
|
|
|
|
|
|
|
|
def _check_cookie(self):
|
|
|
|
|
|
|
|
if self.check_auth_cookie():
|
|
|
|
return True, None
|
|
|
|
|
|
|
|
return False, 'Cookies not correctly formatted key=value pairs e.g. uid=xx;pass=yy)'
|
|
|
|
|
2015-09-18 00:06:34 +00:00
|
|
|
def has_all_cookies(self, cookies=None, pre=''):
|
|
|
|
|
2016-08-26 23:36:01 +00:00
|
|
|
cookies = cookies and ([cookies], cookies)[isinstance(cookies, list)] or ['uid', 'pass']
|
|
|
|
return all(['%s%s' % (pre, item) in self.session.cookies for item in cookies])
|
2015-09-18 00:06:34 +00:00
|
|
|
|
|
|
|
    def _categories_string(self, mode='Cache', template='c%s=1', delimiter='&'):
        """Build a provider query fragment from the configured category ids.

        :param mode: search mode ('Cache', 'Propers', 'Season', 'Episode') selecting the category list
        :param template: format applied per category id; an empty template emits the bare id
        :param delimiter: string joining the formatted categories
        :return: joined string, e.g. 'c5=1&c6=1'
        """
        # a 'shows' key overrides the mode-specific category lists entirely;
        # anime categories are appended when relevant (Cache/Propers with any anime
        # show configured, or a Season/Episode search against an anime show)
        return delimiter.join([('%s', template)[any(template)] % c for c in sorted(
            'shows' in self.categories and (isinstance(self.categories['shows'], type([])) and
                                            self.categories['shows'] or [self.categories['shows']]) or
            self.categories[(mode, 'Episode')['Propers' == mode]] +
            ([], self.categories.get('anime') or [])[
                (mode in ['Cache', 'Propers'] and helpers.has_anime()) or
                ((mode in ['Season', 'Episode']) and self.show and self.show.is_anime)])])
|
2015-09-18 00:06:34 +00:00
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def _bytesizer(size_dim=''):
|
|
|
|
|
|
|
|
try:
|
2016-08-26 23:36:01 +00:00
|
|
|
value = float('.'.join(re.findall('(?i)(\d+)(?:[.,](\d+))?', size_dim)[0]))
|
2015-09-18 00:06:34 +00:00
|
|
|
except TypeError:
|
|
|
|
return size_dim
|
|
|
|
except IndexError:
|
|
|
|
return None
|
|
|
|
try:
|
|
|
|
value *= 1024 ** ['b', 'k', 'm', 'g', 't'].index(re.findall('(t|g|m|k)[i]?b', size_dim.lower())[0])
|
|
|
|
except IndexError:
|
|
|
|
pass
|
2015-12-16 23:35:39 +00:00
|
|
|
return long(math.ceil(value))
|
2015-09-18 00:06:34 +00:00
|
|
|
|
2017-08-24 22:55:02 +00:00
|
|
|
@staticmethod
|
|
|
|
def _should_stop():
|
2017-08-21 19:53:26 +00:00
|
|
|
if getattr(threading.currentThread(), 'stop', False):
|
|
|
|
return True
|
|
|
|
return False
|
|
|
|
|
|
|
|
def _sleep_with_stop(self, t):
|
|
|
|
t_l = t
|
|
|
|
while t_l > 0:
|
|
|
|
time.sleep(3)
|
|
|
|
t_l -= 3
|
|
|
|
if self._should_stop():
|
|
|
|
return
|
|
|
|
|
2014-03-10 05:18:05 +00:00
|
|
|
|
2017-11-02 18:30:05 +00:00
|
|
|
class NZBProvider(GenericProvider):
    """Base class for NZB (usenet) providers."""

    def __init__(self, name, supports_backlog=True, anime_only=False):
        GenericProvider.__init__(self, name, supports_backlog, anime_only)

        self.providerType = GenericProvider.NZB
        # NZB indexers typically impose API hit/download limits
        self.has_limit = True

    def image_name(self):
        """Return the filename of the generic newznab provider image."""
        return GenericProvider.image_name(self, 'newznab')

    def maybe_apikey(self):
        """Return the configured API key, if any.

        :return: the api key string when set; None when auth is needed but no key is
                 configured; False when this provider does not need auth
        """
        if getattr(self, 'needs_auth', None):
            return (getattr(self, 'key', '') and self.key) or (getattr(self, 'api_key', '') and self.api_key) or None
        return False

    def _check_auth(self, is_required=None):
        """Validate provider credentials.

        :raises AuthException: when auth is required but the API key is empty
        """
        has_key = self.maybe_apikey()
        if has_key:
            return has_key
        if None is has_key:
            # needs_auth is set but no key was configured by the user
            raise AuthException('%s for %s is empty in Media Providers/Options'
                                % ('API key' + ('', ' and/or Username')[hasattr(self, 'username')], self.name))

        return GenericProvider._check_auth(self)

    def find_propers(self, search_date=None, shows=None, anime=None, **kwargs):
        """Search the provider for proper/repack releases.

        :param search_date: only keep results newer than this datetime
        :param shows: truthy to search regular-show proper terms (.proper./.repack./.real.)
        :param anime: truthy to search anime version terms (v1..v5)
        :return: list of classes.Proper (cached entries plus fresh provider hits)
        """
        # seed results with anything already held in the provider cache
        cache_results = self.cache.listPropers(search_date)
        results = [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time']), self.show) for x in
                   cache_results]

        if self.should_skip():
            return results

        index = 0
        # nzbs_org alternates its default request with an explicit t=search request
        alt_search = ('nzbs_org' == self.get_id())
        do_search_alt = False

        search_terms = []
        regex = []
        if shows:
            search_terms += ['.proper.', '.repack.', '.real.']
            regex += ['proper|repack', Quality.real_check]
            proper_check = re.compile(r'(?i)(\b%s\b)' % '|'.join(regex))
        if anime:
            terms = 'v1|v2|v3|v4|v5'
            search_terms += [terms]
            regex += [terms]
            proper_check = re.compile(r'(?i)(%s)' % '|'.join(regex))

        urls = []
        while index < len(search_terms):
            if self.should_skip(log_warning=False):
                break

            search_params = {'q': search_terms[index], 'maxage': sickbeard.BACKLOG_DAYS + 2}
            if alt_search:

                # every second pass adds t=search and advances to the next term
                if do_search_alt:
                    search_params['t'] = 'search'
                    index += 1

                do_search_alt = not do_search_alt

            else:
                index += 1

            for item in self._search_provider({'Propers': [search_params]}):

                (title, url) = self._title_and_url(item)

                # skip titles that do not look like propers, and duplicate urls
                if not proper_check.search(title) or url in urls:
                    continue
                urls.append(url)

                if 'published_parsed' in item and item['published_parsed']:
                    result_date = item.published_parsed
                    if result_date:
                        result_date = datetime.datetime(*result_date[0:6])
                else:
                    logger.log(u'Unable to figure out the date for entry %s, skipping it' % title)
                    continue

                if not search_date or search_date < result_date:
                    search_result = classes.Proper(title, url, result_date, self.show)
                    results.append(search_result)

            # small delay between requests to be gentle on the indexer
            time.sleep(0.5)

        return results

    def cache_data(self, *args, **kwargs):
        """Fetch items for the recent-search cache via a generic 'Cache' mode search."""
        search_params = {'Cache': [{}]}
        return self._search_provider(search_params=search_params, **kwargs)
|
2015-09-18 00:06:34 +00:00
|
|
|
|
2014-07-15 02:00:53 +00:00
|
|
|
|
2017-11-02 18:30:05 +00:00
|
|
|
class TorrentProvider(GenericProvider):
|
2015-05-27 00:00:01 +00:00
|
|
|
|
2017-08-21 23:11:21 +00:00
|
|
|
    def __init__(self, name, supports_backlog=True, anime_only=False, cache_update_freq=None, update_freq=None):
        GenericProvider.__init__(self, name, supports_backlog, anime_only)

        self.providerType = GenericProvider.TORRENT

        # custom seed ratio/time (None when not configured)
        self._seed_ratio = None
        self.seed_time = None
        # _url is resolved lazily by the `url` property; urls maps endpoint names
        # to concrete urls built from url_tmpl in _valid_home
        self._url = None
        self.urls = {}
        # route the cache's data fetch through this provider's own search
        self.cache._cache_data = self._cache_data
        if cache_update_freq:
            self.cache.update_freq = cache_update_freq
        # ping frequency for the provider (None disables); ping_skip is runtime state
        self.ping_freq = update_freq
        self.ping_skip = None
|
Change validate and improve specific Torrent provider connections, IPT, KAT, SCC, TPB, TB, TD, TT.
Change refactor cache for torrent providers to reduce code.
Change improve search category selection BMTV, FSH, FF, TB.
Change identify more SD release qualities.
Change update SpeedCD, MoreThan, TVChaosuk.
Add torrent provider HD4Free.
Remove torrent provider BitSoup.
Change only create threads for providers needing a recent search instead of for all enabled.
Add 4489 as experimental value to "Recent search frequency" to use provider freqs instead of fixed width for all.
Fix searching nzb season packs.
Change remove some logging cruft.
2016-03-24 18:24:14 +00:00
|
|
|
|
|
|
|
    @property
    def url(self):
        # resolve the home url lazily; also re-resolve when the provider uses
        # url templates but the per-endpoint urls have been cleared
        if None is self._url or (hasattr(self, 'url_tmpl') and not self.urls):
            self._url = self._valid_home(False)
            self._valid_url()
        return self._url
|
|
|
|
|
|
|
|
    @url.setter
    def url(self, value=None):
        # allow callers to pin a specific home url (bypasses _valid_home resolution)
        self._url = value
|
|
|
|
|
|
|
|
    def _valid_url(self):
        # hook for subclasses to post-process/veto a resolved url; base accepts anything
        return True
|
2015-05-27 00:00:01 +00:00
|
|
|
|
2015-07-13 09:39:20 +00:00
|
|
|
    def image_name(self):
        """Return the filename of the generic torrent provider image."""
        return GenericProvider.image_name(self, 'torrent')
|
2015-05-27 00:00:01 +00:00
|
|
|
|
2015-07-13 09:39:20 +00:00
|
|
|
    def seed_ratio(self):
        # configured custom seed ratio (None when not set)
        return self._seed_ratio
|
|
|
|
|
2015-09-18 00:06:34 +00:00
|
|
|
@staticmethod
|
|
|
|
def _sort_seeders(mode, items):
|
2016-08-26 23:36:01 +00:00
|
|
|
""" legacy function used by a custom provider, do not remove """
|
2015-09-18 00:06:34 +00:00
|
|
|
mode in ['Season', 'Episode'] and items[mode].sort(key=lambda tup: tup[2], reverse=True)
|
|
|
|
|
2016-08-26 23:36:01 +00:00
|
|
|
@staticmethod
|
|
|
|
def _sort_seeding(mode, items):
|
|
|
|
|
|
|
|
if mode in ['Season', 'Episode']:
|
|
|
|
return sorted(set(items), key=lambda tup: tup[2], reverse=True)
|
|
|
|
return items
|
|
|
|
|
2015-09-18 00:06:34 +00:00
|
|
|
def _peers_fail(self, mode, seeders=0, leechers=0):
|
|
|
|
|
|
|
|
return 'Cache' != mode and (seeders < getattr(self, 'minseed', 0) or leechers < getattr(self, 'minleech', 0))
|
|
|
|
|
2015-07-13 09:39:20 +00:00
|
|
|
def get_quality(self, item, anime=False):
|
2015-06-19 23:34:56 +00:00
|
|
|
|
2015-05-27 00:00:01 +00:00
|
|
|
if isinstance(item, tuple):
|
|
|
|
name = item[0]
|
2015-06-06 15:08:59 +00:00
|
|
|
elif isinstance(item, dict):
|
2015-09-18 00:06:34 +00:00
|
|
|
name, url = self._title_and_url(item)
|
2015-05-27 00:00:01 +00:00
|
|
|
else:
|
|
|
|
name = item.title
|
|
|
|
return Quality.sceneQuality(name, anime)
|
|
|
|
|
2015-06-19 23:34:56 +00:00
|
|
|
    @staticmethod
    def _reverse_quality(quality):
        """Map a Quality constant back to a typical release-name quality string
        ('' when the quality is unknown/unmapped)."""
        return {
            Quality.SDTV: 'HDTV x264',
            Quality.SDDVD: 'DVDRIP',
            Quality.HDTV: '720p HDTV x264',
            Quality.FULLHDTV: '1080p HDTV x264',
            Quality.RAWHDTV: '1080i HDTV mpeg2',
            Quality.HDWEBDL: '720p WEB-DL h264',
            Quality.FULLHDWEBDL: '1080p WEB-DL h264',
            Quality.HDBLURAY: '720p Bluray x264',
            Quality.FULLHDBLURAY: '1080p Bluray x264'
        }.get(quality, '')
|
|
|
|
|
2015-09-18 00:06:34 +00:00
|
|
|
    def _season_strings(self, ep_obj, detail_only=False, scene=True, prefix='', **kwargs):
        """Build season search string parameter dicts for this episode's show.

        :param ep_obj: episode object the season search is based on (falsy returns [])
        :param detail_only: also emit a 'Season_only' detail list (regular shows only)
        :param scene: whether show names should be scene-sanitized
        :param prefix: prefix inserted into each search string
        :return: list containing one dict, e.g. [{'Season': [...], 'Season_only': [...]}]
        """
        if not ep_obj:
            return []

        show = ep_obj.show
        ep_dict = self._ep_dict(ep_obj)
        # season token: airdate year for air-by-date/sports shows, absolute number for
        # anime, else a provider-supplied sp_detail hook or the default 'S01' style
        sp_detail = (show.air_by_date or show.is_sports) and str(ep_obj.airdate).split('-')[0] or \
            (show.is_anime and ep_obj.scene_absolute_number or
             ('sp_detail' in kwargs.keys() and kwargs['sp_detail'](ep_dict)) or 'S%(seasonnumber)02d' % ep_dict)
        sp_detail = ([sp_detail], sp_detail)[isinstance(sp_detail, list)]
        detail = ({}, {'Season_only': sp_detail})[detail_only and not self.show.is_sports and not self.show.is_anime]
        return [dict({'Season': self._build_search_strings(sp_detail, scene, prefix)}.items() + detail.items())]
|
2015-06-19 23:34:56 +00:00
|
|
|
|
2015-09-18 00:06:34 +00:00
|
|
|
    def _episode_strings(self, ep_obj, detail_only=False, scene=True, prefix='', sep_date=' ', date_or=False, **kwargs):
        """Build episode search string parameter dicts for this episode's show.

        :param ep_obj: episode object to search for (falsy returns [])
        :param detail_only: also emit an 'Episode_only' detail list (regular shows only)
        :param scene: whether show names should be scene-sanitized
        :param prefix: prefix inserted into each search string
        :param sep_date: separator used in place of '-' in airdate-based tokens
        :param date_or: sports shows: True combines date and month with '|', else appends month
        :return: list containing one dict, e.g. [{'Episode': [...], 'Episode_only': [...]}]
        """
        if not ep_obj:
            return []

        show = ep_obj.show
        if show.air_by_date or show.is_sports:
            # date-based token, optionally via a provider-supplied date_detail hook
            ep_detail = [str(ep_obj.airdate).replace('-', sep_date)]\
                if 'date_detail' not in kwargs.keys() else kwargs['date_detail'](ep_obj.airdate)
            if show.is_sports:
                month = ep_obj.airdate.strftime('%b')
                ep_detail = (ep_detail + [month], ['%s|%s' % (x, month) for x in ep_detail])[date_or]
        elif show.is_anime:
            # anime search by scene absolute number, optionally via ep_detail_anime hook
            ep_detail = ep_obj.scene_absolute_number \
                if 'ep_detail_anime' not in kwargs.keys() else kwargs['ep_detail_anime'](ep_obj.scene_absolute_number)
        else:
            # regular show: naming template (e.g. S01E02), optionally via ep_detail hook
            ep_dict = self._ep_dict(ep_obj)
            ep_detail = sickbeard.config.naming_ep_type[2] % ep_dict \
                if 'ep_detail' not in kwargs.keys() else kwargs['ep_detail'](ep_dict)
            if sickbeard.scene_exceptions.has_abs_episodes(ep_obj):
                # shows with absolute-numbered scene exceptions also search the bare episode number
                ep_detail = ([ep_detail], ep_detail)[isinstance(ep_detail, list)] + ['%d' % ep_dict['episodenumber']]
        ep_detail = ([ep_detail], ep_detail)[isinstance(ep_detail, list)]
        detail = ({}, {'Episode_only': ep_detail})[detail_only and not show.is_sports and not show.is_anime]
        return [dict({'Episode': self._build_search_strings(ep_detail, scene, prefix)}.items() + detail.items())]
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def _ep_dict(ep_obj):
|
|
|
|
season, episode = ((ep_obj.season, ep_obj.episode),
|
|
|
|
(ep_obj.scene_season, ep_obj.scene_episode))[bool(ep_obj.show.is_scene)]
|
|
|
|
return {'seasonnumber': season, 'episodenumber': episode}
|
|
|
|
|
|
|
|
def _build_search_strings(self, ep_detail, process_name=True, prefix=''):
|
2015-06-19 23:34:56 +00:00
|
|
|
"""
|
|
|
|
Build a list of search strings for querying a provider
|
|
|
|
:param ep_detail: String of episode detail or List of episode details
|
|
|
|
:param process_name: Bool Whether to call sanitizeSceneName() on show name
|
2015-09-18 00:06:34 +00:00
|
|
|
:param prefix: String to insert to search strings
|
2015-06-19 23:34:56 +00:00
|
|
|
:return: List of search string parameters
|
|
|
|
"""
|
2015-09-18 00:06:34 +00:00
|
|
|
ep_detail = ([ep_detail], ep_detail)[isinstance(ep_detail, list)]
|
|
|
|
prefix = ([prefix], prefix)[isinstance(prefix, list)]
|
2015-06-19 23:34:56 +00:00
|
|
|
|
|
|
|
search_params = []
|
2016-08-26 23:36:01 +00:00
|
|
|
crop = re.compile(r'([.\s])(?:\1)+')
|
2018-01-24 02:24:00 +00:00
|
|
|
for name in get_show_names_all_possible(self.show, scenify=process_name and getattr(self, 'scene', True)):
|
2015-06-19 23:34:56 +00:00
|
|
|
for detail in ep_detail:
|
2015-09-18 00:06:34 +00:00
|
|
|
search_params += [crop.sub(r'\1', '%s %s%s' % (name, x, detail)) for x in prefix]
|
2015-06-19 23:34:56 +00:00
|
|
|
return search_params
|
|
|
|
|
Change validate and improve specific Torrent provider connections, IPT, KAT, SCC, TPB, TB, TD, TT.
Change refactor cache for torrent providers to reduce code.
Change improve search category selection BMTV, FSH, FF, TB.
Change identify more SD release qualities.
Change update SpeedCD, MoreThan, TVChaosuk.
Add torrent provider HD4Free.
Remove torrent provider BitSoup.
Change only create threads for providers needing a recent search instead of for all enabled.
Add 4489 as experimental value to "Recent search frequency" to use provider freqs instead of fixed width for all.
Fix searching nzb season packs.
Change remove some logging cruft.
2016-03-24 18:24:14 +00:00
|
|
|
@staticmethod
|
|
|
|
def _has_signature(data=None):
|
2017-08-24 22:55:02 +00:00
|
|
|
return data and re.search(r'(?sim)<input[^<]+?name=["\'\s]*?password', data) and \
|
|
|
|
re.search(r'(?sim)<input[^<]+?name=["\'\s]*?username', data)
|
Change validate and improve specific Torrent provider connections, IPT, KAT, SCC, TPB, TB, TD, TT.
Change refactor cache for torrent providers to reduce code.
Change improve search category selection BMTV, FSH, FF, TB.
Change identify more SD release qualities.
Change update SpeedCD, MoreThan, TVChaosuk.
Add torrent provider HD4Free.
Remove torrent provider BitSoup.
Change only create threads for providers needing a recent search instead of for all enabled.
Add 4489 as experimental value to "Recent search frequency" to use provider freqs instead of fixed width for all.
Fix searching nzb season packs.
Change remove some logging cruft.
2016-03-24 18:24:14 +00:00
|
|
|
|
2016-09-24 11:23:22 +00:00
|
|
|
    def _valid_home(self, attempt_fetch=True):
        """
        :return: signature verified home url else None if validation fail
        """
        # a fixed url_base short-circuits all mirror probing
        url_base = getattr(self, 'url_base', None)
        if url_base:
            return url_base

        # NOTE(review): assumes url_home is set whenever url_base is absent —
        # the max() below would raise on a None url_list; confirm with providers
        url_list = getattr(self, 'url_home', None)
        if not url_list and getattr(self, 'url_edit', None) or 10 > max([len(x) for x in url_list]):
            return None

        # normalise every candidate to a trailing-slash form
        url_list = ['%s/' % x.rstrip('/') for x in url_list]
        last_url, expire = sickbeard.PROVIDER_HOMES.get(self.get_id(), ('', None))
        url_drop = getattr(self, 'url_drop', [])
        if url_drop and any([url in last_url for url in url_drop]):  # deprecate url
            last_url = ''

        if 'site down' == last_url:
            # still within the "site down" back-off window, or provider disabled
            if expire and (expire > int(time.time())) or not self.enabled:
                return None
        elif last_url:
            last_url = last_url.replace('getrss.php', '/')  # correct develop typo after a network outage (0.11>0.12)
            # try the last known-good url first
            last_url in url_list and url_list.remove(last_url)
            url_list.insert(0, last_url)

        if not self.enabled:
            return last_url

        for cur_url in url_list:
            if not self.is_valid_mod(cur_url):
                return None

            # accept a url when the cached validation has not expired, or when the
            # fetched page shows the expected login-form signature
            if 10 < len(cur_url) and ((expire and (expire > int(time.time()))) or
                                      self._has_signature(self.get_url(cur_url, skip_auth=True))):
                if self.should_skip():
                    return None

                # materialise per-endpoint urls from the provider's url templates
                for k, v in getattr(self, 'url_tmpl', {}).items():
                    self.urls[k] = v % {'home': cur_url, 'vars': getattr(self, 'url_vars', {}).get(k, '')}

                # persist the working url with a one hour revalidation window
                if last_url != cur_url or (expire and not (expire > int(time.time()))):
                    sickbeard.PROVIDER_HOMES[self.get_id()] = (cur_url, int(time.time()) + (60*60))
                    sickbeard.save_config()
                return cur_url

        # nothing worked: log, clear endpoint urls and back off for five minutes
        logger.log('Failed to identify a "%s" page with %s %s (local network issue, site down, or ISP blocked) ' %
                   (self.name, len(url_list), ('URL', 'different URLs')[1 < len(url_list)]) +
                   (attempt_fetch and ('Suggest; 1) Disable "%s" 2) Use a proxy/VPN' % self.get_id()) or ''),
                   (logger.WARNING, logger.ERROR)[self.enabled])
        self.urls = {}
        sickbeard.PROVIDER_HOMES[self.get_id()] = ('site down', int(time.time()) + (5 * 60))
        sickbeard.save_config()
        return None
|
|
|
|
|
|
|
|
    def is_valid_mod(self, url):
        """Verify the provider module against known checksums for certain domains.

        NOTE(review): the numeric constants are deliberately obfuscated; when the
        url's second-level domain matches a flagged crc, the provider's own source
        file must hash to one of the known-good values for the url to be accepted.
        """
        parsed, s, is_valid = urlparse.urlparse(url), 70000700, True
        # crc of '.<second-level-domain>' selects whether a module check is required
        if 2012691328 == s + zlib.crc32(('.%s' % (parsed.netloc or parsed.path)).split('.')[-2]):
            is_valid = False
            # hash this provider's source file and compare against known-good values
            file_name = '%s.py' % os.path.join(sickbeard.PROG_DIR, *self.__module__.split('.'))
            if ek.ek(os.path.isfile, file_name):
                with open(file_name, 'rb') as file_hd:
                    is_valid = s + zlib.crc32(file_hd.read()) in (1661931498, 472149389)
        return is_valid
|
|
|
|
|
2015-09-18 00:06:34 +00:00
|
|
|
def _authorised(self, logged_in=None, post_params=None, failed_msg=None, url=None, timeout=30):
    # Log in to the provider site, scraping the login form when necessary.
    # :param logged_in: optional callable(response) -> bool that detects a
    #     logged-in state; defaults to checking required cookies are present
    # :param post_params: optional dict of extra form fields; the special key
    #     'form_tmpl' selects/narrows which <form> on the page is scraped
    # :param failed_msg: optional callable(response) -> error message template
    #     containing one %s for the provider name
    # :param url: explicit login url, or a list of urls to fetch first (e.g.
    #     to seed cookies); when absent, urls are taken from self.urls
    # :param timeout: request timeout in seconds for the login POST
    # :return: True when authenticated, otherwise False

    # detect "N login attempts remaining" style warnings in a response body
    maxed_out = (lambda y: re.search(r'(?i)[1-3]((<[^>]+>)|\W)*' +
                                     '(attempts|tries|remain)[\W\w]{,40}?(remain|left|attempt)', y))

    # fill in defaults for any callables the caller did not supply
    # (the `a and a or b` form keeps a caller-provided value, else the default)
    logged_in, failed_msg = [None is not a and a or b for (a, b) in (
        (logged_in, (lambda y=None: self.has_all_cookies())),
        (failed_msg, (lambda y='': maxed_out(y) and u'Urgent abort, running low on login attempts. ' +
                      u'Password flushed to prevent service disruption to %s.' or
                      (re.search(r'(?i)(username|password)((<[^>]+>)|\W)*' +
                                 '(or|and|/|\s)((<[^>]+>)|\W)*(password|incorrect)', y) and
                       u'Invalid username or password for %s. Check settings' or
                       u'Failed to authenticate or parse a response from %s, abort provider')))
    )]

    # short-circuit when already logged in (and urls, if defined, are populated)
    if logged_in() and (not hasattr(self, 'urls') or bool(len(getattr(self, 'urls')))):
        return True

    # verify the provider site itself is reachable before attempting login
    if not self._valid_home():
        return False

    if hasattr(self, 'digest'):
        # cookie-digest based providers: normalise the user-pasted cookie string
        self.cookies = re.sub(r'(?i)([\s\']+|cookie\s*:)', '', self.digest)
        success, msg = self._check_cookie()
        if not success:
            self.cookies = None
            logger.log(u'%s: [%s]' % (msg, self.cookies), logger.WARNING)
            return False
    elif not self._check_auth():
        return False

    # a list of urls means: fetch each one first (e.g. to collect cookies)
    if isinstance(url, type([])):
        for i in range(0, len(url)):
            self.get_url(url.pop(), skip_auth=True)
            if self.should_skip():
                return False

    passfield, userfield = None, None
    if not url:
        if hasattr(self, 'urls'):
            url = self.urls.get('login_action')
            if url:
                # fetch the login page and scrape its form for action + fields
                response = self.get_url(url, skip_auth=True)
                if self.should_skip() or None is response:
                    return False
                try:
                    post_params = isinstance(post_params, type({})) and post_params or {}
                    # 'form_tmpl' narrows scraping to a matching <form> block;
                    # True means "the form containing 'login'"
                    form = 'form_tmpl' in post_params and post_params.pop('form_tmpl')
                    if form:
                        form = re.findall(
                            '(?is)(<form[^>]+%s.*?</form>)' % (True is form and 'login' or form), response)
                        response = form and form[0] or response

                    # resolve the form action into an absolute login url
                    action = re.findall('<form[^>]+action=[\'"]([^\'"]*)', response)[0]
                    url = action if action.startswith('http') else \
                        url if not action else \
                        (url + action) if action.startswith('?') else \
                        (self.urls.get('login_base') or self.urls['config_provider_home_uri']) + action.lstrip('/')

                    # collect named <input> fields; carry over hidden/extra
                    # fields (e.g. csrf tokens) and note the password field name
                    tags = re.findall(r'(?is)(<input[^>]*?name=[\'"][^\'"]+[^>]*)', response)
                    attrs = [[(re.findall(r'(?is)%s=[\'"]([^\'"]+)' % attr, x) or [''])[0]
                              for attr in ['type', 'name', 'value']] for x in tags]
                    for itype, name, value in attrs:
                        if 'password' in [itype, name]:
                            passfield = name
                        if name not in ('username', 'password') and 'password' != itype:
                            post_params.setdefault(name, value)
                except KeyError:
                    # form scraping failed (e.g. no action url): fall back
                    return super(TorrentProvider, self)._authorised()
            else:
                url = self.urls.get('login')
        if not url:
            return super(TorrentProvider, self)._authorised()

    if hasattr(self, 'username') and hasattr(self, 'password'):
        if not post_params:
            post_params = dict(username=self.username, password=self.password)
        elif isinstance(post_params, type({})):
            # inject credentials unless the caller already placed them somewhere
            if self.username not in post_params.values():
                post_params['username'] = self.username
            if self.password not in post_params.values():
                # use the scraped password field name when one was found
                post_params[(passfield, 'password')[not passfield]] = self.password

    response = self.get_url(url, skip_auth=True, post_data=post_params, timeout=timeout)
    if not self.should_skip() and response:
        if logged_in(response):
            return True

        # on "attempts remaining" warnings, wipe the stored password so
        # further automated retries cannot lock the account out
        if maxed_out(response) and hasattr(self, 'password'):
            self.password = None
            sickbeard.save_config()
        logger.log(failed_msg(response) % self.name, logger.ERROR)

    return False
|
|
|
|
|
2016-08-26 23:36:01 +00:00
|
|
|
def _check_auth(self, is_required=False):
    """
    Verify that the credential settings this provider uses are filled in.

    Credential groups are probed in priority order; the first group whose
    attributes all exist on the provider decides the outcome.

    :param is_required: when True and no known credential group exists,
        fail instead of deferring to the generic provider check
    :return: True when the matched credentials are all non-empty
    :raise AuthException: when a matched credential group has an empty value
    """
    credential_groups = (
        (('username', 'password'), 'Password or Username'),
        (('username', 'api_key'), 'Api key or Username'),
        (('username', 'passkey'), 'Passkey or Username'),
        (('uid', 'passkey'), 'Passkey or uid'),
        (('api_key',), 'Api key'),
        (('passkey',), 'Passkey'))

    for attr_names, setting in credential_groups:
        # only the first group fully present on this provider applies
        if all(hasattr(self, attr) for attr in attr_names):
            if all(getattr(self, attr) for attr in attr_names):
                return True
            raise AuthException('%s for %s is empty in Media Providers/Options' % (setting, self.name))

    # no known credential group on this provider: defer to the base check
    return not is_required and GenericProvider._check_auth(self)
|
2015-06-19 23:34:56 +00:00
|
|
|
|
2015-09-18 00:06:34 +00:00
|
|
|
def find_propers(self, **kwargs):
    """
    Search this provider for releases of type PROPER/REPACK/REAL.

    :return: list of classes.Proper objects (empty when skipped or no hits)
    """
    propers = []
    if self.should_skip():
        return propers

    terms = getattr(self, 'proper_search_terms', ['proper', 'repack', 'real'])
    if not isinstance(terms, list):
        # a provider may define a single string (or None for the default set)
        if terms is None:
            terms = 'proper|repack|real'
        terms = [terms]

    found = self._search_provider({'Propers': terms})

    # keep only characters valid in a title-matching alternation pattern
    sanitise = re.compile(r'(?i)[^a-z1-9|.]+')
    for term in terms:
        if self.should_skip(log_warning=False):
            break

        title_match = re.compile(r'(?i)(?:%s)' % sanitise.sub('', term))
        for entry in found:
            if self.should_skip(log_warning=False):
                break

            title, url = self._title_and_url(entry)
            if title_match.search(title):
                propers.append(classes.Proper(
                    title, url, datetime.datetime.today(),
                    helpers.findCertainShow(sickbeard.showList, None)))
    return propers
|
2015-06-19 23:34:56 +00:00
|
|
|
|
|
|
|
@staticmethod
|
2016-11-01 18:13:51 +00:00
|
|
|
def _has_no_results(html):
|
2016-09-30 22:20:28 +00:00
|
|
|
return re.search(r'(?i)<(?:b|div|h\d|p|span|strong|td)[^>]*>\s*(?:' +
|
2016-08-26 23:36:01 +00:00
|
|
|
'your\ssearch.*?did\snot\smatch|' +
|
|
|
|
'(?:nothing|0</b>\s+torrents)\sfound|' +
|
2016-11-01 18:13:51 +00:00
|
|
|
'(?:sorry,\s)?no\s(?:results|torrents)\s(found|here|match)|' +
|
|
|
|
'no\s(?:match|results|torrents)!*|'
|
|
|
|
'[^<]*?there\sare\sno\sresults|' +
|
|
|
|
'[^<]*?no\shits\.\sTry\sadding' +
|
|
|
|
')', html)
|
2015-06-19 23:34:56 +00:00
|
|
|
|
2017-08-14 00:16:18 +00:00
|
|
|
def _cache_data(self, **kwargs):
|
2015-06-19 23:34:56 +00:00
|
|
|
|
Change validate and improve specific Torrent provider connections, IPT, KAT, SCC, TPB, TB, TD, TT.
Change refactor cache for torrent providers to reduce code.
Change improve search category selection BMTV, FSH, FF, TB.
Change identify more SD release qualities.
Change update SpeedCD, MoreThan, TVChaosuk.
Add torrent provider HD4Free.
Remove torrent provider BitSoup.
Change only create threads for providers needing a recent search instead of for all enabled.
Add 4489 as experimental value to "Recent search frequency" to use provider freqs instead of fixed width for all.
Fix searching nzb season packs.
Change remove some logging cruft.
2016-03-24 18:24:14 +00:00
|
|
|
return self._search_provider({'Cache': ['']})
|
2017-08-21 23:11:21 +00:00
|
|
|
|
|
|
|
def _ping(self):
    # Keep-alive loop: periodically re-authenticate so the provider session
    # does not expire between searches; runs until the thread is told to stop.
    while not self._should_stop():
        if self.ping_skip:
            # currently backing off after a failed login; burn one interval
            self.ping_skip -= 1
        else:
            # on auth failure, back off for about an hour's worth of intervals;
            # on success, clear the back-off (index 1 selects None)
            self.ping_skip = ((60*60)/self.ping_freq, None)[self._authorised()]

        self._sleep_with_stop(self.ping_freq)
|
2018-04-18 12:52:01 +00:00
|
|
|
|
|
|
|
def get_result(self, episodes, url):
    """
    Build a search result for the given episodes and url, attaching this
    provider's optional get_data hook for later download post-processing.

    :param episodes: episode objects the result is for
    :param url: result url; falsy yields None
    :return: result object from the base class, or None when url is falsy
    """
    if not url:
        return None

    result = super(TorrentProvider, self).get_result(episodes, url)
    if hasattr(self, 'get_data'):
        result.get_data_func = self.get_data
    return result
|