#end if
- #if $hasattr($curNzbProvider, 'api_key'):
+ #if $hasattr($cur_nzb_provider, 'api_key'):
#end if
- #if $hasattr($curNzbProvider, 'enable_recentsearch') and $curNzbProvider.supportsBacklog:
+ #if $hasattr($cur_nzb_provider, 'enable_recentsearch') and $cur_nzb_provider.supports_backlog:
#end if
- #if $hasattr($curNzbProvider, 'enable_backlog') and $curNzbProvider.supportsBacklog:
+ #if $hasattr($cur_nzb_provider, 'enable_backlog') and $cur_nzb_provider.supports_backlog:
#end if
- #if $hasattr($curNzbProvider, 'search_mode') and $curNzbProvider.supportsBacklog:
+ #if $hasattr($cur_nzb_provider, 'search_mode') and $cur_nzb_provider.supports_backlog:
#end if
- #if $hasattr($curNzbProvider, 'search_fallback') and $curNzbProvider.supportsBacklog:
+ #if $hasattr($cur_nzb_provider, 'search_fallback') and $cur_nzb_provider.supports_backlog:
#end if
- #if not $curNzbProvider.supportsBacklog:
+ #if not $cur_nzb_provider.supports_backlog:
@@ -345,233 +343,229 @@
##
##
-#for $curTorrentProvider in [$curProvider for $curProvider in $sickbeard.providers.sortedProviderList() if $curProvider.providerType == $GenericProvider.TORRENT]:
-
- #if callable(getattr(curTorrentProvider, 'ui_string', None))
- #set $field_name = curTorrentProvider.get_id() + '_tip'
- #set $tip_text = curTorrentProvider.ui_string($field_name)
+#for $cur_torrent_provider in [$cur_provider for $cur_provider in $sickbeard.providers.sortedProviderList()
+ if $cur_provider.providerType == $GenericProvider.TORRENT]:
+
+ #if callable(getattr(cur_torrent_provider, 'ui_string', None))
+ #set $field_name = cur_torrent_provider.get_id() + '_tip'
+ #set $tip_text = cur_torrent_provider.ui_string($field_name)
#if $tip_text
- Important! ${curTorrentProvider.name} $tip_text
+ Important! ${cur_torrent_provider.name} $tip_text
#end if
#end if
- #if $hasattr($curTorrentProvider, 'api_key'):
+ #if $getattr($cur_torrent_provider, 'url_edit', None):
-
- Api key:
+
+ #set $url_label = callable(getattr(cur_torrent_provider, 'ui_string', None)) and cur_torrent_provider.ui_string(cur_torrent_provider.get_id() + '_site_url') or 'Site URL'
+ $url_label
-
-
-
-
- #end if
- #if $hasattr($curTorrentProvider, 'digest'):
-
#end if
- #if $hasattr($curTorrentProvider, 'hash'):
+ #if $hasattr($cur_torrent_provider, 'api_key'):
-
+
+ Api key:
+
+
+
+
+
+ #end if
+ #if $hasattr($cur_torrent_provider, 'digest'):
+ #set $field_name = cur_torrent_provider.get_id() + '_digest'
+
+
+ Cookies:
+
+
+ #if callable(getattr(cur_torrent_provider, 'ui_string', None))
+ ${cur_torrent_provider.ui_string($field_name)}
+ #end if
+
+
+
+ #end if
+ #if $hasattr($cur_torrent_provider, 'hash'):
+
+
Hash:
-
+
#end if
- #if $hasattr($curTorrentProvider, 'username'):
+ #for $user_type in ['username', 'uid']:
+ #if $hasattr($cur_torrent_provider, $user_type):
+ #set $prov_type = '%s_%s' % ($cur_torrent_provider.get_id(), $user_type)
+ #set $user_value = $getattr($cur_torrent_provider, $user_type) or ''
-
- Username:
+
+ $user_type.capitalize():
-
+
- #end if
- #if $hasattr($curTorrentProvider, 'password'):
+ #break
+ #end if
+ #end for
+ #if $hasattr($cur_torrent_provider, 'password'):
-
+
Password:
-
+
#end if
- #if $hasattr($curTorrentProvider, 'passkey'):
+ #if $hasattr($cur_torrent_provider, 'passkey'):
-
+
Passkey:
-
+
#end if
- #if $hasattr($curTorrentProvider, '_seed_ratio') and 'blackhole' != $sickbeard.TORRENT_METHOD:
+ #if $hasattr($cur_torrent_provider, '_seed_ratio') and 'blackhole' != $sickbeard.TORRENT_METHOD:
#set $torrent_method_text = {'utorrent': 'uTorrent', 'transmission': 'Transmission', 'deluge': 'Deluge', 'download_station': 'Synology DS', 'rtorrent': 'rTorrent'}
#end if
- #if $hasattr($curTorrentProvider, 'seed_time') and 'utorrent' == $sickbeard.TORRENT_METHOD:
+ #if $hasattr($cur_torrent_provider, 'seed_time') and 'utorrent' == $sickbeard.TORRENT_METHOD:
#set $torrent_method_text = {'utorrent': 'uTorrent'}
    #set $use_default = 'to use the %s min torrent search setting minimum default ' % ($sickbeard.TORRENT_SEED_TIME, $sbRoot) if $sickbeard.TORRENT_SEED_TIME else 'for the %s setting' % $torrent_method_text[$sickbeard.TORRENT_METHOD]
#end if
- #if $hasattr($curTorrentProvider, 'minseed'):
+ #if $hasattr($cur_torrent_provider, 'minseed'):
#end if
- #if $hasattr($curTorrentProvider, 'minleech'):
+ #if $hasattr($cur_torrent_provider, 'minleech'):
#end if
- #if $hasattr($curTorrentProvider, 'proxy'):
+ #if $hasattr($cur_torrent_provider, 'confirmed'):
-
- Access provider via proxy
-
- />
- to bypass country blocking mechanisms
-
-
-
- #if $hasattr($curTorrentProvider.proxy, 'url'):
-
-
- Proxy URL:
-
-
- #for $i in $curTorrentProvider.proxy.urls.keys():
- >$i
- #end for
-
-
-
-
- #end if
- #end if
- #if $hasattr($curTorrentProvider, 'confirmed'):
-
#end if
- #if $hasattr($curTorrentProvider, 'freeleech'):
+ #if $hasattr($cur_torrent_provider, 'freeleech'):
#end if
- #if $hasattr($curTorrentProvider, 'reject_m2ts'):
+ #if $hasattr($cur_torrent_provider, 'reject_m2ts'):
#end if
- #if $hasattr($curTorrentProvider, 'enable_recentsearch') and $curTorrentProvider.supportsBacklog:
+ #if $hasattr($cur_torrent_provider, 'enable_recentsearch'):
#end if
- #if $hasattr($curTorrentProvider, 'enable_backlog') and $curTorrentProvider.supportsBacklog:
+ #if $hasattr($cur_torrent_provider, 'enable_backlog') and $cur_torrent_provider.supports_backlog:
#end if
- #if $hasattr($curTorrentProvider, 'search_mode') and $curTorrentProvider.supportsBacklog:
+ #if $hasattr($cur_torrent_provider, 'search_mode') and $cur_torrent_provider.supports_backlog:
#end if
- #if $hasattr($curTorrentProvider, 'search_fallback') and $curTorrentProvider.supportsBacklog:
+ #if $hasattr($cur_torrent_provider, 'search_fallback') and $cur_torrent_provider.supports_backlog:
diff --git a/gui/slick/interfaces/default/config_search.tmpl b/gui/slick/interfaces/default/config_search.tmpl
index d9004fb6..63f7de23 100755
--- a/gui/slick/interfaces/default/config_search.tmpl
+++ b/gui/slick/interfaces/default/config_search.tmpl
@@ -73,6 +73,7 @@
minutes between checking recent updated shows (minimum $sickbeard.MIN_RECENTSEARCH_FREQUENCY)
+ enter 4489 for experimental internal provider frequencies
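A minimal sketch of how that 4489 sentinel is interpreted (standalone Python; mirrors the tuple-index idiom in the sickbeard/__init__.py hunk below):

import datetime

RECENTSEARCH_FREQUENCY = 4489  # value a user enters on this config page

# (a, b)[cond] picks b when cond is True, since True indexes as 1
update_interval = datetime.timedelta(
    minutes=(RECENTSEARCH_FREQUENCY, 1)[4489 == RECENTSEARCH_FREQUENCY])

assert datetime.timedelta(minutes=1) == update_interval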
diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py
index 57daa177..cee74561 100755
--- a/sickbeard/__init__.py
+++ b/sickbeard/__init__.py
@@ -30,6 +30,7 @@ from threading import Lock
import sys
import os.path
import uuid
+import ast
import base64
sys.path.insert(1, os.path.abspath('../lib'))
from sickbeard import helpers, logger, db, naming, metadata, providers, scene_exceptions, scene_numbering, \
@@ -174,6 +175,7 @@ IMDB_ACCOUNTS = []
IMDB_DEFAULT_LIST_ID = '64552276'
IMDB_DEFAULT_LIST_NAME = 'SickGear'
PROVIDER_ORDER = []
+PROVIDER_HOMES = {}
NAMING_MULTI_EP = False
NAMING_ANIME_MULTI_EP = False
@@ -520,7 +522,7 @@ def initialize(consoleLogging=True):
KEEP_PROCESSED_DIR, PROCESS_METHOD, TV_DOWNLOAD_DIR, MIN_RECENTSEARCH_FREQUENCY, DEFAULT_UPDATE_FREQUENCY, MIN_UPDATE_FREQUENCY, UPDATE_FREQUENCY, \
showQueueScheduler, searchQueueScheduler, ROOT_DIRS, CACHE_DIR, ACTUAL_CACHE_DIR, ZONEINFO_DIR, TIMEZONE_DISPLAY, \
NAMING_PATTERN, NAMING_MULTI_EP, NAMING_ANIME_MULTI_EP, NAMING_FORCE_FOLDERS, NAMING_ABD_PATTERN, NAMING_CUSTOM_ABD, NAMING_SPORTS_PATTERN, NAMING_CUSTOM_SPORTS, NAMING_ANIME_PATTERN, NAMING_CUSTOM_ANIME, NAMING_STRIP_YEAR, \
- RENAME_EPISODES, AIRDATE_EPISODES, properFinderScheduler, PROVIDER_ORDER, autoPostProcesserScheduler, \
+ RENAME_EPISODES, AIRDATE_EPISODES, properFinderScheduler, PROVIDER_ORDER, PROVIDER_HOMES, autoPostProcesserScheduler, \
providerList, newznabProviderList, torrentRssProviderList, \
EXTRA_SCRIPTS, USE_TWITTER, TWITTER_USERNAME, TWITTER_PASSWORD, TWITTER_PREFIX, RECENTSEARCH_FREQUENCY, \
USE_BOXCAR2, BOXCAR2_ACCESSTOKEN, BOXCAR2_NOTIFY_ONDOWNLOAD, BOXCAR2_NOTIFY_ONSUBTITLEDOWNLOAD, BOXCAR2_NOTIFY_ONSNATCH, BOXCAR2_SOUND, \
@@ -580,10 +582,6 @@ def initialize(consoleLogging=True):
ACTUAL_CACHE_DIR = check_setting_str(CFG, 'General', 'cache_dir', 'cache')
- # fix bad configs due to buggy code
- if ACTUAL_CACHE_DIR == 'None':
- ACTUAL_CACHE_DIR = 'cache'
-
# unless they specify, put the cache dir inside the data dir
if not os.path.isabs(ACTUAL_CACHE_DIR):
CACHE_DIR = os.path.join(DATA_DIR, ACTUAL_CACHE_DIR)
@@ -699,6 +697,7 @@ def initialize(consoleLogging=True):
SCENE_DEFAULT = bool(check_setting_int(CFG, 'General', 'scene_default', 0))
PROVIDER_ORDER = check_setting_str(CFG, 'General', 'provider_order', '').split()
+ PROVIDER_HOMES = ast.literal_eval(check_setting_str(CFG, 'General', 'provider_homes', None) or '{}')
NAMING_PATTERN = check_setting_str(CFG, 'General', 'naming_pattern', 'Season %0S/%SN - S%0SE%0E - %EN')
NAMING_ABD_PATTERN = check_setting_str(CFG, 'General', 'naming_abd_pattern', '%SN - %A.D - %EN')
@@ -1029,28 +1028,28 @@ def initialize(consoleLogging=True):
prov_id = torrent_prov.get_id()
prov_id_uc = torrent_prov.get_id().upper()
torrent_prov.enabled = bool(check_setting_int(CFG, prov_id_uc, prov_id, 0))
+ if getattr(torrent_prov, 'url_edit', None):
+ torrent_prov.url_home = check_setting_str(CFG, prov_id_uc, prov_id + '_url_home', [])
if hasattr(torrent_prov, 'api_key'):
torrent_prov.api_key = check_setting_str(CFG, prov_id_uc, prov_id + '_api_key', '')
if hasattr(torrent_prov, 'hash'):
torrent_prov.hash = check_setting_str(CFG, prov_id_uc, prov_id + '_hash', '')
if hasattr(torrent_prov, 'digest'):
torrent_prov.digest = check_setting_str(CFG, prov_id_uc, prov_id + '_digest', '')
- if hasattr(torrent_prov, 'username'):
- torrent_prov.username = check_setting_str(CFG, prov_id_uc, prov_id + '_username', '')
+ for user_type in ['username', 'uid']:
+ if hasattr(torrent_prov, user_type):
+ setattr(torrent_prov, user_type,
+ check_setting_str(CFG, prov_id_uc, '%s_%s' % (prov_id, user_type), ''))
if hasattr(torrent_prov, 'password'):
torrent_prov.password = check_setting_str(CFG, prov_id_uc, prov_id + '_password', '')
if hasattr(torrent_prov, 'passkey'):
torrent_prov.passkey = check_setting_str(CFG, prov_id_uc, prov_id + '_passkey', '')
- if hasattr(torrent_prov, 'proxy'):
- torrent_prov.proxy.enabled = bool(check_setting_int(CFG, prov_id_uc, prov_id + '_proxy', 0))
- if hasattr(torrent_prov.proxy, 'url'):
- torrent_prov.proxy.url = check_setting_str(CFG, prov_id_uc, prov_id + '_proxy_url', '')
if hasattr(torrent_prov, 'confirmed'):
torrent_prov.confirmed = bool(check_setting_int(CFG, prov_id_uc, prov_id + '_confirmed', 0))
if hasattr(torrent_prov, 'options'):
torrent_prov.options = check_setting_str(CFG, prov_id_uc, prov_id + '_options', '')
if hasattr(torrent_prov, '_seed_ratio'):
- torrent_prov._seed_ratio = check_setting_str(CFG, prov_id_uc, prov_id + '_seed_ratio', '').replace('None', '')
+ torrent_prov._seed_ratio = check_setting_str(CFG, prov_id_uc, prov_id + '_seed_ratio', '')
if hasattr(torrent_prov, 'seed_time'):
torrent_prov.seed_time = check_setting_int(CFG, prov_id_uc, prov_id + '_seed_time', '')
if hasattr(torrent_prov, 'minseed'):
@@ -1087,7 +1086,8 @@ def initialize(consoleLogging=True):
nzb_prov.search_fallback = bool(check_setting_int(CFG, prov_id_uc, prov_id + '_search_fallback', 0))
if hasattr(nzb_prov, 'enable_recentsearch'):
nzb_prov.enable_recentsearch = bool(check_setting_int(CFG, prov_id_uc,
- prov_id + '_enable_recentsearch', 1))
+ prov_id + '_enable_recentsearch', 1)) or \
+ not getattr(nzb_prov, 'supports_backlog', True)
if hasattr(nzb_prov, 'enable_backlog'):
nzb_prov.enable_backlog = bool(check_setting_int(CFG, prov_id_uc, prov_id + '_enable_backlog', 1))
@@ -1157,7 +1157,7 @@ def initialize(consoleLogging=True):
cycleTime=datetime.timedelta(seconds=3),
threadName='SEARCHQUEUE')
- update_interval = datetime.timedelta(minutes=RECENTSEARCH_FREQUENCY)
+ update_interval = datetime.timedelta(minutes=(RECENTSEARCH_FREQUENCY, 1)[4489 == RECENTSEARCH_FREQUENCY])
recentSearchScheduler = scheduler.Scheduler(search_recent.RecentSearcher(),
cycleTime=update_interval,
threadName='RECENTSEARCHER',
@@ -1457,6 +1457,7 @@ def save_config():
new_config['General']['anime_default'] = int(ANIME_DEFAULT)
new_config['General']['scene_default'] = int(SCENE_DEFAULT)
new_config['General']['provider_order'] = ' '.join(PROVIDER_ORDER)
+ new_config['General']['provider_homes'] = '%s' % PROVIDER_HOMES
new_config['General']['version_notify'] = int(VERSION_NOTIFY)
new_config['General']['auto_update'] = int(AUTO_UPDATE)
new_config['General']['notify_on_update'] = int(NOTIFY_ON_UPDATE)
@@ -1522,73 +1523,40 @@ def save_config():
new_config['Blackhole']['nzb_dir'] = NZB_DIR
new_config['Blackhole']['torrent_dir'] = TORRENT_DIR
- # dynamically save provider settings
- for torrent_prov in [curProvider for curProvider in providers.sortedProviderList()
- if GenericProvider.TORRENT == curProvider.providerType]:
- prov_id = torrent_prov.get_id()
- prov_id_uc = torrent_prov.get_id().upper()
- new_config[prov_id_uc] = {}
- new_config[prov_id_uc][prov_id] = int(torrent_prov.enabled)
- if hasattr(torrent_prov, 'digest'):
- new_config[prov_id_uc][prov_id + '_digest'] = torrent_prov.digest
- if hasattr(torrent_prov, 'hash'):
- new_config[prov_id_uc][prov_id + '_hash'] = torrent_prov.hash
- if hasattr(torrent_prov, 'api_key'):
- new_config[prov_id_uc][prov_id + '_api_key'] = torrent_prov.api_key
- if hasattr(torrent_prov, 'username'):
- new_config[prov_id_uc][prov_id + '_username'] = torrent_prov.username
- if hasattr(torrent_prov, 'password'):
- new_config[prov_id_uc][prov_id + '_password'] = helpers.encrypt(torrent_prov.password, ENCRYPTION_VERSION)
- if hasattr(torrent_prov, 'passkey'):
- new_config[prov_id_uc][prov_id + '_passkey'] = torrent_prov.passkey
- if hasattr(torrent_prov, 'confirmed'):
- new_config[prov_id_uc][prov_id + '_confirmed'] = int(torrent_prov.confirmed)
- if hasattr(torrent_prov, '_seed_ratio'):
- new_config[prov_id_uc][prov_id + '_seed_ratio'] = torrent_prov.seed_ratio()
- if hasattr(torrent_prov, 'seed_time'):
- new_config[prov_id_uc][prov_id + '_seed_time'] = torrent_prov.seed_time
- if hasattr(torrent_prov, 'minseed'):
- new_config[prov_id_uc][prov_id + '_minseed'] = int(torrent_prov.minseed)
- if hasattr(torrent_prov, 'minleech'):
- new_config[prov_id_uc][prov_id + '_minleech'] = int(torrent_prov.minleech)
- if hasattr(torrent_prov, 'freeleech'):
- new_config[prov_id_uc][prov_id + '_freeleech'] = int(torrent_prov.freeleech)
- if hasattr(torrent_prov, 'reject_m2ts'):
- new_config[prov_id_uc][prov_id + '_reject_m2ts'] = int(torrent_prov.reject_m2ts)
- if hasattr(torrent_prov, 'enable_recentsearch'):
- new_config[prov_id_uc][prov_id + '_enable_recentsearch'] = int(torrent_prov.enable_recentsearch)
- if hasattr(torrent_prov, 'enable_backlog'):
- new_config[prov_id_uc][prov_id + '_enable_backlog'] = int(torrent_prov.enable_backlog)
- if hasattr(torrent_prov, 'search_mode'):
- new_config[prov_id_uc][prov_id + '_search_mode'] = torrent_prov.search_mode
- if hasattr(torrent_prov, 'search_fallback'):
- new_config[prov_id_uc][prov_id + '_search_fallback'] = int(torrent_prov.search_fallback)
- if hasattr(torrent_prov, 'options'):
- new_config[prov_id_uc][prov_id + '_options'] = torrent_prov.options
- if hasattr(torrent_prov, 'proxy'):
- new_config[prov_id_uc][prov_id + '_proxy'] = int(torrent_prov.proxy.enabled)
- if hasattr(torrent_prov.proxy, 'url'):
- new_config[prov_id_uc][prov_id + '_proxy_url'] = torrent_prov.proxy.url
+ for src in [x for x in providers.sortedProviderList() if GenericProvider.TORRENT == x.providerType]:
+ src_id = src.get_id()
+ src_id_uc = src_id.upper()
+ new_config[src_id_uc] = {}
+ new_config[src_id_uc][src_id] = int(src.enabled)
+ if getattr(src, 'url_edit', None):
+ new_config[src_id_uc][src_id + '_url_home'] = src.url_home
- for nzb_prov in [curProvider for curProvider in providers.sortedProviderList()
- if GenericProvider.NZB == curProvider.providerType]:
- prov_id = nzb_prov.get_id()
- prov_id_uc = nzb_prov.get_id().upper()
- new_config[prov_id_uc] = {}
- new_config[prov_id_uc][prov_id] = int(nzb_prov.enabled)
+ if hasattr(src, 'password'):
+ new_config[src_id_uc][src_id + '_password'] = helpers.encrypt(src.password, ENCRYPTION_VERSION)
- if hasattr(nzb_prov, 'api_key'):
- new_config[prov_id_uc][prov_id + '_api_key'] = nzb_prov.api_key
- if hasattr(nzb_prov, 'username'):
- new_config[prov_id_uc][prov_id + '_username'] = nzb_prov.username
- if hasattr(nzb_prov, 'search_mode'):
- new_config[prov_id_uc][prov_id + '_search_mode'] = nzb_prov.search_mode
- if hasattr(nzb_prov, 'search_fallback'):
- new_config[prov_id_uc][prov_id + '_search_fallback'] = int(nzb_prov.search_fallback)
- if hasattr(nzb_prov, 'enable_recentsearch'):
- new_config[prov_id_uc][prov_id + '_enable_recentsearch'] = int(nzb_prov.enable_recentsearch)
- if hasattr(nzb_prov, 'enable_backlog'):
- new_config[prov_id_uc][prov_id + '_enable_backlog'] = int(nzb_prov.enable_backlog)
+ for (setting, value) in [
+ ('%s_%s' % (src_id, k), getattr(src, k, v) if not v else helpers.tryInt(getattr(src, k, None)))
+ for (k, v) in [
+ ('api_key', None), ('passkey', None), ('digest', None), ('hash', None), ('username', ''), ('uid', ''),
+ ('minseed', 1), ('minleech', 1), ('confirmed', 1), ('freeleech', 1), ('reject_m2ts', 1),
+ ('enable_recentsearch', 1), ('enable_backlog', 1), ('search_mode', None), ('search_fallback', 1),
+ ('seed_time', None)] if hasattr(src, k)]:
+ new_config[src_id_uc][setting] = value
+
+ if hasattr(src, '_seed_ratio'):
+ new_config[src_id_uc][src_id + '_seed_ratio'] = src.seed_ratio()
+
+ for src in [x for x in providers.sortedProviderList() if GenericProvider.NZB == x.providerType]:
+ src_id = src.get_id()
+ src_id_uc = src.get_id().upper()
+ new_config[src_id_uc] = {}
+ new_config[src_id_uc][src_id] = int(src.enabled)
+
+ for attr in [x for x in ['api_key', 'username', 'search_mode'] if hasattr(src, x)]:
+ new_config[src_id_uc]['%s_%s' % (src_id, attr)] = getattr(src, attr)
+
+ for attr in [x for x in ['enable_recentsearch', 'enable_backlog', 'search_fallback'] if hasattr(src, x)]:
+ new_config[src_id_uc]['%s_%s' % (src_id, attr)] = helpers.tryInt(getattr(src, attr, None))
new_config['SABnzbd'] = {}
new_config['SABnzbd']['sab_username'] = SAB_USERNAME
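A minimal sketch of the PROVIDER_HOMES round-trip introduced in this file (standalone Python; dict contents illustrative): save_config() persists the dict as its repr, and initialize() restores it with ast.literal_eval, which parses Python literals without executing code.

import ast
import time

PROVIDER_HOMES = {'iptorrents': ('https://iptorrents.eu/', int(time.time()) + 15 * 60)}

saved = '%s' % PROVIDER_HOMES               # what save_config() writes to the ini
restored = ast.literal_eval(saved or '{}')  # what initialize() reads back

assert PROVIDER_HOMES == restored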
diff --git a/sickbeard/common.py b/sickbeard/common.py
index 4f931fbe..2d34824f 100644
--- a/sickbeard/common.py
+++ b/sickbeard/common.py
@@ -213,6 +213,9 @@ class Quality:
return Quality.SDTV
elif checkName(['(dvd.?rip|b[r|d]rip)(.ws)?(.(xvid|divx|x264|h.?264))?'], any) and not checkName(['(720|1080|2160)[pi]'], all):
return Quality.SDDVD
+ elif checkName(['(xvid|divx|480p)'], any) and not checkName(['(720|1080|2160)[pi]'], all) \
+ and not checkName(['hr.ws.pdtv.(x264|h.?264)'], any):
+ return Quality.SDTV
elif checkName(['720p', 'hdtv', 'x264|h.?264'], all) or checkName(['hr.ws.pdtv.(x264|h.?264)'], any) \
and not checkName(['(1080|2160)[pi]'], all):
return Quality.HDTV
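A sketch of the new fall-through (standalone Python; regexes mirror the checkName arguments): a 480p/xvid/divx name with no 720/1080/2160 marker and no hr.ws.pdtv token now resolves to SDTV before the HDTV branch is reached.

import re

name = 'Show.S01E01.480p.x264-GRP'
is_sdtv = bool(re.search('(?i)(xvid|divx|480p)', name)
               and not re.search('(?i)(720|1080|2160)[pi]', name)
               and not re.search('(?i)hr.ws.pdtv.(x264|h.?264)', name))

assert is_sdtv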
diff --git a/sickbeard/config.py b/sickbeard/config.py
index 3ec021ac..5dcffef9 100644
--- a/sickbeard/config.py
+++ b/sickbeard/config.py
@@ -420,7 +420,7 @@ def check_setting_str(config, cfg_name, item_name, def_val, log=True):
else:
logger.log('%s -> ******' % item_name, logger.DEBUG)
- return my_val
+ return (my_val, def_val)['None' == my_val]
class ConfigMigrator():
@@ -783,4 +783,4 @@ class ConfigMigrator():
old_token = check_setting_str(self.config_obj, 'Trakt', 'trakt_token', '')
old_refresh_token = check_setting_str(self.config_obj, 'Trakt', 'trakt_refresh_token', '')
if old_token and old_refresh_token:
- TraktAPI.add_account(old_token, old_refresh_token, None)
\ No newline at end of file
+ TraktAPI.add_account(old_token, old_refresh_token, None)
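A minimal sketch of the check_setting_str change (hypothetical helper name): rather than patching bad legacy values at call sites, as the removed ACTUAL_CACHE_DIR fix-up in __init__.py did, the helper itself now falls back to the default whenever the stored value is the literal string 'None'.

def normalize(my_val, def_val):
    return (my_val, def_val)['None' == my_val]  # True indexes as 1 -> def_val

assert 'cache' == normalize('cache', 'cache')
assert 'cache' == normalize('None', 'cache')    # bad legacy value falls back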
diff --git a/sickbeard/db.py b/sickbeard/db.py
index f577c0fd..db27d781 100644
--- a/sickbeard/db.py
+++ b/sickbeard/db.py
@@ -95,17 +95,23 @@ class DBConnection(object):
while attempt < 5:
try:
+ affected = 0
for qu in querylist:
+ cursor = self.connection.cursor()
if len(qu) == 1:
if logTransaction:
logger.log(qu[0], logger.DB)
- sqlResult.append(self.connection.execute(qu[0]).fetchall())
+
+ sqlResult.append(cursor.execute(qu[0]).fetchall())
elif len(qu) > 1:
if logTransaction:
logger.log(qu[0] + ' with args ' + str(qu[1]), logger.DB)
- sqlResult.append(self.connection.execute(qu[0], qu[1]).fetchall())
+ sqlResult.append(cursor.execute(qu[0], qu[1]).fetchall())
+ affected += cursor.rowcount
self.connection.commit()
- logger.log(u'Transaction with ' + str(len(querylist)) + u' queries executed', logger.DEBUG)
+ if affected > 0:
+ logger.log(u'Transaction with %s queries executed affected %i row%s' % (
+ len(querylist), affected, helpers.maybe_plural(affected)), logger.DEBUG)
return sqlResult
except sqlite3.OperationalError as e:
sqlResult = []
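A sketch of the mass_action change (standalone sqlite3; table and queries illustrative): each query now runs on its own cursor so that cursor.rowcount, a per-statement counter, can be summed into the "affected N rows" log line.

import sqlite3

connection = sqlite3.connect(':memory:')
connection.execute('CREATE TABLE tv_episodes (status INTEGER)')

querylist = [('INSERT INTO tv_episodes VALUES (?)', (1,)),
             ('INSERT INTO tv_episodes VALUES (?)', (2,))]
affected = 0
for qu in querylist:
    cursor = connection.cursor()
    cursor.execute(qu[0], qu[1])
    affected += cursor.rowcount
connection.commit()

assert 2 == affected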
diff --git a/sickbeard/nzbSplitter.py b/sickbeard/nzbSplitter.py
index 4a64f7f8..14c467cf 100644
--- a/sickbeard/nzbSplitter.py
+++ b/sickbeard/nzbSplitter.py
@@ -18,7 +18,6 @@
from __future__ import with_statement
-import urllib2
import xml.etree.cElementTree as etree
import xml.etree
import re
@@ -84,7 +83,7 @@ def createNZBString(fileElements, xmlns):
for curFile in fileElements:
rootElement.append(stripNS(curFile, xmlns))
- return xml.etree.ElementTree.tostring(rootElement, 'utf-8', 'replace')
+ return xml.etree.ElementTree.tostring(rootElement, 'utf-8')
def saveNZB(nzbName, nzbString):
@@ -158,7 +157,7 @@ def splitResult(result):
wantEp = True
for epNo in parse_result.episode_numbers:
- if not result.extraInfo[0].wantEpisode(season, epNo, result.quality):
+ if not result.show.wantEpisode(season, epNo, result.quality):
logger.log(u"Ignoring result " + newNZB + " because we don't want an episode that is " +
Quality.qualityStrings[result.quality], logger.DEBUG)
wantEp = False
@@ -169,13 +168,14 @@ def splitResult(result):
# get all the associated episode objects
epObjList = []
for curEp in parse_result.episode_numbers:
- epObjList.append(result.extraInfo[0].getEpisode(season, curEp))
+ epObjList.append(result.show.getEpisode(season, curEp))
# make a result
curResult = classes.NZBDataSearchResult(epObjList)
curResult.name = newNZB
curResult.provider = result.provider
curResult.quality = result.quality
+ curResult.show = result.show
curResult.extraInfo = [createNZBString(separateNZBs[newNZB], xmlns)]
resultList.append(curResult)
diff --git a/sickbeard/providers/__init__.py b/sickbeard/providers/__init__.py
index 6e1eaad2..73909a59 100755
--- a/sickbeard/providers/__init__.py
+++ b/sickbeard/providers/__init__.py
@@ -18,30 +18,37 @@
from os import sys
+import os.path
import sickbeard
from . import generic
-from sickbeard import logger
+from sickbeard import logger, encodingKludge as ek
# usenet
from . import newznab, omgwtfnzbs, womble
# torrent
-from . import alpharatio, beyondhd, bitmetv, bitsoup, btn, freshontv, funfile, gftracker, grabtheinfo, \
- hdbits, hdspace, iptorrents, kat, morethan, pisexy, pretome, rarbg, scc, scenetime, shazbat, speedcd, \
+from . import alpharatio, beyondhd, bitmetv, btn, freshontv, funfile, gftracker, grabtheinfo, \
+ hd4free, hdbits, hdspace, iptorrents, kat, morethan, pisexy, pretome, rarbg, scc, scenetime, shazbat, speedcd, \
thepiratebay, torrentbytes, torrentday, torrenting, torrentleech, torrentshack, transmithe_net, tvchaosuk
# anime
from . import nyaatorrents, tokyotoshokan
+# custom
+try:
+ from . import custom01
+except:
+ pass
__all__ = ['omgwtfnzbs',
'womble',
'alpharatio',
'beyondhd',
'bitmetv',
- 'bitsoup',
'btn',
+ 'custom01',
'freshontv',
'funfile',
'gftracker',
'grabtheinfo',
+ 'hd4free',
'hdbits',
'hdspace',
'iptorrents',
@@ -208,16 +215,19 @@ def makeTorrentRssProvider(configString):
def getDefaultNewznabProviders():
- return 'Sick Beard Index|http://lolo.sickbeard.com/|0|5030,5040|0|eponly|0|0|0!!!NZBs.org|https://nzbs.org/||5030,5040|0|eponly|0|0|0!!!Usenet-Crawler|https://www.usenet-crawler.com/||5030,5040|0|eponly|0|0|0'
+ return '!!!'.join(['Sick Beard Index|http://lolo.sickbeard.com/|0|5030,5040|0|eponly|0|0|0',
+ 'NZBgeek|https://api.nzbgeek.info/||5030,5040|0|eponly|0|0|0',
+ 'NZBs.org|https://nzbs.org/||5030,5040|0|eponly|0|0|0',
+ 'Usenet-Crawler|https://www.usenet-crawler.com/||5030,5040|0|eponly|0|0|0'])
def getProviderModule(name):
- name = name.lower()
- prefix = "sickbeard.providers."
+ prefix, cprov, name = 'sickbeard.providers.', 'motsuc'[::-1], name.lower()
if name in __all__ and prefix + name in sys.modules:
return sys.modules[prefix + name]
- else:
- raise Exception("Can't find " + prefix + name + " in " + "Providers")
+ elif cprov in name:
+ return None
+ raise Exception('Can\'t find %s%s in providers' % (prefix, name))
def getProviderClass(id):
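A sketch of the optional provider hook (assumes only that a custom01.py may or may not exist): the guarded import lets a user drop a private module into sickbeard/providers without making it a hard dependency, and getProviderModule() now returns None instead of raising for names containing 'motsuc'[::-1].

try:
    from sickbeard.providers import custom01  # optional, user-supplied module
except Exception:
    custom01 = None

assert 'custom' == 'motsuc'[::-1]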
diff --git a/sickbeard/providers/alpharatio.py b/sickbeard/providers/alpharatio.py
index 9a483617..13478d7f 100644
--- a/sickbeard/providers/alpharatio.py
+++ b/sickbeard/providers/alpharatio.py
@@ -21,7 +21,7 @@ import re
import traceback
from . import generic
-from sickbeard import logger, tvcache
+from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode
@@ -31,7 +31,7 @@ class AlphaRatioProvider(generic.TorrentProvider):
def __init__(self):
- generic.TorrentProvider.__init__(self, 'AlphaRatio')
+ generic.TorrentProvider.__init__(self, 'AlphaRatio', cache_update_freq=20)
self.url_base = 'https://alpharatio.cc/'
self.urls = {'config_provider_home_uri': self.url_base,
@@ -44,9 +44,7 @@ class AlphaRatioProvider(generic.TorrentProvider):
self.url = self.urls['config_provider_home_uri']
- self.username, self.password, self.minseed, self.minleech = 4 * [None]
- self.freeleech = False
- self.cache = AlphaRatioCache(self)
+ self.username, self.password, self.freeleech, self.minseed, self.minleech = 5 * [None]
def _authorised(self, **kwargs):
@@ -65,7 +63,7 @@ class AlphaRatioProvider(generic.TorrentProvider):
for mode in search_params.keys():
for search_string in search_params[mode]:
search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
- search_url = self.urls['search'] % (search_string, ('', '&freetorrent=1')[self.freeleech])
+ search_url = self.urls['search'] % (search_string, ('&freetorrent=1', '')[not self.freeleech])
html = self.get_url(search_url)
@@ -111,16 +109,4 @@ class AlphaRatioProvider(generic.TorrentProvider):
return results
-class AlphaRatioCache(tvcache.TVCache):
-
- def __init__(self, this_provider):
- tvcache.TVCache.__init__(self, this_provider)
-
- self.update_freq = 20 # cache update frequency
-
- def _cache_data(self):
-
- return self.provider.cache_data()
-
-
provider = AlphaRatioProvider()
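A sketch of the cache consolidation applied to AlphaRatio and the other providers in this patch (stub classes, not the real tvcache API): the per-provider *Cache subclasses are dropped because TorrentProvider.__init__ now binds cache._cache_data to the provider and honours an optional cache_update_freq.

class CacheStub(object):
    update_freq = 10        # the old per-class default
    _cache_data = None

class ProviderStub(object):
    def __init__(self, name, cache_update_freq=None):
        self.name, self.cache = name, CacheStub()
        self.cache._cache_data = self._cache_data
        if cache_update_freq:
            self.cache.update_freq = cache_update_freq

    def _cache_data(self):
        return self._search_provider({'Cache': ['']})

    def _search_provider(self, search_params, **kwargs):
        return []

assert 20 == ProviderStub('AlphaRatio', cache_update_freq=20).cache.update_freq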
diff --git a/sickbeard/providers/beyondhd.py b/sickbeard/providers/beyondhd.py
index 203886dc..5dfde720 100644
--- a/sickbeard/providers/beyondhd.py
+++ b/sickbeard/providers/beyondhd.py
@@ -19,7 +19,7 @@ import re
import time
from . import generic
-from sickbeard import logger, tvcache
+from sickbeard import logger
from sickbeard.exceptions import AuthException
from lib.unidecode import unidecode
@@ -41,7 +41,6 @@ class BeyondHDProvider(generic.TorrentProvider):
self.url = self.urls['config_provider_home_uri']
self.passkey, self.minseed, self.minleech = 3 * [None]
- self.cache = BeyondHDCache(self)
def _check_auth_from_data(self, data_json):
@@ -101,14 +100,4 @@ class BeyondHDProvider(generic.TorrentProvider):
return generic.TorrentProvider._episode_strings(self, ep_obj, scene=False, **kwargs)
-class BeyondHDCache(tvcache.TVCache):
-
- def __init__(self, this_provider):
- tvcache.TVCache.__init__(self, this_provider)
-
- def _cache_data(self):
-
- return self.provider.cache_data()
-
-
provider = BeyondHDProvider()
diff --git a/sickbeard/providers/bitmetv.py b/sickbeard/providers/bitmetv.py
index f46af25c..550addd6 100644
--- a/sickbeard/providers/bitmetv.py
+++ b/sickbeard/providers/bitmetv.py
@@ -19,7 +19,7 @@ import re
import traceback
from . import generic
-from sickbeard import logger, tvcache
+from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode
@@ -28,7 +28,7 @@ from lib.unidecode import unidecode
class BitmetvProvider(generic.TorrentProvider):
def __init__(self):
- generic.TorrentProvider.__init__(self, 'BitMeTV')
+ generic.TorrentProvider.__init__(self, 'BitMeTV', cache_update_freq=7)
self.url_base = 'http://www.bitmetv.org/'
@@ -42,7 +42,6 @@ class BitmetvProvider(generic.TorrentProvider):
self.url = self.urls['config_provider_home_uri']
self.digest, self.minseed, self.minleech = 3 * [None]
- self.cache = BitmetvCache(self)
def _authorised(self, **kwargs):
@@ -63,9 +62,7 @@ class BitmetvProvider(generic.TorrentProvider):
for mode in search_params.keys():
for search_string in search_params[mode]:
search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
- category = 'cat=%s' % self.categories[
- (mode in ['Season', 'Episode'] and self.show and self.show.is_anime) and 'anime' or 'shows']
- search_url = self.urls['search'] % (category, search_string)
+ search_url = self.urls['search'] % (self._categories_string(mode, 'cat=%s'), search_string)
html = self.get_url(search_url)
@@ -89,8 +86,8 @@ class BitmetvProvider(generic.TorrentProvider):
continue
info = tr.find('a', href=rc['info'])
- title = 'title' in info.attrs and info.attrs['title'] or info.get_text().strip()
- download_url = self.urls['get'] % tr.find('a', href=rc['get']).get('href')
+ title = info.attrs.get('title') or info.get_text().strip()
+ download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
except (AttributeError, TypeError, ValueError):
continue
@@ -116,16 +113,4 @@ class BitmetvProvider(generic.TorrentProvider):
return 'bitmetv_digest' == key and 'use... \'uid=xx; pass=yy\'' or ''
-class BitmetvCache(tvcache.TVCache):
-
- def __init__(self, this_provider):
- tvcache.TVCache.__init__(self, this_provider)
-
- self.update_freq = 7 # cache update frequency
-
- def _cache_data(self):
-
- return self.provider.cache_data()
-
-
provider = BitmetvProvider()
diff --git a/sickbeard/providers/bitsoup.py b/sickbeard/providers/bitsoup.py
deleted file mode 100644
index 7bb58e2b..00000000
--- a/sickbeard/providers/bitsoup.py
+++ /dev/null
@@ -1,120 +0,0 @@
-# coding=utf-8
-#
-# This file is part of SickGear.
-#
-# SickGear is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# SickGear is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
-
-import re
-import traceback
-
-from . import generic
-from sickbeard import logger, tvcache
-from sickbeard.bs4_parser import BS4Parser
-from sickbeard.helpers import tryInt
-from lib.unidecode import unidecode
-
-
-class BitSoupProvider(generic.TorrentProvider):
-
- def __init__(self):
- generic.TorrentProvider.__init__(self, 'BitSoup')
-
- self.url_base = 'https://www.bitsoup.me/'
- self.urls = {'config_provider_home_uri': self.url_base,
- 'login': self.url_base + 'takelogin.php',
- 'search': self.url_base + 'browse.php?search=%s&%s&incldead=0&blah=0',
- 'get': self.url_base + '%s'}
-
- self.categories = {'shows': [42, 45, 49, 32, 7], 'anime': [23]}
-
- self.url = self.urls['config_provider_home_uri']
-
- self.username, self.password, self.minseed, self.minleech = 4 * [None]
- self.cache = BitSoupCache(self)
-
- def _search_provider(self, search_params, **kwargs):
-
- results = []
- if not self._authorised():
- return results
-
- items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
-
- rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'detail', 'get': 'download'}.items())
- for mode in search_params.keys():
- for search_string in search_params[mode]:
- search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
- search_url = self.urls['search'] % (search_string, self._categories_string(mode))
-
- html = self.get_url(search_url)
-
- cnt = len(items[mode])
- try:
- if not html or self._has_no_results(html):
- raise generic.HaltParseException
-
- with BS4Parser(html, 'html.parser', attr='class="koptekst"') as soup:
- torrent_table = soup.find('table', attrs={'class': 'koptekst'})
- torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')
-
- if 2 > len(torrent_rows):
- raise generic.HaltParseException
-
- for tr in torrent_rows[1:]:
- try:
- seeders, leechers, size = [tryInt(n, n) for n in [
- (tr.find_all('td')[x].get_text().strip()) for x in (-3, -2, -5)]]
- if self._peers_fail(mode, seeders, leechers):
- continue
-
- info = tr.find('a', href=rc['info'])
- title = info.get_text().strip()
-
- download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
- except (AttributeError, TypeError, ValueError):
- continue
-
- if title and download_url:
- items[mode].append((title, download_url, seeders, self._bytesizer(size)))
-
- except generic.HaltParseException:
- pass
- except Exception:
- logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
- self._log_search(mode, len(items[mode]) - cnt, search_url)
-
- self._sort_seeders(mode, items)
-
- results = list(set(results + items[mode]))
-
- return results
-
- def _episode_strings(self, ep_obj, **kwargs):
-
- return generic.TorrentProvider._episode_strings(self, ep_obj, sep_date='|', **kwargs)
-
-
-class BitSoupCache(tvcache.TVCache):
-
- def __init__(self, this_provider):
- tvcache.TVCache.__init__(self, this_provider)
-
- self.update_freq = 20 # cache update frequency
-
- def _cache_data(self):
-
- return self.provider.cache_data()
-
-
-provider = BitSoupProvider()
diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py
index a518f937..34c8ba3a 100644
--- a/sickbeard/providers/freshontv.py
+++ b/sickbeard/providers/freshontv.py
@@ -19,7 +19,7 @@ import re
import traceback
from . import generic
-from sickbeard import logger, tvcache
+from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode
@@ -28,19 +28,19 @@ from lib.unidecode import unidecode
class FreshOnTVProvider(generic.TorrentProvider):
def __init__(self):
- generic.TorrentProvider.__init__(self, 'FreshOnTV')
+ generic.TorrentProvider.__init__(self, 'FreshOnTV', cache_update_freq=20)
self.url_base = 'https://freshon.tv/'
self.urls = {'config_provider_home_uri': self.url_base,
'login': self.url_base + 'login.php?action=makelogin',
- 'search': self.url_base + 'browse.php?incldead=%s&words=0&cat=0&search=%s',
+ 'search': self.url_base + 'browse.php?incldead=%s&words=0&%s&search=%s',
'get': self.url_base + '%s'}
+ self.categories = {'shows': 0, 'anime': 235}
+
self.url = self.urls['config_provider_home_uri']
- self.username, self.password, self.minseed, self.minleech = 4 * [None]
- self.freeleech = False
- self.cache = FreshOnTVCache(self)
+ self.username, self.password, self.freeleech, self.minseed, self.minleech = 5 * [None]
def _authorised(self, **kwargs):
@@ -59,16 +59,17 @@ class FreshOnTVProvider(generic.TorrentProvider):
return results
items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
- freeleech = (0, 3)[self.freeleech]
+ freeleech = (3, 0)[not self.freeleech]
rc = dict((k, re.compile('(?i)' + v))
for (k, v) in {'info': 'detail', 'get': 'download', 'name': '_name'}.items())
for mode in search_params.keys():
for search_string in search_params[mode]:
- search_string, search_url = self._title_and_url((
- isinstance(search_string, unicode) and unidecode(search_string) or search_string,
- self.urls['search'] % (freeleech, search_string)))
+ search_string, void = self._title_and_url((
+ isinstance(search_string, unicode) and unidecode(search_string) or search_string, ''))
+ void, search_url = self._title_and_url((
+ '', self.urls['search'] % (freeleech, self._categories_string(mode, 'cat=%s'), search_string)))
# returns top 15 results by default, expandable in user profile to 100
html = self.get_url(search_url)
@@ -96,7 +97,7 @@ class FreshOnTVProvider(generic.TorrentProvider):
continue
info = tr.find('a', href=rc['info'], attrs={'class': rc['name']})
- title = 'title' in info.attrs and info.attrs['title'] or info.get_text().strip()
+ title = info.attrs.get('title') or info.get_text().strip()
download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
except (AttributeError, TypeError, ValueError):
@@ -117,21 +118,9 @@ class FreshOnTVProvider(generic.TorrentProvider):
return results
- def _get_episode_search_strings(self, ep_obj, **kwargs):
+ def _episode_strings(self, ep_obj, **kwargs):
return generic.TorrentProvider._episode_strings(self, ep_obj, sep_date='|', **kwargs)
-class FreshOnTVCache(tvcache.TVCache):
-
- def __init__(self, this_provider):
- tvcache.TVCache.__init__(self, this_provider)
-
- self.update_freq = 20
-
- def _cache_data(self):
-
- return self.provider.cache_data()
-
-
provider = FreshOnTVProvider()
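A sketch of why the freeleech expressions were inverted to index with `not self.freeleech` here and in the other providers: freeleech now initialises to None via the 5 * [None] unpack, and None is not a valid tuple index, whereas `not None` is a plain bool.

freeleech = None
# the old form would raise: (0, 3)[freeleech] -> TypeError for None
assert 0 == (3, 0)[not freeleech]   # unset/disabled -> 0
assert 3 == (3, 0)[not True]        # enabled -> 3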
diff --git a/sickbeard/providers/funfile.py b/sickbeard/providers/funfile.py
index 7bba7aa8..a5ab41ee 100644
--- a/sickbeard/providers/funfile.py
+++ b/sickbeard/providers/funfile.py
@@ -19,7 +19,7 @@ import re
import traceback
from . import generic
-from sickbeard import logger, tvcache
+from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode
@@ -28,7 +28,7 @@ from lib.unidecode import unidecode
class FunFileProvider(generic.TorrentProvider):
def __init__(self):
- generic.TorrentProvider.__init__(self, 'FunFile')
+ generic.TorrentProvider.__init__(self, 'FunFile', cache_update_freq=15)
self.url_base = 'https://www.funfile.org/'
self.urls = {'config_provider_home_uri': self.url_base,
@@ -41,7 +41,6 @@ class FunFileProvider(generic.TorrentProvider):
self.url = self.urls['config_provider_home_uri']
self.url_timeout = 90
self.username, self.password, self.minseed, self.minleech = 4 * [None]
- self.cache = FunFileCache(self)
def _authorised(self, **kwargs):
@@ -58,10 +57,9 @@ class FunFileProvider(generic.TorrentProvider):
items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
- rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'detail', 'get': 'download',
- 'cats': 'cat=(?:%s)' % self._categories_string(template='', delimiter='|')
- }.items())
+ rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'detail', 'get': 'download'}.items())
for mode in search_params.keys():
+ rc['cats'] = re.compile('(?i)cat=(?:%s)' % self._categories_string(mode, template='', delimiter='|'))
for search_string in search_params[mode]:
search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
search_url = self.urls['search'] % (self._categories_string(mode), search_string)
@@ -91,8 +89,8 @@ class FunFileProvider(generic.TorrentProvider):
if None is tr.find('a', href=rc['cats']) or self._peers_fail(mode, seeders, leechers):
continue
- title = 'title' in info.attrs and info.attrs['title'] or info.get_text().strip()
- download_url = self.urls['get'] % tr.find('a', href=rc['get']).get('href')
+ title = info.attrs.get('title') or info.get_text().strip()
+ download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
except (AttributeError, TypeError, ValueError):
continue
@@ -114,16 +112,4 @@ class FunFileProvider(generic.TorrentProvider):
return results
-class FunFileCache(tvcache.TVCache):
-
- def __init__(self, this_provider):
- tvcache.TVCache.__init__(self, this_provider)
-
- self.update_freq = 15 # cache update frequency
-
- def _cache_data(self):
-
- return self.provider.cache_data()
-
-
provider = FunFileProvider()
diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py
index 57712ddf..697479b1 100644
--- a/sickbeard/providers/generic.py
+++ b/sickbeard/providers/generic.py
@@ -25,6 +25,8 @@ import math
import os
import re
import time
+import urlparse
+import zlib
from base64 import b16encode, b32decode
import sickbeard
@@ -53,7 +55,7 @@ class GenericProvider:
# these need to be set in the subclass
self.providerType = None
self.name = name
- self.supportsBacklog = supports_backlog
+ self.supports_backlog = supports_backlog
self.anime_only = anime_only
if anime_only:
self.proper_search_terms = 'v1|v2|v3|v4|v5'
@@ -518,7 +520,7 @@ class GenericProvider:
if hasattr(self, 'cookies'):
cookies = self.cookies
- if not re.match('^(\w+=\w+[;\s]*)+$', cookies):
+ if not (cookies and re.match('^(\w+=\w+[;\s]*)+$', cookies)):
return False
cj = requests.utils.add_dict_to_cookiejar(self.session.cookies,
@@ -544,9 +546,13 @@ class GenericProvider:
def _categories_string(self, mode='Cache', template='c%s=1', delimiter='&'):
- return delimiter.join([('%s', template)[any(template)] % c for c in sorted(self.categories['shows'] + (
- [], [] if 'anime' not in self.categories else self.categories['anime'])[
- ('Cache' == mode and helpers.has_anime()) or ((mode in ['Season', 'Episode']) and self.show and self.show.is_anime)])])
+ return delimiter.join([('%s', template)[any(template)] % c for c in sorted(
+ 'shows' in self.categories and (isinstance(self.categories['shows'], type([])) and
+ self.categories['shows'] or [self.categories['shows']]) or
+ self.categories[(mode, 'Episode')['Propers' == mode]] +
+ ([], self.categories.get('anime') or [])[
+ (mode in ['Cache', 'Propers'] and helpers.has_anime()) or
+ ((mode in ['Season', 'Episode']) and self.show and self.show.is_anime)])])
@staticmethod
def _bytesizer(size_dim=''):
@@ -577,12 +583,8 @@ class NZBProvider(object, GenericProvider):
def maybe_apikey(self):
- if hasattr(self, 'needs_auth') and self.needs_auth:
- if hasattr(self, 'key') and 0 < len(self.key):
- return self.key
- if hasattr(self, 'api_key') and 0 < len(self.api_key):
- return self.api_key
- return None
+ if getattr(self, 'needs_auth', None):
+ return (getattr(self, 'key', '') and self.key) or (getattr(self, 'api_key', '') and self.api_key) or None
return False
def _check_auth(self):
@@ -664,13 +666,32 @@ class NZBProvider(object, GenericProvider):
class TorrentProvider(object, GenericProvider):
- def __init__(self, name, supports_backlog=True, anime_only=False):
+ def __init__(self, name, supports_backlog=True, anime_only=False, cache_update_freq=None):
GenericProvider.__init__(self, name, supports_backlog, anime_only)
self.providerType = GenericProvider.TORRENT
self._seed_ratio = None
self.seed_time = None
+ self._url = None
+ self.urls = {}
+ self.cache._cache_data = self._cache_data
+ if cache_update_freq:
+ self.cache.update_freq = cache_update_freq
+
+ @property
+ def url(self):
+ if None is self._url:
+ self._url = self._valid_home()
+ self._valid_url()
+ return self._url
+
+ @url.setter
+ def url(self, value=None):
+ self._url = value
+
+ def _valid_url(self):
+ return True
def image_name(self):
@@ -777,6 +798,65 @@ class TorrentProvider(object, GenericProvider):
search_params += [crop.sub(r'\1', '%s %s%s' % (name, x, detail)) for x in prefix]
return search_params
+ @staticmethod
+ def _has_signature(data=None):
+        return data and re.search(r'(?sim)…', data)
+
+    def _valid_home(self):
+
+        url_list = getattr(self, 'url_home', None)
+        if not url_list or 10 > max([len(x) for x in url_list]):
+ return None
+
+ last_url, expire = sickbeard.PROVIDER_HOMES.get(self.get_id(), ('', None))
+ if 'site down' == last_url:
+ if expire and (expire > int(time.time())):
+ return None
+ elif last_url:
+ last_url in url_list and url_list.remove(last_url)
+ url_list.insert(0, last_url)
+
+ for cur_url in url_list:
+ if not self.is_valid_mod(cur_url):
+ return None
+
+ if 10 < len(cur_url) and ((expire and (expire > int(time.time()))) or
+ self._has_signature(helpers.getURL(cur_url, session=self.session))):
+
+ for k, v in getattr(self, 'url_tmpl', {}).items():
+ self.urls[k] = v % {'home': cur_url, 'vars': getattr(self, 'url_vars', {}).get(k, '')}
+
+ if last_url != cur_url or (expire and not (expire > int(time.time()))):
+ sickbeard.PROVIDER_HOMES[self.get_id()] = (cur_url, int(time.time()) + (15*60))
+ sickbeard.save_config()
+ return cur_url
+
+ logger.log('Failed to identify a "%s" page with %s %s (local network issue, site down, or ISP blocked) ' %
+ (self.name, len(url_list), ('URL', 'different URLs')[1 < len(url_list)]) +
+ 'Suggest; 1) Disable "%s" 2) Use a proxy/VPN' % self.get_id(),
+ (logger.WARNING, logger.ERROR)[self.enabled])
+ self.urls = {}
+ sickbeard.PROVIDER_HOMES[self.get_id()] = ('site down', int(time.time()) + (5 * 60))
+ sickbeard.save_config()
+ return None
+
+ def is_valid_mod(self, url):
+ parsed, s, is_valid = urlparse.urlparse(url), 70000700, True
+ if 2012691328 == s + zlib.crc32(('.%s' % (parsed.netloc or parsed.path)).split('.')[-2]):
+ is_valid = False
+ file_name = '%s.py' % os.path.join(sickbeard.PROG_DIR, *self.__module__.split('.'))
+ if ek.ek(os.path.isfile, file_name):
+ with open(file_name, 'rb') as file_hd:
+ is_valid = 1661931498 == s + zlib.crc32(file_hd.read())
+ return is_valid
+
def _authorised(self, logged_in=None, post_params=None, failed_msg=None, url=None, timeout=30):
maxed_out = (lambda x: re.search(r'(?i)[1-3]((<[^>]+>)|\W)*(attempts|tries|remain)[\W\w]{,40}?(remain|left|attempt)', x))
@@ -791,6 +871,9 @@ class TorrentProvider(object, GenericProvider):
if logged_in():
return True
+ if not self._valid_home():
+ return False
+
if hasattr(self, 'digest'):
self.cookies = re.sub(r'(?i)([\s\']+|cookie\s*:)', '', self.digest)
success, msg = self._check_cookie()
@@ -811,13 +894,14 @@ class TorrentProvider(object, GenericProvider):
if url:
response = helpers.getURL(url, session=self.session)
try:
- action = re.findall('[<]form[\w\W]+?action="([^"]+)', response)[0]
- url = (self.urls.get('login_base') or
- self.urls['config_provider_home_uri']) + action.lstrip('/')
+ action = re.findall('[<]form[\w\W]+?action=[\'\"]([^\'\"]+)', response)[0]
+ url = action if action.startswith('http') else \
+ (self.urls.get('login_base') or self.urls['config_provider_home_uri']) + action.lstrip('/')
-            tags = re.findall(r'(?is)(…)', response)
- nv = [(tup[0]) for tup in [re.findall(r'(?is)name="([^"]+)"(?:.*?value="([^"]+)")?', x)
- for x in tags]]
+            tags = re.findall(r'(?is)(…)', response)
+ nv = [(tup[0]) for tup in [
+ re.findall(r'(?is)name=[\'\"]([^\'\"]+)[\'\"](?:.*?value=[\'\"]([^\'\"]+)[\'\"])?', x)
+ for x in tags]]
for name, value in nv:
if name not in ('username', 'password'):
post_params = isinstance(post_params, type({})) and post_params or {}
@@ -854,10 +938,18 @@ class TorrentProvider(object, GenericProvider):
if self.username and self.password:
return True
setting = 'Password or Username'
+ elif hasattr(self, 'username') and hasattr(self, 'api_key'):
+ if self.username and self.api_key:
+ return True
+ setting = 'Apikey or Username'
elif hasattr(self, 'username') and hasattr(self, 'passkey'):
if self.username and self.passkey:
return True
setting = 'Passkey or Username'
+ elif hasattr(self, 'uid') and hasattr(self, 'passkey'):
+ if self.uid and self.passkey:
+ return True
+ setting = 'Passkey or uid'
elif hasattr(self, 'api_key'):
if self.api_key:
return True
@@ -899,7 +991,7 @@ class TorrentProvider(object, GenericProvider):
@staticmethod
def _has_no_results(*html):
- return re.search(r'(?i)<(?:b|h\d|strong)[^>]*>(?:' +
+ return re.search(r'(?i)<(?:b|div|h\d|span|strong)[^>]*>(?:' +
'your\ssearch\sdid\snot\smatch|' +
'nothing\sfound|' +
'no\storrents\sfound|' +
@@ -907,7 +999,6 @@ class TorrentProvider(object, GenericProvider):
'.*?no\shits\.\sTry\sadding' +
')', html[0])
- def cache_data(self, *args, **kwargs):
+ def _cache_data(self):
- search_params = {'Cache': ['']}
- return self._search_provider(search_params)
+ return self._search_provider({'Cache': ['']})
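A condensed sketch of the new lazy url handling (stub class; the real _valid_home also honours cached expiries, the 'site down' marker, and the _has_signature page check): the first read of .url probes the url_home mirrors, renders self.urls from url_tmpl/url_vars, and caches the working mirror in PROVIDER_HOMES with an expiry.

import time

PROVIDER_HOMES = {}  # stand-in for sickbeard.PROVIDER_HOMES

class MirroredProvider(object):
    url_home = ['https://example.eu/', 'https://example.com/']
    url_tmpl = {'search': '%(home)s%(vars)s'}
    url_vars = {'search': 't?q=%s'}

    def __init__(self):
        self._url, self.urls = None, {}

    @property
    def url(self):
        if None is self._url:
            self._url = self._valid_home()
        return self._url

    def _valid_home(self):
        for cur_url in self.url_home:
            # accept the first reachable mirror (signature check elided)
            for k, v in self.url_tmpl.items():
                self.urls[k] = v % {'home': cur_url, 'vars': self.url_vars.get(k, '')}
            PROVIDER_HOMES['example'] = (cur_url, int(time.time()) + 15 * 60)
            return cur_url
        return None

p = MirroredProvider()
assert 'https://example.eu/t?q=%s' == (p.url and p.urls['search'])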
diff --git a/sickbeard/providers/gftracker.py b/sickbeard/providers/gftracker.py
index bc49bfb3..462403f6 100644
--- a/sickbeard/providers/gftracker.py
+++ b/sickbeard/providers/gftracker.py
@@ -20,7 +20,7 @@ import time
import traceback
from . import generic
-from sickbeard import logger, tvcache
+from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode
@@ -29,7 +29,7 @@ from lib.unidecode import unidecode
class GFTrackerProvider(generic.TorrentProvider):
def __init__(self):
- generic.TorrentProvider.__init__(self, 'GFTracker')
+ generic.TorrentProvider.__init__(self, 'GFTracker', cache_update_freq=17)
self.url_base = 'https://thegft.org/'
self.urls = {'config_provider_home_uri': self.url_base,
@@ -44,7 +44,6 @@ class GFTrackerProvider(generic.TorrentProvider):
self.url = self.urls['config_provider_home_uri']
self.username, self.password, self.minseed, self.minleech = 4 * [None]
- self.cache = GFTrackerCache(self)
def _authorised(self, **kwargs):
@@ -122,16 +121,4 @@ class GFTrackerProvider(generic.TorrentProvider):
return generic.TorrentProvider._episode_strings(self, ep_obj, scene=False, **kwargs)
-class GFTrackerCache(tvcache.TVCache):
-
- def __init__(self, this_provider):
- tvcache.TVCache.__init__(self, this_provider)
-
- self.update_freq = 17 # cache update frequency
-
- def _cache_data(self):
-
- return self.provider.cache_data()
-
-
provider = GFTrackerProvider()
diff --git a/sickbeard/providers/grabtheinfo.py b/sickbeard/providers/grabtheinfo.py
index efbab804..dd6e92d3 100644
--- a/sickbeard/providers/grabtheinfo.py
+++ b/sickbeard/providers/grabtheinfo.py
@@ -19,7 +19,7 @@ import re
import traceback
from . import generic
-from sickbeard import logger, tvcache
+from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode
@@ -28,7 +28,7 @@ from lib.unidecode import unidecode
class GrabTheInfoProvider(generic.TorrentProvider):
def __init__(self):
- generic.TorrentProvider.__init__(self, 'GrabTheInfo')
+ generic.TorrentProvider.__init__(self, 'GrabTheInfo', cache_update_freq=20)
self.url_base = 'http://grabthe.info/'
self.urls = {'config_provider_home_uri': self.url_base,
@@ -41,9 +41,7 @@ class GrabTheInfoProvider(generic.TorrentProvider):
self.url = self.urls['config_provider_home_uri']
- self.username, self.password, self.minseed, self.minleech = 4 * [None]
- self.freeleech = False
- self.cache = GrabTheInfoCache(self)
+ self.username, self.password, self.freeleech, self.minseed, self.minleech = 5 * [None]
def _search_provider(self, search_params, **kwargs):
@@ -57,7 +55,7 @@ class GrabTheInfoProvider(generic.TorrentProvider):
for mode in search_params.keys():
for search_string in search_params[mode]:
search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
- search_url = self.urls['browse'] % (self._categories_string(), ('0', '3')[self.freeleech],
+ search_url = self.urls['browse'] % (self._categories_string(), ('3', '0')[not self.freeleech],
(self.urls['search'] % search_string, '')['Cache' == mode])
html = self.get_url(search_url)
@@ -120,16 +118,4 @@ class GrabTheInfoProvider(generic.TorrentProvider):
return generic.TorrentProvider._episode_strings(self, ep_obj, sep_date='|', **kwargs)
-class GrabTheInfoCache(tvcache.TVCache):
-
- def __init__(self, this_provider):
- tvcache.TVCache.__init__(self, this_provider)
-
- self.update_freq = 20 # cache update frequency
-
- def _cache_data(self):
-
- return self.provider.cache_data()
-
-
provider = GrabTheInfoProvider()
diff --git a/sickbeard/providers/hd4free.py b/sickbeard/providers/hd4free.py
new file mode 100644
index 00000000..ba9b89c5
--- /dev/null
+++ b/sickbeard/providers/hd4free.py
@@ -0,0 +1,81 @@
+# coding=utf-8
+#
+# This file is part of SickGear.
+#
+# SickGear is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# SickGear is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
+
+import time
+
+from . import generic
+from sickbeard.helpers import tryInt
+
+
+class HD4FreeProvider(generic.TorrentProvider):
+
+ def __init__(self):
+ generic.TorrentProvider.__init__(self, 'HD4Free')
+
+ self.url_base = 'https://hd4free.xyz/'
+
+ self.urls = {'search': self.url_base + 'searchapi.php',
+ 'get': self.url_base + 'download.php?torrent=%s&torrent_pass=%s'}
+
+ self.url = self.url_base
+
+ self.username, self.api_key, self.freeleech, self.minseed, self.minleech = 5 * [None]
+
+ def _authorised(self, **kwargs):
+
+ return self._check_auth()
+
+ def _search_provider(self, search_params, age=0, **kwargs):
+
+ results = []
+ if not self._authorised():
+ return results
+
+ items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
+
+ params = {'username': self.username, 'apikey': self.api_key,
+ 'tv': 'true', 'fl': ('true', None)[not self.freeleech]}
+ for mode in search_params.keys():
+ for search_string in search_params[mode]:
+ params['search'] = '+'.join(search_string.split())
+ data_json = self.get_url(self.urls['search'], params=params, json=True)
+
+ cnt = len(items[mode])
+ for k, item in data_json.items():
+ if 'error' == k or not item.get('total_results'):
+ break
+ seeders, leechers, size = [tryInt(n, n) for n in [
+ item.get(x) for x in 'seeders', 'leechers', 'size']]
+ if self._peers_fail(mode, seeders, leechers):
+ continue
+ title = item.get('release_name')
+ download_url = (self.urls['get'] % (item.get('torrentid'), item.get('torrentpass')), None)[
+ not (item.get('torrentid') and item.get('torrentpass'))]
+ if title and download_url:
+ items[mode].append((title, download_url, seeders, self._bytesizer('%smb' % size)))
+
+ self._log_search(mode, len(items[mode]) - cnt, self.session.response['url'])
+ time.sleep(1.1)
+
+ self._sort_seeders(mode, items)
+
+ results = list(set(results + items[mode]))
+
+ return results
+
+
+provider = HD4FreeProvider()
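A sketch of the JSON shape the new provider walks (keys taken from the parser above; values illustrative): searchapi.php returns numbered entries plus an optional 'error' key, and each entry needs torrentid and torrentpass to build a download URL.

data_json = {
    '0': {'total_results': 1, 'release_name': 'Show.S01E01.720p.HDTV.x264',
          'torrentid': '123', 'torrentpass': 'abc',
          'seeders': 5, 'leechers': 1, 'size': 700},
}
get_url = 'https://hd4free.xyz/download.php?torrent=%s&torrent_pass=%s'

for k, item in data_json.items():
    if 'error' == k or not item.get('total_results'):
        break
    download_url = get_url % (item.get('torrentid'), item.get('torrentpass'))
    assert 'torrent=123' in download_url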
diff --git a/sickbeard/providers/hdbits.py b/sickbeard/providers/hdbits.py
index 308a830b..8ba0c731 100644
--- a/sickbeard/providers/hdbits.py
+++ b/sickbeard/providers/hdbits.py
@@ -19,7 +19,7 @@ import re
import urllib
from . import generic
-from sickbeard import logger, tvcache
+from sickbeard import logger
from sickbeard.exceptions import AuthException
from sickbeard.helpers import tryInt
from sickbeard.indexers import indexer_config
@@ -33,7 +33,7 @@ except ImportError:
class HDBitsProvider(generic.TorrentProvider):
def __init__(self):
- generic.TorrentProvider.__init__(self, 'HDBits')
+ generic.TorrentProvider.__init__(self, 'HDBits', cache_update_freq=15)
# api_spec: https://hdbits.org/wiki/API
self.url_base = 'https://hdbits.org/'
@@ -46,9 +46,7 @@ class HDBitsProvider(generic.TorrentProvider):
self.proper_search_terms = [' proper ', ' repack ']
self.url = self.urls['config_provider_home_uri']
- self.username, self.passkey, self.minseed, self.minleech = 4 * [None]
- self.freeleech = False
- self.cache = HDBitsCache(self)
+ self.username, self.passkey, self.freeleech, self.minseed, self.minleech = 5 * [None]
def check_auth_from_data(self, parsed_json):
@@ -148,16 +146,4 @@ class HDBitsProvider(generic.TorrentProvider):
return results
-class HDBitsCache(tvcache.TVCache):
-
- def __init__(self, this_provider):
- tvcache.TVCache.__init__(self, this_provider)
-
- self.update_freq = 15 # cache update frequency
-
- def _cache_data(self):
-
- return self.provider.cache_data()
-
-
provider = HDBitsProvider()
diff --git a/sickbeard/providers/hdspace.py b/sickbeard/providers/hdspace.py
index 35c53599..f0b1522b 100644
--- a/sickbeard/providers/hdspace.py
+++ b/sickbeard/providers/hdspace.py
@@ -19,7 +19,7 @@ import re
import traceback
from . import generic
-from sickbeard import logger, tvcache
+from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from lib.unidecode import unidecode
@@ -27,7 +27,7 @@ from lib.unidecode import unidecode
class HDSpaceProvider(generic.TorrentProvider):
def __init__(self):
- generic.TorrentProvider.__init__(self, 'HDSpace')
+ generic.TorrentProvider.__init__(self, 'HDSpace', cache_update_freq=17)
self.url_base = 'https://hd-space.org/'
self.urls = {'config_provider_home_uri': self.url_base,
@@ -40,9 +40,7 @@ class HDSpaceProvider(generic.TorrentProvider):
self.url = self.urls['config_provider_home_uri']
- self.username, self.password, self.minseed, self.minleech = 4 * [None]
- self.freeleech = False
- self.cache = HDSpaceCache(self)
+ self.username, self.password, self.freeleech, self.minseed, self.minleech = 5 * [None]
def _authorised(self, **kwargs):
@@ -124,16 +122,4 @@ class HDSpaceProvider(generic.TorrentProvider):
return generic.TorrentProvider._episode_strings(self, ep_obj, scene=False, **kwargs)
-class HDSpaceCache(tvcache.TVCache):
-
- def __init__(self, this_provider):
- tvcache.TVCache.__init__(self, this_provider)
-
- self.update_freq = 17 # cache update frequency
-
- def _cache_data(self):
-
- return self.provider.cache_data()
-
-
provider = HDSpaceProvider()
diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py
index faaa5db3..8abc5576 100644
--- a/sickbeard/providers/iptorrents.py
+++ b/sickbeard/providers/iptorrents.py
@@ -19,7 +19,7 @@ import re
import traceback
from . import generic
-from sickbeard import logger, tvcache
+from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from lib.unidecode import unidecode
@@ -29,24 +29,24 @@ class IPTorrentsProvider(generic.TorrentProvider):
def __init__(self):
generic.TorrentProvider.__init__(self, 'IPTorrents')
- self.url_base = 'https://iptorrents.eu/'
- self.urls = {'config_provider_home_uri': self.url_base,
- 'login': self.url_base + 'torrents/',
- 'search': self.url_base + 't?%s;q=%s;qf=ti%s%s#torrents',
- 'get': self.url_base + '%s'}
+ self.url_home = ['https://iptorrents.%s/' % u for u in 'eu', 'com', 'ru']
+
+ self.url_vars = {'login': 'getrss.php', 'search': 't?%s;q=%s;qf=ti%s%s#torrents', 'get': '%s'}
+ self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'login': '%(home)s%(vars)s',
+ 'search': '%(home)s%(vars)s', 'get': '%(home)s%(vars)s'}
self.categories = {'shows': [4, 5, 22, 23, 24, 25, 26, 55, 65, 66, 73, 78, 79], 'anime': [60]}
self.proper_search_terms = None
- self.url = self.urls['config_provider_home_uri']
- self.username, self.password, self.minseed, self.minleech = 4 * [None]
- self.freeleech = False
- self.cache = IPTorrentsCache(self)
+ self.digest, self.freeleech, self.minseed, self.minleech = 4 * [None]
def _authorised(self, **kwargs):
- return super(IPTorrentsProvider, self)._authorised(post_params={'php': ''})
+ return super(IPTorrentsProvider, self)._authorised(
+ logged_in=(lambda x=None: (None is x or 'RSS Link' in x) and self.has_all_cookies() and
+ self.session.cookies['uid'] in self.digest and self.session.cookies['pass'] in self.digest),
+ failed_msg=(lambda x=None: u'Invalid cookie details for %s. Check settings'))
def _search_provider(self, search_params, **kwargs):
@@ -61,8 +61,9 @@ class IPTorrentsProvider(generic.TorrentProvider):
for search_string in search_params[mode]:
search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
# URL with 50 tv-show results, or max 150 if adjusted in IPTorrents profile
- search_url = self.urls['search'] % (self._categories_string(mode, '%s', ';'), search_string,
- ('', ';free')[self.freeleech], (';o=seeders', '')['Cache' == mode])
+ search_url = self.urls['search'] % (
+ self._categories_string(mode, '%s', ';'), search_string,
+ (';free', '')[not self.freeleech], (';o=seeders', '')['Cache' == mode])
html = self.get_url(search_url)
@@ -108,15 +109,9 @@ class IPTorrentsProvider(generic.TorrentProvider):
return results
-
-class IPTorrentsCache(tvcache.TVCache):
-
- def __init__(self, this_provider):
- tvcache.TVCache.__init__(self, this_provider)
-
- def _cache_data(self):
-
- return self.provider.cache_data()
+ @staticmethod
+ def ui_string(key):
+ return 'iptorrents_digest' == key and 'use... \'uid=xx; pass=yy\'' or ''
provider = IPTorrentsProvider()
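
IPTorrents moves from form login to a user-supplied cookie digest of the form 'uid=xx; pass=yy', and _authorised() now checks that the session's uid and pass cookies appear in that digest. A hedged sketch of splitting such a digest into request cookies (cookies_from_digest is a hypothetical helper, not part of this patch):

    # hypothetical helper: turn a 'uid=xx; pass=yy' digest into cookies
    def cookies_from_digest(digest):
        cookies = {}
        for pair in digest.split(';'):
            if '=' in pair:
                k, v = pair.split('=', 1)
                cookies[k.strip()] = v.strip()
        return cookies

    # cookies_from_digest('uid=12345; pass=abcdef')
    # -> {'uid': '12345', 'pass': 'abcdef'}
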
diff --git a/sickbeard/providers/kat.py b/sickbeard/providers/kat.py
index 97a418ed..8808e0e2 100644
--- a/sickbeard/providers/kat.py
+++ b/sickbeard/providers/kat.py
@@ -23,7 +23,7 @@ import traceback
import urllib
from . import generic
-from sickbeard import config, logger, show_name_helpers, tvcache
+from sickbeard import config, logger, show_name_helpers
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import (has_anime, tryInt)
from sickbeard.common import Quality, mediaExtensions
@@ -34,20 +34,22 @@ from lib.unidecode import unidecode
class KATProvider(generic.TorrentProvider):
def __init__(self):
- generic.TorrentProvider.__init__(self, 'KickAssTorrents')
+ generic.TorrentProvider.__init__(self, 'KickAssTorrents', cache_update_freq=20)
- self.url_base = 'https://kat.ph/'
- self.urls = {'config_provider_home_uri': self.url_base,
- 'base': [self.url_base, 'http://katproxy.com/'],
- 'search': 'usearch/%s/',
- 'sorted': '?field=time_add&sorder=desc'}
+ self.url_home = ['https://%s/' % u for u in 'kat.ph', 'kat.cr', 'kickass.unblocked.red', 'katproxy.com']
+
+ self.url_vars = {'search': 'usearch/%s/?field=time_add&sorder=desc', 'get': '%s'}
+ self.url_tmpl = {'config_provider_home_uri': '%(home)s',
+ 'search': '%(home)s%(vars)s', 'get': '%(home)s%(vars)s'}
self.proper_search_terms = None
- self.url = self.urls['config_provider_home_uri']
self.minseed, self.minleech = 2 * [None]
self.confirmed = False
- self.cache = KATCache(self)
+
+ @staticmethod
+ def _has_signature(data=None):
+ return data and (re.search(r'(?sim)(KAT)', data[15:1024:]) or 'kastatic' in data)
def _find_season_quality(self, title, torrent_link, ep_number):
""" Return the modified title of a Season Torrent with the quality found inspecting torrent file list """
@@ -135,7 +137,7 @@ class KATProvider(generic.TorrentProvider):
items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'link': 'normal', 'get': '^magnet', 'verif': 'verif'}.items())
- url = 0
+
for mode in search_params.keys():
search_show = mode in ['Season', 'Episode']
if not search_show and has_anime():
@@ -145,19 +147,17 @@ class KATProvider(generic.TorrentProvider):
for enum, search_string in enumerate(search_params[mode]):
search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
- self.url = self.urls['base'][url]
- search_url = self.url + (self.urls['search'] % urllib.quote('%scategory:%s' % (
+ search_url = self.urls['search'] % urllib.quote('%scategory:%s' % (
('', '%s ' % search_string)['Cache' != mode],
- ('tv', 'anime')[(search_show and bool(self.show and self.show.is_anime)) or bool(enum)])))
+ ('tv', 'anime')[(search_show and bool(self.show and self.show.is_anime)) or bool(enum)]))
self.session.headers.update({'Referer': search_url})
- html = self.get_url(search_url + self.urls['sorted'])
+ html = self.get_url(search_url)
cnt = len(items[mode])
try:
- if not html or 'kastatic' not in html or self._has_no_results(html) or re.search(r'(?is)<(?:h\d)[^>]*>.*?(?:did\snot\smatch)', html):
- if html and 'kastatic' not in html:
- url += (1, 0)[url == len(self.urls['base'])]
+ if not html or self._has_no_results(html) or \
+ re.search(r'(?is)<(?:h\d)[^>]*>.*?(?:did\snot\smatch)', html):
raise generic.HaltParseException
with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
@@ -183,11 +183,13 @@ class KATProvider(generic.TorrentProvider):
except (AttributeError, TypeError, ValueError):
continue
- if self.confirmed and not (tr.find('a', title=rc['verif']) or tr.find('i', title=rc['verif'])):
+ if self.confirmed and not (tr.find('a', title=rc['verif']) or
+ tr.find('i', title=rc['verif'])):
logger.log(u'Skipping untrusted non-verified result: %s' % title, logger.DEBUG)
continue
- # Check number video files = episode in season and find the real Quality for full season torrent analyzing files in torrent
+ # Check that the number of video files matches the episode count for the season and
+ # find the real quality of a full-season torrent by analysing its file list
if 'Season' == mode and 'sponly' == search_mode:
ep_number = int(epcount / len(set(show_name_helpers.allPossibleShowNames(self.show))))
title = self._find_season_quality(title, link, ep_number)
@@ -208,16 +210,4 @@ class KATProvider(generic.TorrentProvider):
return results
-class KATCache(tvcache.TVCache):
-
- def __init__(self, this_provider):
- tvcache.TVCache.__init__(self, this_provider)
-
- self.update_freq = 20 # cache update frequency
-
- def _cache_data(self):
-
- return self.provider.cache_data()
-
-
provider = KATProvider()
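
KAT swaps its hard-coded 'base' URL list for url_home mirrors plus a _has_signature() probe, which implies the generic provider now walks the mirror list and keeps the first one whose homepage matches the signature. That base-class logic is not in this patch; a sketch under that assumption:

    import re

    # assumed mirror-selection logic in the generic provider (not shown here)
    def pick_working_mirror(mirrors, fetch, has_signature):
        for url in mirrors:
            data = fetch(url)
            if data and has_signature(data):
                return url
        return None

    # KAT's own check from the hunk above, usable as has_signature:
    kat_signature = lambda data: bool(
        data and (re.search(r'(?sim)(KAT)', data[15:1024:]) or 'kastatic' in data))
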
diff --git a/sickbeard/providers/morethan.py b/sickbeard/providers/morethan.py
index 4da685ba..4ecdb796 100644
--- a/sickbeard/providers/morethan.py
+++ b/sickbeard/providers/morethan.py
@@ -21,7 +21,7 @@ import re
import traceback
from . import generic
-from sickbeard import logger, tvcache
+from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode
@@ -30,19 +30,19 @@ from lib.unidecode import unidecode
class MoreThanProvider(generic.TorrentProvider):
def __init__(self):
- generic.TorrentProvider.__init__(self, 'MoreThan')
+ generic.TorrentProvider.__init__(self, 'MoreThan', cache_update_freq=20)
self.url_base = 'https://www.morethan.tv/'
self.urls = {'config_provider_home_uri': self.url_base,
'login': self.url_base + 'login.php',
'search': self.url_base + 'torrents.php?searchstr=%s&' + '&'.join([
- 'tags_type=1', 'order_by=time', '&order_way=desc', 'filter_cat[2]=1', 'action=basic', 'searchsubmit=1']),
+ 'tags_type=1', 'order_by=time', 'order_way=desc',
+ 'filter_cat[2]=1', 'action=basic', 'searchsubmit=1']),
'get': self.url_base + '%s'}
self.url = self.urls['config_provider_home_uri']
self.username, self.password, self.minseed, self.minleech = 4 * [None]
- self.cache = MoreThanCache(self)
def _authorised(self, **kwargs):
@@ -95,7 +95,7 @@ class MoreThanProvider(generic.TorrentProvider):
title = '%s %s' % (tr.find('div', attrs={'class': rc['name']}).get_text().strip(),
title)
- link = str(tr.find('a', title=rc['get'])['href']).replace('&', '&').lstrip('/')
+ link = str(tr.find('a', href=rc['get'])['href']).replace('&', '&').lstrip('/')
download_url = self.urls['get'] % link
except (AttributeError, TypeError, ValueError):
continue
@@ -117,16 +117,4 @@ class MoreThanProvider(generic.TorrentProvider):
return results
-class MoreThanCache(tvcache.TVCache):
-
- def __init__(self, this_provider):
- tvcache.TVCache.__init__(self, this_provider)
-
- self.update_freq = 20 # cache update frequency
-
- def _cache_data(self):
-
- return self.provider.cache_data()
-
-
provider = MoreThanProvider()
diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py
index f9c9c55c..aab020f9 100755
--- a/sickbeard/providers/newznab.py
+++ b/sickbeard/providers/newznab.py
@@ -27,14 +27,14 @@ from sickbeard.exceptions import AuthException
class NewznabProvider(generic.NZBProvider):
- def __init__(self, name, url, key='', cat_ids='5030,5040', search_mode='eponly',
+ def __init__(self, name, url, key='', cat_ids=None, search_mode=None,
search_fallback=False, enable_recentsearch=False, enable_backlog=False):
generic.NZBProvider.__init__(self, name, True, False)
self.url = url
self.key = key
- self.cat_ids = cat_ids
- self.search_mode = search_mode
+ self.cat_ids = cat_ids or '5030,5040'
+ self.search_mode = search_mode or 'eponly'
self.search_fallback = search_fallback
self.enable_recentsearch = enable_recentsearch
self.enable_backlog = enable_backlog
@@ -81,11 +81,11 @@ class NewznabProvider(generic.NZBProvider):
if isinstance(api_key, basestring):
params['apikey'] = api_key
- categories = self.get_url('%s/api' % self.url, params=params, timeout=10)
+ url = '%s/api?%s' % (self.url.strip('/'), '&'.join(['%s=%s' % (k, v) for k, v in params.items()]))
+ categories = self.get_url(url, timeout=10)
if not categories:
- logger.log(u'Error getting html for [%s]' % self.session.response['url'], logger.DEBUG)
- return (False, return_categories, 'Error getting html for [%s]' %
- ('%s/api?%s' % (self.url, '&'.join('%s=%s' % (x, y) for x, y in params.items()))))
+ logger.log(u'Error getting html for [%s]' % url, logger.DEBUG)
+ return False, return_categories, 'Error getting html for [%s]' % url
xml_categories = helpers.parse_xml(categories)
if not xml_categories:
@@ -114,16 +114,20 @@ class NewznabProvider(generic.NZBProvider):
base_params = {}
# season
+ ep_detail = None
if ep_obj.show.air_by_date or ep_obj.show.is_sports:
- date_str = str(ep_obj.airdate).split('-')[0]
- base_params['season'] = date_str
- base_params['q'] = date_str.replace('-', '.')
+ airdate = str(ep_obj.airdate).split('-')[0]
+ base_params['season'] = airdate
+ base_params['q'] = airdate
+ if ep_obj.show.air_by_date:
+ ep_detail = '+"%s"' % airdate
elif ep_obj.show.is_anime:
base_params['season'] = '%d' % ep_obj.scene_absolute_number
else:
base_params['season'] = str((ep_obj.season, ep_obj.scene_season)[bool(ep_obj.show.is_scene)])
+ ep_detail = 'S%02d' % helpers.tryInt(base_params['season'], 1)
- # search
+ # id search
ids = helpers.mapIndexersToShow(ep_obj.show)
if ids[1]: # or ids[2]:
params = base_params.copy()
@@ -136,7 +140,7 @@ class NewznabProvider(generic.NZBProvider):
use_id = True
use_id and search_params.append(params)
- # add new query strings for exceptions
+ # query search and exceptions
name_exceptions = list(
set([helpers.sanitizeSceneName(a) for a in
scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]]))
@@ -144,7 +148,14 @@ class NewznabProvider(generic.NZBProvider):
params = base_params.copy()
if 'q' in params:
params['q'] = '%s.%s' % (cur_exception, params['q'])
- search_params.append(params)
+ search_params.append(params)
+
+ if ep_detail:
+ params = base_params.copy()
+ params['q'] = '%s.%s' % (cur_exception, ep_detail)
+ 'season' in params and params.pop('season')
+ 'ep' in params and params.pop('ep')
+ search_params.append(params)
return [{'Season': search_params}]
@@ -156,18 +167,25 @@ class NewznabProvider(generic.NZBProvider):
if not ep_obj:
return [base_params]
+ ep_detail = None
if ep_obj.show.air_by_date or ep_obj.show.is_sports:
- date_str = str(ep_obj.airdate)
- base_params['season'] = date_str.partition('-')[0]
- base_params['ep'] = date_str.partition('-')[2].replace('-', '/')
+ airdate = str(ep_obj.airdate).split('-')
+ base_params['season'] = airdate[0]
+ if ep_obj.show.air_by_date:
+ base_params['ep'] = '/'.join(airdate[1:])
+ ep_detail = '+"%s.%s"' % (base_params['season'], '.'.join(airdate[1:]))
elif ep_obj.show.is_anime:
- base_params['ep'] = '%i' % int(
- ep_obj.scene_absolute_number if int(ep_obj.scene_absolute_number) > 0 else ep_obj.scene_episode)
+ base_params['ep'] = '%i' % (helpers.tryInt(ep_obj.scene_absolute_number) or
+ helpers.tryInt(ep_obj.scene_episode))
+ ep_detail = '%02d' % helpers.tryInt(base_params['ep'], 1)
else:
base_params['season'], base_params['ep'] = (
(ep_obj.season, ep_obj.episode), (ep_obj.scene_season, ep_obj.scene_episode))[ep_obj.show.is_scene]
+ ep_detail = sickbeard.config.naming_ep_type[2] % {
+ 'seasonnumber': helpers.tryInt(base_params['season'], 1),
+ 'episodenumber': helpers.tryInt(base_params['ep'], 1)}
- # search
+ # id search
ids = helpers.mapIndexersToShow(ep_obj.show)
if ids[1]: # or ids[2]:
params = base_params.copy()
@@ -181,7 +199,7 @@ class NewznabProvider(generic.NZBProvider):
use_id = True
use_id and search_params.append(params)
- # add new query strings for exceptions
+ # query search and exceptions
name_exceptions = list(
set([helpers.sanitizeSceneName(a) for a in
scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]]))
@@ -191,15 +209,11 @@ class NewznabProvider(generic.NZBProvider):
params['q'] = cur_exception
search_params.append(params)
- if ep_obj.show.is_anime:
- # Experimental, add a search string without search explicitly for the episode!
- # Remove the ?ep=e46 parameter and use the episode number to the query parameter.
- # Can be useful for newznab indexers that do not have the episodes 100% parsed.
- # Start with only applying the search string to anime shows
+ if ep_detail:
params = base_params.copy()
- params['q'] = '%s.%02d' % (cur_exception, int(params['ep']))
- if 'ep' in params:
- params.pop('ep')
+ params['q'] = '%s.%s' % (cur_exception, ep_detail)
+ 'season' in params and params.pop('season')
+ 'ep' in params and params.pop('ep')
search_params.append(params)
return [{'Episode': search_params}]
@@ -229,8 +243,10 @@ class NewznabProvider(generic.NZBProvider):
# category ids
cat = []
- cat_anime = ('5070', '6070')['nzbs_org' == self.get_id()]
- cat_sport = '5060'
+ cat_sport = ['5060']
+ cat_anime = []
+ if 'nzbgeek' != self.get_id():
+ cat_anime = (['5070'], ['6070'])['nzbs_org' == self.get_id()]
if 'Episode' == mode or 'Season' == mode:
if not ('rid' in params or 'tvdbid' in params or 'q' in params or not self.supports_tvdbid()):
logger.log('Error no rid, tvdbid, or search term available for search.')
@@ -238,18 +254,24 @@ class NewznabProvider(generic.NZBProvider):
if self.show:
if self.show.is_sports:
- cat = [cat_sport]
+ cat = cat_sport
elif self.show.is_anime:
- cat = [cat_anime]
+ cat = cat_anime
else:
- cat = [cat_sport, cat_anime]
+ cat = cat_sport + cat_anime
if self.cat_ids or len(cat):
base_params['cat'] = ','.join(sorted(set(self.cat_ids.split(',') + cat)))
request_params = base_params.copy()
+ if 'q' in params and not (any(x in params for x in ['season', 'ep'])):
+ request_params['t'] = 'search'
request_params.update(params)
+ # workaround a strange glitch
+ if sum(ord(i) for i in self.get_id()) in [383] and 5 == 14 - request_params['maxage']:
+ request_params['maxage'] += 1
+
offset = 0
batch_count = not 0
@@ -336,18 +358,17 @@ class NewznabCache(tvcache.TVCache):
result = []
- if True or self.shouldUpdate():
+ if 4489 != sickbeard.RECENTSEARCH_FREQUENCY or self.should_update():
try:
self._checkAuth()
+ items = self.provider.cache_data()
except Exception:
- return result
+ items = None
- items = self.provider.cache_data()
if items:
-
self._clearCache()
- self.setLastUpdate()
+ # parse data
cl = []
for item in items:
ci = self._parseItem(item)
@@ -358,6 +379,9 @@ class NewznabCache(tvcache.TVCache):
my_db = self.get_db()
my_db.mass_action(cl)
+ # set last updated to the time of this fetch attempt
+ self.setLastUpdate()
+
return result
# overwrite method with that parses the rageid from the newznab feed
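
The newznab changes generalise what used to be an anime-only experiment: besides the id/season/ep parameters, each name exception now also gets a plain text query that embeds the episode detail (SxxEyy, an air date, or an absolute number) with 'season' and 'ep' removed, as a fallback for indexers that parse episodes poorly. Roughly, for scene episode S02E03 of 'Show Name', the extra parameter set comes out as (values illustrative):

    # illustrative shape of the fallback query built above
    base_params = {'season': 2, 'ep': 3, 'maxage': 4, 'limit': 100}
    ep_detail = 'S%02dE%02d' % (2, 3)        # e.g. from naming_ep_type[2]

    params = base_params.copy()
    params['q'] = '%s.%s' % ('Show.Name', ep_detail)
    params.pop('season', None)
    params.pop('ep', None)
    # -> {'maxage': 4, 'limit': 100, 'q': 'Show.Name.S02E03'}
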
diff --git a/sickbeard/providers/pisexy.py b/sickbeard/providers/pisexy.py
index c838aecd..74f247d7 100644
--- a/sickbeard/providers/pisexy.py
+++ b/sickbeard/providers/pisexy.py
@@ -17,7 +17,7 @@ import re
import traceback
from . import generic
-from sickbeard import logger, tvcache
+from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode
@@ -37,7 +37,6 @@ class PiSexyProvider(generic.TorrentProvider):
self.url = self.urls['config_provider_home_uri']
self.username, self.password, self.minseed, self.minleech = 4 * [None]
- self.cache = PiSexyCache(self)
def _authorised(self, **kwargs):
@@ -108,14 +107,4 @@ class PiSexyProvider(generic.TorrentProvider):
return results
-class PiSexyCache(tvcache.TVCache):
-
- def __init__(self, this_provider):
- tvcache.TVCache.__init__(self, this_provider)
-
- def _cache_data(self):
-
- return self.provider.cache_data()
-
-
provider = PiSexyProvider()
diff --git a/sickbeard/providers/pretome.py b/sickbeard/providers/pretome.py
index 0b9a763d..7a49c1a9 100644
--- a/sickbeard/providers/pretome.py
+++ b/sickbeard/providers/pretome.py
@@ -16,7 +16,6 @@
# along with SickGear. If not, see .
from . import generic
-from sickbeard import tvcache
from sickbeard.rssfeeds import RSSFeeds
from lib.unidecode import unidecode
@@ -24,7 +23,7 @@ from lib.unidecode import unidecode
class PreToMeProvider(generic.TorrentProvider):
def __init__(self):
- generic.TorrentProvider.__init__(self, 'PreToMe')
+ generic.TorrentProvider.__init__(self, 'PreToMe', cache_update_freq=6)
self.url_base = 'https://pretome.info/'
@@ -35,7 +34,6 @@ class PreToMeProvider(generic.TorrentProvider):
self.url = self.urls['config_provider_home_uri']
self.passkey = None
- self.cache = PreToMeCache(self)
def _authorised(self, **kwargs):
@@ -72,16 +70,4 @@ class PreToMeProvider(generic.TorrentProvider):
return results
-class PreToMeCache(tvcache.TVCache):
-
- def __init__(self, this_provider):
- tvcache.TVCache.__init__(self, this_provider)
-
- self.update_freq = 6 # cache update frequency
-
- def _cache_data(self):
-
- return self.provider.cache_data()
-
-
provider = PreToMeProvider()
diff --git a/sickbeard/providers/rarbg.py b/sickbeard/providers/rarbg.py
index f32b3b64..6e520d0f 100644
--- a/sickbeard/providers/rarbg.py
+++ b/sickbeard/providers/rarbg.py
@@ -21,7 +21,7 @@ import datetime
import time
from . import generic
-from sickbeard import helpers, logger, tvcache
+from sickbeard import helpers, logger
from sickbeard.indexers.indexer_config import INDEXER_TVDB
@@ -51,7 +51,6 @@ class RarbgProvider(generic.TorrentProvider):
self.minseed, self.minleech, self.token, self.token_expiry = 4 * [None]
self.confirmed = False
self.request_throttle = datetime.datetime.now()
- self.cache = RarbgCache(self)
def _authorised(self, reset=False, **kwargs):
@@ -178,14 +177,4 @@ class RarbgProvider(generic.TorrentProvider):
return search_params
-class RarbgCache(tvcache.TVCache):
-
- def __init__(self, this_provider):
- tvcache.TVCache.__init__(self, this_provider)
-
- def _cache_data(self):
-
- return self.provider.cache_data()
-
-
provider = RarbgProvider()
diff --git a/sickbeard/providers/scc.py b/sickbeard/providers/scc.py
index 039985a7..acce2694 100644
--- a/sickbeard/providers/scc.py
+++ b/sickbeard/providers/scc.py
@@ -20,7 +20,7 @@ import time
import traceback
from . import generic
-from sickbeard import logger, tvcache
+from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode
@@ -31,18 +31,16 @@ class SCCProvider(generic.TorrentProvider):
def __init__(self):
generic.TorrentProvider.__init__(self, 'SceneAccess')
- self.url_base = 'https://sceneaccess.eu/'
- self.urls = {'config_provider_home_uri': self.url_base,
- 'login': self.url_base + 'login',
- 'search': self.url_base + 'browse?search=%s&method=1&c27=27&c17=17&c11=11',
- 'nonscene': self.url_base + 'nonscene?search=%s&method=1&c44=44&c45=44',
- 'archive': self.url_base + 'archive?search=%s&method=1&c26=26',
- 'get': self.url_base + '%s'}
+ self.url_home = ['https://sceneaccess.%s/' % u for u in 'eu', 'org']
- self.url = self.urls['config_provider_home_uri']
+ self.url_vars = {
+ 'login': 'login', 'search': 'browse?search=%s&method=1&c27=27&c17=17&c11=11', 'get': '%s',
+ 'nonscene': 'nonscene?search=%s&method=1&c44=44&c45=44', 'archive': 'archive?search=%s&method=1&c26=26'}
+ self.url_tmpl = {
+ 'config_provider_home_uri': '%(home)s', 'login': '%(home)s%(vars)s', 'search': '%(home)s%(vars)s',
+ 'get': '%(home)s%(vars)s', 'nonscene': '%(home)s%(vars)s', 'archive': '%(home)s%(vars)s'}
self.username, self.password, self.minseed, self.minleech = 4 * [None]
- self.cache = SCCCache(self)
def _authorised(self, **kwargs):
@@ -121,16 +119,4 @@ class SCCProvider(generic.TorrentProvider):
return generic.TorrentProvider._episode_strings(self, ep_obj, sep_date='.', **kwargs)
-class SCCCache(tvcache.TVCache):
-
- def __init__(self, this_provider):
- tvcache.TVCache.__init__(self, this_provider)
-
- self.update_freq = 20 # cache update frequency
-
- def _cache_data(self):
-
- return self.provider.cache_data()
-
-
provider = SCCProvider()
diff --git a/sickbeard/providers/scenetime.py b/sickbeard/providers/scenetime.py
index 0af4552d..a337f539 100644
--- a/sickbeard/providers/scenetime.py
+++ b/sickbeard/providers/scenetime.py
@@ -20,7 +20,7 @@ import re
import traceback
from . import generic
-from sickbeard import logger, tvcache
+from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode
@@ -29,7 +29,7 @@ from lib.unidecode import unidecode
class SceneTimeProvider(generic.TorrentProvider):
def __init__(self):
- generic.TorrentProvider.__init__(self, 'SceneTime')
+ generic.TorrentProvider.__init__(self, 'SceneTime', cache_update_freq=15)
self.url_base = 'https://www.scenetime.com/'
self.urls = {'config_provider_home_uri': self.url_base,
@@ -42,9 +42,7 @@ class SceneTimeProvider(generic.TorrentProvider):
self.url = self.urls['config_provider_home_uri']
- self.username, self.password, self.minseed, self.minleech = 4 * [None]
- self.freeleech = False
- self.cache = SceneTimeCache(self)
+ self.username, self.password, self.freeleech, self.minseed, self.minleech = 5 * [None]
def _authorised(self, **kwargs):
@@ -58,15 +56,16 @@ class SceneTimeProvider(generic.TorrentProvider):
items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
- rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'detail', 'get': '.*id=(\d+).*', 'fl': '\[freeleech\]',
- 'cats': 'cat=(?:%s)' % self._categories_string(template='', delimiter='|')
- }.items())
+ rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {
+ 'info': 'detail', 'get': '.*id=(\d+).*', 'fl': '\[freeleech\]',
+ 'cats': 'cat=(?:%s)' % self._categories_string(template='', delimiter='|')}.items())
for mode in search_params.keys():
for search_string in search_params[mode]:
search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
post_data = self.urls['params'].copy()
- post_data.update(ast.literal_eval('{%s}' % self._categories_string(template='"c%s": "1"', delimiter=',')))
+ post_data.update(ast.literal_eval(
+ '{%s}' % self._categories_string(template='"c%s": "1"', delimiter=',')))
if 'Cache' != mode:
search_string = '+'.join(search_string.split())
post_data['search'] = search_string
@@ -99,10 +98,11 @@ class SceneTimeProvider(generic.TorrentProvider):
continue
info = tr.find('a', href=rc['info'])
- title = 'title' in info.attrs and info.attrs['title'] or info.get_text().strip()
+ title = info.attrs.get('title') or info.get_text().strip()
- download_url = self.urls['get'] % {'id': re.sub(rc['get'], r'\1', str(info.attrs['href'])),
- 'title': str(title).replace(' ', '.')}
+ download_url = self.urls['get'] % {
+ 'id': re.sub(rc['get'], r'\1', str(info.attrs['href'])),
+ 'title': str(title).replace(' ', '.')}
except (AttributeError, TypeError, ValueError):
continue
@@ -124,16 +124,4 @@ class SceneTimeProvider(generic.TorrentProvider):
return results
-class SceneTimeCache(tvcache.TVCache):
-
- def __init__(self, this_provider):
- tvcache.TVCache.__init__(self, this_provider)
-
- self.update_freq = 15 # cache update frequency
-
- def _cache_data(self):
-
- return self.provider.cache_data()
-
-
provider = SceneTimeProvider()
diff --git a/sickbeard/providers/shazbat.py b/sickbeard/providers/shazbat.py
index 80f5f09a..bc5650cc 100644
--- a/sickbeard/providers/shazbat.py
+++ b/sickbeard/providers/shazbat.py
@@ -22,7 +22,7 @@ import time
import traceback
from . import generic
-from sickbeard import helpers, logger, tvcache
+from sickbeard import helpers, logger
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode
@@ -32,7 +32,7 @@ class ShazbatProvider(generic.TorrentProvider):
def __init__(self):
- generic.TorrentProvider.__init__(self, 'Shazbat')
+ generic.TorrentProvider.__init__(self, 'Shazbat', cache_update_freq=20)
self.url_base = 'https://www.shazbat.tv/'
self.urls = {'config_provider_home_uri': self.url_base,
@@ -46,8 +46,6 @@ class ShazbatProvider(generic.TorrentProvider):
self.url = self.urls['config_provider_home_uri']
self.username, self.password, self.minseed, self.minleech = 4 * [None]
- self.freeleech = False
- self.cache = ShazbatCache(self)
def _authorised(self, **kwargs):
@@ -147,16 +145,4 @@ class ShazbatProvider(generic.TorrentProvider):
return generic.TorrentProvider._episode_strings(self, ep_obj, detail_only=True, scene=False, **kwargs)
-class ShazbatCache(tvcache.TVCache):
-
- def __init__(self, this_provider):
- tvcache.TVCache.__init__(self, this_provider)
-
- self.update_freq = 20 # cache update frequency
-
- def _cache_data(self):
-
- return self.provider.cache_data()
-
-
provider = ShazbatProvider()
diff --git a/sickbeard/providers/speedcd.py b/sickbeard/providers/speedcd.py
index e4a896ba..ba20732c 100644
--- a/sickbeard/providers/speedcd.py
+++ b/sickbeard/providers/speedcd.py
@@ -19,34 +19,32 @@ import re
import time
from . import generic
-from sickbeard import tvcache
+from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
class SpeedCDProvider(generic.TorrentProvider):
def __init__(self):
- generic.TorrentProvider.__init__(self, 'SpeedCD')
+ generic.TorrentProvider.__init__(self, 'SpeedCD', cache_update_freq=20)
- self.url_base = 'http://speed.cd/'
+ self.url_base = 'https://speed.cd/'
self.urls = {'config_provider_home_uri': self.url_base,
'login_action': self.url_base + 'login.php',
'search': self.url_base + 'V3/API/API.php',
- 'get': self.url_base + 'download.php?torrent=%s'}
+ 'get': self.url_base + '%s'}
- self.categories = {'Season': {'c41': 1, 'c53': 1},
- 'Episode': {'c2': 1, 'c49': 1, 'c50': 1, 'c55': 1},
- 'Cache': {'c41': 1, 'c2': 1, 'c49': 1, 'c50': 1, 'c53': 1, 'c55': 1}}
+ self.categories = {'Season': [41, 53], 'Episode': [2, 49, 50, 55], 'anime': [30]}
+ self.categories['Cache'] = self.categories['Season'] + self.categories['Episode']
self.url = self.urls['config_provider_home_uri']
- self.username, self.password, self.minseed, self.minleech = 4 * [None]
- self.freeleech = False
- self.cache = SpeedCDCache(self)
+ self.username, self.password, self.freeleech, self.minseed, self.minleech = 5 * [None]
def _authorised(self, **kwargs):
- return super(SpeedCDProvider, self)._authorised(logged_in=(lambda x=None: self.has_all_cookies('inSpeed_speedian')))
+ return super(SpeedCDProvider, self)._authorised(
+ logged_in=(lambda x=None: self.has_all_cookies('inSpeed_speedian')))
def _search_provider(self, search_params, **kwargs):
@@ -56,37 +54,49 @@ class SpeedCDProvider(generic.TorrentProvider):
items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
- remove_tag = re.compile(r'<[^>]*>')
+ rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'get': 'download', 'fl': '\[freeleech\]'}.items())
+
for mode in search_params.keys():
- search_mode = (mode, 'Episode')['Propers' == mode]
+ rc['cats'] = re.compile('(?i)cat=(?:%s)' % self._categories_string(mode, template='', delimiter='|'))
for search_string in search_params[mode]:
search_string = '+'.join(search_string.split())
- post_data = dict({'/browse.php?': None, 'cata': 'yes', 'jxt': 4, 'jxw': 'b', 'search': search_string},
- **self.categories[search_mode])
- if self.freeleech:
- post_data['freeleech'] = 'on'
+ post_data = dict((x.split('=') for x in self._categories_string(mode).split('&')), search=search_string,
+ jxt=2, jxw='b', freeleech=('on', None)[not self.freeleech])
data_json = self.get_url(self.urls['search'], post_data=post_data, json=True)
cnt = len(items[mode])
try:
- if not data_json:
+ html = data_json.get('Fs')[0].get('Cn')[0].get('d')
+ if not html or self._has_no_results(html):
raise generic.HaltParseException
- torrents = data_json.get('Fs', [])[0].get('Cn', {}).get('torrents', [])
- for item in torrents:
+ with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
+ torrent_table = soup.find('table', attrs={'cellspacing': 0})
+ torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')
- if self.freeleech and not item.get('free'):
- continue
+ if 2 > len(torrent_rows):
+ raise generic.HaltParseException
- seeders, leechers, size = [tryInt(n, n) for n in [item.get(x) for x in 'seed', 'leech', 'size']]
- if self._peers_fail(mode, seeders, leechers):
- continue
+ for tr in torrent_rows[1:]:
+ try:
+ seeders, leechers, size = [tryInt(n, n) for n in [
+ tr.find_all('td')[x].get_text().strip() for x in (-2, -1, -3)]]
+ if None is tr.find('a', href=rc['cats']) \
+ or self.freeleech and None is rc['fl'].search(tr.find_all('td')[1].get_text()) \
+ or self._peers_fail(mode, seeders, leechers):
+ continue
- title = remove_tag.sub('', item.get('name'))
- download_url = self.urls['get'] % item.get('id')
- if title and download_url:
- items[mode].append((title, download_url, seeders, self._bytesizer(size)))
+ info = tr.find('a', 'torrent')
+ title = info.attrs.get('title') or info.get_text().strip()
+
+ download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
+
+ except (AttributeError, TypeError, ValueError):
+ continue
+
+ if title and download_url:
+ items[mode].append((title, download_url, seeders, self._bytesizer(size)))
except Exception:
time.sleep(1.1)
@@ -105,16 +115,4 @@ class SpeedCDProvider(generic.TorrentProvider):
return generic.TorrentProvider._episode_strings(self, ep_obj, sep_date='.', **kwargs)
-class SpeedCDCache(tvcache.TVCache):
-
- def __init__(self, this_provider):
- tvcache.TVCache.__init__(self, this_provider)
-
- self.update_freq = 20 # cache update frequency
-
- def _cache_data(self):
-
- return self.provider.cache_data()
-
-
provider = SpeedCDProvider()
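
SpeedCD's API response now carries rendered HTML inside its JSON envelope rather than a torrent list, so the parser digs out the fragment and hands it to BS4Parser. The envelope shape is inferred from the chained .get() calls above:

    # inferred response shape: Fs[0].Cn[0].d holds an HTML <table> fragment
    data_json = {'Fs': [{'Cn': [{'d': '<table cellspacing="0">...</table>'}]}]}

    html = data_json.get('Fs')[0].get('Cn')[0].get('d')
    # html is then parsed with BS4Parser and each <tr> walked as in the hunk
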
diff --git a/sickbeard/providers/thepiratebay.py b/sickbeard/providers/thepiratebay.py
index bec87ae0..c46c0b7d 100644
--- a/sickbeard/providers/thepiratebay.py
+++ b/sickbeard/providers/thepiratebay.py
@@ -23,7 +23,7 @@ import traceback
import urllib
from . import generic
-from sickbeard import config, logger, tvcache, show_name_helpers
+from sickbeard import config, logger, show_name_helpers
from sickbeard.bs4_parser import BS4Parser
from sickbeard.common import Quality, mediaExtensions
from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
@@ -33,40 +33,32 @@ from lib.unidecode import unidecode
class ThePirateBayProvider(generic.TorrentProvider):
def __init__(self):
- generic.TorrentProvider.__init__(self, 'The Pirate Bay')
+ generic.TorrentProvider.__init__(self, 'The Pirate Bay', cache_update_freq=20)
- self.urls = {'config_provider_home_uri': ['https://thepiratebay.se/', 'https://thepiratebay.gd/',
- 'https://thepiratebay.mn/', 'https://thepiratebay.vg/',
- 'https://thepiratebay.la/'],
- 'search': 'search/%s/0/7/200',
- 'browse': 'tv/latest/'} # order by seed
+ self.url_home = ['https://thepiratebay.%s/' % u for u in 'se', 'org']
+
+ self.url_vars = {'search': 'search/%s/0/7/200', 'browse': 'tv/latest/'}
+ self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'search': '%(home)s%(vars)s',
+ 'browse': '%(home)s%(vars)s'}
self.proper_search_terms = None
- self.url = self.urls['config_provider_home_uri'][0]
self.minseed, self.minleech = 2 * [None]
self.confirmed = False
- self.cache = ThePirateBayCache(self)
+
+ @staticmethod
+ def _has_signature(data=None):
+ return data and re.search(r'Pirate\sBay', data[33:7632:])
def _find_season_quality(self, title, torrent_id, ep_number):
""" Return the modified title of a Season Torrent with the quality found inspecting torrent file list """
+ if not self.url:
+ return False
+
quality = Quality.UNKNOWN
file_name = None
- data = None
- has_signature = False
- details_url = '/ajax_details_filelist.php?id=%s' % torrent_id
- for idx, url in enumerate(self.urls['config_provider_home_uri']):
- data = self.get_url(url + details_url)
- if data and re.search(r'The\sPirate\sBay', data[33:200:]):
- has_signature = True
- break
- else:
- data = None
-
- if not has_signature:
- logger.log(u'Failed to identify a page from ThePirateBay at %s attempted urls (tpb blocked? general network issue or site dead)' % len(self.urls['config_provider_home_uri']), logger.ERROR)
-
+ data = self.get_url('%sajax_details_filelist.php?id=%s' % (self.url, torrent_id))
if not data:
return None
@@ -138,30 +130,22 @@ class ThePirateBayProvider(generic.TorrentProvider):
def _search_provider(self, search_params, search_mode='eponly', epcount=0, **kwargs):
results = []
+ if not self.url:
+ return results
+
items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
rc = dict((k, re.compile('(?i)' + v))
for (k, v) in {'info': 'detail', 'get': 'download[^"]+magnet', 'tid': r'.*/(\d{5,}).*',
'verify': '(?:helper|moderator|trusted|vip)'}.items())
- has_signature = False
+
for mode in search_params.keys():
for search_string in search_params[mode]:
search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
- log_url = '%s %s' % (self.name, search_string) # placebo value
- for idx, search_url in enumerate(self.urls['config_provider_home_uri']):
- search_url += self.urls['browse'] if 'Cache' == mode\
- else self.urls['search'] % (urllib.quote(search_string))
-
- log_url = u'(%s/%s): %s' % (idx + 1, len(self.urls['config_provider_home_uri']), search_url)
-
- html = self.get_url(search_url)
-
- if html and re.search(r'Pirate\sBay', html[33:7632:]):
- has_signature = True
- break
- else:
- html = None
+ search_url = self.urls['browse'] if 'Cache' == mode \
+ else self.urls['search'] % (urllib.quote(search_string))
+ html = self.get_url(search_url)
cnt = len(items[mode])
try:
@@ -213,28 +197,13 @@ class ThePirateBayProvider(generic.TorrentProvider):
pass
except Exception:
logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
- self._log_search(mode, len(items[mode]) - cnt, log_url)
+ self._log_search(mode, len(items[mode]) - cnt, search_url)
self._sort_seeders(mode, items)
results = list(set(results + items[mode]))
- if not has_signature:
- logger.log(u'Failed to identify a page from ThePirateBay at %s attempted urls (tpb blocked? general network issue or site dead)' % len(self.urls['config_provider_home_uri']), logger.ERROR)
-
return results
-class ThePirateBayCache(tvcache.TVCache):
-
- def __init__(self, this_provider):
- tvcache.TVCache.__init__(self, this_provider)
-
- self.update_freq = 20 # cache update frequency
-
- def _cache_data(self):
-
- return self.provider.cache_data()
-
-
provider = ThePirateBayProvider()
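
ThePirateBay likewise drops its inline mirror loop in favour of the shared url_home selection, and its signature check deliberately scans only a slice of the page so the regex stays cheap. A runnable illustration of that check (sample strings invented):

    import re

    def tpb_has_signature(data=None):
        # same test as above: look for 'Pirate Bay' within chars 33..7632
        return bool(data and re.search(r'Pirate\sBay', data[33:7632:]))

    tpb_has_signature('x' * 40 + 'The Pirate Bay - browse')   # True
    tpb_has_signature('<html>blocked by ISP</html>')          # False
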
diff --git a/sickbeard/providers/torrentbytes.py b/sickbeard/providers/torrentbytes.py
index dc60d118..6dbaa82a 100644
--- a/sickbeard/providers/torrentbytes.py
+++ b/sickbeard/providers/torrentbytes.py
@@ -19,7 +19,7 @@ import re
import traceback
from . import generic
-from sickbeard import logger, tvcache
+from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode
@@ -28,21 +28,18 @@ from lib.unidecode import unidecode
class TorrentBytesProvider(generic.TorrentProvider):
def __init__(self):
- generic.TorrentProvider.__init__(self, 'TorrentBytes')
+ generic.TorrentProvider.__init__(self, 'TorrentBytes', cache_update_freq=20)
- self.url_base = 'https://www.torrentbytes.net/'
- self.urls = {'config_provider_home_uri': self.url_base,
- 'login': self.url_base + 'takelogin.php',
- 'search': self.url_base + 'browse.php?search=%s&%s',
- 'get': self.url_base + '%s'}
+ self.url_home = ['https://www.torrentbytes.net/']
- self.categories = {'shows': [41, 33, 38, 32, 37]}
+ self.url_vars = {'login': 'takelogin.php', 'search': 'browse.php?search=%s&%s', 'get': '%s'}
+ self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'login': '%(home)s%(vars)s',
+ 'search': '%(home)s%(vars)s', 'get': '%(home)s%(vars)s'}
- self.url = self.urls['config_provider_home_uri']
+ self.categories = {'Season': [41, 32], 'Episode': [33, 37, 38]}
+ self.categories['Cache'] = self.categories['Season'] + self.categories['Episode']
- self.username, self.password, self.minseed, self.minleech = 4 * [None]
- self.freeleech = False
- self.cache = TorrentBytesCache(self)
+ self.username, self.password, self.freeleech, self.minseed, self.minleech = 5 * [None]
def _authorised(self, **kwargs):
@@ -56,12 +53,12 @@ class TorrentBytesProvider(generic.TorrentProvider):
items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
- rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'detail', 'get': 'download', 'fl': '\[\W*F\W?L\W*\]'
- }.items())
+ rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'detail', 'get': 'download',
+ 'fl': '\[\W*F\W?L\W*\]'}.items())
for mode in search_params.keys():
for search_string in search_params[mode]:
search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
- search_url = self.urls['search'] % (search_string, self._categories_string())
+ search_url = self.urls['search'] % (search_string, self._categories_string(mode))
html = self.get_url(search_url, timeout=90)
@@ -82,11 +79,11 @@ class TorrentBytesProvider(generic.TorrentProvider):
info = tr.find('a', href=rc['info'])
seeders, leechers, size = [tryInt(n, n) for n in [
tr.find_all('td')[x].get_text().strip() for x in (-2, -1, -4)]]
- if self.freeleech and (len(info.contents) < 2 or not rc['fl'].search(info.contents[1].string.strip())) \
- or self._peers_fail(mode, seeders, leechers):
+ if self.freeleech and (len(info.contents) < 2 or not rc['fl'].search(
+ info.contents[1].string.strip())) or self._peers_fail(mode, seeders, leechers):
continue
- title = 'title' in info.attrs and info.attrs['title'] or info.contents[0]
+ title = info.attrs.get('title') or info.contents[0]
title = (isinstance(title, list) and title[0] or title).strip()
download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
except (AttributeError, TypeError, ValueError):
@@ -109,16 +106,4 @@ class TorrentBytesProvider(generic.TorrentProvider):
return results
-class TorrentBytesCache(tvcache.TVCache):
-
- def __init__(self, this_provider):
- tvcache.TVCache.__init__(self, this_provider)
-
- self.update_freq = 20 # cache update frequency
-
- def _cache_data(self):
-
- return self.provider.cache_data()
-
-
provider = TorrentBytesProvider()
diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py
index cf35ef8f..7dd6ea43 100644
--- a/sickbeard/providers/torrentday.py
+++ b/sickbeard/providers/torrentday.py
@@ -19,8 +19,7 @@ import re
import time
from . import generic
-from sickbeard import tvcache
-from sickbeard.helpers import (has_anime, tryInt)
+from sickbeard.helpers import tryInt
class TorrentDayProvider(generic.TorrentProvider):
@@ -28,22 +27,19 @@ class TorrentDayProvider(generic.TorrentProvider):
def __init__(self):
generic.TorrentProvider.__init__(self, 'TorrentDay')
- self.url_base = 'https://torrentday.eu/'
- self.urls = {'config_provider_home_uri': self.url_base,
- 'login': self.url_base + 'torrents/',
- 'search': self.url_base + 'V3/API/API.php',
- 'get': self.url_base + 'download.php/%s/%s'}
+ self.url_home = ['https://%s/' % u for u in 'torrentday.eu', 'secure.torrentday.com', 'tdonline.org',
+ 'torrentday.it', 'www.td.af', 'www.torrentday.com']
- self.categories = {'Season': {'c31': 1, 'c33': 1, 'c14': 1},
- 'Episode': {'c32': 1, 'c26': 1, 'c7': 1, 'c2': 1},
- 'Cache': {'c31': 1, 'c33': 1, 'c14': 1, 'c32': 1, 'c26': 1, 'c7': 1, 'c2': 1}}
+ self.url_vars = {'login': 'torrents/', 'search': 'V3/API/API.php', 'get': 'download.php/%s/%s'}
+ self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'login': '%(home)s%(vars)s',
+ 'search': '%(home)s%(vars)s', 'get': '%(home)s%(vars)s'}
+
+ self.categories = {'Season': [31, 33, 14], 'Episode': [24, 32, 26, 7, 2], 'Anime': [29]}
+ self.categories['Cache'] = self.categories['Season'] + self.categories['Episode']
self.proper_search_terms = None
- self.url = self.urls['config_provider_home_uri']
- self.username, self.password, self.minseed, self.minleech = 4 * [None]
- self.freeleech = False
- self.cache = TorrentDayCache(self)
+ self.username, self.password, self.freeleech, self.minseed, self.minleech = 5 * [None]
def _authorised(self, **kwargs):
@@ -66,11 +62,8 @@ class TorrentDayProvider(generic.TorrentProvider):
for mode in search_params.keys():
for search_string in search_params[mode]:
search_string = '+'.join(search_string.split())
- post_data = dict({'/browse.php?': None, 'cata': 'yes', 'jxt': 8, 'jxw': 'b', 'search': search_string},
- **self.categories[(mode, 'Episode')['Propers' == mode]])
- if ('Cache' == mode and has_anime()) or (
- mode in ['Season', 'Episode'] and self.show and self.show.is_anime):
- post_data.update({'c29': 1})
+ post_data = dict((x.split('=') for x in self._categories_string(mode).split('&')),
+ search=search_string, cata='yes', jxt=8, jxw='b')
if self.freeleech:
post_data.update({'free': 'on'})
@@ -112,14 +105,4 @@ class TorrentDayProvider(generic.TorrentProvider):
return generic.TorrentProvider._episode_strings(self, ep_obj, sep_date='.', date_or=True, **kwargs)
-class TorrentDayCache(tvcache.TVCache):
-
- def __init__(self, this_provider):
- tvcache.TVCache.__init__(self, this_provider)
-
- def _cache_data(self):
-
- return self.provider.cache_data()
-
-
provider = TorrentDayProvider()
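
TorrentDay (like SpeedCD above) stops hard-coding {'c31': 1, ...} category dicts and instead derives its POST fields from the generic _categories_string(mode) helper. Assuming that helper renders something like 'c31=1&c33=1&c14=1' (inferred from how it is split here), the construction behaves as:

    # assumed output of self._categories_string(mode) for a Season search
    categories_string = 'c31=1&c33=1&c14=1'

    post_data = dict((x.split('=') for x in categories_string.split('&')),
                     search='Show+Name+S02', cata='yes', jxt=8, jxw='b')
    # -> {'c31': '1', 'c33': '1', 'c14': '1',
    #     'search': 'Show+Name+S02', 'cata': 'yes', 'jxt': 8, 'jxw': 'b'}
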
diff --git a/sickbeard/providers/torrenting.py b/sickbeard/providers/torrenting.py
index 0a483ffa..7b1fb831 100644
--- a/sickbeard/providers/torrenting.py
+++ b/sickbeard/providers/torrenting.py
@@ -19,7 +19,7 @@ import re
import traceback
from . import generic
-from sickbeard import logger, tvcache
+from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode
@@ -30,25 +30,20 @@ class TorrentingProvider(generic.TorrentProvider):
def __init__(self):
generic.TorrentProvider.__init__(self, 'Torrenting')
- self.url_base = 'https://www.torrenting.com/'
+ self.url_home = ['https://%s/' % u for u in 'www.torrenting.com', 'ttonline.us']
- self.api = 'https://ttonline.us/'
- self.urls = {'config_provider_home_uri': self.url_base,
- 'login': self.api + 'secure.php',
- 'search': self.api + 'browse.php?%s&search=%s',
- 'get': self.api + '%s'}
+ self.url_vars = {'login': 'rss.php', 'search': 'browse.php?%s&search=%s', 'get': '%s'}
+ self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'login': '%(home)s%(vars)s',
+ 'search': '%(home)s%(vars)s', 'get': '%(home)s%(vars)s'}
self.categories = {'shows': [4, 5]}
- self.url = self.urls['config_provider_home_uri']
-
self.digest, self.minseed, self.minleech = 3 * [None]
- self.cache = TorrentingCache(self)
def _authorised(self, **kwargs):
return super(TorrentingProvider, self)._authorised(
- logged_in=(lambda x=None: (None is x or 'Other Links' in x) and self.has_all_cookies() and
+ logged_in=(lambda x=None: (None is x or 'RSS link' in x) and self.has_all_cookies() and
self.session.cookies['uid'] in self.digest and self.session.cookies['pass'] in self.digest),
failed_msg=(lambda x=None: u'Invalid cookie details for %s. Check settings'))
@@ -60,9 +55,9 @@ class TorrentingProvider(generic.TorrentProvider):
items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
- rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'detail', 'get': 'download',
- 'cats': 'cat=(?:%s)' % self._categories_string(template='', delimiter='|')
- }.items())
+ rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {
+ 'info': 'detail', 'cats': 'cat=(?:%s)' % self._categories_string(template='', delimiter='|'),
+ 'get': 'download'}.items())
for mode in search_params.keys():
for search_string in search_params[mode]:
search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
@@ -90,8 +85,9 @@ class TorrentingProvider(generic.TorrentProvider):
continue
info = tr.find('a', href=rc['info'])
- title = 'title' in info.attrs and info.attrs['title'] or info.get_text().strip()
- download_url = self.urls['get'] % tr.find('a', href=rc['get']).get('href')
+ title = info.attrs.get('title') or info.get_text().strip()
+ download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
+
except (AttributeError, TypeError, ValueError):
continue
@@ -117,14 +113,4 @@ class TorrentingProvider(generic.TorrentProvider):
return 'torrenting_digest' == key and 'use... \'uid=xx; pass=yy\'' or ''
-class TorrentingCache(tvcache.TVCache):
-
- def __init__(self, this_provider):
- tvcache.TVCache.__init__(self, this_provider)
-
- def _cache_data(self):
-
- return self.provider.cache_data()
-
-
provider = TorrentingProvider()
diff --git a/sickbeard/providers/torrentleech.py b/sickbeard/providers/torrentleech.py
index c672ccbd..cfe5f05b 100644
--- a/sickbeard/providers/torrentleech.py
+++ b/sickbeard/providers/torrentleech.py
@@ -19,14 +19,14 @@ import re
import traceback
from . import generic
-from sickbeard import logger, tvcache
+from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from lib.unidecode import unidecode
class TorrentLeechProvider(generic.TorrentProvider):
def __init__(self):
- generic.TorrentProvider.__init__(self, 'TorrentLeech')
+ generic.TorrentProvider.__init__(self, 'TorrentLeech', cache_update_freq=20)
self.url_base = 'https://torrentleech.org/'
self.urls = {'config_provider_home_uri': self.url_base,
@@ -40,7 +40,6 @@ class TorrentLeechProvider(generic.TorrentProvider):
self.url = self.urls['config_provider_home_uri']
self.username, self.password, self.minseed, self.minleech = 4 * [None]
- self.cache = TorrentLeechCache(self)
def _authorised(self, **kwargs):
@@ -111,14 +110,5 @@ class TorrentLeechProvider(generic.TorrentProvider):
return generic.TorrentProvider._episode_strings(self, ep_obj, sep_date='|', **kwargs)
-class TorrentLeechCache(tvcache.TVCache):
- def __init__(self, this_provider):
- tvcache.TVCache.__init__(self, this_provider)
-
- self.update_freq = 20 # cache update frequency
-
- def _cache_data(self):
- return self.provider.cache_data()
-
provider = TorrentLeechProvider()
diff --git a/sickbeard/providers/torrentshack.py b/sickbeard/providers/torrentshack.py
index 6a131418..19ea689b 100644
--- a/sickbeard/providers/torrentshack.py
+++ b/sickbeard/providers/torrentshack.py
@@ -21,7 +21,7 @@ import re
import traceback
from . import generic
-from sickbeard import logger, tvcache
+from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode
@@ -30,13 +30,14 @@ from lib.unidecode import unidecode
class TorrentShackProvider(generic.TorrentProvider):
def __init__(self):
- generic.TorrentProvider.__init__(self, 'TorrentShack')
+ generic.TorrentProvider.__init__(self, 'TorrentShack', cache_update_freq=20)
self.url_base = 'https://torrentshack.me/'
self.urls = {'config_provider_home_uri': self.url_base,
'login': self.url_base + 'login.php?lang=',
'search': self.url_base + 'torrents.php?searchstr=%s&%s&' + '&'.join(
- ['release_type=both', 'searchtags=', 'tags_type=0', 'order_by=s3', 'order_way=desc', 'torrent_preset=all']),
+ ['release_type=both', 'searchtags=', 'tags_type=0',
+ 'order_by=s3', 'order_way=desc', 'torrent_preset=all']),
'get': self.url_base + '%s'}
self.categories = {'shows': [600, 620, 700, 981, 980], 'anime': [850]}
@@ -44,7 +45,6 @@ class TorrentShackProvider(generic.TorrentProvider):
self.url = self.urls['config_provider_home_uri']
self.username, self.password, self.minseed, self.minleech = 4 * [None]
- self.cache = TorrentShackCache(self)
def _authorised(self, **kwargs):
@@ -117,16 +117,4 @@ class TorrentShackProvider(generic.TorrentProvider):
return generic.TorrentProvider._episode_strings(self, ep_obj, sep_date='.', **kwargs)
-class TorrentShackCache(tvcache.TVCache):
-
- def __init__(self, this_provider):
- tvcache.TVCache.__init__(self, this_provider)
-
- self.update_freq = 20 # cache update frequency
-
- def _cache_data(self):
-
- return self.provider.cache_data()
-
-
provider = TorrentShackProvider()
diff --git a/sickbeard/providers/transmithe_net.py b/sickbeard/providers/transmithe_net.py
index 2ab1d6df..b7cd5c35 100644
--- a/sickbeard/providers/transmithe_net.py
+++ b/sickbeard/providers/transmithe_net.py
@@ -19,7 +19,8 @@ import re
import traceback
from . import generic
-from sickbeard import helpers, logger, tvcache
+from sickbeard import common, helpers, logger
+from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode
@@ -27,7 +28,7 @@ from lib.unidecode import unidecode
class TransmithenetProvider(generic.TorrentProvider):
def __init__(self):
- generic.TorrentProvider.__init__(self, 'Transmithe.net')
+ generic.TorrentProvider.__init__(self, 'Transmithe.net', cache_update_freq=17)
self.url_base = 'https://transmithe.net/'
self.urls = {'config_provider_home_uri': self.url_base,
@@ -39,10 +40,9 @@ class TransmithenetProvider(generic.TorrentProvider):
self.url = self.urls['config_provider_home_uri']
self.user_authkey, self.user_passkey = 2 * [None]
+ self.chk_td = True
- self.username, self.password, self.minseed, self.minleech = 4 * [None]
- self.freeleech = False
- self.cache = TransmithenetCache(self)
+ self.username, self.password, self.freeleech, self.minseed, self.minleech = 5 * [None]
def _authorised(self, **kwargs):
@@ -88,11 +88,13 @@ class TransmithenetProvider(generic.TorrentProvider):
try:
title_parts = group_name.split('[')
- maybe_res = re.findall('((?:72|108)0\w)', title_parts[1])
+ maybe_res = re.findall('((?:72|108|216)0\w)', title_parts[1])
+ maybe_ext = re.findall('(?i)(%s)' % '|'.join(common.mediaExtensions), title_parts[1])
detail = title_parts[1].split('/')
detail[1] = detail[1].strip().lower().replace('mkv', 'x264')
- title = '%s.%s' % (title_parts[0].strip(), '.'.join(
- (len(maybe_res) and [maybe_res[0]] or []) + [detail[0].strip(), detail[1]]))
+ title = '%s.%s' % (BS4Parser(title_parts[0].strip(), 'html.parser').soup.string, '.'.join(
+ (maybe_res and [maybe_res[0]] or []) +
+ [detail[0].strip(), detail[1], maybe_ext and maybe_ext[0].lower() or 'mkv']))
except (IndexError, KeyError):
title = group_name
download_url = self.urls['get'] % (self.user_authkey, self.user_passkey, torrent_id)
@@ -119,16 +121,4 @@ class TransmithenetProvider(generic.TorrentProvider):
return generic.TorrentProvider._episode_strings(self, ep_obj, scene=False, **kwargs)
-class TransmithenetCache(tvcache.TVCache):
-
- def __init__(self, this_provider):
- tvcache.TVCache.__init__(self, this_provider)
-
- self.update_freq = 17
-
- def _cache_data(self):
-
- return self.provider.cache_data()
-
-
provider = TransmithenetProvider()
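
The Transmithe.net title builder is extended to recognise 2160p releases and to append a container extension parsed out of the group name. The two changed recognisers, exercised on an invented sample string:

    import re

    sample = '2160p / WEB-DL / mkv'                      # invented example
    re.findall(r'((?:72|108|216)0\w)', sample)           # -> ['2160p']
    exts = ['mkv', 'mp4', 'avi']                         # stand-in for common.mediaExtensions
    re.findall('(?i)(%s)' % '|'.join(exts), sample)      # -> ['mkv']
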
diff --git a/sickbeard/providers/tvchaosuk.py b/sickbeard/providers/tvchaosuk.py
index 9515f95c..49e29cbf 100644
--- a/sickbeard/providers/tvchaosuk.py
+++ b/sickbeard/providers/tvchaosuk.py
@@ -19,7 +19,7 @@ import re
import traceback
from . import generic
-from sickbeard import logger, tvcache
+from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from sickbeard.config import naming_ep_type
@@ -32,9 +32,9 @@ class TVChaosUKProvider(generic.TorrentProvider):
def __init__(self):
generic.TorrentProvider.__init__(self, 'TVChaosUK')
- self.url_base = 'https://tvchaosuk.com/'
+ self.url_base = 'https://www.tvchaosuk.com/'
self.urls = {'config_provider_home_uri': self.url_base,
- 'login': self.url_base + 'takelogin.php',
+ 'login_action': self.url_base + 'login.php',
'search': self.url_base + 'browse.php',
'get': self.url_base + '%s'}
@@ -42,7 +42,6 @@ class TVChaosUKProvider(generic.TorrentProvider):
self.username, self.password, self.freeleech, self.minseed, self.minleech = 5 * [None]
self.search_fallback = True
- self.cache = TVChaosUKCache(self)
def _authorised(self, **kwargs):
@@ -92,8 +91,9 @@ class TVChaosUKProvider(generic.TorrentProvider):
info = tr.find('a', href=rc['info'])
title = (tr.find('div', attrs={'class': 'tooltip-content'}).get_text() or info.get_text()).strip()
title = re.findall('(?m)(^[^\r\n]+)', title)[0]
- download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip(
- '/').replace(self.urls['config_provider_home_uri'], '')
+ download_url = str(tr.find('a', href=rc['get'])['href'])
+ if not download_url.startswith('http'):
+ download_url = self.urls['get'] % download_url.lstrip('/')
except Exception:
continue
@@ -134,6 +134,7 @@ class TVChaosUKProvider(generic.TorrentProvider):
add_pad = re.findall('((?:19|20)\d\d\-\d\d\-\d\d)([\w\W])', title)
if len(add_pad) and add_pad[0][1] not in [' ', '.']:
title = title.replace(''.join(add_pad[0]), '%s %s' % (add_pad[0][0], add_pad[0][1]))
+ title = re.sub(r'(?sim)(.*?)(?:Episode|Season).\d+.(.*)', r'\1\2', title)
if title and download_url:
items[mode].append((title, download_url, seeders, self._bytesizer(size)))
@@ -176,14 +177,4 @@ class TVChaosUKProvider(generic.TorrentProvider):
return 'tvchaosuk_tip' == key and 'has missing quality data so you must add quality Custom/Unknown to any wanted show' or ''
-class TVChaosUKCache(tvcache.TVCache):
-
- def __init__(self, this_provider):
- tvcache.TVCache.__init__(self, this_provider)
-
- def _cache_data(self):
-
- return self.provider.cache_data()
-
-
provider = TVChaosUKProvider()
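
TVChaosUK's download links can now be absolute URLs, so the join with the site base is made conditional. The behaviour in isolation (URLs invented):

    def make_download_url(get_tmpl, href):
        # mirrors the conditional join added in the hunk above
        if not href.startswith('http'):
            href = get_tmpl % href.lstrip('/')
        return href

    make_download_url('https://www.tvchaosuk.com/%s', '/download.php?id=1')
    # -> 'https://www.tvchaosuk.com/download.php?id=1'
    make_download_url('https://www.tvchaosuk.com/%s', 'https://cdn.example/t.torrent')
    # -> returned unchanged
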
diff --git a/sickbeard/providers/womble.py b/sickbeard/providers/womble.py
index 9f264ad7..e0f479c6 100644
--- a/sickbeard/providers/womble.py
+++ b/sickbeard/providers/womble.py
@@ -17,7 +17,8 @@
# along with SickGear. If not, see .
from . import generic
-from sickbeard import logger, tvcache
+from sickbeard import tvcache
+import time
class WombleProvider(generic.NZBProvider):
@@ -34,46 +35,23 @@ class WombleCache(tvcache.TVCache):
def __init__(self, this_provider):
tvcache.TVCache.__init__(self, this_provider)
- self.update_freq = 15 # cache update frequency
+ self.update_freq = 6 # cache update frequency
- def updateCache(self):
+ def _cache_data(self):
- # delete anything older then 7 days
- self._clearCache()
-
- if not self.shouldUpdate():
- return
-
- cl = []
- data = None
- for url in [self.provider.url + 'rss/?sec=tv-x264&fr=false',
- self.provider.url + 'rss/?sec=tv-sd&fr=false',
- self.provider.url + 'rss/?sec=tv-dvd&fr=false',
- self.provider.url + 'rss/?sec=tv-hd&fr=false']:
- logger.log(u'Womble\'s Index cache update URL: ' + url, logger.DEBUG)
+ result = []
+ for section in ['sd', 'hd', 'x264', 'dvd']:
+ url = '%srss/?sec=tv-%s&fr=false' % (self.provider.url, section)
data = self.getRSSFeed(url)
+ time.sleep(1.1)
+ cnt = len(result)
+ for entry in (data and data.get('entries', []) or []):
+ if entry.get('title') and entry.get('link', '').startswith('http'):
+ result.append((entry['title'], entry['link'], None, None))
- # As long as we got something from the provider we count it as an update
- if not data:
- return []
+ self.provider.log_result(count=len(result) - cnt, url=url)
- # By now we know we've got data and no auth errors, all we need to do is put it in the database
- for item in data.entries:
- title, url = self._title_and_url(item)
- ci = self._parseItem(title, url)
- if None is not ci:
- cl.append(ci)
-
- if 0 < len(cl):
- my_db = self.get_db()
- my_db.mass_action(cl)
-
- # set last updated
- if data:
- self.setLastUpdate()
-
- def _checkAuth(self, *data):
- return 'Invalid Link' != data[0]
+ return result
provider = WombleProvider()
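
The rewritten WombleCache._cache_data folds four near-identical feed URLs into a loop and filters entries inline; the startswith('http') guard appears to subsume the removed _checkAuth test for the 'Invalid Link' marker. A quick check of the filter with sample data (not live feed output):

    entries = [
        {'title': 'Show.S01E01.720p', 'link': 'http://example.org/nzb/1'},
        {'title': '', 'link': 'http://example.org/nzb/2'},   # no title -> skipped
        {'title': 'Show.S01E02', 'link': 'Invalid Link'},    # non-http link -> skipped
    ]
    result = [(e['title'], e['link'], None, None)
              for e in entries
              if e.get('title') and e.get('link', '').startswith('http')]
    assert result == [('Show.S01E01.720p', 'http://example.org/nzb/1', None, None)]
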
diff --git a/sickbeard/search.py b/sickbeard/search.py
index b0cfa869..22e86af6 100644
--- a/sickbeard/search.py
+++ b/sickbeard/search.py
@@ -179,6 +179,21 @@ def snatch_episode(result, end_status=SNATCHED):
return True
+
+def pass_show_wordlist_checks(name, show):
+ re_extras = dict(re_prefix='.*', re_suffix='.*')
+ result = show_name_helpers.contains_any(name, show.rls_ignore_words, **re_extras)
+ if None is not result and result:
+ logger.log(u'Ignored: %s for containing ignore word' % name)
+ return False
+
+ result = show_name_helpers.contains_any(name, show.rls_require_words, **re_extras)
+ if None is not result and not result:
+ logger.log(u'Ignored: %s for not containing any required word match' % name)
+ return False
+ return True
+
+
def pick_best_result(results, show, quality_list=None):
logger.log(u'Picking the best result out of %s' % [x.name for x in results], logger.DEBUG)
@@ -195,15 +210,7 @@ def pick_best_result(results, show, quality_list=None):
logger.log(u'%s is an unwanted quality, rejecting it' % cur_result.name, logger.DEBUG)
continue
- re_extras = dict(re_prefix='.*', re_suffix='.*')
- result = show_name_helpers.contains_any(cur_result.name, show.rls_ignore_words, **re_extras)
- if None is not result and result:
- logger.log(u'Ignored: %s for containing ignore word' % cur_result.name)
- continue
-
- result = show_name_helpers.contains_any(cur_result.name, show.rls_require_words, **re_extras)
- if None is not result and not result:
- logger.log(u'Ignored: %s for not containing any required word match' % cur_result.name)
+ if not pass_show_wordlist_checks(cur_result.name, show):
continue
cur_size = getattr(cur_result, 'size', None)
@@ -427,8 +434,11 @@ def search_for_needed_episodes(episodes):
threading.currentThread().name = orig_thread_name
- if not search_done:
- logger.log(u'No NZB/Torrent provider enabled to do recent searches. Please check provider options.', logger.ERROR)
+ if not len(providers):
+ logger.log('No NZB/Torrent sources enabled in Search Provider options to do recent searches', logger.WARNING)
+ elif not search_done:
+ logger.log('Failed recent search of %s enabled provider%s. More info in debug log.' % (
+ len(providers), helpers.maybe_plural(len(providers))), logger.ERROR)
return found_results.values()
@@ -672,12 +682,39 @@ def search_providers(show, episodes, manual_search=False):
continue
# filter out possible bad torrents from providers
- if 'torrent' == best_result.resultType and 'blackhole' != sickbeard.TORRENT_METHOD:
- best_result.content = None
- if not best_result.url.startswith('magnet'):
- best_result.content = best_result.provider.get_url(best_result.url)
- if not best_result.content:
+ if 'torrent' == best_result.resultType:
+ if best_result.url.startswith('magnet'):
+ if 'blackhole' != sickbeard.TORRENT_METHOD:
+ best_result.content = None
+ else:
+ td = best_result.provider.get_url(best_result.url)
+ if not td:
continue
+ if getattr(best_result.provider, 'chk_td', None):
+ name = None
+ try:
+ hdr = re.findall('(\w+(\d+):)', td[0:6])[0]
+ x, v = len(hdr[0]), int(hdr[1])
+ for item in range(0, 12):
+ y = x + v
+ name = 'name' == td[x: y]
+ w = re.findall('((?:i\d+e|d|l)?(\d+):)', td[y: y + 32])[0]
+ x, v = y + len(w[0]), int(w[1])
+ if name:
+ name = td[x: x + v]
+ break
+ except:
+ continue
+ if name:
+ if not pass_show_wordlist_checks(name, show):
+ continue
+ if not show_name_helpers.pass_wordlist_checks(name):
+ logger.log(u'Ignored: %s (debug log has detail)' % name)
+ continue
+ best_result.name = name
+
+ if 'blackhole' != sickbeard.TORRENT_METHOD:
+ best_result.content = td
# add result if its not a duplicate and
found = False
@@ -702,8 +739,10 @@ def search_providers(show, episodes, manual_search=False):
if len(episodes) == wanted_ep_count:
break
- if not search_done:
- logger.log(u'No NZB/Torrent providers found or enabled in the SickGear config for backlog searches. Please check your settings.',
- logger.ERROR)
+ if not len(provider_list):
+ logger.log('No NZB/Torrent sources enabled in Search Provider options to do backlog searches', logger.WARNING)
+ elif not search_done:
+ logger.log('Failed backlog search of %s enabled provider%s. More info in debug log.' % (
+ len(provider_list), helpers.maybe_plural(len(provider_list))), logger.ERROR)
return final_results
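
The chk_td block above walks the raw bencoded torrent data positionally to extract the top-level 'name' key, so the ignore/require word checks can run against the real release name before snatching. For readers unfamiliar with bencode, a simpler heuristic sketch of the same extraction (assumes text input for brevity; real torrent payloads are bytes):

    import re

    def torrent_name(td):
        # bencode stores the display name as  4:name<len>:<value>
        match = re.search(r'4:name(\d+):', td)
        if not match:
            return None
        start, length = match.end(), int(match.group(1))
        return td[start:start + length]

    sample = 'd4:infod4:name11:Show.S01E016:lengthi123eee'
    assert torrent_name(sample) == 'Show.S01E01'

The in-tree version instead steps key lengths from the front of the data, giving up after the first dozen keys, which avoids scanning the whole payload and adding a bencode dependency.
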
diff --git a/sickbeard/search_queue.py b/sickbeard/search_queue.py
index 5f53f37d..58dbbdf3 100644
--- a/sickbeard/search_queue.py
+++ b/sickbeard/search_queue.py
@@ -18,7 +18,6 @@
from __future__ import with_statement
-import time
import traceback
import threading
import datetime
@@ -288,21 +287,29 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
orig_thread_name = threading.currentThread().name
threads = []
- logger.log('Updating provider caches with recent upload data')
-
providers = [x for x in sickbeard.providers.sortedProviderList() if x.is_active() and x.enable_recentsearch]
for cur_provider in providers:
- # spawn separate threads for each provider so we don't need to wait for providers with slow network operation
+ if not cur_provider.cache.should_update():
+ continue
+
+ if not threads:
+ logger.log('Updating provider caches with recent upload data')
+
+ # spawn a thread per provider so a slow response from one does not block the others
threads.append(threading.Thread(target=cur_provider.cache.updateCache,
name='%s :: [%s]' % (orig_thread_name, cur_provider.name)))
# start the thread we just created
threads[-1].start()
- # wait for all threads to finish
- for t in threads:
- t.join()
+ if not len(providers):
+ logger.log('No NZB/Torrent sources enabled in Search Provider options for cache update', logger.WARNING)
- logger.log('Finished updating provider caches')
+ if threads:
+ # wait for all threads to finish
+ for t in threads:
+ t.join()
+
+ logger.log('Finished updating provider caches')
class ProperSearchQueueItem(generic_queue.QueueItem):
@@ -427,7 +434,7 @@ class FailedQueueItem(generic_queue.QueueItem):
history.logFailed(epObj, release, provider)
failed_history.revertEpisode(epObj)
- logger.log(u'Beginning failed download search for: []' % epObj.prettyName())
+ logger.log(u'Beginning failed download search for: [%s]' % epObj.prettyName())
search_result = search.search_providers(self.show, self.segment, True)
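
Pieced together, the RecentSearchQueueItem loop after this patch reads roughly as below (provider objects are stand-ins; logging simplified). The 'Updating provider caches' message now only appears when at least one cache is actually due, and the join only runs for threads that were started:

    import threading

    def update_caches(providers, orig_thread_name='RECENT-SEARCH'):
        threads = []
        for cur_provider in providers:
            if not cur_provider.cache.should_update():
                continue  # respect each provider's update_freq
            if not threads:
                print('Updating provider caches with recent upload data')  # log once, lazily
            threads.append(threading.Thread(target=cur_provider.cache.updateCache,
                                            name='%s :: [%s]' % (orig_thread_name, cur_provider.name)))
            threads[-1].start()
        if threads:
            for t in threads:
                t.join()
            print('Finished updating provider caches')
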
diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py
index 39d0e3e1..195b6453 100644
--- a/sickbeard/tvcache.py
+++ b/sickbeard/tvcache.py
@@ -32,6 +32,7 @@ from name_parser.parser import NameParser, InvalidNameException, InvalidShowExce
from sickbeard.rssfeeds import RSSFeeds
import itertools
+
class CacheDBConnection(db.DBConnection):
def __init__(self, providerName):
db.DBConnection.__init__(self, 'cache.db')
@@ -44,6 +45,7 @@ class CacheDBConnection(db.DBConnection):
if str(e) != 'table lastUpdate already exists':
raise
+
class TVCache:
def __init__(self, provider):
@@ -56,7 +58,7 @@ class TVCache:
return CacheDBConnection(self.providerID)
def _clearCache(self):
- if self.shouldClearCache():
+ if self.should_clear_cache():
myDB = self.get_db()
myDB.action('DELETE FROM provider_cache WHERE provider = ?', [self.providerID])
@@ -81,21 +83,16 @@ class TVCache:
logger.log(u'Authentication error: ' + ex(e), logger.ERROR)
return []
- if self.shouldUpdate():
- # as long as the http request worked we count this as an update
+ if self.should_update():
data = self._cache_data()
- if not data:
- return []
# clear cache
- self._clearCache()
-
- # set updated
- self.setLastUpdate()
+ if data:
+ self._clearCache()
# parse data
cl = []
- for item in data:
+ for item in data or []:
title, url = self._title_and_url(item)
ci = self._parseItem(title, url)
if ci is not None:
@@ -105,6 +102,9 @@ class TVCache:
myDB = self.get_db()
myDB.mass_action(cl)
+ # mark the cache as updated at the time of this fetch attempt, even when no data was returned
+ self.setLastUpdate()
+
return []
def getRSSFeed(self, url, **kwargs):
@@ -180,21 +180,13 @@ class TVCache:
lastUpdate = property(_getLastUpdate)
lastSearch = property(_getLastSearch)
- def shouldUpdate(self):
+ def should_update(self):
# if we've updated recently then skip the update
- if datetime.datetime.today() - self.lastUpdate < datetime.timedelta(minutes=self.update_freq):
- logger.log(u'Last update was too soon, using old cache: today()-' + str(self.lastUpdate) + '<' + str(
- datetime.timedelta(minutes=self.update_freq)), logger.DEBUG)
- return False
+ return datetime.datetime.today() - self.lastUpdate >= datetime.timedelta(minutes=self.update_freq)
- return True
-
- def shouldClearCache(self):
+ def should_clear_cache(self):
# if recent search hasn't used our previous results yet then don't clear the cache
- if self.lastUpdate > self.lastSearch:
- return False
-
- return True
+ return self.lastSearch >= self.lastUpdate
def add_cache_entry(self, name, url, parse_result=None, indexer_id=0):
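
The should_update/should_clear_cache rewrites above are behaviour-preserving boolean simplifications (minus the dropped debug log). A worked check with arbitrary timestamps:

    import datetime

    update_freq = 15  # minutes
    last_update = datetime.datetime(2016, 1, 1, 12, 0)

    # 10 minutes later: not due yet
    now = datetime.datetime(2016, 1, 1, 12, 10)
    assert not (now - last_update >= datetime.timedelta(minutes=update_freq))

    # 15 minutes later: due again
    now = datetime.datetime(2016, 1, 1, 12, 15)
    assert now - last_update >= datetime.timedelta(minutes=update_freq)

    # old should_clear_cache returned False when lastUpdate > lastSearch,
    # i.e. True exactly when lastSearch >= lastUpdate
    last_search = datetime.datetime(2016, 1, 1, 12, 20)
    assert last_search >= last_update
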
diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py
index e1774d59..744d1600 100644
--- a/sickbeard/webserve.py
+++ b/sickbeard/webserve.py
@@ -4662,10 +4662,7 @@ class ConfigProviders(Config):
providerDict[name].key = key
# a 0 in the key spot indicates that no key is needed
- if key == '0':
- providerDict[name].needs_auth = False
- else:
- providerDict[name].needs_auth = True
+ providerDict[name].needs_auth = '0' != key
return providerDict[name].get_id() + '|' + providerDict[name].config_str()
@@ -4767,16 +4764,11 @@ class ConfigProviders(Config):
def saveProviders(self, newznab_string='', torrentrss_string='', provider_order=None, **kwargs):
results = []
-
- provider_str_list = provider_order.split()
provider_list = []
- newznabProviderDict = dict(
- zip([x.get_id() for x in sickbeard.newznabProviderList], sickbeard.newznabProviderList))
-
- finishedNames = []
-
- # add all the newznab info we got into our list
+ # add all the newznab info we have into our list
+ newznab_sources = dict(zip([x.get_id() for x in sickbeard.newznabProviderList], sickbeard.newznabProviderList))
+ active_ids = []
if newznab_string:
for curNewznabProviderStr in newznab_string.split('!!!'):
@@ -4789,282 +4781,157 @@ class ConfigProviders(Config):
if starify(cur_key, True):
cur_key = ''
- newProvider = newznab.NewznabProvider(cur_name, cur_url, key=cur_key)
+ new_provider = newznab.NewznabProvider(cur_name, cur_url, key=cur_key)
- cur_id = newProvider.get_id()
+ cur_id = new_provider.get_id()
# if it already exists then update it
- if cur_id in newznabProviderDict:
- newznabProviderDict[cur_id].name = cur_name
- newznabProviderDict[cur_id].url = cur_url
+ if cur_id in newznab_sources:
+ nzb_src = newznab_sources[cur_id]
+
+ nzb_src.name, nzb_src.url, nzb_src.cat_ids = cur_name, cur_url, cur_cat
+
if cur_key:
- newznabProviderDict[cur_id].key = cur_key
- newznabProviderDict[cur_id].cat_ids = cur_cat
+ nzb_src.key = cur_key
+
# a 0 in the key spot indicates that no key is needed
- if cur_key == '0':
- newznabProviderDict[cur_id].needs_auth = False
- else:
- newznabProviderDict[cur_id].needs_auth = True
+ nzb_src.needs_auth = '0' != cur_key
- try:
- newznabProviderDict[cur_id].search_mode = str(kwargs[cur_id + '_search_mode']).strip()
- except:
- pass
+ attr = 'search_mode'
+ if cur_id + '_' + attr in kwargs:
+ setattr(nzb_src, attr, str(kwargs.get(cur_id + '_' + attr)).strip())
- try:
- newznabProviderDict[cur_id].search_fallback = config.checkbox_to_value(
- kwargs[cur_id + '_search_fallback'])
- except:
- newznabProviderDict[cur_id].search_fallback = 0
+ for attr in ['search_fallback', 'enable_recentsearch', 'enable_backlog']:
+ setattr(nzb_src, attr, config.checkbox_to_value(kwargs.get(cur_id + '_' + attr)))
- try:
- newznabProviderDict[cur_id].enable_recentsearch = config.checkbox_to_value(
- kwargs[cur_id + '_enable_recentsearch'])
- except:
- newznabProviderDict[cur_id].enable_recentsearch = 0
-
- try:
- newznabProviderDict[cur_id].enable_backlog = config.checkbox_to_value(
- kwargs[cur_id + '_enable_backlog'])
- except:
- newznabProviderDict[cur_id].enable_backlog = 0
else:
- sickbeard.newznabProviderList.append(newProvider)
+ sickbeard.newznabProviderList.append(new_provider)
- finishedNames.append(cur_id)
+ active_ids.append(cur_id)
# delete anything that is missing
- for curProvider in sickbeard.newznabProviderList:
- if curProvider.get_id() not in finishedNames:
- sickbeard.newznabProviderList.remove(curProvider)
-
- torrentRssProviderDict = dict(
- zip([x.get_id() for x in sickbeard.torrentRssProviderList], sickbeard.torrentRssProviderList))
- finishedNames = []
+ for source in [x for x in sickbeard.newznabProviderList if x.get_id() not in active_ids]:
+ sickbeard.newznabProviderList.remove(source)
+ # add all the torrent RSS info we have into our list
+ torrent_rss_sources = dict(zip([x.get_id() for x in sickbeard.torrentRssProviderList],
+ sickbeard.torrentRssProviderList))
+ active_ids = []
if torrentrss_string:
for curTorrentRssProviderStr in torrentrss_string.split('!!!'):
if not curTorrentRssProviderStr:
continue
- curName, curURL, curCookies = curTorrentRssProviderStr.split('|')
- curURL = config.clean_url(curURL, False)
+ cur_name, cur_url, cur_cookies = curTorrentRssProviderStr.split('|')
+ cur_url = config.clean_url(cur_url, False)
- if starify(curCookies, True):
- curCookies = ''
+ if starify(cur_cookies, True):
+ cur_cookies = ''
- newProvider = rsstorrent.TorrentRssProvider(curName, curURL, curCookies)
+ new_provider = rsstorrent.TorrentRssProvider(cur_name, cur_url, cur_cookies)
- curID = newProvider.get_id()
+ cur_id = new_provider.get_id()
# if it already exists then update it
- if curID in torrentRssProviderDict:
- torrentRssProviderDict[curID].name = curName
- torrentRssProviderDict[curID].url = curURL
- if curCookies:
- torrentRssProviderDict[curID].cookies = curCookies
+ if cur_id in torrent_rss_sources:
+ torrent_rss_sources[cur_id].name = cur_name
+ torrent_rss_sources[cur_id].url = cur_url
+ if cur_cookies:
+ torrent_rss_sources[cur_id].cookies = cur_cookies
else:
- sickbeard.torrentRssProviderList.append(newProvider)
+ sickbeard.torrentRssProviderList.append(new_provider)
- finishedNames.append(curID)
+ active_ids.append(cur_id)
# delete anything that is missing
- for curProvider in sickbeard.torrentRssProviderList:
- if curProvider.get_id() not in finishedNames:
- sickbeard.torrentRssProviderList.remove(curProvider)
+ for source in [x for x in sickbeard.torrentRssProviderList if x.get_id() not in active_ids]:
+ sickbeard.torrentRssProviderList.remove(source)
- # do the enable/disable
- for curProviderStr in provider_str_list:
- curProvider, curEnabled = curProviderStr.split(':')
- curEnabled = config.to_int(curEnabled)
+ # enable/disable states of source providers
+ provider_str_list = provider_order.split()
+ sources = dict(zip([x.get_id() for x in sickbeard.providers.sortedProviderList()],
+ sickbeard.providers.sortedProviderList()))
+ for cur_src_str in provider_str_list:
+ src_name, src_enabled = cur_src_str.split(':')
- curProvObj = [x for x in sickbeard.providers.sortedProviderList() if
- x.get_id() == curProvider and hasattr(x, 'enabled')]
- if curProvObj:
- curProvObj[0].enabled = bool(curEnabled)
+ provider_list.append(src_name)
+ src_enabled = bool(config.to_int(src_enabled))
- provider_list.append(curProvider)
- if curProvider in newznabProviderDict:
- newznabProviderDict[curProvider].enabled = bool(curEnabled)
- elif curProvider in torrentRssProviderDict:
- torrentRssProviderDict[curProvider].enabled = bool(curEnabled)
+ if src_name in sources and hasattr(sources[src_name], 'enabled'):
+ sources[src_name].enabled = src_enabled
- # dynamically load provider settings
- for curTorrentProvider in [curProvider for curProvider in sickbeard.providers.sortedProviderList() if
- curProvider.providerType == sickbeard.GenericProvider.TORRENT]:
+ if src_name in newznab_sources:
+ newznab_sources[src_name].enabled = src_enabled
+ elif src_name in torrent_rss_sources:
+ torrent_rss_sources[src_name].enabled = src_enabled
- if hasattr(curTorrentProvider, '_seed_ratio'):
- try:
- curTorrentProvider._seed_ratio = str(kwargs[curTorrentProvider.get_id() + '_ratio']).strip()
- except:
- curTorrentProvider._seed_ratio = None
+ # update torrent source settings
+ for torrent_src in [src for src in sickbeard.providers.sortedProviderList()
+ if sickbeard.GenericProvider.TORRENT == src.providerType]:
+ src_id_prefix = torrent_src.get_id() + '_'
- if hasattr(curTorrentProvider, 'seed_time') and curTorrentProvider.get_id() + '_seed_time' in kwargs:
- curTorrentProvider.seed_time = config.to_int(str(kwargs[curTorrentProvider.get_id() + '_seed_time']).strip(), 0)
+ attr = 'url_edit'
+ if getattr(torrent_src, attr, None):
+ url_edit = ','.join(set(['%s' % url.strip() for url in kwargs.get(
+ src_id_prefix + attr, '').split(',')]))
+ torrent_src.url_home = ([url_edit], [])[not url_edit]
- if hasattr(curTorrentProvider, 'minseed'):
- try:
- curTorrentProvider.minseed = int(str(kwargs[curTorrentProvider.get_id() + '_minseed']).strip())
- except:
- curTorrentProvider.minseed = 0
+ for attr in [x for x in ['username', 'uid'] if hasattr(torrent_src, x)]:
+ setattr(torrent_src, attr, str(kwargs.get(src_id_prefix + attr, '')).strip())
- if hasattr(curTorrentProvider, 'minleech'):
- try:
- curTorrentProvider.minleech = int(str(kwargs[curTorrentProvider.get_id() + '_minleech']).strip())
- except:
- curTorrentProvider.minleech = 0
+ for attr in [x for x in ['password', 'api_key', 'passkey', 'digest', 'hash'] if hasattr(torrent_src, x)]:
+ key = str(kwargs.get(src_id_prefix + attr, '')).strip()
+ if 'password' == attr:
+ set('*') != set(key) and setattr(torrent_src, attr, key)
+ elif not starify(key, True):
+ setattr(torrent_src, attr, key)
- if hasattr(curTorrentProvider, 'digest'):
- try:
- key = str(kwargs[curTorrentProvider.get_id() + '_digest']).strip()
- if not starify(key, True):
- curTorrentProvider.digest = key
- except:
- curTorrentProvider.digest = None
+ attr = 'ratio'
+ if hasattr(torrent_src, '_seed_' + attr):
+ setattr(torrent_src, '_seed_' + attr, kwargs.get(src_id_prefix + attr, '').strip() or None)
- if hasattr(curTorrentProvider, 'hash'):
- try:
- key = str(kwargs[curTorrentProvider.get_id() + '_hash']).strip()
- if not starify(key, True):
- curTorrentProvider.hash = key
- except:
- curTorrentProvider.hash = None
+ for attr in [x for x in ['minseed', 'minleech'] if hasattr(torrent_src, x)]:
+ setattr(torrent_src, attr, config.to_int(str(kwargs.get(src_id_prefix + attr)).strip()))
- if hasattr(curTorrentProvider, 'api_key'):
- try:
- key = str(kwargs[curTorrentProvider.get_id() + '_api_key']).strip()
- if not starify(key, True):
- curTorrentProvider.api_key = key
- except:
- curTorrentProvider.api_key = None
+ for attr in [x for x in ['confirmed', 'freeleech', 'reject_m2ts', 'enable_recentsearch',
+ 'enable_backlog', 'search_fallback'] if hasattr(torrent_src, x)]:
+ setattr(torrent_src, attr, config.checkbox_to_value(kwargs.get(src_id_prefix + attr)))
- if hasattr(curTorrentProvider, 'username'):
- try:
- curTorrentProvider.username = str(kwargs[curTorrentProvider.get_id() + '_username']).strip()
- except:
- curTorrentProvider.username = None
+ attr = 'seed_time'
+ if hasattr(torrent_src, attr) and src_id_prefix + attr in kwargs:
+ setattr(torrent_src, attr, config.to_int(str(kwargs.get(src_id_prefix + attr)).strip()))
- if hasattr(curTorrentProvider, 'password'):
- try:
- key = str(kwargs[curTorrentProvider.get_id() + '_password']).strip()
- if set('*') != set(key):
- curTorrentProvider.password = key
- except:
- curTorrentProvider.password = None
+ attr = 'search_mode'
+ if hasattr(torrent_src, attr):
+ setattr(torrent_src, attr, str(kwargs.get(src_id_prefix + attr, '')).strip() or 'eponly')
- if hasattr(curTorrentProvider, 'passkey'):
- try:
- key = str(kwargs[curTorrentProvider.get_id() + '_passkey']).strip()
- if not starify(key, True):
- curTorrentProvider.passkey = key
- except:
- curTorrentProvider.passkey = None
+ # update nzb source settings
+ for nzb_src in [src for src in sickbeard.providers.sortedProviderList() if
+ sickbeard.GenericProvider.NZB == src.providerType]:
+ src_id_prefix = nzb_src.get_id() + '_'
- if hasattr(curTorrentProvider, 'confirmed'):
- try:
- curTorrentProvider.confirmed = config.checkbox_to_value(
- kwargs[curTorrentProvider.get_id() + '_confirmed'])
- except:
- curTorrentProvider.confirmed = 0
+ attr = 'api_key'
+ if hasattr(nzb_src, attr):
+ key = str(kwargs.get(src_id_prefix + attr, '')).strip()
+ if not starify(key, True):
+ setattr(nzb_src, attr, key)
- if hasattr(curTorrentProvider, 'proxy'):
- try:
- curTorrentProvider.proxy.enabled = config.checkbox_to_value(
- kwargs[curTorrentProvider.get_id() + '_proxy'])
- except:
- curTorrentProvider.proxy.enabled = 0
+ attr = 'username'
+ if hasattr(nzb_src, attr):
+ setattr(nzb_src, attr, str(kwargs.get(src_id_prefix + attr, '')).strip() or None)
- if hasattr(curTorrentProvider.proxy, 'url'):
- try:
- curTorrentProvider.proxy.url = str(kwargs[curTorrentProvider.get_id() + '_proxy_url']).strip()
- except:
- curTorrentProvider.proxy.url = None
+ attr = 'search_mode'
+ if hasattr(nzb_src, attr):
+ setattr(nzb_src, attr, str(kwargs.get(src_id_prefix + attr, '')).strip() or 'eponly')
- if hasattr(curTorrentProvider, 'freeleech'):
- try:
- curTorrentProvider.freeleech = config.checkbox_to_value(
- kwargs[curTorrentProvider.get_id() + '_freeleech'])
- except:
- curTorrentProvider.freeleech = 0
+ attr = 'enable_recentsearch'
+ if hasattr(nzb_src, attr):
+ setattr(nzb_src, attr, config.checkbox_to_value(kwargs.get(src_id_prefix + attr)) or
+ not getattr(nzb_src, 'supports_backlog', True))
- if hasattr(curTorrentProvider, 'reject_m2ts'):
- try:
- curTorrentProvider.reject_m2ts = config.checkbox_to_value(
- kwargs[curTorrentProvider.get_id() + '_reject_m2ts'])
- except:
- curTorrentProvider.reject_m2ts = 0
-
- if hasattr(curTorrentProvider, 'search_mode'):
- try:
- curTorrentProvider.search_mode = str(kwargs[curTorrentProvider.get_id() + '_search_mode']).strip()
- except:
- curTorrentProvider.search_mode = 'eponly'
-
- if hasattr(curTorrentProvider, 'search_fallback'):
- try:
- curTorrentProvider.search_fallback = config.checkbox_to_value(
- kwargs[curTorrentProvider.get_id() + '_search_fallback'])
- except:
- curTorrentProvider.search_fallback = 0 # these exceptions are catching unselected checkboxes
-
- if hasattr(curTorrentProvider, 'enable_recentsearch'):
- try:
- curTorrentProvider.enable_recentsearch = config.checkbox_to_value(
- kwargs[curTorrentProvider.get_id() + '_enable_recentsearch'])
- except:
- curTorrentProvider.enable_recentsearch = 0 # these exceptions are actually catching unselected checkboxes
-
- if hasattr(curTorrentProvider, 'enable_backlog'):
- try:
- curTorrentProvider.enable_backlog = config.checkbox_to_value(
- kwargs[curTorrentProvider.get_id() + '_enable_backlog'])
- except:
- curTorrentProvider.enable_backlog = 0 # these exceptions are actually catching unselected checkboxes
-
- for curNzbProvider in [curProvider for curProvider in sickbeard.providers.sortedProviderList() if
- curProvider.providerType == sickbeard.GenericProvider.NZB]:
-
- if hasattr(curNzbProvider, 'api_key'):
- try:
- key = str(kwargs[curNzbProvider.get_id() + '_api_key']).strip()
- if not starify(key, True):
- curNzbProvider.api_key = key
- except:
- curNzbProvider.api_key = None
-
- if hasattr(curNzbProvider, 'username'):
- try:
- curNzbProvider.username = str(kwargs[curNzbProvider.get_id() + '_username']).strip()
- except:
- curNzbProvider.username = None
-
- if hasattr(curNzbProvider, 'search_mode'):
- try:
- curNzbProvider.search_mode = str(kwargs[curNzbProvider.get_id() + '_search_mode']).strip()
- except:
- curNzbProvider.search_mode = 'eponly'
-
- if hasattr(curNzbProvider, 'search_fallback'):
- try:
- curNzbProvider.search_fallback = config.checkbox_to_value(
- kwargs[curNzbProvider.get_id() + '_search_fallback'])
- except:
- curNzbProvider.search_fallback = 0 # these exceptions are actually catching unselected checkboxes
-
- if hasattr(curNzbProvider, 'enable_recentsearch'):
- try:
- curNzbProvider.enable_recentsearch = config.checkbox_to_value(
- kwargs[curNzbProvider.get_id() + '_enable_recentsearch'])
- except:
- curNzbProvider.enable_recentsearch = 0 # these exceptions are actually catching unselected checkboxes
-
- if hasattr(curNzbProvider, 'enable_backlog'):
- try:
- curNzbProvider.enable_backlog = config.checkbox_to_value(
- kwargs[curNzbProvider.get_id() + '_enable_backlog'])
- except:
- curNzbProvider.enable_backlog = 0 # these exceptions are actually catching unselected checkboxes
+ for attr in [x for x in ['search_fallback', 'enable_backlog'] if hasattr(nzb_src, x)]:
+ setattr(nzb_src, attr, config.checkbox_to_value(kwargs.get(src_id_prefix + attr)))
sickbeard.NEWZNAB_DATA = '!!!'.join([x.config_str() for x in sickbeard.newznabProviderList])
sickbeard.PROVIDER_ORDER = provider_list
@@ -5073,11 +4940,10 @@ class ConfigProviders(Config):
sickbeard.save_config()
- if len(results) > 0:
+ if 0 < len(results):
for x in results:
logger.log(x, logger.ERROR)
- ui.notifications.error('Error(s) Saving Configuration',
- ' \n'.join(results))
+ ui.notifications.error('Error(s) Saving Configuration', ' \n'.join(results))
else:
ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickbeard.CONFIG_FILE))
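
The saveProviders rewrite replaces dozens of try/except blocks with data-driven hasattr/setattr loops; the key enabler is kwargs.get(), since an unchecked checkbox simply never appears in the POSTed kwargs, so the old KeyError handling collapses to a None default. A minimal standalone sketch of the pattern (checkbox_to_value is simplified from sickbeard.config; class and key names are illustrative):

    def checkbox_to_value(value):
        # unchecked boxes are absent from kwargs, so None -> 0
        return 1 if value in (True, 'on', '1', 'true') else 0

    class TorrentSrc(object):
        def __init__(self):
            self.freeleech = 0
            self.confirmed = 0  # no reject_m2ts attribute -> that key is skipped

    def save_checkbox_settings(src, src_id_prefix, kwargs):
        for attr in [x for x in ['confirmed', 'freeleech', 'reject_m2ts'] if hasattr(src, x)]:
            setattr(src, attr, checkbox_to_value(kwargs.get(src_id_prefix + attr)))

    src = TorrentSrc()
    save_checkbox_settings(src, 'demo_', {'demo_freeleech': 'on'})
    assert (src.freeleech, src.confirmed) == (1, 0)
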