From b54724c058222209cfc82c3088c18f2d6d995f3c Mon Sep 17 00:00:00 2001 From: Prinz23 Date: Wed, 23 Jul 2014 16:32:41 +0200 Subject: [PATCH 01/23] Remove old code that caused an exception in the iCal feed --- sickbeard/webserve.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py index 232b9461..191fa611 100644 --- a/sickbeard/webserve.py +++ b/sickbeard/webserve.py @@ -416,8 +416,6 @@ class MainHandler(RequestHandler): logger.log(u"Receiving iCal request from %s" % self.request.remote_ip) - poster_url = self.request.url().replace('ical', '') - time_re = re.compile('([0-9]{1,2})\:([0-9]{2})(\ |)([AM|am|PM|pm]{2})') # Create a iCal string From 8313e12267e5bee473f7054ccd8e8c5c84c69105 Mon Sep 17 00:00:00 2001 From: Prinz23 Date: Wed, 23 Jul 2014 16:42:11 +0200 Subject: [PATCH 02/23] The RegEx is also not needed anymore --- sickbeard/webserve.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py index 191fa611..753660ca 100644 --- a/sickbeard/webserve.py +++ b/sickbeard/webserve.py @@ -416,8 +416,6 @@ class MainHandler(RequestHandler): logger.log(u"Receiving iCal request from %s" % self.request.remote_ip) - time_re = re.compile('([0-9]{1,2})\:([0-9]{2})(\ |)([AM|am|PM|pm]{2})') - # Create a iCal string ical = 'BEGIN:VCALENDAR\r\n' ical += 'VERSION:2.0\r\n' From 32d0552f03e3e14889f452cb6e57c533abcfec4f Mon Sep 17 00:00:00 2001 From: Matthew Haughton <3flex@users.noreply.github.com> Date: Thu, 24 Jul 2014 20:25:58 -0400 Subject: [PATCH 03/23] fix episode filtering Checking the filtering checkboxes on episode listings did nothing on the first checkbox change, then did the opposite of what it was supposed to (i.e. checking the checkbox would hide the episodes and vice versa). Commit 95d7d728e00e225dd67a6f0eede10b41e5571ba8 eliminated the 'return' statement in the function - better to remove the code that the 'return' was skipping over anyway. This fixes the issue. --- gui/slick/js/displayShow.js | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/gui/slick/js/displayShow.js b/gui/slick/js/displayShow.js index 63994e1e..85879664 100644 --- a/gui/slick/js/displayShow.js +++ b/gui/slick/js/displayShow.js @@ -117,10 +117,6 @@ $(document).ready(function () { $("#checkboxControls input").change(function (e) { var whichClass = $(this).attr('id'); $(this).showHideRows(whichClass); - - $('tr.' + whichClass).each(function (i) { - $(this).toggle(); - }); }); // initially show/hide all the rows according to the checkboxes @@ -273,4 +269,4 @@ $(document).ready(function () { height:120 }); }); -}); \ No newline at end of file +}); From 877c35cb164d4c71700c28e7499600ffd5b20c21 Mon Sep 17 00:00:00 2001 From: Alex Date: Sat, 26 Jul 2014 21:46:00 +0200 Subject: [PATCH 04/23] Fixed: pushbullet notifications don't work Apparently the Pushbullet API URL has changed. I suspect the devices URL (and maybe the code) will also need updating in the future.
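For context, a minimal sketch of a push against the new v2 endpoint using plain requests; this is not the notifier's actual code, and the token, title, and body are placeholders:

# Hedged sketch only: Pushbullet's v2 push endpoint, which replaces /api/pushes.
import requests

def push_note(access_token, title, body):
    # Pushbullet accepts the access token in the Access-Token header
    resp = requests.post(
        'https://api.pushbullet.com/v2/pushes',
        json={'type': 'note', 'title': title, 'body': body},
        headers={'Access-Token': access_token},
        timeout=30,
    )
    resp.raise_for_status()  # surface HTTP errors instead of failing silently
    return resp.json()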
--- sickbeard/notifiers/pushbullet.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sickbeard/notifiers/pushbullet.py b/sickbeard/notifiers/pushbullet.py index 81c51bc5..b4fa360c 100644 --- a/sickbeard/notifiers/pushbullet.py +++ b/sickbeard/notifiers/pushbullet.py @@ -67,7 +67,7 @@ class PushbulletNotifier: pushbullet_device = sickbeard.PUSHBULLET_DEVICE if method == 'POST': - uri = '/api/pushes' + uri = '/v2/pushes' else: uri = '/api/devices' From fcded3c3cf474b7e28371cf84aa748e77ee3358a Mon Sep 17 00:00:00 2001 From: echel0n Date: Sat, 26 Jul 2014 14:49:54 -0700 Subject: [PATCH 05/23] PEP8 Cleanups --- sickbeard/scheduler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sickbeard/scheduler.py b/sickbeard/scheduler.py index 40289643..6d69dc79 100644 --- a/sickbeard/scheduler.py +++ b/sickbeard/scheduler.py @@ -52,7 +52,7 @@ class Scheduler(threading.Thread): def run(self): - while(not self.stop.is_set()): + while not self.stop.is_set(): current_time = datetime.datetime.now() should_run = False From 14c354b55160f57dd76f83e44de5ed0576f35402 Mon Sep 17 00:00:00 2001 From: echel0n Date: Sun, 27 Jul 2014 03:59:21 -0700 Subject: [PATCH 06/23] Added proxy support to Indexer APIs. Provider getURL and downloadResult functions are now removed and replaced with ones from helpers.py, to help slim the code down and allow better control over request sessions. Removed TVTumbler code. Fixed HDBits provider. Fixed config settings that were meant to be booleans but instead were set as str or int; should help resolve random HTML errors. XEM Refresh check re-coded. NameParser code for creating the show object has been changed to only attempt it at the very end, once it has found the bestMatch result; this saves resources and improves performance. --- lib/tvdb_api/tvdb_api.py | 36 ++- lib/tvrage_api/tvrage_api.py | 34 ++- sickbeard/__init__.py | 360 +++++++++++++-------------- sickbeard/common.py | 4 +- sickbeard/gh_api.py | 10 +- sickbeard/helpers.py | 254 +++++++++++-------- sickbeard/indexers/indexer_api.py | 3 + sickbeard/indexers/indexer_config.py | 4 +- sickbeard/metadata/helpers.py | 3 +- sickbeard/name_parser/parser.py | 233 ++++++++--------- sickbeard/network_timezones.py | 2 - sickbeard/nzbSplitter.py | 1 - sickbeard/nzbget.py | 2 +- sickbeard/providers/bitsoup.py | 40 +-- sickbeard/providers/btn.py | 1 - sickbeard/providers/ezrss.py | 12 +- sickbeard/providers/freshontv.py | 26 -- sickbeard/providers/generic.py | 92 +++---- sickbeard/providers/hdbits.py | 40 +-- sickbeard/providers/hdtorrents.py | 23 -- sickbeard/providers/iptorrents.py | 24 -- sickbeard/providers/kat.py | 78 ------ sickbeard/providers/newzbin.py | 19 -- sickbeard/providers/nextgen.py | 134 ++++------ sickbeard/providers/omgwtfnzbs.py | 11 +- sickbeard/providers/publichd.py | 69 ----- sickbeard/providers/rsstorrent.py | 40 +-- sickbeard/providers/scc.py | 43 +--- sickbeard/providers/speedcd.py | 39 +-- sickbeard/providers/thepiratebay.py | 84 +------ sickbeard/providers/torrentbytes.py | 26 -- sickbeard/providers/torrentday.py | 30 +-- sickbeard/providers/torrentleech.py | 26 -- sickbeard/rssfeeds.py | 43 ++-- sickbeard/scene_exceptions.py | 9 +- sickbeard/scene_numbering.py | 133 +++++----- sickbeard/search.py | 9 +- sickbeard/search_queue.py | 2 +- sickbeard/show_queue.py | 3 +- sickbeard/tv.py | 8 +- sickbeard/tvcache.py | 16 +- sickbeard/tvtumbler.py | 47 ---- sickbeard/versionChecker.py | 25 +- sickbeard/webapi.py | 51 ++-- sickbeard/webserve.py | 26 +- 45 files changed, 781 insertions(+), 1394 deletions(-)
delete mode 100644 sickbeard/tvtumbler.py diff --git a/lib/tvdb_api/tvdb_api.py b/lib/tvdb_api/tvdb_api.py index 444d9958..e9837a79 100644 --- a/lib/tvdb_api/tvdb_api.py +++ b/lib/tvdb_api/tvdb_api.py @@ -6,6 +6,7 @@ #license:unlicense (http://unlicense.org/) from functools import wraps +import traceback __author__ = "dbr/Ben" __version__ = "1.9" @@ -21,7 +22,7 @@ import logging import zipfile import datetime as dt import requests -import cachecontrol +import requests.exceptions import xmltodict try: @@ -35,7 +36,7 @@ except ImportError: gzip = None from lib.dateutil.parser import parse -from cachecontrol import caches +from lib.cachecontrol import CacheControl, caches from tvdb_ui import BaseUI, ConsoleUI from tvdb_exceptions import (tvdb_error, tvdb_userabort, tvdb_shownotfound, @@ -366,7 +367,8 @@ class Tvdb: apikey=None, forceConnect=False, useZip=False, - dvdorder=False): + dvdorder=False, + proxy=None): """interactive (True/False): When True, uses built-in console UI is used to select the correct show. @@ -464,16 +466,18 @@ class Tvdb: self.config['dvdorder'] = dvdorder + self.config['proxy'] = proxy + if cache is True: self.config['cache_enabled'] = True self.config['cache_location'] = self._getTempDir() - self.sess = cachecontrol.CacheControl(cache=caches.FileCache(self.config['cache_location'])) + self.sess = CacheControl(cache=caches.FileCache(self.config['cache_location'])) elif cache is False: self.config['cache_enabled'] = False elif isinstance(cache, basestring): self.config['cache_enabled'] = True self.config['cache_location'] = cache - self.sess = cachecontrol.CacheControl(cache=caches.FileCache(self.config['cache_location'])) + self.sess = CacheControl(cache=caches.FileCache(self.config['cache_location'])) else: raise ValueError("Invalid value for Cache %r (type was %s)" % (cache, type(cache))) @@ -561,18 +565,24 @@ class Tvdb: # get response from TVDB if self.config['cache_enabled']: + if self.config['proxy']: + log().debug("Using proxy for URL: %s" % url) + self.sess.proxies = { + "http": self.config['proxy'], + "https": self.config['proxy'], + } + resp = self.sess.get(url, cache_auto=True, params=params) else: resp = requests.get(url, params=params) - - except requests.HTTPError, e: + except requests.exceptions.HTTPError, e: raise tvdb_error("HTTP error " + str(e.errno) + " while loading URL " + str(url)) - - except requests.ConnectionError, e: + except requests.exceptions.ConnectionError, e: raise tvdb_error("Connection error " + str(e.message) + " while loading URL " + str(url)) - - except requests.Timeout, e: + except requests.exceptions.Timeout, e: raise tvdb_error("Connection timed out " + str(e.message) + " while loading URL " + str(url)) + except Exception: + raise tvdb_error("Unknown exception while loading URL " + url + ": " + traceback.format_exc()) def process(path, key, value): key = key.lower() @@ -703,7 +713,9 @@ class Tvdb: if self.config['custom_ui'] is not None: log().debug("Using custom UI %s" % (repr(self.config['custom_ui']))) - ui = self.config['custom_ui'](config=self.config) + CustomUI = self.config['custom_ui'] + ui = CustomUI(config=self.config) + else: if not self.config['interactive']: log().debug('Auto-selecting first search result using BaseUI') diff --git a/lib/tvrage_api/tvrage_api.py b/lib/tvrage_api/tvrage_api.py index 296819fa..d9aba3ad 100644 --- a/lib/tvrage_api/tvrage_api.py +++ b/lib/tvrage_api/tvrage_api.py @@ -10,6 +10,7 @@ Modified from http://github.com/dbr/tvrage_api Simple-to-use Python interface to The TVRage's API 
(tvrage.com) """ from functools import wraps +import traceback __author__ = "echel0n" __version__ = "1.0" @@ -23,7 +24,7 @@ import warnings import logging import datetime as dt import requests -import cachecontrol +import requests.exceptions import xmltodict try: @@ -32,7 +33,7 @@ except ImportError: import xml.etree.ElementTree as ElementTree from lib.dateutil.parser import parse -from cachecontrol import caches +from cachecontrol import CacheControl, caches from tvrage_ui import BaseUI from tvrage_exceptions import (tvrage_error, tvrage_userabort, tvrage_shownotfound, @@ -283,7 +284,8 @@ class TVRage: apikey=None, forceConnect=False, useZip=False, - dvdorder=False): + dvdorder=False, + proxy=None): """ cache (True/False/str/unicode/urllib2 opener): @@ -316,16 +318,18 @@ class TVRage: self.config['custom_ui'] = custom_ui + self.config['proxy'] = proxy + if cache is True: self.config['cache_enabled'] = True self.config['cache_location'] = self._getTempDir() - self.sess = cachecontrol.CacheControl(cache=caches.FileCache(self.config['cache_location'])) + self.sess = CacheControl(cache=caches.FileCache(self.config['cache_location'])) elif cache is False: self.config['cache_enabled'] = False elif isinstance(cache, basestring): self.config['cache_enabled'] = True self.config['cache_location'] = cache - self.sess = cachecontrol.CacheControl(cache=caches.FileCache(self.config['cache_location'])) + self.sess = CacheControl(cache=caches.FileCache(self.config['cache_location'])) else: raise ValueError("Invalid value for Cache %r (type was %s)" % (cache, type(cache))) @@ -401,18 +405,25 @@ class TVRage: # get response from TVRage if self.config['cache_enabled']: + if self.config['proxy']: + log().debug("Using proxy for URL: %s" % url) + self.sess.proxies = { + "http": self.config['proxy'], + "https": self.config['proxy'], + } + resp = self.sess.get(url.strip(), cache_auto=True, params=params) else: resp = requests.get(url.strip(), params=params) - except requests.HTTPError, e: + except requests.exceptions.HTTPError, e: raise tvrage_error("HTTP error " + str(e.errno) + " while loading URL " + str(url)) - - except requests.ConnectionError, e: + except requests.exceptions.ConnectionError, e: raise tvrage_error("Connection error " + str(e.message) + " while loading URL " + str(url)) - - except requests.Timeout, e: + except requests.exceptions.Timeout, e: raise tvrage_error("Connection timed out " + str(e.message) + " while loading URL " + str(url)) + except Exception: + raise tvrage_error("Unknown exception while loading URL " + url + ": " + traceback.format_exc()) def remap_keys(path, key, value): name_map = { @@ -564,7 +575,8 @@ class TVRage: if self.config['custom_ui'] is not None: log().debug("Using custom UI %s" % (repr(self.config['custom_ui']))) - ui = self.config['custom_ui'](config=self.config) + CustomUI = self.config['custom_ui'] + ui = CustomUI(config=self.config) else: log().debug('Auto-selecting first search result using BaseUI') ui = BaseUI(config=self.config) diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py index f3c928c0..14fa7d2b 100755 --- a/sickbeard/__init__.py +++ b/sickbeard/__init__.py @@ -32,7 +32,8 @@ import sys from sickbeard import providers, metadata, config, webserveInit from sickbeard.providers.generic import GenericProvider from providers import ezrss, tvtorrents, btn, newznab, womble, thepiratebay, torrentleech, kat, iptorrents, \ - omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, nextgen, speedcd, nyaatorrents, fanzub, torrentbytes, animezb, freshontv, bitsoup 
+ omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, nextgen, speedcd, nyaatorrents, fanzub, torrentbytes, animezb, \ + freshontv, bitsoup from sickbeard.config import CheckSection, check_setting_int, check_setting_str, check_setting_float, ConfigMigrator, \ naming_ep_type from sickbeard import searchBacklog, showUpdater, versionChecker, properFinder, autoPostProcesser, \ @@ -98,9 +99,9 @@ metadata_provider_dict = {} NEWEST_VERSION = None NEWEST_VERSION_STRING = None -VERSION_NOTIFY = None -AUTO_UPDATE = None -NOTIFY_ON_UPDATE = None +VERSION_NOTIFY = False +AUTO_UPDATE = False +NOTIFY_ON_UPDATE = False CUR_COMMIT_HASH = None INIT_LOCK = Lock() @@ -119,9 +120,9 @@ WEB_PASSWORD = None WEB_HOST = None WEB_IPV6 = None -PLAY_VIDEOS = None +PLAY_VIDEOS = False -HANDLE_REVERSE_PROXY = None +HANDLE_REVERSE_PROXY = False PROXY_SETTING = None LOCALHOST_IP = None @@ -137,16 +138,15 @@ ENABLE_HTTPS = False HTTPS_CERT = None HTTPS_KEY = None -LAUNCH_BROWSER = None +LAUNCH_BROWSER = False CACHE_DIR = None ACTUAL_CACHE_DIR = None ROOT_DIRS = None -UPDATE_SHOWS_ON_START = None -SORT_ARTICLE = None +UPDATE_SHOWS_ON_START = False +SORT_ARTICLE = False DEBUG = False -CLEAR_CACHE = None -USE_LISTVIEW = None +USE_LISTVIEW = False METADATA_XBMC = None METADATA_XBMC_12PLUS = None METADATA_MEDIABROWSER = None @@ -157,42 +157,42 @@ METADATA_MEDE8ER = None QUALITY_DEFAULT = None STATUS_DEFAULT = None -FLATTEN_FOLDERS_DEFAULT = None -SUBTITLES_DEFAULT = None +FLATTEN_FOLDERS_DEFAULT = False +SUBTITLES_DEFAULT = False INDEXER_DEFAULT = None INDEXER_TIMEOUT = None -SCENE_DEFAULT = None -ANIME_DEFAULT = None +SCENE_DEFAULT = False +ANIME_DEFAULT = False PROVIDER_ORDER = [] -NAMING_MULTI_EP = None +NAMING_MULTI_EP = False NAMING_PATTERN = None NAMING_ABD_PATTERN = None -NAMING_CUSTOM_ABD = None +NAMING_CUSTOM_ABD = False NAMING_SPORTS_PATTERN = None -NAMING_CUSTOM_SPORTS = None +NAMING_CUSTOM_SPORTS = False NAMING_FORCE_FOLDERS = False -NAMING_STRIP_YEAR = None +NAMING_STRIP_YEAR = False NAMING_ANIME = None -USE_NZBS = None -USE_TORRENTS = None +USE_NZBS = False +USE_TORRENTS = False NZB_METHOD = None NZB_DIR = None USENET_RETENTION = None TORRENT_METHOD = None TORRENT_DIR = None -DOWNLOAD_PROPERS = None +DOWNLOAD_PROPERS = False CHECK_PROPERS_INTERVAL = None -ALLOW_HIGH_PRIORITY = None +ALLOW_HIGH_PRIORITY = False AUTOPOSTPROCESSER_FREQUENCY = None DAILYSEARCH_FREQUENCY = None UPDATE_FREQUENCY = None BACKLOG_FREQUENCY = None -DAILYSEARCH_STARTUP = None -BACKLOG_STARTUP = None +DAILYSEARCH_STARTUP = False +BACKLOG_STARTUP = False MIN_AUTOPOSTPROCESSER_FREQUENCY = 1 MIN_BACKLOG_FREQUENCY = 10 @@ -203,8 +203,8 @@ DEFAULT_BACKLOG_FREQUENCY = 10080 DEFAULT_DAILYSEARCH_FREQUENCY = 60 DEFAULT_UPDATE_FREQUENCY = 1 -ADD_SHOWS_WO_DIR = None -CREATE_MISSING_SHOW_DIRS = None +ADD_SHOWS_WO_DIR = False +CREATE_MISSING_SHOW_DIRS = False RENAME_EPISODES = False AIRDATE_EPISODES = False PROCESS_AUTOMATICALLY = False @@ -250,7 +250,7 @@ TORRENT_SEED_TIME = None TORRENT_PAUSED = False TORRENT_HIGH_BANDWIDTH = False TORRENT_LABEL = '' -TORRENT_VERIFY_CERT = True +TORRENT_VERIFY_CERT = False USE_XBMC = False XBMC_ALWAYS_ON = True @@ -331,7 +331,7 @@ ANIMESUPPORT = False USE_ANIDB = False ANIDB_USERNAME = None ANIDB_PASSWORD = None -ANIDB_USE_MYLIST = 0 +ANIDB_USE_MYLIST = False ADBA_CONNECTION = None ANIME_SPLIT_HOME = False @@ -403,9 +403,9 @@ EMAIL_LIST = None GUI_NAME = None HOME_LAYOUT = None HISTORY_LAYOUT = None -DISPLAY_SHOW_SPECIALS = None +DISPLAY_SHOW_SPECIALS = False COMING_EPS_LAYOUT = None -COMING_EPS_DISPLAY_PAUSED = None 
+COMING_EPS_DISPLAY_PAUSED = False COMING_EPS_SORT = None COMING_EPS_MISSED_RANGE = None FUZZY_DATING = False @@ -438,6 +438,8 @@ TMDB_API_KEY = 'edc5f123313769de83a71e157758030b' TRAKT_API_KEY = 'abd806c54516240c76e4ebc9c5ccf394' __INITIALIZED__ = False + + def initialize(consoleLogging=True): with INIT_LOCK: @@ -474,7 +476,7 @@ def initialize(consoleLogging=True): USE_SYNOLOGYNOTIFIER, SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH, SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD, SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD, \ USE_EMAIL, EMAIL_HOST, EMAIL_PORT, EMAIL_TLS, EMAIL_USER, EMAIL_PASSWORD, EMAIL_FROM, EMAIL_NOTIFY_ONSNATCH, EMAIL_NOTIFY_ONDOWNLOAD, EMAIL_NOTIFY_ONSUBTITLEDOWNLOAD, EMAIL_LIST, \ USE_LISTVIEW, METADATA_XBMC, METADATA_XBMC_12PLUS, METADATA_MEDIABROWSER, METADATA_PS3, metadata_provider_dict, \ - NEWZBIN, NEWZBIN_USERNAME, NEWZBIN_PASSWORD, GIT_PATH, MOVE_ASSOCIATED_FILES, CLEAR_CACHE, dailySearchScheduler, NFO_RENAME, \ + NEWZBIN, NEWZBIN_USERNAME, NEWZBIN_PASSWORD, GIT_PATH, MOVE_ASSOCIATED_FILES, dailySearchScheduler, NFO_RENAME, \ GUI_NAME, HOME_LAYOUT, HISTORY_LAYOUT, DISPLAY_SHOW_SPECIALS, COMING_EPS_LAYOUT, COMING_EPS_SORT, COMING_EPS_DISPLAY_PAUSED, COMING_EPS_MISSED_RANGE, FUZZY_DATING, TRIM_ZERO, DATE_PRESET, TIME_PRESET, TIME_PRESET_W_SECONDS, \ METADATA_WDTV, METADATA_TIVO, METADATA_MEDE8ER, IGNORE_WORDS, CALENDAR_UNPROTECTED, CREATE_MISSING_SHOW_DIRS, \ ADD_SHOWS_WO_DIR, USE_SUBTITLES, SUBTITLES_LANGUAGES, SUBTITLES_DIR, SUBTITLES_SERVICES_LIST, SUBTITLES_SERVICES_ENABLED, SUBTITLES_HISTORY, SUBTITLES_FINDER_FREQUENCY, subtitlesFinderScheduler, \ @@ -583,18 +585,11 @@ def initialize(consoleLogging=True): if not re.match(r'\d+\|[^|]+(?:\|[^|]+)*', ROOT_DIRS): ROOT_DIRS = '' - proxies = getproxies() - proxy_url = None - if 'http' in proxies: - proxy_url = proxies['http'] - elif 'ftp' in proxies: - proxy_url = proxies['ftp'] - QUALITY_DEFAULT = check_setting_int(CFG, 'General', 'quality_default', SD) STATUS_DEFAULT = check_setting_int(CFG, 'General', 'status_default', SKIPPED) - VERSION_NOTIFY = check_setting_int(CFG, 'General', 'version_notify', 1) - AUTO_UPDATE = check_setting_int(CFG, 'General', 'auto_update', 0) - NOTIFY_ON_UPDATE = check_setting_int(CFG, 'General', 'notify_on_update', 1) + VERSION_NOTIFY = bool(check_setting_int(CFG, 'General', 'version_notify', 1)) + AUTO_UPDATE = bool(check_setting_int(CFG, 'General', 'auto_update', 0)) + NOTIFY_ON_UPDATE = bool(check_setting_int(CFG, 'General', 'notify_on_update', 1)) FLATTEN_FOLDERS_DEFAULT = bool(check_setting_int(CFG, 'General', 'flatten_folders_default', 0)) INDEXER_DEFAULT = check_setting_int(CFG, 'General', 'indexer_default', 0) INDEXER_TIMEOUT = check_setting_int(CFG, 'General', 'indexer_timeout', 20) @@ -605,11 +600,11 @@ def initialize(consoleLogging=True): NAMING_PATTERN = check_setting_str(CFG, 'General', 'naming_pattern', 'Season %0S/%SN - S%0SE%0E - %EN') NAMING_ABD_PATTERN = check_setting_str(CFG, 'General', 'naming_abd_pattern', '%SN - %A.D - %EN') - NAMING_CUSTOM_ABD = check_setting_int(CFG, 'General', 'naming_custom_abd', 0) + NAMING_CUSTOM_ABD = bool(check_setting_int(CFG, 'General', 'naming_custom_abd', 0)) NAMING_SPORTS_PATTERN = check_setting_str(CFG, 'General', 'naming_sports_pattern', '%SN - %A-D - %EN') NAMING_ANIME = check_setting_int(CFG, 'General', 'naming_anime', 3) - NAMING_CUSTOM_SPORTS = check_setting_int(CFG, 'General', 'naming_custom_sports', 0) - NAMING_MULTI_EP = check_setting_int(CFG, 'General', 'naming_multi_ep', 1) + NAMING_CUSTOM_SPORTS = bool(check_setting_int(CFG, 'General', 
'naming_custom_sports', 0)) + NAMING_MULTI_EP = bool(check_setting_int(CFG, 'General', 'naming_multi_ep', 1)) NAMING_FORCE_FOLDERS = naming.check_force_season_folders() NAMING_STRIP_YEAR = bool(check_setting_int(CFG, 'General', 'naming_strip_year', 0)) @@ -659,16 +654,16 @@ def initialize(consoleLogging=True): TORRENT_DIR = check_setting_str(CFG, 'Blackhole', 'torrent_dir', '') TV_DOWNLOAD_DIR = check_setting_str(CFG, 'General', 'tv_download_dir', '') - PROCESS_AUTOMATICALLY = check_setting_int(CFG, 'General', 'process_automatically', 0) - UNPACK = check_setting_int(CFG, 'General', 'unpack', 0) - RENAME_EPISODES = check_setting_int(CFG, 'General', 'rename_episodes', 1) - AIRDATE_EPISODES = check_setting_int(CFG, 'General', 'airdate_episodes', 0) - KEEP_PROCESSED_DIR = check_setting_int(CFG, 'General', 'keep_processed_dir', 1) + PROCESS_AUTOMATICALLY = bool(check_setting_int(CFG, 'General', 'process_automatically', 0)) + UNPACK = bool(check_setting_int(CFG, 'General', 'unpack', 0)) + RENAME_EPISODES = bool(check_setting_int(CFG, 'General', 'rename_episodes', 1)) + AIRDATE_EPISODES = bool(check_setting_int(CFG, 'General', 'airdate_episodes', 0)) + KEEP_PROCESSED_DIR = bool(check_setting_int(CFG, 'General', 'keep_processed_dir', 1)) PROCESS_METHOD = check_setting_str(CFG, 'General', 'process_method', 'copy' if KEEP_PROCESSED_DIR else 'move') - MOVE_ASSOCIATED_FILES = check_setting_int(CFG, 'General', 'move_associated_files', 0) - NFO_RENAME = check_setting_int(CFG, 'General', 'nfo_rename', 1) - CREATE_MISSING_SHOW_DIRS = check_setting_int(CFG, 'General', 'create_missing_show_dirs', 0) - ADD_SHOWS_WO_DIR = check_setting_int(CFG, 'General', 'add_shows_wo_dir', 0) + MOVE_ASSOCIATED_FILES = bool(check_setting_int(CFG, 'General', 'move_associated_files', 0)) + NFO_RENAME = bool(check_setting_int(CFG, 'General', 'nfo_rename', 1)) + CREATE_MISSING_SHOW_DIRS = bool(check_setting_int(CFG, 'General', 'create_missing_show_dirs', 0)) + ADD_SHOWS_WO_DIR = bool(check_setting_int(CFG, 'General', 'add_shows_wo_dir', 0)) NZBS = bool(check_setting_int(CFG, 'NZBs', 'nzbs', 0)) NZBS_UID = check_setting_str(CFG, 'NZBs', 'nzbs_uid', '') @@ -761,7 +756,8 @@ def initialize(consoleLogging=True): USE_PUSHOVER = bool(check_setting_int(CFG, 'Pushover', 'use_pushover', 0)) PUSHOVER_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'Pushover', 'pushover_notify_onsnatch', 0)) PUSHOVER_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'Pushover', 'pushover_notify_ondownload', 0)) - PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'Pushover', 'pushover_notify_onsubtitledownload', 0)) + PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD = bool( + check_setting_int(CFG, 'Pushover', 'pushover_notify_onsubtitledownload', 0)) PUSHOVER_USERKEY = check_setting_str(CFG, 'Pushover', 'pushover_userkey', '') PUSHOVER_APIKEY = check_setting_str(CFG, 'Pushover', 'pushover_apikey', '') USE_LIBNOTIFY = bool(check_setting_int(CFG, 'Libnotify', 'use_libnotify', 0)) @@ -796,7 +792,7 @@ def initialize(consoleLogging=True): TRAKT_API = check_setting_str(CFG, 'Trakt', 'trakt_api', '') TRAKT_REMOVE_WATCHLIST = bool(check_setting_int(CFG, 'Trakt', 'trakt_remove_watchlist', 0)) TRAKT_USE_WATCHLIST = bool(check_setting_int(CFG, 'Trakt', 'trakt_use_watchlist', 0)) - TRAKT_METHOD_ADD = check_setting_str(CFG, 'Trakt', 'trakt_method_add', "0") + TRAKT_METHOD_ADD = check_setting_int(CFG, 'Trakt', 'trakt_method_add', 0) TRAKT_START_PAUSED = bool(check_setting_int(CFG, 'Trakt', 'trakt_start_paused', 0)) TRAKT_USE_RECOMMENDED = bool(check_setting_int(CFG, 
'Trakt', 'trakt_use_recommended', 0)) TRAKT_SYNC = bool(check_setting_int(CFG, 'Trakt', 'trakt_sync', 0)) @@ -874,10 +870,11 @@ def initialize(consoleLogging=True): USE_LISTVIEW = bool(check_setting_int(CFG, 'General', 'use_listview', 0)) ANIMESUPPORT = False - USE_ANIDB = check_setting_str(CFG, 'ANIDB', 'use_anidb', '') + USE_ANIDB = bool(check_setting_int(CFG, 'ANIDB', 'use_anidb', 0)) ANIDB_USERNAME = check_setting_str(CFG, 'ANIDB', 'anidb_username', '') ANIDB_PASSWORD = check_setting_str(CFG, 'ANIDB', 'anidb_password', '') ANIDB_USE_MYLIST = bool(check_setting_int(CFG, 'ANIDB', 'anidb_use_mylist', 0)) + ANIME_SPLIT_HOME = bool(check_setting_int(CFG, 'ANIME', 'anime_split_home', 0)) METADATA_XBMC = check_setting_str(CFG, 'General', 'metadata_xbmc', '0|0|0|0|0|0|0|0|0|0') @@ -902,125 +899,15 @@ def initialize(consoleLogging=True): TIME_PRESET = TIME_PRESET_W_SECONDS.replace(u":%S", u"") TIMEZONE_DISPLAY = check_setting_str(CFG, 'GUI', 'timezone_display', 'network') + # initialize NZB and TORRENT providers + providerList = providers.makeProviderList() + NEWZNAB_DATA = check_setting_str(CFG, 'Newznab', 'newznab_data', '') newznabProviderList = providers.getNewznabProviderList(NEWZNAB_DATA) TORRENTRSS_DATA = check_setting_str(CFG, 'TorrentRss', 'torrentrss_data', '') torrentRssProviderList = providers.getTorrentRssProviderList(TORRENTRSS_DATA) - if not os.path.isfile(CONFIG_FILE): - logger.log(u"Unable to find '" + CONFIG_FILE + "', all settings will be default!", logger.DEBUG) - save_config() - - # start up all the threads - logger.sb_log_instance.initLogging(consoleLogging=consoleLogging) - - # initialize the main SB database - myDB = db.DBConnection() - db.upgradeDatabase(myDB, mainDB.InitialSchema) - - # initialize the cache database - myDB = db.DBConnection('cache.db') - db.upgradeDatabase(myDB, cache_db.InitialSchema) - - # initialize the failed downloads database - myDB = db.DBConnection('failed.db') - db.upgradeDatabase(myDB, failed_db.InitialSchema) - - # fix up any db problems - myDB = db.DBConnection() - db.sanityCheckDatabase(myDB, mainDB.MainSanityCheck) - - # migrate the config if it needs it - migrator = ConfigMigrator(CFG) - migrator.migrate_config() - - # initialize metadata_providers - metadata_provider_dict = metadata.get_metadata_generator_dict() - for cur_metadata_tuple in [(METADATA_XBMC, metadata.xbmc), - (METADATA_XBMC_12PLUS, metadata.xbmc_12plus), - (METADATA_MEDIABROWSER, metadata.mediabrowser), - (METADATA_PS3, metadata.ps3), - (METADATA_WDTV, metadata.wdtv), - (METADATA_TIVO, metadata.tivo), - (METADATA_MEDE8ER, metadata.mede8er), - ]: - (cur_metadata_config, cur_metadata_class) = cur_metadata_tuple - tmp_provider = cur_metadata_class.metadata_class() - tmp_provider.set_config(cur_metadata_config) - metadata_provider_dict[tmp_provider.name] = tmp_provider - - # initialize newznab providers - newznabProviderList = providers.getNewznabProviderList(NEWZNAB_DATA) - providerList = providers.makeProviderList() - - # initialize schedulers - # updaters - update_now = datetime.timedelta(minutes=0) - versionCheckScheduler = scheduler.Scheduler(versionChecker.CheckVersion(), - cycleTime=datetime.timedelta(hours=UPDATE_FREQUENCY), - threadName="CHECKVERSION", - silent=False) - - showQueueScheduler = scheduler.Scheduler(show_queue.ShowQueue(), - cycleTime=datetime.timedelta(seconds=3), - threadName="SHOWQUEUE") - - showUpdateScheduler = scheduler.Scheduler(showUpdater.ShowUpdater(), - cycleTime=datetime.timedelta(hours=1), - threadName="SHOWUPDATER", - 
start_time=datetime.time(hour=3)) # 3 AM - - # searchers - searchQueueScheduler = scheduler.Scheduler(search_queue.SearchQueue(), - cycleTime=datetime.timedelta(seconds=3), - threadName="SEARCHQUEUE") - - update_interval = datetime.timedelta(minutes=DAILYSEARCH_FREQUENCY) - dailySearchScheduler = scheduler.Scheduler(dailysearcher.DailySearcher(), - cycleTime=update_interval, - threadName="DAILYSEARCHER", - run_delay=update_now if DAILYSEARCH_STARTUP - else update_interval) - - update_interval = datetime.timedelta(minutes=BACKLOG_FREQUENCY) - backlogSearchScheduler = searchBacklog.BacklogSearchScheduler(searchBacklog.BacklogSearcher(), - cycleTime=update_interval, - threadName="BACKLOG", - run_delay=update_now if BACKLOG_STARTUP - else update_interval) - - search_intervals = {'15m': 15, '45m': 45, '90m': 90, '4h': 4*60, 'daily': 24*60} - if CHECK_PROPERS_INTERVAL in search_intervals: - update_interval = datetime.timedelta(minutes=search_intervals[CHECK_PROPERS_INTERVAL]) - run_at = None - else: - update_interval = datetime.timedelta(hours=1) - run_at = datetime.time(hour=1) # 1 AM - - properFinderScheduler = scheduler.Scheduler(properFinder.ProperFinder(), - cycleTime=update_interval, - threadName="FINDPROPERS", - start_time=run_at, - run_delay=update_interval) - - # processors - autoPostProcesserScheduler = scheduler.Scheduler(autoPostProcesser.PostProcesser(), - cycleTime=datetime.timedelta( - minutes=AUTOPOSTPROCESSER_FREQUENCY), - threadName="POSTPROCESSER", - silent=not PROCESS_AUTOMATICALLY) - - traktCheckerScheduler = scheduler.Scheduler(traktChecker.TraktChecker(), - cycleTime=datetime.timedelta(hours=1), - threadName="TRAKTCHECKER", - silent=not USE_TRAKT) - - subtitlesFinderScheduler = scheduler.Scheduler(subtitles.SubtitlesFinder(), - cycleTime=datetime.timedelta(hours=SUBTITLES_FINDER_FREQUENCY), - threadName="FINDSUBTITLES", - silent=not USE_SUBTITLES) - # dynamically load provider settings for curTorrentProvider in [curProvider for curProvider in providers.sortedProviderList() if curProvider.providerType == GenericProvider.TORRENT]: @@ -1104,17 +991,114 @@ def initialize(consoleLogging=True): curNzbProvider.getID() + '_backlog_only', 0)) - try: - url = 'http://raw.github.com/echel0n/sickrage-init/master/settings.ini' - clear_cache = ElementTree.XML(helpers.getURL(url)).find('cache/clear').text - CLEAR_CACHE = check_setting_str(CFG, 'General', 'clear_cache', '') - if CLEAR_CACHE != clear_cache: - for curProvider in [x for x in providers.sortedProviderList() if x.isActive()]: - curProvider.cache._clearCache() - CLEAR_CACHE = clear_cache - save_config() - except: - pass + if not os.path.isfile(CONFIG_FILE): + logger.log(u"Unable to find '" + CONFIG_FILE + "', all settings will be default!", logger.DEBUG) + save_config() + + # start up all the threads + logger.sb_log_instance.initLogging(consoleLogging=consoleLogging) + + # initialize the main SB database + myDB = db.DBConnection() + db.upgradeDatabase(myDB, mainDB.InitialSchema) + + # initialize the cache database + myDB = db.DBConnection('cache.db') + db.upgradeDatabase(myDB, cache_db.InitialSchema) + + # initialize the failed downloads database + myDB = db.DBConnection('failed.db') + db.upgradeDatabase(myDB, failed_db.InitialSchema) + + # fix up any db problems + myDB = db.DBConnection() + db.sanityCheckDatabase(myDB, mainDB.MainSanityCheck) + + # migrate the config if it needs it + migrator = ConfigMigrator(CFG) + migrator.migrate_config() + + # initialize metadata_providers + metadata_provider_dict = 
metadata.get_metadata_generator_dict() + for cur_metadata_tuple in [(METADATA_XBMC, metadata.xbmc), + (METADATA_XBMC_12PLUS, metadata.xbmc_12plus), + (METADATA_MEDIABROWSER, metadata.mediabrowser), + (METADATA_PS3, metadata.ps3), + (METADATA_WDTV, metadata.wdtv), + (METADATA_TIVO, metadata.tivo), + (METADATA_MEDE8ER, metadata.mede8er), + ]: + (cur_metadata_config, cur_metadata_class) = cur_metadata_tuple + tmp_provider = cur_metadata_class.metadata_class() + tmp_provider.set_config(cur_metadata_config) + metadata_provider_dict[tmp_provider.name] = tmp_provider + + # initialize schedulers + # updaters + update_now = datetime.timedelta(minutes=0) + versionCheckScheduler = scheduler.Scheduler(versionChecker.CheckVersion(), + cycleTime=datetime.timedelta(hours=UPDATE_FREQUENCY), + threadName="CHECKVERSION", + silent=False) + + showQueueScheduler = scheduler.Scheduler(show_queue.ShowQueue(), + cycleTime=datetime.timedelta(seconds=3), + threadName="SHOWQUEUE") + + showUpdateScheduler = scheduler.Scheduler(showUpdater.ShowUpdater(), + cycleTime=datetime.timedelta(hours=1), + threadName="SHOWUPDATER", + start_time=datetime.time(hour=3)) # 3 AM + + # searchers + searchQueueScheduler = scheduler.Scheduler(search_queue.SearchQueue(), + cycleTime=datetime.timedelta(seconds=3), + threadName="SEARCHQUEUE") + + update_interval = datetime.timedelta(minutes=DAILYSEARCH_FREQUENCY) + dailySearchScheduler = scheduler.Scheduler(dailysearcher.DailySearcher(), + cycleTime=update_interval, + threadName="DAILYSEARCHER", + run_delay=update_now if DAILYSEARCH_STARTUP + else update_interval) + + update_interval = datetime.timedelta(minutes=BACKLOG_FREQUENCY) + backlogSearchScheduler = searchBacklog.BacklogSearchScheduler(searchBacklog.BacklogSearcher(), + cycleTime=update_interval, + threadName="BACKLOG", + run_delay=update_now if BACKLOG_STARTUP + else update_interval) + + search_intervals = {'15m': 15, '45m': 45, '90m': 90, '4h': 4 * 60, 'daily': 24 * 60} + if CHECK_PROPERS_INTERVAL in search_intervals: + update_interval = datetime.timedelta(minutes=search_intervals[CHECK_PROPERS_INTERVAL]) + run_at = None + else: + update_interval = datetime.timedelta(hours=1) + run_at = datetime.time(hour=1) # 1 AM + + properFinderScheduler = scheduler.Scheduler(properFinder.ProperFinder(), + cycleTime=update_interval, + threadName="FINDPROPERS", + start_time=run_at, + run_delay=update_interval) + + # processors + autoPostProcesserScheduler = scheduler.Scheduler(autoPostProcesser.PostProcesser(), + cycleTime=datetime.timedelta( + minutes=AUTOPOSTPROCESSER_FREQUENCY), + threadName="POSTPROCESSER", + silent=not PROCESS_AUTOMATICALLY) + + traktCheckerScheduler = scheduler.Scheduler(traktChecker.TraktChecker(), + cycleTime=datetime.timedelta(hours=1), + threadName="TRAKTCHECKER", + silent=not USE_TRAKT) + + subtitlesFinderScheduler = scheduler.Scheduler(subtitles.SubtitlesFinder(), + cycleTime=datetime.timedelta(hours=SUBTITLES_FINDER_FREQUENCY), + threadName="FINDSUBTITLES", + silent=not USE_SUBTITLES) showList = [] loadingShowList = {} @@ -1126,11 +1110,10 @@ def start(): global __INITIALIZED__, backlogSearchScheduler, \ showUpdateScheduler, versionCheckScheduler, showQueueScheduler, \ properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \ - subtitlesFinderScheduler, USE_SUBTITLES,traktCheckerScheduler, \ + subtitlesFinderScheduler, USE_SUBTITLES, traktCheckerScheduler, \ dailySearchScheduler, events, started with INIT_LOCK: - if __INITIALIZED__: # start sysetm events queue events.start() @@ -1269,11 +1252,13 @@ 
def halt(): __INITIALIZED__ = False started = False + def sig_handler(signum=None, frame=None): if type(signum) != type(None): logger.log(u"Signal %i caught, saving and exiting..." % int(signum)) events.put(events.SystemEvent.SHUTDOWN) + def saveAll(): global showList @@ -1286,6 +1271,7 @@ def saveAll(): logger.log(u"Saving config file to disk") save_config() + def restart(soft=True): if soft: halt() @@ -1391,8 +1377,6 @@ def save_config(): new_config['General']['ignore_words'] = IGNORE_WORDS new_config['General']['calendar_unprotected'] = int(CALENDAR_UNPROTECTED) - new_config['General']['clear_cache'] = CLEAR_CACHE - new_config['Blackhole'] = {} new_config['Blackhole']['nzb_dir'] = NZB_DIR new_config['Blackhole']['torrent_dir'] = TORRENT_DIR @@ -1617,7 +1601,7 @@ def save_config(): new_config['Trakt']['trakt_api'] = TRAKT_API new_config['Trakt']['trakt_remove_watchlist'] = int(TRAKT_REMOVE_WATCHLIST) new_config['Trakt']['trakt_use_watchlist'] = int(TRAKT_USE_WATCHLIST) - new_config['Trakt']['trakt_method_add'] = TRAKT_METHOD_ADD + new_config['Trakt']['trakt_method_add'] = int(TRAKT_METHOD_ADD) new_config['Trakt']['trakt_start_paused'] = int(TRAKT_START_PAUSED) new_config['Trakt']['trakt_use_recommended'] = int(TRAKT_USE_RECOMMENDED) new_config['Trakt']['trakt_sync'] = int(TRAKT_SYNC) @@ -1705,10 +1689,10 @@ def save_config(): new_config['FailedDownloads']['delete_failed'] = int(DELETE_FAILED) new_config['ANIDB'] = {} - new_config['ANIDB']['use_anidb'] = USE_ANIDB + new_config['ANIDB']['use_anidb'] = int(USE_ANIDB) new_config['ANIDB']['anidb_username'] = ANIDB_USERNAME new_config['ANIDB']['anidb_password'] = helpers.encrypt(ANIDB_PASSWORD, ENCRYPTION_VERSION) - new_config['ANIDB']['anidb_use_mylist'] = ANIDB_USE_MYLIST + new_config['ANIDB']['anidb_use_mylist'] = int(ANIDB_USE_MYLIST) new_config['ANIME'] = {} new_config['ANIME']['anime_split_home'] = int(ANIME_SPLIT_HOME) diff --git a/sickbeard/common.py b/sickbeard/common.py index 2e532750..3ca2f452 100644 --- a/sickbeard/common.py +++ b/sickbeard/common.py @@ -265,8 +265,8 @@ class Quality: return (status, Quality.NONE) @staticmethod - def statusFromName(name, assume=True): - quality = Quality.nameQuality(name) + def statusFromName(name, assume=True, anime=False): + quality = Quality.nameQuality(name, anime) if assume and quality == Quality.UNKNOWN: quality = Quality.assumeQuality(name) return Quality.compositeStatus(DOWNLOADED, quality) diff --git a/sickbeard/gh_api.py b/sickbeard/gh_api.py index 8ddbd810..e3d60e42 100644 --- a/sickbeard/gh_api.py +++ b/sickbeard/gh_api.py @@ -51,14 +51,12 @@ class GitHub(object): if params and type(params) is dict: url += '?' + '&'.join([str(x) + '=' + str(params[x]) for x in params.keys()]) - data = helpers.getURL(url) - - if data: - json_data = json.loads(data) - return json_data - else: + parsedJSON = helpers.getURL(url, json=True) + if not parsedJSON: return [] + return parsedJSON + def commits(self): """ Uses the API to get a list of the 100 most recent commits from the specified user/repo/branch, starting from HEAD. 
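The helpers.py rewrite that follows centralizes HTTP access in a single CacheControl-wrapped requests session. A minimal sketch of that pattern, assuming cachecontrol and requests are importable as in this tree; the URL, cache path, and User-Agent string are placeholders:

# Hedged sketch of the shared cached-session pattern used by the new getURL.
import os
import requests
from cachecontrol import CacheControl
from cachecontrol.caches import FileCache

# wrap a plain requests session with an on-disk HTTP cache
session = CacheControl(sess=requests.Session(),
                       cache=FileCache(os.path.join('/tmp', 'sessions')))
session.headers.update({'User-Agent': 'SickBeard/1.0', 'Accept-Encoding': 'gzip,deflate'})
session.verify = False  # mirrors the permissive SSL verification in getURL

resp = session.get('http://example.com/feed', timeout=30)
data = resp.json() if resp.ok else None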
diff --git a/sickbeard/helpers.py b/sickbeard/helpers.py index 73976b78..0f870fc0 100644 --- a/sickbeard/helpers.py +++ b/sickbeard/helpers.py @@ -33,9 +33,11 @@ import uuid import base64 import zipfile -from lib import requests -from lib.requests import exceptions -from itertools import izip, cycle +import sickbeard +import subliminal +import adba +import requests +import requests.exceptions try: import json @@ -49,20 +51,18 @@ except ImportError: from xml.dom.minidom import Node -import sickbeard -from sickbeard.exceptions import MultipleShowObjectsException, EpisodeNotFoundByAbsoluteNumberException, ex +from sickbeard.exceptions import MultipleShowObjectsException, ex from sickbeard import logger, classes -from sickbeard.common import USER_AGENT, mediaExtensions, subtitleExtensions, XML_NSMAP +from sickbeard.common import USER_AGENT, mediaExtensions, subtitleExtensions from sickbeard import db from sickbeard import encodingKludge as ek from sickbeard import notifiers -from lib import subliminal -from lib import adba -from lib import trakt +from sickbeard import clients + +from cachecontrol import CacheControl, caches +from itertools import izip, cycle urllib._urlopener = classes.SickBeardURLopener() -session = requests.Session() - def indentXML(elem, level=0): ''' @@ -191,101 +191,12 @@ def sanitizeFileName(name): return name - -def getURL(url, post_data=None, headers=None, params=None, timeout=30, json=False, use_proxy=False): - """ - Returns a byte-string retrieved from the url provider. - """ - - global session - if not session: - session = requests.Session() - - req_headers = ['User-Agent', USER_AGENT, 'Accept-Encoding', 'gzip,deflate'] - if headers: - for cur_header in headers: - req_headers.append(cur_header) - - try: - # Remove double-slashes from url - parsed = list(urlparse.urlparse(url)) - parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one - url = urlparse.urlunparse(parsed) - - it = iter(req_headers) - - if use_proxy and sickbeard.PROXY_SETTING: - logger.log("Using proxy for url: " + url, logger.DEBUG) - proxies = { - "http": sickbeard.PROXY_SETTING, - "https": sickbeard.PROXY_SETTING, - } - - r = session.get(url, params=params, data=post_data, headers=dict(zip(it, it)), proxies=proxies, - timeout=timeout, verify=False) - else: - r = session.get(url, params=params, data=post_data, headers=dict(zip(it, it)), timeout=timeout, - verify=False) - except requests.HTTPError, e: - logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING) - return None - - except requests.ConnectionError, e: - logger.log(u"Connection error " + str(e.message) + " while loading URL " + url, logger.WARNING) - return None - - except requests.Timeout, e: - logger.log(u"Connection timed out " + str(e.message) + " while loading URL " + url, logger.WARNING) - return None - - if r.ok: - if json: - return r.json() - - return r.content - - def _remove_file_failed(file): try: ek.ek(os.remove, file) except: pass - -def download_file(url, filename): - global session - if not session: - session = requests.Session() - - try: - r = session.get(url, stream=True, verify=False) - with open(filename, 'wb') as fp: - for chunk in r.iter_content(chunk_size=1024): - if chunk: - fp.write(chunk) - fp.flush() - - except requests.HTTPError, e: - _remove_file_failed(filename) - logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING) - return False - - except requests.ConnectionError, e: - logger.log(u"Connection error " + str(e.message) + " 
while loading URL " + url, logger.WARNING) - return False - - except requests.Timeout, e: - logger.log(u"Connection timed out " + str(e.message) + " while loading URL " + url, logger.WARNING) - return False - - except Exception: - _remove_file_failed(filename) - logger.log(u"Unknown exception while loading URL " + url + ": " + traceback.format_exc(), logger.WARNING) - return False - - return True - - def findCertainShow(showList, indexerid): if not showList: return None @@ -610,6 +521,12 @@ def delete_empty_folders(check_empty_dir, keep_dir=None): else: break +def fileBitFilter(mode): + for bit in [stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH, stat.S_ISUID, stat.S_ISGID]: + if mode & bit: + mode -= bit + + return mode def chmodAsParent(childPath): if os.name == 'nt' or os.name == 'ce': @@ -649,15 +566,6 @@ def chmodAsParent(childPath): except OSError: logger.log(u"Failed to set permission for %s to %o" % (childPath, childMode), logger.ERROR) - -def fileBitFilter(mode): - for bit in [stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH, stat.S_ISUID, stat.S_ISGID]: - if mode & bit: - mode -= bit - - return mode - - def fixSetGroupID(childPath): if os.name == 'nt' or os.name == 'ce': return @@ -1272,4 +1180,130 @@ def touchFile(fname, atime=None): logger.log(u"File air date stamping not available on your OS", logger.DEBUG) pass - return False \ No newline at end of file + return False + +def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=None, json=False): + """ + Returns a byte-string retrieved from the url provider. + """ + + # request session + session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(sickbeard.CACHE_DIR, 'sessions'))) + + # request session headers + req_headers = {'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'} + if headers: + req_headers.update(headers) + session.headers.update(req_headers) + + # request session ssl verify + session.verify = False + + # request session paramaters + session.params = params + + try: + # Remove double-slashes from url + parsed = list(urlparse.urlparse(url)) + parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one + url = urlparse.urlunparse(parsed) + + # request session proxies + if sickbeard.PROXY_SETTING: + logger.log("Using proxy for url: " + url, logger.DEBUG) + session.proxies = { + "http": sickbeard.PROXY_SETTING, + "https": sickbeard.PROXY_SETTING, + } + + resp = session.get(url, data=post_data, timeout=timeout) + except requests.exceptions.HTTPError, e: + logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING) + return + except requests.exceptions.ConnectionError, e: + logger.log(u"Connection error " + str(e.message) + " while loading URL " + url, logger.WARNING) + return + except requests.exceptions.Timeout, e: + logger.log(u"Connection timed out " + str(e.message) + " while loading URL " + url, logger.WARNING) + return + except Exception: + logger.log(u"Unknown exception while loading URL " + url + ": " + traceback.format_exc(), logger.WARNING) + return + + if not resp: + logger.log(u"No data returned from " + url, logger.DEBUG) + return + elif not resp.ok: + logger.log(u"Requested url " + url + " returned status code is " + str( + resp.status_code) + ': ' + clients.http_error_code[resp.status_code], logger.WARNING) + return + + if json: + return resp.json() + + return resp.content + +def download_file(url, filename, session=None): + + # create session + session = CacheControl(sess=session, 
cache=caches.FileCache(os.path.join(sickbeard.CACHE_DIR, 'sessions'))) + + # request session headers + session.headers.update({'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'}) + + # request session ssl verify + session.verify = False + + # request session streaming + session.stream = True + + # request session proxies + if sickbeard.PROXY_SETTING: + logger.log("Using proxy for url: " + url, logger.DEBUG) + session.proxies = { + "http": sickbeard.PROXY_SETTING, + "https": sickbeard.PROXY_SETTING, + } + + try: + resp = session.get(url) + if not resp.ok: + return False + + with open(filename, 'wb') as fp: + for chunk in resp.iter_content(chunk_size=1024): + if chunk: + fp.write(chunk) + fp.flush() + + chmodAsParent(filename) + except requests.exceptions.HTTPError, e: + _remove_file_failed(filename) + logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING) + return False + except requests.exceptions.ConnectionError, e: + _remove_file_failed(filename) + logger.log(u"Connection error " + str(e.message) + " while loading URL " + url, logger.WARNING) + return False + except requests.exceptions.Timeout, e: + _remove_file_failed(filename) + logger.log(u"Connection timed out " + str(e.message) + " while loading URL " + url, logger.WARNING) + return False + except EnvironmentError, e: + _remove_file_failed(filename) + logger.log(u"Unable to save the file: " + ex(e), logger.ERROR) + return False + except Exception: + _remove_file_failed(filename) + logger.log(u"Unknown exception while loading URL " + url + ": " + traceback.format_exc(), logger.WARNING) + return False + + if not resp: + logger.log(u"No data returned from " + url, logger.DEBUG) + return False + elif not resp.ok: + logger.log(u"Requested url " + url + " returned status code is " + str( + resp.status_code) + ': ' + clients.http_error_code[resp.status_code], logger.WARNING) + return False + + return True \ No newline at end of file diff --git a/sickbeard/indexers/indexer_api.py b/sickbeard/indexers/indexer_api.py index 28969e66..5e82d2f4 100644 --- a/sickbeard/indexers/indexer_api.py +++ b/sickbeard/indexers/indexer_api.py @@ -48,6 +48,9 @@ class indexerApi(object): if self.indexerID: if sickbeard.CACHE_DIR: indexerConfig[self.indexerID]['api_params']['cache'] = os.path.join(sickbeard.CACHE_DIR, self.name) + if sickbeard.PROXY_SETTING: + indexerConfig[self.indexerID]['api_params']['proxy'] = sickbeard.PROXY_SETTING + return indexerConfig[self.indexerID]['api_params'] @property diff --git a/sickbeard/indexers/indexer_config.py b/sickbeard/indexers/indexer_config.py index eaa38fe6..2bf67068 100644 --- a/sickbeard/indexers/indexer_config.py +++ b/sickbeard/indexers/indexer_config.py @@ -23,7 +23,7 @@ indexerConfig[INDEXER_TVDB] = { 'module': Tvdb, 'api_params': {'apikey': 'F9C450E78D99172E', 'language': 'en', - 'useZip': True + 'useZip': True, }, } @@ -32,7 +32,7 @@ indexerConfig[INDEXER_TVRAGE] = { 'name': 'TVRage', 'module': TVRage, 'api_params': {'apikey': 'Uhewg1Rr0o62fvZvUIZt', - 'language': 'en' + 'language': 'en', }, } diff --git a/sickbeard/metadata/helpers.py b/sickbeard/metadata/helpers.py index 2b3a3eae..4d8951b8 100644 --- a/sickbeard/metadata/helpers.py +++ b/sickbeard/metadata/helpers.py @@ -35,9 +35,8 @@ def getShowImage(url, imgNum=None): logger.log(u"Fetching image from " + tempURL, logger.DEBUG) image_data = helpers.getURL(tempURL) - if image_data is None: logger.log(u"There was an error trying to retrieve the image, aborting", logger.ERROR) - return None + return return image_data 
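To show how the proxy plumbing above is consumed, a hedged sketch of building indexer API params the way indexer_api.py does; the proxy address is a placeholder for sickbeard.PROXY_SETTING and the import assumes the vendored lib/ layout:

# Hedged sketch: passing a proxy through to the Tvdb constructor.
from lib.tvdb_api.tvdb_api import Tvdb

api_params = {'apikey': 'F9C450E78D99172E',  # TVDB key from indexer_config.py
              'language': 'en',
              'useZip': True}

proxy_setting = 'http://127.0.0.1:8118'  # stands in for sickbeard.PROXY_SETTING
if proxy_setting:
    api_params['proxy'] = proxy_setting  # Tvdb applies this to its request session

t = Tvdb(**api_params)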
diff --git a/sickbeard/name_parser/parser.py b/sickbeard/name_parser/parser.py index 47ddb7e2..fd9f75c0 100644 --- a/sickbeard/name_parser/parser.py +++ b/sickbeard/name_parser/parser.py @@ -31,9 +31,10 @@ from dateutil import parser class NameParser(object): - NORMAL_REGEX = 0 - SPORTS_REGEX = 1 - ANIME_REGEX = 2 + ALL_REGEX = 0 + NORMAL_REGEX = 1 + SPORTS_REGEX = 2 + ANIME_REGEX = 3 def __init__(self, file_name=True, showObj=None, tryIndexers=False, convert=False, naming_pattern=False): @@ -44,13 +45,14 @@ class NameParser(object): self.convert = convert self.naming_pattern = naming_pattern - self.regexModes = [self.NORMAL_REGEX, self.SPORTS_REGEX, self.ANIME_REGEX] if self.showObj and not self.showObj.is_anime and not self.showObj.is_sports: - self.regexModes = [self.NORMAL_REGEX] + self._compile_regexes(self.NORMAL_REGEX) elif self.showObj and self.showObj.is_anime: - self.regexModes = [self.ANIME_REGEX] + self._compile_regexes(self.ANIME_REGEX) elif self.showObj and self.showObj.is_sports: - self.regexModes = [self.SPORTS_REGEX] + self._compile_regexes(self.SPORTS_REGEX) + else: + self._compile_regexes(self.ALL_REGEX) def clean_series_name(self, series_name): """Cleans up series name by removing any . and _ @@ -83,9 +85,12 @@ class NameParser(object): elif regexMode == self.ANIME_REGEX: logger.log(u"Using ANIME regexs", logger.DEBUG) uncompiled_regex = [regexes.anime_regexes, regexes.normal_regexes] - else: - logger.log(u"Using NORMAL reqgexs", logger.DEBUG) + elif regexMode == self.NORMAL_REGEX: + logger.log(u"Using NORMAL regexs", logger.DEBUG) uncompiled_regex = [regexes.normal_regexes] + else: + logger.log(u"Using ALL regexes", logger.DEBUG) + uncompiled_regex = [regexes.normal_regexes, regexes.sports_regexs, regexes.anime_regexes] self.compiled_regexes = [] for regexItem in uncompiled_regex: @@ -95,7 +100,7 @@ class NameParser(object): except re.error, errormsg: logger.log(u"WARNING: Invalid episode_pattern, %s. 
%s" % (errormsg, cur_pattern)) else: - self.compiled_regexes.append((regexMode, cur_pattern_num, cur_pattern_name, cur_regex)) + self.compiled_regexes.append((cur_pattern_num, cur_pattern_name, cur_regex)) def _parse_string(self, name): if not name: @@ -103,144 +108,126 @@ class NameParser(object): matches = [] bestResult = None - doneSearch = False - for regexMode in self.regexModes: - if doneSearch: - break + for (cur_regex_num, cur_regex_name, cur_regex) in self.compiled_regexes: + match = cur_regex.match(name) - self._compile_regexes(regexMode) - for (cur_regexMode, cur_regex_num, cur_regex_name, cur_regex) in self.compiled_regexes: - match = cur_regex.match(name) + if not match: + continue - if not match: + result = ParseResult(name) + result.which_regex = [cur_regex_name] + result.score = 0 - cur_regex_num + + named_groups = match.groupdict().keys() + + if 'series_name' in named_groups: + result.series_name = match.group('series_name') + if result.series_name: + result.series_name = self.clean_series_name(result.series_name) + result.score += 1 + + if 'season_num' in named_groups: + tmp_season = int(match.group('season_num')) + if cur_regex_name == 'bare' and tmp_season in (19, 20): continue + result.season_number = tmp_season + result.score += 1 - result = ParseResult(name) - result.which_regex = [cur_regex_name] - result.score = 0 - cur_regex_num + if 'ep_num' in named_groups: + ep_num = self._convert_number(match.group('ep_num')) + if 'extra_ep_num' in named_groups and match.group('extra_ep_num'): + result.episode_numbers = range(ep_num, self._convert_number(match.group('extra_ep_num')) + 1) + result.score += 1 + else: + result.episode_numbers = [ep_num] + result.score += 1 - named_groups = match.groupdict().keys() + if 'ep_ab_num' in named_groups: + ep_ab_num = self._convert_number(match.group('ep_ab_num')) + if 'extra_ab_ep_num' in named_groups and match.group('extra_ab_ep_num'): + result.ab_episode_numbers = range(ep_ab_num, + self._convert_number(match.group('extra_ab_ep_num')) + 1) + result.score += 1 + else: + result.ab_episode_numbers = [ep_ab_num] + result.score += 1 - if 'series_name' in named_groups: - result.series_name = match.group('series_name') - if result.series_name: - result.series_name = self.clean_series_name(result.series_name) + if 'sports_event_id' in named_groups: + sports_event_id = match.group('sports_event_id') + if sports_event_id: + result.sports_event_id = int(match.group('sports_event_id')) + result.score += 1 + + if 'sports_event_name' in named_groups: + result.sports_event_name = match.group('sports_event_name') + if result.sports_event_name: + result.sports_event_name = self.clean_series_name(result.sports_event_name) + result.score += 1 + + if 'sports_air_date' in named_groups: + sports_air_date = match.group('sports_air_date') + if result.show and result.show.is_sports: + try: + result.sports_air_date = parser.parse(sports_air_date, fuzzy=True).date() result.score += 1 - - # get show object - if not result.show and not self.naming_pattern: - result.show = helpers.get_show(result.series_name, self.tryIndexers) - - # confirm result show object variables - if result.show: - # confirm passed in show object indexer id matches result show object indexer id - if self.showObj and self.showObj.indexerid != result.show.indexerid: - doneSearch = True - break - - # confirm we are using correct regex mode - if regexMode == self.NORMAL_REGEX and not (result.show.is_anime or result.show.is_sports): - result.score += 1 - elif regexMode == self.SPORTS_REGEX and 
result.show.is_sports: - result.score += 1 - elif regexMode == self.ANIME_REGEX and result.show.is_anime: - result.score += 1 - elif not result.show.is_anime: - break - - if 'season_num' in named_groups: - tmp_season = int(match.group('season_num')) - if cur_regex_name == 'bare' and tmp_season in (19, 20): + except: continue - result.season_number = tmp_season - result.score += 1 - if 'ep_num' in named_groups: - ep_num = self._convert_number(match.group('ep_num')) - if 'extra_ep_num' in named_groups and match.group('extra_ep_num'): - result.episode_numbers = range(ep_num, self._convert_number(match.group('extra_ep_num')) + 1) + if 'air_year' in named_groups and 'air_month' in named_groups and 'air_day' in named_groups: + if result.show and result.show.air_by_date: + year = int(match.group('air_year')) + month = int(match.group('air_month')) + day = int(match.group('air_day')) + + try: + dtStr = '%s-%s-%s' % (year, month, day) + result.air_date = datetime.datetime.strptime(dtStr, "%Y-%m-%d").date() result.score += 1 - else: - result.episode_numbers = [ep_num] - result.score += 1 - - if 'ep_ab_num' in named_groups: - ep_ab_num = self._convert_number(match.group('ep_ab_num')) - if 'extra_ab_ep_num' in named_groups and match.group('extra_ab_ep_num'): - result.ab_episode_numbers = range(ep_ab_num, - self._convert_number(match.group('extra_ab_ep_num')) + 1) - result.score += 1 - else: - result.ab_episode_numbers = [ep_ab_num] - result.score += 1 - - if 'sports_event_id' in named_groups: - sports_event_id = match.group('sports_event_id') - if sports_event_id: - result.sports_event_id = int(match.group('sports_event_id')) - result.score += 1 - - if 'sports_event_name' in named_groups: - result.sports_event_name = match.group('sports_event_name') - if result.sports_event_name: - result.sports_event_name = self.clean_series_name(result.sports_event_name) - result.score += 1 - - if 'sports_air_date' in named_groups: - sports_air_date = match.group('sports_air_date') - if result.show and result.show.is_sports: - try: - result.sports_air_date = parser.parse(sports_air_date, fuzzy=True).date() - result.score += 1 - except: - continue - - if 'air_year' in named_groups and 'air_month' in named_groups and 'air_day' in named_groups: - if result.show and result.show.air_by_date: - year = int(match.group('air_year')) - month = int(match.group('air_month')) - day = int(match.group('air_day')) - - try: - dtStr = '%s-%s-%s' % (year, month, day) - result.air_date = datetime.datetime.strptime(dtStr, "%Y-%m-%d").date() - result.score += 1 - except: - continue - - if 'extra_info' in named_groups: - tmp_extra_info = match.group('extra_info') - - # Show.S04.Special or Show.S05.Part.2.Extras is almost certainly not every episode in the season - if tmp_extra_info and cur_regex_name == 'season_only' and re.search( - r'([. _-]|^)(special|extra)s?\w*([. _-]|$)', tmp_extra_info, re.I): + except: continue - result.extra_info = tmp_extra_info - result.score += 1 - if 'release_group' in named_groups: - result.release_group = match.group('release_group') - result.score += 1 + if 'extra_info' in named_groups: + tmp_extra_info = match.group('extra_info') - matches.append(result) + # Show.S04.Special or Show.S05.Part.2.Extras is almost certainly not every episode in the season + if tmp_extra_info and cur_regex_name == 'season_only' and re.search( + r'([. _-]|^)(special|extra)s?\w*([. 
_-]|$)', tmp_extra_info, re.I): + continue + result.extra_info = tmp_extra_info + result.score += 1 + + if 'release_group' in named_groups: + result.release_group = match.group('release_group') + result.score += 1 + + + matches.append(result) if len(matches): # pick best match with highest score based on placement bestResult = max(sorted(matches, reverse=True, key=lambda x: x.which_regex), key=lambda x: x.score) - # if no show object was created check and see if we passed one in and use that instead - if not bestResult.show and self.showObj: - bestResult.show = self.showObj + show = None + if not self.naming_pattern: + # try and create a show object for this result + show = helpers.get_show(bestResult.series_name, self.tryIndexers) - # get quality - bestResult.quality = common.Quality.nameQuality(name, - bestResult.show.is_anime if bestResult.show else False) + # confirm passed in show object indexer id matches result show object indexer id + if show: + if self.showObj and show.indexerid != self.showObj.indexerid: + show = None + bestResult.show = show + elif not show and self.showObj: + bestResult.show = self.showObj # if this is a naming pattern test or result doesn't have a show object then return best result if not bestResult.show or self.naming_pattern: return bestResult + # get quality + bestResult.quality = common.Quality.nameQuality(name, bestResult.show.is_anime) + new_episode_numbers = [] new_season_numbers = [] new_absolute_numbers = [] diff --git a/sickbeard/network_timezones.py b/sickbeard/network_timezones.py index 21c822c0..56981782 100644 --- a/sickbeard/network_timezones.py +++ b/sickbeard/network_timezones.py @@ -77,7 +77,6 @@ def _update_zoneinfo(): url_zv = 'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/zoneinfo.txt' url_data = helpers.getURL(url_zv) - if url_data is None: # When urlData is None, trouble connecting to github logger.log(u"Loading zoneinfo.txt failed. Unable to get URL: " + url_zv, logger.ERROR) @@ -148,7 +147,6 @@ def update_network_dict(): url = 'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/network_timezones.txt' url_data = helpers.getURL(url) - if url_data is None: # When urlData is None, trouble connecting to github logger.log(u"Loading Network Timezones update failed. 
Unable to get URL: " + url, logger.ERROR) diff --git a/sickbeard/nzbSplitter.py b/sickbeard/nzbSplitter.py index 29263f40..4ca7485a 100644 --- a/sickbeard/nzbSplitter.py +++ b/sickbeard/nzbSplitter.py @@ -106,7 +106,6 @@ def stripNS(element, ns): def splitResult(result): urlData = helpers.getURL(result.url) - if urlData is None: logger.log(u"Unable to load url " + result.url + ", can't download season NZB", logger.ERROR) return False diff --git a/sickbeard/nzbget.py b/sickbeard/nzbget.py index 8646c0ca..acdba6c7 100644 --- a/sickbeard/nzbget.py +++ b/sickbeard/nzbget.py @@ -111,7 +111,7 @@ def sendNZB(nzb, proper=False): if (data == None): return False nzbcontent64 = standard_b64encode(data) - nzbget_result = nzbGetRPC.append(nzb.name + ".nzb", sickbeard.NZBGET_CATEGORY, addToTop, nzbcontent64) + nzbget_result = nzbGetRPC.append(nzb.name + ".nzb", sickbeard.NZBGET_CATEGORY, addToTop, nzbcontent64) elif nzbget_version == 12: if nzbcontent64 is not None: nzbget_result = nzbGetRPC.append(nzb.name + ".nzb", sickbeard.NZBGET_CATEGORY, nzbgetprio, False, diff --git a/sickbeard/providers/bitsoup.py b/sickbeard/providers/bitsoup.py index d16ca3f2..240f1a09 100644 --- a/sickbeard/providers/bitsoup.py +++ b/sickbeard/providers/bitsoup.py @@ -19,9 +19,11 @@ import re import traceback import datetime -import urlparse import sickbeard import generic +import requests +import requests.exceptions + from sickbeard.common import Quality from sickbeard import logger from sickbeard import tvcache @@ -30,12 +32,9 @@ from sickbeard import classes from sickbeard import helpers from sickbeard import show_name_helpers from sickbeard.exceptions import ex -from sickbeard import clients -from lib import requests -from lib.requests import exceptions -from sickbeard.bs4_parser import BS4Parser -from lib.unidecode import unidecode from sickbeard.helpers import sanitizeSceneName +from sickbeard.bs4_parser import BS4Parser +from unidecode import unidecode class BitSoupProvider(generic.TorrentProvider): @@ -83,7 +82,8 @@ class BitSoupProvider(generic.TorrentProvider): 'ssl': 'yes' } - self.session = requests.Session() + if not self.session: + self.session = requests.session() try: response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False) @@ -227,32 +227,6 @@ class BitSoupProvider(generic.TorrentProvider): return (title, url) - def getURL(self, url, post_data=None, headers=None, json=False): - - if not self.session: - self._doLogin() - - if not headers: - headers = [] - - try: - # Remove double-slashes from url - parsed = list(urlparse.urlparse(url)) - parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one - url = urlparse.urlunparse(parsed) - - response = self.session.get(url, verify=False) - except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e: - logger.log(u"Error loading " + self.name + " URL: " + ex(e), logger.ERROR) - return None - - if response.status_code != 200: - logger.log(self.name + u" page requested with url " + url + " returned status code is " + str( - response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING) - return None - - return response.content - def findPropers(self, search_date=datetime.datetime.today()): results = [] diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py index 88f482e7..405f0811 100644 --- a/sickbeard/providers/btn.py +++ b/sickbeard/providers/btn.py @@ -89,7 +89,6 @@ class BTNProvider(generic.TorrentProvider): params.update(search_params) parsedJSON 
= self._api_call(apikey, params) - if not parsedJSON: logger.log(u"No data returned from " + self.name, logger.ERROR) return [] diff --git a/sickbeard/providers/ezrss.py b/sickbeard/providers/ezrss.py index 76f7c362..48ec3a4c 100644 --- a/sickbeard/providers/ezrss.py +++ b/sickbeard/providers/ezrss.py @@ -56,7 +56,7 @@ class EZRSSProvider(generic.TorrentProvider): def getQuality(self, item, anime=False): filename = item.filename - quality = Quality.nameQuality(filename) + quality = Quality.sceneQuality(filename, anime) return quality @@ -81,10 +81,8 @@ class EZRSSProvider(generic.TorrentProvider): params['show_name'] = helpers.sanitizeSceneName(self.show.name, ezrss=True).replace('.', ' ').encode('utf-8') - if ep_obj.show.air_by_date: - params['date'] = str(ep_obj.airdate).split('-')[0] - elif ep_obj.show.sports: - params['date'] = str(ep_obj.airdate).split('-')[0] + if ep_obj.show.air_by_date or ep_obj.show.sports: + params['season'] = str(ep_obj.airdate).split('-')[0] elif ep_obj.show.anime: params['season'] = "%d" % ep_obj.scene_absolute_number else: @@ -101,9 +99,7 @@ class EZRSSProvider(generic.TorrentProvider): params['show_name'] = helpers.sanitizeSceneName(self.show.name, ezrss=True).replace('.', ' ').encode('utf-8') - if self.show.air_by_date: - params['date'] = str(ep_obj.airdate) - elif self.show.sports: + if self.show.air_by_date or self.show.sports: params['date'] = str(ep_obj.airdate) elif self.show.anime: params['episode'] = "%i" % int(ep_obj.scene_absolute_number) diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py index 3d7792cd..45581e4c 100755 --- a/sickbeard/providers/freshontv.py +++ b/sickbeard/providers/freshontv.py @@ -258,32 +258,6 @@ class FreshOnTVProvider(generic.TorrentProvider): return (title, url) - def getURL(self, url, post_data=None, headers=None, json=False): - - if not self.session: - self._doLogin() - - if not headers: - headers = [] - - try: - # Remove double-slashes from url - parsed = list(urlparse.urlparse(url)) - parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one - url = urlparse.urlunparse(parsed) - - response = self.session.get(url, verify=False) - except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e: - logger.log(u"Error loading " + self.name + " URL: " + ex(e), logger.ERROR) - return None - - if response.status_code != 200: - logger.log(self.name + u" page requested with url " + url + " returned status code is " + str( - response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING) - return None - - return response.content - def findPropers(self, search_date=datetime.datetime.today()): results = [] diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py index b6088861..0419839d 100644 --- a/sickbeard/providers/generic.py +++ b/sickbeard/providers/generic.py @@ -34,9 +34,11 @@ from sickbeard import encodingKludge as ek from sickbeard.exceptions import ex from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException from sickbeard.common import Quality +from sickbeard import clients from lib.hachoir_parser import createParser + class GenericProvider: NZB = "nzb" TORRENT = "torrent" @@ -61,10 +63,10 @@ class GenericProvider: self.cache = tvcache.TVCache(self) + self.cookies = None self.session = requests.session() - self.session.verify = False - self.session.headers.update({ - 'user-agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) 
Chrome/32.0.1700.107 Safari/537.36'}) + self.headers = { + 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36'} def getID(self): return GenericProvider.makeID(self.name) @@ -79,6 +81,9 @@ class GenericProvider: def _checkAuth(self): return + def _doLogin(self): + return True + def isActive(self): if self.providerType == GenericProvider.NZB and sickbeard.USE_NZBS: return self.isEnabled() @@ -109,60 +114,61 @@ class GenericProvider: return result - def getURL(self, url, post_data=None, headers=None, json=False): + def getURL(self, url, post_data=None, params=None, timeout=30, json=False): """ By default this is just a simple urlopen call but this method should be overridden for providers with special URL requirements (like cookies) """ - if not headers: - headers = [] + # check for auth + if not self._doLogin(): + return - data = helpers.getURL(url, post_data, headers, json=json) - - if not data: - logger.log(u"Error loading " + self.name + " URL: " + url, logger.ERROR) - return None - - return data + return helpers.getURL(url, post_data=post_data, params=params, headers=self.headers, timeout=timeout, + session=self.session, json=json) def downloadResult(self, result): """ Save the result to disk. """ - logger.log(u"Downloading a result from " + self.name + " at " + result.url) + # check for auth + if not self._doLogin(): + return - data = self.getURL(result.url) + if self.providerType == GenericProvider.TORRENT: + torrent_hash = re.findall('urn:btih:([\w]{32,40})', result.url)[0].upper() + if not torrent_hash: + logger.log("Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR) + return False - if data is None: - return False + urls = [ + 'http://torcache.net/torrent/' + torrent_hash + '.torrent', + 'http://torrage.com/torrent/' + torrent_hash + '.torrent', + 'http://zoink.it/torrent/' + torrent_hash + '.torrent', + ] + + filename = ek.ek(os.path.join, sickbeard.TORRENT_DIR, + helpers.sanitizeFileName(result.name) + '.' + self.providerType) + elif self.providerType == GenericProvider.NZB: + urls = [result.url] + + filename = ek.ek(os.path.join, sickbeard.NZB_DIR, + helpers.sanitizeFileName(result.name) + '.' + self.providerType) - # use the appropriate watch folder - if self.providerType == GenericProvider.NZB: - saveDir = sickbeard.NZB_DIR - writeMode = 'w' - elif self.providerType == GenericProvider.TORRENT: - saveDir = sickbeard.TORRENT_DIR - writeMode = 'wb' else: - return False + return - # use the result name as the filename - file_name = ek.ek(os.path.join, saveDir, helpers.sanitizeFileName(result.name) + '.' 
+ self.providerType) + for url in urls: + if helpers.download_file(url, filename, session=self.session): + logger.log(u"Downloading a result from " + self.name + " at " + url) - logger.log(u"Saving to " + file_name, logger.DEBUG) + if self.providerType == GenericProvider.TORRENT: + logger.log(u"Saved magnet link to " + filename, logger.MESSAGE) + else: + logger.log(u"Saved result to " + filename, logger.MESSAGE) - try: - with open(file_name, writeMode) as fileOut: - fileOut.write(data) - helpers.chmodAsParent(file_name) - except EnvironmentError, e: - logger.log("Unable to save the file: " + ex(e), logger.ERROR) - return False - - # as long as it's a valid download then consider it a successful snatch - return self._verify_download(file_name) + return self._verify_download(filename) def _verify_download(self, file_name=None): """ @@ -312,14 +318,16 @@ class GenericProvider: if not len(parse_result.episode_numbers) and ( parse_result.season_number and parse_result.season_number != season) or ( not parse_result.season_number and season != 1): - logger.log(u"The result " + title + " doesn't seem to be a valid season that we are trying to snatch, ignoring", - logger.DEBUG) + logger.log( + u"The result " + title + " doesn't seem to be a valid season that we are trying to snatch, ignoring", + logger.DEBUG) addCacheEntry = True elif len(parse_result.episode_numbers) and ( parse_result.season_number != season or not [ep for ep in episodes if ep.scene_episode in parse_result.episode_numbers]): - logger.log(u"The result " + title + " doesn't seem to be a valid episode that we are trying to snatch, ignoring", - logger.DEBUG) + logger.log( + u"The result " + title + " doesn't seem to be a valid episode that we are trying to snatch, ignoring", + logger.DEBUG) addCacheEntry = True if not addCacheEntry: diff --git a/sickbeard/providers/hdbits.py b/sickbeard/providers/hdbits.py index f715e698..3b143661 100644 --- a/sickbeard/providers/hdbits.py +++ b/sickbeard/providers/hdbits.py @@ -80,7 +80,7 @@ class HDBitsProvider(generic.TorrentProvider): return True def _get_season_search_strings(self, ep_obj): - season_search_string = [self._make_post_data_JSON(show=ep_obj.show, season=ep_obj.scene_season)] + season_search_string = [self._make_post_data_JSON(show=ep_obj.show, season=ep_obj)] return season_search_string def _get_episode_search_strings(self, ep_obj, add_string=''): @@ -105,16 +105,8 @@ class HDBitsProvider(generic.TorrentProvider): logger.log(u"Search url: " + self.search_url + " search_params: " + search_params, logger.DEBUG) - data = self.getURL(self.search_url, post_data=search_params) - - if not data: - logger.log(u"No data returned from " + self.search_url, logger.ERROR) - return [] - - parsedJSON = helpers.parse_json(data) - - if parsedJSON is None: - logger.log(u"Error trying to load " + self.name + " JSON data", logger.ERROR) + parsedJSON = self.getURL(self.search_url, post_data=search_params, json=True) + if not parsedJSON: return [] if self._checkAuthFromData(parsedJSON): @@ -195,7 +187,7 @@ class HDBitsProvider(generic.TorrentProvider): else: post_data['tvdb'] = { 'id': show.indexerid, - 'season': season, + 'season': episode.scene_season, } if search_term: @@ -225,20 +217,14 @@ class HDBitsCache(tvcache.TVCache): if self._checkAuth(None): - data = self._getRSSData() - - # As long as we got something from the provider we count it as an update - if data: - self.setLastUpdate() - else: - return [] - - parsedJSON = helpers.parse_json(data) - - if parsedJSON is None: + parsedJSON = 
self._getRSSData() + if not parsedJSON: logger.log(u"Error trying to load " + self.provider.name + " JSON feed", logger.ERROR) return [] + # mark updated + self.setLastUpdate() + if self._checkAuth(parsedJSON): if parsedJSON and 'data' in parsedJSON: items = parsedJSON['data'] @@ -249,27 +235,21 @@ class HDBitsCache(tvcache.TVCache): cl = [] for item in items: - ci = self._parseItem(item) if ci is not None: cl.append(ci) - - if len(cl) > 0: myDB = self._getDB() myDB.mass_action(cl) - - else: raise exceptions.AuthException( "Your authentication info for " + self.provider.name + " is incorrect, check your config") - else: return [] def _getRSSData(self): - return self.provider.getURL(self.provider.rss_url, post_data=self.provider._make_post_data_JSON()) + return self.provider.getURL(self.provider.rss_url, post_data=self.provider._make_post_data_JSON(), json=True) def _parseItem(self, item): diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py index 4d556ac8..445cd714 100644 --- a/sickbeard/providers/hdtorrents.py +++ b/sickbeard/providers/hdtorrents.py @@ -288,29 +288,6 @@ class HDTorrentsProvider(generic.TorrentProvider): return (title, url) - def getURL(self, url, post_data=None, headers=None, json=False): - - if not self.session: - self._doLogin() - - if not headers: - headers = [] - try: - parsed = list(urlparse.urlparse(url)) - parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one - url = urlparse.urlunparse(parsed) - response = self.session.get(url, verify=False) - except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e: - logger.log(u"Error loading " + self.name + " URL: " + ex(e), logger.ERROR) - return None - - if response.status_code != 200: - logger.log(self.name + u" page requested with url " + url + " returned status code is " + str( - response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING) - return None - - return response.content - def findPropers(self, search_date=datetime.datetime.today()): results = [] diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py index baa027cc..c3de8a9d 100644 --- a/sickbeard/providers/iptorrents.py +++ b/sickbeard/providers/iptorrents.py @@ -230,30 +230,6 @@ class IPTorrentsProvider(generic.TorrentProvider): return (title, url) - def getURL(self, url, post_data=None, headers=None, json=False): - - if not self.session: - self._doLogin() - - if not headers: - headers = [] - - try: - parsed = list(urlparse.urlparse(url)) - parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one - url = urlparse.urlunparse(parsed) - response = self.session.get(url, verify=False) - except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e: - logger.log(u"Error loading " + self.name + " URL: " + ex(e), logger.ERROR) - return None - - if response.status_code != 200: - logger.log(self.name + u" page requested with url " + url + " returned status code is " + str( - response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING) - return None - - return response.content - def findPropers(self, search_date=datetime.datetime.today()): results = [] diff --git a/sickbeard/providers/kat.py b/sickbeard/providers/kat.py index bc10769c..e9abc71c 100644 --- a/sickbeard/providers/kat.py +++ b/sickbeard/providers/kat.py @@ -112,7 +112,6 @@ class KATProvider(generic.TorrentProvider): fileName = None data = self.getURL(torrent_link) - if not data: return None @@ -316,83 
+315,6 @@ class KATProvider(generic.TorrentProvider): return (title, url) - def getURL(self, url, post_data=None, headers=None, json=False): - - if not self.session: - self.session = requests.Session() - - try: - # Remove double-slashes from url - parsed = list(urlparse.urlparse(url)) - parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one - url = urlparse.urlunparse(parsed) - - if sickbeard.PROXY_SETTING: - proxies = { - "http": sickbeard.PROXY_SETTING, - "https": sickbeard.PROXY_SETTING, - } - - r = self.session.get(url, proxies=proxies, verify=False) - else: - r = self.session.get(url, verify=False) - except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e: - logger.log(u"Error loading " + self.name + " URL: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR) - return None - - if r.status_code != 200: - logger.log(self.name + u" page requested with url " + url + " returned status code is " + str( - r.status_code) + ': ' + clients.http_error_code[r.status_code], logger.WARNING) - return None - - return r.content - - def downloadResult(self, result): - """ - Save the result to disk. - """ - - if not self.session: - self.session = requests.Session() - - torrent_hash = re.findall('urn:btih:([\w]{32,40})', result.url)[0].upper() - - if not torrent_hash: - logger.log("Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR) - return False - - try: - r = self.session.get('http://torcache.net/torrent/' + torrent_hash + '.torrent', verify=False) - except Exception, e: - logger.log("Unable to connect to TORCACHE: " + ex(e), logger.ERROR) - try: - logger.log("Trying TORRAGE cache instead") - r = self.session.get('http://torrage.com/torrent/' + torrent_hash + '.torrent', verify=False) - except Exception, e: - logger.log("Unable to connect to TORRAGE: " + ex(e), logger.ERROR) - return False - - if not r.status_code == 200: - return False - - magnetFileName = ek.ek(os.path.join, sickbeard.TORRENT_DIR, - helpers.sanitizeFileName(result.name) + '.' 
+ self.providerType) - magnetFileContent = r.content - - try: - with open(magnetFileName, 'wb') as fileOut: - fileOut.write(magnetFileContent) - - helpers.chmodAsParent(magnetFileName) - - except EnvironmentError, e: - logger.log("Unable to save the file: " + ex(e), logger.ERROR) - return False - - logger.log(u"Saved magnet link to " + magnetFileName + " ", logger.MESSAGE) - return True - - def findPropers(self, search_date=datetime.datetime.today()): results = [] diff --git a/sickbeard/providers/newzbin.py b/sickbeard/providers/newzbin.py index 7b04b00f..6da3747e 100644 --- a/sickbeard/providers/newzbin.py +++ b/sickbeard/providers/newzbin.py @@ -227,25 +227,6 @@ class NewzbinProvider(generic.NZBProvider): return True - def getURL(self, url, post_data=None, headers=None, json=False): - - myOpener = classes.AuthURLOpener(sickbeard.NEWZBIN_USERNAME, sickbeard.NEWZBIN_PASSWORD) - try: - # Remove double-slashes from url - parsed = list(urlparse.urlparse(url)) - parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one - url = urlparse.urlunparse(parsed) - - f = myOpener.openit(url) - except (urllib.ContentTooShortError, IOError), e: - logger.log("Error loading search results: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR) - return None - - data = f.read() - f.close() - - return data - def _get_season_search_strings(self, ep_obj): return ['^' + x for x in show_name_helpers.makeSceneSeasonSearchString(self.show, ep_obj)] diff --git a/sickbeard/providers/nextgen.py b/sickbeard/providers/nextgen.py index 33b738db..f0e0fb4d 100644 --- a/sickbeard/providers/nextgen.py +++ b/sickbeard/providers/nextgen.py @@ -200,66 +200,66 @@ class NextGenProvider(generic.TorrentProvider): logger.log(u"" + self.name + " search page URL: " + searchURL, logger.DEBUG) data = self.getURL(searchURL) + if not data: + continue - if data: + try: + with BS4Parser(data.decode('iso-8859-1'), features=["html5lib", "permissive"]) as html: + resultsTable = html.find('div', attrs={'id': 'torrent-table-wrapper'}) - try: - with BS4Parser(data.decode('iso-8859-1'), features=["html5lib", "permissive"]) as html: - resultsTable = html.find('div', attrs={'id': 'torrent-table-wrapper'}) + if not resultsTable: + logger.log(u"The Data returned from " + self.name + " do not contains any torrent", + logger.DEBUG) + continue - if not resultsTable: - logger.log(u"The Data returned from " + self.name + " do not contains any torrent", + # Collecting entries + entries_std = html.find_all('div', attrs={'id': 'torrent-std'}) + entries_sticky = html.find_all('div', attrs={'id': 'torrent-sticky'}) + + entries = entries_std + entries_sticky + + #Xirg STANDARD TORRENTS + #Continue only if one Release is found + if len(entries) > 0: + + for result in entries: + + try: + torrentName = \ + ((result.find('div', attrs={'id': 'torrent-udgivelse2-users'})).find('a'))['title'] + torrentId = ( + ((result.find('div', attrs={'id': 'torrent-download'})).find('a'))['href']).replace( + 'download.php?id=', '') + torrent_name = str(torrentName) + torrent_download_url = (self.urls['download'] % torrentId).encode('utf8') + torrent_details_url = (self.urls['detail'] % torrentId).encode('utf8') + #torrent_seeders = int(result.find('div', attrs = {'id' : 'torrent-seeders'}).find('a')['class'][0]) + ## Not used, perhaps in the future ## + #torrent_id = int(torrent['href'].replace('/details.php?id=', '')) + #torrent_leechers = int(result.find('td', attrs = {'class' : 'ac t_leechers'}).string) + except (AttributeError, TypeError): + continue + + # 
Filter unseeded torrent and torrents with no name/url + #if mode != 'RSS' and torrent_seeders == 0: + # continue + + if not torrent_name or not torrent_download_url: + continue + + item = torrent_name, torrent_download_url + logger.log(u"Found result: " + torrent_name + " (" + torrent_details_url + ")", logger.DEBUG) - continue + items[mode].append(item) - # Collecting entries - entries_std = html.find_all('div', attrs={'id': 'torrent-std'}) - entries_sticky = html.find_all('div', attrs={'id': 'torrent-sticky'}) + else: + logger.log(u"The Data returned from " + self.name + " do not contains any torrent", + logger.WARNING) + continue - entries = entries_std + entries_sticky - - #Xirg STANDARD TORRENTS - #Continue only if one Release is found - if len(entries) > 0: - - for result in entries: - - try: - torrentName = \ - ((result.find('div', attrs={'id': 'torrent-udgivelse2-users'})).find('a'))['title'] - torrentId = ( - ((result.find('div', attrs={'id': 'torrent-download'})).find('a'))['href']).replace( - 'download.php?id=', '') - torrent_name = str(torrentName) - torrent_download_url = (self.urls['download'] % torrentId).encode('utf8') - torrent_details_url = (self.urls['detail'] % torrentId).encode('utf8') - #torrent_seeders = int(result.find('div', attrs = {'id' : 'torrent-seeders'}).find('a')['class'][0]) - ## Not used, perhaps in the future ## - #torrent_id = int(torrent['href'].replace('/details.php?id=', '')) - #torrent_leechers = int(result.find('td', attrs = {'class' : 'ac t_leechers'}).string) - except (AttributeError, TypeError): - continue - - # Filter unseeded torrent and torrents with no name/url - #if mode != 'RSS' and torrent_seeders == 0: - # continue - - if not torrent_name or not torrent_download_url: - continue - - item = torrent_name, torrent_download_url - logger.log(u"Found result: " + torrent_name + " (" + torrent_details_url + ")", - logger.DEBUG) - items[mode].append(item) - - else: - logger.log(u"The Data returned from " + self.name + " do not contains any torrent", - logger.WARNING) - continue - - except Exception, e: - logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), - logger.ERROR) + except Exception, e: + logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), + logger.ERROR) results += items[mode] @@ -278,32 +278,6 @@ class NextGenProvider(generic.TorrentProvider): return title, url - def getURL(self, url, post_data=None, headers=None, json=False): - - if not self.session: - self._doLogin() - - if not headers: - headers = [] - - try: - # Remove double-slashes from url - parsed = list(urlparse.urlparse(url)) - parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one - url = urlparse.urlunparse(parsed) - - response = self.session.get(url, verify=False) - except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e: - logger.log(u"Error loading " + self.name + " URL: " + ex(e), logger.ERROR) - return None - - if response.status_code != 200: - logger.log(self.name + u" page requested with url " + url + " returned status code is " + str( - response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING) - return None - - return response.content - def findPropers(self, search_date=datetime.datetime.today()): results = [] diff --git a/sickbeard/providers/omgwtfnzbs.py b/sickbeard/providers/omgwtfnzbs.py index 90e8676b..dc9aa050 100644 --- a/sickbeard/providers/omgwtfnzbs.py +++ b/sickbeard/providers/omgwtfnzbs.py @@ -114,17 +114,14 @@ 
class OmgwtfnzbsProvider(generic.NZBProvider): search_url = 'https://api.omgwtfnzbs.org/json/?' + urllib.urlencode(params) logger.log(u"Search url: " + search_url, logger.DEBUG) - data = self.getURL(search_url, json=True) - - if not data: - logger.log(u"No data returned from " + search_url, logger.ERROR) + parsedJSON = self.getURL(search_url, json=True) + if not parsedJSON: return [] - if self._checkAuthFromData(data, is_XML=False): - + if self._checkAuthFromData(parsedJSON, is_XML=False): results = [] - for item in data: + for item in parsedJSON: if 'release' in item and 'getnzb' in item: results.append(item) diff --git a/sickbeard/providers/publichd.py b/sickbeard/providers/publichd.py index dba0647d..fcb44a16 100644 --- a/sickbeard/providers/publichd.py +++ b/sickbeard/providers/publichd.py @@ -141,7 +141,6 @@ class PublicHDProvider(generic.TorrentProvider): logger.log(u"Search string: " + searchURL, logger.DEBUG) html = self.getURL(searchURL) - if not html: continue @@ -205,74 +204,6 @@ class PublicHDProvider(generic.TorrentProvider): return (title, url) - def getURL(self, url, post_data=None, headers=None, json=False): - - if not self.session: - self.session = requests.Session() - - try: - # Remove double-slashes from url - parsed = list(urlparse.urlparse(url)) - parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one - url = urlparse.urlunparse(parsed) - - r = self.session.get(url, verify=False) - except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e: - logger.log(u"Error loading " + self.name + " URL: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR) - return None - - if r.status_code != 200: - logger.log(self.name + u" page requested with url " + url + " returned status code is " + str( - r.status_code) + ': ' + clients.http_error_code[r.status_code], logger.WARNING) - return None - - return r.content - - def downloadResult(self, result): - """ - Save the result to disk. - """ - - if not self.session: - self.session = requests.Session() - - torrent_hash = re.findall('urn:btih:([\w]{32,40})', result.url)[0].upper() - - if not torrent_hash: - logger.log("Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR) - return False - - try: - r = self.session.get('http://torcache.net/torrent/' + torrent_hash + '.torrent', verify=False) - except Exception, e: - logger.log("Unable to connect to TORCACHE: " + ex(e), logger.ERROR) - try: - logger.log("Trying TORRAGE cache instead") - r = self.session.get('http://torrage.com/torrent/' + torrent_hash + '.torrent', verify=False) - except Exception, e: - logger.log("Unable to connect to TORRAGE: " + ex(e), logger.ERROR) - return False - - if not r.status_code == 200: - return False - - magnetFileName = ek.ek(os.path.join, sickbeard.TORRENT_DIR, - helpers.sanitizeFileName(result.name) + '.' 
+ self.providerType) - magnetFileContent = r.content - - try: - with open(magnetFileName, 'wb') as fileOut: - fileOut.write(magnetFileContent) - - helpers.chmodAsParent(magnetFileName) - - except EnvironmentError, e: - logger.log("Unable to save the file: " + ex(e), logger.ERROR) - return False - - logger.log(u"Saved magnet link to " + magnetFileName + " ", logger.MESSAGE) - return True - def findPropers(self, search_date=datetime.datetime.today()): results = [] diff --git a/sickbeard/providers/rsstorrent.py b/sickbeard/providers/rsstorrent.py index 54624c4d..f46c7b30 100644 --- a/sickbeard/providers/rsstorrent.py +++ b/sickbeard/providers/rsstorrent.py @@ -35,7 +35,7 @@ from lib.requests import exceptions from lib.bencode import bdecode class TorrentRssProvider(generic.TorrentProvider): - def __init__(self, name, url, cookies, search_mode='eponly', search_fallback=False, backlog_only=False): + def __init__(self, name, url, cookies='', search_mode='eponly', search_fallback=False, backlog_only=False): generic.TorrentProvider.__init__(self, name) self.cache = TorrentRssCache(self) self.url = re.sub('\/$', '', url) @@ -47,11 +47,7 @@ class TorrentRssProvider(generic.TorrentProvider): self.search_mode = search_mode self.search_fallback = search_fallback self.backlog_only = backlog_only - - if cookies: - self.cookies = cookies - else: - self.cookies = '' + self.cookies = cookies def configStr(self): return self.name + '|' + self.url + '|' + self.cookies + '|' + str(int(self.enabled)) + '|' + self.search_mode + '|' + str(int(self.search_fallback)) + '|' + str(int(self.backlog_only)) @@ -118,6 +114,9 @@ class TorrentRssProvider(generic.TorrentProvider): if url.startswith('magnet:') and re.search('urn:btih:([\w]{32,40})', url): return (True, 'RSS feed Parsed correctly') else: + if self.cookies: + requests.utils.add_dict_to_cookiejar(self.session.cookies, + dict(x.rsplit('=', 1) for x in (self.cookies.split(';')))) torrent_file = self.getURL(url) try: bdecode(torrent_file) @@ -130,30 +129,6 @@ class TorrentRssProvider(generic.TorrentProvider): except Exception, e: return (False, 'Error when trying to load RSS: ' + ex(e)) - def getURL(self, url, post_data=None, headers=None, json=False): - - if not self.session: - self.session = requests.Session() - - if self.cookies: - requests.utils.add_dict_to_cookiejar(self.session.cookies, - dict(x.rsplit('=', 1) for x in (self.cookies.split(';')))) - - try: - parsed = list(urlparse.urlparse(url)) - parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one - r = self.session.get(url, verify=False) - except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e: - logger.log(u"Error loading " + self.name + " URL: " + ex(e), logger.ERROR) - return None - - if r.status_code != 200: - logger.log(self.name + u" page requested with url " + url + " returned status code is " + str( - r.status_code) + ': ' + clients.http_error_code[r.status_code], logger.WARNING) - return None - - return r.content - def dumpHTML(self, data): dumpName = ek.ek(os.path.join, sickbeard.CACHE_DIR, 'custom_torrent.html') @@ -179,10 +154,11 @@ class TorrentRssCache(tvcache.TVCache): def _getRSSData(self): logger.log(u"TorrentRssCache cache update URL: " + self.provider.url, logger.DEBUG) + + request_headers = None if self.provider.cookies: request_headers = { 'Cookie': self.provider.cookies } - else: - request_headers = None + return self.getRSSFeed(self.provider.url, request_headers=request_headers) def _parseItem(self, item): diff --git 
a/sickbeard/providers/scc.py b/sickbeard/providers/scc.py
index 070bdc4a..5eb6b25c 100644
--- a/sickbeard/providers/scc.py
+++ b/sickbeard/providers/scc.py
@@ -69,8 +69,6 @@ class SCCProvider(generic.TorrentProvider):

         self.categories = "c27=27&c17=17&c11=11"

-        self.headers = {'user-agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36'}
-
     def isEnabled(self):
         return self.enabled

@@ -178,14 +176,14 @@ class SCCProvider(generic.TorrentProvider):
             foreignSearchURL = None
             if mode == 'Season':
                 searchURL = self.urls['archive'] % (search_string)
-                data = [self.getURL(searchURL, headers=self.headers)]
+                data = [self.getURL(searchURL)]
             else:
                 searchURL = self.urls['search'] % (search_string, self.categories)
                 nonsceneSearchURL = self.urls['nonscene'] % (search_string)
                 foreignSearchURL = self.urls['foreign'] % (search_string)
-                data = [self.getURL(searchURL, headers=self.headers),
-                        self.getURL(nonsceneSearchURL, headers=self.headers),
-                        self.getURL(foreignSearchURL, headers=self.headers)]
+                data = [self.getURL(searchURL),
+                        self.getURL(nonsceneSearchURL),
+                        self.getURL(foreignSearchURL)]
                 logger.log(u"Search string: " + nonsceneSearchURL, logger.DEBUG)
                 logger.log(u"Search string: " + foreignSearchURL, logger.DEBUG)
@@ -222,9 +220,10 @@ class SCCProvider(generic.TorrentProvider):
                             title = link.string
                             if re.search('\.\.\.', title):
-                                with BS4Parser(self.getURL(self.url + "/" + link['href'])) as details_html:
-                                    title = re.search('(?<=").+(? (int(rows[0]['last_refreshed']) + MAX_XEM_AGE_SECS)
+        lastRefresh = int(rows[0]['last_refreshed'])
+        refresh = int(time.mktime(datetime.datetime.today().timetuple())) > lastRefresh + MAX_REFRESH_AGE_SECS
     else:
         refresh = True

     if refresh or force:
+        logger.log(
+            u'Looking up XEM scene mapping for show %s on %s' % (indexer_id, sickbeard.indexerApi(indexer).name,),
+            logger.DEBUG)
+
+        # mark refreshed
+        myDB.upsert("xem_refresh",
+                    {'indexer': indexer,
+                     'last_refreshed': int(time.mktime(datetime.datetime.today().timetuple()))},
+                    {'indexer_id': indexer_id})
+
         try:
-            logger.log(
-                u'Looking up XEM scene mapping for show %s on %s' % (indexer_id, sickbeard.indexerApi(indexer).name,),
-                logger.DEBUG)
-            data = requests.get("http://thexem.de/map/all?id=%s&origin=%s&destination=scene" % (
-                indexer_id, sickbeard.indexerApi(indexer).config['xem_origin'],), verify=False).json()
+            parsedJSON = sickbeard.helpers.getURL(url, json=True)
+            if not parsedJSON or parsedJSON == '':
+                logger.log(u'No XEM data for show "%s on %s"' % (indexer_id, sickbeard.indexerApi(indexer).name,), logger.MESSAGE)
+                return

-            if data is None or data == '':
-                logger.log(u'No XEN data for show "%s on %s", trying TVTumbler' % (
-                    indexer_id, sickbeard.indexerApi(indexer).name,), logger.MESSAGE)
-                data = requests.get("http://show-api.tvtumbler.com/api/thexem/all?id=%s&origin=%s&destination=scene" % (
-                    indexer_id, sickbeard.indexerApi(indexer).config['xem_origin'],), verify=False).json()
-                if data is None or data == '':
-                    logger.log(u'TVTumbler also failed for show "%s on %s". giving up.' % (indexer_id, indexer,),
-                               logger.MESSAGE)
-                    return None
+            if 'success' in parsedJSON['result']:
+                cl = []
+                for entry in parsedJSON['data']:
+                    if 'scene' in entry:
+                        cl.append([
+                            "UPDATE tv_episodes SET scene_season = ?, scene_episode = ?, scene_absolute_number = ? WHERE showid = ? AND season = ? 
AND episode = ?", + [entry['scene']['season'], + entry['scene']['episode'], + entry['scene']['absolute'], + indexer_id, + entry[sickbeard.indexerApi(indexer).config['xem_origin']]['season'], + entry[sickbeard.indexerApi(indexer).config['xem_origin']]['episode'] + ]]) + if 'scene_2' in entry: # for doubles + cl.append([ + "UPDATE tv_episodes SET scene_season = ?, scene_episode = ?, scene_absolute_number = ? WHERE showid = ? AND season = ? AND episode = ?", + [entry['scene_2']['season'], + entry['scene_2']['episode'], + entry['scene_2']['absolute'], + indexer_id, + entry[sickbeard.indexerApi(indexer).config['xem_origin']]['season'], + entry[sickbeard.indexerApi(indexer).config['xem_origin']]['episode'] + ]]) - result = data - - cl = [] - if result: - cl.append(["INSERT OR REPLACE INTO xem_refresh (indexer, indexer_id, last_refreshed) VALUES (?,?,?)", - [indexer, indexer_id, time.time()]]) - if 'success' in result['result']: - for entry in result['data']: - if 'scene' in entry: - cl.append([ - "UPDATE tv_episodes SET scene_season = ?, scene_episode = ?, scene_absolute_number = ? WHERE showid = ? AND season = ? AND episode = ?", - [entry['scene']['season'], - entry['scene']['episode'], - entry['scene']['absolute'], - indexer_id, - entry[sickbeard.indexerApi(indexer).config['xem_origin']]['season'], - entry[sickbeard.indexerApi(indexer).config['xem_origin']]['episode'] - ]]) - if 'scene_2' in entry: # for doubles - cl.append([ - "UPDATE tv_episodes SET scene_season = ?, scene_episode = ?, scene_absolute_number = ? WHERE showid = ? AND season = ? AND episode = ?", - [entry['scene_2']['season'], - entry['scene_2']['episode'], - entry['scene_2']['absolute'], - indexer_id, - entry[sickbeard.indexerApi(indexer).config['xem_origin']]['season'], - entry[sickbeard.indexerApi(indexer).config['xem_origin']]['episode'] - ]]) - else: - logger.log(u'Failed to get XEM scene data for show %s from %s because "%s"' % ( - indexer_id, sickbeard.indexerApi(indexer).name, result['message']), logger.DEBUG) + if len(cl) > 0: + myDB = db.DBConnection() + myDB.mass_action(cl) else: logger.log(u"Empty lookup result - no XEM data for show %s on %s" % ( indexer_id, sickbeard.indexerApi(indexer).name,), logger.DEBUG) except Exception, e: - logger.log(u"Exception while refreshing XEM data for show " + str(indexer_id) + " on " + sickbeard.indexerApi( - indexer).name + ": " + ex(e), logger.WARNING) + logger.log( + u"Exception while refreshing XEM data for show " + str(indexer_id) + " on " + sickbeard.indexerApi( + indexer).name + ": " + ex(e), logger.WARNING) logger.log(traceback.format_exc(), logger.DEBUG) - return None - - if len(cl) > 0: - myDB = db.DBConnection() - myDB.mass_action(cl) - def fix_xem_numbering(indexer_id, indexer): """ @@ -553,12 +546,12 @@ def fix_xem_numbering(indexer_id, indexer): # query = [{ # "name": self.show.name, - # "seasons": [{ - # "episodes": [{ - # "episode_number": None, - # "name": None - # }], - # "season_number": None, + # "seasons": [{ + # "episodes": [{ + # "episode_number": None, + # "name": None + # }], + # "season_number": None, # }], # "/tv/tv_program/number_of_seasons": [], # "/tv/tv_program/number_of_episodes": [], diff --git a/sickbeard/search.py b/sickbeard/search.py index 3d4d54ab..49acfcd6 100644 --- a/sickbeard/search.py +++ b/sickbeard/search.py @@ -59,7 +59,6 @@ def _downloadResult(result): # nzbs with an URL can just be downloaded from the provider if result.resultType == "nzb": newResult = resProvider.downloadResult(result) - # if it's an nzb data result elif 
result.resultType == "nzbdata": @@ -83,18 +82,12 @@ def _downloadResult(result): elif resProvider.providerType == "torrent": newResult = resProvider.downloadResult(result) - else: logger.log(u"Invalid provider type - this is a coding error, report it please", logger.ERROR) - return False - - if newResult and sickbeard.USE_FAILED_DOWNLOADS: - ui.notifications.message('Episode snatched', - '%s snatched from %s' % (result.name, resProvider.name)) + newResult = False return newResult - def snatchEpisode(result, endStatus=SNATCHED): """ Contains the internal logic necessary to actually "snatch" a result that diff --git a/sickbeard/search_queue.py b/sickbeard/search_queue.py index 05453be3..bbda755a 100644 --- a/sickbeard/search_queue.py +++ b/sickbeard/search_queue.py @@ -35,7 +35,7 @@ search_queue_lock = threading.Lock() BACKLOG_SEARCH = 10 DAILY_SEARCH = 20 FAILED_SEARCH = 30 -MANUAL_SEARCH = 30 +MANUAL_SEARCH = 40 class SearchQueue(generic_queue.GenericQueue): diff --git a/sickbeard/show_queue.py b/sickbeard/show_queue.py index eec4b357..6e00b93a 100644 --- a/sickbeard/show_queue.py +++ b/sickbeard/show_queue.py @@ -428,11 +428,10 @@ class QueueItemRefresh(ShowQueueItem): self.show.populateCache() # Load XEM data to DB for show - sickbeard.scene_numbering.xem_refresh(self.show.indexerid, self.show.indexer, force=self.force) + sickbeard.scene_numbering.xem_refresh(self.show.indexerid, self.show.indexer) self.inProgress = False - class QueueItemRename(ShowQueueItem): def __init__(self, show=None): ShowQueueItem.__init__(self, ShowQueueActions.RENAME, show) diff --git a/sickbeard/tv.py b/sickbeard/tv.py index dbc8d823..ec226cc2 100644 --- a/sickbeard/tv.py +++ b/sickbeard/tv.py @@ -721,7 +721,7 @@ class TVShow(object): if newStatus != None: with curEp.lock: logger.log(u"STATUS: we have an associated file, so setting the status from " + str( - curEp.status) + u" to DOWNLOADED/" + str(Quality.statusFromName(file)), logger.DEBUG) + curEp.status) + u" to DOWNLOADED/" + str(Quality.statusFromName(file, anime=self.is_anime)), logger.DEBUG) curEp.status = Quality.compositeStatus(newStatus, newQuality) with curEp.lock: @@ -1676,7 +1676,7 @@ class TVEpisode(object): logger.log( u"5 Status changes from " + str(self.status) + " to " + str(Quality.statusFromName(self.location)), logger.DEBUG) - self.status = Quality.statusFromName(self.location) + self.status = Quality.statusFromName(self.location, anime=self.show.is_anime) # shouldn't get here probably else: @@ -1701,8 +1701,8 @@ class TVEpisode(object): if self.status == UNKNOWN: if sickbeard.helpers.isMediaFile(self.location): logger.log(u"7 Status changes from " + str(self.status) + " to " + str( - Quality.statusFromName(self.location)), logger.DEBUG) - self.status = Quality.statusFromName(self.location) + Quality.statusFromName(self.location, anime=self.show.is_anime)), logger.DEBUG) + self.status = Quality.statusFromName(self.location, anime=self.show.is_anime) nfoFile = sickbeard.helpers.replaceExtension(self.location, "nfo") logger.log(str(self.show.indexerid) + u": Using NFO name " + nfoFile, logger.DEBUG) diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index 91cea40e..706d67e1 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -106,16 +106,18 @@ class TVCache(): def updateCache(self): if self.shouldUpdate() and self._checkAuth(None): - self._clearCache() - - data = self._getRSSData() - # as long as the http request worked we count this as an update - if data: - self.setLastUpdate() - else: + data = self._getRSSData() + if 
not data: return [] + # clear cache + self._clearCache() + + # set updated + self.setLastUpdate() + + # parse data if self._checkAuth(data): cl = [] for item in data.entries: diff --git a/sickbeard/tvtumbler.py b/sickbeard/tvtumbler.py deleted file mode 100644 index 4adafa7b..00000000 --- a/sickbeard/tvtumbler.py +++ /dev/null @@ -1,47 +0,0 @@ -''' -Created on Aug 26, 2013 - -Wrappers around tvtumbler access. - -@author: dermot@buckley.ie -''' -import time - -from sickbeard import helpers -from sickbeard import logger - -try: - import json -except ImportError: - from lib import simplejson as json - -UPDATE_INTERVAL = 432000 # 5 days -SHOW_LOOKUP_URL = 'http://show-api.tvtumbler.com/api/show' -_tvtumber_cache = {} - - -def show_info(indexer_id): - try: - cachedResult = _tvtumber_cache[str(indexer_id)] - if time.time() < (cachedResult['mtime'] + UPDATE_INTERVAL): - # cached result is still considered current, use it - return cachedResult['response'] - # otherwise we just fall through to lookup - except KeyError: - pass # no cached value, just fall through to lookup - - url = SHOW_LOOKUP_URL + '?indexer_id=' + str(indexer_id) - data = helpers.getURL(url, timeout=60) # give this a longer timeout b/c it may take a while - result = json.loads(data) - if not result: - logger.log(u"Empty lookup result -> failed to find show id", logger.DEBUG) - return None - if result['error']: - logger.log(u"Lookup failed: " + result['errorMessage'], logger.DEBUG) - return None - - # result is good, store it for later - _tvtumber_cache[str(indexer_id)] = {'mtime': time.time(), - 'response': result['show']} - - return result['show'] diff --git a/sickbeard/versionChecker.py b/sickbeard/versionChecker.py index d2ededdd..9667683a 100644 --- a/sickbeard/versionChecker.py +++ b/sickbeard/versionChecker.py @@ -163,21 +163,18 @@ class WindowsUpdateManager(UpdateManager): regex = ".*SickRage\-win32\-alpha\-build(\d+)(?:\.\d+)?\.zip" version_url_data = helpers.getURL(self.version_url) + if not version_url_data: + return - if version_url_data is None: - return None - else: - for curLine in version_url_data.splitlines(): - logger.log(u"checking line " + curLine, logger.DEBUG) - match = re.match(regex, curLine) - if match: - logger.log(u"found a match", logger.DEBUG) - if whole_link: - return curLine.strip() - else: - return int(match.group(1)) - - return None + for curLine in version_url_data.splitlines(): + logger.log(u"checking line " + curLine, logger.DEBUG) + match = re.match(regex, curLine) + if match: + logger.log(u"found a match", logger.DEBUG) + if whole_link: + return curLine.strip() + else: + return int(match.group(1)) def need_update(self): self._cur_version = self._find_installed_version() diff --git a/sickbeard/webapi.py b/sickbeard/webapi.py index 7e897ecc..476c5a31 100644 --- a/sickbeard/webapi.py +++ b/sickbeard/webapi.py @@ -23,27 +23,25 @@ import os import time import urllib import datetime -import threading import re import traceback import sickbeard import webserve from sickbeard import db, logger, exceptions, history, ui, helpers -from sickbeard.exceptions import ex from sickbeard import encodingKludge as ek from sickbeard import search_queue +from sickbeard import image_cache +from sickbeard import classes +from sickbeard.exceptions import ex from sickbeard.common import SNATCHED, SNATCHED_PROPER, DOWNLOADED, SKIPPED, UNAIRED, IGNORED, ARCHIVED, WANTED, UNKNOWN from common import Quality, qualityPresetStrings, statusStrings -from sickbeard import image_cache try: import json except ImportError: from lib 
import simplejson as json -import xml.etree.cElementTree as etree - from lib import subliminal dateFormat = "%Y-%m-%d" @@ -1530,7 +1528,7 @@ class CMD_SickBeardRestart(ApiCall): class CMD_SickBeardSearchIndexers(ApiCall): _help = {"desc": "search for show on the indexers with a given string and language", "optionalParameters": {"name": {"desc": "name of the show you want to search for"}, - "indexerid": {"desc": "thetvdb.com unique id of a show"}, + "indexerid": {"desc": "thetvdb.com or tvrage.com unique id of a show"}, "lang": {"desc": "the 2 letter abbreviation lang id"} } } @@ -1555,31 +1553,30 @@ class CMD_SickBeardSearchIndexers(ApiCall): def run(self): """ search for show at tvdb with a given string and language """ if self.name and not self.indexerid: # only name was given - baseURL = "http://thetvdb.com/api/GetSeries.php?" - params = {"seriesname": str(self.name).encode('utf-8'), 'language': self.lang} - finalURL = baseURL + urllib.urlencode(params) - urlData = sickbeard.helpers.getURL(finalURL) + lINDEXER_API_PARMS = sickbeard.indexerApi(self.indexer).api_params.copy() + lINDEXER_API_PARMS['language'] = self.lang + lINDEXER_API_PARMS['custom_ui'] = classes.AllShowsListUI + t = sickbeard.indexerApi(self.indexer).indexer(**lINDEXER_API_PARMS) - if urlData is None: + apiData = None + + try: + apiData = t[str(self.name).encode()] + except Exception, e: + pass + + if not apiData: return _responds(RESULT_FAILURE, msg="Did not get result from tvdb") - else: - try: - seriesXML = etree.ElementTree(etree.XML(urlData)) - except Exception, e: - logger.log(u"API :: Unable to parse XML for some reason: " + ex(e) + " from XML: " + urlData, - logger.ERROR) - return _responds(RESULT_FAILURE, msg="Unable to read result from tvdb") - series = seriesXML.getiterator('Series') - results = [] - for curSeries in series: - results.append({"indexerid": int(curSeries.findtext('seriesid')), - "tvdbid": int(curSeries.findtext('seriesid')), - "name": curSeries.findtext('SeriesName'), - "first_aired": curSeries.findtext('FirstAired')}) + results = [] + for curSeries in apiData: + results.append({"indexerid": int(curSeries.findtext('seriesid')), + "tvdbid": int(curSeries.findtext('seriesid')), + "name": curSeries.findtext('SeriesName'), + "first_aired": curSeries.findtext('FirstAired')}) - lang_id = self.valid_languages[self.lang] - return _responds(RESULT_SUCCESS, {"results": results, "langid": lang_id}) + lang_id = self.valid_languages[self.lang] + return _responds(RESULT_SUCCESS, {"results": results, "langid": lang_id}) elif self.indexerid: lINDEXER_API_PARMS = sickbeard.indexerApi(self.indexer).api_params.copy() diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py index 45cf1fa6..df162d68 100644 --- a/sickbeard/webserve.py +++ b/sickbeard/webserve.py @@ -17,6 +17,7 @@ # along with SickRage. If not, see . 
 from __future__ import with_statement
+
 import base64
 import inspect
 import traceback
@@ -1429,8 +1430,7 @@ class ConfigGeneral(MainHandler):
                     use_api=None, api_key=None, indexer_default=None, timezone_display=None, cpu_preset=None,
                     web_password=None, version_notify=None, enable_https=None, https_cert=None, https_key=None,
                     handle_reverse_proxy=None, sort_article=None, auto_update=None, notify_on_update=None,
-                    proxy_setting=None,
-                    anon_redirect=None, git_path=None, calendar_unprotected=None,
+                    proxy_setting=None, anon_redirect=None, git_path=None, calendar_unprotected=None,
                     fuzzy_dating=None, trim_zero=None, date_preset=None, date_preset_na=None, time_preset=None,
                     indexer_timeout=None, play_videos=None):
@@ -1539,7 +1539,6 @@ class ConfigBackupRestore(MainHandler):

     def restore(self, backupFile=None):
-
         finalResult = ''

         if backupFile:
@@ -2460,26 +2459,11 @@ class ConfigAnime(MainHandler):

         results = []

-        if use_anidb == "on":
-            use_anidb = 1
-        else:
-            use_anidb = 0
-
-        if anidb_use_mylist == "on":
-            anidb_use_mylist = 1
-        else:
-            anidb_use_mylist = 0
-
-        if split_home == "on":
-            split_home = 1
-        else:
-            split_home = 0
-
-        sickbeard.USE_ANIDB = use_anidb
+        sickbeard.USE_ANIDB = config.checkbox_to_value(use_anidb)
         sickbeard.ANIDB_USERNAME = anidb_username
         sickbeard.ANIDB_PASSWORD = anidb_password
-        sickbeard.ANIDB_USE_MYLIST = anidb_use_mylist
-        sickbeard.ANIME_SPLIT_HOME = split_home
+        sickbeard.ANIDB_USE_MYLIST = config.checkbox_to_value(anidb_use_mylist)
+        sickbeard.ANIME_SPLIT_HOME = config.checkbox_to_value(split_home)

         sickbeard.save_config()

From b13e72e0a35ee67f9f2a0f1ba8c5e9904632cc0e Mon Sep 17 00:00:00 2001
From: echel0n
Date: Sun, 27 Jul 2014 04:58:14 -0700
Subject: [PATCH 07/23] Moved code for cleaning up the cache folder to a
 separate function in helpers.py. Added a call to the cache folder cleanup
 during SickRage main init. Changed an error message in the rss feed class to
 a debug message for when a url returns no data. Moved indexer api cache
 files to be placed under cache folder subfolder indexers. Moved rss feed
 cache files to be placed under cache folder subfolder rss.

---
 sickbeard/__init__.py | 4 ++++
 sickbeard/helpers.py | 39 ++++++++++++++++++++++++++++++-
 sickbeard/indexers/indexer_api.py | 2 +-
 sickbeard/providers/kat.py | 4 ----
 sickbeard/rssfeeds.py | 14 ++++++-----
 sickbeard/showUpdater.py | 28 +---------------------
 6 files changed, 52 insertions(+), 39 deletions(-)

diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py
index 14fa7d2b..8f710ab1 100755
--- a/sickbeard/__init__.py
+++ b/sickbeard/__init__.py
@@ -525,6 +525,10 @@ def initialize(consoleLogging=True):
             logger.log(u"!!! 
Creating local cache dir failed, using system default", logger.ERROR) CACHE_DIR = None + # clean cache folders + if CACHE_DIR: + helpers.clearCache() + GUI_NAME = check_setting_str(CFG, 'GUI', 'gui_name', 'slick') ACTUAL_LOG_DIR = check_setting_str(CFG, 'General', 'log_dir', 'Logs') diff --git a/sickbeard/helpers.py b/sickbeard/helpers.py index 0f870fc0..bc72628e 100644 --- a/sickbeard/helpers.py +++ b/sickbeard/helpers.py @@ -32,6 +32,7 @@ import urlparse import uuid import base64 import zipfile +import datetime import sickbeard import subliminal @@ -1306,4 +1307,40 @@ def download_file(url, filename, session=None): resp.status_code) + ': ' + clients.http_error_code[resp.status_code], logger.WARNING) return False - return True \ No newline at end of file + return True + +def clearCache(force=False): + update_datetime = datetime.datetime.now() + + # clean out cache directory, remove everything > 12 hours old + if sickbeard.CACHE_DIR: + logger.log(u"Trying to clean cache folder " + sickbeard.CACHE_DIR) + + # Does our cache_dir exists + if not ek.ek(os.path.isdir, sickbeard.CACHE_DIR): + logger.log(u"Can't clean " + sickbeard.CACHE_DIR + " if it doesn't exist", logger.WARNING) + else: + max_age = datetime.timedelta(hours=12) + + # Get all our cache files + for cache_root, cache_dirs, cache_files in os.walk(sickbeard.CACHE_DIR): + path = os.path.basename(cache_root) + + # skip rss provider caches + if path == 'rss': + continue + + for file in cache_files: + cache_file = ek.ek(os.path.join, cache_root, file) + + if ek.ek(os.path.isfile, cache_file): + cache_file_modified = datetime.datetime.fromtimestamp( + ek.ek(os.path.getmtime, cache_file)) + + if force or (update_datetime - cache_file_modified > max_age): + try: + ek.ek(os.remove, cache_file) + except OSError, e: + logger.log(u"Unable to clean " + cache_root + ": " + repr(e) + " / " + str(e), + logger.WARNING) + break \ No newline at end of file diff --git a/sickbeard/indexers/indexer_api.py b/sickbeard/indexers/indexer_api.py index 5e82d2f4..50c82738 100644 --- a/sickbeard/indexers/indexer_api.py +++ b/sickbeard/indexers/indexer_api.py @@ -47,7 +47,7 @@ class indexerApi(object): def api_params(self): if self.indexerID: if sickbeard.CACHE_DIR: - indexerConfig[self.indexerID]['api_params']['cache'] = os.path.join(sickbeard.CACHE_DIR, self.name) + indexerConfig[self.indexerID]['api_params']['cache'] = os.path.join(sickbeard.CACHE_DIR, 'indexers', self.name) if sickbeard.PROXY_SETTING: indexerConfig[self.indexerID]['api_params']['proxy'] = sickbeard.PROXY_SETTING diff --git a/sickbeard/providers/kat.py b/sickbeard/providers/kat.py index e9abc71c..e2e88924 100644 --- a/sickbeard/providers/kat.py +++ b/sickbeard/providers/kat.py @@ -224,7 +224,6 @@ class KATProvider(generic.TorrentProvider): results = [] items = {'Season': [], 'Episode': [], 'RSS': []} - soup = None for mode in search_params.keys(): for search_string in search_params[mode]: @@ -379,13 +378,10 @@ class KATCache(tvcache.TVCache): if ci is not None: cl.append(ci) - - if len(cl) > 0: myDB = self._getDB() myDB.mass_action(cl) - def _parseItem(self, item): (title, url) = item diff --git a/sickbeard/rssfeeds.py b/sickbeard/rssfeeds.py index 3d3fbab6..9c59f9d9 100644 --- a/sickbeard/rssfeeds.py +++ b/sickbeard/rssfeeds.py @@ -16,7 +16,9 @@ from shove import Shove class RSSFeeds: def __init__(self, db_name): - self.db_name = ek.ek(os.path.join, sickbeard.CACHE_DIR, db_name + '.db') + self.db_name = ek.ek(os.path.join, sickbeard.CACHE_DIR, 'rss', db_name + '.db') + if not 
os.path.exists(os.path.dirname(self.db_name)): + sickbeard.helpers.makeDir(os.path.dirname(self.db_name)) def clearCache(self, age=None): try: @@ -24,7 +26,7 @@ class RSSFeeds: fc = cache.Cache(fs) fc.purge(age) except Exception as e: - logger.log(u"RSS cache error: " + ex(e), logger.DEBUG) + logger.log(u"RSS error clearing cache: " + ex(e), logger.DEBUG) def getFeed(self, url, post_data=None, request_headers=None): parsed = list(urlparse.urlparse(url)) @@ -39,7 +41,7 @@ class RSSFeeds: feed = fc.fetch(url, False, False, request_headers) if not feed or not feed.entries: - logger.log(u"RSS cache error loading url: " + url, logger.ERROR) + logger.log(u"RSS error loading url: " + url, logger.DEBUG) return elif 'error' in feed.feed: err_code = feed.feed['error']['code'] @@ -48,7 +50,7 @@ class RSSFeeds: logger.log( u"RSS ERROR:[%s] CODE:[%s]" % (err_desc, err_code), logger.DEBUG) return - - return feed + else: + return feed except Exception as e: - logger.log(u"RSS cache error: " + ex(e), logger.DEBUG) \ No newline at end of file + logger.log(u"RSS error: " + ex(e), logger.DEBUG) \ No newline at end of file diff --git a/sickbeard/showUpdater.py b/sickbeard/showUpdater.py index 2789c617..d4b08f8b 100644 --- a/sickbeard/showUpdater.py +++ b/sickbeard/showUpdater.py @@ -47,33 +47,7 @@ class ShowUpdater(): logger.log(u"Doing full update on all shows") # clean out cache directory, remove everything > 12 hours old - if sickbeard.CACHE_DIR: - for indexer in sickbeard.indexerApi().indexers: - cache_dir = sickbeard.indexerApi(indexer).cache - logger.log(u"Trying to clean cache folder " + cache_dir) - - # Does our cache_dir exists - if not ek.ek(os.path.isdir, cache_dir): - logger.log(u"Can't clean " + cache_dir + " if it doesn't exist", logger.WARNING) - else: - max_age = datetime.timedelta(hours=12) - # Get all our cache files - cache_files = ek.ek(os.listdir, cache_dir) - - for cache_file in cache_files: - cache_file_path = ek.ek(os.path.join, cache_dir, cache_file) - - if ek.ek(os.path.isfile, cache_file_path): - cache_file_modified = datetime.datetime.fromtimestamp( - ek.ek(os.path.getmtime, cache_file_path)) - - if update_datetime - cache_file_modified > max_age: - try: - ek.ek(os.remove, cache_file_path) - except OSError, e: - logger.log(u"Unable to clean " + cache_dir + ": " + repr(e) + " / " + str(e), - logger.WARNING) - break + sickbeard.helpers.clearCache() # select 10 'Ended' tv_shows updated more than 90 days ago to include in this update stale_should_update = [] From f2a1331110c90c0eeacba79f7642d4aa93b69c8a Mon Sep 17 00:00:00 2001 From: echel0n Date: Sun, 27 Jul 2014 05:34:03 -0700 Subject: [PATCH 08/23] Skip cleaning images cache folder. --- sickbeard/helpers.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sickbeard/helpers.py b/sickbeard/helpers.py index bc72628e..8ee74f8f 100644 --- a/sickbeard/helpers.py +++ b/sickbeard/helpers.py @@ -1326,8 +1326,8 @@ def clearCache(force=False): for cache_root, cache_dirs, cache_files in os.walk(sickbeard.CACHE_DIR): path = os.path.basename(cache_root) - # skip rss provider caches - if path == 'rss': + # skip these cache folders + if path in ['rss', 'images']: continue for file in cache_files: From f67fa660beb8b06aea232e4eaca998374063ef4f Mon Sep 17 00:00:00 2001 From: echel0n Date: Sun, 27 Jul 2014 05:51:22 -0700 Subject: [PATCH 09/23] Tornado WebUI now asynchronous, huge performance increase. 
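
A condensed sketch of the pattern this patch applies (the handler body is simplified; _dispatch is the existing SickRage router method shown in the diff below): with tornado.web.asynchronous applied, Tornado no longer finishes the request automatically when get()/post() return, so the handler calls finish() itself once the dispatched page has been rendered.

    from tornado.web import RequestHandler, asynchronous

    class PageHandler(RequestHandler):
        @asynchronous
        def get(self, *args, **kwargs):
            # The @asynchronous decorator keeps the connection open after
            # get() returns; the request only completes when finish() runs.
            result = self._dispatch()  # resolve the requested page method
            self.finish(result)
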
--- sickbeard/webserve.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py index 97443aea..e677c733 100644 --- a/sickbeard/webserve.py +++ b/sickbeard/webserve.py @@ -81,9 +81,7 @@ except ImportError: from lib import adba from Cheetah.Template import Template - -from tornado.web import RequestHandler, HTTPError - +from tornado.web import RequestHandler, HTTPError, asynchronous def authenticated(handler_class): def wrap_execute(handler_execute): @@ -190,7 +188,6 @@ class MainHandler(RequestHandler): trace_info, request_info)) def _dispatch(self): - path = self.request.uri.replace(sickbeard.WEB_ROOT, '').split('?')[0] method = path.strip('/').split('/')[-1] @@ -236,12 +233,14 @@ class MainHandler(RequestHandler): raise HTTPError(404) + @asynchronous def get(self, *args, **kwargs): try: self.finish(self._dispatch()) except HTTPRedirect, e: self.redirect(e.url, e.permanent, e.status) + @asynchronous def post(self, *args, **kwargs): try: self.finish(self._dispatch()) From fd9be53fcbac5ea926efdd73bc15fa6f5c50622f Mon Sep 17 00:00:00 2001 From: Adam Date: Tue, 22 Jul 2014 12:53:32 +0800 Subject: [PATCH 10/23] Anime propers --- sickbeard/databases/mainDB.py | 16 +++++++++++- sickbeard/history.py | 13 +++++----- sickbeard/name_parser/parser.py | 23 +++++++++++++++-- sickbeard/postProcessor.py | 46 +++++++++++++++++++++++++-------- sickbeard/properFinder.py | 26 +++++++++++++++++++ sickbeard/providers/generic.py | 2 ++ sickbeard/tv.py | 27 ++++++++++++++----- sickbeard/tvcache.py | 13 ++++++++-- 8 files changed, 138 insertions(+), 28 deletions(-) diff --git a/sickbeard/databases/mainDB.py b/sickbeard/databases/mainDB.py index 96e4aa5e..3ccf14a1 100644 --- a/sickbeard/databases/mainDB.py +++ b/sickbeard/databases/mainDB.py @@ -27,7 +27,7 @@ from sickbeard import encodingKludge as ek from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException MIN_DB_VERSION = 9 # oldest db version we support migrating from -MAX_DB_VERSION = 39 +MAX_DB_VERSION = 40 class MainSanityCheck(db.DBSanityCheck): def check(self): @@ -901,3 +901,17 @@ class AddIndexerMapping(AddSceneToTvShows): "CREATE TABLE indexer_mapping (indexer_id INTEGER, indexer NUMERIC, mindexer_id INTEGER, mindexer NUMERIC, PRIMARY KEY (indexer_id, indexer))") self.incDBVersion() + +class AddVersionToTvEpisodes(AddIndexerMapping): + def test(self): + return self.checkDBVersion() >= 40 + + def execute(self): + backupDatabase(40) + + logger.log(u"Adding column version to tv_episodes and history") + self.addColumn("tv_episodes", "version", "NUMERIC", "-1") + self.addColumn("tv_episodes", "release_group", "TEXT", "") + self.addColumn("history", "version", "NUMERIC", "-1") + + self.incDBVersion() diff --git a/sickbeard/history.py b/sickbeard/history.py index 757d6fcb..050fefac 100644 --- a/sickbeard/history.py +++ b/sickbeard/history.py @@ -25,7 +25,7 @@ from sickbeard.common import SNATCHED, SUBTITLED, FAILED, Quality dateFormat = "%Y%m%d%H%M%S" -def _logHistoryItem(action, showid, season, episode, quality, resource, provider): +def _logHistoryItem(action, showid, season, episode, quality, resource, provider, version=-1): logDate = datetime.datetime.today().strftime(dateFormat) if not isinstance(resource, unicode): @@ -33,8 +33,8 @@ def _logHistoryItem(action, showid, season, episode, quality, resource, provider myDB = db.DBConnection() myDB.action( - "INSERT INTO history (action, date, showid, season, episode, quality, resource, provider) VALUES 
(?,?,?,?,?,?,?,?)", - [action, logDate, showid, season, episode, quality, resource, provider]) + "INSERT INTO history (action, date, showid, season, episode, quality, resource, provider, version) VALUES (?,?,?,?,?,?,?,?,?)", + [action, logDate, showid, season, episode, quality, resource, provider, version]) def logSnatch(searchResult): @@ -44,6 +44,7 @@ def logSnatch(searchResult): season = int(curEpObj.season) episode = int(curEpObj.episode) quality = searchResult.quality + version = searchResult.version providerClass = searchResult.provider if providerClass != None: @@ -55,10 +56,10 @@ def logSnatch(searchResult): resource = searchResult.name - _logHistoryItem(action, showid, season, episode, quality, resource, provider) + _logHistoryItem(action, showid, season, episode, quality, resource, provider, version) -def logDownload(episode, filename, new_ep_quality, release_group=None): +def logDownload(episode, filename, new_ep_quality, release_group=None, version=-1): showid = int(episode.show.indexerid) season = int(episode.season) epNum = int(episode.episode) @@ -73,7 +74,7 @@ def logDownload(episode, filename, new_ep_quality, release_group=None): action = episode.status - _logHistoryItem(action, showid, season, epNum, quality, filename, provider) + _logHistoryItem(action, showid, season, epNum, quality, filename, provider, version) def logSubtitle(showid, season, episode, status, subtitleResult): diff --git a/sickbeard/name_parser/parser.py b/sickbeard/name_parser/parser.py index fd9f75c0..fd70a278 100644 --- a/sickbeard/name_parser/parser.py +++ b/sickbeard/name_parser/parser.py @@ -201,6 +201,16 @@ class NameParser(object): result.release_group = match.group('release_group') result.score += 1 + if 'version' in named_groups: + # assigns version to anime file if detected using anime regex. 
Non-anime regex receives -1 + version = match.group('version') + if version: + result.version = version + else: + result.version = 1 + else: + result.version = -1 + matches.append(result) @@ -438,6 +448,7 @@ class NameParser(object): final_result.series_name = self._combine_results(dir_name_result, file_name_result, 'series_name') final_result.extra_info = self._combine_results(dir_name_result, file_name_result, 'extra_info') final_result.release_group = self._combine_results(dir_name_result, file_name_result, 'release_group') + final_result.version = self._combine_results(dir_name_result, file_name_result, 'version') final_result.which_regex = [] if final_result == file_name_result: @@ -483,7 +494,8 @@ class ParseResult(object): ab_episode_numbers=None, show=None, score=None, - quality=None + quality=None, + version=None ): self.original_name = original_name @@ -518,6 +530,8 @@ class ParseResult(object): self.show = show self.score = score + self.version = version + def __eq__(self, other): if not other: return False @@ -548,6 +562,8 @@ class ParseResult(object): return False if self.quality != other.quality: return False + if self.version != other.version: + return False return True @@ -569,7 +585,10 @@ class ParseResult(object): to_return += str(self.sports_event_id) to_return += str(self.sports_air_date) if self.ab_episode_numbers: - to_return += ' [Absolute Nums: ' + str(self.ab_episode_numbers) + ']' + to_return += ' [ABS: ' + str(self.ab_episode_numbers) + ']' + if self.version: + to_return += ' [ANIME VER: ' + str(self.version) + ']' + if self.release_group: to_return += ' [GROUP: ' + self.release_group + ']' diff --git a/sickbeard/postProcessor.py b/sickbeard/postProcessor.py index 1c2c511e..90d1cb6e 100644 --- a/sickbeard/postProcessor.py +++ b/sickbeard/postProcessor.py @@ -92,6 +92,8 @@ class PostProcessor(object): self.is_priority = is_priority self.log = '' + + self.version = None def _log(self, message, level=logger.MESSAGE): """ @@ -382,10 +384,10 @@ class PostProcessor(object): """ Look up the NZB name in the history and see if it contains a record for self.nzb_name - Returns a (indexer_id, season, []) tuple. The first two may be None if none were found. + Returns a (indexer_id, season, [], quality, version) tuple. The first two may be None if none were found. """ - to_return = (None, None, [], None) + to_return = (None, None, [], None, None) # if we don't have either of these then there's nothing to use to search the history for anyway if not self.nzb_name and not self.folder_name: @@ -413,6 +415,7 @@ class PostProcessor(object): indexer_id = int(sql_results[0]["showid"]) season = int(sql_results[0]["season"]) quality = int(sql_results[0]["quality"]) + version = int(sql_results[0]["version"]) if quality == common.Quality.UNKNOWN: quality = None @@ -420,7 +423,8 @@ class PostProcessor(object): show = helpers.findCertainShow(sickbeard.showList, indexer_id) self.in_history = True - to_return = (show, season, [], quality) + self.version = version + to_return = (show, season, [], quality, version) self._log("Found result in history: " + str(to_return), logger.DEBUG) return to_return @@ -452,6 +456,7 @@ class PostProcessor(object): logger.log(u" or Parse result(air_date): " + str(parse_result.air_date), logger.DEBUG) logger.log(u"Parse result(release_group): " + str(parse_result.release_group), logger.DEBUG) + def _analyze_name(self, name, file=True): """ Takes a name and tries to figure out a show, season, and episode from it. 
@@ -464,7 +469,7 @@ class PostProcessor(object): logger.log(u"Analyzing name " + repr(name)) - to_return = (None, None, [], None) + to_return = (None, None, [], None, None) if not name: return to_return @@ -488,7 +493,7 @@ class PostProcessor(object): season = parse_result.season_number episodes = parse_result.episode_numbers - to_return = (show, season, episodes, parse_result.quality) + to_return = (show, season, episodes, parse_result.quality, None) self._finalize(parse_result) return to_return @@ -516,7 +521,7 @@ class PostProcessor(object): For a given file try to find the showid, season, and episode. """ - show = season = quality = None + show = season = quality = version = None episodes = [] # try to look up the nzb in history @@ -542,7 +547,7 @@ class PostProcessor(object): for cur_attempt in attempt_list: try: - (cur_show, cur_season, cur_episodes, cur_quality) = cur_attempt() + (cur_show, cur_season, cur_episodes, cur_quality, cur_version) = cur_attempt() except (InvalidNameException, InvalidShowException), e: logger.log(u"Unable to parse, skipping: " + ex(e), logger.DEBUG) continue @@ -555,6 +560,10 @@ class PostProcessor(object): if cur_quality and not (self.in_history and quality): quality = cur_quality + # we only get current version for animes from history to prevent issues with old database entries + if cur_version is not None: + version = cur_version + if cur_season != None: season = cur_season if cur_episodes: @@ -594,9 +603,9 @@ class PostProcessor(object): season = 1 if show and season and episodes: - return (show, season, episodes, quality) + return (show, season, episodes, quality, version) - return (show, season, episodes, quality) + return (show, season, episodes, quality, version) def _get_ep_obj(self, show, season, episodes): """ @@ -783,7 +792,7 @@ class PostProcessor(object): self.anidbEpisode = None # try to find the file info - (show, season, episodes, quality) = self._find_info() + (show, season, episodes, quality, version) = self._find_info() if not show: self._log(u"This show isn't in your list, you need to add it to SB before post-processing an episode", logger.ERROR) @@ -810,6 +819,14 @@ class PostProcessor(object): priority_download = self._is_priority(ep_obj, new_ep_quality) self._log(u"Is ep a priority download: " + str(priority_download), logger.DEBUG) + # get the version of the episode we're processing + if version: + self._log(u"Snatch history had a version in it, using that: v" + str(version), + logger.DEBUG) + new_ep_version = version + else: + new_ep_version = -1 + # check for an existing file existing_file_status = self._checkForExistingFile(ep_obj.location) @@ -890,6 +907,13 @@ class PostProcessor(object): cur_ep.is_proper = self.is_proper + cur_ep.version = new_ep_version + + if self.release_group: + cur_ep.release_group = self.release_group + else: + cur_ep.release_group = "" + sql_l.append(cur_ep.get_sql()) if len(sql_l) > 0: @@ -981,7 +1005,7 @@ class PostProcessor(object): ep_obj.createMetaFiles() # log it to history - history.logDownload(ep_obj, self.file_path, new_ep_quality, self.release_group) + history.logDownload(ep_obj, self.file_path, new_ep_quality, self.release_group, new_ep_version) # send notifications notifiers.notify_download(ep_obj._format_pattern('%SN - %Sx%0E - %EN - %QN')) diff --git a/sickbeard/properFinder.py b/sickbeard/properFinder.py index d0013592..589c478d 100644 --- a/sickbeard/properFinder.py +++ b/sickbeard/properFinder.py @@ -141,6 +141,12 @@ class ProperFinder(): else: curProper.season = 
parse_result.season_number if parse_result.season_number != None else 1 curProper.episode = parse_result.episode_numbers[0] + if parse_result.is_anime: + if parse_result.release_group and parse_result.version: + curProper.release_group = parse_result.release_group + curProper.version = parse_result.version + else: + continue curProper.quality = Quality.nameQuality(curProper.name, parse_result.is_anime) @@ -165,6 +171,25 @@ class ProperFinder(): if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != curProper.quality: continue + # check if we actually want this proper (if it's the right release group and a higher version) + if parse_result.is_anime: + myDB = db.DBConnection() + sqlResults = myDB.select( + "SELECT release_group, version FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", + [curProper.indexerid, curProper.season, curProper.episode]) + + oldVersion = int(sqlResults[0]["version"]) + oldRelease_group = (sqlResults[0]["release_group"]) + + if oldVersion > -1 and oldVersion < curProper.version: + logger.log("Found new anime v" + str(curProper.version) + " to replace existing v" + str(oldVersion)) + else: + continue + + if oldRelease_group != curProper.release_group: + logger.log("Skipping proper from release group: " + curProper.release_group + ", does not match existing release group: " + oldRelease_group) + continue + # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers if curProper.indexerid != -1 and (curProper.indexerid, curProper.season, curProper.episode) not in map( operator.attrgetter('indexerid', 'season', 'episode'), finalPropers): @@ -221,6 +246,7 @@ class ProperFinder(): result.url = curProper.url result.name = curProper.name result.quality = curProper.quality + result.version = curProper.version # snatch it search.snatchEpisode(result, SNATCHED_PROPER) diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py index 0419839d..53164085 100644 --- a/sickbeard/providers/generic.py +++ b/sickbeard/providers/generic.py @@ -306,6 +306,7 @@ class GenericProvider: showObj = parse_result.show quality = parse_result.quality release_group = parse_result.release_group + version = parse_result.version addCacheEntry = False if not (showObj.air_by_date or showObj.sports): @@ -394,6 +395,7 @@ class GenericProvider: result.quality = quality result.release_group = release_group result.content = None + result.version = version if len(epObj) == 1: epNum = epObj[0].episode diff --git a/sickbeard/tv.py b/sickbeard/tv.py index ec226cc2..7f7ab5a3 100644 --- a/sickbeard/tv.py +++ b/sickbeard/tv.py @@ -1274,6 +1274,8 @@ class TVEpisode(object): self._file_size = 0 self._release_name = '' self._is_proper = False + self._version = 0 + self._release_group = '' # setting any of the above sets the dirty flag self.dirty = True @@ -1317,6 +1319,8 @@ class TVEpisode(object): file_size = property(lambda self: self._file_size, dirty_setter("_file_size")) release_name = property(lambda self: self._release_name, dirty_setter("_release_name")) is_proper = property(lambda self: self._is_proper, dirty_setter("_is_proper")) + version = property(lambda self: self._version, dirty_setter("_version")) + release_group = property(lambda self: self._release_group, dirty_setter("_release_group")) def _set_location(self, new_location): logger.log(u"Setter sets location to " + new_location, logger.DEBUG) @@ -1523,6 +1527,12 @@ class TVEpisode(object): if sqlResults[0]["is_proper"]: self.is_proper = 
int(sqlResults[0]["is_proper"]) + if sqlResults[0]["version"]: + self.version = int(sqlResults[0]["version"]) + + if sqlResults[0]["release_group"] is not None: + self.release_group = sqlResults[0]["release_group"] + self.dirty = False return True @@ -1849,23 +1859,26 @@ class TVEpisode(object): "UPDATE tv_episodes SET indexerid = ?, indexer = ?, name = ?, description = ?, subtitles = ?, " "subtitles_searchcount = ?, subtitles_lastsearch = ?, airdate = ?, hasnfo = ?, hastbn = ?, status = ?, " "location = ?, file_size = ?, release_name = ?, is_proper = ?, showid = ?, season = ?, episode = ?, " - "absolute_number = ? WHERE episode_id = ?", + "absolute_number = ?, version = ?, release_group = ? WHERE episode_id = ?", [self.indexerid, self.indexer, self.name, self.description, ",".join([sub for sub in self.subtitles]), self.subtitles_searchcount, self.subtitles_lastsearch, self.airdate.toordinal(), self.hasnfo, self.hastbn, self.status, self.location, self.file_size, self.release_name, self.is_proper, self.show.indexerid, - self.season, self.episode, self.absolute_number, epID]] + self.season, self.episode, self.absolute_number, self.version, self.release_group, epID]] else: # use a custom insert method to get the data into the DB. return [ - "INSERT OR IGNORE INTO tv_episodes (episode_id, indexerid, indexer, name, description, subtitles, subtitles_searchcount, subtitles_lastsearch, airdate, hasnfo, hastbn, status, location, file_size, release_name, is_proper, showid, season, episode, absolute_number) VALUES " - "((SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?),?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);", + "INSERT OR IGNORE INTO tv_episodes (episode_id, indexerid, indexer, name, description, subtitles, " + "subtitles_searchcount, subtitles_lastsearch, airdate, hasnfo, hastbn, status, location, file_size, " + "release_name, is_proper, showid, season, episode, absolute_number, version, release_group) VALUES " + "((SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? 
AND episode = ?)" + ",?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);", [self.show.indexerid, self.season, self.episode, self.indexerid, self.indexer, self.name, self.description, ",".join([sub for sub in self.subtitles]), self.subtitles_searchcount, self.subtitles_lastsearch, self.airdate.toordinal(), self.hasnfo, self.hastbn, self.status, self.location, self.file_size, self.release_name, self.is_proper, self.show.indexerid, self.season, self.episode, - self.absolute_number]] + self.absolute_number, self.version, self.release_group]] def saveToDB(self, forceSave=False): """ @@ -1898,7 +1911,9 @@ class TVEpisode(object): "file_size": self.file_size, "release_name": self.release_name, "is_proper": self.is_proper, - "absolute_number": self.absolute_number + "absolute_number": self.absolute_number, + "version": self.version, + "release_group": self.release_group } controlValueDict = {"showid": self.show.indexerid, "season": self.season, diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index 706d67e1..887f39b5 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -55,6 +55,10 @@ class CacheDBConnection(db.DBConnection): if not self.hasColumn(providerName, 'release_group'): self.addColumn(providerName, 'release_group', "TEXT", "") + # add version column to table if missing + if not self.hasColumn(providerName, 'version'): + self.addColumn(providerName, 'version', "NUMERIC", "-1") + except Exception, e: if str(e) != "table [" + providerName + "] already exists": raise @@ -272,11 +276,14 @@ class TVCache(): # get release group release_group = parse_result.release_group + # get version + version = parse_result.version + logger.log(u"Added RSS item: [" + name + "] to cache: [" + self.providerID + "]", logger.DEBUG) return [ - "INSERT OR IGNORE INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality, release_group) VALUES (?,?,?,?,?,?,?,?)", - [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group]] + "INSERT OR IGNORE INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality, release_group, version) VALUES (?,?,?,?,?,?,?,?,?)", + [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group, version]] def searchCache(self, episodes, manualSearch=False): @@ -328,6 +335,7 @@ class TVCache(): curEp = int(curEp) curQuality = int(curResult["quality"]) curReleaseGroup = curResult["release_group"] + curVersion = curResult["version"] # if the show says we want that episode then add it to the list if not showObj.wantEpisode(curSeason, curEp, curQuality, manualSearch): @@ -347,6 +355,7 @@ class TVCache(): result.name = title result.quality = curQuality result.release_group = curReleaseGroup + result.version = curVersion result.content = self.provider.getURL(url) \ if self.provider.providerType == sickbeard.providers.generic.GenericProvider.TORRENT \ and not url.startswith('magnet') else None From 9493a8f7e539f3541d28af68009087a72407c6c6 Mon Sep 17 00:00:00 2001 From: echel0n Date: Sun, 27 Jul 2014 07:04:37 -0700 Subject: [PATCH 11/23] Added back in missing code for finding propers, was removed by mistake. Fixed issue with torrent clients not snatching due to improper handling of torrent hashes/files. 
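
The torrent-snatching fix boils down to tolerating links that carry no info-hash. A standalone sketch of the fallback added in generic.py below (the function name here is illustrative, not code from the repo):

    import re

    def candidate_torrent_urls(result_url):
        # Pull the btih info-hash out of a magnet link and build torrent-cache
        # mirror URLs; when no hash can be extracted, fall back to the raw link.
        try:
            torrent_hash = re.findall(r'urn:btih:([\w]{32,40})', result_url)[0].upper()
            return ['http://torcache.net/torrent/' + torrent_hash + '.torrent',
                    'http://torrage.com/torrent/' + torrent_hash + '.torrent',
                    'http://zoink.it/torrent/' + torrent_hash + '.torrent']
        except IndexError:
            return [result_url]
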
--- sickbeard/clients/__init__.py | 3 +-- sickbeard/properFinder.py | 32 ++++++++++++++++++++++---------- sickbeard/providers/generic.py | 33 +++++++++++++++------------------ sickbeard/search.py | 5 ++++- 4 files changed, 42 insertions(+), 31 deletions(-) diff --git a/sickbeard/clients/__init__.py b/sickbeard/clients/__init__.py index 993b6181..84646ac3 100644 --- a/sickbeard/clients/__init__.py +++ b/sickbeard/clients/__init__.py @@ -82,5 +82,4 @@ def getClientIstance(name): module = getClientModule(name) className = module.api.__class__.__name__ - return getattr(module, className) - + return getattr(module, className) \ No newline at end of file diff --git a/sickbeard/properFinder.py b/sickbeard/properFinder.py index d0013592..4ee89ed1 100644 --- a/sickbeard/properFinder.py +++ b/sickbeard/properFinder.py @@ -60,7 +60,8 @@ class ProperFinder(): run_in = sickbeard.properFinderScheduler.lastRun + sickbeard.properFinderScheduler.cycleTime - datetime.datetime.now() hours, remainder = divmod(run_in.seconds, 3600) minutes, seconds = divmod(remainder, 60) - run_at = u", next check in approx. " + ("%dh, %dm" % (hours, minutes) if 0 < hours else "%dm, %ds" % (minutes, seconds)) + run_at = u", next check in approx. " + ( + "%dh, %dm" % (hours, minutes) if 0 < hours else "%dm, %ds" % (minutes, seconds)) logger.log(u"Completed the search for new propers%s" % run_at) @@ -136,7 +137,8 @@ class ProperFinder(): # populate our Proper instance if parse_result.is_anime: - logger.log(u"I am sorry '"+curProper.name+"' seams to be an anime proper seach is not yet suported", logger.DEBUG) + logger.log(u"I am sorry '" + curProper.name + "' seams to be an anime proper seach is not yet suported", + logger.DEBUG) continue else: curProper.season = parse_result.season_number if parse_result.season_number != None else 1 @@ -149,19 +151,29 @@ class ProperFinder(): logger.DEBUG) continue - if parse_result.show.rls_ignore_words and search.filter_release_name(curProper.name, parse_result.show.rls_ignore_words): - logger.log(u"Ignoring " + curProper.name + " based on ignored words filter: " + parse_result.show.rls_ignore_words, - logger.MESSAGE) + if parse_result.show.rls_ignore_words and search.filter_release_name(curProper.name, + parse_result.show.rls_ignore_words): + logger.log( + u"Ignoring " + curProper.name + " based on ignored words filter: " + parse_result.show.rls_ignore_words, + logger.MESSAGE) continue - if parse_result.show.rls_require_words and not search.filter_release_name(curProper.name, parse_result.show.rls_require_words): - logger.log(u"Ignoring " + curProper.name + " based on required words filter: " + parse_result.show.rls_require_words, - logger.MESSAGE) + if parse_result.show.rls_require_words and not search.filter_release_name(curProper.name, + parse_result.show.rls_require_words): + logger.log( + u"Ignoring " + curProper.name + " based on required words filter: " + parse_result.show.rls_require_words, + logger.MESSAGE) continue - oldStatus, oldQuality = Quality.splitCompositeStatus(int(sqlResults[0]["status"])) + # check if we actually want this proper (if it's the right quality) + myDB = db.DBConnection() + sqlResults = myDB.select("SELECT status FROM tv_episodes WHERE showid = ? AND season = ? 
AND episode = ?", + [curProper.indexerid, curProper.season, curProper.episode]) + if not sqlResults: + continue # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones) + oldStatus, oldQuality = Quality.splitCompositeStatus(int(sqlResults[0]["status"])) if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != curProper.quality: continue @@ -212,7 +224,7 @@ class ProperFinder(): showObj = helpers.findCertainShow(sickbeard.showList, curProper.indexerid) if showObj == None: logger.log(u"Unable to find the show with indexerid " + str( - curProper .indexerid) + " so unable to download the proper", logger.ERROR) + curProper.indexerid) + " so unable to download the proper", logger.ERROR) continue epObj = showObj.getEpisode(curProper.season, curProper.episode) diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py index 0419839d..8646b8df 100644 --- a/sickbeard/providers/generic.py +++ b/sickbeard/providers/generic.py @@ -23,7 +23,6 @@ import datetime import os import re import itertools -import Queue import sickbeard import requests @@ -34,18 +33,14 @@ from sickbeard import encodingKludge as ek from sickbeard.exceptions import ex from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException from sickbeard.common import Quality -from sickbeard import clients - -from lib.hachoir_parser import createParser +from hachoir_parser import createParser class GenericProvider: NZB = "nzb" TORRENT = "torrent" def __init__(self, name): - self.queue = Queue.Queue() - # these need to be set in the subclass self.providerType = None self.name = name @@ -63,8 +58,8 @@ class GenericProvider: self.cache = tvcache.TVCache(self) - self.cookies = None self.session = requests.session() + self.headers = { 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36'} @@ -134,19 +129,22 @@ class GenericProvider: # check for auth if not self._doLogin(): - return + return False if self.providerType == GenericProvider.TORRENT: - torrent_hash = re.findall('urn:btih:([\w]{32,40})', result.url)[0].upper() - if not torrent_hash: - logger.log("Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR) - return False + try: + torrent_hash = re.findall('urn:btih:([\w]{32,40})', result.url)[0].upper() + if not torrent_hash: + logger.log("Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR) + return False - urls = [ - 'http://torcache.net/torrent/' + torrent_hash + '.torrent', - 'http://torrage.com/torrent/' + torrent_hash + '.torrent', - 'http://zoink.it/torrent/' + torrent_hash + '.torrent', - ] + urls = [ + 'http://torcache.net/torrent/' + torrent_hash + '.torrent', + 'http://torrage.com/torrent/' + torrent_hash + '.torrent', + 'http://zoink.it/torrent/' + torrent_hash + '.torrent', + ] + except: + urls = [result.url] filename = ek.ek(os.path.join, sickbeard.TORRENT_DIR, helpers.sanitizeFileName(result.name) + '.' + self.providerType) @@ -155,7 +153,6 @@ class GenericProvider: filename = ek.ek(os.path.join, sickbeard.NZB_DIR, helpers.sanitizeFileName(result.name) + '.' 
+ self.providerType) - else: return diff --git a/sickbeard/search.py b/sickbeard/search.py index 49acfcd6..cb035ef2 100644 --- a/sickbeard/search.py +++ b/sickbeard/search.py @@ -79,7 +79,6 @@ def _downloadResult(result): except EnvironmentError, e: logger.log(u"Error trying to save NZB to black hole: " + ex(e), logger.ERROR) newResult = False - elif resProvider.providerType == "torrent": newResult = resProvider.downloadResult(result) else: @@ -132,7 +131,11 @@ def snatchEpisode(result, endStatus=SNATCHED): else: # Sets per provider seed ratio result.ratio = result.provider.seedRatio() + + # Gets torrent file contents if not magnet link result.content = result.provider.getURL(result.url) if not result.url.startswith('magnet') else None + + # Snatches torrent with client client = clients.getClientIstance(sickbeard.TORRENT_METHOD)() dlResult = client.sendTORRENT(result) else: From b5e02829425ae73fecaf2e11e68e21334f574fd0 Mon Sep 17 00:00:00 2001 From: echel0n Date: Sun, 27 Jul 2014 07:14:41 -0700 Subject: [PATCH 12/23] Fixed scene exceptions issue when editing shows. --- sickbeard/scene_exceptions.py | 4 ++-- sickbeard/webserve.py | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/sickbeard/scene_exceptions.py b/sickbeard/scene_exceptions.py index e39b0cd6..72063d65 100644 --- a/sickbeard/scene_exceptions.py +++ b/sickbeard/scene_exceptions.py @@ -252,7 +252,7 @@ def retrieve_exceptions(): anidb_exception_dict.clear() xem_exception_dict.clear() -def update_scene_exceptions(indexer_id, scene_exceptions): +def update_scene_exceptions(indexer_id, scene_exceptions, season=-1): """ Given a indexer_id, and a list of all show scene exceptions, update the db. """ @@ -267,7 +267,7 @@ def update_scene_exceptions(indexer_id, scene_exceptions): cur_exception = unicode(cur_exception, 'utf-8', 'replace') myDB.action("INSERT INTO scene_exceptions (indexer_id, show_name, season) VALUES (?,?,?)", - [indexer_id, cur_exception, -1]) + [indexer_id, cur_exception, season]) def _anidb_exceptions_fetcher(): global anidb_exception_dict diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py index e677c733..1753b39d 100644 --- a/sickbeard/webserve.py +++ b/sickbeard/webserve.py @@ -3631,14 +3631,15 @@ class Home(MainHandler): return self._genericMessage("Error", errString) showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show)) - - if showObj is None: + if not showObj: errString = "Unable to find the specified show: " + str(show) if directCall: return [errString] else: return self._genericMessage("Error", errString) + showObj.exceptions = scene_exceptions.get_scene_exceptions(showObj.indexerid) + if not location and not anyQualities and not bestQualities and not flatten_folders: t = PageTemplate(headers=self.request.headers, file="editShow.tmpl") t.submenu = HomeMenu() @@ -3855,8 +3856,7 @@ class Home(MainHandler): if do_update_exceptions: try: - scene_exceptions.update_scene_exceptions(showObj.indexerid, exceptions_list) # @UndefinedVariable - showObj.exceptions = scene_exceptions.get_scene_exceptions(showObj.indexerid) + scene_exceptions.update_scene_exceptions(showObj.indexerid, exceptions_list) # @UndefinedVariable time.sleep(cpu_presets[sickbeard.CPU_PRESET]) except exceptions.CantUpdateException, e: errors.append("Unable to force an update on scene exceptions of the show.") From 4094e2ed3c678ea0c59de894e8994319075af757 Mon Sep 17 00:00:00 2001 From: echel0n Date: Sun, 27 Jul 2014 07:39:33 -0700 Subject: [PATCH 13/23] Fix for trakt connection issues.
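
Every lookup in the checker now follows the same guard: make the call once and abort with a log message when the service is unreachable, instead of letting an empty response propagate. A sketch of that pattern, under the assumptions that TraktCall is importable as below and returns parsed JSON on success or None on failure:

    import sickbeard
    from sickbeard import logger
    from lib.trakt import TraktCall  # assumed import path

    def fetch_show_watchlist():
        # A single round-trip; a falsy result means Trakt could not be reached.
        watchlist = TraktCall("user/watchlist/shows.json/%API%/", sickbeard.TRAKT_API,
                              sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD)
        if not watchlist:
            logger.log(u"Could not connect to trakt service, aborting watchlist update", logger.ERROR)
            return None
        return watchlist
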
--- sickbeard/traktChecker.py | 11 +++++------ sickbeard/webserve.py | 18 +++++++++++++----- 2 files changed, 18 insertions(+), 11 deletions(-) diff --git a/sickbeard/traktChecker.py b/sickbeard/traktChecker.py index 88810ed2..0a15b01c 100644 --- a/sickbeard/traktChecker.py +++ b/sickbeard/traktChecker.py @@ -55,8 +55,7 @@ class TraktChecker(): logger.log(traceback.format_exc(), logger.DEBUG) def findShow(self, indexer, indexerid): - library = TraktCall("user/library/shows/all.json/%API%/" + sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_API, - sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD) + library = TraktCall("user/library/shows/all.json/%API%/", sickbeard.TRAKT_API, sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD) if not library: logger.log(u"Could not connect to trakt service, aborting library check", logger.ERROR) @@ -120,8 +119,8 @@ class TraktChecker(): def updateShows(self): logger.log(u"Starting trakt show watchlist check", logger.DEBUG) - watchlist = TraktCall("user/watchlist/shows.json/%API%/" + sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_API, - sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD) + watchlist = TraktCall("user/watchlist/shows.json/%API%/", sickbeard.TRAKT_API, sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD) + if not watchlist: logger.log(u"Could not connect to trakt service, aborting watchlist update", logger.ERROR) return @@ -152,8 +151,8 @@ class TraktChecker(): Sets episodes to wanted that are in trakt watchlist """ logger.log(u"Starting trakt episode watchlist check", logger.DEBUG) - watchlist = TraktCall("user/watchlist/episodes.json/%API%/" + sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_API, - sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD) + watchlist = TraktCall("user/watchlist/episodes.json/%API%/", sickbeard.TRAKT_API, sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD) + if not watchlist: logger.log(u"Could not connect to trakt service, aborting watchlist update", logger.ERROR) return diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py index 1753b39d..38f95356 100644 --- a/sickbeard/webserve.py +++ b/sickbeard/webserve.py @@ -83,6 +83,7 @@ from lib import adba from Cheetah.Template import Template from tornado.web import RequestHandler, HTTPError, asynchronous + def authenticated(handler_class): def wrap_execute(handler_execute): def basicauth(handler, transforms, *args, **kwargs): @@ -145,6 +146,7 @@ def redirect(url, permanent=False, status=None): assert url[0] == '/' raise HTTPRedirect(sickbeard.WEB_ROOT + url, permanent, status) + @authenticated class MainHandler(RequestHandler): def http_error_401_handler(self): @@ -1506,6 +1508,7 @@ class ConfigGeneral(MainHandler): redirect("/config/general/") + class ConfigBackupRestore(MainHandler): def index(self, *args, **kwargs): t = PageTemplate(headers=self.request.headers, file="config_backuprestore.tmpl") @@ -1633,6 +1636,7 @@ class ConfigSearch(MainHandler): redirect("/config/search/") + class ConfigPostProcessing(MainHandler): def index(self, *args, **kwargs): @@ -2178,6 +2182,7 @@ class ConfigProviders(MainHandler): redirect("/config/providers/") + class ConfigNotifications(MainHandler): def index(self, *args, **kwargs): t = PageTemplate(headers=self.request.headers, file="config_notifications.tmpl") @@ -2211,7 +2216,8 @@ class ConfigNotifications(MainHandler): use_nmjv2=None, nmjv2_host=None, nmjv2_dbloc=None, nmjv2_database=None, use_trakt=None, trakt_username=None, trakt_password=None, trakt_api=None, trakt_remove_watchlist=None, trakt_use_watchlist=None, trakt_method_add=None, - 
trakt_start_paused=None, trakt_use_recommended=None, trakt_sync=None, trakt_default_indexer=None, + trakt_start_paused=None, trakt_use_recommended=None, trakt_sync=None, + trakt_default_indexer=None, use_synologynotifier=None, synologynotifier_notify_onsnatch=None, synologynotifier_notify_ondownload=None, synologynotifier_notify_onsubtitledownload=None, use_pytivo=None, pytivo_notify_onsnatch=None, pytivo_notify_ondownload=None, @@ -2384,6 +2390,7 @@ class ConfigNotifications(MainHandler): redirect("/config/notifications/") + class ConfigSubtitles(MainHandler): def index(self, *args, **kwargs): t = PageTemplate(headers=self.request.headers, file="config_subtitles.tmpl") @@ -2441,6 +2448,7 @@ class ConfigSubtitles(MainHandler): redirect("/config/subtitles/") + class ConfigAnime(MainHandler): def index(self, *args, **kwargs): @@ -2472,6 +2480,7 @@ class ConfigAnime(MainHandler): redirect("/config/anime/") + class Config(MainHandler): def index(self, *args, **kwargs): t = PageTemplate(headers=self.request.headers, file="config.tmpl") @@ -2535,7 +2544,8 @@ class HomePostProcess(MainHandler): redirect("/home/") - def processEpisode(self, dir=None, nzbName=None, jobName=None, quiet=None, process_method=None, force=None, is_priority=None, failed="0", type="auto", *args, **kwargs): + def processEpisode(self, dir=None, nzbName=None, jobName=None, quiet=None, process_method=None, force=None, + is_priority=None, failed="0", type="auto", *args, **kwargs): if failed == "0": failed = False @@ -2758,9 +2768,7 @@ class NewHomeAddShows(MainHandler): final_results = [] logger.log(u"Getting recommended shows from Trakt.tv", logger.DEBUG) - recommendedlist = TraktCall("recommendations/shows.json/%API%/" + sickbeard.TRAKT_USERNAME, - sickbeard.TRAKT_API, - sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD) + recommendedlist = TraktCall("recommendations/shows.json/%API%/", sickbeard.TRAKT_API, sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD) if recommendedlist is None: logger.log(u"Could not connect to trakt service, aborting recommended list update", logger.ERROR) return From 91ee2c9ef55c7e0803fcee202506f6590ad25ace Mon Sep 17 00:00:00 2001 From: echel0n Date: Sun, 27 Jul 2014 07:49:13 -0700 Subject: [PATCH 14/23] More fixes for Trakt.tv connection issues. 
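
The two endpoint shapes this pair of commits settles on, shown side by side (an illustrative sketch; TraktCall substitutes %API% with the API key before issuing the request):

    # Per-user resources carry the username as the final path segment:
    library = TraktCall("user/library/shows/all.json/%API%/" + sickbeard.TRAKT_USERNAME,
                        sickbeard.TRAKT_API, sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD)

    # Global resources end at the key placeholder, with no trailing slash:
    trending = TraktCall("shows/trending.json/%API%", sickbeard.TRAKT_API_KEY)
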
--- sickbeard/notifiers/trakt.py | 2 +- sickbeard/traktChecker.py | 6 +++--- sickbeard/webserve.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/sickbeard/notifiers/trakt.py b/sickbeard/notifiers/trakt.py index a6c0946a..f1eaf3a4 100644 --- a/sickbeard/notifiers/trakt.py +++ b/sickbeard/notifiers/trakt.py @@ -75,7 +75,7 @@ class TraktNotifier: Returns: True if the request succeeded, False otherwise """ - data = TraktCall("account/test/%API%", api, username, password, {}) + data = TraktCall("account/test/%API%", api, username, password) if data and data["status"] == "success": return True diff --git a/sickbeard/traktChecker.py b/sickbeard/traktChecker.py index 0a15b01c..bb197ded 100644 --- a/sickbeard/traktChecker.py +++ b/sickbeard/traktChecker.py @@ -55,7 +55,7 @@ class TraktChecker(): logger.log(traceback.format_exc(), logger.DEBUG) def findShow(self, indexer, indexerid): - library = TraktCall("user/library/shows/all.json/%API%/", sickbeard.TRAKT_API, sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD) + library = TraktCall("user/library/shows/all.json/%API%/" + sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_API, sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD) if not library: logger.log(u"Could not connect to trakt service, aborting library check", logger.ERROR) @@ -119,7 +119,7 @@ class TraktChecker(): def updateShows(self): logger.log(u"Starting trakt show watchlist check", logger.DEBUG) - watchlist = TraktCall("user/watchlist/shows.json/%API%/", sickbeard.TRAKT_API, sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD) + watchlist = TraktCall("user/watchlist/shows.json/%API%/" + sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_API, sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD) if not watchlist: logger.log(u"Could not connect to trakt service, aborting watchlist update", logger.ERROR) @@ -151,7 +151,7 @@ class TraktChecker(): Sets episodes to wanted that are in trakt watchlist """ logger.log(u"Starting trakt episode watchlist check", logger.DEBUG) - watchlist = TraktCall("user/watchlist/episodes.json/%API%/", sickbeard.TRAKT_API, sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD) + watchlist = TraktCall("user/watchlist/episodes.json/%API%/" + sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_API, sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD) if not watchlist: logger.log(u"Could not connect to trakt service, aborting watchlist update", logger.ERROR) diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py index 38f95356..a3e7028f 100644 --- a/sickbeard/webserve.py +++ b/sickbeard/webserve.py @@ -2768,7 +2768,7 @@ class NewHomeAddShows(MainHandler): final_results = [] logger.log(u"Getting recommended shows from Trakt.tv", logger.DEBUG) - recommendedlist = TraktCall("recommendations/shows.json/%API%/", sickbeard.TRAKT_API, sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD) + recommendedlist = TraktCall("recommendations/shows.json/%API%", sickbeard.TRAKT_API, sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD) if recommendedlist is None: logger.log(u"Could not connect to trakt service, aborting recommended list update", logger.ERROR) return @@ -2805,7 +2805,7 @@ class NewHomeAddShows(MainHandler): t = PageTemplate(headers=self.request.headers, file="home_trendingShows.tmpl") t.submenu = HomeMenu() - t.trending_shows = TraktCall("shows/trending.json/%API%/", sickbeard.TRAKT_API_KEY) + t.trending_shows = TraktCall("shows/trending.json/%API%", sickbeard.TRAKT_API_KEY) return _munge(t) From 7dc89c084f3b34e14637ec39b81807c8b55c6d5f Mon Sep 17 00:00:00 2001 From: echel0n 
Date: Sun, 27 Jul 2014 10:58:19 -0700 Subject: [PATCH 15/23] Added ability to switch branches from general settings/advanced; it will perform a forced checkout of whatever branch you choose and restart automatically. Git updates are now forced to overwrite local changes to avoid needing to manually stash uncommitted changes before being able to proceed with an update. Fixed indexer mapping when unable to locate a show on the indexer that we are trying to map to. --- .../interfaces/default/config_general.tmpl | 17 +++++++ lib/tvdb_api/tvdb_api.py | 4 +- lib/tvrage_api/tvrage_api.py | 3 +- sickbeard/__init__.py | 1 - sickbeard/classes.py | 10 ++-- sickbeard/config.py | 5 ++ sickbeard/gh_api.py | 11 +++++ sickbeard/helpers.py | 47 +++++++++++------- sickbeard/versionChecker.py | 48 ++++++++++++++----- sickbeard/webserve.py | 23 ++++----- 10 files changed, 118 insertions(+), 51 deletions(-) diff --git a/gui/slick/interfaces/default/config_general.tmpl b/gui/slick/interfaces/default/config_general.tmpl index d2cd6a53..7bda4258 100644 --- a/gui/slick/interfaces/default/config_general.tmpl +++ b/gui/slick/interfaces/default/config_general.tmpl @@ -276,6 +276,23 @@