diff --git a/SickBeard.py b/SickBeard.py index 513f5bde..cc964eb9 100644 --- a/SickBeard.py +++ b/SickBeard.py @@ -75,7 +75,7 @@ def loadShowsFromDB(): for sqlShow in sqlResults: try: - curShow = TVShow(sqlShow["indexer"], int(sqlShow["indexer_id"])) + curShow = TVShow(int(sqlShow["indexer"]), int(sqlShow["indexer_id"])) sickbeard.showList.append(curShow) except Exception, e: logger.log(u"There was an error creating the show in " + sqlShow["location"] + ": " + str(e).decode('utf-8'), logger.ERROR) diff --git a/gui/slick/interfaces/default/comingEpisodes.tmpl b/gui/slick/interfaces/default/comingEpisodes.tmpl index 107c5306..8b654896 100644 --- a/gui/slick/interfaces/default/comingEpisodes.tmpl +++ b/gui/slick/interfaces/default/comingEpisodes.tmpl @@ -167,11 +167,7 @@ [imdb] #end if - #if 'Tvdb' in $cur_result["indexer"]: - [tvdb] - #else - [tvrage] - #end if + [ [search] @@ -307,11 +303,7 @@ #if $cur_result["imdb_id"]: [imdb] #end if - #if "Tvdb" in $cur_result["indexer"]: - [tvdb] - #else - [tvrage] - #end if + $sickbeard.indexerApi($cur_result[ [search] diff --git a/gui/slick/interfaces/default/displayShow.tmpl b/gui/slick/interfaces/default/displayShow.tmpl index af4df869..9fad2a12 100644 --- a/gui/slick/interfaces/default/displayShow.tmpl +++ b/gui/slick/interfaces/default/displayShow.tmpl @@ -50,23 +50,14 @@ - $show.genre[1:-1].replace('|',' | ') #end if - #if "Tvdb" in $show.indexer - [tvdb] - #else - [tvrage] - #end if + $sickbeard.indexerApi($show.indexer).name #else ($show.imdb_info['year']) - $show.imdb_info['runtimes'] min - $show.imdb_info['genres'].replace('|',' | ') [imdb] - #if "Tvdb" in $show.indexer - [tvdb] - #else - [tvrage] - #end if + $sickbeard.indexerApi($show.indexer).name - #end if #if $seasonResults: diff --git a/gui/slick/interfaces/default/editShow.tmpl b/gui/slick/interfaces/default/editShow.tmpl index 1acba0f7..2ef5e94d 100644 --- a/gui/slick/interfaces/default/editShow.tmpl +++ b/gui/slick/interfaces/default/editShow.tmpl @@ -112,8 +112,8 @@ This DOES NOT allow Sick Beard to download non-english TV episodes!

(check this if you wish to use the DVD order instead of the Airing order)

-Archive on first match: #if $bestQualities +Archive on first match:
(check this to have the episode archived after the first best match is found from your archive quality list)
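Note on the indexer refactor above: these template hunks, like the ones in comingEpisodes.tmpl and displayShow.tmpl, drop the hard-coded 'Tvdb'/'TVRage' string checks in favour of a numeric indexer id resolved through $sickbeard.indexerApi($show.indexer).name. The indexer_api module itself is not part of this diff, so the sketch below is only an assumed minimal shape of that interface, inferred from the template calls and from "#set $indexer = 1" in home_massAddTable.tmpl.

# Hypothetical sketch (Python 2 compatible). The real class lives in
# sickbeard/indexers/indexer_api.py, which this diff imports but does not show;
# the constants and display names here are assumptions, not the actual module.
INDEXER_TVDB = 1    # assumed numeric id for theTVDB
INDEXER_TVRAGE = 2  # assumed numeric id for TVRage

class indexerApi(object):
    _names = {INDEXER_TVDB: 'theTVDB', INDEXER_TVRAGE: 'TVRage'}

    def __init__(self, indexer=None):
        # templates now pass the integer stored in the DB, not a 'Tvdb'/'TVRage' string
        self.indexer = int(indexer) if indexer is not None else None

    @property
    def name(self):
        return self._names.get(self.indexer, 'Unknown')

With a mapping like this, a template can render the provider name for either service in one expression instead of branching per indexer.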
diff --git a/gui/slick/interfaces/default/home_addShows.tmpl b/gui/slick/interfaces/default/home_addShows.tmpl index 3ff19a5f..fea556c6 100644 --- a/gui/slick/interfaces/default/home_addShows.tmpl +++ b/gui/slick/interfaces/default/home_addShows.tmpl @@ -39,4 +39,4 @@ -#include $os.path.join($sickbeard.PROG_DIR,"gui/slick/interfaces/default/inc_bottom.tmpl") +#include $os.path.join($sickbeard.PROG_DIR,"gui/slick/interfaces/default/inc_bottom.tmpl") \ No newline at end of file diff --git a/gui/slick/interfaces/default/home_massAddTable.tmpl b/gui/slick/interfaces/default/home_massAddTable.tmpl index de0d239c..6de79abf 100644 --- a/gui/slick/interfaces/default/home_massAddTable.tmpl +++ b/gui/slick/interfaces/default/home_massAddTable.tmpl @@ -1,7 +1,5 @@ #import sickbeard -#from sickbeard.common import indexerStrings -#set $rowidx = 0 @@ -16,27 +14,23 @@ #end if #set $show_id = $curDir['dir'] -#set $indexer = 'Tvdb' +#set $indexer = 1 #if $curDir['existing_info'][0]: -#set $show_id = $show_id + '|' + $str($curDir['existing_info'][0]) + '|' + str($curDir['existing_info'][1]) -#set $indexer = $str($curDir['existing_info'][2]) +#set $show_id = $show_id + '|' + $str($curDir['existing_info'][0]) + '|' + $str($curDir['existing_info'][1]) +#set $indexer = $curDir['existing_info'][2] #end if -#set $rowidx = $rowidx + 1 - - + - #if 'Tvdb' in $indexer - - #elif 'TVRage' in $indexer - - #else - + #if $curDir['existing_info'][1]: + + #else: + #end if
Directory    Show Name (tvshow.nfo)    Indexer
#if $curDir['existing_info'][0] and $curDir['existing_info'][1] then ''+$curDir['existing_info'][1]+'' else "?"##if $curDir['existing_info'][1] then ''+$curDir['existing_info'][1]+'' else "?"#$curDir['existing_info'][1]$curDir['existing_info'][1]? @@ -45,4 +39,4 @@ #end for -
+ \ No newline at end of file diff --git a/gui/slick/interfaces/default/home_newShow.tmpl b/gui/slick/interfaces/default/home_newShow.tmpl index 69834360..16672939 100644 --- a/gui/slick/interfaces/default/home_newShow.tmpl +++ b/gui/slick/interfaces/default/home_newShow.tmpl @@ -27,15 +27,11 @@
- Find a show on the TVDB and TVRAGE + Find a show on the TVDB or TVRAGE
#if $use_provided_info: - #if 'Tvdb' in $provided_indexer - Show retrieved from existing metadata: $provided_indexer_name - #else - Show retrieved from existing metadata: $provided_indexer_name - #end if + Show retrieved from existing metadata: $provided_indexer_name diff --git a/gui/slick/interfaces/default/home_postprocess.tmpl b/gui/slick/interfaces/default/home_postprocess.tmpl index d3ed8718..b866b488 100644 --- a/gui/slick/interfaces/default/home_postprocess.tmpl +++ b/gui/slick/interfaces/default/home_postprocess.tmpl @@ -1,5 +1,4 @@ #import sickbeard -#from sickbeard.common import indexerStrings #set global $header="Post Processing" #set global $title="Post Processing" @@ -8,18 +7,18 @@ #set global $topmenu="home"# #import os.path #include $os.path.join($sickbeard.PROG_DIR, "gui/slick/interfaces/default/inc_top.tmpl") -#if $varExists('header') +#if $varExists('header')

$header

-#else +#else

$title

#end if - + Enter the folder containing the episode:
Show Indexer to be used:                          
@@ -39,7 +38,6 @@ Mark Dir/Files as priority download:           (Check it to replace the file even if it exists at higher quality)
#if $sickbeard.USE_FAILED_DOWNLOADS: - Mark download as failed:                           
#end if @@ -47,11 +45,10 @@ - -#include $os.path.join($sickbeard.PROG_DIR,"gui/slick/interfaces/default/inc_bottom.tmpl") +#include $os.path.join($sickbeard.PROG_DIR,"gui/slick/interfaces/default/inc_bottom.tmpl") \ No newline at end of file diff --git a/gui/slick/js/displayShow.js b/gui/slick/js/displayShow.js index 2647620d..73da8f4c 100644 --- a/gui/slick/js/displayShow.js +++ b/gui/slick/js/displayShow.js @@ -115,7 +115,7 @@ $(document).ready(function(){ $("#checkboxControls input").change(function(e){ var whichClass = $(this).attr('id') $(this).showHideRows(whichClass) - return + $('tr.'+whichClass).each(function(i){ $(this).toggle(); }); diff --git a/gui/slick/js/newShow.js b/gui/slick/js/newShow.js index da3a6ba7..a3536c54 100644 --- a/gui/slick/js/newShow.js +++ b/gui/slick/js/newShow.js @@ -54,28 +54,28 @@ $(document).ready(function () { var whichSeries = obj.join('|'); + resultStr += ' '; - if (obj[0] == 'Tvdb' && data.langid && data.langid != "") { - resultStr += '' + obj[2] + ''; - } else if (obj[0] == 'Tvdb') { - resultStr += '' + obj[2] + ''; + if (data.langid && data.langid != "") { + resultStr += '' + obj[4] + ''; } else { - resultStr += '' + obj[2] + ''; + resultStr += '' + obj[4] + ''; } - if (obj[3] !== null) { - var startDate = new Date(obj[3]); + if (obj[5] !== null) { + var startDate = new Date(obj[5]); var today = new Date(); if (startDate > today) { - resultStr += ' (will debut on ' + obj[3] + ')'; + resultStr += ' (will debut on ' + obj[5] + ')'; } else { - resultStr += ' (started on ' + obj[3] + ')'; + resultStr += ' (started on ' + obj[5] + ')'; } } if (obj[0] !== null) { resultStr += ' [' + obj[0] + ']'; } + resultStr += '
'; }); resultStr += ''; @@ -146,7 +146,7 @@ $(document).ready(function () { var show_name, sep_char; // if they've picked a radio button then use that if ($('input:radio[name=whichSeries]:checked').length) { - show_name = $('input:radio[name=whichSeries]:checked').val().split('|')[2]; + show_name = $('input:radio[name=whichSeries]:checked').val().split('|')[4]; } // if we provided a show in the hidden field, use that else if ($('input:hidden[name=whichSeries]').length && $('input:hidden[name=whichSeries]').val().length) { diff --git a/lib/tvdb_api/tvdb_api.py b/lib/tvdb_api/tvdb_api.py index 14105e3a..0cc6d043 100644 --- a/lib/tvdb_api/tvdb_api.py +++ b/lib/tvdb_api/tvdb_api.py @@ -42,16 +42,19 @@ from lib import requests from tvdb_ui import BaseUI, ConsoleUI from tvdb_exceptions import (tvdb_error, tvdb_userabort, tvdb_shownotfound, - tvdb_seasonnotfound, tvdb_episodenotfound, tvdb_attributenotfound) + tvdb_seasonnotfound, tvdb_episodenotfound, tvdb_attributenotfound) # Cached Session Handler from lib.httpcache import CachingHTTPAdapter + s = requests.Session() s.mount('http://', CachingHTTPAdapter()) + def log(): return logging.getLogger("tvdb_api") + class ShowContainer(dict): """Simple dict that holds a series of Show instances """ @@ -74,13 +77,14 @@ class ShowContainer(dict): _lastgc = time.time() del tbd - + super(ShowContainer, self).__setitem__(key, value) class Show(dict): """Holds a dict of seasons, and show data. """ + def __init__(self): dict.__init__(self) self.data = {} @@ -126,7 +130,7 @@ class Show(dict): raise tvdb_episodenotfound("Could not find any episodes that aired on %s" % date) return ret - def search(self, term = None, key = None): + def search(self, term=None, key=None): """ Search all episodes in show. Can search all data, or a specific key (for example, episodename) @@ -179,7 +183,7 @@ class Show(dict): """ results = [] for cur_season in self.values(): - searchresult = cur_season.search(term = term, key = key) + searchresult = cur_season.search(term=term, key=key) if len(searchresult) != 0: results.extend(searchresult) @@ -187,7 +191,7 @@ class Show(dict): class Season(dict): - def __init__(self, show = None): + def __init__(self, show=None): """The show attribute points to the parent show """ self.show = show @@ -208,7 +212,7 @@ class Season(dict): else: return dict.__getitem__(self, episode_number) - def search(self, term = None, key = None): + def search(self, term=None, key=None): """Search all episodes in season, returns a list of matching Episode instances. @@ -221,7 +225,7 @@ class Season(dict): """ results = [] for ep in self.values(): - searchresult = ep.search(term = term, key = key) + searchresult = ep.search(term=term, key=key) if searchresult is not None: results.append( searchresult @@ -230,7 +234,7 @@ class Season(dict): class Episode(dict): - def __init__(self, season = None): + def __init__(self, season=None): """The season attribute points to the parent season """ self.season = season @@ -255,7 +259,7 @@ class Episode(dict): except KeyError: raise tvdb_attributenotfound("Cannot find attribute %s" % (repr(key))) - def search(self, term = None, key = None): + def search(self, term=None, key=None): """Search episode data for term, if it matches, return the Episode (self). The key parameter can be used to limit the search to a specific element, for example, episodename. 
@@ -286,7 +290,7 @@ class Episode(dict): if key is not None and cur_key != key: # Do not search this key continue - if cur_value.find( unicode(term).lower() ) > -1: + if cur_value.find(unicode(term).lower()) > -1: return self @@ -305,6 +309,7 @@ class Actor(dict): role, sortorder """ + def __repr__(self): return "<Actor \"%s\">" % (self.get("name")) @@ -315,20 +320,21 @@ class Tvdb: >>> t['Scrubs'][1][24]['episodename'] u'My Last Day' """ + def __init__(self, - interactive = False, - select_first = False, - debug = False, - cache = True, - banners = False, - actors = False, - custom_ui = None, - language = None, - search_all_languages = False, - apikey = None, - forceConnect=False, - useZip=False, - dvdorder=False): + interactive=False, + select_first=False, + debug=False, + cache=True, + banners=False, + actors=False, + custom_ui=None, + language=None, + search_all_languages=False, + apikey=None, + forceConnect=False, + useZip=False, + dvdorder=False): """interactive (True/False): When True, the built-in console UI is used to select the correct show. @@ -402,21 +408,21 @@ class Tvdb: And only the main language xml is used, the actor and banner xml are lost. """ - self.shows = ShowContainer() # Holds all Show classes - self.corrections = {} # Holds show-name to show_id mapping + self.shows = ShowContainer() # Holds all Show classes + self.corrections = {} # Holds show-name to show_id mapping self.config = {} if apikey is not None: self.config['apikey'] = apikey else: - self.config['apikey'] = "0629B785CE550C8D" # tvdb_api's API key + self.config['apikey'] = "0629B785CE550C8D" # tvdb_api's API key - self.config['debug_enabled'] = debug # show debugging messages + self.config['debug_enabled'] = debug # show debugging messages self.config['custom_ui'] = custom_ui - self.config['interactive'] = interactive # prompt for correct series? + self.config['interactive'] = interactive # prompt for correct series? self.config['select_first'] = select_first @@ -445,8 +451,8 @@ class Tvdb: if self.config['debug_enabled']: warnings.warn("The debug argument to tvdb_api.__init__ will be removed in the next version. 
" - "To enable debug messages, use the following code before importing: " - "import logging; logging.basicConfig(level=logging.DEBUG)") + "To enable debug messages, use the following code before importing: " + "import logging; logging.basicConfig(level=logging.DEBUG)") logging.basicConfig(level=logging.DEBUG) @@ -454,8 +460,8 @@ class Tvdb: # Hard-coded here as it is realtively static, and saves another HTTP request, as # recommended on http://thetvdb.com/wiki/index.php/API:languages.xml self.config['valid_languages'] = [ - "da", "fi", "nl", "de", "it", "es", "fr","pl", "hu","el","tr", - "ru","he","ja","pt","zh","cs","sl", "hr","ko","en","sv","no" + "da", "fi", "nl", "de", "it", "es", "fr", "pl", "hu", "el", "tr", + "ru", "he", "ja", "pt", "zh", "cs", "sl", "hr", "ko", "en", "sv", "no" ] # thetvdb.com should be based around numeric language codes, @@ -463,9 +469,9 @@ class Tvdb: # requires the language ID, thus this mapping is required (mainly # for usage in tvdb_ui - internally tvdb_api will use the language abbreviations) self.config['langabbv_to_id'] = {'el': 20, 'en': 7, 'zh': 27, - 'it': 15, 'cs': 28, 'es': 16, 'ru': 22, 'nl': 13, 'pt': 26, 'no': 9, - 'tr': 21, 'pl': 18, 'fr': 17, 'hr': 31, 'de': 14, 'da': 10, 'fi': 11, - 'hu': 19, 'ja': 25, 'he': 24, 'ko': 32, 'sv': 8, 'sl': 30} + 'it': 15, 'cs': 28, 'es': 16, 'ru': 22, 'nl': 13, 'pt': 26, 'no': 9, + 'tr': 21, 'pl': 18, 'fr': 17, 'hr': 31, 'de': 14, 'da': 10, 'fi': 11, + 'hu': 19, 'ja': 25, 'he': 24, 'ko': 32, 'sv': 8, 'sl': 30} if language is None: self.config['language'] = 'en' @@ -591,9 +597,9 @@ class Tvdb: if sid not in self.shows: self.shows[sid] = Show() if seas not in self.shows[sid]: - self.shows[sid][seas] = Season(show = self.shows[sid]) + self.shows[sid][seas] = Season(show=self.shows[sid]) if ep not in self.shows[sid][seas]: - self.shows[sid][seas][ep] = Episode(season = self.shows[sid][seas]) + self.shows[sid][seas][ep] = Episode(season=self.shows[sid][seas]) self.shows[sid][seas][ep][attrib] = value def _setShowData(self, sid, key, value): @@ -610,28 +616,21 @@ class Tvdb: - Replaces & with & - Trailing whitespace """ - data = data.replace(u"&", u"&") - data = data.strip() + if isinstance(data, str): + data = data.replace(u"&", u"&") + data = data.strip() return data def search(self, series): """This searches TheTVDB.com for the series name and returns the result list """ - series = urllib.quote(series.encode("utf-8")) + series = series.encode("utf-8") log().debug("Searching for show %s" % series) self.config['params_getSeries']['seriesname'] = series seriesEt = self._getetsrc(self.config['url_getSeries'], self.config['params_getSeries']) - allSeries = [] - for series in seriesEt: - result = dict((k.tag.lower(), k.text) for k in series.getchildren()) - result['id'] = int(result['id']) - result['lid'] = self.config['langabbv_to_id'][result['language']] - if 'aliasnames' in result: - result['aliasnames'] = result['aliasnames'].split("|") - log().debug('Found series %(seriesname)s' % result) - allSeries.append(result) - + allSeries = [dict((s.tag.lower(), s.text) for s in x.getchildren()) for x in seriesEt] + return allSeries def _getSeries(self, series): @@ -648,14 +647,14 @@ class Tvdb: if self.config['custom_ui'] is not None: log().debug("Using custom UI %s" % (repr(self.config['custom_ui']))) - ui = self.config['custom_ui'](config = self.config) + ui = self.config['custom_ui'](config=self.config) else: if not self.config['interactive']: log().debug('Auto-selecting first search result using BaseUI') - ui = 
BaseUI(config = self.config) + ui = BaseUI(config=self.config) else: log().debug('Interactively selecting show using ConsoleUI') - ui = ConsoleUI(config = self.config) + ui = ConsoleUI(config=self.config) return ui.selectSeries(allSeries) @@ -678,7 +677,7 @@ class Tvdb: This interface will be improved in future versions. """ log().debug('Getting season banners for %s' % (sid)) - bannersEt = self._getetsrc( self.config['url_seriesBanner'] % (sid) ) + bannersEt = self._getetsrc(self.config['url_seriesBanner'] % (sid)) banners = {} for cur_banner in bannersEt.findall('Banner'): bid = cur_banner.find('id').text @@ -753,7 +752,7 @@ class Tvdb: cur_actors.append(curActor) self._setShowData(sid, '_actors', cur_actors) - def _getShowData(self, sid, language): + def _getShowData(self, sid, language, seriesSearch=False): """Takes a series ID, gets the epInfo URL and parses the TVDB XML file into the shows dict in layout: shows[series_id][season_number][episode_number] @@ -778,17 +777,27 @@ class Tvdb: seriesInfoEt = self._getetsrc( self.config['url_seriesInfo'] % (sid, getShowInLanguage) ) + + if seriesInfoEt is None: return False for curInfo in seriesInfoEt.findall("Series")[0]: tag = curInfo.tag.lower() value = curInfo.text + if tag == 'seriesname' and value is None: + return False + if value is not None: + if tag == 'id': + value = int(value) + if tag in ['banner', 'fanart', 'poster']: value = self.config['url_artworkPrefix'] % (value) else: value = self._cleanData(value) self._setShowData(sid, tag, value) + if seriesSearch: + return True # Parse banners if self.config['banners_enabled']: @@ -806,7 +815,7 @@ class Tvdb: else: url = self.config['url_epInfo'] % (sid, language) - epsEt = self._getetsrc( url, language=language) + epsEt = self._getetsrc(url, language=language) for cur_ep in epsEt.findall("Episode"): @@ -818,7 +827,7 @@ class Tvdb: if use_dvd: seas_no = int(cur_ep.find('DVD_season').text) - ep_no = int(float(cur_ep.find('DVD_episodenumber').text)) + ep_no = int(float(cur_ep.find('DVD_episodenumber').text)) else: seas_no = int(cur_ep.find('SeasonNumber').text) ep_no = int(cur_ep.find('EpisodeNumber').text) @@ -834,7 +843,7 @@ class Tvdb: if (useDVD): log().debug('Use DVD Order? Yes') seas_no = int(cur_ep.find('DVD_season').text) - ep_no = int(float(cur_ep.find('DVD_episodenumber').text)) + ep_no = int(float(cur_ep.find('DVD_episodenumber').text)) else: log().debug('Use DVD Order? No') seas_no = int(cur_ep.find('SeasonNumber').text) @@ -844,28 +853,34 @@ class Tvdb: tag = cur_item.tag.lower() value = cur_item.text if value is not None: + if tag == 'id': + value = int(value) + if tag == 'filename': value = self.config['url_artworkPrefix'] % (value) else: value = self._cleanData(value) self._setItem(sid, seas_no, ep_no, tag, value) + return True + def _nameToSid(self, name): """Takes show name, returns the correct series ID (if the show has already been grabbed), or grabs all episodes and returns the correct SID. 
""" + sid = set() if name in self.corrections: - log().debug('Correcting %s to %s' % (name, self.corrections[name]) ) + log().debug('Correcting %s to %s' % (name, self.corrections[name])) sid = self.corrections[name] else: log().debug('Getting show %s' % (name)) - selected_series = self._getSeries( name ) - sname, sid = selected_series['seriesname'], selected_series['id'] - log().debug('Got %(seriesname)s, id %(id)s' % selected_series) - - self.corrections[name] = sid - self._getShowData(selected_series['id'], selected_series['language']) + selected_series = self._getSeries(name) + if isinstance(selected_series, dict): + selected_series = [selected_series] + [sid.add(int(x['id'])) for x in selected_series if + self._getShowData(int(x['id']), self.config['language'], seriesSearch=True)] + [self.corrections.update({x['seriesname']: int(x['id'])}) for x in selected_series] return sid @@ -878,11 +893,10 @@ class Tvdb: if key not in self.shows: self._getShowData(key, self.config['language']) return self.shows[key] - - key = key.lower() # make key lower case - sid = self._nameToSid(key) - log().debug('Got series id %s' % (sid)) - return self.shows[sid] + + key = key.lower() # make key lower case + sids = self._nameToSid(key) + return [self.shows[sid] for sid in sids] def __repr__(self): return str(self.shows) @@ -893,11 +907,13 @@ def main(): grabs an episode name interactively. """ import logging + logging.basicConfig(level=logging.DEBUG) tvdb_instance = Tvdb(interactive=True, cache=False) print tvdb_instance['Lost']['seriesname'] print tvdb_instance['Lost'][1][4]['episodename'] + if __name__ == '__main__': main() diff --git a/lib/tvrage_api/tvrage_api.py b/lib/tvrage_api/tvrage_api.py index 088a2c9a..b2e1cf5e 100644 --- a/lib/tvrage_api/tvrage_api.py +++ b/lib/tvrage_api/tvrage_api.py @@ -29,6 +29,7 @@ try: except ImportError: import xml.etree.ElementTree as ElementTree +from collections import defaultdict from lib.dateutil.parser import parse from lib import requests @@ -318,8 +319,8 @@ class TVRage: self.config['base_url'] = "http://services.tvrage.com" - self.config['url_getSeries'] = u"%(base_url)s/myfeeds/search.php" % self.config - self.config['params_getSeries'] = {"key": self.config['apikey'], "show": ""} + self.config['url_getSeries'] = u"%(base_url)s/feeds/search.php" % self.config + self.config['params_getSeries'] = {"show": ""} self.config['url_epInfo'] = u"%(base_url)s/myfeeds/episode_list.php" % self.config self.config['params_epInfo'] = {"key": self.config['apikey'], "sid": ""} @@ -473,28 +474,21 @@ class TVRage: - Replaces & with & - Trailing whitespace """ - data = data.replace(u"&", u"&") - data = data.strip() + if isinstance(data, str): + data = data.replace(u"&", u"&") + data = data.strip() return data def search(self, series): """This searches tvrage.com for the series name and returns the result list """ - series = urllib.quote(series.encode("utf-8")) + series = series.encode("utf-8") log().debug("Searching for show %s" % series) self.config['params_getSeries']['show'] = series seriesEt = self._getetsrc(self.config['url_getSeries'], self.config['params_getSeries']) - allSeries = [] - seriesResult = {} - for series in seriesEt: - for k in series.getchildren(): - seriesResult.setdefault(k.tag.lower(), k.text) + allSeries = [dict((s.tag.lower(),s.text) for s in x.getchildren()) for x in seriesEt] - seriesResult['id'] = int(seriesResult['id']) - log().debug('Found series %s' % seriesResult['seriesname']) - allSeries.append(seriesResult) - return allSeries def _getSeries(self, 
series): @@ -518,7 +512,7 @@ class TVRage: return ui.selectSeries(allSeries) - def _getShowData(self, sid): + def _getShowData(self, sid, seriesSearch=False): """Takes a series ID, gets the epInfo URL and parses the TVRAGE XML file into the shows dict in layout: shows[series_id][season_number][episode_number] @@ -532,14 +526,22 @@ class TVRage: self.config['params_seriesInfo'] ) + if seriesInfoEt is None: return False for curInfo in seriesInfoEt: tag = curInfo.tag.lower() value = curInfo.text + if tag == 'seriesname' and value is None: + return False + + if tag == 'id': + value = int(value) + if value is not None: value = self._cleanData(value) self._setShowData(sid, tag, value) + if seriesSearch: return True try: # Parse genre data @@ -572,28 +574,32 @@ class TVRage: value = cur_item.text if value is not None: + if tag == 'id': + value = int(value) + value = self._cleanData(value) self._setItem(sid, seas_no, ep_no, tag, value) except: continue + return True def _nameToSid(self, name): """Takes show name, returns the correct series ID (if the show has already been grabbed), or grabs all episodes and returns the correct SID. """ + sid = set() if name in self.corrections: log().debug('Correcting %s to %s' % (name, self.corrections[name]) ) sid = self.corrections[name] else: log().debug('Getting show %s' % (name)) selected_series = self._getSeries( name ) - sname, sid = selected_series['seriesname'], selected_series['id'] - log().debug('Got %(seriesname)s, id %(id)s' % selected_series) - - self.corrections[name] = sid - self._getShowData(selected_series['id']) + if isinstance(selected_series, dict): + selected_series = [selected_series] + [sid.add(int(x['id'])) for x in selected_series if self._getShowData(int(x['id']), seriesSearch=True)] + [self.corrections.update({x['seriesname']:int(x['id'])}) for x in selected_series] return sid @@ -608,9 +614,8 @@ class TVRage: return self.shows[key] key = key.lower() # make key lower case - sid = self._nameToSid(key) - log().debug('Got series id %s' % (sid)) - return self.shows[sid] + sids = self._nameToSid(key) + return [self.shows[sid] for sid in sids] def __repr__(self): return str(self.shows) diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py index fc35b092..e843d56b 100644 --- a/sickbeard/__init__.py +++ b/sickbeard/__init__.py @@ -29,15 +29,19 @@ import urllib from threading import Lock # apparently py2exe won't build these unless they're imported somewhere -from sickbeard import providers, metadata, indexers -from indexers import indexer_api, indexer_exceptions -from providers import ezrss, tvtorrents, btn, newznab, womble, thepiratebay, torrentleech, kat, publichd, iptorrents, omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, nextgen +from sickbeard import providers, metadata +from providers import ezrss, tvtorrents, btn, newznab, womble, thepiratebay, torrentleech, kat, publichd, iptorrents, \ + omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, nextgen from sickbeard.config import CheckSection, check_setting_int, check_setting_str, ConfigMigrator -from sickbeard import searchCurrent, searchBacklog, showUpdater, versionChecker, properFinder, autoPostProcesser, subtitles, traktWatchListChecker +from sickbeard import searchCurrent, searchBacklog, showUpdater, versionChecker, properFinder, autoPostProcesser, \ + subtitles, traktWatchListChecker from sickbeard import helpers, db, exceptions, show_queue, search_queue, scheduler, show_name_helpers from sickbeard import logger from sickbeard import naming from sickbeard import scene_numbering 
+from indexers.indexer_api import indexerApi +from indexers.indexer_exceptions import indexer_shownotfound, indexer_exception, indexer_error, indexer_episodenotfound, \ + indexer_attributenotfound, indexer_seasonnotfound, indexer_userabort, indexerExcepts from common import SD, SKIPPED, NAMING_REPEAT @@ -204,8 +208,8 @@ KAT_VERIFIED = False PUBLICHD = None -SCC = False -SCC_USERNAME = None +SCC = False +SCC_USERNAME = None SCC_PASSWORD = None HDTORRENTS = False @@ -255,7 +259,7 @@ SAB_APIKEY = None SAB_CATEGORY = None SAB_HOST = '' -NZBGET_USERNAME = None +NZBGET_USERNAME = None NZBGET_PASSWORD = None NZBGET_CATEGORY = None NZBGET_HOST = None @@ -410,7 +414,6 @@ DATE_PRESET = None TIME_PRESET = None TIME_PRESET_W_SECONDS = None - USE_SUBTITLES = False SUBTITLES_LANGUAGES = [] SUBTITLES_DIR = '' @@ -434,55 +437,56 @@ TMDB_API_KEY = 'edc5f123313769de83a71e157758030b' __INITIALIZED__ = False + def get_backlog_cycle_time(): - cycletime = SEARCH_FREQUENCY*2+7 + cycletime = SEARCH_FREQUENCY * 2 + 7 return max([cycletime, 720]) -def initialize(consoleLogging=True): +def initialize(consoleLogging=True): with INIT_LOCK: global ACTUAL_LOG_DIR, LOG_DIR, WEB_PORT, WEB_LOG, ENCRYPTION_VERSION, WEB_ROOT, WEB_USERNAME, WEB_PASSWORD, WEB_HOST, WEB_IPV6, USE_API, API_KEY, ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, \ - USE_NZBS, USE_TORRENTS, NZB_METHOD, NZB_DIR, DOWNLOAD_PROPERS, ALLOW_HIGH_PRIORITY, TORRENT_METHOD, \ - SAB_USERNAME, SAB_PASSWORD, SAB_APIKEY, SAB_CATEGORY, SAB_HOST, \ - NZBGET_USERNAME, NZBGET_PASSWORD, NZBGET_CATEGORY, NZBGET_HOST, currentSearchScheduler, backlogSearchScheduler, \ - TORRENT_USERNAME, TORRENT_PASSWORD, TORRENT_HOST, TORRENT_PATH, TORRENT_RATIO, TORRENT_PAUSED, TORRENT_HIGH_BANDWIDTH, TORRENT_LABEL, \ - USE_XBMC, XBMC_NOTIFY_ONSNATCH, XBMC_NOTIFY_ONDOWNLOAD, XBMC_NOTIFY_ONSUBTITLEDOWNLOAD, XBMC_UPDATE_FULL, XBMC_UPDATE_ONLYFIRST, \ - XBMC_UPDATE_LIBRARY, XBMC_HOST, XBMC_USERNAME, XBMC_PASSWORD, \ - USE_TRAKT, TRAKT_USERNAME, TRAKT_PASSWORD, TRAKT_API, TRAKT_REMOVE_WATCHLIST, TRAKT_USE_WATCHLIST, TRAKT_METHOD_ADD, TRAKT_START_PAUSED, traktWatchListCheckerSchedular, \ - USE_PLEX, PLEX_NOTIFY_ONSNATCH, PLEX_NOTIFY_ONDOWNLOAD, PLEX_NOTIFY_ONSUBTITLEDOWNLOAD, PLEX_UPDATE_LIBRARY, \ - PLEX_SERVER_HOST, PLEX_HOST, PLEX_USERNAME, PLEX_PASSWORD, \ - showUpdateScheduler, __INITIALIZED__, LAUNCH_BROWSER, UPDATE_SHOWS_ON_START, SORT_ARTICLE, showList, loadingShowList, \ - NEWZNAB_DATA, NZBS, NZBS_UID, NZBS_HASH, EZRSS, TVTORRENTS, TVTORRENTS_DIGEST, TVTORRENTS_HASH, TVTORRENTS_OPTIONS, BTN, BTN_API_KEY, BTN_OPTIONS, \ - THEPIRATEBAY, THEPIRATEBAY_TRUSTED, THEPIRATEBAY_PROXY, THEPIRATEBAY_PROXY_URL, THEPIRATEBAY_BLACKLIST, THEPIRATEBAY_OPTIONS, TORRENTLEECH, TORRENTLEECH_USERNAME, TORRENTLEECH_PASSWORD, TORRENTLEECH_OPTIONS, \ - IPTORRENTS, IPTORRENTS_USERNAME, IPTORRENTS_PASSWORD, IPTORRENTS_FREELEECH, IPTORRENTS_OPTIONS, KAT, KAT_VERIFIED, KAT_OPTIONS, PUBLICHD, PUBLICHD_OPTIONS, SCC, SCC_USERNAME, SCC_PASSWORD, SCC_OPTIONS, HDTORRENTS, HDTORRENTS_USERNAME, HDTORRENTS_PASSWORD, HDTORRENTS_UID, HDTORRENTS_HASH, HDTORRENTS_OPTIONS, TORRENTDAY, TORRENTDAY_USERNAME, TORRENTDAY_PASSWORD, TORRENTDAY_UID, TORRENTDAY_HASH, TORRENTDAY_FREELEECH, TORRENTDAY_OPTIONS, \ - HDBITS, HDBITS_USERNAME, HDBITS_PASSKEY, HDBITS_OPTIONS, TORRENT_DIR, USENET_RETENTION, SOCKET_TIMEOUT, SEARCH_FREQUENCY, DEFAULT_SEARCH_FREQUENCY, BACKLOG_SEARCH_FREQUENCY, \ - NEXTGEN, NEXTGEN_USERNAME, NEXTGEN_PASSWORD, NEXTGEN_FREELEECH, NEXTGEN_OPTIONS, \ - QUALITY_DEFAULT, FLATTEN_FOLDERS_DEFAULT, SUBTITLES_DEFAULT, 
STATUS_DEFAULT, \ - GROWL_NOTIFY_ONSNATCH, GROWL_NOTIFY_ONDOWNLOAD, GROWL_NOTIFY_ONSUBTITLEDOWNLOAD, TWITTER_NOTIFY_ONSNATCH, TWITTER_NOTIFY_ONDOWNLOAD, TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD, \ - USE_GROWL, GROWL_HOST, GROWL_PASSWORD, USE_PROWL, PROWL_NOTIFY_ONSNATCH, PROWL_NOTIFY_ONDOWNLOAD, PROWL_NOTIFY_ONSUBTITLEDOWNLOAD, PROWL_API, PROWL_PRIORITY, PROG_DIR, \ - USE_PYTIVO, PYTIVO_NOTIFY_ONSNATCH, PYTIVO_NOTIFY_ONDOWNLOAD, PYTIVO_NOTIFY_ONSUBTITLEDOWNLOAD, PYTIVO_UPDATE_LIBRARY, PYTIVO_HOST, PYTIVO_SHARE_NAME, PYTIVO_TIVO_NAME, \ - USE_NMA, NMA_NOTIFY_ONSNATCH, NMA_NOTIFY_ONDOWNLOAD, NMA_NOTIFY_ONSUBTITLEDOWNLOAD, NMA_API, NMA_PRIORITY, \ - USE_PUSHALOT, PUSHALOT_NOTIFY_ONSNATCH, PUSHALOT_NOTIFY_ONDOWNLOAD, PUSHALOT_NOTIFY_ONSUBTITLEDOWNLOAD, PUSHALOT_AUTHORIZATIONTOKEN, \ - USE_PUSHBULLET, PUSHBULLET_NOTIFY_ONSNATCH, PUSHBULLET_NOTIFY_ONDOWNLOAD, PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD, PUSHBULLET_API, PUSHBULLET_DEVICE, \ - versionCheckScheduler, VERSION_NOTIFY, PROCESS_AUTOMATICALLY, UNPACK, \ - KEEP_PROCESSED_DIR, PROCESS_METHOD, TV_DOWNLOAD_DIR, MIN_SEARCH_FREQUENCY, \ - showQueueScheduler, searchQueueScheduler, ROOT_DIRS, CACHE_DIR, ACTUAL_CACHE_DIR, \ - NAMING_PATTERN, NAMING_MULTI_EP, NAMING_FORCE_FOLDERS, NAMING_ABD_PATTERN, NAMING_CUSTOM_ABD, NAMING_STRIP_YEAR, \ - RENAME_EPISODES, properFinderScheduler, PROVIDER_ORDER, autoPostProcesserScheduler, \ - WOMBLE, OMGWTFNZBS, OMGWTFNZBS_USERNAME, OMGWTFNZBS_APIKEY, providerList, newznabProviderList, torrentRssProviderList,\ - EXTRA_SCRIPTS, USE_TWITTER, TWITTER_USERNAME, TWITTER_PASSWORD, TWITTER_PREFIX, \ - USE_BOXCAR, BOXCAR_USERNAME, BOXCAR_PASSWORD, BOXCAR_NOTIFY_ONDOWNLOAD, BOXCAR_NOTIFY_ONSUBTITLEDOWNLOAD, BOXCAR_NOTIFY_ONSNATCH, \ - USE_PUSHOVER, PUSHOVER_USERKEY, PUSHOVER_NOTIFY_ONDOWNLOAD, PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD, PUSHOVER_NOTIFY_ONSNATCH, \ - USE_LIBNOTIFY, LIBNOTIFY_NOTIFY_ONSNATCH, LIBNOTIFY_NOTIFY_ONDOWNLOAD, LIBNOTIFY_NOTIFY_ONSUBTITLEDOWNLOAD, USE_NMJ, NMJ_HOST, NMJ_DATABASE, NMJ_MOUNT, USE_NMJv2, NMJv2_HOST, NMJv2_DATABASE, NMJv2_DBLOC, USE_SYNOINDEX, \ - USE_SYNOLOGYNOTIFIER, SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH, SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD, SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD, \ - USE_EMAIL, EMAIL_HOST, EMAIL_PORT, EMAIL_TLS, EMAIL_USER, EMAIL_PASSWORD, EMAIL_FROM, EMAIL_NOTIFY_ONSNATCH, EMAIL_NOTIFY_ONDOWNLOAD, EMAIL_NOTIFY_ONSUBTITLEDOWNLOAD, EMAIL_LIST, \ - USE_LISTVIEW, METADATA_XBMC, METADATA_XBMC_12PLUS, METADATA_MEDIABROWSER, METADATA_PS3, metadata_provider_dict, \ - NEWZBIN, NEWZBIN_USERNAME, NEWZBIN_PASSWORD, GIT_PATH, MOVE_ASSOCIATED_FILES, \ - GUI_NAME, HOME_LAYOUT, HISTORY_LAYOUT, DISPLAY_SHOW_SPECIALS, COMING_EPS_LAYOUT, COMING_EPS_SORT, COMING_EPS_DISPLAY_PAUSED, COMING_EPS_MISSED_RANGE, DATE_PRESET, TIME_PRESET, TIME_PRESET_W_SECONDS, \ - METADATA_WDTV, METADATA_TIVO, IGNORE_WORDS, CALENDAR_UNPROTECTED, CREATE_MISSING_SHOW_DIRS, \ - ADD_SHOWS_WO_DIR, USE_SUBTITLES, SUBTITLES_LANGUAGES, SUBTITLES_DIR, SUBTITLES_SERVICES_LIST, SUBTITLES_SERVICES_ENABLED, SUBTITLES_HISTORY, SUBTITLES_FINDER_FREQUENCY, subtitlesFinderScheduler, \ - USE_FAILED_DOWNLOADS, DELETE_FAILED, ANON_REDIRECT, LOCALHOST_IP, TMDB_API_KEY + USE_NZBS, USE_TORRENTS, NZB_METHOD, NZB_DIR, DOWNLOAD_PROPERS, ALLOW_HIGH_PRIORITY, TORRENT_METHOD, \ + SAB_USERNAME, SAB_PASSWORD, SAB_APIKEY, SAB_CATEGORY, SAB_HOST, \ + NZBGET_USERNAME, NZBGET_PASSWORD, NZBGET_CATEGORY, NZBGET_HOST, currentSearchScheduler, backlogSearchScheduler, \ + TORRENT_USERNAME, TORRENT_PASSWORD, TORRENT_HOST, TORRENT_PATH, TORRENT_RATIO, TORRENT_PAUSED, 
TORRENT_HIGH_BANDWIDTH, TORRENT_LABEL, \ + USE_XBMC, XBMC_NOTIFY_ONSNATCH, XBMC_NOTIFY_ONDOWNLOAD, XBMC_NOTIFY_ONSUBTITLEDOWNLOAD, XBMC_UPDATE_FULL, XBMC_UPDATE_ONLYFIRST, \ + XBMC_UPDATE_LIBRARY, XBMC_HOST, XBMC_USERNAME, XBMC_PASSWORD, \ + USE_TRAKT, TRAKT_USERNAME, TRAKT_PASSWORD, TRAKT_API, TRAKT_REMOVE_WATCHLIST, TRAKT_USE_WATCHLIST, TRAKT_METHOD_ADD, TRAKT_START_PAUSED, traktWatchListCheckerSchedular, \ + USE_PLEX, PLEX_NOTIFY_ONSNATCH, PLEX_NOTIFY_ONDOWNLOAD, PLEX_NOTIFY_ONSUBTITLEDOWNLOAD, PLEX_UPDATE_LIBRARY, \ + PLEX_SERVER_HOST, PLEX_HOST, PLEX_USERNAME, PLEX_PASSWORD, \ + showUpdateScheduler, __INITIALIZED__, LAUNCH_BROWSER, UPDATE_SHOWS_ON_START, SORT_ARTICLE, showList, loadingShowList, \ + NEWZNAB_DATA, NZBS, NZBS_UID, NZBS_HASH, EZRSS, TVTORRENTS, TVTORRENTS_DIGEST, TVTORRENTS_HASH, TVTORRENTS_OPTIONS, BTN, BTN_API_KEY, BTN_OPTIONS, \ + THEPIRATEBAY, THEPIRATEBAY_TRUSTED, THEPIRATEBAY_PROXY, THEPIRATEBAY_PROXY_URL, THEPIRATEBAY_BLACKLIST, THEPIRATEBAY_OPTIONS, TORRENTLEECH, TORRENTLEECH_USERNAME, TORRENTLEECH_PASSWORD, TORRENTLEECH_OPTIONS, \ + IPTORRENTS, IPTORRENTS_USERNAME, IPTORRENTS_PASSWORD, IPTORRENTS_FREELEECH, IPTORRENTS_OPTIONS, KAT, KAT_VERIFIED, KAT_OPTIONS, PUBLICHD, PUBLICHD_OPTIONS, SCC, SCC_USERNAME, SCC_PASSWORD, SCC_OPTIONS, HDTORRENTS, HDTORRENTS_USERNAME, HDTORRENTS_PASSWORD, HDTORRENTS_UID, HDTORRENTS_HASH, HDTORRENTS_OPTIONS, TORRENTDAY, TORRENTDAY_USERNAME, TORRENTDAY_PASSWORD, TORRENTDAY_UID, TORRENTDAY_HASH, TORRENTDAY_FREELEECH, TORRENTDAY_OPTIONS, \ + HDBITS, HDBITS_USERNAME, HDBITS_PASSKEY, HDBITS_OPTIONS, TORRENT_DIR, USENET_RETENTION, SOCKET_TIMEOUT, SEARCH_FREQUENCY, DEFAULT_SEARCH_FREQUENCY, BACKLOG_SEARCH_FREQUENCY, \ + NEXTGEN, NEXTGEN_USERNAME, NEXTGEN_PASSWORD, NEXTGEN_FREELEECH, NEXTGEN_OPTIONS, \ + QUALITY_DEFAULT, FLATTEN_FOLDERS_DEFAULT, SUBTITLES_DEFAULT, STATUS_DEFAULT, \ + GROWL_NOTIFY_ONSNATCH, GROWL_NOTIFY_ONDOWNLOAD, GROWL_NOTIFY_ONSUBTITLEDOWNLOAD, TWITTER_NOTIFY_ONSNATCH, TWITTER_NOTIFY_ONDOWNLOAD, TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD, \ + USE_GROWL, GROWL_HOST, GROWL_PASSWORD, USE_PROWL, PROWL_NOTIFY_ONSNATCH, PROWL_NOTIFY_ONDOWNLOAD, PROWL_NOTIFY_ONSUBTITLEDOWNLOAD, PROWL_API, PROWL_PRIORITY, PROG_DIR, \ + USE_PYTIVO, PYTIVO_NOTIFY_ONSNATCH, PYTIVO_NOTIFY_ONDOWNLOAD, PYTIVO_NOTIFY_ONSUBTITLEDOWNLOAD, PYTIVO_UPDATE_LIBRARY, PYTIVO_HOST, PYTIVO_SHARE_NAME, PYTIVO_TIVO_NAME, \ + USE_NMA, NMA_NOTIFY_ONSNATCH, NMA_NOTIFY_ONDOWNLOAD, NMA_NOTIFY_ONSUBTITLEDOWNLOAD, NMA_API, NMA_PRIORITY, \ + USE_PUSHALOT, PUSHALOT_NOTIFY_ONSNATCH, PUSHALOT_NOTIFY_ONDOWNLOAD, PUSHALOT_NOTIFY_ONSUBTITLEDOWNLOAD, PUSHALOT_AUTHORIZATIONTOKEN, \ + USE_PUSHBULLET, PUSHBULLET_NOTIFY_ONSNATCH, PUSHBULLET_NOTIFY_ONDOWNLOAD, PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD, PUSHBULLET_API, PUSHBULLET_DEVICE, \ + versionCheckScheduler, VERSION_NOTIFY, PROCESS_AUTOMATICALLY, UNPACK, \ + KEEP_PROCESSED_DIR, PROCESS_METHOD, TV_DOWNLOAD_DIR, MIN_SEARCH_FREQUENCY, \ + showQueueScheduler, searchQueueScheduler, ROOT_DIRS, CACHE_DIR, ACTUAL_CACHE_DIR, \ + NAMING_PATTERN, NAMING_MULTI_EP, NAMING_FORCE_FOLDERS, NAMING_ABD_PATTERN, NAMING_CUSTOM_ABD, NAMING_STRIP_YEAR, \ + RENAME_EPISODES, properFinderScheduler, PROVIDER_ORDER, autoPostProcesserScheduler, \ + WOMBLE, OMGWTFNZBS, OMGWTFNZBS_USERNAME, OMGWTFNZBS_APIKEY, providerList, newznabProviderList, torrentRssProviderList, \ + EXTRA_SCRIPTS, USE_TWITTER, TWITTER_USERNAME, TWITTER_PASSWORD, TWITTER_PREFIX, \ + USE_BOXCAR, BOXCAR_USERNAME, BOXCAR_PASSWORD, BOXCAR_NOTIFY_ONDOWNLOAD, BOXCAR_NOTIFY_ONSUBTITLEDOWNLOAD, 
BOXCAR_NOTIFY_ONSNATCH, \ + USE_PUSHOVER, PUSHOVER_USERKEY, PUSHOVER_NOTIFY_ONDOWNLOAD, PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD, PUSHOVER_NOTIFY_ONSNATCH, \ + USE_LIBNOTIFY, LIBNOTIFY_NOTIFY_ONSNATCH, LIBNOTIFY_NOTIFY_ONDOWNLOAD, LIBNOTIFY_NOTIFY_ONSUBTITLEDOWNLOAD, USE_NMJ, NMJ_HOST, NMJ_DATABASE, NMJ_MOUNT, USE_NMJv2, NMJv2_HOST, NMJv2_DATABASE, NMJv2_DBLOC, USE_SYNOINDEX, \ + USE_SYNOLOGYNOTIFIER, SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH, SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD, SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD, \ + USE_EMAIL, EMAIL_HOST, EMAIL_PORT, EMAIL_TLS, EMAIL_USER, EMAIL_PASSWORD, EMAIL_FROM, EMAIL_NOTIFY_ONSNATCH, EMAIL_NOTIFY_ONDOWNLOAD, EMAIL_NOTIFY_ONSUBTITLEDOWNLOAD, EMAIL_LIST, \ + USE_LISTVIEW, METADATA_XBMC, METADATA_XBMC_12PLUS, METADATA_MEDIABROWSER, METADATA_PS3, metadata_provider_dict, \ + NEWZBIN, NEWZBIN_USERNAME, NEWZBIN_PASSWORD, GIT_PATH, MOVE_ASSOCIATED_FILES, \ + GUI_NAME, HOME_LAYOUT, HISTORY_LAYOUT, DISPLAY_SHOW_SPECIALS, COMING_EPS_LAYOUT, COMING_EPS_SORT, COMING_EPS_DISPLAY_PAUSED, COMING_EPS_MISSED_RANGE, DATE_PRESET, TIME_PRESET, TIME_PRESET_W_SECONDS, \ + METADATA_WDTV, METADATA_TIVO, IGNORE_WORDS, CALENDAR_UNPROTECTED, CREATE_MISSING_SHOW_DIRS, \ + ADD_SHOWS_WO_DIR, USE_SUBTITLES, SUBTITLES_LANGUAGES, SUBTITLES_DIR, SUBTITLES_SERVICES_LIST, SUBTITLES_SERVICES_ENABLED, SUBTITLES_HISTORY, SUBTITLES_FINDER_FREQUENCY, subtitlesFinderScheduler, \ + USE_FAILED_DOWNLOADS, DELETE_FAILED, ANON_REDIRECT, LOCALHOST_IP, TMDB_API_KEY if __INITIALIZED__: return False @@ -510,13 +514,13 @@ def initialize(consoleLogging=True): ACTUAL_LOG_DIR = check_setting_str(CFG, 'General', 'log_dir', 'Logs') # put the log dir inside the data dir, unless an absolute path LOG_DIR = os.path.normpath(os.path.join(DATA_DIR, ACTUAL_LOG_DIR)) - + if not helpers.makeDir(LOG_DIR): logger.log(u"!!! 
No log folder, logging to screen only!", logger.ERROR) - + SOCKET_TIMEOUT = check_setting_int(CFG, 'General', 'socket_timeout', 30) socket.setdefaulttimeout(SOCKET_TIMEOUT) - + try: WEB_PORT = check_setting_int(CFG, 'General', 'web_port', 8081) except: @@ -533,14 +537,12 @@ def initialize(consoleLogging=True): WEB_USERNAME = check_setting_str(CFG, 'General', 'web_username', '') WEB_PASSWORD = check_setting_str(CFG, 'General', 'web_password', '') LAUNCH_BROWSER = bool(check_setting_int(CFG, 'General', 'launch_browser', 1)) - - + LOCALHOST_IP = check_setting_str(CFG, 'General', 'localhost_ip', '') ANON_REDIRECT = check_setting_str(CFG, 'General', 'anon_redirect', 'http://dereferer.org/?') # attempt to help prevent users from breaking links by using a bad url if not ANON_REDIRECT.endswith('?'): ANON_REDIRECT = '' - UPDATE_SHOWS_ON_START = bool(check_setting_int(CFG, 'General', 'update_shows_on_start', 0)) SORT_ARTICLE = bool(check_setting_int(CFG, 'General', 'sort_article', 0)) @@ -661,7 +663,7 @@ def initialize(consoleLogging=True): NEXTGEN = bool(check_setting_int(CFG, 'NEXTGEN', 'nextgen', 0)) NEXTGEN_USERNAME = check_setting_str(CFG, 'NEXTGEN', 'nextgen_username', '') NEXTGEN_PASSWORD = check_setting_str(CFG, 'NEXTGEN', 'nextgen_password', '') - NEXTGEN_OPTIONS = check_setting_str(CFG, 'NEXTGEN', 'nextgen_options', '') + NEXTGEN_OPTIONS = check_setting_str(CFG, 'NEXTGEN', 'nextgen_options', '') KAT = bool(check_setting_int(CFG, 'KAT', 'kat', 0)) KAT_VERIFIED = bool(check_setting_int(CFG, 'KAT', 'kat_verified', 1)) @@ -690,7 +692,7 @@ def initialize(consoleLogging=True): HDBITS_USERNAME = check_setting_str(CFG, 'HDBITS', 'hdbits_username', '') HDBITS_PASSKEY = check_setting_str(CFG, 'HDBITS', 'hdbits_passkey', '') HDBITS_OPTIONS = check_setting_str(CFG, 'HDBITS', 'hdbits_options', '') - + NZBS = bool(check_setting_int(CFG, 'NZBs', 'nzbs', 0)) NZBS_UID = check_setting_str(CFG, 'NZBs', 'nzbs_uid', '') NZBS_HASH = check_setting_str(CFG, 'NZBs', 'nzbs_hash', '') @@ -711,7 +713,7 @@ def initialize(consoleLogging=True): SAB_CATEGORY = check_setting_str(CFG, 'SABnzbd', 'sab_category', 'tv') SAB_HOST = check_setting_str(CFG, 'SABnzbd', 'sab_host', '') - NZBGET_USERNAME = check_setting_str(CFG, 'NZBget', 'nzbget_username', 'nzbget') + NZBGET_USERNAME = check_setting_str(CFG, 'NZBget', 'nzbget_username', 'nzbget') NZBGET_PASSWORD = check_setting_str(CFG, 'NZBget', 'nzbget_password', 'tegbzn6789') NZBGET_CATEGORY = check_setting_str(CFG, 'NZBget', 'nzbget_category', 'tv') NZBGET_HOST = check_setting_str(CFG, 'NZBget', 'nzbget_host', '') @@ -763,7 +765,8 @@ def initialize(consoleLogging=True): USE_TWITTER = bool(check_setting_int(CFG, 'Twitter', 'use_twitter', 0)) TWITTER_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'Twitter', 'twitter_notify_onsnatch', 0)) TWITTER_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'Twitter', 'twitter_notify_ondownload', 0)) - TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'Twitter', 'twitter_notify_onsubtitledownload', 0)) + TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD = bool( + check_setting_int(CFG, 'Twitter', 'twitter_notify_onsubtitledownload', 0)) TWITTER_USERNAME = check_setting_str(CFG, 'Twitter', 'twitter_username', '') TWITTER_PASSWORD = check_setting_str(CFG, 'Twitter', 'twitter_password', '') TWITTER_PREFIX = check_setting_str(CFG, 'Twitter', 'twitter_prefix', 'Sick Beard') @@ -777,13 +780,15 @@ def initialize(consoleLogging=True): USE_PUSHOVER = bool(check_setting_int(CFG, 'Pushover', 'use_pushover', 0)) PUSHOVER_NOTIFY_ONSNATCH = 
bool(check_setting_int(CFG, 'Pushover', 'pushover_notify_onsnatch', 0)) PUSHOVER_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'Pushover', 'pushover_notify_ondownload', 0)) - PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'Pushover', 'pushover_notify_onsubtitledownload', 0)) + PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD = bool( + check_setting_int(CFG, 'Pushover', 'pushover_notify_onsubtitledownload', 0)) PUSHOVER_USERKEY = check_setting_str(CFG, 'Pushover', 'pushover_userkey', '') USE_LIBNOTIFY = bool(check_setting_int(CFG, 'Libnotify', 'use_libnotify', 0)) LIBNOTIFY_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'Libnotify', 'libnotify_notify_onsnatch', 0)) LIBNOTIFY_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'Libnotify', 'libnotify_notify_ondownload', 0)) - LIBNOTIFY_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'Libnotify', 'libnotify_notify_onsubtitledownload', 0)) + LIBNOTIFY_NOTIFY_ONSUBTITLEDOWNLOAD = bool( + check_setting_int(CFG, 'Libnotify', 'libnotify_notify_onsubtitledownload', 0)) USE_NMJ = bool(check_setting_int(CFG, 'NMJ', 'use_nmj', 0)) NMJ_HOST = check_setting_str(CFG, 'NMJ', 'nmj_host', '') @@ -798,9 +803,12 @@ def initialize(consoleLogging=True): USE_SYNOINDEX = bool(check_setting_int(CFG, 'Synology', 'use_synoindex', 0)) USE_SYNOLOGYNOTIFIER = bool(check_setting_int(CFG, 'SynologyNotifier', 'use_synologynotifier', 0)) - SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'SynologyNotifier', 'synologynotifier_notify_onsnatch', 0)) - SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'SynologyNotifier', 'synologynotifier_notify_ondownload', 0)) - SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'SynologyNotifier', 'synologynotifier_notify_onsubtitledownload', 0)) + SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH = bool( + check_setting_int(CFG, 'SynologyNotifier', 'synologynotifier_notify_onsnatch', 0)) + SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD = bool( + check_setting_int(CFG, 'SynologyNotifier', 'synologynotifier_notify_ondownload', 0)) + SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD = bool( + check_setting_int(CFG, 'SynologyNotifier', 'synologynotifier_notify_onsubtitledownload', 0)) USE_TRAKT = bool(check_setting_int(CFG, 'Trakt', 'use_trakt', 0)) TRAKT_USERNAME = check_setting_str(CFG, 'Trakt', 'trakt_username', '') @@ -831,13 +839,15 @@ def initialize(consoleLogging=True): USE_PUSHALOT = bool(check_setting_int(CFG, 'Pushalot', 'use_pushalot', 0)) PUSHALOT_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'Pushalot', 'pushalot_notify_onsnatch', 0)) PUSHALOT_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'Pushalot', 'pushalot_notify_ondownload', 0)) - PUSHALOT_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'Pushalot', 'pushalot_notify_onsubtitledownload', 0)) + PUSHALOT_NOTIFY_ONSUBTITLEDOWNLOAD = bool( + check_setting_int(CFG, 'Pushalot', 'pushalot_notify_onsubtitledownload', 0)) PUSHALOT_AUTHORIZATIONTOKEN = check_setting_str(CFG, 'Pushalot', 'pushalot_authorizationtoken', '') USE_PUSHBULLET = bool(check_setting_int(CFG, 'Pushbullet', 'use_pushbullet', 0)) PUSHBULLET_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'Pushbullet', 'pushbullet_notify_onsnatch', 0)) PUSHBULLET_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'Pushbullet', 'pushbullet_notify_ondownload', 0)) - PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'Pushbullet', 'pushbullet_notify_onsubtitledownload', 0)) + PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD = bool( + check_setting_int(CFG, 'Pushbullet', 
'pushbullet_notify_onsubtitledownload', 0)) PUSHBULLET_API = check_setting_str(CFG, 'Pushbullet', 'pushbullet_api', '') PUSHBULLET_DEVICE = check_setting_str(CFG, 'Pushbullet', 'pushbullet_device', '') @@ -859,7 +869,9 @@ def initialize(consoleLogging=True): SUBTITLES_LANGUAGES = [] SUBTITLES_DIR = check_setting_str(CFG, 'Subtitles', 'subtitles_dir', '') SUBTITLES_SERVICES_LIST = check_setting_str(CFG, 'Subtitles', 'SUBTITLES_SERVICES_LIST', '').split(',') - SUBTITLES_SERVICES_ENABLED = [int(x) for x in check_setting_str(CFG, 'Subtitles', 'SUBTITLES_SERVICES_ENABLED', '').split('|') if x] + SUBTITLES_SERVICES_ENABLED = [int(x) for x in + check_setting_str(CFG, 'Subtitles', 'SUBTITLES_SERVICES_ENABLED', '').split('|') + if x] SUBTITLES_DEFAULT = bool(check_setting_int(CFG, 'Subtitles', 'subtitles_default', 0)) SUBTITLES_HISTORY = bool(check_setting_int(CFG, 'Subtitles', 'subtitles_history', 0)) SUBTITLES_FINDER_FREQUENCY = check_setting_int(CFG, 'Subtitles', 'subtitles_finder_frequency', 1) @@ -870,10 +882,11 @@ def initialize(consoleLogging=True): GIT_PATH = check_setting_str(CFG, 'General', 'git_path', '') IGNORE_WORDS = check_setting_str(CFG, 'General', 'ignore_words', IGNORE_WORDS) - + CALENDAR_UNPROTECTED = bool(check_setting_int(CFG, 'General', 'calendar_unprotected', 0)) - EXTRA_SCRIPTS = [x.strip() for x in check_setting_str(CFG, 'General', 'extra_scripts', '').split('|') if x.strip()] + EXTRA_SCRIPTS = [x.strip() for x in check_setting_str(CFG, 'General', 'extra_scripts', '').split('|') if + x.strip()] USE_LISTVIEW = bool(check_setting_int(CFG, 'General', 'use_listview', 0)) @@ -895,18 +908,18 @@ def initialize(consoleLogging=True): COMING_EPS_MISSED_RANGE = check_setting_int(CFG, 'GUI', 'coming_eps_missed_range', 7) DATE_PRESET = check_setting_str(CFG, 'GUI', 'date_preset', '%x') TIME_PRESET_W_SECONDS = check_setting_str(CFG, 'GUI', 'time_preset', '%I:%M:%S %p') - TIME_PRESET = TIME_PRESET_W_SECONDS.replace(u":%S",u"") + TIME_PRESET = TIME_PRESET_W_SECONDS.replace(u":%S", u"") NEWZNAB_DATA = check_setting_str(CFG, 'Newznab', 'newznab_data', '') - newznabProviderList = providers.getNewznabProviderList(NEWZNAB_DATA) - + newznabProviderList = providers.getNewznabProviderList(NEWZNAB_DATA) + torrentRssData = check_setting_str(CFG, 'TorrentRss', 'torrentrss_data', '') torrentRssProviderList = providers.getTorrentRssProviderList(torrentRssData) if not os.path.isfile(CONFIG_FILE): logger.log(u"Unable to find '" + CONFIG_FILE + "', all settings will be default!", logger.DEBUG) save_config() - + # start up all the threads logger.sb_log_instance.initLogging(consoleLogging=consoleLogging) @@ -934,8 +947,7 @@ def initialize(consoleLogging=True): (METADATA_PS3, metadata.ps3), (METADATA_WDTV, metadata.wdtv), (METADATA_TIVO, metadata.tivo), - ]: - + ]: (cur_metadata_config, cur_metadata_class) = cur_metadata_tuple tmp_provider = cur_metadata_class.metadata_class() tmp_provider.set_config(cur_metadata_config) @@ -954,59 +966,59 @@ def initialize(consoleLogging=True): # the interval for this is stored inside the ShowUpdater class showUpdaterInstance = showUpdater.ShowUpdater() showUpdateScheduler = scheduler.Scheduler(showUpdaterInstance, - cycleTime=showUpdaterInstance.updateInterval, - threadName="SHOWUPDATER", - runImmediately=False) + cycleTime=showUpdaterInstance.updateInterval, + threadName="SHOWUPDATER", + runImmediately=False) versionCheckScheduler = scheduler.Scheduler(versionChecker.CheckVersion(), - cycleTime=datetime.timedelta(hours=12), - threadName="CHECKVERSION", - 
runImmediately=True) + cycleTime=datetime.timedelta(hours=12), + threadName="CHECKVERSION", + runImmediately=True) showQueueScheduler = scheduler.Scheduler(show_queue.ShowQueue(), - cycleTime=datetime.timedelta(seconds=3), - threadName="SHOWQUEUE", - silent=True) + cycleTime=datetime.timedelta(seconds=3), + threadName="SHOWQUEUE", + silent=True) searchQueueScheduler = scheduler.Scheduler(search_queue.SearchQueue(), - cycleTime=datetime.timedelta(seconds=3), - threadName="SEARCHQUEUE", - silent=True) + cycleTime=datetime.timedelta(seconds=3), + threadName="SEARCHQUEUE", + silent=True) properFinderInstance = properFinder.ProperFinder() properFinderScheduler = scheduler.Scheduler(properFinderInstance, - cycleTime=properFinderInstance.updateInterval, - threadName="FINDPROPERS", - runImmediately=True) + cycleTime=properFinderInstance.updateInterval, + threadName="FINDPROPERS", + runImmediately=True) if not DOWNLOAD_PROPERS: properFinderScheduler.silent = True autoPostProcesserScheduler = scheduler.Scheduler(autoPostProcesser.PostProcesser(), - cycleTime=datetime.timedelta(minutes=10), - threadName="POSTPROCESSER", - runImmediately=True) + cycleTime=datetime.timedelta(minutes=10), + threadName="POSTPROCESSER", + runImmediately=True) if not PROCESS_AUTOMATICALLY: autoPostProcesserScheduler.silent = True - + traktWatchListCheckerSchedular = scheduler.Scheduler(traktWatchListChecker.TraktChecker(), - cycleTime=datetime.timedelta(hours=1), - threadName="TRAKTWATCHLIST", - runImmediately=True) - + cycleTime=datetime.timedelta(hours=1), + threadName="TRAKTWATCHLIST", + runImmediately=True) + if not USE_TRAKT: traktWatchListCheckerSchedular.silent = True - + backlogSearchScheduler = searchBacklog.BacklogSearchScheduler(searchBacklog.BacklogSearcher(), - cycleTime=datetime.timedelta(minutes=get_backlog_cycle_time()), + cycleTime=datetime.timedelta( + minutes=get_backlog_cycle_time()), threadName="BACKLOG", runImmediately=True) backlogSearchScheduler.action.cycleTime = BACKLOG_SEARCH_FREQUENCY - subtitlesFinderScheduler = scheduler.Scheduler(subtitles.SubtitlesFinder(), - cycleTime=datetime.timedelta(hours=SUBTITLES_FINDER_FREQUENCY), - threadName="FINDSUBTITLES", - runImmediately=True) + cycleTime=datetime.timedelta(hours=SUBTITLES_FINDER_FREQUENCY), + threadName="FINDSUBTITLES", + runImmediately=True) if not USE_SUBTITLES: subtitlesFinderScheduler.silent = True @@ -1017,13 +1029,13 @@ def initialize(consoleLogging=True): __INITIALIZED__ = True return True -def start(): +def start(): global __INITIALIZED__, currentSearchScheduler, backlogSearchScheduler, \ - showUpdateScheduler, versionCheckScheduler, showQueueScheduler, \ - properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \ - subtitlesFinderScheduler, started, USE_SUBTITLES, \ - traktWatchListCheckerSchedular, started + showUpdateScheduler, versionCheckScheduler, showQueueScheduler, \ + properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \ + subtitlesFinderScheduler, started, USE_SUBTITLES, \ + traktWatchListCheckerSchedular, started with INIT_LOCK: @@ -1062,12 +1074,12 @@ def start(): started = True -def halt (): +def halt(): global __INITIALIZED__, currentSearchScheduler, backlogSearchScheduler, showUpdateScheduler, \ - showQueueScheduler, properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \ - subtitlesFinderScheduler, started, \ - traktWatchListCheckerSchedular + showQueueScheduler, properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \ + subtitlesFinderScheduler, 
started, \ + traktWatchListCheckerSchedular with INIT_LOCK: @@ -1147,7 +1159,6 @@ def halt (): except: pass - __INITIALIZED__ = False @@ -1158,7 +1169,6 @@ def sig_handler(signum=None, frame=None): def saveAll(): - global showList # write all shows @@ -1172,7 +1182,6 @@ def saveAll(): def saveAndShutdown(restart=False): - halt() saveAll() @@ -1197,7 +1206,8 @@ def saveAndShutdown(restart=False): popen_list = [os.path.join(PROG_DIR, 'updater.exe'), str(PID), sys.executable] else: logger.log(u"Unknown SB launch method, please file a bug report about this", logger.ERROR) - popen_list = [sys.executable, os.path.join(PROG_DIR, 'updater.py'), str(PID), sys.executable, MY_FULLNAME ] + popen_list = [sys.executable, os.path.join(PROG_DIR, 'updater.py'), str(PID), sys.executable, + MY_FULLNAME] if popen_list: popen_list += MY_ARGS @@ -1212,20 +1222,24 @@ def saveAndShutdown(restart=False): def invoke_command(to_call, *args, **kwargs): global invoked_command + def delegate(): to_call(*args, **kwargs) + invoked_command = delegate - logger.log(u"Placed invoked command: "+repr(invoked_command)+" for "+repr(to_call)+" with "+repr(args)+" and "+repr(kwargs), logger.DEBUG) + logger.log(u"Placed invoked command: " + repr(invoked_command) + " for " + repr(to_call) + " with " + repr( + args) + " and " + repr(kwargs), logger.DEBUG) + def invoke_restart(soft=True): invoke_command(restart, soft=soft) + def invoke_shutdown(): invoke_command(saveAndShutdown) def restart(soft=True): - if soft: halt() saveAll() @@ -1238,12 +1252,10 @@ def restart(soft=True): saveAndShutdown(restart=True) - def save_config(): - new_config = ConfigObj() new_config.filename = CONFIG_FILE - + # For passwords you must include the word `password` in the item_name and add `helpers.encrypt(ITEM_NAME, ENCRYPTION_VERSION)` in save_config() new_config['General'] = {} new_config['General']['config_version'] = CONFIG_VERSION @@ -1354,8 +1366,8 @@ def save_config(): new_config['NEXTGEN']['nextgen'] = int(NEXTGEN) new_config['NEXTGEN']['nextgen_username'] = NEXTGEN_USERNAME new_config['NEXTGEN']['nextgen_password'] = helpers.encrypt(NEXTGEN_PASSWORD, ENCRYPTION_VERSION) - new_config['NEXTGEN']['nextgen_options'] = NEXTGEN_OPTIONS - + new_config['NEXTGEN']['nextgen_options'] = NEXTGEN_OPTIONS + new_config['KAT'] = {} new_config['KAT']['kat'] = int(KAT) new_config['KAT']['kat_verified'] = int(KAT_VERIFIED) @@ -1417,7 +1429,7 @@ def save_config(): new_config['NZBget'] = {} - new_config['NZBget']['nzbget_username'] = NZBGET_USERNAME + new_config['NZBget']['nzbget_username'] = NZBGET_USERNAME new_config['NZBget']['nzbget_password'] = helpers.encrypt(NZBGET_PASSWORD, ENCRYPTION_VERSION) new_config['NZBget']['nzbget_category'] = NZBGET_CATEGORY new_config['NZBget']['nzbget_host'] = NZBGET_HOST @@ -1519,7 +1531,8 @@ def save_config(): new_config['SynologyNotifier']['use_synologynotifier'] = int(USE_SYNOLOGYNOTIFIER) new_config['SynologyNotifier']['synologynotifier_notify_onsnatch'] = int(SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH) new_config['SynologyNotifier']['synologynotifier_notify_ondownload'] = int(SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD) - new_config['SynologyNotifier']['synologynotifier_notify_onsubtitledownload'] = int(SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD) + new_config['SynologyNotifier']['synologynotifier_notify_onsubtitledownload'] = int( + SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD) new_config['Trakt'] = {} new_config['Trakt']['use_trakt'] = int(USE_TRAKT) @@ -1564,7 +1577,6 @@ def save_config(): new_config['Pushbullet']['pushbullet_api'] = 
PUSHBULLET_API new_config['Pushbullet']['pushbullet_device'] = PUSHBULLET_DEVICE - new_config['Email'] = {} new_config['Email']['use_email'] = int(USE_EMAIL) new_config['Email']['email_notify_onsnatch'] = int(EMAIL_NOTIFY_ONSNATCH) @@ -1606,7 +1618,7 @@ def save_config(): new_config['Subtitles']['subtitles_history'] = int(SUBTITLES_HISTORY) new_config['Subtitles']['subtitles_finder_frequency'] = int(SUBTITLES_FINDER_FREQUENCY) - new_config['FailedDownloads']= {} + new_config['FailedDownloads'] = {} new_config['FailedDownloads']['use_failed_downloads'] = int(USE_FAILED_DOWNLOADS) new_config['FailedDownloads']['delete_failed'] = int(DELETE_FAILED) @@ -1628,12 +1640,12 @@ def launchBrowser(startPort=None): except: logger.log(u"Unable to launch a browser", logger.ERROR) -def getEpList(epIDs, showid=None): +def getEpList(epIDs, showid=None): if epIDs == None or len(epIDs) == 0: return [] - query = "SELECT * FROM tv_episodes WHERE indexerid in (%s)" % (",".join(['?']*len(epIDs)),) + query = "SELECT * FROM tv_episodes WHERE indexerid in (%s)" % (",".join(['?'] * len(epIDs)),) params = epIDs if showid != None: diff --git a/sickbeard/autoPostProcesser.py b/sickbeard/autoPostProcesser.py index cbce97c3..c58f4cb3 100644 --- a/sickbeard/autoPostProcesser.py +++ b/sickbeard/autoPostProcesser.py @@ -24,18 +24,21 @@ from sickbeard import logger from sickbeard import encodingKludge as ek from sickbeard import processTV -class PostProcesser(): +class PostProcesser(): def run(self): if not sickbeard.PROCESS_AUTOMATICALLY: return if not ek.ek(os.path.isdir, sickbeard.TV_DOWNLOAD_DIR): - logger.log(u"Automatic post-processing attempted but dir " + sickbeard.TV_DOWNLOAD_DIR + " doesn't exist", logger.ERROR) + logger.log(u"Automatic post-processing attempted but dir " + sickbeard.TV_DOWNLOAD_DIR + " doesn't exist", + logger.ERROR) return if not ek.ek(os.path.isabs, sickbeard.TV_DOWNLOAD_DIR): - logger.log(u"Automatic post-processing attempted but dir " + sickbeard.TV_DOWNLOAD_DIR + " is relative (and probably not what you really want to process)", logger.ERROR) + logger.log( + u"Automatic post-processing attempted but dir " + sickbeard.TV_DOWNLOAD_DIR + " is relative (and probably not what you really want to process)", + logger.ERROR) return processTV.processDir(sickbeard.TV_DOWNLOAD_DIR) diff --git a/sickbeard/browser.py b/sickbeard/browser.py index 531e4ae6..71fb65e5 100644 --- a/sickbeard/browser.py +++ b/sickbeard/browser.py @@ -38,7 +38,7 @@ def getWinDrives(): assert os.name == 'nt' drives = [] - bitmask = windll.kernel32.GetLogicalDrives() #@UndefinedVariable + bitmask = windll.kernel32.GetLogicalDrives() #@UndefinedVariable for letter in string.uppercase: if bitmask & 1: drives.append(letter) @@ -79,26 +79,27 @@ def foldersAtPath(path, includeParent=False): if path == parentPath and os.name == 'nt': parentPath = "" - fileList = [{ 'name': filename, 'path': ek.ek(os.path.join, path, filename) } for filename in ek.ek(os.listdir, path)] + fileList = [{'name': filename, 'path': ek.ek(os.path.join, path, filename)} for filename in ek.ek(os.listdir, path)] fileList = filter(lambda entry: ek.ek(os.path.isdir, entry['path']), fileList) # prune out directories to proect the user from doing stupid things (already lower case the dir to reduce calls) - hideList = ["boot", "bootmgr", "cache", "msocache", "recovery", "$recycle.bin", "recycler", "system volume information", "temporary internet files"] # windows specific - hideList += [".fseventd", ".spotlight", ".trashes", ".vol", "cachedmessages", "caches", "trash"] 
# osx specific + hideList = ["boot", "bootmgr", "cache", "msocache", "recovery", "$recycle.bin", "recycler", + "system volume information", "temporary internet files"] # windows specific + hideList += [".fseventd", ".spotlight", ".trashes", ".vol", "cachedmessages", "caches", "trash"] # osx specific fileList = filter(lambda entry: entry['name'].lower() not in hideList, fileList) - fileList = sorted(fileList, lambda x, y: cmp(os.path.basename(x['name']).lower(), os.path.basename(y['path']).lower())) + fileList = sorted(fileList, + lambda x, y: cmp(os.path.basename(x['name']).lower(), os.path.basename(y['path']).lower())) entries = [{'current_path': path}] if includeParent and parentPath != path: - entries.append({ 'name': "..", 'path': parentPath }) + entries.append({'name': "..", 'path': parentPath}) entries.extend(fileList) return entries class WebFileBrowser: - @cherrypy.expose def index(self, path=''): cherrypy.response.headers['Content-Type'] = "application/json" @@ -108,4 +109,4 @@ class WebFileBrowser: def complete(self, term): cherrypy.response.headers['Content-Type'] = "application/json" paths = [entry['path'] for entry in foldersAtPath(os.path.dirname(term)) if 'path' in entry] - return json.dumps( paths ) + return json.dumps(paths) diff --git a/sickbeard/classes.py b/sickbeard/classes.py index e0f545dd..ccc0170b 100644 --- a/sickbeard/classes.py +++ b/sickbeard/classes.py @@ -25,9 +25,11 @@ import datetime from common import USER_AGENT, Quality + class SickBeardURLopener(urllib.FancyURLopener): version = USER_AGENT + class AuthURLOpener(SickBeardURLopener): """ URLOpener class that supports http auth without needing interactive password entry. @@ -36,13 +38,14 @@ class AuthURLOpener(SickBeardURLopener): user: username to use for HTTP auth pw: password to use for HTTP auth """ + def __init__(self, user, pw): self.username = user self.password = pw # remember if we've tried the username/password before self.numTries = 0 - + # call the base class urllib.FancyURLopener.__init__(self) @@ -56,7 +59,7 @@ class AuthURLOpener(SickBeardURLopener): if self.numTries == 0: self.numTries = 1 return (self.username, self.password) - + # if we've tried before then return blank which cancels the request else: return ('', '') @@ -66,6 +69,7 @@ class AuthURLOpener(SickBeardURLopener): self.numTries = 0 return SickBeardURLopener.open(self, url) + class SearchResult: """ Represents a search result from an indexer. @@ -112,18 +116,21 @@ class SearchResult: def fileName(self): return self.episodes[0].prettyName() + "." + self.resultType + class NZBSearchResult(SearchResult): """ Regular NZB result with an URL to the NZB """ resultType = "nzb" + class NZBDataSearchResult(SearchResult): """ NZB result where the actual NZB XML data is stored in the extraInfo """ resultType = "nzbdata" + class TorrentSearchResult(SearchResult): """ Torrent result with an URL to the torrent @@ -131,27 +138,47 @@ class TorrentSearchResult(SearchResult): resultType = "torrent" +class AllShowsListUI: + """ + This class is for tvdb-api. Instead of prompting with a UI to pick the + desired result out of a list of shows it tries to be smart about it + based on what shows are in SB. + """ + + def __init__(self, config, log=None): + self.config = config + self.log = log + + def selectSeries(self, allSeries): + # get all available shows + if allSeries: + return allSeries + + class ShowListUI: """ This class is for tvdb-api. 
Instead of prompting with a UI to pick the desired result out of a list of shows it tries to be smart about it based on what shows are in SB. """ + def __init__(self, config, log=None): self.config = config self.log = log def selectSeries(self, allSeries): - idList = [x.indexerid for x in sickbeard.showList] + if sickbeard.showList: + idList = [x.indexerid for x in sickbeard.showList] - # try to pick a show that's in my show list - for curShow in allSeries: - if int(curShow['id']) in idList: - return curShow + # try to pick a show that's in my show list + for curShow in allSeries: + if int(curShow['id']) in idList: + return curShow - # if nothing matches then just go with the first match I guess + # if nothing matches then return everything return allSeries[0] + class Proper: def __init__(self, name, url, date): self.name = name @@ -166,7 +193,8 @@ class Proper: self.episode = -1 def __str__(self): - return str(self.date)+" "+self.name+" "+str(self.season)+"x"+str(self.episode)+" of "+str(self.indexerid+" from "+self.indexer) + return str(self.date) + " " + self.name + " " + str(self.season) + "x" + str(self.episode) + " of " + str( + self.indexerid) + " from " + str(sickbeard.indexerApi(self.indexer).name) class ErrorViewer(): @@ -188,10 +216,12 @@ class ErrorViewer(): def clear(): ErrorViewer.errors = [] + class UIError(): """ Represents an error to be displayed in the web UI. """ + def __init__(self, message): self.message = message self.time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') diff --git a/sickbeard/clients/__init__.py b/sickbeard/clients/__init__.py index d491789a..4e2697cb 100644 --- a/sickbeard/clients/__init__.py +++ b/sickbeard/clients/__init__.py @@ -21,7 +21,7 @@ __all__ = ['utorrent', 'deluge', 'download_station', 'rtorrent' - ] +] import sickbeard @@ -62,24 +62,24 @@ http_error_code = { 505: 'HTTP Version Not Supported', } -default_host = {'utorrent':'http://localhost:8000', - 'transmission' :'http://localhost:9091', - 'deluge':'http://localhost:8112', +default_host = {'utorrent': 'http://localhost:8000', + 'transmission': 'http://localhost:9091', + 'deluge': 'http://localhost:8112', 'download_station': 'http://localhost:5000', 'rtorrent': 'scgi://localhost:5000', - } +} + def getClientModule(name): - name = name.lower() prefix = "sickbeard.clients." 
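getClientModule and getClientIstance (the misspelling is the name used throughout the codebase) form a small factory: the torrent client module is resolved by name at runtime, and its module-level api object names the class to hand back. A usage sketch follows; send_to_client is illustrative, not a function from this patch, and it assumes a SearchResult-shaped object as input.

# Hypothetical caller; getClientIstance returns the client *class*,
# so it must be instantiated before use.
from sickbeard import clients

def send_to_client(name, result):
    # name must be one of clients.__all__, e.g. 'utorrent' or 'transmission'
    client_class = clients.getClientIstance(name)
    client = client_class()  # falls back to TORRENT_HOST/USERNAME/PASSWORD
    return client.sendTORRENT(result)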
- - return __import__(prefix+name, fromlist=__all__) + + return __import__(prefix + name, fromlist=__all__) + def getClientIstance(name): - module = getClientModule(name) className = module.api.__class__.__name__ - + return getattr(module, className) \ No newline at end of file diff --git a/sickbeard/clients/deluge.py b/sickbeard/clients/deluge.py index 137641b9..862c223f 100644 --- a/sickbeard/clients/deluge.py +++ b/sickbeard/clients/deluge.py @@ -23,44 +23,43 @@ import sickbeard from sickbeard import logger from sickbeard.clients.generic import GenericClient -class DelugeAPI(GenericClient): +class DelugeAPI(GenericClient): def __init__(self, host=None, username=None, password=None): super(DelugeAPI, self).__init__('Deluge', host, username, password) - + self.url = self.host + 'json' - + def _get_auth(self): - + post_data = json.dumps({"method": "auth.login", "params": [self.password], "id": 1 - }) - try: - self.response = self.session.post(self.url, data=post_data.encode('utf-8')) - except: - return None - - self.auth = self.response.json()["result"] - - - post_data = json.dumps({"method": "web.connected", - "params": [], - "id": 10 - }) + }) try: self.response = self.session.post(self.url, data=post_data.encode('utf-8')) except: return None - + + self.auth = self.response.json()["result"] + + post_data = json.dumps({"method": "web.connected", + "params": [], + "id": 10 + }) + try: + self.response = self.session.post(self.url, data=post_data.encode('utf-8')) + except: + return None + connected = self.response.json()['result'] - + if not connected: post_data = json.dumps({"method": "web.get_hosts", "params": [], "id": 11 - }) + }) try: self.response = self.session.post(self.url, data=post_data.encode('utf-8')) except: @@ -69,21 +68,20 @@ class DelugeAPI(GenericClient): if len(hosts) == 0: logger.log(self.name + u': WebUI does not contain daemons', logger.ERROR) return None - + post_data = json.dumps({"method": "web.connect", "params": [hosts[0][0]], "id": 11 - }) + }) try: self.response = self.session.post(self.url, data=post_data.encode('utf-8')) except: return None - - + post_data = json.dumps({"method": "web.connected", "params": [], "id": 10 - }) + }) try: self.response = self.session.post(self.url, data=post_data.encode('utf-8')) except: @@ -93,86 +91,90 @@ class DelugeAPI(GenericClient): if not connected: logger.log(self.name + u': WebUI could not connect to daemon', logger.ERROR) return None - + return self.auth - + def _add_torrent_uri(self, result): post_data = json.dumps({"method": "core.add_torrent_magnet", - "params": [result.url,{"move_completed": "true", "move_completed_path": sickbeard.TV_DOWNLOAD_DIR}], + "params": [result.url, {"move_completed": "true", + "move_completed_path": sickbeard.TV_DOWNLOAD_DIR}], "id": 2 - }) + }) self._request(method='post', data=post_data) - + result.hash = self.response.json()['result'] - + return self.response.json()['result'] - + def _add_torrent_file(self, result): post_data = json.dumps({"method": "core.add_torrent_file", - "params": [result.name + '.torrent', b64encode(result.content),{"move_completed": "true", "move_completed_path": sickbeard.TV_DOWNLOAD_DIR}], + "params": [result.name + '.torrent', b64encode(result.content), + {"move_completed": "true", + "move_completed_path": sickbeard.TV_DOWNLOAD_DIR}], "id": 2 - }) + }) self._request(method='post', data=post_data) - + result.hash = self.response.json()['result'] - + return self.response.json()['result'] - + def _set_torrent_label(self, result): - + label = 
sickbeard.TORRENT_LABEL.lower() if label: # check if label already exists and create it if not post_data = json.dumps({"method": 'label.get_labels', "params": [], "id": 3 - }) + }) self._request(method='post', data=post_data) labels = self.response.json()['result'] - + if labels != None: if label not in labels: - logger.log(self.name + ': ' + label +u" label does not exist in Deluge we must add it", logger.DEBUG) + logger.log(self.name + ': ' + label + u" label does not exist in Deluge we must add it", + logger.DEBUG) post_data = json.dumps({"method": 'label.add', "params": [label], "id": 4 - }) + }) self._request(method='post', data=post_data) - logger.log(self.name + ': ' + label +u" label added to Deluge", logger.DEBUG) - + logger.log(self.name + ': ' + label + u" label added to Deluge", logger.DEBUG) + # add label to torrent - post_data = json.dumps({ "method": 'label.set_torrent', - "params": [result.hash, label], - "id": 5 - }) + post_data = json.dumps({"method": 'label.set_torrent', + "params": [result.hash, label], + "id": 5 + }) self._request(method='post', data=post_data) - logger.log(self.name + ': ' + label +u" label added to torrent", logger.DEBUG) + logger.log(self.name + ': ' + label + u" label added to torrent", logger.DEBUG) else: logger.log(self.name + ': ' + u"label plugin not detected", logger.DEBUG) return False - + return not self.response.json()['error'] - + def _set_torrent_ratio(self, result): if sickbeard.TORRENT_RATIO: post_data = json.dumps({"method": "core.set_torrent_stop_at_ratio", "params": [result.hash, True], "id": 5 - }) + }) self._request(method='post', data=post_data) - + post_data = json.dumps({"method": "core.set_torrent_stop_ratio", - "params": [result.hash,float(sickbeard.TORRENT_RATIO)], + "params": [result.hash, float(sickbeard.TORRENT_RATIO)], "id": 6 - }) + }) self._request(method='post', data=post_data) return not self.response.json()['error'] - + return True def _set_torrent_path(self, result): @@ -181,30 +183,31 @@ class DelugeAPI(GenericClient): post_data = json.dumps({"method": "core.set_torrent_move_completed", "params": [result.hash, True], "id": 7 - }) + }) self._request(method='post', data=post_data) - + post_data = json.dumps({"method": "core.set_torrent_move_completed_path", "params": [result.hash, sickbeard.TORRENT_PATH], "id": 8 - }) + }) self._request(method='post', data=post_data) - + return not self.response.json()['error'] - + return True - + def _set_torrent_pause(self, result): - + if sickbeard.TORRENT_PAUSED: post_data = json.dumps({"method": "core.pause_torrent", "params": [[result.hash]], "id": 9 - }) + }) self._request(method='post', data=post_data) return not self.response.json()['error'] - - return True + + return True + api = DelugeAPI() \ No newline at end of file diff --git a/sickbeard/clients/download_station.py b/sickbeard/clients/download_station.py index 6a2e6123..fdc329f0 100644 --- a/sickbeard/clients/download_station.py +++ b/sickbeard/clients/download_station.py @@ -23,49 +23,50 @@ import sickbeard from sickbeard.clients.generic import GenericClient + class DownloadStationAPI(GenericClient): - def __init__(self, host=None, username=None, password=None): - + super(DownloadStationAPI, self).__init__('DownloadStation', host, username, password) self.url = self.host + 'webapi/DownloadStation/task.cgi' - + def _get_auth(self): - + auth_url = self.host + 'webapi/auth.cgi?api=SYNO.API.Auth&version=2&method=login&account=' + self.username + '&passwd=' + self.password + '&session=DownloadStation&format=sid' - + try: 
self.response = self.session.get(auth_url) self.auth = self.response.json()['data']['sid'] except: return None - + return self.auth - + def _add_torrent_uri(self, result): - - data = {'api':'SYNO.DownloadStation.Task', - 'version':'1', 'method':'create', - 'session':'DownloadStation', - '_sid':self.auth, - 'uri':result.url - } + + data = {'api': 'SYNO.DownloadStation.Task', + 'version': '1', 'method': 'create', + 'session': 'DownloadStation', + '_sid': self.auth, + 'uri': result.url + } self._request(method='post', data=data) - + return self.response.json()['success'] - + def _add_torrent_file(self, result): - data = {'api':'SYNO.DownloadStation.Task', - 'version':'1', - 'method':'create', - 'session':'DownloadStation', - '_sid':self.auth - } - files = {'file':(result.name + '.torrent', result.content)} + data = {'api': 'SYNO.DownloadStation.Task', + 'version': '1', + 'method': 'create', + 'session': 'DownloadStation', + '_sid': self.auth + } + files = {'file': (result.name + '.torrent', result.content)} self._request(method='post', data=data, files=files) - + return self.response.json()['success'] + api = DownloadStationAPI() diff --git a/sickbeard/clients/generic.py b/sickbeard/clients/generic.py index 5ba4e0de..d7585a1e 100644 --- a/sickbeard/clients/generic.py +++ b/sickbeard/clients/generic.py @@ -10,36 +10,40 @@ from sickbeard.clients import http_error_code from lib.bencode import bencode, bdecode from lib import requests + class GenericClient(object): - def __init__(self, name, host=None, username=None, password=None): self.name = name self.username = sickbeard.TORRENT_USERNAME if username is None else username self.password = sickbeard.TORRENT_PASSWORD if password is None else password self.host = sickbeard.TORRENT_HOST if host is None else host - + self.url = None self.response = None self.auth = None self.last_time = time.time() self.session = requests.session() self.session.auth = (self.username, self.password) - + def _request(self, method='get', params={}, data=None, files=None): if time.time() > self.last_time + 1800 or not self.auth: self.last_time = time.time() self._get_auth() - - logger.log(self.name + u': Requested a ' + method.upper() + ' connection to url '+ self.url + ' with Params= ' + str(params) + ' Data=' + str(data if data else 'None')[0:99] + ('...' if len(data if data else 'None') > 200 else ''), logger.DEBUG) - + + logger.log( + self.name + u': Requested a ' + method.upper() + ' connection to url ' + self.url + ' with Params= ' + str( + params) + ' Data=' + str(data if data else 'None')[0:99] + ( + '...' 
if len(data if data else 'None') > 200 else ''), logger.DEBUG) + if not self.auth: - logger.log(self.name + u': Authentication Failed' , logger.ERROR) + logger.log(self.name + u': Authentication Failed', logger.ERROR) return False - + try: - self.response = self.session.__getattribute__(method)(self.url, params=params, data=data, files=files, timeout=10, verify=False) + self.response = self.session.__getattribute__(method)(self.url, params=params, data=data, files=files, + timeout=10, verify=False) except requests.exceptions.ConnectionError, e: logger.log(self.name + u': Unable to connect ' + ex(e), logger.ERROR) return False @@ -53,19 +57,20 @@ class GenericClient(object): logger.log(self.name + u': Connection Timeout ' + ex(e), logger.ERROR) return False except Exception, e: - logger.log(self.name + u': Unknown exception raised when send torrent to ' + self.name + ': ' + ex(e), logger.ERROR) + logger.log(self.name + u': Unknown exception raised when send torrent to ' + self.name + ': ' + ex(e), + logger.ERROR) return False if self.response.status_code == 401: - logger.log(self.name + u': Invalid Username or Password, check your config', logger.ERROR) + logger.log(self.name + u': Invalid Username or Password, check your config', logger.ERROR) return False - + if self.response.status_code in http_error_code.keys(): logger.log(self.name + u': ' + http_error_code[self.response.status_code], logger.DEBUG) return False - - logger.log(self.name + u': Response to '+ method.upper() + ' request is ' + self.response.text, logger.DEBUG) - + + logger.log(self.name + u': Response to ' + method.upper() + ' request is ' + self.response.text, logger.DEBUG) + return True def _get_auth(self): @@ -73,20 +78,20 @@ class GenericClient(object): This should be overridden and should return the auth_id needed for the client """ return None - + def _add_torrent_uri(self, result): """ This should be overridden should return the True/False from the client when a torrent is added via url (magnet or .torrent link) """ - return False - + return False + def _add_torrent_file(self, result): """ This should be overridden should return the True/False from the client when a torrent is added via result.content (only .torrent file) """ - return False + return False def _set_torrent_label(self, result): """ @@ -94,7 +99,7 @@ class GenericClient(object): when a torrent is set with label """ return True - + def _set_torrent_ratio(self, result): """ This should be overridden should return the True/False from the client @@ -113,18 +118,18 @@ class GenericClient(object): """ This should be overridden should return the True/False from the client when a torrent is set with path - """ + """ return True - + def _set_torrent_pause(self, result): """ This should be overridden should return the True/False from the client when a torrent is set with pause """ return True - + def _get_torrent_hash(self, result): - + if result.url.startswith('magnet'): torrent_hash = re.findall('urn:btih:([\w]{32,40})', result.url)[0] if len(torrent_hash) == 32: @@ -134,38 +139,38 @@ class GenericClient(object): torrent_hash = sha1(bencode(info)).hexdigest() return torrent_hash - + def sendTORRENT(self, result): - + r_code = False logger.log(u'Calling ' + self.name + ' Client', logger.DEBUG) if not self._get_auth(): - logger.log(self.name + u': Authentication Failed' , logger.ERROR) + logger.log(self.name + u': Authentication Failed', logger.ERROR) return r_code - + try: result.hash = self._get_torrent_hash(result) - + if result.url.startswith('magnet'): 
r_code = self._add_torrent_uri(result) else: r_code = self._add_torrent_file(result) - + if not r_code: return False - + if not self._set_torrent_pause(result): logger.log(self.name + u': Unable to set the pause for Torrent', logger.ERROR) - + if not self._set_torrent_label(result): logger.log(self.name + u': Unable to set the label for Torrent', logger.ERROR) - + if not self._set_torrent_ratio(result): logger.log(self.name + u': Unable to set the ratio for Torrent', logger.ERROR) - + if not self._set_torrent_path(result): logger.log(self.name + u': Unable to set the path for Torrent', logger.ERROR) @@ -176,7 +181,7 @@ class GenericClient(object): logger.log(self.name + u': Failed Sending Torrent ', logger.ERROR) logger.log(self.name + u': Exception raised when sending torrent: ' + ex(e), logger.DEBUG) return r_code - + return r_code def testAuthentication(self): @@ -186,16 +191,16 @@ class GenericClient(object): except requests.exceptions.ConnectionError, e: return False, 'Error: ' + self.name + ' Connection Error' except (requests.exceptions.MissingSchema, requests.exceptions.InvalidURL): - return False,'Error: Invalid ' + self.name + ' host' + return False, 'Error: Invalid ' + self.name + ' host' - if self.response.status_code == 401: - return False, 'Error: Invalid ' + self.name + ' Username or Password, check your config!' - - try: - self._get_auth() - if self.response.status_code == 200 and self.auth: - return True, 'Success: Connected and Authenticated' - else: - return False, 'Error: Unable to get ' + self.name + ' Authentication, check your config!' - except Exception: - return False, 'Error: Unable to connect to '+ self.name + if self.response.status_code == 401: + return False, 'Error: Invalid ' + self.name + ' Username or Password, check your config!' + + try: + self._get_auth() + if self.response.status_code == 200 and self.auth: + return True, 'Success: Connected and Authenticated' + else: + return False, 'Error: Unable to get ' + self.name + ' Authentication, check your config!' 
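sendTORRENT above derives result.hash before dispatching to _add_torrent_uri or _add_torrent_file, and _get_torrent_hash handles both magnet links and raw .torrent payloads. The 32-character (base32) branch is cut off in this hunk, so in the sketch below the b16encode/b32decode normalisation is an assumption rather than a quote of the source:

# Sketch of GenericClient._get_torrent_hash; the base32 branch body is not
# visible in this hunk, so the b16encode/b32decode step is an assumption.
import re
from hashlib import sha1
from base64 import b16encode, b32decode

from lib.bencode import bencode, bdecode  # bundled with Sick Beard

def get_torrent_hash(url, content=None):
    if url.startswith('magnet'):
        torrent_hash = re.findall('urn:btih:([\w]{32,40})', url)[0]
        if len(torrent_hash) == 32:
            # base32-encoded btih; normalise to a 40-char hex digest
            torrent_hash = b16encode(b32decode(torrent_hash)).lower()
        return torrent_hash
    # .torrent payload: the info-hash is the SHA-1 of the bencoded info dict
    info = bdecode(content)['info']
    return sha1(bencode(info)).hexdigest()

Either way the caller ends up with a hex info-hash, which is what the per-client label/ratio/path calls above key their follow-up RPC requests on.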
+ except Exception: + return False, 'Error: Unable to connect to ' + self.name diff --git a/sickbeard/clients/rtorrent.py b/sickbeard/clients/rtorrent.py index ac39df2c..30b19a0f 100644 --- a/sickbeard/clients/rtorrent.py +++ b/sickbeard/clients/rtorrent.py @@ -23,8 +23,8 @@ from sickbeard.clients.generic import GenericClient from lib.rtorrent import RTorrent from lib.rtorrent.err import MethodError -class rTorrentAPI(GenericClient): +class rTorrentAPI(GenericClient): def __init__(self, host=None, username=None, password=None): super(rTorrentAPI, self).__init__('rTorrent', host, username, password) @@ -112,8 +112,8 @@ class rTorrentAPI(GenericClient): if not result: return False - # group_name = 'sb_test'.lower() ##### Use provider instead of _test - # if not self._set_torrent_ratio(group_name): + # group_name = 'sb_test'.lower() ##### Use provider instead of _test + # if not self._set_torrent_ratio(group_name): # return False # Send request to rTorrent @@ -145,36 +145,36 @@ class rTorrentAPI(GenericClient): def _set_torrent_ratio(self, name): # if not name: - # return False + # return False # # if not self.auth: - # return False + # return False # # views = self.auth.get_views() # # if name not in views: - # self.auth.create_group(name) + # self.auth.create_group(name) # group = self.auth.get_group(name) # ratio = int(float(sickbeard.TORRENT_RATIO) * 100) # # try: - # if ratio > 0: - # - # # Explicitly set all group options to ensure it is setup correctly - # group.set_upload('1M') - # group.set_min(ratio) - # group.set_max(ratio) - # group.set_command('d.stop') - # group.enable() - # else: - # # Reset group action and disable it - # group.set_command() - # group.disable() + # if ratio > 0: + # + # # Explicitly set all group options to ensure it is setup correctly + # group.set_upload('1M') + # group.set_min(ratio) + # group.set_max(ratio) + # group.set_command('d.stop') + # group.enable() + # else: + # # Reset group action and disable it + # group.set_command() + # group.disable() # # except: - # return False + # return False return True @@ -187,6 +187,7 @@ class rTorrentAPI(GenericClient): else: return False, 'Error: Unable to get ' + self.name + ' Authentication, check your config!' 
except Exception: - return False, 'Error: Unable to connect to '+ self.name + return False, 'Error: Unable to connect to ' + self.name + api = rTorrentAPI() diff --git a/sickbeard/clients/transmission.py b/sickbeard/clients/transmission.py index 7b6318ad..6bb490f0 100644 --- a/sickbeard/clients/transmission.py +++ b/sickbeard/clients/transmission.py @@ -23,91 +23,91 @@ from base64 import b64encode import sickbeard from sickbeard.clients.generic import GenericClient + class TransmissionAPI(GenericClient): - def __init__(self, host=None, username=None, password=None): - + super(TransmissionAPI, self).__init__('Transmission', host, username, password) - + self.url = self.host + 'transmission/rpc' def _get_auth(self): - post_data = json.dumps({'method': 'session-get',}) + post_data = json.dumps({'method': 'session-get', }) - try: + try: self.response = self.session.post(self.url, data=post_data.encode('utf-8')) self.auth = re.search('X-Transmission-Session-Id:\s*(\w+)', self.response.text).group(1) except: - return None - + return None + self.session.headers.update({'x-transmission-session-id': self.auth}) - + #Validating Transmission authorization post_data = json.dumps({'arguments': {}, 'method': 'session-get', - }) - self._request(method='post', data=post_data) - - return self.auth + }) + self._request(method='post', data=post_data) + + return self.auth def _add_torrent_uri(self, result): - arguments = { 'filename': result.url, - 'paused': 1 if sickbeard.TORRENT_PAUSED else 0, - 'download-dir': sickbeard.TORRENT_PATH - } - post_data = json.dumps({ 'arguments': arguments, - 'method': 'torrent-add', - }) + arguments = {'filename': result.url, + 'paused': 1 if sickbeard.TORRENT_PAUSED else 0, + 'download-dir': sickbeard.TORRENT_PATH + } + post_data = json.dumps({'arguments': arguments, + 'method': 'torrent-add', + }) self._request(method='post', data=post_data) return self.response.json()['result'] == "success" def _add_torrent_file(self, result): - arguments = { 'metainfo': b64encode(result.content), - 'paused': 1 if sickbeard.TORRENT_PAUSED else 0, - 'download-dir': sickbeard.TORRENT_PATH - } + arguments = {'metainfo': b64encode(result.content), + 'paused': 1 if sickbeard.TORRENT_PAUSED else 0, + 'download-dir': sickbeard.TORRENT_PATH + } post_data = json.dumps({'arguments': arguments, 'method': 'torrent-add', - }) + }) self._request(method='post', data=post_data) - + return self.response.json()['result'] == "success" def _set_torrent_ratio(self, result): - + torrent_id = self._get_torrent_hash(result) - + if sickbeard.TORRENT_RATIO == '': # Use global settings ratio = None mode = 0 elif float(sickbeard.TORRENT_RATIO) == 0: ratio = 0 - mode = 2 + mode = 2 elif float(sickbeard.TORRENT_RATIO) > 0: ratio = float(sickbeard.TORRENT_RATIO) - mode = 1 # Stop seeding at seedRatioLimit + mode = 1 # Stop seeding at seedRatioLimit - arguments = { 'ids': [torrent_id], - 'seedRatioLimit': ratio, - 'seedRatioMode': mode - } + arguments = {'ids': [torrent_id], + 'seedRatioLimit': ratio, + 'seedRatioMode': mode + } post_data = json.dumps({'arguments': arguments, 'method': 'torrent-set', - }) - self._request(method='post', data=post_data) - - return self.response.json()['result'] == "success" + }) + self._request(method='post', data=post_data) + + return self.response.json()['result'] == "success" def _set_torrent_priority(self, result): torrent_id = self._get_torrent_hash(result) - arguments = { 'ids': [torrent_id]} + arguments = {'ids': [torrent_id]} if result.priority == -1: arguments['priority-low'] = [] 
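TransmissionAPI._get_auth above relies on Transmission's CSRF guard: the first RPC call is rejected with a 409 whose error body names an X-Transmission-Session-Id, which is then pinned on the session headers for every later request. A standalone sketch of that handshake; the host URL and magnet link are placeholders, not values from this patch:

# Minimal reproduction of the Transmission session-id handshake.
import json
import re

from lib import requests  # bundled requests, as imported in generic.py

url = 'http://localhost:9091/transmission/rpc'
session = requests.session()

# the first call comes back 409, but its body names the session id
response = session.post(url, data=json.dumps({'method': 'session-get'}))
auth = re.search('X-Transmission-Session-Id:\s*(\w+)', response.text).group(1)
session.headers.update({'x-transmission-session-id': auth})

# every later call now authenticates; torrent-add answers {'result': 'success'}
arguments = {'filename': 'magnet:?xt=urn:btih:...', 'paused': 0}
response = session.post(url, data=json.dumps({'arguments': arguments,
                                              'method': 'torrent-add'}))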
@@ -120,13 +120,13 @@ class TransmissionAPI(GenericClient): arguments['bandwidthPriority'] = 1 else: arguments['priority-normal'] = [] - + post_data = json.dumps({'arguments': arguments, 'method': 'torrent-set', - }) - self._request(method='post', data=post_data) - - return self.response.json()['result'] == "success" - + }) + self._request(method='post', data=post_data) + + return self.response.json()['result'] == "success" + api = TransmissionAPI() diff --git a/sickbeard/clients/utorrent.py b/sickbeard/clients/utorrent.py index ac5f0cac..9127671a 100644 --- a/sickbeard/clients/utorrent.py +++ b/sickbeard/clients/utorrent.py @@ -21,56 +21,57 @@ import re import sickbeard from sickbeard.clients.generic import GenericClient + class uTorrentAPI(GenericClient): - def __init__(self, host=None, username=None, password=None): - + super(uTorrentAPI, self).__init__('uTorrent', host, username, password) - + self.url = self.host + 'gui/' - + def _request(self, method='get', params={}, files=None): - params.update({'token':self.auth}) + params.update({'token': self.auth}) return super(uTorrentAPI, self)._request(method=method, params=params, files=files) def _get_auth(self): - try: + try: self.response = self.session.get(self.url + 'token.html') self.auth = re.findall("(.*?) x * 100: return (status - x * 100, x) @@ -252,18 +247,23 @@ class Quality: FAILED = None SNATCHED_BEST = None + Quality.DOWNLOADED = [Quality.compositeStatus(DOWNLOADED, x) for x in Quality.qualityStrings.keys()] Quality.SNATCHED = [Quality.compositeStatus(SNATCHED, x) for x in Quality.qualityStrings.keys()] Quality.SNATCHED_PROPER = [Quality.compositeStatus(SNATCHED_PROPER, x) for x in Quality.qualityStrings.keys()] Quality.FAILED = [Quality.compositeStatus(FAILED, x) for x in Quality.qualityStrings.keys()] Quality.SNATCHED_BEST = [Quality.compositeStatus(SNATCHED_BEST, x) for x in Quality.qualityStrings.keys()] -SD = Quality.combineQualities([Quality.SDTV, Quality.SDDVD], []) -HD = Quality.combineQualities([Quality.HDTV, Quality.FULLHDTV, Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.HDBLURAY, Quality.FULLHDBLURAY], []) # HD720p + HD1080p -HD720p = Quality.combineQualities([Quality.HDTV, Quality.HDWEBDL, Quality.HDBLURAY], []) -HD1080p = Quality.combineQualities([Quality.FULLHDTV, Quality.FULLHDWEBDL, Quality.FULLHDBLURAY], []) -ANY = Quality.combineQualities([Quality.SDTV, Quality.SDDVD, Quality.HDTV, Quality.FULLHDTV, Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.HDBLURAY, Quality.FULLHDBLURAY, Quality.UNKNOWN], []) # SD + HD - +SD = Quality.combineQualities([Quality.SDTV, Quality.SDDVD], []) +HD = Quality.combineQualities( + [Quality.HDTV, Quality.FULLHDTV, Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.HDBLURAY, Quality.FULLHDBLURAY], + []) # HD720p + HD1080p +HD720p = Quality.combineQualities([Quality.HDTV, Quality.HDWEBDL, Quality.HDBLURAY], []) +HD1080p = Quality.combineQualities([Quality.FULLHDTV, Quality.FULLHDWEBDL, Quality.FULLHDBLURAY], []) +ANY = Quality.combineQualities( + [Quality.SDTV, Quality.SDDVD, Quality.HDTV, Quality.FULLHDTV, Quality.HDWEBDL, Quality.FULLHDWEBDL, + Quality.HDBLURAY, Quality.FULLHDBLURAY, Quality.UNKNOWN], []) # SD + HD + # legacy template, cant remove due to reference in mainDB upgrade? 
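Each quality preset below (SD, HD, HD720p, HD1080p, ANY, and the legacy BEST template) is a single integer: combineQualities ORs the allowed qualities into the low 16 bits and shifts the archive ("best") qualities into the high 16. combineQualities itself sits outside this diff, so the following is an assumed equivalent, not the source implementation:

# Assumed-equivalent sketch of Quality.combineQualities (not shown in
# this diff): allowed qualities fill the low 16 bits, archive the high.
SDTV, SDDVD, HDTV = 1, 1 << 1, 1 << 2  # per the Quality bit flags

def combine_qualities(any_qualities, best_qualities):
    any_mask = 0
    for quality in any_qualities:
        any_mask |= quality
    best_mask = 0
    for quality in best_qualities:
        best_mask |= quality
    return any_mask | (best_mask << 16)

# the SD preset is therefore just the integer 3 (SDTV | SDDVD)
assert combine_qualities([SDTV, SDDVD], []) == SDTV | SDDVD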
BEST = Quality.combineQualities([Quality.SDTV, Quality.HDTV, Quality.HDWEBDL], [Quality.HDTV]) @@ -274,6 +274,7 @@ qualityPresetStrings = {SD: "SD", HD1080p: "HD1080p", ANY: "Any"} + class StatusStrings: def __init__(self): self.statusStrings = {UNKNOWN: "Unknown", @@ -302,18 +303,20 @@ class StatusStrings: def has_key(self, name): return name in self.statusStrings or name in Quality.DOWNLOADED or name in Quality.SNATCHED or name in Quality.SNATCHED_PROPER or name in Quality.SNATCHED_BEST + statusStrings = StatusStrings() + class Overview: - UNAIRED = UNAIRED # 1 + UNAIRED = UNAIRED # 1 QUAL = 2 - WANTED = WANTED # 3 + WANTED = WANTED # 3 GOOD = 4 - SKIPPED = SKIPPED # 5 - + SKIPPED = SKIPPED # 5 + # For both snatched statuses. Note: SNATCHED/QUAL have same value and break dict. SNATCHED = SNATCHED_PROPER = SNATCHED_BEST # 9 - + overviewStrings = {SKIPPED: "skipped", WANTED: "wanted", QUAL: "qual", @@ -325,9 +328,8 @@ class Overview: XML_NSMAP = {'xsi': 'http://www.w3.org/2001/XMLSchema-instance', 'xsd': 'http://www.w3.org/2001/XMLSchema'} - countryList = {'Australia': 'AU', 'Canada': 'CA', 'USA': 'US' - } +} diff --git a/sickbeard/config.py b/sickbeard/config.py index ad578f6a..34c254fa 100644 --- a/sickbeard/config.py +++ b/sickbeard/config.py @@ -29,9 +29,9 @@ from sickbeard import db import sickbeard naming_ep_type = ("%(seasonnumber)dx%(episodenumber)02d", - "s%(seasonnumber)02de%(episodenumber)02d", - "S%(seasonnumber)02dE%(episodenumber)02d", - "%(seasonnumber)02dx%(episodenumber)02d") + "s%(seasonnumber)02de%(episodenumber)02d", + "S%(seasonnumber)02dE%(episodenumber)02d", + "%(seasonnumber)02dx%(episodenumber)02d") naming_ep_type_text = ("1x02", "s01e02", "S01E02", "01x02") naming_multi_ep_type = {0: ["-%(episodenumber)02d"] * len(naming_ep_type), @@ -44,7 +44,6 @@ naming_sep_type_text = (" - ", "space") def change_HTTPS_CERT(https_cert): - if https_cert == '': sickbeard.HTTPS_CERT = '' return True @@ -60,7 +59,6 @@ def change_HTTPS_CERT(https_cert): def change_HTTPS_KEY(https_key): - if https_key == '': sickbeard.HTTPS_KEY = '' return True @@ -76,7 +74,6 @@ def change_HTTPS_KEY(https_key): def change_LOG_DIR(log_dir, web_log): - log_dir_changed = False abs_log_dir = os.path.normpath(os.path.join(sickbeard.DATA_DIR, log_dir)) web_log_value = checkbox_to_value(web_log) @@ -110,7 +107,6 @@ def change_LOG_DIR(log_dir, web_log): def change_NZB_DIR(nzb_dir): - if nzb_dir == '': sickbeard.NZB_DIR = '' return True @@ -126,7 +122,6 @@ def change_NZB_DIR(nzb_dir): def change_TORRENT_DIR(torrent_dir): - if torrent_dir == '': sickbeard.TORRENT_DIR = '' return True @@ -142,7 +137,6 @@ def change_TORRENT_DIR(torrent_dir): def change_TV_DOWNLOAD_DIR(tv_download_dir): - if tv_download_dir == '': sickbeard.TV_DOWNLOAD_DIR = '' return True @@ -158,7 +152,6 @@ def change_TV_DOWNLOAD_DIR(tv_download_dir): def change_SEARCH_FREQUENCY(freq): - sickbeard.SEARCH_FREQUENCY = to_int(freq, default=sickbeard.DEFAULT_SEARCH_FREQUENCY) if sickbeard.SEARCH_FREQUENCY < sickbeard.MIN_SEARCH_FREQUENCY: @@ -169,7 +162,6 @@ def change_SEARCH_FREQUENCY(freq): def change_VERSION_NOTIFY(version_notify): - oldSetting = sickbeard.VERSION_NOTIFY sickbeard.VERSION_NOTIFY = version_notify @@ -235,7 +227,6 @@ def clean_host(host, default_port=None): def clean_hosts(hosts, default_port=None): - cleaned_hosts = [] for cur_host in [x.strip() for x in hosts.split(",")]: @@ -344,7 +335,7 @@ def check_setting_str(config, cfg_name, item_name, def_val, log=True): encryption_version = sickbeard.ENCRYPTION_VERSION else: 
encryption_version = 0 - + try: my_val = helpers.decrypt(config[cfg_name][item_name], encryption_version) except: @@ -363,7 +354,6 @@ def check_setting_str(config, cfg_name, item_name, def_val, log=True): class ConfigMigrator(): - def __init__(self, config_obj): """ Initializes a config migrator that can take the config from the version indicated in the config @@ -380,7 +370,7 @@ class ConfigMigrator(): 3: 'Rename omgwtfnzb variables', 4: 'Add newznab catIDs', 5: 'Metadata update' - } + } def migrate_config(self): """ @@ -388,7 +378,9 @@ class ConfigMigrator(): """ if self.config_version > self.expected_config_version: - logger.log_error_and_exit(u"Your config version (" + str(self.config_version) + ") has been incremented past what this version of Sick Beard supports (" + str(self.expected_config_version) + ").\n" + \ + logger.log_error_and_exit(u"Your config version (" + str( + self.config_version) + ") has been incremented past what this version of Sick Beard supports (" + str( + self.expected_config_version) + ").\n" + \ "If you have used other forks or a newer version of Sick Beard, your config file may be unusable due to their modifications.") sickbeard.CONFIG_VERSION = self.config_version @@ -452,7 +444,8 @@ class ConfigMigrator(): new_season_format = new_season_format.replace('09', '%0S') new_season_format = new_season_format.replace('9', '%S') - logger.log(u"Changed season folder format from " + old_season_format + " to " + new_season_format + ", prepending it to your naming config") + logger.log( + u"Changed season folder format from " + old_season_format + " to " + new_season_format + ", prepending it to your naming config") sickbeard.NAMING_PATTERN = new_season_format + os.sep + sickbeard.NAMING_PATTERN except (TypeError, ValueError): @@ -552,7 +545,8 @@ class ConfigMigrator(): try: name, url, key, enabled = cur_provider_data.split("|") except ValueError: - logger.log(u"Skipping Newznab provider string: '" + cur_provider_data + "', incorrect format", logger.ERROR) + logger.log(u"Skipping Newznab provider string: '" + cur_provider_data + "', incorrect format", + logger.ERROR) continue if name == 'Sick Beard Index': @@ -623,7 +617,8 @@ class ConfigMigrator(): logger.log(u"Upgrading " + metadata_name + " metadata, new value: " + metadata) else: - logger.log(u"Skipping " + metadata_name + " metadata: '" + metadata + "', incorrect format", logger.ERROR) + logger.log(u"Skipping " + metadata_name + " metadata: '" + metadata + "', incorrect format", + logger.ERROR) metadata = '0|0|0|0|0|0|0|0|0|0' logger.log(u"Setting " + metadata_name + " metadata, new value: " + metadata) diff --git a/sickbeard/databases/cache_db.py b/sickbeard/databases/cache_db.py index 13156b08..46ccfebc 100644 --- a/sickbeard/databases/cache_db.py +++ b/sickbeard/databases/cache_db.py @@ -19,7 +19,7 @@ from sickbeard import db # Add new migrations at the bottom of the list; subclass the previous migration. 
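The cache_db.py migrations that follow (and the mainDB.py ones further down) share one pattern: each migration subclasses the previous one, test() reports whether the schema change is already in place, and execute() applies it, so re-running the chain is idempotent. A toy chain in the same shape; the "example" table is illustrative and not part of the Sick Beard schema:

# Toy migration chain in the shape used by cache_db.py.
from sickbeard import db

class InitialSchema(db.SchemaUpgrade):
    def test(self):
        return self.hasTable("lastUpdate")

    def execute(self):
        self.connection.action(
            "CREATE TABLE lastUpdate (provider TEXT, time NUMERIC)")

class AddExampleTable(InitialSchema):      # subclass the previous migration
    def test(self):
        return self.hasTable("example")    # True once applied, so skipped

    def execute(self):
        self.connection.action(
            "CREATE TABLE example (id INTEGER PRIMARY KEY, name TEXT)")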
-class InitialSchema (db.SchemaUpgrade): +class InitialSchema(db.SchemaUpgrade): def test(self): return self.hasTable("lastUpdate") @@ -36,12 +36,15 @@ class InitialSchema (db.SchemaUpgrade): else: self.connection.action(query[0], query[1:]) + class AddSceneExceptions(InitialSchema): def test(self): return self.hasTable("scene_exceptions") def execute(self): - self.connection.action("CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, tvdb_id INTEGER KEY, show_name TEXT)") + self.connection.action( + "CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, tvdb_id INTEGER KEY, show_name TEXT)") + class AddSceneNameCache(AddSceneExceptions): def test(self): @@ -50,6 +53,7 @@ class AddSceneNameCache(AddSceneExceptions): def execute(self): self.connection.action("CREATE TABLE scene_names (tvdb_id INTEGER, name TEXT)") + class AddNetworkTimezones(AddSceneNameCache): def test(self): return self.hasTable("network_timezones") @@ -57,19 +61,24 @@ class AddNetworkTimezones(AddSceneNameCache): def execute(self): self.connection.action("CREATE TABLE network_timezones (network_name TEXT PRIMARY KEY, timezone TEXT)") + class AddXemNumbering(AddNetworkTimezones): def test(self): return self.hasTable("xem_numbering") def execute(self): - self.connection.action("CREATE TABLE xem_numbering (indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER, scene_season INTEGER, scene_episode INTEGER, PRIMARY KEY (indexer_id, season, episode))") + self.connection.action( + "CREATE TABLE xem_numbering (indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER, scene_season INTEGER, scene_episode INTEGER, PRIMARY KEY (indexer_id, season, episode))") + class AddXemRefresh(AddXemNumbering): def test(self): return self.hasTable("xem_refresh") def execute(self): - self.connection.action("CREATE TABLE xem_refresh (indexer TEXT, indexer_id INTEGER PRIMARY KEY, last_refreshed INTEGER)") + self.connection.action( + "CREATE TABLE xem_refresh (indexer TEXT, indexer_id INTEGER PRIMARY KEY, last_refreshed INTEGER)") + class ConvertSceneExceptionsToIndexerID(AddXemRefresh): def test(self): @@ -77,10 +86,13 @@ class ConvertSceneExceptionsToIndexerID(AddXemRefresh): def execute(self): self.connection.action("ALTER TABLE scene_exceptions RENAME TO tmp_scene_exceptions") - self.connection.action("CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, indexer_id INTEGER KEY, show_name TEXT)") - self.connection.action("INSERT INTO scene_exceptions(exception_id, indexer_id, show_name) SELECT exception_id, tvdb_id, show_name FROM tmp_scene_exceptions") + self.connection.action( + "CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, indexer_id INTEGER KEY, show_name TEXT)") + self.connection.action( + "INSERT INTO scene_exceptions(exception_id, indexer_id, show_name) SELECT exception_id, tvdb_id, show_name FROM tmp_scene_exceptions") self.connection.action("DROP TABLE tmp_scene_exceptions") + class ConvertSceneNamesToIndexerID(ConvertSceneExceptionsToIndexerID): def test(self): return self.hasColumn("scene_names", "indexer_id") diff --git a/sickbeard/databases/failed_db.py b/sickbeard/databases/failed_db.py index 99d6501d..a8719928 100644 --- a/sickbeard/databases/failed_db.py +++ b/sickbeard/databases/failed_db.py @@ -49,6 +49,7 @@ class SizeAndProvider(InitialSchema): class History(SizeAndProvider): """Snatch history that can't be modified by the user""" + def test(self): return self.hasTable('history') @@ -59,6 +60,7 @@ class History(SizeAndProvider): class 
HistoryStatus(History): """Store episode status before snatch to revert to if necessary""" + def test(self): return self.hasColumn('history', 'old_status') diff --git a/sickbeard/databases/mainDB.py b/sickbeard/databases/mainDB.py index a6b73673..ea4f20f5 100644 --- a/sickbeard/databases/mainDB.py +++ b/sickbeard/databases/mainDB.py @@ -26,11 +26,11 @@ from sickbeard import db, common, helpers, logger from sickbeard import encodingKludge as ek from sickbeard.name_parser.parser import NameParser, InvalidNameException -MIN_DB_VERSION = 9 # oldest db version we support migrating from +MIN_DB_VERSION = 9 # oldest db version we support migrating from MAX_DB_VERSION = 27 -class MainSanityCheck(db.DBSanityCheck): +class MainSanityCheck(db.DBSanityCheck): def check(self): self.fix_duplicate_shows() self.fix_duplicate_episodes() @@ -38,18 +38,23 @@ class MainSanityCheck(db.DBSanityCheck): def fix_duplicate_shows(self): - sqlResults = self.connection.select("SELECT show_id, indexer_id, COUNT(indexer_id) as count FROM tv_shows GROUP BY indexer_id HAVING count > 1") + sqlResults = self.connection.select( + "SELECT show_id, indexer_id, COUNT(indexer_id) as count FROM tv_shows GROUP BY indexer_id HAVING count > 1") for cur_duplicate in sqlResults: - logger.log(u"Duplicate show detected! indexer_id: " + str(cur_duplicate["indexer_id"]) + u" count: " + str(cur_duplicate["count"]), logger.DEBUG) + logger.log(u"Duplicate show detected! indexer_id: " + str(cur_duplicate["indexer_id"]) + u" count: " + str( + cur_duplicate["count"]), logger.DEBUG) - cur_dupe_results = self.connection.select("SELECT show_id, indexer_id FROM tv_shows WHERE indexer_id = ? LIMIT ?", - [cur_duplicate["indexer_id"], int(cur_duplicate["count"])-1] - ) + cur_dupe_results = self.connection.select( + "SELECT show_id, indexer_id FROM tv_shows WHERE indexer_id = ? LIMIT ?", + [cur_duplicate["indexer_id"], int(cur_duplicate["count"]) - 1] + ) for cur_dupe_id in cur_dupe_results: - logger.log(u"Deleting duplicate show with indexer_id: " + str(cur_dupe_id["indexer_id"]) + u" show_id: " + str(cur_dupe_id["show_id"])) + logger.log( + u"Deleting duplicate show with indexer_id: " + str(cur_dupe_id["indexer_id"]) + u" show_id: " + str( + cur_dupe_id["show_id"])) self.connection.action("DELETE FROM tv_shows WHERE show_id = ?", [cur_dupe_id["show_id"]]) else: @@ -57,15 +62,20 @@ class MainSanityCheck(db.DBSanityCheck): def fix_duplicate_episodes(self): - sqlResults = self.connection.select("SELECT showid, season, episode, COUNT(showid) as count FROM tv_episodes GROUP BY showid, season, episode HAVING count > 1") + sqlResults = self.connection.select( + "SELECT showid, season, episode, COUNT(showid) as count FROM tv_episodes GROUP BY showid, season, episode HAVING count > 1") for cur_duplicate in sqlResults: - logger.log(u"Duplicate episode detected! showid: " + str(cur_duplicate["showid"]) + u" season: "+str(cur_duplicate["season"]) + u" episode: "+str(cur_duplicate["episode"]) + u" count: " + str(cur_duplicate["count"]), logger.DEBUG) + logger.log(u"Duplicate episode detected! showid: " + str(cur_duplicate["showid"]) + u" season: " + str( + cur_duplicate["season"]) + u" episode: " + str(cur_duplicate["episode"]) + u" count: " + str( + cur_duplicate["count"]), logger.DEBUG) - cur_dupe_results = self.connection.select("SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? and episode = ? 
ORDER BY episode_id DESC LIMIT ?", - [cur_duplicate["showid"], cur_duplicate["season"], cur_duplicate["episode"], int(cur_duplicate["count"])-1] - ) + cur_dupe_results = self.connection.select( + "SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? and episode = ? ORDER BY episode_id DESC LIMIT ?", + [cur_duplicate["showid"], cur_duplicate["season"], cur_duplicate["episode"], + int(cur_duplicate["count"]) - 1] + ) for cur_dupe_id in cur_dupe_results: logger.log(u"Deleting duplicate episode with episode_id: " + str(cur_dupe_id["episode_id"])) @@ -76,16 +86,19 @@ class MainSanityCheck(db.DBSanityCheck): def fix_orphan_episodes(self): - sqlResults = self.connection.select("SELECT episode_id, showid, tv_shows.indexer_id FROM tv_episodes LEFT JOIN tv_shows ON tv_episodes.showid=tv_shows.indexer_id WHERE tv_shows.indexer_id is NULL") + sqlResults = self.connection.select( + "SELECT episode_id, showid, tv_shows.indexer_id FROM tv_episodes LEFT JOIN tv_shows ON tv_episodes.showid=tv_shows.indexer_id WHERE tv_shows.indexer_id is NULL") for cur_orphan in sqlResults: - logger.log(u"Orphan episode detected! episode_id: " + str(cur_orphan["episode_id"]) + " showid: " + str(cur_orphan["showid"]), logger.DEBUG) - logger.log(u"Deleting orphan episode with episode_id: "+str(cur_orphan["episode_id"])) + logger.log(u"Orphan episode detected! episode_id: " + str(cur_orphan["episode_id"]) + " showid: " + str( + cur_orphan["showid"]), logger.DEBUG) + logger.log(u"Deleting orphan episode with episode_id: " + str(cur_orphan["episode_id"])) self.connection.action("DELETE FROM tv_episodes WHERE episode_id = ?", [cur_orphan["episode_id"]]) else: logger.log(u"No orphan episodes, check passed") + def backupDatabase(version): logger.log(u"Backing up database before upgrade") if not helpers.backupVersionedFile(db.dbFilename(), version): @@ -93,12 +106,13 @@ def backupDatabase(version): else: logger.log(u"Proceeding with upgrade") + # ====================== # = Main DB Migrations = # ====================== # Add new migrations at the bottom of the list; subclass the previous migration. -class InitialSchema (db.SchemaUpgrade): +class InitialSchema(db.SchemaUpgrade): def test(self): return self.hasTable("db_version") @@ -115,7 +129,7 @@ class InitialSchema (db.SchemaUpgrade): "CREATE INDEX idx_showid ON tv_episodes (showid);", "CREATE UNIQUE INDEX idx_tvdb_id ON tv_shows (tvdb_id);", "INSERT INTO db_version (db_version) VALUES (18);" - ] + ] for query in queries: self.connection.action(query) @@ -123,20 +137,21 @@ class InitialSchema (db.SchemaUpgrade): cur_db_version = self.checkDBVersion() if cur_db_version < MIN_DB_VERSION: - logger.log_error_and_exit(u"Your database version (" + str(cur_db_version) + ") is too old to migrate from what this version of Sick Beard supports (" + \ + logger.log_error_and_exit(u"Your database version (" + str( + cur_db_version) + ") is too old to migrate from what this version of Sick Beard supports (" + \ str(MIN_DB_VERSION) + ").\n" + \ "Upgrade using a previous version (tag) build 496 to build 501 of Sick Beard first or remove database file to begin fresh." 
- ) + ) if cur_db_version > MAX_DB_VERSION: - logger.log_error_and_exit(u"Your database version (" + str(cur_db_version) + ") has been incremented past what this version of Sick Beard supports (" + \ + logger.log_error_and_exit(u"Your database version (" + str( + cur_db_version) + ") has been incremented past what this version of Sick Beard supports (" + \ str(MAX_DB_VERSION) + ").\n" + \ "If you have used other forks of Sick Beard, your database may be unusable due to their modifications." - ) + ) class AddSizeAndSceneNameFields(InitialSchema): - def test(self): return self.checkDBVersion() >= 10 @@ -160,7 +175,8 @@ class AddSizeAndSceneNameFields(InitialSchema): # if there is no size yet then populate it for us if (not cur_ep["file_size"] or not int(cur_ep["file_size"])) and ek.ek(os.path.isfile, cur_ep["location"]): cur_size = ek.ek(os.path.getsize, cur_ep["location"]) - self.connection.action("UPDATE tv_episodes SET file_size = ? WHERE episode_id = ?", [cur_size, int(cur_ep["episode_id"])]) + self.connection.action("UPDATE tv_episodes SET file_size = ? WHERE episode_id = ?", + [cur_size, int(cur_ep["episode_id"])]) # check each snatch to see if we can use it to get a release name from history_results = self.connection.select("SELECT * FROM history WHERE provider != -1 ORDER BY date ASC") @@ -168,10 +184,12 @@ class AddSizeAndSceneNameFields(InitialSchema): logger.log(u"Adding release name to all episodes still in history") for cur_result in history_results: # find the associated download, if there isn't one then ignore it - download_results = self.connection.select("SELECT resource FROM history WHERE provider = -1 AND showid = ? AND season = ? AND episode = ? AND date > ?", - [cur_result["showid"], cur_result["season"], cur_result["episode"], cur_result["date"]]) + download_results = self.connection.select( + "SELECT resource FROM history WHERE provider = -1 AND showid = ? AND season = ? AND episode = ? AND date > ?", + [cur_result["showid"], cur_result["season"], cur_result["episode"], cur_result["date"]]) if not download_results: - logger.log(u"Found a snatch in the history for "+cur_result["resource"]+" but couldn't find the associated download, skipping it", logger.DEBUG) + logger.log(u"Found a snatch in the history for " + cur_result[ + "resource"] + " but couldn't find the associated download, skipping it", logger.DEBUG) continue nzb_name = cur_result["resource"] @@ -182,10 +200,13 @@ class AddSizeAndSceneNameFields(InitialSchema): file_name = file_name.rpartition('.')[0] # find the associated episode on disk - ep_results = self.connection.select("SELECT episode_id, status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ? AND location != ''", - [cur_result["showid"], cur_result["season"], cur_result["episode"]]) + ep_results = self.connection.select( + "SELECT episode_id, status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ? 
AND location != ''", + [cur_result["showid"], cur_result["season"], cur_result["episode"]]) if not ep_results: - logger.log(u"The episode "+nzb_name+" was found in history but doesn't exist on disk anymore, skipping", logger.DEBUG) + logger.log( + u"The episode " + nzb_name + " was found in history but doesn't exist on disk anymore, skipping", + logger.DEBUG) continue # get the status/quality of the existing ep and make sure it's what we expect @@ -198,7 +219,7 @@ class AddSizeAndSceneNameFields(InitialSchema): # make sure this is actually a real release name and not a season pack or something for cur_name in (nzb_name, file_name): - logger.log(u"Checking if "+cur_name+" is actually a good release name", logger.DEBUG) + logger.log(u"Checking if " + cur_name + " is actually a good release name", logger.DEBUG) try: np = NameParser(False) parse_result = np.parse(cur_name) @@ -207,7 +228,8 @@ class AddSizeAndSceneNameFields(InitialSchema): if parse_result.series_name and parse_result.season_number != None and parse_result.episode_numbers and parse_result.release_group: # if all is well by this point we'll just put the release name into the database - self.connection.action("UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?", [cur_name, ep_results[0]["episode_id"]]) + self.connection.action("UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?", + [cur_name, ep_results[0]["episode_id"]]) break # check each snatch to see if we can use it to get a release name from @@ -232,21 +254,24 @@ class AddSizeAndSceneNameFields(InitialSchema): if not parse_result.release_group: continue - logger.log(u"Name "+ep_file_name+" gave release group of "+parse_result.release_group+", seems valid", logger.DEBUG) - self.connection.action("UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?", [ep_file_name, cur_result["episode_id"]]) + logger.log( + u"Name " + ep_file_name + " gave release group of " + parse_result.release_group + ", seems valid", + logger.DEBUG) + self.connection.action("UPDATE tv_episodes SET release_name = ? 
WHERE episode_id = ?", + [ep_file_name, cur_result["episode_id"]]) self.incDBVersion() -class RenameSeasonFolders(AddSizeAndSceneNameFields): +class RenameSeasonFolders(AddSizeAndSceneNameFields): def test(self): return self.checkDBVersion() >= 11 def execute(self): - # rename the column self.connection.action("ALTER TABLE tv_shows RENAME TO tmp_tv_shows") - self.connection.action("CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, location TEXT, show_name TEXT, tvdb_id NUMERIC, network TEXT, genre TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, tvr_id NUMERIC, tvr_name TEXT, air_by_date NUMERIC, lang TEXT)") + self.connection.action( + "CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, location TEXT, show_name TEXT, tvdb_id NUMERIC, network TEXT, genre TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, tvr_id NUMERIC, tvr_name TEXT, air_by_date NUMERIC, lang TEXT)") sql = "INSERT INTO tv_shows(show_id, location, show_name, tvdb_id, network, genre, runtime, quality, airs, status, flatten_folders, paused, startyear, tvr_id, tvr_name, air_by_date, lang) SELECT show_id, location, show_name, tvdb_id, network, genre, runtime, quality, airs, status, seasonfolders, paused, startyear, tvr_id, tvr_name, air_by_date, lang FROM tmp_tv_shows" self.connection.action(sql) @@ -258,27 +283,29 @@ class RenameSeasonFolders(AddSizeAndSceneNameFields): self.incDBVersion() + class AddSubtitlesSupport(RenameSeasonFolders): def test(self): return self.checkDBVersion() >= 12 def execute(self): - self.addColumn("tv_shows", "subtitles") self.addColumn("tv_episodes", "subtitles", "TEXT", "") self.addColumn("tv_episodes", "subtitles_searchcount") self.addColumn("tv_episodes", "subtitles_lastsearch", "TIMESTAMP", str(datetime.datetime.min)) self.incDBVersion() + class AddIMDbInfo(RenameSeasonFolders): def test(self): return self.checkDBVersion() >= 13 def execute(self): - - self.connection.action("CREATE TABLE imdb_info (tvdb_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC)") + self.connection.action( + "CREATE TABLE imdb_info (tvdb_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC)") self.incDBVersion() + class Add1080pAndRawHDQualities(AddIMDbInfo): """Add support for 1080p related qualities along with RawHD @@ -311,17 +338,17 @@ class Add1080pAndRawHDQualities(AddIMDbInfo): result = old_quality # move fullhdbluray from 1<<5 to 1<<8 if set - if(result & (1<<5)): - result = result & ~(1<<5) - result = result | (1<<8) + if (result & (1 << 5)): + result = result & ~(1 << 5) + result = result | (1 << 8) # move hdbluray from 1<<4 to 1<<7 if set - if(result & (1<<4)): - result = result & ~(1<<4) - result = result | (1<<7) + if (result & (1 << 4)): + result = result & ~(1 << 4) + result = result | (1 << 7) # move hdwebdl from 1<<3 to 1<<5 if set - if(result & (1<<3)): - result = result & ~(1<<3) - result = result | (1<<5) + if (result & (1 << 3)): + result = result & ~(1 << 3) + result = result | (1 << 5) return result @@ -350,12 +377,19 @@ class Add1080pAndRawHDQualities(AddIMDbInfo): sickbeard.save_config() # upgrade previous HD to HD720p -- shift previous 
qualities to new placevalues - old_hd = common.Quality.combineQualities([common.Quality.HDTV, common.Quality.HDWEBDL >> 2, common.Quality.HDBLURAY >> 3], []) - new_hd = common.Quality.combineQualities([common.Quality.HDTV, common.Quality.HDWEBDL, common.Quality.HDBLURAY], []) + old_hd = common.Quality.combineQualities( + [common.Quality.HDTV, common.Quality.HDWEBDL >> 2, common.Quality.HDBLURAY >> 3], []) + new_hd = common.Quality.combineQualities([common.Quality.HDTV, common.Quality.HDWEBDL, common.Quality.HDBLURAY], + []) # update ANY -- shift existing qualities and add new 1080p qualities, note that rawHD was not added to the ANY template - old_any = common.Quality.combineQualities([common.Quality.SDTV, common.Quality.SDDVD, common.Quality.HDTV, common.Quality.HDWEBDL >> 2, common.Quality.HDBLURAY >> 3, common.Quality.UNKNOWN], []) - new_any = common.Quality.combineQualities([common.Quality.SDTV, common.Quality.SDDVD, common.Quality.HDTV, common.Quality.FULLHDTV, common.Quality.HDWEBDL, common.Quality.FULLHDWEBDL, common.Quality.HDBLURAY, common.Quality.FULLHDBLURAY, common.Quality.UNKNOWN], []) + old_any = common.Quality.combineQualities( + [common.Quality.SDTV, common.Quality.SDDVD, common.Quality.HDTV, common.Quality.HDWEBDL >> 2, + common.Quality.HDBLURAY >> 3, common.Quality.UNKNOWN], []) + new_any = common.Quality.combineQualities( + [common.Quality.SDTV, common.Quality.SDDVD, common.Quality.HDTV, common.Quality.FULLHDTV, + common.Quality.HDWEBDL, common.Quality.FULLHDWEBDL, common.Quality.HDBLURAY, common.Quality.FULLHDBLURAY, + common.Quality.UNKNOWN], []) # update qualities (including templates) logger.log(u"[1/4] Updating pre-defined templates and the quality for each show...", logger.MESSAGE) @@ -376,7 +410,8 @@ class Add1080pAndRawHDQualities(AddIMDbInfo): ql = [] episodes = self.connection.select("SELECT * FROM tv_episodes WHERE status < 3276800 AND status >= 800") for cur_episode in episodes: - ql.append(["UPDATE tv_episodes SET status = ? WHERE episode_id = ?", [self._update_status(cur_episode["status"]), cur_episode["episode_id"]]]) + ql.append(["UPDATE tv_episodes SET status = ? WHERE episode_id = ?", + [self._update_status(cur_episode["status"]), cur_episode["episode_id"]]]) self.connection.mass_action(ql) # make two seperate passes through the history since snatched and downloaded (action & quality) may not always coordinate together @@ -386,7 +421,8 @@ class Add1080pAndRawHDQualities(AddIMDbInfo): ql = [] historyAction = self.connection.select("SELECT * FROM history WHERE action < 3276800 AND action >= 800") for cur_entry in historyAction: - ql.append(["UPDATE history SET action = ? WHERE showid = ? AND date = ?", [self._update_status(cur_entry["action"]), cur_entry["showid"], cur_entry["date"]]]) + ql.append(["UPDATE history SET action = ? WHERE showid = ? AND date = ?", + [self._update_status(cur_entry["action"]), cur_entry["showid"], cur_entry["date"]]]) self.connection.mass_action(ql) # update previous history so it shows the correct quality @@ -394,7 +430,8 @@ class Add1080pAndRawHDQualities(AddIMDbInfo): ql = [] historyQuality = self.connection.select("SELECT * FROM history WHERE quality < 32768 AND quality >= 8") for cur_entry in historyQuality: - ql.append(["UPDATE history SET quality = ? WHERE showid = ? AND date = ?", [self._update_quality(cur_entry["quality"]), cur_entry["showid"], cur_entry["date"]]]) + ql.append(["UPDATE history SET quality = ? WHERE showid = ? 
@@ -403,6 +440,7 @@ class Add1080pAndRawHDQualities(AddIMDbInfo):
         logger.log(u"Performing a vacuum on the database.", logger.DEBUG)
         self.connection.action("VACUUM")

+
 class AddProperNamingSupport(Add1080pAndRawHDQualities):
     def test(self):
         return self.checkDBVersion() >= 15
@@ -411,6 +449,7 @@ class AddProperNamingSupport(Add1080pAndRawHDQualities):
         self.addColumn("tv_episodes", "is_proper")
         self.incDBVersion()

+
 class AddEmailSubscriptionTable(AddProperNamingSupport):
     def test(self):
         return self.hasColumn("tv_shows", "notify_list")
@@ -419,6 +458,7 @@ class AddEmailSubscriptionTable(AddProperNamingSupport):
         self.addColumn('tv_shows', 'notify_list', 'TEXT', None)
         self.incDBVersion()

+
 class AddShowidTvdbidIndex(AddEmailSubscriptionTable):
     """ Adding index on tvdb_id (tv_shows) and showid (tv_episodes) to speed up searches/queries """

@@ -439,6 +479,7 @@ class AddShowidTvdbidIndex(AddEmailSubscriptionTable):

         self.incDBVersion()

+
 class AddLastUpdateTVDB(AddShowidTvdbidIndex):
     """ Adding column last_update_tvdb to tv_shows for controlling nightly updates """

@@ -454,6 +495,7 @@ class AddLastUpdateTVDB(AddShowidTvdbidIndex):

         self.incDBVersion()

+
 class AddLastProperSearch(AddLastUpdateTVDB):
     def test(self):
         return self.checkDBVersion() >= 19
@@ -467,6 +509,7 @@ class AddLastProperSearch(AddLastUpdateTVDB):

         self.incDBVersion()

+
 class AddDvdOrderOption(AddLastProperSearch):
     def test(self):
         return self.checkDBVersion() >= 20
@@ -478,6 +521,7 @@ class AddDvdOrderOption(AddLastProperSearch):

         self.incDBVersion()

+
 class AddIndicesToTvEpisodes(AddDvdOrderOption):
     """ Adding indices to tv episodes """

@@ -498,6 +542,7 @@ class AddIndicesToTvEpisodes(AddDvdOrderOption):

         self.incDBVersion()

+
 class ConvertTVShowsToIndexerScheme(AddIndicesToTvEpisodes):
     def test(self):
         return self.checkDBVersion() >= 22

@@ -507,14 +552,17 @@ class ConvertTVShowsToIndexerScheme(AddIndicesToTvEpisodes):

         logger.log(u"Converting TV Shows table to Indexer Scheme...")

         self.connection.action("ALTER TABLE tv_shows RENAME TO tmp_tv_shows")
-        self.connection.action("CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, indexer_id NUMERIC, indexer TEXT, show_name TEXT, location TEXT, network TEXT, genre TEXT, classification TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, air_by_date NUMERIC, lang TEXT, subtitles NUMERIC, notify_list TEXT, imdb_id TEXT, last_update_indexer NUMERIC, dvdorder NUMERIC)")
-        self.connection.action("UPDATE tv_shows SET indexer = 'Tvdb'")
-        self.connection.action("UPDATE tv_shows SET classification = 'Scripted'")
-        self.connection.action("INSERT INTO tv_shows(show_id, indexer_id, show_name, location, network, genre, runtime, quality, airs, status, flatten_folders, paused, startyear, air_by_date, lang, subtitles, notify_list, imdb_id, last_update_indexer, dvdorder) SELECT show_id, tvdb_id, show_name, location, network, genre, runtime, quality, airs, status, flatten_folders, paused, startyear, air_by_date, lang, subtitles, notify_list, imdb_id, last_update_tvdb, dvdorder FROM tmp_tv_shows")
+        self.connection.action(
+            "CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, indexer_id NUMERIC, indexer NUMERIC, show_name TEXT, location TEXT, network TEXT, genre TEXT, classification TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, air_by_date NUMERIC, lang TEXT, subtitles NUMERIC, notify_list TEXT, imdb_id TEXT, last_update_indexer NUMERIC, dvdorder NUMERIC)")
+        self.connection.action(
+            "INSERT INTO tv_shows(show_id, indexer_id, show_name, location, network, genre, runtime, quality, airs, status, flatten_folders, paused, startyear, air_by_date, lang, subtitles, notify_list, imdb_id, last_update_indexer, dvdorder) SELECT show_id, tvdb_id, show_name, location, network, genre, runtime, quality, airs, status, flatten_folders, paused, startyear, air_by_date, lang, subtitles, notify_list, imdb_id, last_update_tvdb, dvdorder FROM tmp_tv_shows")
+        # run the defaults after the copy so the migrated rows actually pick them up
+        self.connection.action("UPDATE tv_shows SET indexer = 1")
+        self.connection.action("UPDATE tv_shows SET classification = 'Scripted'")
         self.connection.action("DROP TABLE tmp_tv_shows")

         self.incDBVersion()

+
 class ConvertTVEpisodesToIndexerScheme(ConvertTVShowsToIndexerScheme):
     def test(self):
         return self.checkDBVersion() >= 23

@@ -524,13 +572,16 @@ class ConvertTVEpisodesToIndexerScheme(ConvertTVShowsToIndexerScheme):

         logger.log(u"Converting TV Episodes table to Indexer Scheme...")

         self.connection.action("ALTER TABLE tv_episodes RENAME TO tmp_tv_episodes")
-        self.connection.action("CREATE TABLE tv_episodes (episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid NUMERIC, indexer TEXT, name TEXT, season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, hastbn NUMERIC, status NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, subtitles TEXT, subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, is_proper NUMERIC)")
-        self.connection.action("UPDATE tv_episodes SET indexer = 'Tvdb'")
-        self.connection.action("INSERT INTO tv_episodes(episode_id, showid, indexerid, name, season, episode, description, airdate, hasnfo, hastbn, status, location, file_size, release_name, subtitles, subtitles_searchcount, subtitles_lastsearch, is_proper) SELECT episode_id, showid, tvdbid, name, season, episode, description, airdate, hasnfo, hastbn, status, location, file_size, release_name, subtitles, subtitles_searchcount, subtitles_lastsearch, is_proper FROM tmp_tv_episodes")
+        self.connection.action(
+            "CREATE TABLE tv_episodes (episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid NUMERIC, indexer NUMERIC, name TEXT, season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, hastbn NUMERIC, status NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, subtitles TEXT, subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, is_proper NUMERIC)")
+        self.connection.action(
+            "INSERT INTO tv_episodes(episode_id, showid, indexerid, name, season, episode, description, airdate, hasnfo, hastbn, status, location, file_size, release_name, subtitles, subtitles_searchcount, subtitles_lastsearch, is_proper) SELECT episode_id, showid, tvdbid, name, season, episode, description, airdate, hasnfo, hastbn, status, location, file_size, release_name, subtitles, subtitles_searchcount, subtitles_lastsearch, is_proper FROM tmp_tv_episodes")
+        # as above, set the shared indexer only after the rows have been copied over
+        self.connection.action("UPDATE tv_episodes SET indexer = 1")
         self.connection.action("DROP TABLE tmp_tv_episodes")

         self.incDBVersion()

+
 class ConvertIMDBInfoToIndexerScheme(ConvertTVEpisodesToIndexerScheme):
     def test(self):
         return self.checkDBVersion() >= 24

@@ -540,12 +591,15 @@ class ConvertIMDBInfoToIndexerScheme(ConvertTVEpisodesToIndexerScheme):

         logger.log(u"Converting IMDB Info table to Indexer Scheme...")

         self.connection.action("ALTER TABLE imdb_info RENAME TO tmp_imdb_info")
-        self.connection.action("CREATE TABLE imdb_info (indexer_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC)")
-        self.connection.action("INSERT INTO imdb_info(indexer_id, imdb_id, title, year, akas, runtimes, genres, countries, country_codes, certificates, rating, votes, last_update) SELECT tvdb_id, imdb_id, title, year, akas, runtimes, genres, countries, country_codes, certificates, rating, votes, last_update FROM tmp_imdb_info")
+        self.connection.action(
+            "CREATE TABLE imdb_info (indexer_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC)")
+        self.connection.action(
+            "INSERT INTO imdb_info(indexer_id, imdb_id, title, year, akas, runtimes, genres, countries, country_codes, certificates, rating, votes, last_update) SELECT tvdb_id, imdb_id, title, year, akas, runtimes, genres, countries, country_codes, certificates, rating, votes, last_update FROM tmp_imdb_info")
         self.connection.action("DROP TABLE tmp_imdb_info")

         self.incDBVersion()

+
 class ConvertInfoToIndexerScheme(ConvertIMDBInfoToIndexerScheme):
     def test(self):
         return self.checkDBVersion() >= 25

@@ -555,12 +609,15 @@ class ConvertInfoToIndexerScheme(ConvertIMDBInfoToIndexerScheme):

         logger.log(u"Converting Info table to Indexer Scheme...")

         self.connection.action("ALTER TABLE info RENAME TO tmp_info")
-        self.connection.action("CREATE TABLE info (last_backlog NUMERIC, last_indexer NUMERIC, last_proper_search NUMERIC)")
-        self.connection.action("INSERT INTO info(last_backlog, last_indexer, last_proper_search) SELECT last_backlog, last_tvdb, last_proper_search FROM tmp_info")
+        self.connection.action(
+            "CREATE TABLE info (last_backlog NUMERIC, last_indexer NUMERIC, last_proper_search NUMERIC)")
+        self.connection.action(
+            "INSERT INTO info(last_backlog, last_indexer, last_proper_search) SELECT last_backlog, last_tvdb, last_proper_search FROM tmp_info")
         self.connection.action("DROP TABLE tmp_info")

         self.incDBVersion()

+
 class AddArchiveFirstMatchOption(ConvertInfoToIndexerScheme):
     def test(self):
         return self.checkDBVersion() >= 26

@@ -572,6 +629,7 @@ class AddArchiveFirstMatchOption(ConvertInfoToIndexerScheme):

         self.incDBVersion()

+
 class AddSceneNumbering(AddArchiveFirstMatchOption):
     def test(self):
         return self.checkDBVersion() >= 27

@@ -582,6 +640,7 @@ class AddSceneNumbering(AddArchiveFirstMatchOption):
         if self.hasTable("scene_numbering"):
             self.connection.action("DROP TABLE scene_numbering")

-        self.connection.action("CREATE TABLE scene_numbering (indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER, scene_season INTEGER, scene_episode INTEGER, PRIMARY KEY (indexer_id, season, episode))")
+        self.connection.action(
+            "CREATE TABLE scene_numbering (indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER, scene_season INTEGER, scene_episode INTEGER, PRIMARY KEY (indexer_id, season, episode))")

         self.incDBVersion()
\ No newline at end of file
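Note: SQLite of this vintage cannot rename or retype a column in place, which is why every Convert*ToIndexerScheme upgrade above uses the same rename/copy/drop recipe. The pattern in isolation, as a self-contained sketch (table and column names are illustrative only):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE example (id INTEGER PRIMARY KEY, tvdb_id NUMERIC)")   # old schema
    conn.execute("INSERT INTO example VALUES (1, 73739)")
    conn.execute("ALTER TABLE example RENAME TO tmp_example")                        # park the old data
    conn.execute("CREATE TABLE example (id INTEGER PRIMARY KEY, indexer_id NUMERIC)")  # new schema
    conn.execute("INSERT INTO example (id, indexer_id) SELECT id, tvdb_id FROM tmp_example")
    conn.execute("DROP TABLE tmp_example")
    conn.commit()

The INSERT ... SELECT is where columns get renamed (tvdb_id becomes indexer_id, and so on).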
diff --git a/sickbeard/db.py b/sickbeard/db.py
index 06ab5301..aae4d68f 100644
--- a/sickbeard/db.py
+++ b/sickbeard/db.py
@@ -32,6 +32,7 @@ from sickbeard.exceptions import ex

 db_lock = threading.Lock()

+
 def dbFilename(filename="sickbeard.db", suffix=None):
     """
     @param filename: The sqlite database filename to use. If not specified,
@@ -44,6 +45,7 @@ def dbFilename(filename="sickbeard.db", suffix=None):
         filename = "%s.%s" % (filename, suffix)
     return ek.ek(os.path.join, sickbeard.DATA_DIR, filename)

+
 class DBConnection:
     def __init__(self, filename="sickbeard.db", suffix=None, row_type=None):

@@ -127,7 +129,7 @@ class DBConnection:
                     logger.log(qu[0] + " with args " + str(qu[1]), logger.DEBUG)
                     sqlResult.append(self.connection.execute(qu[0], qu[1]))
                 self.connection.commit()
-                logger.log(u"Transaction with " + str(len(querylist)) + u" query's executed", logger.DEBUG)
+                logger.log(u"Transaction with " + str(len(querylist)) + u" queries executed", logger.DEBUG)
                 return sqlResult
             except sqlite3.OperationalError, e:
                 sqlResult = []
@@ -198,15 +200,16 @@ class DBConnection:

         changesBefore = self.connection.total_changes

-        genParams = lambda myDict : [x + " = ?" for x in myDict.keys()]
+        genParams = lambda myDict: [x + " = ?" for x in myDict.keys()]

-        query = "UPDATE " + tableName + " SET " + ", ".join(genParams(valueDict)) + " WHERE " + " AND ".join(genParams(keyDict))
+        query = "UPDATE " + tableName + " SET " + ", ".join(genParams(valueDict)) + " WHERE " + " AND ".join(
+            genParams(keyDict))

         self.action(query, valueDict.values() + keyDict.values())

         if self.connection.total_changes == changesBefore:
             query = "INSERT INTO " + tableName + " (" + ", ".join(valueDict.keys() + keyDict.keys()) + ")" + \
-                        " VALUES (" + ", ".join(["?"] * len(valueDict.keys() + keyDict.keys())) + ")"
+                    " VALUES (" + ", ".join(["?"] * len(valueDict.keys() + keyDict.keys())) + ")"
             self.action(query, valueDict.values() + keyDict.values())
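Note: upsert() above emulates an upsert by issuing the UPDATE first and falling back to an INSERT when connection.total_changes shows no row was touched. A hypothetical call, for illustration (12345 is a made-up id):

    myDB = DBConnection()
    # updates the matching row, or inserts a new one if nothing matches indexer_id 12345
    myDB.upsert("tv_shows", {"status": "Ended"}, {"indexer_id": 12345})

The first dict supplies the SET columns, the second the WHERE/key columns.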
     def tableInfo(self, tableName):
@@ -214,7 +217,7 @@ class DBConnection:
         cursor = self.connection.execute("PRAGMA table_info(%s)" % tableName)
         columns = {}
         for column in cursor:
-            columns[column['name']] = { 'type': column['type'] }
+            columns[column['name']] = {'type': column['type']}
         return columns

     # http://stackoverflow.com/questions/3300464/how-can-i-get-dict-from-sqlite-query
@@ -224,9 +227,11 @@ class DBConnection:
             d[col[0]] = row[idx]
         return d

+
 def sanityCheckDatabase(connection, sanity_check):
     sanity_check(connection).check()

+
 class DBSanityCheck(object):
     def __init__(self, connection):
         self.connection = connection
@@ -234,6 +239,7 @@ class DBSanityCheck(object):
     def check(self):
         pass

+
 # ===============
 # = Upgrade API =
 # ===============
@@ -242,9 +248,11 @@ def upgradeDatabase(connection, schema):
     logger.log(u"Checking database structure...", logger.MESSAGE)
     _processUpgrade(connection, schema)

+
 def prettyName(class_name):
     return ' '.join([x.group() for x in re.finditer("([A-Z])([a-z0-9]+)", class_name)])

+
 def _processUpgrade(connection, upgradeClass):
     instance = upgradeClass(connection)
     logger.log(u"Checking " + prettyName(upgradeClass.__name__) + " database upgrade", logger.DEBUG)
@@ -262,8 +270,9 @@ def _processUpgrade(connection, upgradeClass):
     for upgradeSubClass in upgradeClass.__subclasses__():
         _processUpgrade(connection, upgradeSubClass)

+
 # Base migration class. All future DB changes should be subclassed from this class
-class SchemaUpgrade (object):
+class SchemaUpgrade(object):
     def __init__(self, connection):
         self.connection = connection
diff --git a/sickbeard/encodingKludge.py b/sickbeard/encodingKludge.py
index 0d54554d..4b3a9b03 100644
--- a/sickbeard/encodingKludge.py
+++ b/sickbeard/encodingKludge.py
@@ -30,29 +30,36 @@ def fixStupidEncodings(x, silent=False):
         try:
             return x.decode(sickbeard.SYS_ENCODING)
         except UnicodeDecodeError:
-            logger.log(u"Unable to decode value: "+repr(x), logger.ERROR)
+            logger.log(u"Unable to decode value: " + repr(x), logger.ERROR)
             return None
     elif type(x) == unicode:
         return x
     else:
-        logger.log(u"Unknown value passed in, ignoring it: "+str(type(x))+" ("+repr(x)+":"+repr(type(x))+")", logger.DEBUG if silent else logger.ERROR)
+        logger.log(
+            u"Unknown value passed in, ignoring it: " + str(type(x)) + " (" + repr(x) + ":" + repr(type(x)) + ")",
+            logger.DEBUG if silent else logger.ERROR)
         return None

     return None

+
 def fixListEncodings(x):
     if type(x) != list and type(x) != tuple:
         return x
     else:
         return filter(lambda x: x != None, map(fixStupidEncodings, x))

+
 def callPeopleStupid(x):
     try:
         return x.encode(sickbeard.SYS_ENCODING)
     except UnicodeEncodeError:
-        logger.log(u"YOUR COMPUTER SUCKS! Your data is being corrupted by a bad locale/encoding setting. Report this error on the forums or IRC please: "+repr(x)+", "+sickbeard.SYS_ENCODING, logger.ERROR)
+        logger.log(
+            u"YOUR COMPUTER SUCKS! Your data is being corrupted by a bad locale/encoding setting. Report this error on the forums or IRC please: " + repr(
+                x) + ", " + sickbeard.SYS_ENCODING, logger.ERROR)
         return x.encode(sickbeard.SYS_ENCODING, 'ignore')

+
 def ek(func, *args, **kwargs):
     result = None

diff --git a/sickbeard/exceptions.py b/sickbeard/exceptions.py
index aa608949..0e8de132 100644
--- a/sickbeard/exceptions.py
+++ b/sickbeard/exceptions.py
@@ -104,10 +104,6 @@ class NewzbinAPIThrottled(SickBeardException):
     "Newzbin has throttled us, deal with it"


-class TVRageException(SickBeardException):
-    "TVRage API did something bad"
-
-
 class ShowDirNotFoundException(SickBeardException):
     "The show dir doesn't exist"

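Note: with TVRageException removed there is no indexer-specific exception left to raise or catch; callers are expected to handle the generic indexer exception instead, which is the pattern the reworked helpers further down already use. Sketch only, mirroring get_show_by_name below:

    try:
        t = sickbeard.indexerApi(indexer=indexer)
        showObj = t[name]
    except (sickbeard.indexer_exception, IOError):
        pass  # not found on this indexer, move on to the next one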
diff --git a/sickbeard/failedProcessor.py b/sickbeard/failedProcessor.py
index 7f052ef9..ed7e5402 100644
--- a/sickbeard/failedProcessor.py
+++ b/sickbeard/failedProcessor.py
@@ -76,7 +76,9 @@ class FailedProcessor(object):

         self._show_obj = helpers.findCertainShow(sickbeard.showList, show_id)
         if self._show_obj is None:
-            self._log(u"Could not create show object. Either the show hasn't been added to SickBeard, or it's still loading (if SB was restarted recently)", logger.WARNING)
+            self._log(
+                u"Could not create show object. Either the show hasn't been added to SickBeard, or it's still loading (if SB was restarted recently)",
+                logger.WARNING)
             raise exceptions.FailedProcessingFailed()

         for episode in parsed.episode_numbers:
@@ -105,7 +107,7 @@ class FailedProcessor(object):
             for show_name in show_names:
                 found_info = helpers.searchDBForShow(show_name)
                 if found_info is not None:
-                    return(found_info[1])
+                    return (found_info[1])

         return None

diff --git a/sickbeard/failed_history.py b/sickbeard/failed_history.py
index a6f09fdd..42a3d45c 100644
--- a/sickbeard/failed_history.py
+++ b/sickbeard/failed_history.py
@@ -20,7 +20,6 @@ import re
 import urllib
 import datetime

-
 from sickbeard import db
 from sickbeard import logger
 from sickbeard import exceptions
@@ -38,7 +37,7 @@ def prepareFailedName(release):
     """Standardizes release name for failed DB"""

     fixed = urllib.unquote(release)
-    if(fixed.endswith(".nzb")):
+    if (fixed.endswith(".nzb")):
         fixed = fixed.rpartition(".")[0]

     fixed = re.sub("[\.\-\+\ ]", "_", fixed)
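Note: prepareFailedName() is the normalization key for everything stored in failed.db, so the same release must always reduce to the same string: URL-unquote, strip a trailing .nzb, then collapse dots, dashes, pluses and spaces to underscores. Doctest-style, assuming the function above:

    >>> prepareFailedName("Show%20Name.S01E02.720p.nzb")
    'Show_Name_S01E02_720p'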
@@ -56,8 +55,10 @@ def logFailed(release):
     sql_results = myDB.select("SELECT * FROM history WHERE release=?", [release])

     if len(sql_results) == 0:
-        log_str += _log_helper(u"Release not found in snatch history. Recording it as bad with no size and no proivder.", logger.WARNING)
-        log_str += _log_helper(u"Future releases of the same name from providers that don't return size will be skipped.", logger.WARNING)
+        log_str += _log_helper(
+            u"Release not found in snatch history. Recording it as bad with no size and no provider.", logger.WARNING)
+        log_str += _log_helper(
+            u"Future releases of the same name from providers that don't return size will be skipped.", logger.WARNING)
     elif len(sql_results) > 1:
         log_str += _log_helper(u"Multiple logged snatches found for release", logger.WARNING)
         sizes = len(set(x["size"] for x in sql_results))
@@ -66,7 +67,9 @@ def logFailed(release):
             log_str += _log_helper(u"However, they're all the same size. Continuing with found size.", logger.WARNING)
             size = sql_results[0]["size"]
         else:
-            log_str += _log_helper(u"They also vary in size. Deleting the logged snatches and recording this release with no size/provider", logger.WARNING)
+            log_str += _log_helper(
+                u"They also vary in size. Deleting the logged snatches and recording this release with no size/provider",
+                logger.WARNING)
             for result in sql_results:
                 deleteLoggedSnatch(result["release"], result["size"], result["provider"])

@@ -128,13 +131,15 @@ def revertEpisode(show_obj, season, episode=None):
                 log_str += _log_helper(u"Found in history")
                 ep_obj.status = history_eps[episode]['old_status']
             else:
-                log_str += _log_helper(u"WARNING: Episode not found in history. Setting it back to WANTED", logger.WARNING)
+                log_str += _log_helper(u"WARNING: Episode not found in history. Setting it back to WANTED",
+                                       logger.WARNING)
                 ep_obj.status = WANTED

             ep_obj.saveToDB()

         except exceptions.EpisodeNotFoundException, e:
-            log_str += _log_helper(u"Unable to create episode, please set its status manually: " + exceptions.ex(e), logger.WARNING)
+            log_str += _log_helper(u"Unable to create episode, please set its status manually: " + exceptions.ex(e),
+                                   logger.WARNING)
     else:
         # Whole season
         log_str += _log_helper(u"Setting season to wanted: " + str(season))
@@ -145,13 +150,15 @@ def revertEpisode(show_obj, season, episode=None):
                 log_str += _log_helper(u"Found in history")
                 ep_obj.status = history_eps[ep_obj]['old_status']
             else:
-                log_str += _log_helper(u"WARNING: Episode not found in history. Setting it back to WANTED", logger.WARNING)
+                log_str += _log_helper(u"WARNING: Episode not found in history. Setting it back to WANTED",
+                                       logger.WARNING)
                 ep_obj.status = WANTED

             ep_obj.saveToDB()

     return log_str

+
 def markFailed(show_obj, season, episode=None):
     log_str = u""

@@ -165,7 +172,8 @@ def markFailed(show_obj, season, episode=None):
             ep_obj.saveToDB()

         except exceptions.EpisodeNotFoundException, e:
-            log_str += _log_helper(u"Unable to get episode, please set its status manually: " + exceptions.ex(e), logger.WARNING)
+            log_str += _log_helper(u"Unable to get episode, please set its status manually: " + exceptions.ex(e),
+                                   logger.WARNING)
     else:
         # Whole season
         for ep_obj in show_obj.getAllEpisodes(season):
@@ -176,6 +184,7 @@ def markFailed(show_obj, season, episode=None):

     return log_str

+
 def logSnatch(searchResult):
     myDB = db.DBConnection("failed.db")

@@ -196,7 +205,8 @@ def logSnatch(searchResult):
         myDB.action(
             "INSERT INTO history (date, size, release, provider, showid, season, episode, old_status)"
             "VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
-            [logDate, searchResult.size, release, provider, show_obj.indexerid, episode.season, episode.episode, old_status])
+            [logDate, searchResult.size, release, provider, show_obj.indexerid, episode.season, episode.episode,
+             old_status])


 def deleteLoggedSnatch(release, size, provider):
@@ -210,7 +220,8 @@ def deleteLoggedSnatch(release, size, provider):

 def trimHistory():
     myDB = db.DBConnection("failed.db")
-    myDB.action("DELETE FROM history WHERE date < " + str((datetime.datetime.today() - datetime.timedelta(days=30)).strftime(dateFormat)))
+    myDB.action("DELETE FROM history WHERE date < " + str(
+        (datetime.datetime.today() - datetime.timedelta(days=30)).strftime(dateFormat)))


 def findRelease(show, season, episode):
@@ -227,10 +238,13 @@ def findRelease(show, season, episode):
     myDB = db.DBConnection("failed.db")

     # Clear old snatches for this release if any exist
-    myDB.action("DELETE FROM history WHERE showid=" + str(show.indexerid) + " AND season=" + str(season) + " AND episode=" + str(episode) + " AND date < (SELECT max(date) FROM history WHERE showid=" + str(show.indexerid) + " AND season=" + str(season) + " AND episode=" + str(episode) + ")")
+    myDB.action("DELETE FROM history WHERE showid=" + str(show.indexerid) + " AND season=" + str(
+        season) + " AND episode=" + str(episode) + " AND date < (SELECT max(date) FROM history WHERE showid=" + str(
+        show.indexerid) + " AND season=" + str(season) + " AND episode=" + str(episode) + ")")

     # Search for release in snatch history
-    results = myDB.select("SELECT release, provider, date FROM history WHERE showid=? AND season=? AND episode=?",[show.indexerid, season, episode])
+    results = myDB.select("SELECT release, provider, date FROM history WHERE showid=? AND season=? AND episode=?",
+                          [show.indexerid, season, episode])

     for result in results:
         release = str(result["release"])
@@ -238,7 +252,7 @@ def findRelease(show, season, episode):
         date = result["date"]

         # Clear any incomplete snatch records for this release if any exist
-        myDB.action("DELETE FROM history WHERE release=? AND date!=?",[release, date])
+        myDB.action("DELETE FROM history WHERE release=? AND date!=?", [release, date])

         # Found a previously failed release
         logger.log(u"Failed release found for season (%s): (%s)" % (season, result["release"]), logger.DEBUG)
diff --git a/sickbeard/generic_queue.py b/sickbeard/generic_queue.py
index fd72911e..b188eb6f 100644
--- a/sickbeard/generic_queue.py
+++ b/sickbeard/generic_queue.py
@@ -21,13 +21,14 @@ import threading

 from sickbeard import logger

+
 class QueuePriorities:
     LOW = 10
     NORMAL = 20
     HIGH = 30

-class GenericQueue(object):

+class GenericQueue(object):
     def __init__(self):

         self.currentItem = None
@@ -38,13 +39,13 @@ class GenericQueue(object):
         self.queue_name = "QUEUE"

         self.min_priority = 0
-        
+
         self.currentItem = None

     def pause(self):
         logger.log(u"Pausing queue")
         self.min_priority = 999999999999
-    
+
     def unpause(self):
         logger.log(u"Unpausing queue")
         self.min_priority = 0
@@ -52,7 +53,7 @@ class GenericQueue(object):
     def add_item(self, item):
         item.added = datetime.datetime.now()
         self.queue.append(item)
-        
+
         return item

     def run(self):
@@ -69,7 +70,7 @@ class GenericQueue(object):
         if len(self.queue) > 0:

             # sort by priority
-            def sorter(x,y):
+            def sorter(x, y):
                 """
                 Sorts by priority descending then time ascending
                 """
@@ -81,10 +82,10 @@ class GenericQueue(object):
                     elif y.added > x.added:
                         return -1
                 else:
-                    return y.priority-x.priority
+                    return y.priority - x.priority

             self.queue.sort(cmp=sorter)
-            
+
             queueItem = self.queue[0]

             if queueItem.priority < self.min_priority:
@@ -101,8 +102,9 @@ class GenericQueue(object):
             # take it out of the queue
             del self.queue[0]

+
 class QueueItem:
-    def __init__(self, name, action_id = 0):
+    def __init__(self, name, action_id=0):
         self.name = name

         self.inProgress = False
@@ -112,14 +114,14 @@ class QueueItem:
         self.thread_name = None

         self.action_id = action_id
-        
+
         self.added = None

     def get_thread_name(self):
         if self.thread_name:
             return self.thread_name
         else:
-            return self.name.replace(" ","-").upper()
+            return self.name.replace(" ", "-").upper()

     def execute(self):
         """Implementing classes should call this"""
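Note: run() re-sorts the whole queue with the cmp-style sorter on every pass, so queue[0] is always the next item to execute. A cmp function orders x before y when it returns a negative number, which is why y.priority - x.priority puts higher priorities first; a tiny standalone demo of the same trick (Python 2):

    # cmp-style comparator: negative return sorts x first
    items = [("backlog", QueuePriorities.LOW), ("manual search", QueuePriorities.HIGH)]
    items.sort(cmp=lambda x, y: y[1] - x[1])
    assert items[0] == ("manual search", QueuePriorities.HIGH)  # higher priority runs first

Per its docstring, equal priorities are meant to fall back to the older added timestamp, though only part of that branch is visible in this hunk.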
diff --git a/sickbeard/gh_api.py b/sickbeard/gh_api.py
index 90657f3b..bd1f58ce 100644
--- a/sickbeard/gh_api.py
+++ b/sickbeard/gh_api.py
@@ -69,7 +69,8 @@ class GitHub(object):
         Returns a deserialized json object containing the commit info. See http://developer.github.com/v3/repos/commits/
         """
-        access_API = self._access_API(['repos', self.github_repo_user, self.github_repo, 'commits'], params={'per_page': 100, 'sha': self.branch})
+        access_API = self._access_API(['repos', self.github_repo_user, self.github_repo, 'commits'],
+                                      params={'per_page': 100, 'sha': self.branch})
         return access_API

     def compare(self, base, head, per_page=1):
@@ -84,5 +85,7 @@ class GitHub(object):
         Returns a deserialized json object containing the compare info. See http://developer.github.com/v3/repos/commits/
         """
-        access_API = self._access_API(['repos', self.github_repo_user, self.github_repo, 'compare', base + '...' + head], params={'per_page': per_page})
+        access_API = self._access_API(
+            ['repos', self.github_repo_user, self.github_repo, 'compare', base + '...' + head],
+            params={'per_page': per_page})
         return access_API
diff --git a/sickbeard/helpers.py b/sickbeard/helpers.py
index 57b5c34f..a8f67aaf 100644
--- a/sickbeard/helpers.py
+++ b/sickbeard/helpers.py
@@ -31,10 +31,10 @@ import httplib
 import urlparse
 import uuid
 import base64
+import string

 from lib import requests
 from itertools import izip, cycle
-from contextlib import closing

 try:
     import json
@@ -47,20 +47,17 @@ except ImportError:
     import elementtree.ElementTree as etree

 from xml.dom.minidom import Node
-from datetime import datetime as dt

 import sickbeard

 from sickbeard.exceptions import MultipleShowObjectsException, ex
 from sickbeard import logger, classes
-from sickbeard.common import USER_AGENT, mediaExtensions, subtitleExtensions, XML_NSMAP, indexerStrings
+from sickbeard.common import USER_AGENT, mediaExtensions, subtitleExtensions, XML_NSMAP
 from sickbeard import db
 from sickbeard import encodingKludge as ek
 from sickbeard import notifiers

-from sickbeard.indexers import indexer_api, indexer_exceptions
-
 from lib import subliminal

 #from sickbeard.subtitles import EXTENSIONS
@@ -88,6 +85,7 @@ def indentXML(elem, level=0):
         if level and (not elem.tail or not elem.tail.strip()):
             elem.tail = i

+
 def replaceExtension(filename, newExt):
     '''
     >>> replaceExtension('foo.avi', 'mkv')
@@ -107,6 +105,7 @@ def replaceExtension(filename, newExt):
     else:
         return sepFile[0] + "." + newExt

+
 def isMediaFile(filename):
     # ignore samples
     if re.search('(^|[\W_])(sample\d*)[\W_]', filename, re.I):
@@ -117,31 +116,34 @@ def isMediaFile(filename):
         return False

     sepFile = filename.rpartition(".")
-    
+
     if re.search('extras?$', sepFile[0], re.I):
         return False
-    
+
     if sepFile[2].lower() in mediaExtensions:
         return True
     else:
         return False

+
 def isRarFile(filename):
     archive_regex = '(?P<file>^(?P<base>(?:(?!\.part\d+\.rar$).)*)\.(?:(?:part0*1\.)?rar)$)'
-    
+
     if re.search(archive_regex, filename):
         return True
-    
+
     return False

+
 def isBeingWritten(filepath):
-# Return True if file was modified within 60 seconds. it might still be being written to.
+    # Return True if file was modified within 60 seconds. it might still be being written to.
     ctime = max(ek.ek(os.path.getctime, filepath), ek.ek(os.path.getmtime, filepath))
     if ctime > time.time() - 60:
         return True
-    
+
     return False

+
 def sanitizeFileName(name):
     '''
     >>> sanitizeFileName('a/b/c')
@@ -153,14 +155,14 @@ def sanitizeFileName(name):
     >>> sanitizeFileName('.a.b..')
     'a.b'
     '''
-    
+
     # remove bad chars from the filename
     name = re.sub(r'[\\/\*]', '-', name)
     name = re.sub(r'[:"<>|?]', '', name)
-    
+
     # remove leading/trailing periods and spaces
     name = name.strip(' .')
-    
+
     return name


@@ -177,7 +179,7 @@ Returns a byte-string retrieved from the url provider.
     try:
         # Remove double-slashes from url
         parsed = list(urlparse.urlparse(url))
-        parsed[2] = re.sub("/{2,}", "/", parsed[2])  # replace two or more / with one
+        parsed[2] = re.sub("/{2,}", "/", parsed[2])  # replace two or more / with one
         url = urlparse.urlunparse(parsed)

         it = iter(req_headers)
@@ -196,12 +198,14 @@ Returns a byte-string retrieved from the url provider.
     return resp.content if resp.ok else None

+
 def _remove_file_failed(file):
     try:
-        ek.ek(os.remove,file)
+        ek.ek(os.remove, file)
     except:
         pass

+
 def download_file(url, filename):
     try:
         r = requests.get(url, stream=True)
@@ -228,9 +232,10 @@ def download_file(url, filename):
         _remove_file_failed(filename)
         logger.log(u"Unknown exception while loading URL " + url + ": " + traceback.format_exc(), logger.WARNING)
         return False
-    
+
     return True

+
 def findCertainShow(showList, indexerid):
     results = filter(lambda x: x.indexerid == indexerid, showList)
     if len(results) == 0:
@@ -240,6 +245,7 @@ def findCertainShow(showList, indexerid):
     else:
         return results[0]

+
 def makeDir(path):
     if not ek.ek(os.path.isdir, path):
         try:
@@ -252,8 +258,7 @@ def makeDir(path):


 def searchDBForShow(regShowName, indexer_id=None):
-
-    showNames = [re.sub('[. -]', ' ', regShowName),regShowName]
+    showNames = [re.sub('[. -]', ' ', regShowName), regShowName]

     myDB = db.DBConnection()

@@ -263,20 +268,25 @@ def searchDBForShow(regShowName, indexer_id=None):
         show = get_show_by_name(showName, sickbeard.showList)
         if show:
-            sqlResults = myDB.select("SELECT * FROM tv_shows WHERE show_name LIKE ? OR show_name LIKE ?", [show.name, show.name])
+            sqlResults = myDB.select("SELECT * FROM tv_shows WHERE show_name LIKE ? OR show_name LIKE ?",
+                                     [show.name, show.name])
         else:
-            sqlResults = myDB.select("SELECT * FROM tv_shows WHERE show_name LIKE ? OR show_name LIKE ?", [showName, showName])
+            sqlResults = myDB.select("SELECT * FROM tv_shows WHERE show_name LIKE ? OR show_name LIKE ?",
+                                     [showName, showName])

         if len(sqlResults) == 1:
-            return (sqlResults[0]["indexer"], int(sqlResults[0]["indexer_id"]), sqlResults[0]["show_name"])
+            return (int(sqlResults[0]["indexer"]), int(sqlResults[0]["indexer_id"]), sqlResults[0]["show_name"])
         else:
             # if we didn't get exactly one result then try again with the year stripped off if possible
             match = re.match(yearRegex, showName)
             if match and match.group(1):
-                logger.log(u"Unable to match original name but trying to manually strip and specify show year", logger.DEBUG)
-                sqlResults = myDB.select("SELECT * FROM tv_shows WHERE (show_name LIKE ? OR show_name LIKE ?) AND startyear = ?", [match.group(1) + '%', match.group(1) + '%', match.group(3)])
+                logger.log(u"Unable to match original name but trying to manually strip and specify show year",
+                           logger.DEBUG)
+                sqlResults = myDB.select(
+                    "SELECT * FROM tv_shows WHERE (show_name LIKE ? OR show_name LIKE ?) AND startyear = ?",
+                    [match.group(1) + '%', match.group(1) + '%', match.group(3)])

             if len(sqlResults) == 0:
                 logger.log(u"Unable to match a record in the DB for " + showName, logger.DEBUG)
@@ -285,49 +295,52 @@ def searchDBForShow(regShowName, indexer_id=None):
                 logger.log(u"Multiple results for " + showName + " in the DB, unable to match show name", logger.DEBUG)
                 continue
             else:
-                return (sqlResults[0]["indexer"], int(sqlResults[0]["indexer_id"]), sqlResults[0]["show_name"])
+                return (int(sqlResults[0]["indexer"]), int(sqlResults[0]["indexer_id"]), sqlResults[0]["show_name"])

     return None

-def searchIndexersForShow(regShowName, indexer_id = None):
-    showNames = [re.sub('[. -]', ' ', regShowName),regShowName]
+def searchIndexersForShow(regShowName, indexer_id=None):
+    showNames = [re.sub('[. -]', ' ', regShowName), regShowName]

-    for name in showNames:
-        for indexer in indexerStrings:
-            logger.log(u"Trying to find the " + name + " on " + indexer, logger.DEBUG)
+    # Query Indexers for each search term and build the list of results
+    for indexer in sickbeard.indexerApi().indexers:
+        def searchShows():
+            lINDEXER_API_PARMS = {'indexer': indexer}
+            lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI
+            t = sickbeard.indexerApi(**lINDEXER_API_PARMS)

-            try:
-                lINDEXER_API_PARMS = {'indexer': indexer}
-
-                lINDEXER_API_PARMS['search_all_languages'] = True
-                lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI
-
-                t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
-                showObj = t[name]
-                return indexer
-            except (indexer_exceptions.indexer_exception, IOError):
-                # if none found, search on all languages
+            for name in showNames:
+                logger.log(u"Trying to find " + name + " on " + sickbeard.indexerApi(indexer).name, logger.DEBUG)
                 try:
-                    # There's gotta be a better way of doing this but we don't wanna
-                    # change the language value elsewhere
+                    if indexer_id:
+                        search = t[indexer_id]
+                    else:
+                        search = t[name]

-                    lINDEXER_API_PARMS = {'indexer': indexer}
+                    if isinstance(search, dict):
+                        search = [search]

-                    lINDEXER_API_PARMS['search_all_languages'] = True
-                    lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI
+                    # add search results
+                    result = [[t.config['id'], x['id']] for x in search if name.lower() == x['seriesname'].lower()]
+                    if len(result) > 0:
+                        result = [item for sublist in result for item in sublist]
+                        return result

-                    t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
-                    showObj = t[name]
-                    return indexer
-                except (indexer_exceptions.indexer_exception, IOError):
-                    pass
+                except KeyError, e:
+                    break

-                continue
-            except (IOError):
-                continue
+                except Exception, e:
+                    logger.log(
+                        u"Error while auto-detecting show indexer and indexerid on indexer " + sickbeard.indexerApi(
+                            indexer).name + ", retrying: " + ex(e), logger.ERROR)
+                    logger.log(traceback.format_exc(), logger.DEBUG)
+                    continue
+
+        # search indexers for shows
+        found = searchShows()
+        if found: return found

-    return None
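Note: searchShows() returns its matches flattened, so one hit comes back as [indexer, indexer_id] and several hits as one flat run of pairs. Illustrative values only:

    found = searchIndexersForShow("Some Show")
    # one match on indexer 1:            [1, 73739]
    # two matches on the same indexer:   [1, 73739, 1, 257655]
    if found:
        indexer, indexer_id = found[0], found[1]  # presumably callers take the first pair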

 def sizeof_fmt(num):
     '''
@@ -347,8 +360,8 @@ def sizeof_fmt(num):
             return "%3.1f %s" % (num, x)
         num /= 1024.0

-def listMediaFiles(path):

+def listMediaFiles(path):
     if not dir or not ek.ek(os.path.isdir, path):
         return []

@@ -365,6 +378,7 @@ def listMediaFiles(path):

     return files

+
 def copyFile(srcFile, destFile):
     ek.ek(shutil.copyfile, srcFile, destFile)
     try:
@@ -372,6 +386,7 @@ def copyFile(srcFile, destFile):
     except OSError:
         pass

+
 def moveFile(srcFile, destFile):
     try:
         ek.ek(os.rename, srcFile, destFile)
@@ -380,13 +395,16 @@ def moveFile(srcFile, destFile):
         copyFile(srcFile, destFile)
         ek.ek(os.unlink, srcFile)

+
 def link(src, dst):
     if os.name == 'nt':
         import ctypes
+
         if ctypes.windll.kernel32.CreateHardLinkW(unicode(dst), unicode(src), 0) == 0: raise ctypes.WinError()
     else:
         os.link(src, dst)

+
 def hardlinkFile(srcFile, destFile):
     try:
         ek.ek(link, srcFile, destFile)
@@ -395,13 +413,17 @@ def hardlinkFile(srcFile, destFile):
         logger.log(u"Failed to create hardlink of " + srcFile + " at " + destFile + ". Copying instead", logger.ERROR)
         copyFile(srcFile, destFile)

+
 def symlink(src, dst):
     if os.name == 'nt':
         import ctypes
-        if ctypes.windll.kernel32.CreateSymbolicLinkW(unicode(dst), unicode(src), 1 if os.path.isdir(src) else 0) in [0, 1280]: raise ctypes.WinError()
+
+        if ctypes.windll.kernel32.CreateSymbolicLinkW(unicode(dst), unicode(src), 1 if os.path.isdir(src) else 0) in [0,
+                                                                                                                      1280]: raise ctypes.WinError()
     else:
         os.symlink(src, dst)

+
 def moveAndSymlinkFile(srcFile, destFile):
     try:
         ek.ek(os.rename, srcFile, destFile)
@@ -411,6 +433,7 @@ def moveAndSymlinkFile(srcFile, destFile):
         logger.log(u"Failed to create symlink of " + srcFile + " at " + destFile + ". Copying instead", logger.ERROR)
         copyFile(srcFile, destFile)

+
 def make_dirs(path):
     """
     Creates any folders that are missing and assigns them the permissions of their
@@ -466,27 +489,27 @@ def rename_ep_file(cur_path, new_path, old_path_length=0):
     old_path_length: The length of media file path (old name) WITHOUT THE EXTENSION
     """

-    new_dest_dir, new_dest_name = os.path.split(new_path)  #@UnusedVariable
+    new_dest_dir, new_dest_name = os.path.split(new_path)  #@UnusedVariable

     if old_path_length == 0 or old_path_length > len(cur_path):
         # approach from the right
         cur_file_name, cur_file_ext = os.path.splitext(cur_path)  # @UnusedVariable
     else:
         # approach from the left
-        cur_file_ext = cur_path[old_path_length:] 
+        cur_file_ext = cur_path[old_path_length:]
         cur_file_name = cur_path[:old_path_length]
-    
+
     if cur_file_ext[1:] in subtitleExtensions:
         #Extract subtitle language from filename
         sublang = os.path.splitext(cur_file_name)[1][1:]
-        
+
         #Check if the language extracted from filename is a valid language
         try:
             language = subliminal.language.Language(sublang, strict=True)
-            cur_file_ext = '.'+sublang+cur_file_ext
+            cur_file_ext = '.' + sublang + cur_file_ext
         except ValueError:
             pass
-    
+
     # put the extension on the incoming file
     new_path += cur_file_ext
@@ -524,7 +547,8 @@ def delete_empty_folders(check_empty_dir, keep_dir=None):

         check_files = ek.ek(os.listdir, check_empty_dir)

-        if not check_files or (len(check_files) <= len(ignore_items) and all([check_file in ignore_items for check_file in check_files])):
+        if not check_files or (len(check_files) <= len(ignore_items) and all(
+                [check_file in ignore_items for check_file in check_files])):
             # directory is empty or contains only ignore_items
             try:
                 logger.log(u"Deleting empty folder: " + check_empty_dir)
@@ -539,19 +563,20 @@ def delete_empty_folders(check_empty_dir, keep_dir=None):
         else:
             break

+
 def chmodAsParent(childPath):
     if os.name == 'nt' or os.name == 'ce':
         return

     parentPath = ek.ek(os.path.dirname, childPath)
-    
+
     if not parentPath:
         logger.log(u"No parent path provided in " + childPath + ", unable to get permissions from it", logger.DEBUG)
         return
-    
+
     parentPathStat = ek.ek(os.stat, parentPath)
     parentMode = stat.S_IMODE(parentPathStat[stat.ST_MODE])
-    
+
     childPathStat = ek.ek(os.stat, childPath)
     childPath_mode = stat.S_IMODE(childPathStat[stat.ST_MODE])

@@ -564,18 +589,20 @@ def chmodAsParent(childPath):
         return

     childPath_owner = childPathStat.st_uid
-    user_id = os.geteuid()  # @UndefinedVariable - only available on UNIX
+    user_id = os.geteuid()  # @UndefinedVariable - only available on UNIX

-    if user_id !=0 and user_id != childPath_owner:
+    if user_id != 0 and user_id != childPath_owner:
         logger.log(u"Not running as root or owner of " + childPath + ", not trying to set permissions", logger.DEBUG)
         return

     try:
         ek.ek(os.chmod, childPath, childMode)
-        logger.log(u"Setting permissions for %s to %o as parent directory has %o" % (childPath, childMode, parentMode), logger.DEBUG)
+        logger.log(u"Setting permissions for %s to %o as parent directory has %o" % (childPath, childMode, parentMode),
+                   logger.DEBUG)
     except OSError:
         logger.log(u"Failed to set permission for %s to %o" % (childPath, childMode), logger.ERROR)

+
 def fileBitFilter(mode):
     for bit in [stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH, stat.S_ISUID, stat.S_ISGID]:
         if mode & bit:
@@ -583,6 +610,7 @@ def fileBitFilter(mode):

     return mode

+
 def fixSetGroupID(childPath):
     if os.name == 'nt' or os.name == 'ce':
         return
@@ -600,19 +628,23 @@ def fixSetGroupID(childPath):
         return

     childPath_owner = childStat.st_uid
-    user_id = os.geteuid()  # @UndefinedVariable - only available on UNIX
+    user_id = os.geteuid()  # @UndefinedVariable - only available on UNIX

-    if user_id !=0 and user_id != childPath_owner:
-        logger.log(u"Not running as root or owner of " + childPath + ", not trying to set the set-group-ID", logger.DEBUG)
+    if user_id != 0 and user_id != childPath_owner:
+        logger.log(u"Not running as root or owner of " + childPath + ", not trying to set the set-group-ID",
+                   logger.DEBUG)
         return

     try:
-        ek.ek(os.chown, childPath, -1, parentGID)  # @UndefinedVariable - only available on UNIX
+        ek.ek(os.chown, childPath, -1, parentGID)  # @UndefinedVariable - only available on UNIX
         logger.log(u"Respecting the set-group-ID bit on the parent directory for %s" % (childPath), logger.DEBUG)
     except OSError:
-        logger.log(u"Failed to respect the set-group-ID bit on the parent directory for %s (setting group ID %i)" % (childPath, parentGID), logger.ERROR)
+        logger.log(
+            u"Failed to respect the set-group-ID bit on the parent directory for %s (setting group ID %i)" % (
+                childPath, parentGID), logger.ERROR)

-def sanitizeSceneName (name, ezrss=False):
+
+def sanitizeSceneName(name, ezrss=False):
     """
     Takes a show name and returns the "scenified" version of it.

@@ -640,13 +672,15 @@ def sanitizeSceneName(name, ezrss=False):

         return name

+
 def create_https_certificates(ssl_cert, ssl_key):
     """
     Create self-signed HTTPS certificates and store in paths 'ssl_cert' and 'ssl_key'
     """
     try:
         from lib.OpenSSL import crypto  # @UnresolvedImport
-        from lib.certgen import createKeyPair, createCertRequest, createCertificate, TYPE_RSA, serial  # @UnresolvedImport
+        from lib.certgen import createKeyPair, createCertRequest, createCertificate, TYPE_RSA, \
+            serial  # @UnresolvedImport
     except:
         logger.log(u"pyopenssl module missing, please install for https access", logger.WARNING)
         return False
@@ -654,12 +688,12 @@ def create_https_certificates(ssl_cert, ssl_key):
     # Create the CA Certificate
     cakey = createKeyPair(TYPE_RSA, 1024)
     careq = createCertRequest(cakey, CN='Certificate Authority')
-    cacert = createCertificate(careq, (careq, cakey), serial, (0, 60 * 60 * 24 * 365 * 10))  # ten years
+    cacert = createCertificate(careq, (careq, cakey), serial, (0, 60 * 60 * 24 * 365 * 10))  # ten years

     cname = 'SickBeard'
     pkey = createKeyPair(TYPE_RSA, 1024)
     req = createCertRequest(pkey, CN=cname)
-    cert = createCertificate(req, (cacert, cakey), serial, (0, 60* 60 * 24 * 365 *10))  # ten years
+    cert = createCertificate(req, (cacert, cakey), serial, (0, 60 * 60 * 24 * 365 * 10))  # ten years

     # Save the key and certificate to disk
     try:
@@ -671,8 +705,10 @@ def create_https_certificates(ssl_cert, ssl_key):

     return True

+
 if __name__ == '__main__':
     import doctest
+
     doctest.testmod()

@@ -741,7 +777,7 @@ def get_xml_text(element, mini_dom=False):

     return text.strip()

-    
+
 def backupVersionedFile(old_file, version):
     numTries = 0

@@ -771,14 +807,17 @@ def backupVersionedFile(old_file, version):


 # try to convert to int, if it fails the default will be returned
-def tryInt(s, s_default = 0):
-    try: return int(s)
-    except: return s_default
+def tryInt(s, s_default=0):
+    try:
+        return int(s)
+    except:
+        return s_default
+

 # generates a md5 hash of a file
-def md5_for_file(filename, block_size=2**16):
-    try:
-        with open(filename,'rb') as f:
+def md5_for_file(filename, block_size=2 ** 16):
+    try:
+        with open(filename, 'rb') as f:
             md5 = hashlib.md5()
             while True:
                 data = f.read(block_size)
@@ -789,7 +828,8 @@ def md5_for_file(filename, block_size=2**16):
             return md5.hexdigest()
     except Exception:
         return None
-    
+
+
 def get_lan_ip():
     """
     Simple function to get LAN localhost_ip
@@ -799,12 +839,12 @@ def get_lan_ip():
     if os.name != "nt":
         import fcntl
         import struct
-        
+
         def get_interface_ip(ifname):
             s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
             return socket.inet_ntoa(fcntl.ioctl(s.fileno(), 0x8915, struct.pack('256s',
-                                    ifname[:15]))[20:24])
-    
+                                                                                ifname[:15]))[20:24])
+
     ip = socket.gethostbyname(socket.gethostname())
     if ip.startswith("127.") and os.name != "nt":
         interfaces = [
@@ -817,16 +857,17 @@ def get_lan_ip():
             "ath0",
             "ath1",
             "ppp0",
-            ]
+        ]
         for ifname in interfaces:
             try:
                 ip = get_interface_ip(ifname)
-                print ifname, ip
+                print ifname, ip
                 break
             except IOError:
                 pass
     return ip

+
 def check_url(url):
     """
     Check if a URL exists without downloading the whole file.
@@ -836,14 +877,14 @@ def check_url(url):
     # http://stackoverflow.com/questions/1140661
     good_codes = [httplib.OK, httplib.FOUND, httplib.MOVED_PERMANENTLY]

-    host, path = urlparse.urlparse(url)[1:3]  # elems [1] and [2]
+    host, path = urlparse.urlparse(url)[1:3]  # elems [1] and [2]
     try:
         conn = httplib.HTTPConnection(host)
         conn.request('HEAD', path)
         return conn.getresponse().status in good_codes
     except StandardError:
         return None
-    
+

 """
 Encryption
@@ -860,27 +901,30 @@ To add a new encryption_version:
 """

 # Key Generators
-unique_key1 = hex(uuid.getnode()**2)  # Used in encryption v1
+unique_key1 = hex(uuid.getnode() ** 2)  # Used in encryption v1


 # Encryption Functions
 def encrypt(data, encryption_version=0, decrypt=False):
-
     # Version 1: Simple XOR encryption (this is not very secure, but works)
     if encryption_version == 1:
-        if decrypt: 
-            return ''.join(chr(ord(x) ^ ord(y)) for (x,y) in izip(base64.decodestring(data), cycle(unique_key1)))
+        if decrypt:
+            return ''.join(chr(ord(x) ^ ord(y)) for (x, y) in izip(base64.decodestring(data), cycle(unique_key1)))
         else:
-            return base64.encodestring(''.join(chr(ord(x) ^ ord(y)) for (x,y) in izip(data, cycle(unique_key1)))).strip()
+            return base64.encodestring(
+                ''.join(chr(ord(x) ^ ord(y)) for (x, y) in izip(data, cycle(unique_key1)))).strip()
     # Version 0: Plain text
     else:
        return data
-    
+
+
 def decrypt(data, encryption_version=0):
-    return encrypt(data, encryption_version, decrypt=True) 
+    return encrypt(data, encryption_version, decrypt=True)
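Note: version 1 is a symmetric XOR keyed off unique_key1 and wrapped in base64, so decrypt() is just encrypt() run the other way; a round trip is the quick check (version 0 is a pass-through):

    token = encrypt('hunter2', encryption_version=1)
    assert decrypt(token, encryption_version=1) == 'hunter2'
    assert encrypt('plain', encryption_version=0) == 'plain'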

+
 def full_sanitizeSceneName(name):
     return re.sub('[. -]', ' ', sanitizeSceneName(name)).lower().lstrip()

+
 def _check_against_names(name, show):
     nameInQuestion = full_sanitizeSceneName(name)

@@ -895,25 +939,26 @@ def _check_against_names(name, show):

     return False

+
 def get_show_by_name(name, showList, useIndexer=False):
-    logger.log(u"Trying to get the indexerid for "+name, logger.DEBUG)
+    logger.log(u"Trying to get the indexerid for " + name, logger.DEBUG)

     if showList:
         for show in showList:
             if _check_against_names(name, show):
-                logger.log(u"Matched "+name+" in the showlist to the show "+show.name, logger.DEBUG)
+                logger.log(u"Matched " + name + " in the showlist to the show " + show.name, logger.DEBUG)
                 return show

     if useIndexer:
-        for indexer in indexerStrings:
+        for indexer in sickbeard.indexerApi().indexers:
             try:
                 lINDEXER_API_PARMS = {'indexer': indexer}

                 lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI

-                t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
+                t = sickbeard.indexerApi(**lINDEXER_API_PARMS)
                 showObj = t[name]
-            except (indexer_exceptions.indexer_exception, IOError):
+            except (sickbeard.indexer_exception, IOError):
                 # if none found, search on all languages
                 try:
                     lINDEXER_API_PARMS = {'indexer': indexer}
@@ -921,9 +966,9 @@ def get_show_by_name(name, showList, useIndexer=False):
                     lINDEXER_API_PARMS['search_all_languages'] = True
                     lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI

-                    t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
+                    t = sickbeard.indexerApi(**lINDEXER_API_PARMS)
                     showObj = t[name]
-                except (indexer_exceptions.indexer_exception, IOError):
+                except (sickbeard.indexer_exception, IOError):
                     pass

                 continue
@@ -936,12 +981,15 @@ def get_show_by_name(name, showList, useIndexer=False):

     return None

+
 def suffix(d):
-    return 'th' if 11<=d<=13 else {1:'st',2:'nd',3:'rd'}.get(d%10, 'th')
+    return 'th' if 11 <= d <= 13 else {1: 'st', 2: 'nd', 3: 'rd'}.get(d % 10, 'th')
+

 def custom_strftime(format, t):
     return t.strftime(format).replace('{S}', str(t.day) + suffix(t.day))

+
 def is_hidden_folder(folder):
     """
     Returns True if folder is hidden.
@@ -954,6 +1002,7 @@ def is_hidden_folder(folder):

     return False

+
 def real_path(path):
     """
     Returns: the canonicalized absolute pathname. The resulting path will have no symbolic link, '/./' or '/../' components.
diff --git a/sickbeard/history.py b/sickbeard/history.py
index 4a95655c..c58b3782 100644
--- a/sickbeard/history.py
+++ b/sickbeard/history.py
@@ -19,25 +19,25 @@
 import db
 import datetime

-from sickbeard.common import SNATCHED, SUBTITLED, FAILED, Quality
+from sickbeard.common import SNATCHED, SUBTITLED, FAILED, Quality

 dateFormat = "%Y%m%d%H%M%S"

-def _logHistoryItem(action, showid, season, episode, quality, resource, provider):

+def _logHistoryItem(action, showid, season, episode, quality, resource, provider):
     logDate = datetime.datetime.today().strftime(dateFormat)

     if not isinstance(resource, unicode):
         resource = unicode(resource, 'utf-8')

     myDB = db.DBConnection()
-    myDB.action("INSERT INTO history (action, date, showid, season, episode, quality, resource, provider) VALUES (?,?,?,?,?,?,?,?)",
-                [action, logDate, showid, season, episode, quality, resource, provider])
+    myDB.action(
+        "INSERT INTO history (action, date, showid, season, episode, quality, resource, provider) VALUES (?,?,?,?,?,?,?,?)",
+        [action, logDate, showid, season, episode, quality, resource, provider])


 def logSnatch(searchResult):
-
     for curEpObj in searchResult.episodes:

         showid = int(curEpObj.show.indexerid)
@@ -57,14 +57,14 @@ def logSnatch(searchResult):

         _logHistoryItem(action, showid, season, episode, quality, resource, provider)

-def logDownload(episode, filename, new_ep_quality, release_group=None):

+def logDownload(episode, filename, new_ep_quality, release_group=None):
     showid = int(episode.show.indexerid)
     season = int(episode.season)
     epNum = int(episode.episode)

     quality = new_ep_quality
-    
+
     # store the release group as the provider if possible
     if release_group:
         provider = release_group
@@ -75,21 +75,21 @@ def logDownload(episode, filename, new_ep_quality, release_group=None):

     _logHistoryItem(action, showid, season, epNum, quality, filename, provider)

+
 def logSubtitle(showid, season, episode, status, subtitleResult):
-
     resource = subtitleResult.path
     provider = subtitleResult.service
-    status, quality = Quality.splitCompositeStatus(status) 
+    status, quality = Quality.splitCompositeStatus(status)
     action = Quality.compositeStatus(SUBTITLED, quality)
-    
+
     _logHistoryItem(action, showid, season, episode, quality, resource, provider)

-def logFailed(indexerid, season, episode, status, release, provider=None):

+def logFailed(indexerid, season, episode, status, release, provider=None):
     showid = int(indexerid)
     season = int(season)
     epNum = int(episode)

-    status, quality = Quality.splitCompositeStatus(status) 
+    status, quality = Quality.splitCompositeStatus(status)
     action = Quality.compositeStatus(FAILED, quality)

     _logHistoryItem(action, showid, season, epNum, quality, release, provider)
diff --git a/sickbeard/image_cache.py b/sickbeard/image_cache.py
index e3c5c0f0..5ebb13d6 100644
--- a/sickbeard/image_cache.py
+++ b/sickbeard/image_cache.py
@@ -28,11 +28,11 @@ from sickbeard.metadata.generic import GenericMetadata
 from lib.hachoir_parser import createParser
 from lib.hachoir_metadata import extractMetadata

+
 class ImageCache:
-
     def __init__(self):
         pass
-    
+
     def _cache_dir(self):
         """
         Builds up the full path to the image cache directory
@@ -94,7 +94,7 @@ class ImageCache:
         Returns true if a cached poster exists for the given indexer id
         """
         poster_path = self.poster_path(indexer_id)
-        logger.log(u"Checking if file "+str(poster_path)+" exists", logger.DEBUG)
+        logger.log(u"Checking if file " + str(poster_path) + " exists", logger.DEBUG)
         return ek.ek(os.path.isfile, poster_path)

     def has_banner(self, indexer_id):
@@ -102,7 +102,7 @@ class ImageCache:
         Returns true if a cached banner exists for the given indexer id
         """
         banner_path = self.banner_path(indexer_id)
-        logger.log(u"Checking if file "+str(banner_path)+" exists", logger.DEBUG)
+        logger.log(u"Checking if file " + str(banner_path) + " exists", logger.DEBUG)
         return ek.ek(os.path.isfile, banner_path)

     def has_poster_thumbnail(self, indexer_id):
@@ -110,7 +110,7 @@ class ImageCache:
         Returns true if a cached poster thumbnail exists for the given indexer id
         """
         poster_thumb_path = self.poster_thumb_path(indexer_id)
-        logger.log(u"Checking if file "+str(poster_thumb_path)+" exists", logger.DEBUG)
+        logger.log(u"Checking if file " + str(poster_thumb_path) + " exists", logger.DEBUG)
         return ek.ek(os.path.isfile, poster_thumb_path)

     def has_banner_thumbnail(self, indexer_id):
@@ -118,7 +118,7 @@ class ImageCache:
         Returns true if a cached banner exists for the given indexer id
         """
         banner_thumb_path = self.banner_thumb_path(indexer_id)
-        logger.log(u"Checking if file "+str(banner_thumb_path)+" exists", logger.DEBUG)
+        logger.log(u"Checking if file " + str(banner_thumb_path) + " exists", logger.DEBUG)
         return ek.ek(os.path.isfile, banner_thumb_path)

@@ -126,7 +126,7 @@ class ImageCache:
     POSTER = 2
     BANNER_THUMB = 3
     POSTER_THUMB = 4
-    
+
     def which_type(self, path):
         """
         Analyzes the image provided and attempts to determine whether it is a poster or banner.
@@ -137,7 +137,7 @@ class ImageCache:
         """

         if not ek.ek(os.path.isfile, path):
-            logger.log(u"Couldn't check the type of "+str(path)+" cause it doesn't exist", logger.WARNING)
+            logger.log(u"Couldn't check the type of " + str(path) + " cause it doesn't exist", logger.WARNING)
             return None

         # use hachoir to parse the image for us
@@ -145,24 +145,24 @@ class ImageCache:
         img_metadata = extractMetadata(img_parser)

         if not img_metadata:
-            logger.log(u"Unable to get metadata from "+str(path)+", not using your existing image", logger.DEBUG)
+            logger.log(u"Unable to get metadata from " + str(path) + ", not using your existing image", logger.DEBUG)
             return None
-        
-        img_ratio = float(img_metadata.get('width'))/float(img_metadata.get('height'))
+
+        img_ratio = float(img_metadata.get('width')) / float(img_metadata.get('height'))

         img_parser.stream._input.close()

         # most posters are around 0.68 width/height ratio (eg. 680/1000)
         if 0.55 < img_ratio < 0.8:
             return self.POSTER
-        
+
         # most banners are around 5.4 width/height ratio (eg. 758/140)
         elif 5 < img_ratio < 6:
             return self.BANNER
         else:
-            logger.log(u"Image has size ratio of "+str(img_ratio)+", unknown type", logger.WARNING)
+            logger.log(u"Image has size ratio of " + str(img_ratio) + ", unknown type", logger.WARNING)
             return None
-    
+
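Note: which_type() classifies purely by aspect ratio, per the comments above: a 680x1000 poster gives 0.68 (inside 0.55-0.8), a 758x140 banner gives roughly 5.41 (inside 5-6), and anything else is logged and rejected. With hypothetical file paths:

    cache = ImageCache()
    cache.which_type("/cache/680x1000.jpg")   # -> ImageCache.POSTER
    cache.which_type("/cache/758x140.jpg")    # -> ImageCache.BANNER
    cache.which_type("/cache/1280x720.jpg")   # ratio ~1.78 -> None (unknown type)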
     def _cache_image_from_file(self, image_path, img_type, indexer_id):
         """
         Takes the image provided and copies it to the cache folder
@@ -180,21 +180,21 @@ class ImageCache:
         elif img_type == self.BANNER:
             dest_path = self.banner_path(indexer_id)
         else:
-            logger.log(u"Invalid cache image type: "+str(img_type), logger.ERROR)
+            logger.log(u"Invalid cache image type: " + str(img_type), logger.ERROR)
             return False

         # make sure the cache folder exists before we try copying to it
         if not ek.ek(os.path.isdir, self._cache_dir()):
-            logger.log(u"Image cache dir didn't exist, creating it at "+str(self._cache_dir()))
+            logger.log(u"Image cache dir didn't exist, creating it at " + str(self._cache_dir()))
             ek.ek(os.makedirs, self._cache_dir())

         if not ek.ek(os.path.isdir, self._thumbnails_dir()):
-            logger.log(u"Thumbnails cache dir didn't exist, creating it at "+str(self._thumbnails_dir()))
+            logger.log(u"Thumbnails cache dir didn't exist, creating it at " + str(self._thumbnails_dir()))
             ek.ek(os.makedirs, self._thumbnails_dir())

-        logger.log(u"Copying from "+image_path+" to "+dest_path)
+        logger.log(u"Copying from " + image_path + " to " + dest_path)
         helpers.copyFile(image_path, dest_path)
-        
+
         return True

     def _cache_image_from_indexer(self, show_obj, img_type):
@@ -221,7 +221,7 @@ class ImageCache:
             img_type_name = 'banner_thumb'
             dest_path = self.banner_thumb_path(show_obj.indexerid)
         else:
-            logger.log(u"Invalid cache image type: "+str(img_type), logger.ERROR)
+            logger.log(u"Invalid cache image type: " + str(img_type), logger.ERROR)
             return False

         # retrieve the image from indexer using the generic metadata class
@@ -231,7 +231,7 @@ class ImageCache:

         result = metadata_generator._write_image(img_data, dest_path)

         return result
-    
+
     def fill_cache(self, show_obj):
         """
         Caches all images for the given show. Copies them from the show dir if possible, or
@@ -240,45 +240,51 @@ class ImageCache:

         show_obj: TVShow object to cache images for
         """

-        logger.log(u"Checking if we need any cache images for show "+str(show_obj.indexerid), logger.DEBUG)
+        logger.log(u"Checking if we need any cache images for show " + str(show_obj.indexerid), logger.DEBUG)

         # check if the images are already cached or not
         need_images = {self.POSTER: not self.has_poster(show_obj.indexerid),
                        self.BANNER: not self.has_banner(show_obj.indexerid),
                        self.POSTER_THUMB: not self.has_poster_thumbnail(show_obj.indexerid),
                        self.BANNER_THUMB: not self.has_banner_thumbnail(show_obj.indexerid)}
-        
-        if not need_images[self.POSTER] and not need_images[self.BANNER] and not need_images[self.POSTER_THUMB] and not need_images[self.BANNER_THUMB]:
+
+        if not need_images[self.POSTER] and not need_images[self.BANNER] and not need_images[self.POSTER_THUMB] and not \
+                need_images[self.BANNER_THUMB]:
             logger.log(u"No new cache images needed, not retrieving new ones")
             return
-        
+
         # check the show dir for poster or banner images and use them
-        if need_images[self.POSTER] or need_images[self.BANNER]: 
+        if need_images[self.POSTER] or need_images[self.BANNER]:
             try:
                 for cur_provider in sickbeard.metadata_provider_dict.values():
-                    logger.log(u"Checking if we can use the show image from the "+cur_provider.name+" metadata", logger.DEBUG)
+                    logger.log(u"Checking if we can use the show image from the " + cur_provider.name + " metadata",
+                               logger.DEBUG)
                     if ek.ek(os.path.isfile, cur_provider.get_poster_path(show_obj)):
                         cur_file_name = os.path.abspath(cur_provider.get_poster_path(show_obj))
                         cur_file_type = self.which_type(cur_file_name)
-                        
+
                         if cur_file_type == None:
-                            logger.log(u"Unable to retrieve image type, not using the image from "+str(cur_file_name), logger.WARNING)
+                            logger.log(u"Unable to retrieve image type, not using the image from " + str(cur_file_name),
+                                       logger.WARNING)
                             continue
-                        
-                        logger.log(u"Checking if image "+cur_file_name+" (type "+str(cur_file_type)+" needs metadata: "+str(need_images[cur_file_type]), logger.DEBUG)
-                        
+
+                        logger.log(u"Checking if image " + cur_file_name + " (type " + str(
+                            cur_file_type) + " needs metadata: " + str(need_images[cur_file_type]), logger.DEBUG)
+
                         if cur_file_type in need_images and need_images[cur_file_type]:
-                            logger.log(u"Found an image in the show dir that doesn't exist in the cache, caching it: "+cur_file_name+", type "+str(cur_file_type), logger.DEBUG)
+                            logger.log(
+                                u"Found an image in the show dir that doesn't exist in the cache, caching it: " + cur_file_name + ", type " + str(
+                                    cur_file_type), logger.DEBUG)
                             self._cache_image_from_file(cur_file_name, cur_file_type, show_obj.indexerid)
                             need_images[cur_file_type] = False
             except exceptions.ShowDirNotFoundException:
                 logger.log(u"Unable to search for images in show dir because it doesn't exist", logger.WARNING)
-        
+
         # download from indexer for missing ones
         for cur_image_type in [self.POSTER, self.BANNER, self.POSTER_THUMB, self.BANNER_THUMB]:
-            logger.log(u"Seeing if we still need an image of type "+str(cur_image_type)+": "+str(need_images[cur_image_type]), logger.DEBUG)
+            logger.log(u"Seeing if we still need an image of type " + str(cur_image_type) + ": " + str(
+                need_images[cur_image_type]), logger.DEBUG)
             if cur_image_type in need_images and need_images[cur_image_type]:
                 self._cache_image_from_indexer(show_obj, cur_image_type)
-
         logger.log(u"Done cache check")
100644 --- a/sickbeard/indexers/__init__.py +++ b/sickbeard/indexers/__init__.py @@ -16,20 +16,4 @@ # You should have received a copy of the GNU General Public License # along with Sick Beard. If not, see <http://www.gnu.org/licenses/>. -__all__ = ["generic","indexer_api","indexer_exceptions"] - -import indexer_api, indexer_exceptions - -def getClientModule(name): - - name = name.lower() - prefix = "sickbeard.indexers." - - return __import__(prefix+name, fromlist=__all__) - -def getClientIstance(name): - - module = getClientModule(name) - className = module.__class__.__name__ - - return getattr(module, className) \ No newline at end of file +from . import indexer_api, indexer_exceptions \ No newline at end of file diff --git a/sickbeard/indexers/generic.py b/sickbeard/indexers/generic.py deleted file mode 100644 index 90896e87..00000000 --- a/sickbeard/indexers/generic.py +++ /dev/null @@ -1,66 +0,0 @@ -# Author: Nic Wolfe -# URL: http://code.google.com/p/sickbeard/ -# -# This file is part of Sick Beard. -# -# Sick Beard is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Sick Beard is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>. -import os - -import sickbeard - -class GenericIndexer(object): - def __init__(self, indexer): - - INDEXER_TVDB = 'Tvdb' - INDEXER_TVRAGE = 'TVRage' - - INDEXERS = {} - INDEXERS[INDEXER_TVDB] = 'theTVDB' - INDEXERS[INDEXER_TVRAGE] = 'TVRage' - - INDEXER_API_KEY = {} - INDEXER_API_KEY[INDEXER_TVDB] = '9DAF49C96CBF8DAC' - INDEXER_API_KEY[INDEXER_TVRAGE] = 'Uhewg1Rr0o62fvZvUIZt' - - INDEXER_BASEURL = {} - INDEXER_BASEURL[INDEXER_TVDB] = 'http://thetvdb.com/api/' + INDEXER_API_KEY[INDEXER_TVDB] + '/series/' - INDEXER_BASEURL[INDEXER_TVRAGE] = 'http://tvrage.com/showinfo?key=' + INDEXER_API_KEY[INDEXER_TVRAGE] + 'sid=' - - INDEXER_API_PARMS = {} - INDEXER_API_PARMS[INDEXER_TVDB] = {'apikey': INDEXER_API_KEY[INDEXER_TVDB], - 'language': 'en', - 'useZip': True} - - INDEXER_API_PARMS[INDEXER_TVRAGE] = {'apikey': INDEXER_API_KEY[INDEXER_TVRAGE], - 'language': 'en'} - - self.config = {} - self.config['valid_languages'] = [ - "da", "fi", "nl", "de", "it", "es", "fr","pl", "hu","el","tr", - "ru","he","ja","pt","zh","cs","sl", "hr","ko","en","sv","no"] - - self.config['langabbv_to_id'] = {'el': 20, 'en': 7, 'zh': 27, - 'it': 15, 'cs': 28, 'es': 16, 'ru': 22, 'nl': 13, 'pt': 26, 'no': 9, - 'tr': 21, 'pl': 18, 'fr': 17, 'hr': 31, 'de': 14, 'da': 10, 'fi': 11, - 'hu': 19, 'ja': 25, 'he': 24, 'ko': 32, 'sv': 8, 'sl': 30} - - self.indexers = [x for x in INDEXERS] - - if indexer in INDEXERS: - self.base_url = INDEXER_BASEURL[indexer] - self.api_parms = INDEXER_API_PARMS[indexer] - self.name = INDEXERS[indexer] - - if sickbeard.CACHE_DIR: - self.cache = os.path.join(sickbeard.CACHE_DIR, indexer) \ No newline at end of file diff --git a/sickbeard/indexers/indexer_api.py b/sickbeard/indexers/indexer_api.py index 0ed17055..9a41753b 100644 --- a/sickbeard/indexers/indexer_api.py +++ b/sickbeard/indexers/indexer_api.py @@ -16,30 +16,34 @@ # You should have received a copy of the GNU General Public License # along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
import os -import datetime - import sickbeard -import generic -from indexer_exceptions import indexer_attributenotfound -from lib.tvdb_api.tvdb_api import Tvdb -from lib.tvrage_api.tvrage_api import TVRage +from indexer_config import initConfig, indexerConfig -class indexerApi(generic.GenericIndexer): + +class indexerApi(object): def __init__(self, indexer=None, *args, **kwargs): - generic.GenericIndexer.__init__(self, indexer) + self._wrapped = object + self.config = initConfig + self.indexers = {k: v['name'] for k, v in indexerConfig.items()} - if indexer in self.indexers: - self.api_parms.update(**kwargs) + if indexer in indexerConfig: + self.name = indexerConfig[indexer]['name'] + self.config = indexerConfig[indexer] - if sickbeard.CACHE_DIR: - self.api_parms['cache'] = self.cache + # set cache if exists + if sickbeard.CACHE_DIR: indexerConfig[indexer]['api_params']['cache'] = os.path.join(sickbeard.CACHE_DIR, + self.name) - # wrap the indexer API object and return it back - self._wrapped = eval(indexer)(*args, **self.api_parms) + if kwargs: + # update API params + indexerConfig[indexer]['api_params'].update(**kwargs) + + # wrap the indexer API object and return it back + self._wrapped = indexerConfig[indexer]['module'](**indexerConfig[indexer]['api_params']) def __getattr__(self, attr): return getattr(self._wrapped, attr) def __getitem__(self, attr): - return self._wrapped.__getitem__(attr) \ No newline at end of file + return self._wrapped.__getitem__(attr) diff --git a/sickbeard/indexers/indexer_config.py b/sickbeard/indexers/indexer_config.py new file mode 100644 index 00000000..1330a1e8 --- /dev/null +++ b/sickbeard/indexers/indexer_config.py @@ -0,0 +1,51 @@ +from lib.tvdb_api.tvdb_api import Tvdb +from lib.tvrage_api.tvrage_api import TVRage + +INDEXER_TVDB = 1 +INDEXER_TVRAGE = 2 + +initConfig = {} +indexerConfig = {} + +initConfig['valid_languages'] = [ + "da", "fi", "nl", "de", "it", "es", "fr", "pl", "hu", "el", "tr", + "ru", "he", "ja", "pt", "zh", "cs", "sl", "hr", "ko", "en", "sv", "no"] + +initConfig['langabbv_to_id'] = { + 'el': 20, 'en': 7, 'zh': 27, + 'it': 15, 'cs': 28, 'es': 16, 'ru': 22, 'nl': 13, 'pt': 26, 'no': 9, + 'tr': 21, 'pl': 18, 'fr': 17, 'hr': 31, 'de': 14, 'da': 10, 'fi': 11, + 'hu': 19, 'ja': 25, 'he': 24, 'ko': 32, 'sv': 8, 'sl': 30} + +indexerConfig[INDEXER_TVDB] = { + 'id': INDEXER_TVDB, + 'name': 'theTVDB', + 'module': Tvdb, + 'api_params': {'apikey': '9DAF49C96CBF8DAC', + 'language': 'en', + 'useZip': True + }, +} + +indexerConfig[INDEXER_TVRAGE] = { + 'id': INDEXER_TVRAGE, + 'name': 'TVRage', + 'module': TVRage, + 'api_params': {'apikey': 'Uhewg1Rr0o62fvZvUIZt', + 'language': 'en' + }, +} + +# TVDB Indexer Settings +indexerConfig[INDEXER_TVDB]['xem_origin'] = 'tvdb' +indexerConfig[INDEXER_TVDB]['icon'] = 'thetvdb16.png' +indexerConfig[INDEXER_TVDB]['scene_url'] = 'http://midgetspy.github.com/sb_tvdb_scene_exceptions/exceptions.txt' +indexerConfig[INDEXER_TVDB]['show_url'] = 'http://thetvdb.com/?tab=series&id=' +indexerConfig[INDEXER_TVDB]['base_url'] = 'http://thetvdb.com/api/%(apikey)s/series/' % indexerConfig[INDEXER_TVDB]['api_params'] + +# TVRAGE Indexer Settings +indexerConfig[INDEXER_TVRAGE]['xem_origin'] = 'rage' +indexerConfig[INDEXER_TVRAGE]['icon'] = 'tvrage16.png' +indexerConfig[INDEXER_TVRAGE]['scene_url'] = 'http://raw.github.com/echel0n/sb_tvrage_scene_exceptions/master/exceptions.txt' +indexerConfig[INDEXER_TVRAGE]['show_url'] = 'http://tvrage.com/shows/id-' +indexerConfig[INDEXER_TVRAGE]['base_url'] =
'http://tvrage.com/showinfo.php?key=%(apikey)s&sid=' % indexerConfig[INDEXER_TVRAGE]['api_params'] \ No newline at end of file diff --git a/sickbeard/indexers/indexer_exceptions.py b/sickbeard/indexers/indexer_exceptions.py index 5615059e..490e0882 100644 --- a/sickbeard/indexers/indexer_exceptions.py +++ b/sickbeard/indexers/indexer_exceptions.py @@ -19,13 +19,13 @@ from lib.tvdb_api.tvdb_exceptions import \ tvdb_seasonnotfound, tvdb_shownotfound, tvdb_userabort indexerExcepts = ["indexer_exception", "indexer_error", "indexer_userabort", "indexer_shownotfound", -"indexer_seasonnotfound", "indexer_episodenotfound", "indexer_attributenotfound"] + "indexer_seasonnotfound", "indexer_episodenotfound", "indexer_attributenotfound"] tvdbExcepts = ["tvdb_exception", "tvdb_error", "tvdb_userabort", "tvdb_shownotfound", -"tvdb_seasonnotfound", "tvdb_episodenotfound", "tvdb_attributenotfound"] + "tvdb_seasonnotfound", "tvdb_episodenotfound", "tvdb_attributenotfound"] tvrageExcepts = ["tvdb_exception", "tvrage_error", "tvrage_userabort", "tvrage_shownotfound", -"tvrage_seasonnotfound", "tvrage_episodenotfound", "tvrage_attributenotfound"] + "tvrage_seasonnotfound", "tvrage_episodenotfound", "tvrage_attributenotfound"] # link API exceptions to our exception handler indexer_exception = tvdb_exception, tvrage_exception diff --git a/sickbeard/indexers/test/test.py b/sickbeard/indexers/test/test.py index 3973f678..bd8f5e13 100644 --- a/sickbeard/indexers/test/test.py +++ b/sickbeard/indexers/test/test.py @@ -3,33 +3,99 @@ from __future__ import with_statement import unittest import sys +import datetime import os.path +import string + sys.path.append(os.path.abspath('..')) sys.path.append(os.path.abspath('../../../lib')) -from sickbeard.indexers.indexer_api import indexerApi -from sickbeard.indexers.indexer_exceptions import indexer_exception +import sickbeard +import itertools + +from itertools import chain +from sickbeard import classes + class APICheck(unittest.TestCase): - indexer_id = 81189 - indexer = 'Tvdb' + indexer = 0 + + # print the id of every indexer we know about + for i in sickbeard.indexerApi().indexers: + print i + + global keywords, nameUTF8 + + name = 'american dad' lang = "en" - # Set our common indexer_api options here - INDEXER_API_PARMS = {'indexer': indexer} - lindexer_api_parms = INDEXER_API_PARMS.copy() + if not lang or lang == 'null': + lang = "en" - try: - lang_id = indexerApi().config['langabbv_to_id'][lang] - t = indexerApi(cache=True, **lindexer_api_parms) - myEp = t[indexer_id] + results = [] - if getattr(myEp, 'seriesname', None) is not None: - print "FOUND" + nameUTF8 = name.encode('utf-8') - except indexer_exception as e: - print e - pass + # Use each word in the show's name as a possible search term + keywords = nameUTF8.split(' ') -if __name__ == "__main__": - unittest.main() \ No newline at end of file + # Insert the whole show's name as the first search term so best results are first + # ex: keywords = ['Some Show Name', 'Some', 'Show', 'Name'] + if len(keywords) > 1: + keywords.insert(0, nameUTF8) + + + # check for indexer preset + indexers = [int(indexer)] + if 0 in indexers: + indexers = sickbeard.indexerApi().indexers + + # Query Indexers for each search term and build the list of results + for i in indexers: + def searchShows(i): + results = [] + + lINDEXER_API_PARMS = {'indexer': i} + lINDEXER_API_PARMS['custom_ui'] = classes.AllShowsListUI + t = sickbeard.indexerApi(**lINDEXER_API_PARMS) + + for searchTerm in keywords: + try: + search = t[searchTerm] + if
isinstance(search, dict): search = [search] + + # add search results + result = [ + [t.name, t.config['id'], t.config["show_url"], int(x['id']), x['seriesname'], x['firstaired']] + for x in search if nameUTF8.lower() in x['seriesname'].lower()] + + # see if we have any matches + if len(result) > 0: + # add result to list of found shows + results += result + + # search through result to see if we have a exact match + for show in result: + # cleanup the series name + seriesname = show[4].encode('utf-8').translate(None, string.punctuation) + + # check if we got a exact match + if nameUTF8.lower() == seriesname.lower(): + return results + + except Exception, e: + continue + + # finished searching a indexer so return the results + return results + + # search indexers for shows + results += searchShows(i) + + # remove duplicates + results = list(results for results, _ in itertools.groupby(results)) + print results + +if __name__ == "__main__": + unittest.main() \ No newline at end of file diff --git a/sickbeard/indexers/test/test_lib.py b/sickbeard/indexers/test/test_lib.py index 0357c389..1bc2fb94 100644 --- a/sickbeard/indexers/test/test_lib.py +++ b/sickbeard/indexers/test/test_lib.py @@ -25,6 +25,7 @@ import sqlite3 import sys import os.path + sys.path.append(os.path.abspath('..')) sys.path.append(os.path.abspath('../lib')) @@ -43,7 +44,6 @@ TESTDIR = os.path.abspath('.') TESTDBNAME = "sickbeard.db" TESTCACHEDBNAME = "cache.db" - SHOWNAME = u"show name" SEASON = 4 EPISODE = 2 @@ -78,9 +78,9 @@ sickbeard.NAMING_PATTERN = '' sickbeard.NAMING_ABD_PATTERN = '' sickbeard.NAMING_MULTI_EP = 1 - sickbeard.PROVIDER_ORDER = ["sick_beard_index"] -sickbeard.newznabProviderList = providers.getNewznabProviderList("Sick Beard Index|http://lolo.sickbeard.com/|0|5030,5040|0!!!NZBs.org|http://nzbs.org/||5030,5040,5070,5090|0!!!Usenet-Crawler|http://www.usenet-crawler.com/||5030,5040|0") +sickbeard.newznabProviderList = providers.getNewznabProviderList( + "Sick Beard Index|http://lolo.sickbeard.com/|0|5030,5040|0!!!NZBs.org|http://nzbs.org/||5030,5040,5070,5090|0!!!Usenet-Crawler|http://www.usenet-crawler.com/||5030,5040|0") sickbeard.providerList = providers.makeProviderList() sickbeard.PROG_DIR = os.path.abspath('..') @@ -95,6 +95,7 @@ sickbeard.logger.sb_log_instance.initLogging(False) #================= def _dummy_saveConfig(): return True + # this overrides the sickbeard save_config which gets called during a db upgrade # this might be considered a hack mainDB.sickbeard.save_config = _dummy_saveConfig @@ -104,6 +105,7 @@ mainDB.sickbeard.save_config = _dummy_saveConfig def _fake_specifyEP(self, season, episode): pass + sickbeard.tv.TVEpisode.specifyEpisode = _fake_specifyEP @@ -125,14 +127,12 @@ class SickbeardTestDBCase(unittest.TestCase): class TestDBConnection(db.DBConnection, object): - def __init__(self, dbFileName=TESTDBNAME): dbFileName = os.path.join(TESTDIR, dbFileName) super(TestDBConnection, self).__init__(dbFileName) class TestCacheDBConnection(TestDBConnection, object): - def __init__(self, providerName): db.DBConnection.__init__(self, os.path.join(TESTDIR, TESTCACHEDBNAME)) @@ -210,6 +210,7 @@ def setUp_test_show_dir(): def tearDown_test_show_dir(): shutil.rmtree(SHOWDIR) + tearDown_test_db() if __name__ == '__main__': diff --git a/sickbeard/logger.py b/sickbeard/logger.py index 99f1df8a..32156b72 100644 --- a/sickbeard/logger.py +++ b/sickbeard/logger.py @@ -39,27 +39,26 @@ ERROR = logging.ERROR WARNING = logging.WARNING MESSAGE = logging.INFO DEBUG = logging.DEBUG -DB = 5 +DB = 5
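# ---------------------------------------------------------------------------
# Illustration (not part of the patch): logger.py keeps a custom 'DB' level
# at 5, one notch below logging.DEBUG (10), so database chatter can still be
# filtered out even when debug logging is enabled. A minimal, self-contained
# sketch of that technique; the 'db_level_demo' logger name is hypothetical.
import logging

DB = 5
logging.addLevelName(DB, 'DB')               # same registration the patch performs

logging.basicConfig(format='%(levelname)s %(message)s')
demo_log = logging.getLogger('db_level_demo')

demo_log.setLevel(logging.DEBUG)
demo_log.log(DB, 'suppressed: DB sits below the DEBUG threshold')
demo_log.setLevel(DB)
demo_log.log(DB, 'emitted: DB records pass once the level drops to 5')
# ---------------------------------------------------------------------------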
reverseNames = {u'ERROR': ERROR, u'WARNING': WARNING, u'INFO': MESSAGE, u'DEBUG': DEBUG, - u'DB' : DB} + u'DB': DB} class SBRotatingLogHandler(object): - def __init__(self, log_file, num_files, num_bytes): self.num_files = num_files self.num_bytes = num_bytes - + self.log_file = log_file self.log_file_path = log_file self.cur_handler = None self.writes_since_check = 0 - + self.console_logging = False self.log_lock = threading.Lock() @@ -74,7 +73,7 @@ class SBRotatingLogHandler(object): sb_logger.removeHandler(handler) sub_logger.removeHandler(handler) - imdb_logger.removeHandler(handler) + imdb_logger.removeHandler(handler) handler.flush() handler.close() @@ -83,17 +82,17 @@ class SBRotatingLogHandler(object): if consoleLogging: self.console_logging = consoleLogging - + old_handler = None - + # get old handler in case we want to close it if self.cur_handler: old_handler = self.cur_handler else: - + #Add a new logging level DB - logging.addLevelName(5,'DB') - + logging.addLevelName(5, 'DB') + # only start consoleLogging on first initialize if self.console_logging: # define a Handler which writes INFO messages or higher to the sys.stderr @@ -102,19 +101,21 @@ class SBRotatingLogHandler(object): console.setLevel(logging.INFO) # set a format which is simpler for console use - console.setFormatter(DispatchingFormatter({'sickbeard' : logging.Formatter('%(asctime)s %(levelname)s::%(message)s', '%H:%M:%S'), - 'subliminal' : logging.Formatter('%(asctime)s %(levelname)s::SUBLIMINAL :: %(message)s', '%H:%M:%S'), - 'imdbpy' : logging.Formatter('%(asctime)s %(levelname)s::IMDBPY :: %(message)s', '%H:%M:%S') - }, - logging.Formatter('%(message)s'),)) + console.setFormatter(DispatchingFormatter( + {'sickbeard': logging.Formatter('%(asctime)s %(levelname)s::%(message)s', '%H:%M:%S'), + 'subliminal': logging.Formatter('%(asctime)s %(levelname)s::SUBLIMINAL :: %(message)s', + '%H:%M:%S'), + 'imdbpy': logging.Formatter('%(asctime)s %(levelname)s::IMDBPY :: %(message)s', '%H:%M:%S') + }, + logging.Formatter('%(message)s'), )) # add the handler to the root logger - logging.getLogger('sickbeard').addHandler(console) + logging.getLogger('sickbeard').addHandler(console) logging.getLogger('subliminal').addHandler(console) logging.getLogger('imdbpy').addHandler(console) self.log_file_path = os.path.join(sickbeard.LOG_DIR, self.log_file) - + self.cur_handler = self._config_handler() logging.getLogger('sickbeard').addHandler(self.cur_handler) logging.getLogger('subliminal').addHandler(self.cur_handler) @@ -127,28 +128,30 @@ class SBRotatingLogHandler(object): # already logging in new log folder, close the old handler if old_handler: self.close_log(old_handler) -# old_handler.flush() -# old_handler.close() -# sb_logger = logging.getLogger('sickbeard') -# sub_logger = logging.getLogger('subliminal') -# imdb_logger = logging.getLogger('imdbpy') -# sb_logger.removeHandler(old_handler) -# subli_logger.removeHandler(old_handler) -# imdb_logger.removeHandler(old_handler) + # old_handler.flush() + # old_handler.close() + # sb_logger = logging.getLogger('sickbeard') + # sub_logger = logging.getLogger('subliminal') + # imdb_logger = logging.getLogger('imdbpy') + # sb_logger.removeHandler(old_handler) + # subli_logger.removeHandler(old_handler) + # imdb_logger.removeHandler(old_handler) def _config_handler(self): """ Configure a file handler to log at file_name and return it. 
""" - + file_handler = logging.FileHandler(self.log_file_path, encoding='utf-8') file_handler.setLevel(DB) - file_handler.setFormatter(DispatchingFormatter({'sickbeard' : logging.Formatter('%(asctime)s %(levelname)-8s %(message)s', '%Y-%m-%d %H:%M:%S'), - 'subliminal' : logging.Formatter('%(asctime)s %(levelname)-8s SUBLIMINAL :: %(message)s', '%Y-%m-%d %H:%M:%S'), - 'imdbpy' : logging.Formatter('%(asctime)s %(levelname)-8s IMDBPY :: %(message)s', '%Y-%m-%d %H:%M:%S') - }, - logging.Formatter('%(message)s'),)) - + file_handler.setFormatter(DispatchingFormatter( + {'sickbeard': logging.Formatter('%(asctime)s %(levelname)-8s %(message)s', '%Y-%m-%d %H:%M:%S'), + 'subliminal': logging.Formatter('%(asctime)s %(levelname)-8s SUBLIMINAL :: %(message)s', + '%Y-%m-%d %H:%M:%S'), + 'imdbpy': logging.Formatter('%(asctime)s %(levelname)-8s IMDBPY :: %(message)s', '%Y-%m-%d %H:%M:%S') + }, + logging.Formatter('%(message)s'), )) + return file_handler def _log_file_name(self, i): @@ -160,7 +163,7 @@ class SBRotatingLogHandler(object): """ return self.log_file_path + ('.' + str(i) if i else '') - + def _num_logs(self): """ Scans the log folder and figures out how many log files there are already on disk @@ -174,15 +177,15 @@ class SBRotatingLogHandler(object): return cur_log - 1 def _rotate_logs(self): - + sb_logger = logging.getLogger('sickbeard') sub_logger = logging.getLogger('subliminal') imdb_logger = logging.getLogger('imdbpy') - + # delete the old handler if self.cur_handler: self.close_log() - + # rename or delete all the old log files for i in range(self._num_logs(), -1, -1): cur_file_name = self._log_file_name(i) @@ -193,12 +196,12 @@ class SBRotatingLogHandler(object): os.rename(cur_file_name, self._log_file_name(i + 1)) except OSError: pass - + # the new log handler will always be on the un-numbered .log file new_file_handler = self._config_handler() - + self.cur_handler = new_file_handler - + sb_logger.addHandler(new_file_handler) sub_logger.addHandler(new_file_handler) imdb_logger.addHandler(new_file_handler) @@ -254,7 +257,6 @@ class SBRotatingLogHandler(object): class DispatchingFormatter: - def __init__(self, formatters, default_formatter): self._formatters = formatters self._default_formatter = default_formatter @@ -266,11 +268,14 @@ class DispatchingFormatter: sb_log_instance = SBRotatingLogHandler('sickbeard.log', NUM_LOGS, LOG_SIZE) + def log(toLog, logLevel=MESSAGE): sb_log_instance.log(toLog, logLevel) + def log_error_and_exit(error_msg): sb_log_instance.log_error_and_exit(error_msg) - + + def close(): sb_log_instance.close_log() \ No newline at end of file diff --git a/sickbeard/metadata/__init__.py b/sickbeard/metadata/__init__.py index c368922f..2a6d405c 100644 --- a/sickbeard/metadata/__init__.py +++ b/sickbeard/metadata/__init__.py @@ -21,26 +21,29 @@ __all__ = ['generic', 'helpers', 'xbmc', 'xbmc_12plus', 'mediabrowser', 'ps3', ' import sys import xbmc, xbmc_12plus, mediabrowser, ps3, wdtv, tivo + def available_generators(): return filter(lambda x: x not in ('generic', 'helpers'), __all__) + def _getMetadataModule(name): name = name.lower() prefix = "sickbeard.metadata." 
- if name in __all__ and prefix+name in sys.modules: - return sys.modules[prefix+name] + if name in __all__ and prefix + name in sys.modules: + return sys.modules[prefix + name] else: return None -def _getMetadataClass(name): +def _getMetadataClass(name): module = _getMetadataModule(name) - + if not module: return None - + return module.metadata_class() + def get_metadata_generator_dict(): result = {} for cur_generator_id in available_generators(): @@ -48,6 +51,6 @@ def get_metadata_generator_dict(): if not cur_generator: continue result[cur_generator.name] = cur_generator - + return result diff --git a/sickbeard/metadata/generic.py b/sickbeard/metadata/generic.py index d90c661d..16b906c7 100644 --- a/sickbeard/metadata/generic.py +++ b/sickbeard/metadata/generic.py @@ -34,7 +34,7 @@ from sickbeard.exceptions import ex from sickbeard.show_name_helpers import allPossibleShowNames from lib.tmdb_api.tmdb_api import TMDB -from sickbeard.indexers import indexer_api, indexer_exceptions + class GenericMetadata(): """ @@ -88,7 +88,9 @@ class GenericMetadata(): self.season_all_banner = season_all_banner def get_config(self): - config_list = [self.show_metadata, self.episode_metadata, self.fanart, self.poster, self.banner, self.episode_thumbnails, self.season_posters, self.season_banners, self.season_all_poster, self.season_all_banner] + config_list = [self.show_metadata, self.episode_metadata, self.fanart, self.poster, self.banner, + self.episode_thumbnails, self.season_posters, self.season_banners, self.season_all_poster, + self.season_all_banner] return '|'.join([str(int(x)) for x in config_list]) def get_id(self): @@ -161,12 +163,14 @@ class GenericMetadata(): def _has_season_all_poster(self, show_obj): result = ek.ek(os.path.isfile, self.get_season_all_poster_path(show_obj)) - logger.log(u"Checking if " + self.get_season_all_poster_path(show_obj) + " exists: " + str(result), logger.DEBUG) + logger.log(u"Checking if " + self.get_season_all_poster_path(show_obj) + " exists: " + str(result), + logger.DEBUG) return result def _has_season_all_banner(self, show_obj): result = ek.ek(os.path.isfile, self.get_season_all_banner_path(show_obj)) - logger.log(u"Checking if " + self.get_season_all_banner_path(show_obj) + " exists: " + str(result), logger.DEBUG) + logger.log(u"Checking if " + self.get_season_all_banner_path(show_obj) + " exists: " + str(result), + logger.DEBUG) return result def get_show_file_path(self, show_obj): @@ -264,7 +268,8 @@ class GenericMetadata(): def create_episode_metadata(self, ep_obj, force=False): if self.episode_metadata and ep_obj and (not self._has_episode_metadata(ep_obj) or force): - logger.log(u"Metadata provider " + self.name + " creating episode metadata for " + ep_obj.prettyName(), logger.DEBUG) + logger.log(u"Metadata provider " + self.name + " creating episode metadata for " + ep_obj.prettyName(), + logger.DEBUG) return self.write_ep_file(ep_obj) return False @@ -288,7 +293,8 @@ class GenericMetadata(): def create_episode_thumb(self, ep_obj): if self.episode_thumbnails and ep_obj and not self._has_episode_thumb(ep_obj): - logger.log(u"Metadata provider " + self.name + " creating episode thumbnail for " + ep_obj.prettyName(), logger.DEBUG) + logger.log(u"Metadata provider " + self.name + " creating episode thumbnail for " + ep_obj.prettyName(), + logger.DEBUG) return self.save_thumbnail(ep_obj) return False @@ -297,7 +303,8 @@ class GenericMetadata(): result = [] for season, episodes in show_obj.episodes.iteritems(): # @UnusedVariable if not 
self._has_season_poster(show_obj, season): - logger.log(u"Metadata provider " + self.name + " creating season posters for " + show_obj.name, logger.DEBUG) + logger.log(u"Metadata provider " + self.name + " creating season posters for " + show_obj.name, + logger.DEBUG) result = result + [self.save_season_posters(show_obj, season)] return all(result) return False @@ -307,20 +314,23 @@ class GenericMetadata(): result = [] for season, episodes in show_obj.episodes.iteritems(): # @UnusedVariable if not self._has_season_banner(show_obj, season): - logger.log(u"Metadata provider " + self.name + " creating season banners for " + show_obj.name, logger.DEBUG) + logger.log(u"Metadata provider " + self.name + " creating season banners for " + show_obj.name, + logger.DEBUG) result = result + [self.save_season_banners(show_obj, season)] return all(result) return False def create_season_all_poster(self, show_obj): if self.season_all_poster and show_obj and not self._has_season_all_poster(show_obj): - logger.log(u"Metadata provider " + self.name + " creating season all poster for " + show_obj.name, logger.DEBUG) + logger.log(u"Metadata provider " + self.name + " creating season all poster for " + show_obj.name, + logger.DEBUG) return self.save_season_all_poster(show_obj) return False def create_season_all_banner(self, show_obj): if self.season_all_banner and show_obj and not self._has_season_all_banner(show_obj): - logger.log(u"Metadata provider " + self.name + " creating season all banner for " + show_obj.name, logger.DEBUG) + logger.log(u"Metadata provider " + self.name + " creating season all banner for " + show_obj.name, + logger.DEBUG) return self.save_season_all_banner(show_obj) return False @@ -349,21 +359,24 @@ class GenericMetadata(): if ep_obj.show.dvdorder != 0: lINDEXER_API_PARMS['dvdorder'] = True - t = indexer_api.indexerApi(**lINDEXER_API_PARMS) + t = sickbeard.indexerApi(**lINDEXER_API_PARMS) indexer_show_obj = t[ep_obj.show.indexerid] - except indexer_exceptions.indexer_shownotfound, e: + except sickbeard.indexer_shownotfound, e: raise exceptions.ShowNotFoundException(e.message) - except indexer_exceptions.indexer_error, e: - logger.log(u"Unable to connect to " + ep_obj.show.indexer + " while creating meta files - skipping - " + ex(e), logger.ERROR) + except sickbeard.indexer_error, e: + logger.log(u"Unable to connect to " + sickbeard.indexerApi( + ep_obj.show.indexer).name + " while creating meta files - skipping - " + ex(e), logger.ERROR) return None # try all included episodes in case some have thumbs and others don't for cur_ep in all_eps: try: myEp = indexer_show_obj[cur_ep.season][cur_ep.episode] - except (indexer_exceptions.indexer_episodenotfound, indexer_exceptions.indexer_seasonnotfound): - logger.log(u"Unable to find episode " + str(cur_ep.season) + "x" + str(cur_ep.episode) + " on " + ep_obj.show.indexer + ".. has it been removed? Should I delete from db?") + except (sickbeard.indexer_episodenotfound, sickbeard.indexer_seasonnotfound): + logger.log(u"Unable to find episode " + str(cur_ep.season) + "x" + str( + cur_ep.episode) + " on " + sickbeard.indexerApi( + ep_obj.show.indexer).name + ".. has it been removed? Should I delete from db?") continue thumb_url = getattr(myEp, 'filename', None) @@ -410,7 +423,8 @@ class GenericMetadata(): nfo_file.close() helpers.chmodAsParent(nfo_file_path) except IOError, e: - logger.log(u"Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? 
" + ex(e), logger.ERROR) + logger.log(u"Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e), + logger.ERROR) return False return True @@ -454,7 +468,8 @@ class GenericMetadata(): nfo_file.close() helpers.chmodAsParent(nfo_file_path) except IOError, e: - logger.log(u"Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e), logger.ERROR) + logger.log(u"Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e), + logger.ERROR) return False return True @@ -580,7 +595,8 @@ class GenericMetadata(): season_poster_file_path = self.get_season_poster_path(show_obj, cur_season) if not season_poster_file_path: - logger.log(u"Path for season " + str(cur_season) + " came back blank, skipping this season", logger.DEBUG) + logger.log(u"Path for season " + str(cur_season) + " came back blank, skipping this season", + logger.DEBUG) continue seasonData = metadata_helpers.getShowImage(season_url) @@ -627,7 +643,8 @@ class GenericMetadata(): season_banner_file_path = self.get_season_banner_path(show_obj, cur_season) if not season_banner_file_path: - logger.log(u"Path for season " + str(cur_season) + " came back blank, skipping this season", logger.DEBUG) + logger.log(u"Path for season " + str(cur_season) + " came back blank, skipping this season", + logger.DEBUG) continue seasonData = metadata_helpers.getShowImage(season_url) @@ -699,7 +716,9 @@ class GenericMetadata(): outFile.close() helpers.chmodAsParent(image_path) except IOError, e: - logger.log(u"Unable to write image to " + image_path + " - are you sure the show folder is writable? " + ex(e), logger.ERROR) + logger.log( + u"Unable to write image to " + image_path + " - are you sure the show folder is writable? 
" + ex(e), + logger.ERROR) return False return True @@ -730,14 +749,16 @@ class GenericMetadata(): if show_obj.dvdorder != 0: lINDEXER_API_PARMS['dvdorder'] = True - t = indexer_api.indexerApi(**lINDEXER_API_PARMS) + t = sickbeard.indexerApi(**lINDEXER_API_PARMS) indexer_show_obj = t[show_obj.indexerid] - except (indexer_exceptions.indexer_error, IOError), e: - logger.log(u"Unable to look up show on " + show_obj.indexer + ", not downloading images: " + ex(e), logger.ERROR) + except (sickbeard.indexer_error, IOError), e: + logger.log(u"Unable to look up show on " + sickbeard.indexerApi( + show_obj.indexer).name + ", not downloading images: " + ex(e), logger.ERROR) return None if image_type not in ('fanart', 'poster', 'banner', 'poster_thumb', 'banner_thumb'): - logger.log(u"Invalid image type " + str(image_type) + ", couldn't find it in the " + show_obj.indexer + " object", logger.ERROR) + logger.log(u"Invalid image type " + str(image_type) + ", couldn't find it in the " + sickbeard.indexerApi( + show_obj.indexer).name + " object", logger.ERROR) return None if image_type == 'poster_thumb': @@ -793,16 +814,17 @@ class GenericMetadata(): if show_obj.dvdorder != 0: lINDEXER_API_PARMS['dvdorder'] = True - t = indexer_api.indexerApi(**lINDEXER_API_PARMS) + t = sickbeard.indexerApi(**lINDEXER_API_PARMS) indexer_show_obj = t[show_obj.indexerid] - except (indexer_exceptions.indexer_error, IOError), e: - logger.log(u"Unable to look up show on " + show_obj.indexer + ", not downloading images: " + ex(e), logger.ERROR) + except (sickbeard.indexer_error, IOError), e: + logger.log(u"Unable to look up show on " + sickbeard.indexerApi( + show_obj.indexer).name + ", not downloading images: " + ex(e), logger.ERROR) return result # if we have no season banners then just finish if getattr(indexer_show_obj, '_banners', None) is None: return result - + if 'season' not in indexer_show_obj['_banners'] or 'season' not in indexer_show_obj['_banners']['season']: return result @@ -845,10 +867,11 @@ class GenericMetadata(): if indexer_lang and not indexer_lang == 'en': lINDEXER_API_PARMS['language'] = indexer_lang - t = indexer_api.indexerApi(**lINDEXER_API_PARMS) + t = sickbeard.indexerApi(**lINDEXER_API_PARMS) indexer_show_obj = t[show_obj.indexerid] - except (indexer_exceptions.indexer_error, IOError), e: - logger.log(u"Unable to look up show on " + show_obj.indexer + ", not downloading images: " + ex(e), logger.ERROR) + except (sickbeard.indexer_error, IOError), e: + logger.log(u"Unable to look up show on " + sickbeard.indexerApi( + show_obj.indexer).name + ", not downloading images: " + ex(e), logger.ERROR) return result # if we have no season banners then just finish @@ -890,17 +913,24 @@ class GenericMetadata(): with ek.ek(open, metadata_path, 'r') as xmlFileObj: showXML = etree.ElementTree(file=xmlFileObj) - if showXML.findtext('title') == None\ - or (showXML.findtext('tvdbid') == None and showXML.findtext('id') == None): + if showXML.findtext('title') == None \ + or (showXML.findtext('tvdbid') == None + and showXML.findtext('id') == None) \ + and showXML.findtext('indexer') == None: logger.log(u"Invalid info in tvshow.nfo (missing name or id):" \ - + str(showXML.findtext('title')) + " " \ - + str(showXML.findtext('indexer')) + " " \ - + str(showXML.findtext('tvdbid')) + " " \ - + str(showXML.findtext('id'))) + + str(showXML.findtext('title')) + " " \ + + str(showXML.findtext('indexer')) + " " \ + + str(showXML.findtext('tvdbid')) + " " \ + + str(showXML.findtext('id'))) return empty_return name = 
showXML.findtext('title') - indexer = showXML.findtext('indexer') + + try: + indexer = int(showXML.findtext('indexer')) + except: + indexer = None + if showXML.findtext('tvdbid') != None: indexer_id = int(showXML.findtext('tvdbid')) elif showXML.findtext('id') != None: @@ -914,7 +944,9 @@ class GenericMetadata(): return empty_return except Exception, e: - logger.log(u"There was an error parsing your existing metadata file: '" + metadata_path + "' error: " + ex(e), logger.WARNING) + logger.log( + u"There was an error parsing your existing metadata file: '" + metadata_path + "' error: " + ex(e), + logger.WARNING) return empty_return return (indexer_id, name, indexer) @@ -931,6 +963,7 @@ class GenericMetadata(): def size_str_to_int(x): return float("inf") if x == 'original' else int(x[1:]) + max_size = max(sizes, key=size_str_to_int) try: diff --git a/sickbeard/metadata/helpers.py b/sickbeard/metadata/helpers.py index 67f9751d..4d1ceeb0 100644 --- a/sickbeard/metadata/helpers.py +++ b/sickbeard/metadata/helpers.py @@ -21,7 +21,6 @@ from sickbeard import logger def getShowImage(url, imgNum=None): - image_data = None # @UnusedVariable if url == None: diff --git a/sickbeard/metadata/mediabrowser.py b/sickbeard/metadata/mediabrowser.py index f882c1bc..e3647298 100644 --- a/sickbeard/metadata/mediabrowser.py +++ b/sickbeard/metadata/mediabrowser.py @@ -27,7 +27,6 @@ import generic from sickbeard import logger, exceptions, helpers from sickbeard import encodingKludge as ek -from sickbeard.indexers import indexer_api, indexer_exceptions from sickbeard.exceptions import ex import xml.etree.cElementTree as etree @@ -145,7 +144,8 @@ class MediaBrowserMetadata(generic.GenericMetadata): If no season folder exists, None is returned """ - dir_list = [x for x in ek.ek(os.listdir, show_obj.location) if ek.ek(os.path.isdir, ek.ek(os.path.join, show_obj.location, x))] + dir_list = [x for x in ek.ek(os.listdir, show_obj.location) if + ek.ek(os.path.isdir, ek.ek(os.path.join, show_obj.location, x))] season_dir_regex = '^Season\s+(\d+)$' @@ -184,7 +184,8 @@ class MediaBrowserMetadata(generic.GenericMetadata): If no season folder exists, None is returned """ - dir_list = [x for x in ek.ek(os.listdir, show_obj.location) if ek.ek(os.path.isdir, ek.ek(os.path.join, show_obj.location, x))] + dir_list = [x for x in ek.ek(os.listdir, show_obj.location) if + ek.ek(os.path.isdir, ek.ek(os.path.join, show_obj.location, x))] season_dir_regex = '^Season\s+(\d+)$' @@ -237,32 +238,36 @@ class MediaBrowserMetadata(generic.GenericMetadata): if show_obj.dvdorder != 0: lINDEXER_API_PARMS['dvdorder'] = True - t = indexer_api.indexerApi(**lINDEXER_API_PARMS) + t = sickbeard.indexerApi(**lINDEXER_API_PARMS) tv_node = etree.Element("Series") try: myShow = t[int(show_obj.indexerid)] - except indexer_exceptions.indexer_shownotfound: - logger.log(u"Unable to find show with id " + str(show_obj.indexerid) + " on " + show_obj.indexer + ", skipping it", logger.ERROR) + except sickbeard.indexer_shownotfound: + logger.log(u"Unable to find show with id " + str(show_obj.indexerid) + " on " + sickbeard.indexerApi( + show_obj.indexer).name + ", skipping it", logger.ERROR) raise - except indexer_exceptions.indexer_error: - logger.log(u"" + show_obj.indexer + " is down, can't use its data to make the NFO", logger.ERROR) + except sickbeard.indexer_error: + logger.log( + u"" + sickbeard.indexerApi(show_obj.indexer).name + " is down, can't use its data to make the NFO", + logger.ERROR) raise # check for title and id if getattr(myShow, 'seriesname', 
None) is None or getattr(myShow, 'id', None) is None: - logger.log(u"Incomplete info for show with id " + str(show_obj.indexerid) + " on " + show_obj.indexer + ", skipping it", logger.ERROR) + logger.log(u"Incomplete info for show with id " + str(show_obj.indexerid) + " on " + sickbeard.indexerApi( + show_obj.indexer).name + ", skipping it", logger.ERROR) return False indexerid = etree.SubElement(tv_node, "id") if getattr(myShow, 'id', None) is not None: - indexerid.text = myShow['id'] + indexerid.text = str(myShow['id']) indexer = etree.SubElement(tv_node, "indexer") if show_obj.indexer != None: - indexer.text = show_obj.indexer + indexer.text = str(show_obj.indexer) SeriesName = etree.SubElement(tv_node, "SeriesName") if getattr(myShow, 'seriesname', None) is not None: @@ -400,13 +405,14 @@ class MediaBrowserMetadata(generic.GenericMetadata): if ep_obj.show.dvdorder != 0: lINDEXER_API_PARMS['dvdorder'] = True - t = indexer_api.indexerApi(**lINDEXER_API_PARMS) + t = sickbeard.indexerApi(**lINDEXER_API_PARMS) myShow = t[ep_obj.show.indexerid] - except indexer_exceptions.indexer_shownotfound, e: + except sickbeard.indexer_shownotfound, e: raise exceptions.ShowNotFoundException(e.message) - except indexer_exceptions.indexer_error, e: - logger.log(u"Unable to connect to " + ep_obj.show.indexer + " while creating meta files - skipping - " + ex(e), logger.ERROR) + except sickbeard.indexer_error, e: + logger.log(u"Unable to connect to " + sickbeard.indexerApi( + ep_obj.show.indexer).name + " while creating meta files - skipping - " + ex(e), logger.ERROR) return False rootNode = etree.Element("Item") @@ -416,8 +422,10 @@ class MediaBrowserMetadata(generic.GenericMetadata): try: myEp = myShow[curEpToWrite.season][curEpToWrite.episode] - except (indexer_exceptions.indexer_episodenotfound, indexer_exceptions.indexer_seasonnotfound): - logger.log(u"Unable to find episode " + str(curEpToWrite.season) + "x" + str(curEpToWrite.episode) + " on " + ep_obj.show.indexer + ".. has it been removed? Should I delete from db?") + except (sickbeard.indexer_episodenotfound, sickbeard.indexer_seasonnotfound): + logger.log(u"Unable to find episode " + str(curEpToWrite.season) + "x" + str( + curEpToWrite.episode) + " on " + sickbeard.indexerApi( + ep_obj.show.indexer).name + ".. has it been removed? 
Should I delete from db?") return None if curEpToWrite == ep_obj: diff --git a/sickbeard/metadata/tivo.py b/sickbeard/metadata/tivo.py index dac42d93..485e1601 100644 --- a/sickbeard/metadata/tivo.py +++ b/sickbeard/metadata/tivo.py @@ -30,9 +30,6 @@ from sickbeard import encodingKludge as ek from sickbeard.exceptions import ex -from sickbeard.indexers import indexer_api, indexer_exceptions - - class TIVOMetadata(generic.GenericMetadata): """ Metadata generation class for TIVO @@ -179,20 +176,23 @@ class TIVOMetadata(generic.GenericMetadata): if ep_obj.show.dvdorder != 0: lINDEXER_API_PARMS['dvdorder'] = True - t = indexer_api.indexerApi(**lINDEXER_API_PARMS) + t = sickbeard.indexerApi(**lINDEXER_API_PARMS) myShow = t[ep_obj.show.indexerid] - except indexer_exceptions.indexer_shownotfound, e: + except sickbeard.indexer_shownotfound, e: raise exceptions.ShowNotFoundException(str(e)) - except indexer_exceptions.indexer_error, e: - logger.log(u"Unable to connect to " + ep_obj.show.indexer + " while creating meta files - skipping - " + str(e), logger.ERROR) + except sickbeard.indexer_error, e: + logger.log(u"Unable to connect to " + sickbeard.indexerApi( + ep_obj.show.indexer).name + " while creating meta files - skipping - " + str(e), logger.ERROR) return False for curEpToWrite in eps_to_write: try: myEp = myShow[curEpToWrite.season][curEpToWrite.episode] - except (indexer_exceptions.indexer_episodenotfound, indexer_exceptions.indexer_seasonnotfound): - logger.log(u"Unable to find episode " + str(curEpToWrite.season) + "x" + str(curEpToWrite.episode) + " on " + ep_obj.show.indexer + "... has it been removed? Should I delete from db?") + except (sickbeard.indexer_episodenotfound, sickbeard.indexer_seasonnotfound): + logger.log(u"Unable to find episode " + str(curEpToWrite.season) + "x" + str( + curEpToWrite.episode) + " on " + sickbeard.indexerApi( + ep_obj.show.indexer).name + "... has it been removed? Should I delete from db?") return None if getattr(myEp, 'firstaired', None) is None and ep_obj.season == 0: @@ -230,7 +230,8 @@ class TIVOMetadata(generic.GenericMetadata): # Replace double curly quotes sanitizedDescription = sanitizedDescription.replace(u"\u201c", "\"").replace(u"\u201d", "\"") # Replace single curly quotes - sanitizedDescription = sanitizedDescription.replace(u"\u2018", "'").replace(u"\u2019", "'").replace(u"\u02BC", "'") + sanitizedDescription = sanitizedDescription.replace(u"\u2018", "'").replace(u"\u2019", "'").replace( + u"\u02BC", "'") data += ("description : " + sanitizedDescription + "\n") @@ -277,15 +278,15 @@ class TIVOMetadata(generic.GenericMetadata): if genre: data += ("vProgramGenre : " + str(genre) + "\n") - # NOTE: The following are metadata keywords are not used - # displayMajorNumber - # showingBits - # displayMinorNumber - # colorCode - # vSeriesGenre - # vGuestStar, vDirector, vExecProducer, vProducer, vWriter, vHost, vChoreographer - # partCount - # partIndex + # NOTE: The following are metadata keywords are not used + # displayMajorNumber + # showingBits + # displayMinorNumber + # colorCode + # vSeriesGenre + # vGuestStar, vDirector, vExecProducer, vProducer, vWriter, vHost, vChoreographer + # partCount + # partIndex return data @@ -324,7 +325,8 @@ class TIVOMetadata(generic.GenericMetadata): helpers.chmodAsParent(nfo_file_path) except EnvironmentError, e: - logger.log(u"Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? 
" + ex(e), logger.ERROR) + logger.log(u"Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e), + logger.ERROR) return False return True diff --git a/sickbeard/metadata/wdtv.py b/sickbeard/metadata/wdtv.py index df3d93bb..34e572f8 100644 --- a/sickbeard/metadata/wdtv.py +++ b/sickbeard/metadata/wdtv.py @@ -27,7 +27,6 @@ import generic from sickbeard import logger, exceptions, helpers from sickbeard import encodingKludge as ek -from sickbeard.indexers import indexer_api, indexer_exceptions from sickbeard.exceptions import ex import xml.etree.cElementTree as etree @@ -135,7 +134,8 @@ class WDTVMetadata(generic.GenericMetadata): If no season folder exists, None is returned """ - dir_list = [x for x in ek.ek(os.listdir, show_obj.location) if ek.ek(os.path.isdir, ek.ek(os.path.join, show_obj.location, x))] + dir_list = [x for x in ek.ek(os.listdir, show_obj.location) if + ek.ek(os.path.isdir, ek.ek(os.path.join, show_obj.location, x))] season_dir_regex = '^Season\s+(\d+)$' @@ -187,12 +187,13 @@ class WDTVMetadata(generic.GenericMetadata): if ep_obj.show.dvdorder != 0: lINDEXER_API_PARMS['dvdorder'] = True - t = indexer_api.indexerApi(**lINDEXER_API_PARMS) + t = sickbeard.indexerApi(**lINDEXER_API_PARMS) myShow = t[ep_obj.show.indexerid] - except indexer_exceptions.indexer_shownotfound, e: + except sickbeard.indexer_shownotfound, e: raise exceptions.ShowNotFoundException(e.message) - except indexer_exceptions.indexer_error, e: - logger.log(u"Unable to connect to " + ep_obj.show.indexer + " while creating meta files - skipping - " + ex(e), logger.ERROR) + except sickbeard.indexer_error, e: + logger.log(u"Unable to connect to " + sickbeard.indexerApi( + ep_obj.show.indexer).name + " while creating meta files - skipping - " + ex(e), logger.ERROR) return False rootNode = etree.Element("details") @@ -202,8 +203,10 @@ class WDTVMetadata(generic.GenericMetadata): try: myEp = myShow[curEpToWrite.season][curEpToWrite.episode] - except (indexer_exceptions.indexer_episodenotfound, indexer_exceptions.indexer_seasonnotfound): - logger.log(u"Unable to find episode " + str(curEpToWrite.season) + "x" + str(curEpToWrite.episode) + " on " + ep_obj.show.indexer + "... has it been removed? Should I delete from db?") + except (sickbeard.indexer_episodenotfound, sickbeard.indexer_seasonnotfound): + logger.log(u"Unable to find episode " + str(curEpToWrite.season) + "x" + str( + curEpToWrite.episode) + " on " + sickbeard.indexerApi( + ep_obj.show.indexer).name + "... has it been removed? 
Should I delete from db?") return None if getattr(myEp, 'firstaired', None) is None and ep_obj.season == 0: diff --git a/sickbeard/metadata/xbmc_12plus.py b/sickbeard/metadata/xbmc_12plus.py index bfc02190..0016e4ce 100644 --- a/sickbeard/metadata/xbmc_12plus.py +++ b/sickbeard/metadata/xbmc_12plus.py @@ -20,8 +20,6 @@ import datetime import sickbeard -from sickbeard.indexers import indexer_api, indexer_exceptions - from sickbeard import logger, exceptions, helpers from sickbeard.exceptions import ex @@ -109,23 +107,27 @@ class XBMC_12PlusMetadata(generic.GenericMetadata): if show_obj.dvdorder != 0: lINDEXER_API_PARMS['dvdorder'] = True - t = indexer_api.indexerApi(**lINDEXER_API_PARMS) + t = sickbeard.indexerApi(**lINDEXER_API_PARMS) tv_node = etree.Element("tvshow") try: myShow = t[int(show_ID)] - except indexer_exceptions.indexer_shownotfound: - logger.log(u"Unable to find show with id " + str(show_ID) + " on " + show_obj.indexer + ", skipping it", logger.ERROR) + except sickbeard.indexer_shownotfound: + logger.log(u"Unable to find show with id " + str(show_ID) + " on " + sickbeard.indexerApi( + show_obj.indexer).name + ", skipping it", logger.ERROR) raise - except indexer_exceptions.indexer_error: - logger.log(u"" + show_obj.indexer + " is down, can't use its data to add this show", logger.ERROR) + except sickbeard.indexer_error: + logger.log( + u"" + sickbeard.indexerApi(show_obj.indexer).name + " is down, can't use its data to add this show", + logger.ERROR) raise # check for title and id if getattr(myShow, 'seriesname', None) is None or getattr(myShow, 'id', None) is None: - logger.log(u"Incomplete info for show with id " + str(show_ID) + " on " + show_obj.indexer + ", skipping it", logger.ERROR) + logger.log(u"Incomplete info for show with id " + str(show_ID) + " on " + sickbeard.indexerApi( + show_obj.indexer).name + ", skipping it", logger.ERROR) return False title = etree.SubElement(tv_node, "title") @@ -153,7 +155,7 @@ class XBMC_12PlusMetadata(generic.GenericMetadata): episodeguideurl = etree.SubElement(episodeguide, "url") episodeguideurl2 = etree.SubElement(tv_node, "episodeguideurl") if getattr(myShow, 'id', None) is not None: - showurl = t.base_url + myShow["id"] + '/all/en.zip' + showurl = sickbeard.indexerApi(show_obj.indexer).config['base_url'] + str(myShow["id"]) + '/all/en.zip' episodeguideurl.text = showurl episodeguideurl2.text = showurl @@ -163,11 +165,11 @@ class XBMC_12PlusMetadata(generic.GenericMetadata): indexerid = etree.SubElement(tv_node, "id") if getattr(myShow, 'id', None) is not None: - indexerid.text = myShow["id"] + indexerid.text = str(myShow["id"]) indexer = etree.SubElement(tv_node, "indexer") if show_obj.indexer is not None: - indexer.text = show_obj.indexer + indexer.text = str(show_obj.indexer) genre = etree.SubElement(tv_node, "genre") if getattr(myShow, 'genre', None) is not None: @@ -230,12 +232,13 @@ class XBMC_12PlusMetadata(generic.GenericMetadata): lINDEXER_API_PARMS['dvdorder'] = True try: - t = indexer_api.indexerApi(**lINDEXER_API_PARMS) + t = sickbeard.indexerApi(**lINDEXER_API_PARMS) myShow = t[ep_obj.show.indexerid] - except indexer_exceptions.indexer_shownotfound, e: + except sickbeard.indexer_shownotfound, e: raise exceptions.ShowNotFoundException(e.message) - except indexer_exceptions.indexer_error, e: - logger.log(u"Unable to connect to " + ep_obj.show.indexer + " while creating meta files - skipping - " + ex(e), logger.ERROR) + except sickbeard.indexer_error, e: + logger.log(u"Unable to connect to " + sickbeard.indexerApi( + 
ep_obj.show.indexer).name + " while creating meta files - skipping - " + ex(e), logger.ERROR) return if len(eps_to_write) > 1: @@ -248,8 +251,10 @@ class XBMC_12PlusMetadata(generic.GenericMetadata): try: myEp = myShow[curEpToWrite.season][curEpToWrite.episode] - except (indexer_exceptions.indexer_episodenotfound, indexer_exceptions.indexer_seasonnotfound): - logger.log(u"Unable to find episode " + str(curEpToWrite.season) + "x" + str(curEpToWrite.episode) + " on " + ep_obj.show.indexer + ".. has it been removed? Should I delete from db?") + except (sickbeard.indexer_episodenotfound, sickbeard.indexer_seasonnotfound): + logger.log(u"Unable to find episode " + str(curEpToWrite.season) + "x" + str( + curEpToWrite.episode) + " on " + sickbeard.indexerApi( + ep_obj.show.indexer).name + ".. has it been removed? Should I delete from db?") return None if getattr(myEp, 'firstaired', None) is None: diff --git a/sickbeard/name_cache.py b/sickbeard/name_cache.py index dabaa018..57641570 100644 --- a/sickbeard/name_cache.py +++ b/sickbeard/name_cache.py @@ -19,6 +19,7 @@ from sickbeard import db from sickbeard.helpers import sanitizeSceneName + def addNameToCache(name, indexer_id): """ Adds the show & tvdb id to the scene_names table in cache.db. @@ -26,16 +27,17 @@ def addNameToCache(name, indexer_id): name: The show name to cache indexer_id: the TVDB and TVRAGE id that this show should be cached with (can be None/0 for unknown) """ - + # standardize the name we're using to account for small differences in providers name = sanitizeSceneName(name) - + if not indexer_id: indexer_id = 0 - + cacheDB = db.DBConnection('cache.db') cacheDB.action("INSERT INTO scene_names (indexer_id, name) VALUES (?, ?)", [indexer_id, name]) + def retrieveNameFromCache(name): """ Looks up the given name in the scene_names table in cache.db. @@ -44,18 +46,19 @@ def retrieveNameFromCache(name): Returns: the TVDB and TVRAGE id that resulted from the cache lookup or None if the show wasn't found in the cache """ - + # standardize the name we're using to account for small differences in providers name = sanitizeSceneName(name) - + cacheDB = db.DBConnection('cache.db') cache_results = cacheDB.select("SELECT * FROM scene_names WHERE name = ?", [name]) if not cache_results: return None - + return int(cache_results[0]["indexer_id"]) + def clearCache(): """ Deletes all "unknown" entries from the cache (names with indexer_id of 0). 
diff --git a/sickbeard/name_parser/parser.py b/sickbeard/name_parser/parser.py index 044d056f..86373d7d 100644 --- a/sickbeard/name_parser/parser.py +++ b/sickbeard/name_parser/parser.py @@ -26,13 +26,12 @@ import calendar from sickbeard import logger, classes from sickbeard import scene_numbering, scene_exceptions -from sickbeard.indexers import indexer_api, indexer_exceptions -from sickbeard.common import indexerStrings from lib.dateutil.parser import parse from time import strptime + class NameParser(object): def __init__(self, file_name=True): @@ -54,14 +53,14 @@ class NameParser(object): Stolen from dbr's tvnamer """ - + series_name = re.sub("(\D)\.(?!\s)(\D)", "\\1 \\2", series_name) - series_name = re.sub("(\d)\.(\d{4})", "\\1 \\2", series_name) # if it ends in a year then don't keep the dot + series_name = re.sub("(\d)\.(\d{4})", "\\1 \\2", series_name) # if it ends in a year then don't keep the dot series_name = re.sub("(\D)\.(?!\s)", "\\1 ", series_name) series_name = re.sub("\.(?!\s)(\D)", " \\1", series_name) series_name = series_name.replace("_", " ") series_name = re.sub("-$", "", series_name) - series_name = re.sub("^\[.*\]", "", series_name) + series_name = re.sub("^\[.*\]", "", series_name) return series_name.strip() def _compile_regexes(self): @@ -74,43 +73,43 @@ class NameParser(object): self.compiled_regexes.append((cur_pattern_name, cur_regex)) def _parse_string(self, name): - + if not name: return None - + for (cur_regex_name, cur_regex) in self.compiled_regexes: match = cur_regex.match(name) if not match: continue - + result = ParseResult(name) result.which_regex = [cur_regex_name] - + named_groups = match.groupdict().keys() if 'series_name' in named_groups: result.series_name = match.group('series_name') if result.series_name: result.series_name = self.clean_series_name(result.series_name) - + if 'season_num' in named_groups: tmp_season = int(match.group('season_num')) - if cur_regex_name == 'bare' and tmp_season in (19,20): + if cur_regex_name == 'bare' and tmp_season in (19, 20): continue result.season_number = tmp_season - + if 'ep_num' in named_groups: ep_num = self._convert_number(match.group('ep_num')) if 'extra_ep_num' in named_groups and match.group('extra_ep_num'): - result.episode_numbers = range(ep_num, self._convert_number(match.group('extra_ep_num'))+1) + result.episode_numbers = range(ep_num, self._convert_number(match.group('extra_ep_num')) + 1) else: result.episode_numbers = [ep_num] if 'air_year' in named_groups and 'air_month' in named_groups and 'air_day' in named_groups: if 'scene_sports_date_format' in cur_regex_name: year = match.group('air_year') - month = strptime(match.group('air_month')[:3],'%b').tm_mon + month = strptime(match.group('air_month')[:3], '%b').tm_mon day = re.sub("(st|nd|rd|th)", "", match.group('air_day')) else: year = int(match.group('air_year')) @@ -125,17 +124,18 @@ class NameParser(object): if 'extra_info' in named_groups: tmp_extra_info = match.group('extra_info') - + # Show.S04.Special is almost certainly not every episode in the season - if tmp_extra_info and cur_regex_name == 'season_only' and re.match(r'([. _-]|^)(special|extra)\w*([. _-]|$)', tmp_extra_info, re.I): + if tmp_extra_info and cur_regex_name == 'season_only' and re.match( + r'([. _-]|^)(special|extra)\w*([. 
_-]|$)', tmp_extra_info, re.I): continue result.extra_info = tmp_extra_info - + if 'release_group' in named_groups: result.release_group = match.group('release_group') return result - + return None def _combine_results(self, first, second, attr): @@ -149,10 +149,10 @@ class NameParser(object): # if the second doesn't exist then return the first if not second: return getattr(first, attr) - + a = getattr(first, attr) b = getattr(second, attr) - + # if a is good use it if a != None or (type(a) == list and len(a)): return a @@ -160,14 +160,14 @@ class NameParser(object): else: return b - def _unicodify(self, obj, encoding = "utf-8"): + def _unicodify(self, obj, encoding="utf-8"): if isinstance(obj, basestring): if not isinstance(obj, unicode): obj = unicode(obj, encoding) return obj def _convert_number(self, number): - + try: return int(number) except: @@ -175,15 +175,15 @@ class NameParser(object): (1000, 900, 500, 400, 100, 90, 50, 40, 10, 9, 5, 4, 1), ('M', 'CM', 'D', 'CD', 'C', 'XC', 'L', 'XL', 'X', 'IX', 'V', 'IV', 'I') ) - - n = unicode(number).upper() - + + n = unicode(number).upper() + i = result = 0 for integer, numeral in numeral_map: while n[i:i + len(numeral)] == numeral: result += integer i += len(numeral) - + return result def parse(self, name, fix_scene_numbering=False): @@ -253,47 +253,49 @@ class NameParser(object): return result_fixed return final_result - + @classmethod - def series_name_to_indexer_id(cls, series_name, check_scene_exceptions=True, check_database=True, check_indexer=False): + def series_name_to_indexer_id(cls, series_name, check_scene_exceptions=True, check_database=True, + check_indexer=False): """ Given a series name, return it's tvdbd_id. Returns None if not found. This is mostly robbed from postProcessor._analyze_name """ - + # do a scene reverse-lookup to get a list of all possible names name_list = sickbeard.show_name_helpers.sceneToNormalShowNames(series_name) - + # for each possible interpretation of that scene name if check_scene_exceptions: for cur_name in name_list: - logger.log(u"Checking scene exceptions for a match on "+cur_name, logger.DEBUG) + logger.log(u"Checking scene exceptions for a match on " + cur_name, logger.DEBUG) scene_id = sickbeard.scene_exceptions.get_scene_exception_by_name(cur_name) if scene_id: return scene_id # see if we can find the name directly in the DB, if so use it if check_database: for cur_name in name_list: - logger.log(u"Looking up "+cur_name+u" in the DB", logger.DEBUG) + logger.log(u"Looking up " + str(cur_name) + " in the DB", logger.DEBUG) db_result = sickbeard.helpers.searchDBForShow(cur_name) if db_result: return db_result[1] - + # see if we can find the name with a TVDB lookup if check_indexer: for cur_name in name_list: - for indexer in indexerStrings: + for indexer in sickbeard.indexerApi().indexers: try: lINDEXER_API_PARMS = {'indexer': indexer} lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI - t = indexer_api.indexerApi(**lINDEXER_API_PARMS) + t = sickbeard.indexerApi(**lINDEXER_API_PARMS) - logger.log(u"Looking up name "+cur_name+u" on the Indexer", logger.DEBUG) + logger.log(u"Looking up name " + str(cur_name) + " on " + sickbeard.indexerApi(indexer).name, + logger.DEBUG) showObj = t[cur_name] - except (indexer_exceptions): + except (sickbeard.indexer_exception): # if none found, search on all languages try: lINDEXER_API_PARMS = {'indexer': indexer} @@ -301,11 +303,13 @@ class NameParser(object): lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI lINDEXER_API_PARMS['search_all_languages'] = True 
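# ---------------------------------------------------------------------------
# Illustration (not part of the patch): the retry pattern at this point in
# series_name_to_indexer_id(), condensed - query an indexer with its default
# language first, and only on a miss repeat the lookup with
# search_all_languages=True. Hedged sketch with a stand-in lookup function
# and hypothetical sample data instead of the real indexer call:
def lookup_with_fallback(lookup, name):
    """lookup(name, all_languages) raises KeyError on a miss."""
    try:
        return lookup(name, False)        # cheap default-language pass
    except KeyError:
        try:
            return lookup(name, True)     # expensive all-languages pass
        except KeyError:
            return None                   # caller moves on to the next indexer

shows = {('american dad', True): 73141}   # hypothetical: only the wide pass hits
def _lookup(name, all_languages):
    return shows[(name, all_languages)]

print lookup_with_fallback(_lookup, 'american dad')   # -> 73141 via the fallback
# ---------------------------------------------------------------------------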
- t = indexer_api.indexerApi(**lINDEXER_API_PARMS) + t = sickbeard.indexerApi(**lINDEXER_API_PARMS) - logger.log(u"Looking up name "+cur_name+u" in all languages on the Indexer", logger.DEBUG) + logger.log( + u"Looking up name " + str(cur_name) + " in all languages on " + sickbeard.indexerApi( + indexer).name, logger.DEBUG) showObj = t[cur_name] - except (indexer_exceptions.indexer_exception, IOError): + except (sickbeard.indexer_exception, IOError): pass continue @@ -313,9 +317,10 @@ class NameParser(object): continue return showObj["id"] - + return None + class ParseResult(object): def __init__(self, original_name, @@ -325,10 +330,10 @@ class ParseResult(object): extra_info=None, release_group=None, air_date=None - ): + ): self.original_name = original_name - + self.series_name = series_name self.season_number = season_number if not episode_numbers: @@ -338,15 +343,15 @@ class ParseResult(object): self.extra_info = extra_info self.release_group = release_group - + self.air_date = air_date - + self.which_regex = None - + def __eq__(self, other): if not other: return False - + if self.series_name != other.series_name: return False if self.season_number != other.season_number: @@ -359,7 +364,7 @@ class ParseResult(object): return False if self.air_date != other.air_date: return False - + return True def __str__(self): @@ -368,10 +373,10 @@ class ParseResult(object): else: to_return = u'' if self.season_number != None: - to_return += 'S'+str(self.season_number) + to_return += 'S' + str(self.season_number) if self.episode_numbers and len(self.episode_numbers): for e in self.episode_numbers: - to_return += 'E'+str(e) + to_return += 'E' + str(e) if self.air_by_date: to_return += str(self.air_date) @@ -381,7 +386,7 @@ class ParseResult(object): if self.release_group: to_return += ' (' + self.release_group + ')' - to_return += ' [ABD: '+str(self.air_by_date)+']' + to_return += ' [ABD: ' + str(self.air_by_date) + ']' return to_return.encode('utf-8') @@ -389,59 +394,61 @@ class ParseResult(object): if self.season_number == None and len(self.episode_numbers) == 0 and self.air_date: return True return False + air_by_date = property(_is_air_by_date) - + def fix_scene_numbering(self): """ The changes the parsed result (which is assumed to be scene numbering) to tvdb numbering, if necessary. """ - if self.air_by_date: return self # scene numbering does not apply to air-by-date - if self.season_number == None: return self # can't work without a season - if len(self.episode_numbers) == 0: return self # need at least one episode - + if self.air_by_date: return self # scene numbering does not apply to air-by-date + if self.season_number == None: return self # can't work without a season + if len(self.episode_numbers) == 0: return self # need at least one episode + indexer_id = NameParser.series_name_to_indexer_id(self.series_name, True, True, False) - + new_episode_numbers = [] new_season_numbers = [] for epNo in self.episode_numbers: (s, e) = scene_numbering.get_indexer_numbering(indexer_id, self.season_number, epNo) new_episode_numbers.append(e) new_season_numbers.append(s) - + # need to do a quick sanity check here. It's possible that we now have episodes # from more than one season (by tvdb numbering), and this is just too much # for sickbeard, so we'd need to flag it. 
- new_season_numbers = list(set(new_season_numbers)) # remove duplicates + new_season_numbers = list(set(new_season_numbers)) # remove duplicates if len(new_season_numbers) > 1: raise InvalidNameException("Scene numbering results episodes from " "seasons %s, (i.e. more than one) and " "sickbeard does not support this. " "Sorry." % (str(new_season_numbers))) - + # I guess it's possible that we'd have duplicate episodes too, so lets # eliminate them new_episode_numbers = list(set(new_episode_numbers)) new_episode_numbers.sort() - + self.episode_numbers = new_episode_numbers self.season_number = new_season_numbers[0] return self + class NameParserCache(object): #TODO: check if the fifo list can beskiped and only use one dict - _previous_parsed_list = [] # keep a fifo list of the cached items + _previous_parsed_list = [] # keep a fifo list of the cached items _previous_parsed = {} _cache_size = 100 - + def add(self, name, parse_result): self._previous_parsed[name] = parse_result self._previous_parsed_list.append(name) while len(self._previous_parsed_list) > self._cache_size: del_me = self._previous_parsed_list.pop(0) self._previous_parsed.pop(del_me) - + def get(self, name): if name in self._previous_parsed: logger.log("Using cached parse result for: " + name, logger.DEBUG) @@ -449,7 +456,9 @@ class NameParserCache(object): else: return None + name_parser_cache = NameParserCache() + class InvalidNameException(Exception): "The given name is not valid" diff --git a/sickbeard/name_parser/regexes.py b/sickbeard/name_parser/regexes.py index f500e038..427d37f9 100644 --- a/sickbeard/name_parser/regexes.py +++ b/sickbeard/name_parser/regexes.py @@ -19,181 +19,181 @@ # all regexes are case insensitive ep_regexes = [ - ('standard_repeat', - # Show.Name.S01E02.S01E03.Source.Quality.Etc-Group - # Show Name - S01E02 - S01E03 - S01E04 - Ep Name - ''' - ^(?P.+?)[. _-]+ # Show_Name and separator - s(?P\d+)[. _-]* # S01 and optional separator - e(?P\d+) # E02 and separator - ([. _-]+s(?P=season_num)[. _-]* # S01 and optional separator - e(?P\d+))+ # E03/etc and separator - [. _-]*((?P.+?) # Source_Quality_Etc- - ((?[^- ]+))?)?$ # Group - '''), - - ('fov_repeat', - # Show.Name.1x02.1x03.Source.Quality.Etc-Group - # Show Name - 1x02 - 1x03 - 1x04 - Ep Name - ''' - ^(?P.+?)[. _-]+ # Show_Name and separator - (?P\d+)x # 1x - (?P\d+) # 02 and separator - ([. _-]+(?P=season_num)x # 1x - (?P\d+))+ # 03/etc and separator - [. _-]*((?P.+?) # Source_Quality_Etc- - ((?[^- ]+))?)?$ # Group - '''), - - ('standard', - # Show.Name.S01E02.Source.Quality.Etc-Group - # Show Name - S01E02 - My Ep Name - # Show.Name.S01.E03.My.Ep.Name - # Show.Name.S01E02E03.Source.Quality.Etc-Group - # Show Name - S01E02-03 - My Ep Name - # Show.Name.S01.E02.E03 - ''' - ^((?P.+?)[. _-]+)? # Show_Name and separator - s(?P\d+)[. _-]* # S01 and optional separator - e(?P\d+) # E02 and separator - (([. _-]*e|-) # linking e/- char - (?P(?!(1080|720)[pi])\d+))* # additional E03/etc - [. _-]*((?P.+?) # Source_Quality_Etc- - ((?[^- ]+))?)?$ # Group - '''), + ('standard_repeat', + # Show.Name.S01E02.S01E03.Source.Quality.Etc-Group + # Show Name - S01E02 - S01E03 - S01E04 - Ep Name + ''' + ^(?P.+?)[. _-]+ # Show_Name and separator + s(?P\d+)[. _-]* # S01 and optional separator + e(?P\d+) # E02 and separator + ([. _-]+s(?P=season_num)[. _-]* # S01 and optional separator + e(?P\d+))+ # E03/etc and separator + [. _-]*((?P.+?) 
# Source_Quality_Etc- + ((?[^- ]+))?)?$ # Group + '''), - ('fov', - # Show_Name.1x02.Source_Quality_Etc-Group - # Show Name - 1x02 - My Ep Name - # Show_Name.1x02x03x04.Source_Quality_Etc-Group - # Show Name - 1x02-03-04 - My Ep Name - ''' - ^((?P.+?)[\[. _-]+)? # Show_Name and separator - (?P\d+)x # 1x - (?P\d+) # 02 and separator - (([. _-]*x|-) # linking x/- char - (?P - (?!(1080|720)[pi])(?!(?<=x)264) # ignore obviously wrong multi-eps - \d+))* # additional x03/etc - [\]. _-]*((?P.+?) # Source_Quality_Etc- - ((?[^- ]+))?)?$ # Group - '''), - - ('scene_date_format', - # Show.Name.2010.11.23.Source.Quality.Etc-Group - # Show Name - 2010-11-23 - Ep Name - ''' - ^((?P.+?)[. _-]+)? # Show_Name and separator - (?P\d{4})[. _-]+ # 2010 and separator - (?P\d{2})[. _-]+ # 11 and separator - (?P\d{2}) # 23 and separator - [. _-]*((?P.+?) # Source_Quality_Etc- - ((?[^- ]+))?)?$ # Group - '''), + ('fov_repeat', + # Show.Name.1x02.1x03.Source.Quality.Etc-Group + # Show Name - 1x02 - 1x03 - 1x04 - Ep Name + ''' + ^(?P.+?)[. _-]+ # Show_Name and separator + (?P\d+)x # 1x + (?P\d+) # 02 and separator + ([. _-]+(?P=season_num)x # 1x + (?P\d+))+ # 03/etc and separator + [. _-]*((?P.+?) # Source_Quality_Etc- + ((?[^- ]+))?)?$ # Group + '''), - ('scene_sports_date_format', - # Show.Name.2010.Nov.23rd.Source.Quality.Etc-Group - # Show Name - 2010-Nov-23rd - Ep Name - ''' - ^(?P.*?(UEFA|MLB|ESPN|WWE|MMA|UFC|TNA|EPL|NASCAR|NBA|NFL|NHL|NRL|PGA|SUPER LEAGUE|FORMULA|FIFA|NETBALL|MOTOGP).*?) - (?P\d{1,2}[a-zA-Z]{2})[. _-]+ # 23rd and seperator - (?P[a-zA-Z]{3,})[. _-]+ # Nov and seperator - (?P\d{4})[. _-]+ # 2010 - (?P.*?(?.*?)$ # Group - '''), + ('standard', + # Show.Name.S01E02.Source.Quality.Etc-Group + # Show Name - S01E02 - My Ep Name + # Show.Name.S01.E03.My.Ep.Name + # Show.Name.S01E02E03.Source.Quality.Etc-Group + # Show Name - S01E02-03 - My Ep Name + # Show.Name.S01.E02.E03 + ''' + ^((?P.+?)[. _-]+)? # Show_Name and separator + s(?P\d+)[. _-]* # S01 and optional separator + e(?P\d+) # E02 and separator + (([. _-]*e|-) # linking e/- char + (?P(?!(1080|720)[pi])\d+))* # additional E03/etc + [. _-]*((?P.+?) # Source_Quality_Etc- + ((?[^- ]+))?)?$ # Group + '''), - ('stupid', - # tpz-abc102 - ''' - (?P.+?)-\w+?[\. ]? # tpz-abc - (?!264) # don't count x264 - (?P\d{1,2}) # 1 - (?P\d{2})$ # 02 - '''), - - ('verbose', - # Show Name Season 1 Episode 2 Ep Name - ''' - ^(?P.+?)[. _-]+ # Show Name and separator - season[. _-]+ # season and separator - (?P\d+)[. _-]+ # 1 - episode[. _-]+ # episode and separator - (?P\d+)[. _-]+ # 02 and separator - (?P.+)$ # Source_Quality_Etc- - '''), - - ('season_only', - # Show.Name.S01.Source.Quality.Etc-Group - ''' - ^((?P.+?)[. _-]+)? # Show_Name and separator - s(eason[. _-])? # S01/Season 01 - (?P\d+)[. _-]* # S01 and optional separator - [. _-]*((?P.+?) # Source_Quality_Etc- - ((?[^- ]+))?)?$ # Group - ''' - ), + ('fov', + # Show_Name.1x02.Source_Quality_Etc-Group + # Show Name - 1x02 - My Ep Name + # Show_Name.1x02x03x04.Source_Quality_Etc-Group + # Show Name - 1x02-03-04 - My Ep Name + ''' + ^((?P.+?)[\[. _-]+)? # Show_Name and separator + (?P\d+)x # 1x + (?P\d+) # 02 and separator + (([. _-]*x|-) # linking x/- char + (?P + (?!(1080|720)[pi])(?!(?<=x)264) # ignore obviously wrong multi-eps + \d+))* # additional x03/etc + [\]. _-]*((?P.+?) # Source_Quality_Etc- + ((?[^- ]+))?)?$ # Group + '''), - ('no_season_multi_ep', - # Show.Name.E02-03 - # Show.Name.E02.2010 - ''' - ^((?P.+?)[. _-]+)? # Show_Name and separator - (e(p(isode)?)?|part|pt)[. _-]? 
# e, ep, episode, or part - (?P(\d+|[ivx]+)) # first ep num - ((([. _-]+(and|&|to)[. _-]+)|-) # and/&/to joiner - (?P(?!(1080|720)[pi])(\d+|[ivx]+))[. _-]) # second ep num - ([. _-]*(?P.+?) # Source_Quality_Etc- - ((?[^- ]+))?)?$ # Group - ''' - ), + ('scene_date_format', + # Show.Name.2010.11.23.Source.Quality.Etc-Group + # Show Name - 2010-11-23 - Ep Name + ''' + ^((?P.+?)[. _-]+)? # Show_Name and separator + (?P\d{4})[. _-]+ # 2010 and separator + (?P\d{2})[. _-]+ # 11 and separator + (?P\d{2}) # 23 and separator + [. _-]*((?P.+?) # Source_Quality_Etc- + ((?[^- ]+))?)?$ # Group + '''), - ('no_season_general', - # Show.Name.E23.Test - # Show.Name.Part.3.Source.Quality.Etc-Group - # Show.Name.Part.1.and.Part.2.Blah-Group - ''' - ^((?P.+?)[. _-]+)? # Show_Name and separator - (e(p(isode)?)?|part|pt)[. _-]? # e, ep, episode, or part - (?P(\d+|([ivx]+(?=[. _-])))) # first ep num - ([. _-]+((and|&|to)[. _-]+)? # and/&/to joiner - ((e(p(isode)?)?|part|pt)[. _-]?) # e, ep, episode, or part - (?P(?!(1080|720)[pi]) - (\d+|([ivx]+(?=[. _-]))))[. _-])* # second ep num - ([. _-]*(?P.+?) # Source_Quality_Etc- - ((?[^- ]+))?)?$ # Group - ''' - ), + ('scene_sports_date_format', + # Show.Name.2010.Nov.23rd.Source.Quality.Etc-Group + # Show Name - 2010-Nov-23rd - Ep Name + ''' + ^(?P.*?(UEFA|MLB|ESPN|WWE|MMA|UFC|TNA|EPL|NASCAR|NBA|NFL|NHL|NRL|PGA|SUPER LEAGUE|FORMULA|FIFA|NETBALL|MOTOGP).*?) + (?P\d{1,2}[a-zA-Z]{2})[. _-]+ # 23rd and seperator + (?P[a-zA-Z]{3,})[. _-]+ # Nov and seperator + (?P\d{4})[. _-]+ # 2010 + (?P.*?(?.*?)$ # Group + '''), - ('bare', - # Show.Name.102.Source.Quality.Etc-Group - ''' - ^(?P.+?)[. _-]+ # Show_Name and separator - (?P\d{1,2}) # 1 - (?P\d{2}) # 02 and separator - ([. _-]+(?P(?!\d{3}[. _-]+)[^-]+) # Source_Quality_Etc- - (-(?P.+))?)?$ # Group - '''), - - ('no_season', - # Show Name - 01 - Ep Name - # 01 - Ep Name - # 01 - Ep Name - ''' - ^((?P.+?)(?:[. _-]{2,}|[. _]))? # Show_Name and separator - (?P\d{1,2}) # 02 - (?:-(?P\d{1,2}))* # 02 - [. _-]+((?P.+?) # Source_Quality_Etc- - ((?[^- ]+))?)?$ # Group - ''' - ), - ] + ('stupid', + # tpz-abc102 + ''' + (?P.+?)-\w+?[\. ]? # tpz-abc + (?!264) # don't count x264 + (?P\d{1,2}) # 1 + (?P\d{2})$ # 02 + '''), + + ('verbose', + # Show Name Season 1 Episode 2 Ep Name + ''' + ^(?P.+?)[. _-]+ # Show Name and separator + season[. _-]+ # season and separator + (?P\d+)[. _-]+ # 1 + episode[. _-]+ # episode and separator + (?P\d+)[. _-]+ # 02 and separator + (?P.+)$ # Source_Quality_Etc- + '''), + + ('season_only', + # Show.Name.S01.Source.Quality.Etc-Group + ''' + ^((?P.+?)[. _-]+)? # Show_Name and separator + s(eason[. _-])? # S01/Season 01 + (?P\d+)[. _-]* # S01 and optional separator + [. _-]*((?P.+?) # Source_Quality_Etc- + ((?[^- ]+))?)?$ # Group + ''' + ), + + ('no_season_multi_ep', + # Show.Name.E02-03 + # Show.Name.E02.2010 + ''' + ^((?P.+?)[. _-]+)? # Show_Name and separator + (e(p(isode)?)?|part|pt)[. _-]? # e, ep, episode, or part + (?P(\d+|[ivx]+)) # first ep num + ((([. _-]+(and|&|to)[. _-]+)|-) # and/&/to joiner + (?P(?!(1080|720)[pi])(\d+|[ivx]+))[. _-]) # second ep num + ([. _-]*(?P.+?) # Source_Quality_Etc- + ((?[^- ]+))?)?$ # Group + ''' + ), + + ('no_season_general', + # Show.Name.E23.Test + # Show.Name.Part.3.Source.Quality.Etc-Group + # Show.Name.Part.1.and.Part.2.Blah-Group + ''' + ^((?P.+?)[. _-]+)? # Show_Name and separator + (e(p(isode)?)?|part|pt)[. _-]? # e, ep, episode, or part + (?P(\d+|([ivx]+(?=[. _-])))) # first ep num + ([. _-]+((and|&|to)[. _-]+)? # and/&/to joiner + ((e(p(isode)?)?|part|pt)[. _-]?) 
# e, ep, episode, or part + (?P(?!(1080|720)[pi]) + (\d+|([ivx]+(?=[. _-]))))[. _-])* # second ep num + ([. _-]*(?P.+?) # Source_Quality_Etc- + ((?[^- ]+))?)?$ # Group + ''' + ), + + ('bare', + # Show.Name.102.Source.Quality.Etc-Group + ''' + ^(?P.+?)[. _-]+ # Show_Name and separator + (?P\d{1,2}) # 1 + (?P\d{2}) # 02 and separator + ([. _-]+(?P(?!\d{3}[. _-]+)[^-]+) # Source_Quality_Etc- + (-(?P.+))?)?$ # Group + '''), + + ('no_season', + # Show Name - 01 - Ep Name + # 01 - Ep Name + # 01 - Ep Name + ''' + ^((?P.+?)(?:[. _-]{2,}|[. _]))? # Show_Name and separator + (?P\d{1,2}) # 02 + (?:-(?P\d{1,2}))* # 02 + [. _-]+((?P.+?) # Source_Quality_Etc- + ((?[^- ]+))?)?$ # Group + ''' + ), +] diff --git a/sickbeard/naming.py b/sickbeard/naming.py index f355a97a..33148168 100644 --- a/sickbeard/naming.py +++ b/sickbeard/naming.py @@ -33,12 +33,13 @@ name_presets = ('%SN - %Sx%0E - %EN', '%Sx%0E - %EN', 'S%0SE%0E - %EN', 'Season %0S/%S.N.S%0SE%0E.%Q.N-%RG' - ) +) name_abd_presets = ('%SN - %A-D - %EN', '%S.N.%A.D.%E.N.%Q.N', '%Y/%0M/%S.N.%A.D.%E.N-%RG' - ) +) + class TVShow(): def __init__(self): @@ -46,6 +47,7 @@ class TVShow(): self.genre = "Comedy" self.air_by_date = 0 + class TVEpisode(tv.TVEpisode): def __init__(self, season, episode, name): self.relatedEps = [] @@ -58,6 +60,7 @@ class TVEpisode(tv.TVEpisode): self._release_name = 'Show.Name.S02E03.HDTV.XviD-RLSGROUP' self._is_proper = True + def check_force_season_folders(pattern=None, multi=None): """ Checks if the name can still be parsed if you strip off the folders to determine if we need to force season folders @@ -67,14 +70,15 @@ def check_force_season_folders(pattern=None, multi=None): """ if pattern == None: pattern = sickbeard.NAMING_PATTERN - - valid = not validate_name(pattern, None, file_only=True) - + + valid = not validate_name(pattern, None, file_only=True) + if multi != None: valid = valid or not validate_name(pattern, multi, file_only=True) return valid + def check_valid_naming(pattern=None, multi=None): """ Checks if the name is can be parsed back to its original form for both single and multi episodes. @@ -83,16 +87,17 @@ def check_valid_naming(pattern=None, multi=None): """ if pattern == None: pattern = sickbeard.NAMING_PATTERN - - logger.log(u"Checking whether the pattern "+pattern+" is valid for a single episode", logger.DEBUG) + + logger.log(u"Checking whether the pattern " + pattern + " is valid for a single episode", logger.DEBUG) valid = validate_name(pattern, None) if multi != None: - logger.log(u"Checking whether the pattern "+pattern+" is valid for a multi episode", logger.DEBUG) + logger.log(u"Checking whether the pattern " + pattern + " is valid for a multi episode", logger.DEBUG) valid = valid and validate_name(pattern, multi) return valid + def check_valid_abd_naming(pattern=None): """ Checks if the name is can be parsed back to its original form for an air-by-date format. 
@@ -101,8 +106,8 @@ def check_valid_abd_naming(pattern=None): """ if pattern == None: pattern = sickbeard.NAMING_PATTERN - - logger.log(u"Checking whether the pattern "+pattern+" is valid for an air-by-date episode", logger.DEBUG) + + logger.log(u"Checking whether the pattern " + pattern + " is valid for an air-by-date episode", logger.DEBUG) valid = validate_name(pattern, abd=True) return valid @@ -119,18 +124,18 @@ def validate_name(pattern, multi=None, file_only=False, abd=False): new_name = ek.ek(os.path.join, new_path, new_name) if not new_name: - logger.log(u"Unable to create a name out of "+pattern, logger.DEBUG) + logger.log(u"Unable to create a name out of " + pattern, logger.DEBUG) return False - logger.log(u"Trying to parse "+new_name, logger.DEBUG) + logger.log(u"Trying to parse " + new_name, logger.DEBUG) try: result = parser.parse(new_name) except InvalidNameException: - logger.log(u"Unable to parse "+new_name+", not valid", logger.DEBUG) + logger.log(u"Unable to parse " + new_name + ", not valid", logger.DEBUG) return False - - logger.log("The name "+new_name + " parsed into " + str(result), logger.DEBUG) + + logger.log("The name " + new_name + " parsed into " + str(result), logger.DEBUG) if abd: if result.air_date != ep.airdate: @@ -146,9 +151,10 @@ def validate_name(pattern, multi=None, file_only=False, abd=False): return True + def _generate_sample_ep(multi=None, abd=False): # make a fake episode object - ep = TVEpisode(2,3,"Ep Name") + ep = TVEpisode(2, 3, "Ep Name") ep._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV) ep._airdate = datetime.date(2011, 3, 9) if abd: @@ -160,11 +166,11 @@ def _generate_sample_ep(multi=None, abd=False): ep._name = "Ep Name (1)" ep._release_name = 'Show.Name.S02E03E04E05.HDTV.XviD-RLSGROUP' - secondEp = TVEpisode(2,4,"Ep Name (2)") + secondEp = TVEpisode(2, 4, "Ep Name (2)") secondEp._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV) secondEp._release_name = ep._release_name - thirdEp = TVEpisode(2,5,"Ep Name (3)") + thirdEp = TVEpisode(2, 5, "Ep Name (3)") thirdEp._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV) thirdEp._release_name = ep._release_name @@ -173,8 +179,8 @@ def _generate_sample_ep(multi=None, abd=False): return ep -def test_name(pattern, multi=None, abd=False): +def test_name(pattern, multi=None, abd=False): ep = _generate_sample_ep(multi, abd) return {'name': ep.formatted_filename(pattern, multi), 'dir': ep.formatted_dir(pattern, multi)} \ No newline at end of file diff --git a/sickbeard/network_timezones.py b/sickbeard/network_timezones.py index c49b5630..f8ff1431 100644 --- a/sickbeard/network_timezones.py +++ b/sickbeard/network_timezones.py @@ -39,33 +39,35 @@ sb_timezone = tz.tzlocal() # helper to remove failed temp download def _remove_zoneinfo_failed(filename): try: - ek.ek(os.remove,filename) + ek.ek(os.remove, filename) except: pass + # helper to remove old unneeded zoneinfo files def _remove_old_zoneinfo(): if (lib.dateutil.zoneinfo.ZONEINFOFILE is not None): cur_zoneinfo = ek.ek(basename, lib.dateutil.zoneinfo.ZONEINFOFILE) else: return - - cur_file = helpers.real_path(ek.ek(join,ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), cur_zoneinfo)) - - for (path, dirs, files) in ek.ek(os.walk,helpers.real_path(ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__))): + + cur_file = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), cur_zoneinfo)) + + for (path, dirs, files) in ek.ek(os.walk, + helpers.real_path(ek.ek(os.path.dirname, 
lib.dateutil.zoneinfo.__file__))): for filename in files: if filename.endswith('.tar.gz'): - file_w_path = ek.ek(join,path,filename) - if file_w_path != cur_file and ek.ek(isfile,file_w_path): + file_w_path = ek.ek(join, path, filename) + if file_w_path != cur_file and ek.ek(isfile, file_w_path): try: - ek.ek(os.remove,file_w_path) + ek.ek(os.remove, file_w_path) logger.log(u"Delete unneeded old zoneinfo File: " + file_w_path) except: - logger.log(u"Unable to delete: " + file_w_path,logger.ERROR) + logger.log(u"Unable to delete: " + file_w_path, logger.ERROR) + # update the dateutil zoneinfo def _update_zoneinfo(): - global sb_timezone sb_timezone = tz.tzlocal() @@ -90,36 +92,37 @@ def _update_zoneinfo(): # now load the new zoneinfo url_tar = u'https://github.com/Prinz23/sb_network_timezones/raw/master/' + new_zoneinfo - - zonefile = helpers.real_path(ek.ek(join,ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), new_zoneinfo)) - zonefile_tmp = re.sub(r"\.tar\.gz$",'.tmp', zonefile) - if (ek.ek(os.path.exists,zonefile_tmp)): + zonefile = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), new_zoneinfo)) + zonefile_tmp = re.sub(r"\.tar\.gz$", '.tmp', zonefile) + + if (ek.ek(os.path.exists, zonefile_tmp)): try: - ek.ek(os.remove,zonefile_tmp) + ek.ek(os.remove, zonefile_tmp) except: - logger.log(u"Unable to delete: " + zonefile_tmp,logger.ERROR) + logger.log(u"Unable to delete: " + zonefile_tmp, logger.ERROR) return if not helpers.download_file(url_tar, zonefile_tmp): return - if not ek.ek(os.path.exists,zonefile_tmp): - logger.log(u"Download of " + zonefile_tmp + " failed.",logger.ERROR) + if not ek.ek(os.path.exists, zonefile_tmp): + logger.log(u"Download of " + zonefile_tmp + " failed.", logger.ERROR) return new_hash = str(helpers.md5_for_file(zonefile_tmp)) if (zoneinfo_md5.upper() == new_hash.upper()): - logger.log(u"Updating timezone info with new one: " + new_zoneinfo,logger.MESSAGE) + logger.log(u"Updating timezone info with new one: " + new_zoneinfo, logger.MESSAGE) try: # remove the old zoneinfo file if (cur_zoneinfo is not None): - old_file = helpers.real_path(ek.ek(join,ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), cur_zoneinfo)) - if (ek.ek(os.path.exists,old_file)): - ek.ek(os.remove,old_file) + old_file = helpers.real_path( + ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), cur_zoneinfo)) + if (ek.ek(os.path.exists, old_file)): + ek.ek(os.remove, old_file) # rename downloaded file - ek.ek(os.rename,zonefile_tmp,zonefile) + ek.ek(os.rename, zonefile_tmp, zonefile) # load the new zoneinfo reload(lib.dateutil.zoneinfo) sb_timezone = tz.tzlocal() @@ -128,12 +131,12 @@ def _update_zoneinfo(): return else: _remove_zoneinfo_failed(zonefile_tmp) - logger.log(u"MD5 HASH doesn't match: " + zoneinfo_md5.upper() + ' File: ' + new_hash.upper(),logger.ERROR) + logger.log(u"MD5 HASH doesn't match: " + zoneinfo_md5.upper() + ' File: ' + new_hash.upper(), logger.ERROR) return + # update the network timezone table def update_network_dict(): - _remove_old_zoneinfo() _update_zoneinfo() @@ -152,10 +155,10 @@ def update_network_dict(): try: for line in url_data.splitlines(): - (key, val) = line.decode('utf-8').strip().rsplit(u':',1) - if key is None or val is None: - continue - d[key] = val + (key, val) = line.decode('utf-8').strip().rsplit(u':', 1) + if key is None or val is None: + continue + d[key] = val except (IOError, OSError): pass @@ -169,7 +172,8 @@ def update_network_dict(): h_k = old_d.has_key(cur_d) if h_k and cur_t != 
old_d[cur_d]: # update old record - ql.append(["UPDATE network_timezones SET network_name=?, timezone=? WHERE network_name=?", [cur_d, cur_t, cur_d]]) + ql.append( + ["UPDATE network_timezones SET network_name=?, timezone=? WHERE network_name=?", [cur_d, cur_t, cur_d]]) elif not h_k: # add new record ql.append(["INSERT INTO network_timezones (network_name, timezone) VALUES (?,?)", [cur_d, cur_t]]) @@ -178,12 +182,13 @@ def update_network_dict(): # remove deleted records if len(old_d) > 0: L = list(va for va in old_d) - ql.append(["DELETE FROM network_timezones WHERE network_name IN ("+','.join(['?'] * len(L))+")", L]) + ql.append(["DELETE FROM network_timezones WHERE network_name IN (" + ','.join(['?'] * len(L)) + ")", L]) # change all network timezone infos at once (much faster) if len(ql) > 0: myDB.mass_action(ql) load_network_dict() + # load network timezones from db into dict def load_network_dict(): d = {} @@ -199,6 +204,7 @@ def load_network_dict(): global network_dict network_dict = d + # get timezone of a network or return default timezone def get_network_timezone(network, network_dict): if network is None: @@ -206,7 +212,7 @@ def get_network_timezone(network, network_dict): try: if lib.dateutil.zoneinfo.ZONEINFOFILE is not None: - n_t = tz.gettz(network_dict[network]) + n_t = tz.gettz(network_dict[network]) if n_t is not None: return n_t else: @@ -216,6 +222,7 @@ def get_network_timezone(network, network_dict): except: return sb_timezone + # parse date and time string into local time def parse_date_time(d, t, network): if network_dict is None: @@ -257,6 +264,7 @@ def parse_date_time(d, t, network): except (ValueError): return foreign_naive + def test_timeformat(t): mo = time_regex.search(t) if mo is None or len(mo.groups()) < 2: diff --git a/sickbeard/notifiers/__init__.py b/sickbeard/notifiers/__init__.py index 5365466d..6d6eeb77 100644 --- a/sickbeard/notifiers/__init__.py +++ b/sickbeard/notifiers/__init__.py @@ -64,7 +64,7 @@ trakt_notifier = trakt.TraktNotifier() email_notifier = emailnotify.EmailNotifier() notifiers = [ - libnotify_notifier, # Libnotify notifier goes first because it doesn't involve blocking on network activity. + libnotify_notifier, # Libnotify notifier goes first because it doesn't involve blocking on network activity. 
xbmc_notifier, plex_notifier, nmj_notifier, @@ -89,10 +89,12 @@ def notify_download(ep_name): for n in notifiers: n.notify_download(ep_name) + def notify_subtitle_download(ep_name, lang): for n in notifiers: n.notify_subtitle_download(ep_name, lang) + def notify_snatch(ep_name): for n in notifiers: n.notify_snatch(ep_name) diff --git a/sickbeard/notifiers/boxcar.py b/sickbeard/notifiers/boxcar.py index 633245f1..84e5db92 100644 --- a/sickbeard/notifiers/boxcar.py +++ b/sickbeard/notifiers/boxcar.py @@ -28,8 +28,8 @@ from sickbeard.exceptions import ex API_URL = "https://boxcar.io/devices/providers/fWc4sgSmpcN6JujtBmR6/notifications" -class BoxcarNotifier: +class BoxcarNotifier: def test_notify(self, email, title="Test"): return self._sendBoxcar("This is a test notification from SickBeard", title, email) @@ -44,7 +44,7 @@ class BoxcarNotifier: returns: True if the message succeeded, False otherwise """ - + # build up the URL and parameters msg = msg.strip() curUrl = API_URL @@ -53,7 +53,7 @@ class BoxcarNotifier: if subscribe: data = urllib.urlencode({'email': email}) curUrl = curUrl + "/subscribe" - + # for normal requests we need all these parameters else: data = urllib.urlencode({ @@ -61,7 +61,7 @@ class BoxcarNotifier: 'notification[from_screen_name]': title, 'notification[message]': msg.encode('utf-8'), 'notification[from_remote_service_id]': int(time.time()) - }) + }) # send the request to boxcar @@ -69,7 +69,7 @@ class BoxcarNotifier: req = urllib2.Request(curUrl) handle = urllib2.urlopen(req, data) handle.close() - + except urllib2.URLError, e: # if we get an error back that doesn't have an error code then who knows what's really happening if not hasattr(e, 'code'): @@ -82,16 +82,16 @@ class BoxcarNotifier: if e.code == 404: logger.log("Username is wrong/not a boxcar email. Boxcar will send an email to it", logger.WARNING) return False - + # For HTTP status code 401's, it is because you are passing in either an invalid token, or the user has not added your service. elif e.code == 401: - + # If the user has already added your service, we'll return an HTTP status code of 401. if subscribe: logger.log("Already subscribed to service", logger.ERROR) # i dont know if this is true or false ... 
its neither but i also dont know how we got here in the first place return False - + #HTTP status 401 if the user doesn't have the service added else: subscribeNote = self._sendBoxcar(msg, title, email, True) @@ -101,7 +101,7 @@ class BoxcarNotifier: else: logger.log("Subscription could not be send", logger.ERROR) return False - + # If you receive an HTTP status code of 400, it is because you failed to send the proper parameters elif e.code == 400: logger.log("Wrong data send to boxcar", logger.ERROR) @@ -113,7 +113,7 @@ class BoxcarNotifier: def notify_snatch(self, ep_name, title=notifyStrings[NOTIFY_SNATCH]): if sickbeard.BOXCAR_NOTIFY_ONSNATCH: self._notifyBoxcar(title, ep_name) - + def notify_download(self, ep_name, title=notifyStrings[NOTIFY_DOWNLOAD]): if sickbeard.BOXCAR_NOTIFY_ONDOWNLOAD: @@ -146,4 +146,5 @@ class BoxcarNotifier: self._sendBoxcar(message, title, username) return True + notifier = BoxcarNotifier diff --git a/sickbeard/notifiers/emailnotify.py b/sickbeard/notifiers/emailnotify.py index f872e75f..d339a500 100644 --- a/sickbeard/notifiers/emailnotify.py +++ b/sickbeard/notifiers/emailnotify.py @@ -31,10 +31,11 @@ from sickbeard import logger from sickbeard import db from sickbeard.exceptions import ex + class EmailNotifier: def __init__(self): self.last_err = None - + def test_notify(self, host, port, smtp_from, use_tls, user, pwd, to): msg = MIMEText('This is a test message from Sick Beard. If you\'re reading this, the test succeeded.') msg['Subject'] = 'Sick Beard: Test Message' @@ -55,19 +56,25 @@ class EmailNotifier: if len(to) == 0: logger.log('Skipping email notify because there are no configured recepients', logger.WARNING) else: - try: - msg = MIMEMultipart('alternative') - msg.attach(MIMEText("
<body style='font-family:Helvetica, Arial, sans-serif;'><h3>Sick Beard Notification - Snatched</h3>\n<p>Show: <b>" + re.search("(.+?) -.+", ep_name).group(1) + "</b></p>\n<p>Episode: <b>" + re.search(".+ - (.+?-.+) -.+", ep_name).group(1) + "</b></p>\n\n<footer style='margin-top: 2.5em; padding: .7em 0; color: #777; border-top: #BBB solid 1px;'>Powered by Sick Beard.</footer></body>
", 'html')) - except: - msg = MIMEText(ep_name) - + try: + msg = MIMEMultipart('alternative') + msg.attach(MIMEText( + "
<body style='font-family:Helvetica, Arial, sans-serif;'><h3>Sick Beard Notification - Snatched</h3>\n<p>Show: <b>" + re.search( + "(.+?) -.+", ep_name).group(1) + "</b></p>\n<p>Episode: <b>" + re.search( + ".+ - (.+?-.+) -.+", ep_name).group( + 1) + "</b></p>\n\n<footer style='margin-top: 2.5em; padding: .7em 0; color: #777; border-top: #BBB solid 1px;'>Powered by Sick Beard.</footer></body>
", + 'html')) + except: + msg = MIMEText(ep_name) + msg['Subject'] = 'Snatched: ' + ep_name msg['From'] = sickbeard.EMAIL_FROM msg['To'] = ','.join(to) - if self._sendmail(sickbeard.EMAIL_HOST, sickbeard.EMAIL_PORT, sickbeard.EMAIL_FROM, sickbeard.EMAIL_TLS, sickbeard.EMAIL_USER, sickbeard.EMAIL_PASSWORD, to, msg): + if self._sendmail(sickbeard.EMAIL_HOST, sickbeard.EMAIL_PORT, sickbeard.EMAIL_FROM, sickbeard.EMAIL_TLS, + sickbeard.EMAIL_USER, sickbeard.EMAIL_PASSWORD, to, msg): logger.log("Snatch notification sent to [%s] for '%s'" % (to, ep_name), logger.DEBUG) else: - logger.log("Snatch notification ERROR: %s" % self.last_err, logger.ERROR) + logger.log("Snatch notification ERROR: %s" % self.last_err, logger.ERROR) def notify_download(self, ep_name, title="Completed:"): """ @@ -82,16 +89,22 @@ class EmailNotifier: if len(to) == 0: logger.log('Skipping email notify because there are no configured recepients', logger.WARNING) else: - try: - msg = MIMEMultipart('alternative') - msg.attach(MIMEText("
<body style='font-family:Helvetica, Arial, sans-serif;'><h3>Sick Beard Notification - Downloaded</h3>\n<p>Show: <b>" + re.search("(.+?) -.+", ep_name).group(1) + "</b></p>\n<p>Episode: <b>" + re.search(".+ - (.+?-.+) -.+", ep_name).group(1) + "</b></p>\n\n<footer style='margin-top: 2.5em; padding: .7em 0; color: #777; border-top: #BBB solid 1px;'>Powered by Sick Beard.</footer></body>
", 'html')) - except: - msg = MIMEText(ep_name) - + try: + msg = MIMEMultipart('alternative') + msg.attach(MIMEText( + "
<body style='font-family:Helvetica, Arial, sans-serif;'><h3>Sick Beard Notification - Downloaded</h3>\n<p>Show: <b>" + re.search( + "(.+?) -.+", ep_name).group(1) + "</b></p>\n<p>Episode: <b>" + re.search( + ".+ - (.+?-.+) -.+", ep_name).group( + 1) + "</b></p>\n\n<footer style='margin-top: 2.5em; padding: .7em 0; color: #777; border-top: #BBB solid 1px;'>Powered by Sick Beard.</footer></body>
", + 'html')) + except: + msg = MIMEText(ep_name) + msg['Subject'] = 'Downloaded: ' + ep_name msg['From'] = sickbeard.EMAIL_FROM msg['To'] = ','.join(to) - if self._sendmail(sickbeard.EMAIL_HOST, sickbeard.EMAIL_PORT, sickbeard.EMAIL_FROM, sickbeard.EMAIL_TLS, sickbeard.EMAIL_USER, sickbeard.EMAIL_PASSWORD, to, msg): + if self._sendmail(sickbeard.EMAIL_HOST, sickbeard.EMAIL_PORT, sickbeard.EMAIL_FROM, sickbeard.EMAIL_TLS, + sickbeard.EMAIL_USER, sickbeard.EMAIL_PASSWORD, to, msg): logger.log("Download notification sent to [%s] for '%s'" % (to, ep_name), logger.DEBUG) else: logger.log("Download notification ERROR: %s" % self.last_err, logger.ERROR) @@ -109,16 +122,22 @@ class EmailNotifier: if len(to) == 0: logger.log('Skipping email notify because there are no configured recepients', logger.WARNING) else: - try: - msg = MIMEMultipart('alternative') - msg.attach(MIMEText("
<body style='font-family:Helvetica, Arial, sans-serif;'><h3>Sick Beard Notification - Subtitle Downloaded</h3>\n<p>Show: <b>" + re.search("(.+?) -.+", ep_name).group(1) + "</b></p>\n<p>Episode: <b>" + re.search(".+ - (.+?-.+) -.+", ep_name).group(1) + "</b></p>\n<p>Language: <b>" + lang + "</b></p>\n\n<footer style='margin-top: 2.5em; padding: .7em 0; color: #777; border-top: #BBB solid 1px;'>Powered by Sick Beard.</footer></body>
", 'html')) - except: - msg = MIMEText(ep_name + ": " + lang) - + try: + msg = MIMEMultipart('alternative') + msg.attach(MIMEText( + "
<body style='font-family:Helvetica, Arial, sans-serif;'><h3>Sick Beard Notification - Subtitle Downloaded</h3>\n<p>Show: <b>" + re.search( + "(.+?) -.+", ep_name).group(1) + "</b></p>\n<p>Episode: <b>" + re.search( + ".+ - (.+?-.+) -.+", ep_name).group( + 1) + "</b></p>\n<p>Language: <b>" + lang + "</b></p>\n\n<footer style='margin-top: 2.5em; padding: .7em 0; color: #777; border-top: #BBB solid 1px;'>Powered by Sick Beard.</footer></body>
", + 'html')) + except: + msg = MIMEText(ep_name + ": " + lang) + msg['Subject'] = lang + ' Subtitle Downloaded: ' + ep_name msg['From'] = sickbeard.EMAIL_FROM msg['To'] = ','.join(to) - if self._sendmail(sickbeard.EMAIL_HOST, sickbeard.EMAIL_PORT, sickbeard.EMAIL_FROM, sickbeard.EMAIL_TLS, sickbeard.EMAIL_USER, sickbeard.EMAIL_PASSWORD, to, msg): + if self._sendmail(sickbeard.EMAIL_HOST, sickbeard.EMAIL_PORT, sickbeard.EMAIL_FROM, sickbeard.EMAIL_TLS, + sickbeard.EMAIL_USER, sickbeard.EMAIL_PASSWORD, to, msg): logger.log("Download notification sent to [%s] for '%s'" % (to, ep_name), logger.DEBUG) else: logger.log("Download notification ERROR: %s" % self.last_err, logger.ERROR) @@ -128,7 +147,7 @@ class EmailNotifier: # Grab the global recipients for addr in sickbeard.EMAIL_LIST.split(','): - if(len(addr.strip()) > 0): + if (len(addr.strip()) > 0): addrs.append(addr) # Grab the recipients for the show @@ -137,15 +156,16 @@ class EmailNotifier: for subs in mydb.select("SELECT notify_list FROM tv_shows WHERE show_name = ?", (s,)): if subs['notify_list']: for addr in subs['notify_list'].split(','): - if(len(addr.strip()) > 0): + if (len(addr.strip()) > 0): addrs.append(addr) - + addrs = set(addrs) logger.log('Notification recepients: %s' % addrs, logger.DEBUG) return addrs - + def _sendmail(self, host, port, smtp_from, use_tls, user, pwd, to, msg, smtpDebug=False): - logger.log('HOST: %s; PORT: %s; FROM: %s, TLS: %s, USER: %s, PWD: %s, TO: %s' % (host, port, smtp_from, use_tls, user, pwd, to), logger.DEBUG) + logger.log('HOST: %s; PORT: %s; FROM: %s, TLS: %s, USER: %s, PWD: %s, TO: %s' % ( + host, port, smtp_from, use_tls, user, pwd, to), logger.DEBUG) srv = smtplib.SMTP(host, int(port)) if smtpDebug: srv.set_debuglevel(1) @@ -172,5 +192,6 @@ class EmailNotifier: titles.sort(key=len, reverse=True) logger.log("TITLES: %s" % titles, logger.DEBUG) return titles - + + notifier = EmailNotifier diff --git a/sickbeard/notifiers/growl.py b/sickbeard/notifiers/growl.py index 86d901be..d8735aad 100644 --- a/sickbeard/notifiers/growl.py +++ b/sickbeard/notifiers/growl.py @@ -25,11 +25,12 @@ from sickbeard.exceptions import ex from lib.growl import gntp -class GrowlNotifier: +class GrowlNotifier: def test_notify(self, host, password): self._sendRegistration(host, password, 'Test') - return self._sendGrowl("Test Growl", "Testing Growl settings from Sick Beard", "Test", host, password, force=True) + return self._sendGrowl("Test Growl", "Testing Growl settings from Sick Beard", "Test", host, password, + force=True) def notify_snatch(self, ep_name): if sickbeard.GROWL_NOTIFY_ONSNATCH: @@ -43,145 +44,145 @@ class GrowlNotifier: if sickbeard.GROWL_NOTIFY_ONSUBTITLEDOWNLOAD: self._sendGrowl(common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], ep_name + ": " + lang) - def _send_growl(self, options,message=None): - + def _send_growl(self, options, message=None): + #Send Notification notice = gntp.GNTPNotice() - + #Required - notice.add_header('Application-Name',options['app']) - notice.add_header('Notification-Name',options['name']) - notice.add_header('Notification-Title',options['title']) - + notice.add_header('Application-Name', options['app']) + notice.add_header('Notification-Name', options['name']) + notice.add_header('Notification-Title', options['title']) + if options['password']: notice.set_password(options['password']) - + #Optional if options['sticky']: - notice.add_header('Notification-Sticky',options['sticky']) + notice.add_header('Notification-Sticky', options['sticky']) if options['priority']: - 
notice.add_header('Notification-Priority',options['priority']) + notice.add_header('Notification-Priority', options['priority']) if options['icon']: - notice.add_header('Notification-Icon', 'https://raw.github.com/midgetspy/Sick-Beard/master/data/images/sickbeard.png') - - if message: - notice.add_header('Notification-Text',message) + notice.add_header('Notification-Icon', + 'https://raw.github.com/midgetspy/Sick-Beard/master/data/images/sickbeard.png') - response = self._send(options['host'],options['port'],notice.encode(),options['debug']) - if isinstance(response,gntp.GNTPOK): return True + if message: + notice.add_header('Notification-Text', message) + + response = self._send(options['host'], options['port'], notice.encode(), options['debug']) + if isinstance(response, gntp.GNTPOK): return True return False - def _send(self, host,port,data,debug=False): - if debug: print '\n',data,'\n' - + def _send(self, host, port, data, debug=False): + if debug: print '\n', data, '\n' + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - s.connect((host,port)) + s.connect((host, port)) s.send(data) response = gntp.parse_gntp(s.recv(1024)) s.close() - - if debug: print '\n',response,'\n' + + if debug: print '\n', response, '\n' return response - def _sendGrowl(self, title="Sick Beard Notification", message=None, name=None, host=None, password=None, force=False): + def _sendGrowl(self, title="Sick Beard Notification", message=None, name=None, host=None, password=None, + force=False): if not sickbeard.USE_GROWL and not force: return False - + if name == None: name = title - + if host == None: hostParts = sickbeard.GROWL_HOST.split(':') else: hostParts = host.split(':') - + if len(hostParts) != 2 or hostParts[1] == '': port = 23053 else: port = int(hostParts[1]) - - growlHosts = [(hostParts[0],port)] - + + growlHosts = [(hostParts[0], port)] + opts = {} - + opts['name'] = name - + opts['title'] = title opts['app'] = 'SickBeard' - + opts['sticky'] = None opts['priority'] = None opts['debug'] = False - + if password == None: opts['password'] = sickbeard.GROWL_PASSWORD else: opts['password'] = password - + opts['icon'] = True - - + for pc in growlHosts: opts['host'] = pc[0] opts['port'] = pc[1] - logger.log(u"Sending growl to "+opts['host']+":"+str(opts['port'])+": "+message) + logger.log(u"Sending growl to " + opts['host'] + ":" + str(opts['port']) + ": " + message) try: if self._send_growl(opts, message): return True - else: + else: if self._sendRegistration(host, password, 'Sickbeard'): return self._send_growl(opts, message) else: return False except socket.error, e: - logger.log(u"Unable to send growl to "+opts['host']+":"+str(opts['port'])+": "+ex(e)) + logger.log(u"Unable to send growl to " + opts['host'] + ":" + str(opts['port']) + ": " + ex(e)) return False def _sendRegistration(self, host=None, password=None, name='Sick Beard Notification'): opts = {} - + if host == None: hostParts = sickbeard.GROWL_HOST.split(':') else: hostParts = host.split(':') - + if len(hostParts) != 2 or hostParts[1] == '': port = 23053 else: port = int(hostParts[1]) - + opts['host'] = hostParts[0] opts['port'] = port - - + if password == None: opts['password'] = sickbeard.GROWL_PASSWORD else: opts['password'] = password - - + opts['app'] = 'SickBeard' opts['debug'] = False - + #Send Registration register = gntp.GNTPRegister() register.add_header('Application-Name', opts['app']) - register.add_header('Application-Icon', 'https://raw.github.com/midgetspy/Sick-Beard/master/data/images/sickbeard.png') - + 
register.add_header('Application-Icon', + 'https://raw.github.com/midgetspy/Sick-Beard/master/data/images/sickbeard.png') + register.add_notification('Test', True) register.add_notification(common.notifyStrings[common.NOTIFY_SNATCH], True) register.add_notification(common.notifyStrings[common.NOTIFY_DOWNLOAD], True) if opts['password']: register.set_password(opts['password']) - + try: - return self._send(opts['host'],opts['port'],register.encode(),opts['debug']) + return self._send(opts['host'], opts['port'], register.encode(), opts['debug']) except socket.error, e: - logger.log(u"Unable to send growl to "+opts['host']+":"+str(opts['port'])+": "+str(e).decode('utf-8')) + logger.log( + u"Unable to send growl to " + opts['host'] + ":" + str(opts['port']) + ": " + str(e).decode('utf-8')) return False - - - + + notifier = GrowlNotifier \ No newline at end of file diff --git a/sickbeard/notifiers/libnotify.py b/sickbeard/notifiers/libnotify.py index 2dfac95a..c93c7e12 100644 --- a/sickbeard/notifiers/libnotify.py +++ b/sickbeard/notifiers/libnotify.py @@ -22,6 +22,7 @@ import sickbeard from sickbeard import logger, common + def diagnose(): ''' Check the environment for reasons libnotify isn't working. Return a @@ -115,4 +116,5 @@ class LibnotifyNotifier: except self.gobject.GError: return False + notifier = LibnotifyNotifier diff --git a/sickbeard/notifiers/nma.py b/sickbeard/notifiers/nma.py index 1c67990f..066ad101 100644 --- a/sickbeard/notifiers/nma.py +++ b/sickbeard/notifiers/nma.py @@ -3,54 +3,59 @@ import sickbeard from sickbeard import logger, common from lib.pynma import pynma + class NMA_Notifier: - def test_notify(self, nma_api, nma_priority): - return self._sendNMA(nma_api, nma_priority, event="Test", message="Testing NMA settings from Sick Beard", force=True) + return self._sendNMA(nma_api, nma_priority, event="Test", message="Testing NMA settings from Sick Beard", + force=True) def notify_snatch(self, ep_name): if sickbeard.NMA_NOTIFY_ONSNATCH: - self._sendNMA(nma_api=None, nma_priority=None, event=common.notifyStrings[common.NOTIFY_SNATCH], message=ep_name) + self._sendNMA(nma_api=None, nma_priority=None, event=common.notifyStrings[common.NOTIFY_SNATCH], + message=ep_name) def notify_download(self, ep_name): if sickbeard.NMA_NOTIFY_ONDOWNLOAD: - self._sendNMA(nma_api=None, nma_priority=None, event=common.notifyStrings[common.NOTIFY_DOWNLOAD], message=ep_name) + self._sendNMA(nma_api=None, nma_priority=None, event=common.notifyStrings[common.NOTIFY_DOWNLOAD], + message=ep_name) def notify_subtitle_download(self, ep_name, lang): if sickbeard.NMA_NOTIFY_ONSUBTITLEDOWNLOAD: - self._sendNMA(nma_api=None, nma_priority=None, event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], message=ep_name + ": " + lang) - + self._sendNMA(nma_api=None, nma_priority=None, event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], + message=ep_name + ": " + lang) + def _sendNMA(self, nma_api=None, nma_priority=None, event=None, message=None, force=False): - + title = 'Sick-Beard' - + if not sickbeard.USE_NMA and not force: return False - + if nma_api == None: nma_api = sickbeard.NMA_API - + if nma_priority == None: nma_priority = sickbeard.NMA_PRIORITY - + logger.log(u"NMA title: " + title, logger.DEBUG) logger.log(u"NMA event: " + event, logger.DEBUG) logger.log(u"NMA message: " + message, logger.DEBUG) - + batch = False - + p = pynma.PyNMA() keys = nma_api.split(',') p.addkey(keys) - + if len(keys) > 1: batch = True - + response = p.push(title, event, message, priority=nma_priority, 
batch_mode=batch) - + if not response[nma_api][u'code'] == u'200': logger.log(u'Could not send notification to NotifyMyAndroid', logger.ERROR) return False else: return True - + + notifier = NMA_Notifier \ No newline at end of file diff --git a/sickbeard/notifiers/nmj.py b/sickbeard/notifiers/nmj.py index ef0ce142..f1337111 100644 --- a/sickbeard/notifiers/nmj.py +++ b/sickbeard/notifiers/nmj.py @@ -38,7 +38,7 @@ class NMJNotifier: Returns: True if the settings were retrieved successfully, False otherwise """ - + # establish a terminal session to the PC terminal = False try: @@ -68,7 +68,7 @@ class NMJNotifier: else: logger.log(u"Could not get current NMJ database on %s, NMJ is probably not running!" % (host), logger.ERROR) return False - + # if the device is a remote host then try to parse the mounting URL and save it to the config if device.startswith("NETWORK_SHARE/"): match = re.search(".*(?=\r\n?%s)" % (re.escape(device[14:])), tnoutput) @@ -78,11 +78,12 @@ class NMJNotifier: logger.log(u"Found mounting url on the Popcorn Hour in configuration: %s" % (mount), logger.DEBUG) sickbeard.NMJ_MOUNT = mount else: - logger.log(u"Detected a network share on the Popcorn Hour, but could not get the mounting url", logger.DEBUG) + logger.log(u"Detected a network share on the Popcorn Hour, but could not get the mounting url", + logger.DEBUG) return False return True - + def notify_snatch(self, ep_name): return False #Not implemented: Start the scanner when snatched does not make any sense @@ -94,7 +95,7 @@ class NMJNotifier: def notify_subtitle_download(self, ep_name, lang): if sickbeard.USE_NMJ: self._notifyNMJ() - + def test_notify(self, host, database, mount): return self._sendNMJ(host, database, mount) @@ -108,7 +109,7 @@ class NMJNotifier: Returns: True if the request succeeded, False otherwise """ - + # if a mount URL is provided then attempt to open a handle to that URL if mount: try: @@ -146,7 +147,7 @@ class NMJNotifier: except SyntaxError, e: logger.log(u"Unable to parse XML returned from the Popcorn Hour: %s" % (e), logger.ERROR) return False - + # if the result was a number then consider that an error if int(result) > 0: logger.log(u"Popcorn Hour returned an errorcode: %s" % (result)) @@ -180,4 +181,5 @@ class NMJNotifier: return self._sendNMJ(host, database, mount) + notifier = NMJNotifier diff --git a/sickbeard/notifiers/nmjv2.py b/sickbeard/notifiers/nmjv2.py index 95dd4436..59a9c203 100644 --- a/sickbeard/notifiers/nmjv2.py +++ b/sickbeard/notifiers/nmjv2.py @@ -17,7 +17,7 @@ # You should have received a copy of the GNU General Public License # along with Sick Beard. If not, see . 
-import urllib, urllib2,xml.dom.minidom +import urllib, urllib2, xml.dom.minidom from xml.dom.minidom import parseString import sickbeard import telnetlib @@ -33,7 +33,6 @@ except ImportError: class NMJv2Notifier: - def notify_snatch(self, ep_name): return False #Not implemented: Start the scanner when snatched does not make any sense @@ -58,32 +57,35 @@ class NMJv2Notifier: Returns: True if the settings were retrieved successfully, False otherwise """ try: - url_loc = "http://" + host + ":8008/file_operation?arg0=list_user_storage_file&arg1=&arg2="+instance+"&arg3=20&arg4=true&arg5=true&arg6=true&arg7=all&arg8=name_asc&arg9=false&arg10=false" + url_loc = "http://" + host + ":8008/file_operation?arg0=list_user_storage_file&arg1=&arg2=" + instance + "&arg3=20&arg4=true&arg5=true&arg6=true&arg7=all&arg8=name_asc&arg9=false&arg10=false" req = urllib2.Request(url_loc) handle1 = urllib2.urlopen(req) response1 = handle1.read() xml = parseString(response1) - time.sleep (300.0 / 1000.0) + time.sleep(300.0 / 1000.0) for node in xml.getElementsByTagName('path'): - xmlTag=node.toxml(); - xmlData=xmlTag.replace('','').replace('','').replace('[=]','') - url_db = "http://" + host + ":8008/metadata_database?arg0=check_database&arg1="+ xmlData + xmlTag = node.toxml(); + xmlData = xmlTag.replace('', '').replace('', '').replace('[=]', '') + url_db = "http://" + host + ":8008/metadata_database?arg0=check_database&arg1=" + xmlData reqdb = urllib2.Request(url_db) handledb = urllib2.urlopen(reqdb) responsedb = handledb.read() xmldb = parseString(responsedb) - returnvalue=xmldb.getElementsByTagName('returnValue')[0].toxml().replace('','').replace('','') - if returnvalue=="0": - DB_path=xmldb.getElementsByTagName('database_path')[0].toxml().replace('','').replace('','').replace('[=]','') - if dbloc=="local" and DB_path.find("localhost")>-1: - sickbeard.NMJv2_HOST=host - sickbeard.NMJv2_DATABASE=DB_path + returnvalue = xmldb.getElementsByTagName('returnValue')[0].toxml().replace('', '').replace( + '', '') + if returnvalue == "0": + DB_path = xmldb.getElementsByTagName('database_path')[0].toxml().replace('', + '').replace( + '', '').replace('[=]', '') + if dbloc == "local" and DB_path.find("localhost") > -1: + sickbeard.NMJv2_HOST = host + sickbeard.NMJv2_DATABASE = DB_path return True - if dbloc=="network" and DB_path.find("://")>-1: - sickbeard.NMJv2_HOST=host - sickbeard.NMJv2_DATABASE=DB_path + if dbloc == "network" and DB_path.find("://") > -1: + sickbeard.NMJv2_HOST = host + sickbeard.NMJv2_DATABASE = DB_path return True - + except IOError, e: logger.log(u"Warning: Couldn't contact popcorn hour on host %s: %s" % (host, e)) return False @@ -99,52 +101,52 @@ class NMJv2Notifier: Returns: True if the request succeeded, False otherwise """ - + #if a host is provided then attempt to open a handle to that URL try: - url_scandir = "http://" + host + ":8008/metadata_database?arg0=update_scandir&arg1="+ sickbeard.NMJv2_DATABASE +"&arg2=&arg3=update_all" + url_scandir = "http://" + host + ":8008/metadata_database?arg0=update_scandir&arg1=" + sickbeard.NMJv2_DATABASE + "&arg2=&arg3=update_all" logger.log(u"NMJ scan update command send to host: %s" % (host)) - url_updatedb = "http://" + host + ":8008/metadata_database?arg0=scanner_start&arg1="+ sickbeard.NMJv2_DATABASE +"&arg2=background&arg3=" + url_updatedb = "http://" + host + ":8008/metadata_database?arg0=scanner_start&arg1=" + sickbeard.NMJv2_DATABASE + "&arg2=background&arg3=" logger.log(u"Try to mount network drive via url: %s" % (host), logger.DEBUG) prereq = 
urllib2.Request(url_scandir)
             req = urllib2.Request(url_updatedb)
             handle1 = urllib2.urlopen(prereq)
             response1 = handle1.read()
-            time.sleep (300.0 / 1000.0)
+            time.sleep(300.0 / 1000.0)
             handle2 = urllib2.urlopen(req)
             response2 = handle2.read()
         except IOError, e:
             logger.log(u"Warning: Couldn't contact popcorn hour on host %s: %s" % (host, e))
             return False
-        try:
+        try:
             et = etree.fromstring(response1)
             result1 = et.findtext("returnValue")
         except SyntaxError, e:
-            logger.log(u"Unable to parse XML returned from the Popcorn Hour: update_scandir, %s" % (e), logger.ERROR)
-            return False
-        try:
+            logger.log(u"Unable to parse XML returned from the Popcorn Hour: update_scandir, %s" % (e), logger.ERROR)
+            return False
+        try:
             et = etree.fromstring(response2)
             result2 = et.findtext("returnValue")
         except SyntaxError, e:
             logger.log(u"Unable to parse XML returned from the Popcorn Hour: scanner_start, %s" % (e), logger.ERROR)
             return False
-
+
         # if the result was a number then consider that an error
-        error_codes=["8","11","22","49","50","51","60"]
-        error_messages=["Invalid parameter(s)/argument(s)",
-                        "Invalid database path",
-                        "Insufficient size",
-                        "Database write error",
-                        "Database read error",
-                        "Open fifo pipe failed",
-                        "Read only file system"]
+        error_codes = ["8", "11", "22", "49", "50", "51", "60"]
+        error_messages = ["Invalid parameter(s)/argument(s)",
+                          "Invalid database path",
+                          "Insufficient size",
+                          "Database write error",
+                          "Database read error",
+                          "Open fifo pipe failed",
+                          "Read only file system"]
         if int(result1) > 0:
-            index=error_codes.index(result1)
+            index = error_codes.index(result1)
             logger.log(u"Popcorn Hour returned an error: %s" % (error_messages[index]))
             return False
         else:
             if int(result2) > 0:
-                index=error_codes.index(result2)
+                index = error_codes.index(result2)
                 logger.log(u"Popcorn Hour returned an error: %s" % (error_messages[index]))
                 return False
             else:
@@ -172,4 +174,5 @@ class NMJv2Notifier:
         return self._sendNMJ(host)

+
 notifier = NMJv2Notifier

diff --git a/sickbeard/notifiers/plex.py b/sickbeard/notifiers/plex.py
index 04ed2a19..e417f9ef 100644
--- a/sickbeard/notifiers/plex.py
+++ b/sickbeard/notifiers/plex.py
@@ -32,7 +32,6 @@ from xml.dom import minidom

 class PLEXNotifier:
-
     def _send_to_plex(self, command, host, username=None, password=None):
         """Handles communication to Plex hosts via HTTP API

@@ -127,16 +126,17 @@ class PLEXNotifier:
         for curHost in [x.strip() for x in host.split(",")]:
             logger.log(u"Sending Plex notification to '" + curHost + "' - " + message, logger.MESSAGE)

-            command = {'command': 'ExecBuiltIn', 'parameter': 'Notification(' + title.encode("utf-8") + ',' + message.encode("utf-8") + ')'}
+            command = {'command': 'ExecBuiltIn',
+                       'parameter': 'Notification(' + title.encode("utf-8") + ',' + message.encode("utf-8") + ')'}
             notifyResult = self._send_to_plex(command, curHost, username, password)
             if notifyResult:
                 result += curHost + ':' + str(notifyResult)

         return result

-##############################################################################
-# Public functions
-##############################################################################
+    ##############################################################################
+    # Public functions
+    ##############################################################################

     def notify_snatch(self, ep_name):
         if sickbeard.PLEX_NOTIFY_ONSNATCH:
@@ -149,9 +149,10 @@ class PLEXNotifier:
     def notify_subtitle_download(self, ep_name, lang):
         if sickbeard.PLEX_NOTIFY_ONSUBTITLEDOWNLOAD:
             self._notify_pmc(ep_name + ": " + lang, common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD])
-
+
     def test_notify(self, host, username, password):
-        return self._notify_pmc("Testing Plex notifications from Sick Beard", "Test Notification", host, username, password, force=True)
+        return self._notify_pmc("Testing Plex notifications from Sick Beard", "Test Notification", host, username,
+                                password, force=True)

     def update_library(self):
         """Handles updating the Plex Media Server host via HTTP API
@@ -168,7 +169,8 @@ class PLEXNotifier:
             logger.log(u"No Plex Server host specified, check your settings", logger.DEBUG)
             return False

-        logger.log(u"Updating library for the Plex Media Server host: " + sickbeard.PLEX_SERVER_HOST, logger.MESSAGE)
+        logger.log(u"Updating library for the Plex Media Server host: " + sickbeard.PLEX_SERVER_HOST,
+                   logger.MESSAGE)

         url = "http://%s/library/sections" % sickbeard.PLEX_SERVER_HOST
         try:
@@ -193,4 +195,5 @@ class PLEXNotifier:
         return True

+
 notifier = PLEXNotifier

diff --git a/sickbeard/notifiers/prowl.py b/sickbeard/notifiers/prowl.py
index 4f0e932c..e26fff7b 100644
--- a/sickbeard/notifiers/prowl.py
+++ b/sickbeard/notifiers/prowl.py
@@ -31,56 +31,59 @@ import sickbeard
 from sickbeard import logger, common

-class ProwlNotifier:
+class ProwlNotifier:
     def test_notify(self, prowl_api, prowl_priority):
-        return self._sendProwl(prowl_api, prowl_priority, event="Test", message="Testing Prowl settings from Sick Beard", force=True)
+        return self._sendProwl(prowl_api, prowl_priority, event="Test",
+                               message="Testing Prowl settings from Sick Beard", force=True)

     def notify_snatch(self, ep_name):
         if sickbeard.PROWL_NOTIFY_ONSNATCH:
-            self._sendProwl(prowl_api=None, prowl_priority=None, event=common.notifyStrings[common.NOTIFY_SNATCH], message=ep_name)
+            self._sendProwl(prowl_api=None, prowl_priority=None, event=common.notifyStrings[common.NOTIFY_SNATCH],
+                            message=ep_name)

     def notify_download(self, ep_name):
         if sickbeard.PROWL_NOTIFY_ONDOWNLOAD:
-            self._sendProwl(prowl_api=None, prowl_priority=None, event=common.notifyStrings[common.NOTIFY_DOWNLOAD], message=ep_name)
-
+            self._sendProwl(prowl_api=None, prowl_priority=None, event=common.notifyStrings[common.NOTIFY_DOWNLOAD],
+                            message=ep_name)
+
     def notify_subtitle_download(self, ep_name, lang):
         if sickbeard.PROWL_NOTIFY_ONSUBTITLEDOWNLOAD:
-            self._sendProwl(prowl_api=None, prowl_priority=None, event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], message=ep_name + ": " + lang)
-
+            self._sendProwl(prowl_api=None, prowl_priority=None,
+                            event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], message=ep_name + ": " + lang)
+
     def _sendProwl(self, prowl_api=None, prowl_priority=None, event=None, message=None, force=False):
-
+
         if not sickbeard.USE_PROWL and not force:
-            return False
-
+            return False
+
         if prowl_api == None:
             prowl_api = sickbeard.PROWL_API
-
+
         if prowl_priority == None:
             prowl_priority = sickbeard.PROWL_PRIORITY
-
-
+
         title = "Sick Beard"
-
+
         logger.log(u"Prowl title: " + title, logger.DEBUG)
         logger.log(u"Prowl event: " + event, logger.DEBUG)
         logger.log(u"Prowl message: " + message, logger.DEBUG)
         logger.log(u"Prowl api: " + prowl_api, logger.DEBUG)
         logger.log(u"Prowl priority: " + prowl_priority, logger.DEBUG)
-
+
         http_handler = HTTPSConnection("api.prowlapp.com")
-
+
         data = {'apikey': prowl_api,
                 'application': title,
                 'event': event,
                 'description': message.encode('utf-8'),
-                'priority': prowl_priority }
+                'priority': prowl_priority}

         try:
             http_handler.request("POST",
-                                 "/publicapi/add",
-                                 headers = {'Content-type': "application/x-www-form-urlencoded"},
-                                 body = urlencode(data))
+                                 "/publicapi/add",
+                                 headers={'Content-type': "application/x-www-form-urlencoded"},
+                                 body=urlencode(data))
         except (SSLError, HTTPException):
             logger.log(u"Prowl notification failed.", logger.ERROR)
             return False
@@ -88,13 +91,14 @@ class ProwlNotifier:
         request_status = response.status

         if request_status == 200:
-                logger.log(u"Prowl notifications sent.", logger.DEBUG)
-                return True
-        elif request_status == 401:
-                logger.log(u"Prowl auth failed: %s" % response.reason, logger.ERROR)
-                return False
+            logger.log(u"Prowl notifications sent.", logger.DEBUG)
+            return True
+        elif request_status == 401:
+            logger.log(u"Prowl auth failed: %s" % response.reason, logger.ERROR)
+            return False
         else:
-                logger.log(u"Prowl notification failed.", logger.ERROR)
-                return False
-
+            logger.log(u"Prowl notification failed.", logger.ERROR)
+            return False
+
+
 notifier = ProwlNotifier
"application/x-www-form-urlencoded"}, + body=urlencode(data)) except (SSLError, HTTPException): logger.log(u"Pushalot notification failed.", logger.ERROR) return False @@ -71,13 +76,14 @@ class PushalotNotifier: request_status = response.status if request_status == 200: - logger.log(u"Pushalot notifications sent.", logger.DEBUG) - return True - elif request_status == 410: - logger.log(u"Pushalot auth failed: %s" % response.reason, logger.ERROR) - return False + logger.log(u"Pushalot notifications sent.", logger.DEBUG) + return True + elif request_status == 410: + logger.log(u"Pushalot auth failed: %s" % response.reason, logger.ERROR) + return False else: - logger.log(u"Pushalot notification failed.", logger.ERROR) - return False - + logger.log(u"Pushalot notification failed.", logger.ERROR) + return False + + notifier = PushalotNotifier diff --git a/sickbeard/notifiers/pushbullet.py b/sickbeard/notifiers/pushbullet.py index 8135c803..e76d2368 100644 --- a/sickbeard/notifiers/pushbullet.py +++ b/sickbeard/notifiers/pushbullet.py @@ -25,31 +25,36 @@ from ssl import SSLError import sickbeard from sickbeard import logger, common -class PushbulletNotifier: +class PushbulletNotifier: def test_notify(self, pushbullet_api): - return self._sendPushbullet(pushbullet_api, event="Test", message="Testing Pushbullet settings from Sick Beard", method="POST", notificationType="note", force=True) + return self._sendPushbullet(pushbullet_api, event="Test", message="Testing Pushbullet settings from Sick Beard", + method="POST", notificationType="note", force=True) def get_devices(self, pushbullet_api): return self._sendPushbullet(pushbullet_api, method="GET", force=True) def notify_snatch(self, ep_name): if sickbeard.PUSHBULLET_NOTIFY_ONSNATCH: - self._sendPushbullet(pushbullet_api=None, event=common.notifyStrings[common.NOTIFY_SNATCH], message=ep_name, notificationType="note", method="POST") + self._sendPushbullet(pushbullet_api=None, event=common.notifyStrings[common.NOTIFY_SNATCH], message=ep_name, + notificationType="note", method="POST") def notify_download(self, ep_name): if sickbeard.PUSHBULLET_NOTIFY_ONDOWNLOAD: - self._sendPushbullet(pushbullet_api=None, event=common.notifyStrings[common.NOTIFY_DOWNLOAD], message=ep_name, notificationType="note", method="POST") + self._sendPushbullet(pushbullet_api=None, event=common.notifyStrings[common.NOTIFY_DOWNLOAD], + message=ep_name, notificationType="note", method="POST") def notify_subtitle_download(self, ep_name, lang): if sickbeard.PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD: - self._sendPushbullet(pushbullet_api=None, event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], message=ep_name + ": " + lang, notificationType="note", method="POST") + self._sendPushbullet(pushbullet_api=None, event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], + message=ep_name + ": " + lang, notificationType="note", method="POST") + + def _sendPushbullet(self, pushbullet_api=None, pushbullet_device=None, event=None, message=None, + notificationType=None, method=None, force=False): - def _sendPushbullet(self, pushbullet_api=None, pushbullet_device=None, event=None, message=None, notificationType=None, method=None, force=False): - if not sickbeard.USE_PUSHBULLET and not force: - return False - + return False + if pushbullet_api == None: pushbullet_api = sickbeard.PUSHBULLET_API if pushbullet_device == None: @@ -59,13 +64,13 @@ class PushbulletNotifier: uri = '/api/pushes' else: uri = '/api/devices' - + logger.log(u"Pushbullet event: " + str(event), logger.DEBUG) 
logger.log(u"Pushbullet message: " + str(message), logger.DEBUG) logger.log(u"Pushbullet api: " + str(pushbullet_api), logger.DEBUG) logger.log(u"Pushbullet devices: " + str(pushbullet_device), logger.DEBUG) logger.log(u"Pushbullet notification type: " + str(notificationType), logger.DEBUG) - + http_handler = HTTPSConnection("api.pushbullet.com") authString = base64.encodestring('%s:' % (pushbullet_api)).replace('\n', '') @@ -74,7 +79,7 @@ class PushbulletNotifier: testMessage = True try: logger.log(u"Testing Pushbullet authentication and retrieving the device list.", logger.DEBUG) - http_handler.request(method, uri, None, headers={'Authorization':'Basic %s:' % authString}) + http_handler.request(method, uri, None, headers={'Authorization': 'Basic %s:' % authString}) except (SSLError, HTTPException): logger.log(u"Pushbullet notification failed.", logger.ERROR) return False @@ -86,7 +91,8 @@ class PushbulletNotifier: 'body': message.encode('utf-8'), 'device_iden': pushbullet_device, 'type': notificationType} - http_handler.request(method, uri, body = urlencode(data), headers={'Authorization':'Basic %s' % authString}) + http_handler.request(method, uri, body=urlencode(data), + headers={'Authorization': 'Basic %s' % authString}) pass except (SSLError, HTTPException): return False @@ -96,17 +102,18 @@ class PushbulletNotifier: request_status = response.status if request_status == 200: - if testMessage: - return request_body - else: - logger.log(u"Pushbullet notifications sent.", logger.DEBUG) - return True + if testMessage: + return request_body + else: + logger.log(u"Pushbullet notifications sent.", logger.DEBUG) + return True elif request_status == 410: - logger.log(u"Pushbullet auth failed: %s" % response.reason, logger.ERROR) - return False + logger.log(u"Pushbullet auth failed: %s" % response.reason, logger.ERROR) + return False else: - logger.log(u"Pushbullet notification failed.", logger.ERROR) - return False - + logger.log(u"Pushbullet notification failed.", logger.ERROR) + return False + + notifier = PushbulletNotifier diff --git a/sickbeard/notifiers/pushover.py b/sickbeard/notifiers/pushover.py index 74b07c44..3dbf4691 100644 --- a/sickbeard/notifiers/pushover.py +++ b/sickbeard/notifiers/pushover.py @@ -30,12 +30,12 @@ from sickbeard.exceptions import ex API_URL = "https://api.pushover.net/1/messages.json" API_KEY = "OKCXmkvHN1syU2e8xvpefTnyvVWGv5" + class PushoverNotifier: - def test_notify(self, userKey=None): - return self._sendPushover("This is a test notification from SickBeard", 'Test', userKey ) + return self._sendPushover("This is a test notification from SickBeard", 'Test', userKey) - def _sendPushover(self, msg, title, userKey=None ): + def _sendPushover(self, msg, title, userKey=None): """ Sends a pushover notification to the address provided @@ -48,7 +48,7 @@ class PushoverNotifier: if not userKey: userKey = sickbeard.PUSHOVER_USERKEY - + # build up the URL and parameters msg = msg.strip() curUrl = API_URL @@ -59,7 +59,7 @@ class PushoverNotifier: 'user': userKey, 'message': msg.encode('utf-8'), 'timestamp': int(time.time()) - }) + }) # send the request to pushover @@ -67,7 +67,7 @@ class PushoverNotifier: req = urllib2.Request(curUrl) handle = urllib2.urlopen(req, data) handle.close() - + except urllib2.URLError, e: # if we get an error back that doesn't have an error code then who knows what's really happening if not hasattr(e, 'code'): @@ -80,19 +80,19 @@ class PushoverNotifier: if e.code == 404: logger.log("Username is wrong/not a pushover email. 
diff --git a/sickbeard/notifiers/pushover.py b/sickbeard/notifiers/pushover.py
index 74b07c44..3dbf4691 100644
--- a/sickbeard/notifiers/pushover.py
+++ b/sickbeard/notifiers/pushover.py
@@ -30,12 +30,12 @@ from sickbeard.exceptions import ex
 API_URL = "https://api.pushover.net/1/messages.json"
 API_KEY = "OKCXmkvHN1syU2e8xvpefTnyvVWGv5"

+
 class PushoverNotifier:
-
     def test_notify(self, userKey=None):
-        return self._sendPushover("This is a test notification from SickBeard", 'Test', userKey )
+        return self._sendPushover("This is a test notification from SickBeard", 'Test', userKey)

-    def _sendPushover(self, msg, title, userKey=None ):
+    def _sendPushover(self, msg, title, userKey=None):
         """
         Sends a pushover notification to the address provided

@@ -48,7 +48,7 @@ class PushoverNotifier:
         if not userKey:
             userKey = sickbeard.PUSHOVER_USERKEY
-
+
         # build up the URL and parameters
         msg = msg.strip()
         curUrl = API_URL
@@ -59,7 +59,7 @@ class PushoverNotifier:
             'user': userKey,
             'message': msg.encode('utf-8'),
             'timestamp': int(time.time())
-            })
+        })

         # send the request to pushover
         try:
@@ -67,7 +67,7 @@ class PushoverNotifier:
             req = urllib2.Request(curUrl)
             handle = urllib2.urlopen(req, data)
             handle.close()
-
+
         except urllib2.URLError, e:
             # if we get an error back that doesn't have an error code then who knows what's really happening
             if not hasattr(e, 'code'):
@@ -80,19 +80,19 @@ class PushoverNotifier:
             if e.code == 404:
                 logger.log("Username is wrong/not a pushover email. Pushover will send an email to it", logger.WARNING)
                 return False
-
+
             # For HTTP status code 401's, it is because you are passing in either an invalid token, or the user has not added your service.
             elif e.code == 401:
-
+
                 #HTTP status 401 if the user doesn't have the service added
-                subscribeNote = self._sendPushover(msg, title, userKey )
+                subscribeNote = self._sendPushover(msg, title, userKey)
                 if subscribeNote:
                     logger.log("Subscription send", logger.DEBUG)
                     return True
                 else:
                     logger.log("Subscription could not be send", logger.ERROR)
                     return False
-
+
             # If you receive an HTTP status code of 400, it is because you failed to send the proper parameters
             elif e.code == 400:
                 logger.log("Wrong data sent to pushover", logger.ERROR)
@@ -104,17 +104,17 @@ class PushoverNotifier:
     def notify_snatch(self, ep_name, title=notifyStrings[NOTIFY_SNATCH]):
         if sickbeard.PUSHOVER_NOTIFY_ONSNATCH:
             self._notifyPushover(title, ep_name)
-
+
     def notify_download(self, ep_name, title=notifyStrings[NOTIFY_DOWNLOAD]):
         if sickbeard.PUSHOVER_NOTIFY_ONDOWNLOAD:
             self._notifyPushover(title, ep_name)
-
+
     def notify_subtitle_download(self, ep_name, lang, title=notifyStrings[NOTIFY_SUBTITLE_DOWNLOAD]):
         if sickbeard.PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD:
             self._notifyPushover(title, ep_name + ": " + lang)

-    def _notifyPushover(self, title, message, userKey=None ):
+    def _notifyPushover(self, title, message, userKey=None):
         """
         Sends a pushover notification based on the provided info or SB config

@@ -137,4 +137,5 @@ class PushoverNotifier:
         self._sendPushover(message, title)
         return True

+
 notifier = PushoverNotifier

diff --git a/sickbeard/notifiers/pytivo.py b/sickbeard/notifiers/pytivo.py
index e7def79d..14b6eab4 100644
--- a/sickbeard/notifiers/pytivo.py
+++ b/sickbeard/notifiers/pytivo.py
@@ -25,28 +25,28 @@ from urllib2 import Request, urlopen, URLError
 from sickbeard import logger
 from sickbeard import encodingKludge as ek

-class pyTivoNotifier:
+class pyTivoNotifier:
     def notify_snatch(self, ep_name):
         pass

     def notify_download(self, ep_name):
         pass
-
+
     def notify_subtitle_download(self, ep_name, lang):
         pass

     def update_library(self, ep_obj):

         # Values from config
-
+
         if not sickbeard.USE_PYTIVO:
             return False
-
+
         host = sickbeard.PYTIVO_HOST
         shareName = sickbeard.PYTIVO_SHARE_NAME
         tsn = sickbeard.PYTIVO_TIVO_NAME
-
+
         # There are two more values required, the container and file.
         #
         # container: The share name, show name and season
@@ -58,34 +58,35 @@ class pyTivoNotifier:
         # There might be better ways to arrive at the values, but this is the best I have been able to
         # come up with.
         #
-
-
+
+
         # Calculated values
-
+
         showPath = ep_obj.show.location
         showName = ep_obj.show.name
-        rootShowAndSeason = ek.ek(os.path.dirname, ep_obj.location) 
+        rootShowAndSeason = ek.ek(os.path.dirname, ep_obj.location)
         absPath = ep_obj.location
-
+
         # Some show names have colons in them which are illegal in a path location, so strip them out.
         # (Are there other characters?)
-        showName = showName.replace(":","")
-
+        showName = showName.replace(":", "")
+
         root = showPath.replace(showName, "")
         showAndSeason = rootShowAndSeason.replace(root, "")
-
+
         container = shareName + "/" + showAndSeason
         file = "/" + absPath.replace(root, "")
-
+
         # Finally create the url and make request
-        requestUrl = "http://" + host + "/TiVoConnect?" + urlencode( {'Command':'Push', 'Container':container, 'File':file, 'tsn':tsn} )
-
+        requestUrl = "http://" + host + "/TiVoConnect?" + urlencode(
+            {'Command': 'Push', 'Container': container, 'File': file, 'tsn': tsn})
+
         logger.log(u"pyTivo notification: Requesting " + requestUrl)
-
-        request = Request( requestUrl )
+
+        request = Request(requestUrl)
         try:
-            response = urlopen(request) #@UnusedVariable
+            response = urlopen(request)  #@UnusedVariable
         except URLError, e:
             if hasattr(e, 'reason'):
                 logger.log(u"pyTivo notification: Error, failed to reach a server")
@@ -99,4 +100,5 @@ class pyTivoNotifier:
         logger.log(u"pyTivo notification: Successfully requested transfer of file")
         return True

+
 notifier = pyTivoNotifier

diff --git a/sickbeard/notifiers/synoindex.py b/sickbeard/notifiers/synoindex.py
index 71703f24..6dcc552e 100644
--- a/sickbeard/notifiers/synoindex.py
+++ b/sickbeard/notifiers/synoindex.py
@@ -27,14 +27,14 @@ from sickbeard import logger
 from sickbeard import encodingKludge as ek
 from sickbeard.exceptions import ex

-class synoIndexNotifier:
+class synoIndexNotifier:
     def notify_snatch(self, ep_name):
         pass

     def notify_download(self, ep_name):
         pass
-
+
     def notify_subtitle_download(self, ep_name, lang):
         pass

@@ -46,15 +46,17 @@ class synoIndexNotifier:
     def moveObject(self, old_path, new_path):
         if sickbeard.USE_SYNOINDEX:
-            synoindex_cmd = ['/usr/syno/bin/synoindex', '-N', ek.ek(os.path.abspath, new_path), ek.ek(os.path.abspath, old_path)]
-            logger.log(u"Executing command "+str(synoindex_cmd))
-            logger.log(u"Absolute path to command: "+ek.ek(os.path.abspath, synoindex_cmd[0]), logger.DEBUG)
+            synoindex_cmd = ['/usr/syno/bin/synoindex', '-N', ek.ek(os.path.abspath, new_path),
+                             ek.ek(os.path.abspath, old_path)]
+            logger.log(u"Executing command " + str(synoindex_cmd))
+            logger.log(u"Absolute path to command: " + ek.ek(os.path.abspath, synoindex_cmd[0]), logger.DEBUG)
             try:
-                p = subprocess.Popen(synoindex_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR)
-                out, err = p.communicate() #@UnusedVariable
-                logger.log(u"Script result: "+str(out), logger.DEBUG)
+                p = subprocess.Popen(synoindex_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
+                                     cwd=sickbeard.PROG_DIR)
+                out, err = p.communicate()  #@UnusedVariable
+                logger.log(u"Script result: " + str(out), logger.DEBUG)
             except OSError, e:
-                logger.log(u"Unable to run synoindex: "+ex(e))
+                logger.log(u"Unable to run synoindex: " + ex(e))

     def deleteFolder(self, cur_path):
         self.makeObject('-D', cur_path)
@@ -71,13 +73,15 @@ class synoIndexNotifier:
     def makeObject(self, cmd_arg, cur_path):
         if sickbeard.USE_SYNOINDEX:
             synoindex_cmd = ['/usr/syno/bin/synoindex', cmd_arg, ek.ek(os.path.abspath, cur_path)]
-            logger.log(u"Executing command "+str(synoindex_cmd))
-            logger.log(u"Absolute path to command: "+ek.ek(os.path.abspath, synoindex_cmd[0]), logger.DEBUG)
+            logger.log(u"Executing command " + str(synoindex_cmd))
+            logger.log(u"Absolute path to command: " + ek.ek(os.path.abspath, synoindex_cmd[0]), logger.DEBUG)
             try:
-                p = subprocess.Popen(synoindex_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR)
-                out, err = p.communicate() #@UnusedVariable
-                logger.log(u"Script result: "+str(out), logger.DEBUG)
+                p = subprocess.Popen(synoindex_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
+                                     cwd=sickbeard.PROG_DIR)
+                out, err = p.communicate()  #@UnusedVariable
+                logger.log(u"Script result: " + str(out), logger.DEBUG)
             except OSError, e:
-                logger.log(u"Unable to run synoindex: "+ex(e))
+                logger.log(u"Unable to run synoindex: " + ex(e))
+

 notifier = synoIndexNotifier

diff --git a/sickbeard/notifiers/synologynotifier.py b/sickbeard/notifiers/synologynotifier.py
index a7f9b679..709e5640 100644
--- a/sickbeard/notifiers/synologynotifier.py
+++ b/sickbeard/notifiers/synologynotifier.py
@@ -27,8 +27,8 @@ from sickbeard import encodingKludge as ek
 from sickbeard.exceptions import ex
 from sickbeard import common

-class synologyNotifier:
+class synologyNotifier:
     def notify_snatch(self, ep_name):
         if sickbeard.SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH:
             self._send_synologyNotifier(ep_name, common.notifyStrings[common.NOTIFY_SNATCH])
@@ -36,20 +36,22 @@ class synologyNotifier:
     def notify_download(self, ep_name):
         if sickbeard.SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD:
             self._send_synologyNotifier(ep_name, common.notifyStrings[common.NOTIFY_DOWNLOAD])
-
+
     def notify_subtitle_download(self, ep_name, lang):
         if sickbeard.SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD:
             self._send_synologyNotifier(ep_name + ": " + lang, common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD])

     def _send_synologyNotifier(self, message, title):
         synodsmnotify_cmd = ["/usr/syno/bin/synodsmnotify", "@administrators", title, message]
-        logger.log(u"Executing command "+str(synodsmnotify_cmd))
-        logger.log(u"Absolute path to command: "+ek.ek(os.path.abspath, synodsmnotify_cmd[0]), logger.DEBUG)
+        logger.log(u"Executing command " + str(synodsmnotify_cmd))
+        logger.log(u"Absolute path to command: " + ek.ek(os.path.abspath, synodsmnotify_cmd[0]), logger.DEBUG)
         try:
-            p = subprocess.Popen(synodsmnotify_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR)
-            out, err = p.communicate() #@UnusedVariable
-            logger.log(u"Script result: "+str(out), logger.DEBUG)
+            p = subprocess.Popen(synodsmnotify_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
+                                 cwd=sickbeard.PROG_DIR)
+            out, err = p.communicate()  #@UnusedVariable
+            logger.log(u"Script result: " + str(out), logger.DEBUG)
         except OSError, e:
-            logger.log(u"Unable to run synodsmnotify: "+ex(e))
+            logger.log(u"Unable to run synodsmnotify: " + ex(e))
+

 notifier = synologyNotifier
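Both Synology notifiers above share one invocation pattern: run a DSM binary via subprocess, fold stderr into stdout, and log whatever comes back. The pattern in isolation (run_dsm_command is a hypothetical name; the binary paths are the ones in the diff):

    import subprocess

    def run_dsm_command(cmd, cwd=None):
        # stderr=STDOUT merges both streams, so 'out' holds the full output
        # and the 'err' slot of communicate() is always None here.
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=cwd)
        out, err = p.communicate()
        return out

    # e.g. run_dsm_command(["/usr/syno/bin/synodsmnotify", "@administrators", "Title", "Body"])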
diff --git a/sickbeard/notifiers/trakt.py b/sickbeard/notifiers/trakt.py
index 09b24ec6..63df3ba4 100644
--- a/sickbeard/notifiers/trakt.py
+++ b/sickbeard/notifiers/trakt.py
@@ -20,6 +20,7 @@ import sickbeard
 from sickbeard import logger
 from lib.trakt import *

+
 class TraktNotifier:
     """
     A "notifier" for trakt.tv which keeps track of what has and hasn't been added to your library.
@@ -30,7 +31,7 @@ class TraktNotifier:
     def notify_download(self, ep_name):
         pass
-
+
     def notify_subtitle_download(self, ep_name, lang):
         pass

@@ -40,20 +41,20 @@ class TraktNotifier:
         ep_obj: The TVEpisode object to add to trakt
         """
-
+
         if sickbeard.USE_TRAKT:
-
+
             # URL parameters
             data = {
                 'indexer_id': ep_obj.show.indexerid,
                 'title': ep_obj.show.name,
                 'year': ep_obj.show.startyear,
-                'episodes': [ {
-                    'season': ep_obj.season,
-                    'episode': ep_obj.episode
-                    } ]
-                }
-
+                'episodes': [{
+                    'season': ep_obj.season,
+                    'episode': ep_obj.episode
+                }]
+            }
+
             if data is not None:
                 TraktCall("show/episode/library/%API%", self._api(), self._username(), self._password(), data)
                 if sickbeard.TRAKT_REMOVE_WATCHLIST:
@@ -70,7 +71,7 @@ class TraktNotifier:
         Returns: True if the request succeeded, False otherwise
         """
-
+
         data = TraktCall("account/test/%API%", api, username, password, {})
         if data["status"] == "success":
             return True

diff --git a/sickbeard/notifiers/tweet.py b/sickbeard/notifiers/tweet.py
index dc1c0f9d..51616920 100644
--- a/sickbeard/notifiers/tweet.py
+++ b/sickbeard/notifiers/tweet.py
@@ -23,84 +23,84 @@ from sickbeard.exceptions import ex
 # parse_qsl moved to urlparse module in v2.6
 try:
-    from urlparse import parse_qsl #@UnusedImport
+    from urlparse import parse_qsl  #@UnusedImport
 except:
-    from cgi import parse_qsl #@Reimport
+    from cgi import parse_qsl  #@Reimport

 import lib.oauth2 as oauth
 import lib.pythontwitter as twitter

-class TwitterNotifier:
+class TwitterNotifier:
     consumer_key = "vHHtcB6WzpWDG6KYlBMr8g"
     consumer_secret = "zMqq5CB3f8cWKiRO2KzWPTlBanYmV0VYxSXZ0Pxds0E"
-
+
     REQUEST_TOKEN_URL = 'https://api.twitter.com/oauth/request_token'
-    ACCESS_TOKEN_URL = 'https://api.twitter.com/oauth/access_token'
+    ACCESS_TOKEN_URL = 'https://api.twitter.com/oauth/access_token'
     AUTHORIZATION_URL = 'https://api.twitter.com/oauth/authorize'
-    SIGNIN_URL = 'https://api.twitter.com/oauth/authenticate'
-
+    SIGNIN_URL = 'https://api.twitter.com/oauth/authenticate'
+
     def notify_snatch(self, ep_name):
         if sickbeard.TWITTER_NOTIFY_ONSNATCH:
-            self._notifyTwitter(common.notifyStrings[common.NOTIFY_SNATCH]+': '+ep_name)
+            self._notifyTwitter(common.notifyStrings[common.NOTIFY_SNATCH] + ': ' + ep_name)

     def notify_download(self, ep_name):
         if sickbeard.TWITTER_NOTIFY_ONDOWNLOAD:
-            self._notifyTwitter(common.notifyStrings[common.NOTIFY_DOWNLOAD]+': '+ep_name)
-
+            self._notifyTwitter(common.notifyStrings[common.NOTIFY_DOWNLOAD] + ': ' + ep_name)
+
     def notify_subtitle_download(self, ep_name, lang):
         if sickbeard.TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD:
-            self._notifyTwitter(common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD]+' '+ep_name + ": " + lang)
+            self._notifyTwitter(common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD] + ' ' + ep_name + ": " + lang)

     def test_notify(self):
         return self._notifyTwitter("This is a test notification from Sick Beard", force=True)

     def _get_authorization(self):
-
-        signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1() #@UnusedVariable
-        oauth_consumer = oauth.Consumer(key=self.consumer_key, secret=self.consumer_secret)
-        oauth_client = oauth.Client(oauth_consumer)
-
+
+        signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1()  #@UnusedVariable
+        oauth_consumer = oauth.Consumer(key=self.consumer_key, secret=self.consumer_secret)
+        oauth_client = oauth.Client(oauth_consumer)
+
         logger.log('Requesting temp token from Twitter')
-
+
         resp, content = oauth_client.request(self.REQUEST_TOKEN_URL, 'GET')
-
+
         if resp['status'] != '200':
             logger.log('Invalid respond from Twitter requesting temp token: %s' % resp['status'])
         else:
             request_token = dict(parse_qsl(content))
-
+
             sickbeard.TWITTER_USERNAME = request_token['oauth_token']
             sickbeard.TWITTER_PASSWORD = request_token['oauth_token_secret']
-
-            return self.AUTHORIZATION_URL+"?oauth_token="+ request_token['oauth_token']
-
+
+            return self.AUTHORIZATION_URL + "?oauth_token=" + request_token['oauth_token']
+
     def _get_credentials(self, key):
         request_token = {}
-
+
         request_token['oauth_token'] = sickbeard.TWITTER_USERNAME
         request_token['oauth_token_secret'] = sickbeard.TWITTER_PASSWORD
         request_token['oauth_callback_confirmed'] = 'true'
-
+
         token = oauth.Token(request_token['oauth_token'], request_token['oauth_token_secret'])
         token.set_verifier(key)
-
-        logger.log('Generating and signing request for an access token using key '+key)
-
-        signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1() #@UnusedVariable
-        oauth_consumer = oauth.Consumer(key=self.consumer_key, secret=self.consumer_secret)
-        logger.log('oauth_consumer: '+str(oauth_consumer))
-        oauth_client = oauth.Client(oauth_consumer, token)
-        logger.log('oauth_client: '+str(oauth_client))
+
+        logger.log('Generating and signing request for an access token using key ' + key)
+
+        signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1()  #@UnusedVariable
+        oauth_consumer = oauth.Consumer(key=self.consumer_key, secret=self.consumer_secret)
+        logger.log('oauth_consumer: ' + str(oauth_consumer))
+        oauth_client = oauth.Client(oauth_consumer, token)
+        logger.log('oauth_client: ' + str(oauth_client))
         resp, content = oauth_client.request(self.ACCESS_TOKEN_URL, method='POST', body='oauth_verifier=%s' % key)
-        logger.log('resp, content: '+str(resp)+','+str(content))
-
-        access_token = dict(parse_qsl(content))
-        logger.log('access_token: '+str(access_token))
-
-        logger.log('resp[status] = '+str(resp['status']))
+        logger.log('resp, content: ' + str(resp) + ',' + str(content))
+
+        access_token = dict(parse_qsl(content))
+        logger.log('access_token: ' + str(access_token))
+
+        logger.log('resp[status] = ' + str(resp['status']))
         if resp['status'] != '200':
-            logger.log('The request for a token with did not succeed: '+str(resp['status']), logger.ERROR)
+            logger.log('The request for a token with did not succeed: ' + str(resp['status']), logger.ERROR)
             return False
         else:
             logger.log('Your Twitter Access Token key: %s' % access_token['oauth_token'])
@@ -108,33 +108,34 @@ class TwitterNotifier:
         sickbeard.TWITTER_USERNAME = access_token['oauth_token']
         sickbeard.TWITTER_PASSWORD = access_token['oauth_token_secret']
         return True
-
-
+
+
     def _send_tweet(self, message=None):
-
-        username=self.consumer_key
-        password=self.consumer_secret
-        access_token_key=sickbeard.TWITTER_USERNAME
-        access_token_secret=sickbeard.TWITTER_PASSWORD
-
-        logger.log(u"Sending tweet: "+message)
-
+
+        username = self.consumer_key
+        password = self.consumer_secret
+        access_token_key = sickbeard.TWITTER_USERNAME
+        access_token_secret = sickbeard.TWITTER_PASSWORD
+
+        logger.log(u"Sending tweet: " + message)
+
         api = twitter.Api(username, password, access_token_key, access_token_secret)
-
+
         try:
             api.PostUpdate(message.encode('utf8'))
         except Exception, e:
-            logger.log(u"Error Sending Tweet: "+ex(e), logger.ERROR)
+            logger.log(u"Error Sending Tweet: " + ex(e), logger.ERROR)
             return False
-
+
         return True
-
+
     def _notifyTwitter(self, message='', force=False):
         prefix = sickbeard.TWITTER_PREFIX
-
+
         if not sickbeard.USE_TWITTER and not force:
             return False
-
-        return self._send_tweet(prefix+": "+message)
+
+        return self._send_tweet(prefix + ": " + message)
+

+notifier = TwitterNotifier
\ No newline at end of file
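TwitterNotifier above splits the OAuth handshake across _get_authorization() (fetch a temp token and hand the user an authorize URL) and _get_credentials() (trade the PIN for a permanent token). A condensed sketch of the first half, using the same lib.oauth2 calls and URLs as the diff (it only imports cleanly inside the Sick Beard tree because of the lib.* package):

    import lib.oauth2 as oauth
    from urlparse import parse_qsl

    def twitter_auth_url(consumer_key, consumer_secret):
        consumer = oauth.Consumer(key=consumer_key, secret=consumer_secret)
        client = oauth.Client(consumer)
        resp, content = client.request('https://api.twitter.com/oauth/request_token', 'GET')
        if resp['status'] != '200':
            return None
        token = dict(parse_qsl(content))
        # The user opens this URL, approves the app and receives the PIN
        # that _get_credentials() later exchanges for the access token.
        return 'https://api.twitter.com/oauth/authorize?oauth_token=' + token['oauth_token']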
diff --git a/sickbeard/notifiers/xbmc.py b/sickbeard/notifiers/xbmc.py
index 0013c1e4..358cabd5 100644
--- a/sickbeard/notifiers/xbmc.py
+++ b/sickbeard/notifiers/xbmc.py
@@ -41,7 +41,6 @@ except ImportError:

 class XBMCNotifier:
-
     sb_logo_url = 'http://www.sickbeard.com/xbmc-notify.png'

     def _get_xbmc_version(self, host, username, password):
@@ -133,18 +132,22 @@ class XBMCNotifier:
             if xbmcapi:
                 if (xbmcapi <= 4):
                     logger.log(u"Detected XBMC version <= 11, using XBMC HTTP API", logger.DEBUG)
-                    command = {'command': 'ExecBuiltIn', 'parameter': 'Notification(' + title.encode("utf-8") + ',' + message.encode("utf-8") + ')'}
+                    command = {'command': 'ExecBuiltIn',
+                               'parameter': 'Notification(' + title.encode("utf-8") + ',' + message.encode(
+                                   "utf-8") + ')'}
                     notifyResult = self._send_to_xbmc(command, curHost, username, password)
                     if notifyResult:
                         result += curHost + ':' + str(notifyResult)
                 else:
                     logger.log(u"Detected XBMC version >= 12, using XBMC JSON API", logger.DEBUG)
-                    command = '{"jsonrpc":"2.0","method":"GUI.ShowNotification","params":{"title":"%s","message":"%s", "image": "%s"},"id":1}' % (title.encode("utf-8"), message.encode("utf-8"), self.sb_logo_url)
+                    command = '{"jsonrpc":"2.0","method":"GUI.ShowNotification","params":{"title":"%s","message":"%s", "image": "%s"},"id":1}' % (
+                        title.encode("utf-8"), message.encode("utf-8"), self.sb_logo_url)
                     notifyResult = self._send_to_xbmc_json(command, curHost, username, password)
                     if notifyResult:
                         result += curHost + ':' + notifyResult["result"].decode(sickbeard.SYS_ENCODING)
             else:
-                logger.log(u"Failed to detect XBMC version for '" + curHost + "', check configuration and try again.", logger.ERROR)
+                logger.log(u"Failed to detect XBMC version for '" + curHost + "', check configuration and try again.",
+                           logger.ERROR)
                 result += curHost + ':False'

         return result
@@ -182,14 +185,15 @@ class XBMCNotifier:
             else:
                 return True
         else:
-            logger.log(u"Failed to detect XBMC version for '" + host + "', check configuration and try again.", logger.DEBUG)
+            logger.log(u"Failed to detect XBMC version for '" + host + "', check configuration and try again.",
+                       logger.DEBUG)
             return False

         return False

-##############################################################################
-# Legacy HTTP API (pre XBMC 12) methods
-##############################################################################
+    ##############################################################################
+    # Legacy HTTP API (pre XBMC 12) methods
+    ##############################################################################

     def _send_to_xbmc(self, command, host=None, username=None, password=None):
         """Handles communication to XBMC servers via HTTP API
@@ -242,7 +246,8 @@ class XBMCNotifier:
             return result

         except (urllib2.URLError, IOError), e:
-            logger.log(u"Warning: Couldn't contact XBMC HTTP at " + fixStupidEncodings(url) + " " + ex(e), logger.WARNING)
+            logger.log(u"Warning: Couldn't contact XBMC HTTP at " + fixStupidEncodings(url) + " " + ex(e),
+                       logger.WARNING)
             return False

     def _update_library(self, host=None, showName=None):
@@ -271,11 +276,12 @@ class XBMCNotifier:
         logger.log(u"Updating library in XBMC via HTTP method for show " + showName, logger.DEBUG)

         pathSql = 'select path.strPath from path, tvshow, tvshowlinkpath where ' \
-            'tvshow.c00 = "%s" and tvshowlinkpath.idShow = tvshow.idShow ' \
-            'and tvshowlinkpath.idPath = path.idPath' % (showName)
+                  'tvshow.c00 = "%s" and tvshowlinkpath.idShow = tvshow.idShow ' \
+                  'and tvshowlinkpath.idPath = path.idPath' % (showName)

         # use this to get xml back for the path lookups
-        xmlCommand = {'command': 'SetResponseFormat(webheader;false;webfooter;false;header;;footer;;opentag;;closetag;;closefinaltag;false)'}
+        xmlCommand = {
+            'command': 'SetResponseFormat(webheader;false;webfooter;false;header;;footer;;opentag;;closetag;;closefinaltag;false)'}
         # sql used to grab path(s)
         sqlCommand = {'command': 'QueryVideoDatabase(%s)' % (pathSql)}
         # set output back to default
@@ -313,7 +319,8 @@ class XBMCNotifier:
             updateCommand = {'command': 'ExecBuiltIn', 'parameter': 'XBMC.updatelibrary(video, %s)' % (unEncPath)}
             request = self._send_to_xbmc(updateCommand, host)
             if not request:
-                logger.log(u"Update of show directory failed on " + showName + " on " + host + " at " + unEncPath, logger.ERROR)
+                logger.log(u"Update of show directory failed on " + showName + " on " + host + " at " + unEncPath,
+                           logger.ERROR)
                 return False
             # sleep for a few seconds just to be sure xbmc has a chance to finish each directory
             if len(paths) > 1:
@@ -330,9 +337,9 @@ class XBMCNotifier:
         return True

-##############################################################################
-# JSON-RPC API (XBMC 12+) methods
-##############################################################################
+    ##############################################################################
+    # JSON-RPC API (XBMC 12+) methods
+    ##############################################################################

     def _send_to_xbmc_json(self, command, host=None, username=None, password=None):
         """Handles communication to XBMC servers via JSONRPC
@@ -377,7 +384,8 @@ class XBMCNotifier:
         try:
             response = urllib2.urlopen(req)
         except urllib2.URLError, e:
-            logger.log(u"Error while trying to retrieve XBMC API version for " + host + ": " + ex(e), logger.WARNING)
+            logger.log(u"Error while trying to retrieve XBMC API version for " + host + ": " + ex(e),
+                       logger.WARNING)
             return False

         # parse the json result
@@ -385,13 +393,14 @@ class XBMCNotifier:
             result = json.load(response)
             response.close()
             logger.log(u"XBMC JSON response: " + str(result), logger.DEBUG)
-            return result # need to return response for parsing
+            return result  # need to return response for parsing
         except ValueError, e:
             logger.log(u"Unable to decode JSON: " + response, logger.WARNING)
             return False
         except IOError, e:
-            logger.log(u"Warning: Couldn't contact XBMC JSON API at " + fixStupidEncodings(url) + " " + ex(e), logger.WARNING)
+            logger.log(u"Warning: Couldn't contact XBMC JSON API at " + fixStupidEncodings(url) + " " + ex(e),
+                       logger.WARNING)
             return False

     def _update_library_json(self, host=None, showName=None):
@@ -430,7 +439,7 @@ class XBMCNotifier:
             for show in shows:
                 if (show["label"] == showName):
                     tvshowid = show["tvshowid"]
-                    break # exit out of loop otherwise the label and showname will not match up
+                    break  # exit out of loop otherwise the label and showname will not match up

             # this can be big, so free some memory
             del shows
@@ -441,27 +450,34 @@ class XBMCNotifier:
             # lookup tv-show path
-            pathCommand = '{"jsonrpc":"2.0","method":"VideoLibrary.GetTVShowDetails","params":{"tvshowid":%d, "properties": ["file"]},"id":1}' % (tvshowid)
+            pathCommand = '{"jsonrpc":"2.0","method":"VideoLibrary.GetTVShowDetails","params":{"tvshowid":%d, "properties": ["file"]},"id":1}' % (
+                tvshowid)
             pathResponse = self._send_to_xbmc_json(pathCommand, host)

             path = pathResponse["result"]["tvshowdetails"]["file"]
-            logger.log(u"Received Show: " + show["label"] + " with ID: " + str(tvshowid) + " Path: " + path, logger.DEBUG)
+            logger.log(u"Received Show: " + show["label"] + " with ID: " + str(tvshowid) + " Path: " + path,
+                       logger.DEBUG)

             if (len(path) < 1):
-                logger.log(u"No valid path found for " + showName + " with ID: " + str(tvshowid) + " on " + host, logger.WARNING)
+                logger.log(u"No valid path found for " + showName + " with ID: " + str(tvshowid) + " on " + host,
+                           logger.WARNING)
                 return False

             logger.log(u"XBMC Updating " + showName + " on " + host + " at " + path, logger.DEBUG)
-            updateCommand = '{"jsonrpc":"2.0","method":"VideoLibrary.Scan","params":{"directory":%s},"id":1}' % (json.dumps(path))
+            updateCommand = '{"jsonrpc":"2.0","method":"VideoLibrary.Scan","params":{"directory":%s},"id":1}' % (
+                json.dumps(path))
             request = self._send_to_xbmc_json(updateCommand, host)
             if not request:
-                logger.log(u"Update of show directory failed on " + showName + " on " + host + " at " + path, logger.ERROR)
+                logger.log(u"Update of show directory failed on " + showName + " on " + host + " at " + path,
+                           logger.ERROR)
                 return False

             # catch if there was an error in the returned request
             for r in request:
                 if 'error' in r:
-                    logger.log(u"Error while attempting to update show directory for " + showName + " on " + host + " at " + path, logger.ERROR)
+                    logger.log(
+                        u"Error while attempting to update show directory for " + showName + " on " + host + " at " + path,
+                        logger.ERROR)
                     return False

         # do a full update if requested
@@ -476,9 +492,9 @@ class XBMCNotifier:
         return True

-##############################################################################
-# Public functions which will call the JSON or Legacy HTTP API methods
-##############################################################################
+    ##############################################################################
+    # Public functions which will call the JSON or Legacy HTTP API methods
+    ##############################################################################

     def notify_snatch(self, ep_name):
         if sickbeard.XBMC_NOTIFY_ONSNATCH:
@@ -493,7 +509,8 @@ class XBMCNotifier:
             self._notify_xbmc(ep_name + ": " + lang, common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD])

     def test_notify(self, host, username, password):
-        return self._notify_xbmc("Testing XBMC notifications from Sick Beard", "Test Notification", host, username, password, force=True)
+        return self._notify_xbmc("Testing XBMC notifications from Sick Beard", "Test Notification", host, username,
+                                 password, force=True)

     def update_library(self, showName=None):
         """Public wrapper for the update library functions to branch the logic for JSON-RPC or legacy HTTP API
@@ -521,10 +538,12 @@ class XBMCNotifier:
         for host in [x.strip() for x in sickbeard.XBMC_HOST.split(",")]:
             if self._send_update_library(host, showName):
                 if sickbeard.XBMC_UPDATE_ONLYFIRST:
-                    logger.log(u"Successfully updated '" + host + "', stopped sending update library commands.", logger.DEBUG)
+                    logger.log(u"Successfully updated '" + host + "', stopped sending update library commands.",
+                               logger.DEBUG)
                     return True
             else:
-                logger.log(u"Failed to detect XBMC version for '" + host + "', check configuration and try again.", logger.ERROR)
+                logger.log(u"Failed to detect XBMC version for '" + host + "', check configuration and try again.",
+                           logger.ERROR)
                 result = result + 1

         # needed for the 'update xbmc' submenu command
@@ -534,4 +553,5 @@ class XBMCNotifier:
         else:
             return False

+
 notifier = XBMCNotifier
diff --git a/sickbeard/nzbSplitter.py b/sickbeard/nzbSplitter.py
index bc8a4a69..75c7c865 100644
--- a/sickbeard/nzbSplitter.py
+++ b/sickbeard/nzbSplitter.py
@@ -32,7 +32,6 @@ from sickbeard.exceptions import ex

 def getSeasonNZBs(name, urlData, season):
-
     try:
         showXML = etree.ElementTree(etree.XML(urlData))
     except SyntaxError:
@@ -78,7 +77,6 @@ def getSeasonNZBs(name, urlData, season):

 def createNZBString(fileElements, xmlns):
-
     rootElement = etree.Element("nzb")
     if xmlns:
         rootElement.set("xmlns", xmlns)
@@ -90,7 +88,6 @@ def createNZBString(fileElements, xmlns):

 def saveNZB(nzbName, nzbString):
-
     try:
         with ek.ek(open, nzbName + ".nzb", 'w') as nzb_fh:
             nzb_fh.write(nzbString)
@@ -108,7 +105,6 @@ def stripNS(element, ns):

 def splitResult(result):
-
     urlData = helpers.getURL(result.url)

     if urlData is None:
@@ -143,17 +139,23 @@ def splitResult(result):
             return False

         # make sure the result is sane
-        if (parse_result.season_number != None and parse_result.season_number != season) or (parse_result.season_number == None and season != 1):
-            logger.log(u"Found " + newNZB + " inside " + result.name + " but it doesn't seem to belong to the same season, ignoring it", logger.WARNING)
+        if (parse_result.season_number != None and parse_result.season_number != season) or (
+                parse_result.season_number == None and season != 1):
+            logger.log(
+                u"Found " + newNZB + " inside " + result.name + " but it doesn't seem to belong to the same season, ignoring it",
+                logger.WARNING)
             continue
         elif len(parse_result.episode_numbers) == 0:
-            logger.log(u"Found " + newNZB + " inside " + result.name + " but it doesn't seem to be a valid episode NZB, ignoring it", logger.WARNING)
+            logger.log(
+                u"Found " + newNZB + " inside " + result.name + " but it doesn't seem to be a valid episode NZB, ignoring it",
+                logger.WARNING)
             continue

         wantEp = True
         for epNo in parse_result.episode_numbers:
             if not result.extraInfo[0].wantEpisode(season, epNo, result.quality):
-                logger.log(u"Ignoring result " + newNZB + " because we don't want an episode that is " + Quality.qualityStrings[result.quality], logger.DEBUG)
+                logger.log(u"Ignoring result " + newNZB + " because we don't want an episode that is " +
+                           Quality.qualityStrings[result.quality], logger.DEBUG)
                 wantEp = False
                 break
         if not wantEp:

diff --git a/sickbeard/nzbget.py b/sickbeard/nzbget.py
index 076c1ddc..e53d0a89 100644
--- a/sickbeard/nzbget.py
+++ b/sickbeard/nzbget.py
@@ -33,8 +33,8 @@ from sickbeard import logger, helpers
 from common import Quality

-def sendNZB(nzb, proper = False):
+def sendNZB(nzb, proper=False):
     addToTop = False
     nzbgetprio = 0
     nzbgetXMLrpc = "http://%(username)s:%(password)s@%(host)s/xmlrpc"
@@ -43,17 +43,21 @@ def sendNZB(nzb, proper=False):
         logger.log(u"No NZBget host found in configuration. Please configure it.", logger.ERROR)
         return False

-    url = nzbgetXMLrpc % {"host": sickbeard.NZBGET_HOST, "username": sickbeard.NZBGET_USERNAME, "password": sickbeard.NZBGET_PASSWORD}
+    url = nzbgetXMLrpc % {"host": sickbeard.NZBGET_HOST, "username": sickbeard.NZBGET_USERNAME,
+                          "password": sickbeard.NZBGET_PASSWORD}

     nzbGetRPC = xmlrpclib.ServerProxy(url)
     try:
         if nzbGetRPC.writelog("INFO", "Sickbeard connected to drop of %s any moment now." % (nzb.name + ".nzb")):
             logger.log(u"Successful connected to NZBget", logger.DEBUG)
         else:
-            logger.log(u"Successful connected to NZBget, but unable to send a message" % (nzb.name + ".nzb"), logger.ERROR)
+            logger.log(u"Successfully connected to NZBget, but unable to send a message about %s" % (nzb.name + ".nzb"),
+                       logger.ERROR)

     except httplib.socket.error:
-        logger.log(u"Please check your NZBget host and port (if it is running). NZBget is not responding to this combination", logger.ERROR)
+        logger.log(
+            u"Please check your NZBget host and port (if it is running). NZBget is not responding to this combination",
+            logger.ERROR)
         return False

     except xmlrpclib.ProtocolError, e:
@@ -102,10 +106,12 @@ def sendNZB(nzb, proper=False):
     if nzbget_version == 0:
         nzbget_result = nzbGetRPC.append(nzb.name + ".nzb", sickbeard.NZBGET_CATEGORY, addToTop, nzbcontent64)
     elif nzbget_version >= 12:
-        nzbget_result = nzbGetRPC.append(nzb.name + ".nzb", sickbeard.NZBGET_CATEGORY, nzbgetprio, False, nzbcontent64, False, dupekey, dupescore, "score")
+        nzbget_result = nzbGetRPC.append(nzb.name + ".nzb", sickbeard.NZBGET_CATEGORY, nzbgetprio, False,
+                                         nzbcontent64, False, dupekey, dupescore, "score")
     else:
-        nzbget_result = nzbGetRPC.append(nzb.name + ".nzb", sickbeard.NZBGET_CATEGORY, nzbgetprio, False, nzbcontent64)
-
+        nzbget_result = nzbGetRPC.append(nzb.name + ".nzb", sickbeard.NZBGET_CATEGORY, nzbgetprio, False,
+                                         nzbcontent64)
+
     if nzbget_result:
         logger.log(u"NZB sent to NZBget successfully", logger.DEBUG)
         return True
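sendNZB() above picks an append() signature based on the NZBGet version it detects. The branch in isolation (queue_nzb, host and credentials are hypothetical; the argument lists mirror the three calls in the diff):

    import xmlrpclib

    def queue_nzb(url, name, category, content_b64, version, priority=0, add_to_top=False,
                  dupekey="", dupescore=0):
        # e.g. url = "http://user:pass@localhost:6789/xmlrpc"
        rpc = xmlrpclib.ServerProxy(url)
        if version == 0:
            return rpc.append(name, category, add_to_top, content_b64)
        elif version >= 12:
            # v12+ adds duplicate handling: dupekey, dupescore and a dupe mode.
            return rpc.append(name, category, priority, False, content_b64, False,
                              dupekey, dupescore, "score")
        return rpc.append(name, category, priority, False, content_b64)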
diff --git a/sickbeard/postProcessor.py b/sickbeard/postProcessor.py
index e4151dcc..e80895dd 100644
--- a/sickbeard/postProcessor.py
+++ b/sickbeard/postProcessor.py
@@ -46,8 +46,6 @@ from sickbeard.exceptions import ex

 from sickbeard.name_parser.parser import NameParser, InvalidNameException

-from sickbeard.indexers import indexer_api, indexer_exceptions
-from common import indexerStrings

 class PostProcessor(object):
     """
@@ -59,7 +57,7 @@ class PostProcessor(object):
     EXISTS_SMALLER = 3
     DOESNT_EXIST = 4

-    IGNORED_FILESTRINGS = [ "/.AppleDouble/", ".DS_Store" ]
+    IGNORED_FILESTRINGS = ["/.AppleDouble/", ".DS_Store"]

     NZB_NAME = 1
     FOLDER_NAME = 2
@@ -149,7 +147,8 @@ class PostProcessor(object):
             return PostProcessor.EXISTS_SMALLER

         else:
-            self._log(u"File " + existing_file + " doesn't exist so there's no worries about replacing it", logger.DEBUG)
+            self._log(u"File " + existing_file + " doesn't exist so there's no worries about replacing it",
+                      logger.DEBUG)
             return PostProcessor.DOESNT_EXIST

     def list_associated_files(self, file_path, base_name_only=False, subtitles_only=False):
@@ -185,7 +184,7 @@ class PostProcessor(object):
             if associated_file_path == file_path:
                 continue
             # only list it if the only non-shared part is the extension or if it is a subtitle
-            if subtitles_only and not associated_file_path[len(associated_file_path)-3:] in common.subtitleExtensions:
+            if subtitles_only and not associated_file_path[len(associated_file_path) - 3:] in common.subtitleExtensions:
                 continue

             #Exclude .rar files from associated list
@@ -227,7 +226,7 @@ class PostProcessor(object):
                 # File is read-only, so make it writeable
                 self._log('Read only mode on file ' + cur_file + ' Will try to make it writeable', logger.DEBUG)
                 try:
-                    ek.ek(os.chmod,cur_file,stat.S_IWRITE)
+                    ek.ek(os.chmod, cur_file, stat.S_IWRITE)
                 except:
                     self._log(u'Cannot change permissions of ' + cur_file, logger.WARNING)

@@ -235,7 +234,8 @@ class PostProcessor(object):
             # do the library update for synoindex
             notifiers.synoindex_notifier.deleteFile(cur_file)

-    def _combined_file_operation (self, file_path, new_path, new_base_name, associated_files=False, action=None, subtitles=False):
+    def _combined_file_operation(self, file_path, new_path, new_base_name, associated_files=False, action=None,
+                                 subtitles=False):
         """
         Performs a generic operation (move or copy) on a file. Can rename the file as well as change its location,
         and optionally move associated files too.
@@ -321,7 +321,8 @@ class PostProcessor(object):
                 self._log("Unable to move file " + cur_file_path + " to " + new_file_path + ": " + str(e), logger.ERROR)
                 raise e

-        self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_move, subtitles=subtitles)
+        self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_move,
+                                      subtitles=subtitles)

     def _copy(self, file_path, new_path, new_base_name, associated_files=False, subtitles=False):
         """
@@ -331,7 +332,7 @@ class PostProcessor(object):
             associated_files: Boolean, whether we should copy similarly-named files too
         """

-        def _int_copy (cur_file_path, new_file_path):
+        def _int_copy(cur_file_path, new_file_path):

             self._log(u"Copying file from " + cur_file_path + " to " + new_file_path, logger.DEBUG)
             try:
@@ -341,7 +342,8 @@ class PostProcessor(object):
                 logger.log("Unable to copy file " + cur_file_path + " to " + new_file_path + ": " + ex(e), logger.ERROR)
                 raise e

-        self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_copy, subtitles=subtitles)
+        self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_copy,
+                                      subtitles=subtitles)

     def _hardlink(self, file_path, new_path, new_base_name, associated_files=False, subtitles=False):
@@ -361,6 +363,7 @@ class PostProcessor(object):
             except (IOError, OSError), e:
                 self._log("Unable to link file " + cur_file_path + " to " + new_file_path + ": " + ex(e), logger.ERROR)
                 raise e
+
         self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_hard_link)

     def _moveAndSymlink(self, file_path, new_path, new_base_name, associated_files=False, subtitles=False):
@@ -380,7 +383,9 @@ class PostProcessor(object):
             except (IOError, OSError), e:
                 self._log("Unable to link file " + cur_file_path + " to " + new_file_path + ": " + ex(e), logger.ERROR)
                 raise e
-        self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_move_and_sym_link)
+
+        self._combined_file_operation(file_path, new_path, new_base_name, associated_files,
+                                      action=_int_move_and_sym_link)

     def _history_lookup(self):
         """
@@ -476,7 +481,8 @@ class PostProcessor(object):
         # remember whether it's a proper
         if parse_result.extra_info:
-            self.is_proper = re.search('(^|[\. _-])(proper|repack)([\. _-]|$)', parse_result.extra_info, re.I) != None
+            self.is_proper = re.search('(^|[\. _-])(proper|repack)([\. _-]|$)', parse_result.extra_info,
+                                       re.I) != None

         # if the result is complete then remember that for later
         if parse_result.series_name and parse_result.season_number != None and parse_result.episode_numbers and parse_result.release_group:
@@ -488,9 +494,11 @@ class PostProcessor(object):
             elif test_name == self.file_name:
                 self.good_results[self.FILE_NAME] = True
             else:
-                logger.log(u"Nothing was good, found " + repr(test_name) + " and wanted either " + repr(self.nzb_name) + ", " + repr(self.folder_name) + ", or " + repr(self.file_name))
+                logger.log(u"Nothing was good, found " + repr(test_name) + " and wanted either " + repr(
+                    self.nzb_name) + ", " + repr(self.folder_name) + ", or " + repr(self.file_name))
         else:
-            logger.log(u"Parse result not sufficient(all following have to be set). Will not save release name", logger.DEBUG)
+            logger.log(u"Parse result not sufficient(all following have to be set). Will not save release name",
+                       logger.DEBUG)
             logger.log("Parse result(series_name): " + str(parse_result.series_name), logger.DEBUG)
             logger.log("Parse result(season_number): " + str(parse_result.season_number), logger.DEBUG)
             logger.log("Parse result(episode_numbers): " + str(parse_result.episode_numbers), logger.DEBUG)
@@ -507,7 +515,7 @@ class PostProcessor(object):
         # see if we can find the name directly in the DB, if so use it
         for cur_name in name_list:
-            self._log(u"Looking up " + cur_name +u" in the DB", logger.DEBUG)
+            self._log(u"Looking up " + cur_name + u" in the DB", logger.DEBUG)
             db_result = helpers.searchDBForShow(cur_name)
             if db_result:
                 self._log(u"Lookup successful, using " + db_result[0] + " id " + str(db_result[1]), logger.DEBUG)
@@ -521,11 +529,12 @@ class PostProcessor(object):
                 lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI

-                t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
+                t = sickbeard.indexerApi(**lINDEXER_API_PARMS)

-                self._log(u"Looking up name " + cur_name + u" on " + self.indexer + "", logger.DEBUG)
+                self._log(u"Looking up name " + cur_name + u" on " + sickbeard.indexerApi(self.indexer).name + "",
+                          logger.DEBUG)
                 showObj = t[cur_name]
-            except (indexer_exceptions.indexer_exception, IOError):
+            except (sickbeard.indexer_exception, IOError):
                 # if none found, search on all languages
                 try:
                     lINDEXER_API_PARMS = {'indexer': self.indexer}
@@ -533,18 +542,21 @@ class PostProcessor(object):
                     lINDEXER_API_PARMS['search_all_languages'] = True
                     lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI

-                    t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
+                    t = sickbeard.indexerApi(**lINDEXER_API_PARMS)

-                    self._log(u"Looking up name " + cur_name + u" in all languages on " + self.indexer + "", logger.DEBUG)
+                    self._log(u"Looking up name " + cur_name + u" in all languages on " + sickbeard.indexerApi(
+                        self.indexer).name + "", logger.DEBUG)
                     showObj = t[cur_name]
-                except (indexer_exceptions.indexer_exception, IOError):
+                except (sickbeard.indexer_exception, IOError):
                     pass

                 continue
             except (IOError):
                 continue

-            self._log(u"Lookup successful, using " + self.indexer + " id " + str(showObj["id"]), logger.DEBUG)
+            self._log(
+                u"Lookup successful, using " + sickbeard.indexerApi(self.indexer).name + " id " + str(showObj["id"]),
+                logger.DEBUG)
             _finalize(parse_result)
             return (int(showObj["id"]), season, episodes)

@@ -559,7 +571,7 @@ class PostProcessor(object):
         indexer_id = season = None
         episodes = []

-        # try to look up the nzb in history
+        # try to look up the nzb in history
         attempt_list = [self._history_lookup,

                         # try to analyze the nzb name
@@ -577,7 +589,7 @@ class PostProcessor(object):
                         # try to analyze the dir + file name together as one name
                         lambda: self._analyze_name(self.folder_name + u' ' + self.file_name)
-                        ]
+                        ]

         # attempt every possible method to get our info
         for cur_attempt in attempt_list:
@@ -598,18 +610,19 @@ class PostProcessor(object):
         # for air-by-date shows we need to look up the season/episode from tvdb
         if season == -1 and indexer_id and episodes:
-            self._log(u"Looks like this is an air-by-date show, attempting to convert the date to season/episode", logger.DEBUG)
+            self._log(u"Looks like this is an air-by-date show, attempting to convert the date to season/episode",
+                      logger.DEBUG)

             # try to get language set for this show
             indexer_lang = None
             try:
                 showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)
-                if(showObj != None):
+                if (showObj != None):
                     # set the language of the show
                     indexer_lang = showObj.lang
                     self.indexer = showObj.indexer
             except exceptions.MultipleShowObjectsException:
-                raise #TODO: later I'll just log this, for now I want to know about it ASAP
+                raise  #TODO: later I'll just log this, for now I want to know about it ASAP

             try:
                 lINDEXER_API_PARMS = {'indexer': self.indexer}
@@ -617,7 +630,7 @@ class PostProcessor(object):
                 if indexer_lang and not indexer_lang == 'en':
                     lINDEXER_API_PARMS = {'language': indexer_lang}

-                t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
+                t = sickbeard.indexerApi(**lINDEXER_API_PARMS)

                 epObj = t[indexer_id].airedOn(episodes[0])[0]

@@ -625,22 +638,28 @@ class PostProcessor(object):
                 episodes = [int(epObj["episodenumber"])]
                 self._log(u"Got season " + str(season) + " episodes " + str(episodes), logger.DEBUG)
-            except indexer_exceptions.indexer_episodenotfound, e:
-                self._log(u"Unable to find episode with date " + str(episodes[0]) + u" for show " + str(indexer_id) + u", skipping", logger.DEBUG)
+            except sickbeard.indexer_episodenotfound, e:
+                self._log(u"Unable to find episode with date " + str(episodes[0]) + u" for show " + str(
+                    indexer_id) + u", skipping", logger.DEBUG)
                 # we don't want to leave dates in the episode list if we couldn't convert them to real episode numbers
                 episodes = []
                 continue
-            except indexer_exceptions.indexer_error, e:
-                logger.log(u"Unable to contact " + self.indexer + ": " + ex(e), logger.WARNING)
+            except sickbeard.indexer_error, e:
+                logger.log(u"Unable to contact " + sickbeard.indexerApi(self.indexer).name + ": " + ex(e),
+                           logger.WARNING)
                 episodes = []
                 continue

         # if there's no season then we can hopefully just use 1 automatically
         elif season == None and indexer_id:
             myDB = db.DBConnection()
-            numseasonsSQlResult = myDB.select("SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? and season != 0", [indexer_id])
+            numseasonsSQlResult = myDB.select(
+                "SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? and season != 0",
+                [indexer_id])
             if int(numseasonsSQlResult[0][0]) == 1 and season == None:
-                self._log(u"Don't have a season number, but this show appears to only have 1 season, setting seasonnumber to 1...", logger.DEBUG)
+                self._log(
+                    u"Don't have a season number, but this show appears to only have 1 season, setting seasonnumber to 1...",
+                    logger.DEBUG)
                 season = 1

         if indexer_id and season != None and episodes:
@@ -667,11 +686,12 @@ class PostProcessor(object):
         try:
             show_obj = helpers.findCertainShow(sickbeard.showList, indexer_id)
         except exceptions.MultipleShowObjectsException:
-            raise #TODO: later I'll just log this, for now I want to know about it ASAP
+            raise  #TODO: later I'll just log this, for now I want to know about it ASAP

         # if we can't find the show then there's nothing we can really do
         if not show_obj:
-            self._log(u"This show isn't in your list, you need to add it to SB before post-processing an episode", logger.ERROR)
+            self._log(u"This show isn't in your list, you need to add it to SB before post-processing an episode",
+                      logger.ERROR)
             raise exceptions.PostProcessingFailed()

         root_ep = None
@@ -687,7 +707,7 @@ class PostProcessor(object):
             except exceptions.EpisodeNotFoundException, e:
                 self._log(u"Unable to create episode: " + ex(e), logger.DEBUG)
                 raise exceptions.PostProcessingFailed()
-
+
             # associate all the episodes together under a single root episode
             if root_ep == None:
                 root_ep = curEp
@@ -711,9 +731,11 @@ class PostProcessor(object):
         # if there is a quality available in the status then we don't need to bother guessing from the filename
         if ep_obj.status in common.Quality.SNATCHED + common.Quality.SNATCHED_PROPER + common.Quality.SNATCHED_BEST:
-            oldStatus, ep_quality = common.Quality.splitCompositeStatus(ep_obj.status) #@UnusedVariable
+            oldStatus, ep_quality = common.Quality.splitCompositeStatus(ep_obj.status)  #@UnusedVariable
             if ep_quality != common.Quality.UNKNOWN:
-                self._log(u"The old status had a quality in it, using that: " + common.Quality.qualityStrings[ep_quality], logger.DEBUG)
+                self._log(
+                    u"The old status had a quality in it, using that: " + common.Quality.qualityStrings[ep_quality],
+                    logger.DEBUG)
                 return ep_quality

         # nzb name is the most reliable if it exists, followed by folder name and lastly file name
@@ -727,18 +749,24 @@ class PostProcessor(object):
                 continue

             ep_quality = common.Quality.nameQuality(cur_name)
-            self._log(u"Looking up quality for name " + cur_name + u", got " + common.Quality.qualityStrings[ep_quality], logger.DEBUG)
+            self._log(
+                u"Looking up quality for name " + cur_name + u", got " + common.Quality.qualityStrings[ep_quality],
+                logger.DEBUG)

             # if we find a good one then use it
             if ep_quality != common.Quality.UNKNOWN:
-                logger.log(cur_name + u" looks like it has quality " + common.Quality.qualityStrings[ep_quality] + ", using that", logger.DEBUG)
+                logger.log(cur_name + u" looks like it has quality " + common.Quality.qualityStrings[
+                    ep_quality] + ", using that", logger.DEBUG)
                 return ep_quality

         # if we didn't get a quality from one of the names above, try assuming from each of the names
         ep_quality = common.Quality.assumeQuality(self.file_name)
-        self._log(u"Guessing quality for name " + self.file_name+u", got " + common.Quality.qualityStrings[ep_quality], logger.DEBUG)
+        self._log(
+            u"Guessing quality for name " + self.file_name + u", got " + common.Quality.qualityStrings[ep_quality],
+            logger.DEBUG)
         if ep_quality != common.Quality.UNKNOWN:
-            logger.log(self.file_name + u" looks like it has quality " + common.Quality.qualityStrings[ep_quality] + ", using that", logger.DEBUG)
+            logger.log(self.file_name + u" looks like it has quality " + common.Quality.qualityStrings[
+                ep_quality] + ", using that", logger.DEBUG)
             return ep_quality

         test = str(ep_quality)
@@ -757,13 +785,15 @@ class PostProcessor(object):
             script_cmd[0] = ek.ek(os.path.abspath, script_cmd[0])
             self._log(u"Absolute path to script: " + script_cmd[0], logger.DEBUG)

-            script_cmd = script_cmd + [ep_obj.location, self.file_path, str(ep_obj.show.indexerid), str(ep_obj.season), str(ep_obj.episode), str(ep_obj.airdate)]
+            script_cmd = script_cmd + [ep_obj.location, self.file_path, str(ep_obj.show.indexerid), str(ep_obj.season),
+                                       str(ep_obj.episode), str(ep_obj.airdate)]

             # use subprocess to run the command and capture output
             self._log(u"Executing command " + str(script_cmd))
             try:
-                p = subprocess.Popen(script_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR)
-                out, err = p.communicate() # @UnusedVariable
+                p = subprocess.Popen(script_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
+                                     stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR)
+                out, err = p.communicate()  # @UnusedVariable
                 self._log(u"Script result: " + str(out), logger.DEBUG)
             except OSError, e:
@@ -795,12 +825,15 @@ class PostProcessor(object):
         # if the user downloaded it manually and it's higher quality than the existing episode then it's priority
         if new_ep_quality > old_ep_quality and new_ep_quality != common.Quality.UNKNOWN:
-            self._log(u"This was manually downloaded but it appears to be better quality than what we have so I'm marking it as priority", logger.DEBUG)
+            self._log(
+                u"This was manually downloaded but it appears to be better quality than what we have so I'm marking it as priority",
+                logger.DEBUG)
             return True

         # if the user downloaded it manually and it appears to be a PROPER/REPACK then it's priority
         if self.is_proper and new_ep_quality >= old_ep_quality and new_ep_quality != common.Quality.UNKNOWN:
-            self._log(u"This was manually downloaded but it appears to be a proper so I'm marking it as priority", logger.DEBUG)
+            self._log(u"This was manually downloaded but it appears to be a proper so I'm marking it as priority",
+                      logger.DEBUG)
             return True

         return False
@@ -825,7 +858,7 @@ class PostProcessor(object):
         # try to find the file info
         indexer_id = season = episodes = None
         if 'auto' in self.indexer:
-            for indexer in indexerStrings:
+            for indexer in sickbeard.indexerApi().indexers:
                 self.indexer = indexer

                 # try to find the file info
@@ -833,7 +866,8 @@ class PostProcessor(object):
                 if indexer_id and season != None and episodes:
                     break

-                self._log(u"Can't find show on " + self.indexer + ", auto trying next indexer in list", logger.WARNING)
+                self._log(u"Can't find show on " + sickbeard.indexerApi(
+                    self.indexer).name + ", auto trying next indexer in list", logger.WARNING)
         else:
             (indexer_id, season, episodes) = self._find_info()

@@ -865,17 +899,22 @@ class PostProcessor(object):
         # if there's an existing file that we don't want to replace stop here
         if existing_file_status in (PostProcessor.EXISTS_LARGER, PostProcessor.EXISTS_SAME):
-            self._log(u"File exists and we are not going to replace it because it's not smaller, quitting post-processing", logger.ERROR)
+            self._log(
+                u"File exists and we are not going to replace it because it's not smaller, quitting post-processing",
+                logger.ERROR)
             return False
         elif existing_file_status == PostProcessor.EXISTS_SMALLER:
            self._log(u"File exists and is smaller than the new file 
so I'm going to replace it", logger.DEBUG) elif existing_file_status != PostProcessor.DOESNT_EXIST: - self._log(u"Unknown existing file status. This should never happen, please log this as a bug.", logger.ERROR) + self._log(u"Unknown existing file status. This should never happen, please log this as a bug.", + logger.ERROR) return False # if the file is priority then we're going to replace it even if it exists else: - self._log(u"This download is marked a priority download so I'm going to replace an existing file if I find one", logger.DEBUG) + self._log( + u"This download is marked a priority download so I'm going to replace an existing file if I find one", + logger.DEBUG) # delete the existing file (and company) for cur_ep in [ep_obj] + ep_obj.relatedEps: @@ -883,7 +922,8 @@ class PostProcessor(object): self._delete(cur_ep.location, associated_files=True) # clean up any left over folders if cur_ep.location: - helpers.delete_empty_folders(ek.ek(os.path.dirname, cur_ep.location), keep_dir=ep_obj.show._location) + helpers.delete_empty_folders(ek.ek(os.path.dirname, cur_ep.location), + keep_dir=ep_obj.show._location) except (OSError, IOError): raise exceptions.PostProcessingFailed("Unable to delete the existing files") @@ -953,7 +993,8 @@ class PostProcessor(object): dest_path = ek.ek(os.path.dirname, proper_absolute_path) except exceptions.ShowDirNotFoundException: - raise exceptions.PostProcessingFailed(u"Unable to post-process an episode if the show dir doesn't exist, quitting") + raise exceptions.PostProcessingFailed( + u"Unable to post-process an episode if the show dir doesn't exist, quitting") self._log(u"Destination folder for this episode: " + dest_path, logger.DEBUG) @@ -974,16 +1015,20 @@ class PostProcessor(object): try: # move the episode and associated files to the show dir if self.process_method == "copy": - self._copy(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES, sickbeard.USE_SUBTITLES and ep_obj.show.subtitles) + self._copy(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES, + sickbeard.USE_SUBTITLES and ep_obj.show.subtitles) elif self.process_method == "move": - self._move(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES, sickbeard.USE_SUBTITLES and ep_obj.show.subtitles) + self._move(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES, + sickbeard.USE_SUBTITLES and ep_obj.show.subtitles) elif self.process_method == "hardlink": - self._hardlink(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES, sickbeard.USE_SUBTITLES and ep_obj.show.subtitles) + self._hardlink(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES, + sickbeard.USE_SUBTITLES and ep_obj.show.subtitles) elif self.process_method == "symlink": - self._moveAndSymlink(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES, sickbeard.USE_SUBTITLES and ep_obj.show.subtitles) + self._moveAndSymlink(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES, + sickbeard.USE_SUBTITLES and ep_obj.show.subtitles) else: - logger.log(u"Unknown process method: " + sickbeard.PROCESS_METHOD, logger.ERROR) - raise exceptions.PostProcessingFailed("Unable to move the files to their new home") + logger.log(u"Unknown process method: " + sickbeard.PROCESS_METHOD, logger.ERROR) + raise exceptions.PostProcessingFailed("Unable to move the files to their new home") except (OSError, IOError): raise exceptions.PostProcessingFailed("Unable to move the files to 
their new home") diff --git a/sickbeard/processTV.py b/sickbeard/processTV.py index 4506851f..ece848b3 100644 --- a/sickbeard/processTV.py +++ b/sickbeard/processTV.py @@ -36,11 +36,14 @@ from sickbeard import failedProcessor from lib.unrar2 import RarFile, RarInfo from lib.unrar2.rar_exceptions import * -def logHelper (logMessage, logLevel=logger.MESSAGE): + +def logHelper(logMessage, logLevel=logger.MESSAGE): logger.log(logMessage, logLevel) return logMessage + u"\n" -def processDir(dirName, nzbName=None, process_method=None, force=False, is_priority=None, failed=False, type="auto", indexer="auto"): + +def processDir(dirName, nzbName=None, process_method=None, force=False, is_priority=None, failed=False, type="auto", + indexer="auto"): """ Scans through the files in dirName and processes whatever media files it finds @@ -72,7 +75,9 @@ def processDir(dirName, nzbName=None, process_method=None, force=False, is_prior # if we didn't find a real dir then quit if not ek.ek(os.path.isdir, dirName): - returnStr += logHelper(u"Unable to figure out what folder to process. If your downloader and Sick Beard aren't on the same PC make sure you fill out your TV download dir in the config.", logger.DEBUG) + returnStr += logHelper( + u"Unable to figure out what folder to process. If your downloader and Sick Beard aren't on the same PC make sure you fill out your TV download dir in the config.", + logger.DEBUG) return returnStr path, dirs, files = get_path_dir_files(dirName, nzbName, type) @@ -128,26 +133,28 @@ def processDir(dirName, nzbName=None, process_method=None, force=False, is_prior #Don't Link media when the media is extracted from a rar in the same path if process_method in ('hardlink', 'symlink') and videoInRar: process_media(processPath, videoInRar, nzbName, 'move', force, is_priority, indexer) - process_media(processPath, set(videoFiles) - set(videoInRar), nzbName, process_method, force, is_priority, indexer) + process_media(processPath, set(videoFiles) - set(videoInRar), nzbName, process_method, force, + is_priority, indexer) delete_files(processPath, rarContent) else: process_media(processPath, videoFiles, nzbName, process_method, force, is_priority, indexer) #Delete all file not needed if process_method != "move" or not process_result \ - or type=="manual": #Avoid to delete files if is Manual PostProcessing + or type == "manual": #Avoid to delete files if is Manual PostProcessing continue delete_files(processPath, notwantedFiles) if process_method == "move" and \ - ek.ek(os.path.normpath, processPath) != ek.ek(os.path.normpath, sickbeard.TV_DOWNLOAD_DIR): + ek.ek(os.path.normpath, processPath) != ek.ek(os.path.normpath, + sickbeard.TV_DOWNLOAD_DIR): delete_dir(processPath) return returnStr -def validateDir(path, dirName, nzbNameOriginal, failed): +def validateDir(path, dirName, nzbNameOriginal, failed): global process_result, returnStr returnStr += logHelper(u"Processing folder " + dirName, logger.DEBUG) @@ -156,10 +163,12 @@ def validateDir(path, dirName, nzbNameOriginal, failed): returnStr += logHelper(u"The directory name indicates it failed to extract.", logger.DEBUG) failed = True elif ek.ek(os.path.basename, dirName).startswith('_UNDERSIZED_'): - returnStr += logHelper(u"The directory name indicates that it was previously rejected for being undersized.", logger.DEBUG) + returnStr += logHelper(u"The directory name indicates that it was previously rejected for being undersized.", + logger.DEBUG) failed = True elif ek.ek(os.path.basename, dirName).startswith('_UNPACK_'): - returnStr += 
logHelper(u"The directory name indicates that this release is in the process of being unpacked.", logger.DEBUG) + returnStr += logHelper(u"The directory name indicates that this release is in the process of being unpacked.", + logger.DEBUG) if failed: process_failed(os.path.join(path, dirName), nzbNameOriginal) @@ -169,8 +178,12 @@ def validateDir(path, dirName, nzbNameOriginal, failed): myDB = db.DBConnection() sqlResults = myDB.select("SELECT * FROM tv_shows") for sqlShow in sqlResults: - if dirName.lower().startswith(ek.ek(os.path.realpath, sqlShow["location"]).lower()+os.sep) or dirName.lower() == ek.ek(os.path.realpath, sqlShow["location"]).lower(): - returnStr += logHelper(u"You're trying to post process an episode that's already been moved to its show dir, skipping", logger.ERROR) + if dirName.lower().startswith( + ek.ek(os.path.realpath, sqlShow["location"]).lower() + os.sep) or dirName.lower() == ek.ek( + os.path.realpath, sqlShow["location"]).lower(): + returnStr += logHelper( + u"You're trying to post process an episode that's already been moved to its show dir, skipping", + logger.ERROR) return False # Get the videofile list for the next checks @@ -211,8 +224,8 @@ def validateDir(path, dirName, nzbNameOriginal, failed): return False -def unRAR(path, rarFiles, force): +def unRAR(path, rarFiles, force): global process_result, returnStr unpacked_files = [] @@ -232,27 +245,29 @@ def unRAR(path, rarFiles, force): skip_file = False for file_in_archive in [os.path.basename(x.filename) for x in rar_handle.infolist() if not x.isdir]: if already_postprocessed(path, file_in_archive, force): - returnStr += logHelper(u"Archive file already post-processed, extraction skipped: " + file_in_archive, logger.DEBUG) + returnStr += logHelper( + u"Archive file already post-processed, extraction skipped: " + file_in_archive, + logger.DEBUG) skip_file = True break if skip_file: continue - rar_handle.extract(path = path, withSubpath = False, overwrite = False) + rar_handle.extract(path=path, withSubpath=False, overwrite=False) unpacked_files += [os.path.basename(x.filename) for x in rar_handle.infolist() if not x.isdir] del rar_handle except Exception, e: - returnStr += logHelper(u"Failed Unrar archive " + archive + ': ' + ex(e), logger.ERROR) - process_result = False - continue + returnStr += logHelper(u"Failed Unrar archive " + archive + ': ' + ex(e), logger.ERROR) + process_result = False + continue returnStr += logHelper(u"UnRar content: " + str(unpacked_files), logger.DEBUG) return unpacked_files -def already_postprocessed(dirName, videofile, force): +def already_postprocessed(dirName, videofile, force): global returnStr if force: @@ -266,7 +281,8 @@ def already_postprocessed(dirName, videofile, force): myDB = db.DBConnection() sqlResult = myDB.select("SELECT * FROM tv_episodes WHERE release_name = ?", [dirName]) if sqlResult: - returnStr += logHelper(u"You're trying to post process a dir that's already been processed, skipping", logger.DEBUG) + returnStr += logHelper(u"You're trying to post process a dir that's already been processed, skipping", + logger.DEBUG) return True # This is needed for video whose name differ from dirName @@ -275,7 +291,8 @@ def already_postprocessed(dirName, videofile, force): sqlResult = myDB.select("SELECT * FROM tv_episodes WHERE release_name = ?", [videofile.rpartition('.')[0]]) if sqlResult: - returnStr += logHelper(u"You're trying to post process a video that's already been processed, skipping", logger.DEBUG) + returnStr += logHelper(u"You're trying to post process 
a video that's already been processed, skipping", logger.DEBUG)
+        returnStr += logHelper(u"You're trying to post process a video that's already been processed, skipping",
+                               logger.DEBUG)
         return True
 
     #Needed if we have downloaded the same episode @ different quality
@@ -285,13 +302,14 @@
         search_sql += " and history.resource LIKE ?"
     sqlResult = myDB.select(search_sql, [u'%' + videofile])
     if sqlResult:
-        returnStr += logHelper(u"You're trying to post process a video that's already been processed, skipping", logger.DEBUG)
+        returnStr += logHelper(u"You're trying to post process a video that's already been processed, skipping",
+                               logger.DEBUG)
         return True
 
     return False
 
-def process_media(processPath, videoFiles, nzbName, process_method, force, is_priority, indexer):
 
+def process_media(processPath, videoFiles, nzbName, process_method, force, is_priority, indexer):
     global process_result, returnStr
 
     for cur_video_file in videoFiles:
@@ -314,14 +332,15 @@ def process_media(processPath, videoFiles, nzbName, process_method, force, is_pr
         if process_result:
             returnStr += logHelper(u"Processing succeeded for " + cur_video_file_path)
         else:
-            returnStr += logHelper(u"Processing failed for " + cur_video_file_path + ": " + process_fail_message, logger.WARNING)
+            returnStr += logHelper(u"Processing failed for " + cur_video_file_path + ": " + process_fail_message,
+                                   logger.WARNING)
 
         #If something fail abort the processing on dir
         if not process_result:
             break
 
-def delete_files(processPath, notwantedFiles):
 
+def delete_files(processPath, notwantedFiles):
     global returnStr, process_result
 
     if not process_result:
@@ -333,30 +352,32 @@
 
         cur_file_path = ek.ek(os.path.join, processPath, cur_file)
 
         if not ek.ek(os.path.isfile, cur_file_path):
-            continue #Prevent error when a notwantedfiles is an associated files
+            continue  #Prevent errors when a not-wanted file is an associated file
 
         returnStr += logHelper(u"Deleting file " + cur_file, logger.DEBUG)
 
-        #check first the read-only attribute
+        #check first the read-only attribute
         file_attribute = ek.ek(os.stat, cur_file_path)[0]
         if (not file_attribute & stat.S_IWRITE):
             # File is read-only, so make it writeable
             returnStr += logHelper(u"Changing ReadOnly Flag for file " + cur_file, logger.DEBUG)
             try:
-                ek.ek(os.chmod,cur_file_path,stat.S_IWRITE)
+                ek.ek(os.chmod, cur_file_path, stat.S_IWRITE)
             except OSError, e:
-                returnStr += logHelper(u"Cannot change permissions of " + cur_file_path + ': ' + e.strerror, logger.DEBUG)
+                returnStr += logHelper(u"Cannot change permissions of " + cur_file_path + ': ' + e.strerror,
+                                       logger.DEBUG)
 
         try:
             ek.ek(os.remove, cur_file_path)
         except OSError, e:
             returnStr += logHelper(u"Unable to delete file " + cur_file + ': ' + e.strerror, logger.DEBUG)
 
-def delete_dir(processPath):
 
+def delete_dir(processPath):
     global returnStr
 
     if not ek.ek(os.listdir, processPath) == []:
-        returnStr += logHelper(u"Skipping Deleting folder " + processPath + ' because some files was not deleted/processed', logger.DEBUG)
+        returnStr += logHelper(
+            u"Skipping deleting folder " + processPath + ' because some files were not deleted/processed', logger.DEBUG)
         return
 
     returnStr += logHelper(u"Deleting folder " + processPath, logger.DEBUG)
 
@@ -366,15 +387,16 @@ def delete_dir(processPath):
     except (OSError, IOError), e:
         returnStr += logHelper(u"Warning: unable to remove the folder " + processPath + ": " + ex(e), logger.WARNING)
 
-def get_path_dir_files(dirName, nzbName, type):
-    if dirName == sickbeard.TV_DOWNLOAD_DIR and not nzbName or type =="manual": #Scheduled Post Processing Active
+def 
get_path_dir_files(dirName, nzbName, type):
+    if dirName == sickbeard.TV_DOWNLOAD_DIR and not nzbName or type == "manual":  #Scheduled Post Processing Active
         #Get at first all the subdir in the dirName
         for path, dirs, files in ek.ek(os.walk, dirName):
             break
     else:
-        path, dirs = ek.ek(os.path.split, dirName) #Script Post Processing
-        if not nzbName is None and not nzbName.endswith('.nzb') and os.path.isfile(os.path.join(dirName, nzbName)): #For single torrent file without Dir
+        path, dirs = ek.ek(os.path.split, dirName)  #Script Post Processing
+        if not nzbName is None and not nzbName.endswith('.nzb') and os.path.isfile(
+                os.path.join(dirName, nzbName)):  #For single torrent file without Dir
             dirs = []
             files = [os.path.join(dirName, nzbName)]
         else:
@@ -383,6 +405,7 @@ def get_path_dir_files(dirName, nzbName, type):
 
     return path, dirs, files
 
+
 def process_failed(dirName, nzbName):
     """Process a download that did not complete correctly"""
 
@@ -405,4 +428,6 @@ def process_failed(dirName, nzbName):
         if process_result:
             returnStr += logHelper(u"Failed Download Processing succeeded: (" + str(nzbName) + ", " + dirName + ")")
         else:
-            returnStr += logHelper(u"Failed Download Processing failed: (" + str(nzbName) + ", " + dirName + "): " + process_fail_message, logger.WARNING)
+            returnStr += logHelper(
+                u"Failed Download Processing failed: (" + str(nzbName) + ", " + dirName + "): " + process_fail_message,
+                logger.WARNING)
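The processTV.py changes above preserve the module's reporting idiom: logHelper() both logs a message and returns it with a trailing newline, and each function appends those returns to the module-global returnStr so the whole transcript can be handed back to the caller. A minimal stand-alone sketch of that idiom (logger stubbed with print; illustrative only, not code from this patch):

    returnStr = u""

    def logHelper(logMessage, logLevel=None):
        print logMessage              # stands in for logger.log(logMessage, logLevel)
        return logMessage + u"\n"     # returned so the caller can accumulate it

    def processDir(dirName):
        global returnStr
        returnStr += logHelper(u"Processing folder " + dirName)
        return returnStr              # the accumulated transcript goes back to the caller
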
Result " + parse_result.series_name + " matched to show " + curShow.name, + logger.DEBUG) # set the indexerid in the db to the show's indexerid curProper.indexerid = curShow.indexerid @@ -157,7 +157,8 @@ class ProperFinder(): continue if not show_name_helpers.filterBadReleases(curProper.name): - logger.log(u"Proper " + curProper.name + " isn't a valid scene release that we want, igoring it", logger.DEBUG) + logger.log(u"Proper " + curProper.name + " isn't a valid scene release that we want, igoring it", + logger.DEBUG) continue # if we have an air-by-date show then get the real season/episode numbers @@ -175,18 +176,21 @@ class ProperFinder(): lINDEXER_API_PARMS['language'] = indexer_lang try: - t = indexer_api.indexerApi(**lINDEXER_API_PARMS) + t = sickbeard.indexerApi(**lINDEXER_API_PARMS) epObj = t[curProper.indexerid].airedOn(curProper.episode)[0] curProper.season = int(epObj["seasonnumber"]) curProper.episodes = [int(epObj["episodenumber"])] - except indexer_exceptions.indexer_episodenotfound: - logger.log(u"Unable to find episode with date " + str(curProper.episode) + " for show " + parse_result.series_name + ", skipping", logger.WARNING) + except sickbeard.indexer_episodenotfound: + logger.log(u"Unable to find episode with date " + str( + curProper.episode) + " for show " + parse_result.series_name + ", skipping", logger.WARNING) continue # check if we actually want this proper (if it's the right quality) - sqlResults = db.DBConnection().select("SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", [curProper.indexerid, curProper.season, curProper.episode]) + sqlResults = db.DBConnection().select( + "SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", + [curProper.indexerid, curProper.season, curProper.episode]) if not sqlResults: continue oldStatus, oldQuality = Quality.splitCompositeStatus(int(sqlResults[0]["status"])) @@ -196,7 +200,8 @@ class ProperFinder(): continue # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers - if curProper.indexerid != -1 and (curProper.indexerid, curProper.season, curProper.episode) not in map(operator.attrgetter('indexerid', 'season', 'episode'), finalPropers): + if curProper.indexerid != -1 and (curProper.indexerid, curProper.season, curProper.episode) not in map( + operator.attrgetter('indexerid', 'season', 'episode'), finalPropers): logger.log(u"Found a proper that we need: " + str(curProper.name)) finalPropers.append(curProper) @@ -214,11 +219,13 @@ class ProperFinder(): "SELECT resource FROM history " "WHERE showid = ? AND season = ? AND episode = ? AND quality = ? AND date >= ? 
" "AND action IN (" + ",".join([str(x) for x in Quality.SNATCHED]) + ")", - [curProper.indexerid, curProper.season, curProper.episode, curProper.quality, historyLimit.strftime(history.dateFormat)]) + [curProper.indexerid, curProper.season, curProper.episode, curProper.quality, + historyLimit.strftime(history.dateFormat)]) # if we didn't download this episode in the first place we don't know what quality to use for the proper so we can't do it if len(historyResults) == 0: - logger.log(u"Unable to find an original history entry for proper " + curProper.name + " so I'm not downloading it.") + logger.log( + u"Unable to find an original history entry for proper " + curProper.name + " so I'm not downloading it.") continue else: @@ -237,7 +244,8 @@ class ProperFinder(): # get the episode object showObj = helpers.findCertainShow(sickbeard.showList, curProper.indexerid) if showObj == None: - logger.log(u"Unable to find the show with indexerid " + str(curProper.indexerid) + " so unable to download the proper", logger.ERROR) + logger.log(u"Unable to find the show with indexerid " + str( + curProper.indexerid) + " so unable to download the proper", logger.ERROR) continue epObj = showObj.getEpisode(curProper.season, curProper.episode) @@ -263,7 +271,8 @@ class ProperFinder(): sqlResults = myDB.select("SELECT * FROM info") if len(sqlResults) == 0: - myDB.action("INSERT INTO info (last_backlog, last_indexer, last_proper_search) VALUES (?,?,?)", [0, 0, str(when)]) + myDB.action("INSERT INTO info (last_backlog, last_indexer, last_proper_search) VALUES (?,?,?)", + [0, 0, str(when)]) else: myDB.action("UPDATE info SET last_proper_search=" + str(when)) diff --git a/sickbeard/providers/__init__.py b/sickbeard/providers/__init__.py index 0a8aab0a..f3dd67e0 100644 --- a/sickbeard/providers/__init__.py +++ b/sickbeard/providers/__init__.py @@ -22,7 +22,7 @@ __all__ = ['ezrss', 'btn', 'thepiratebay', 'kat', - 'publichd', + 'publichd', 'torrentleech', 'scc', 'hdtorrents', @@ -30,16 +30,16 @@ __all__ = ['ezrss', 'hdbits', 'iptorrents', 'omgwtfnzbs', - 'nextgen' - ] + 'nextgen' +] import sickbeard from sickbeard import logger from os import sys -def sortedProviderList(): +def sortedProviderList(): initialList = sickbeard.providerList + sickbeard.newznabProviderList + sickbeard.torrentRssProviderList providerDict = dict(zip([x.getID() for x in initialList], initialList)) @@ -57,12 +57,12 @@ def sortedProviderList(): return newList -def makeProviderList(): +def makeProviderList(): return [x.provider for x in [getProviderModule(y) for y in __all__] if x] -def getNewznabProviderList(data): +def getNewznabProviderList(data): defaultList = [makeNewznabProvider(x) for x in getDefaultNewznabProviders().split('!!!')] providerList = filter(lambda x: x, [makeNewznabProvider(x) for x in data.split('!!!')]) @@ -80,12 +80,11 @@ def getNewznabProviderList(data): providerDict[curDefault.name].name = curDefault.name providerDict[curDefault.name].url = curDefault.url providerDict[curDefault.name].needs_auth = curDefault.needs_auth - + return filter(lambda x: x, providerList) def makeNewznabProvider(configString): - if not configString: return None @@ -102,12 +101,13 @@ def makeNewznabProvider(configString): return newProvider + def getTorrentRssProviderList(data): providerList = filter(lambda x: x, [makeTorrentRssProvider(x) for x in data.split('!!!')]) return filter(lambda x: x, providerList) -def makeTorrentRssProvider(configString): +def makeTorrentRssProvider(configString): if not configString: return None @@ -120,20 +120,24 @@ 
def makeTorrentRssProvider(configString): return newProvider + def getDefaultNewznabProviders(): return 'Sick Beard Index|http://lolo.sickbeard.com/|0|5030,5040,5060|0!!!NZBs.org|http://nzbs.org/||5030,5040,5060,5070,5090|0!!!Usenet-Crawler|https://www.usenet-crawler.com/||5030,5040,5060|0' + def getProviderModule(name): name = name.lower() prefix = "sickbeard.providers." - if name in __all__ and prefix+name in sys.modules: - return sys.modules[prefix+name] + if name in __all__ and prefix + name in sys.modules: + return sys.modules[prefix + name] else: - raise Exception("Can't find " + prefix+name + " in " + "Providers") + raise Exception("Can't find " + prefix + name + " in " + "Providers") + def getProviderClass(id): - - providerMatch = [x for x in sickbeard.providerList + sickbeard.newznabProviderList + sickbeard.torrentRssProviderList if x.getID() == id] + providerMatch = [x for x in + sickbeard.providerList + sickbeard.newznabProviderList + sickbeard.torrentRssProviderList if + x.getID() == id] if len(providerMatch) != 1: return None diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py index 20f6f624..9d8e394a 100644 --- a/sickbeard/providers/btn.py +++ b/sickbeard/providers/btn.py @@ -36,7 +36,6 @@ import math class BTNProvider(generic.TorrentProvider): - def __init__(self): generic.TorrentProvider.__init__(self, "BTN") @@ -64,8 +63,10 @@ class BTNProvider(generic.TorrentProvider): return self._checkAuth() if 'api-error' in parsedJSON: - logger.log(u"Incorrect authentication credentials for " + self.name + " : " + parsedJSON['api-error'], logger.DEBUG) - raise AuthException("Your authentication credentials for " + self.name + " are incorrect, check your config.") + logger.log(u"Incorrect authentication credentials for " + self.name + " : " + parsedJSON['api-error'], + logger.DEBUG) + raise AuthException( + "Your authentication credentials for " + self.name + " are incorrect, check your config.") return True @@ -150,7 +151,7 @@ class BTNProvider(generic.TorrentProvider): except Exception, error: errorstring = str(error) - if(errorstring.startswith('<') and errorstring.endswith('>')): + if (errorstring.startswith('<') and errorstring.endswith('>')): errorstring = errorstring[1:-1] logger.log(u"Unknown error while accessing " + self.name + ": " + errorstring, logger.ERROR) @@ -296,7 +297,6 @@ class BTNProvider(generic.TorrentProvider): class BTNCache(tvcache.TVCache): - def __init__(self, provider): tvcache.TVCache.__init__(self, provider) @@ -328,13 +328,14 @@ class BTNCache(tvcache.TVCache): ci = self._parseItem(item) if ci is not None: cl.append(ci) - + if len(cl) > 0: myDB = self._getDB() myDB.mass_action(cl) else: - raise AuthException("Your authentication info for " + self.provider.name + " is incorrect, check your config") + raise AuthException( + "Your authentication info for " + self.provider.name + " is incorrect, check your config") else: return [] @@ -350,7 +351,9 @@ class BTNCache(tvcache.TVCache): # Set maximum to 24 hours (24 * 60 * 60 = 86400 seconds) of "RSS" data search, older things will need to be done through backlog if seconds_since_last_update > 86400: - logger.log(u"The last known successful update on " + self.provider.name + " was more than 24 hours ago, only trying to fetch the last 24 hours!", logger.WARNING) + logger.log( + u"The last known successful update on " + self.provider.name + " was more than 24 hours ago, only trying to fetch the last 24 hours!", + logger.WARNING) seconds_since_last_update = 86400 data = 
self.provider._doSearch(search_params=None, age=seconds_since_last_update) @@ -364,10 +367,12 @@ class BTNCache(tvcache.TVCache): logger.log(u"Adding item to results: " + title, logger.DEBUG) return self._addCacheEntry(title, url) else: - logger.log(u"The data returned from the " + self.provider.name + " is incomplete, this result is unusable", logger.ERROR) + logger.log(u"The data returned from the " + self.provider.name + " is incomplete, this result is unusable", + logger.ERROR) return None def _checkAuth(self, data): return self.provider._checkAuthFromData(data) + provider = BTNProvider() diff --git a/sickbeard/providers/dtt.py b/sickbeard/providers/dtt.py index a51a096e..fe7c003d 100644 --- a/sickbeard/providers/dtt.py +++ b/sickbeard/providers/dtt.py @@ -31,8 +31,8 @@ from sickbeard.helpers import sanitizeSceneName, get_xml_text from sickbeard import show_name_helpers from sickbeard.exceptions import ex -class DTTProvider(generic.TorrentProvider): +class DTTProvider(generic.TorrentProvider): def __init__(self): generic.TorrentProvider.__init__(self, "DailyTvTorrents") self.supportsBacklog = True @@ -41,45 +41,45 @@ class DTTProvider(generic.TorrentProvider): def isEnabled(self): return sickbeard.DTT - + def imageName(self): return 'dailytvtorrents.gif' - + def getQuality(self, item): url = item.getElementsByTagName('enclosure')[0].getAttribute('url') quality = Quality.sceneQuality(url) return quality def findSeasonResults(self, show, season): - + return generic.TorrentProvider.findSeasonResults(self, show, season) - + def _dtt_show_id(self, show_name): - return sanitizeSceneName(show_name).replace('.','-').lower() + return sanitizeSceneName(show_name).replace('.', '-').lower() def _get_season_search_strings(self, show, season, wantedEp, searchSeason=False): search_string = [] for show_name in set(show_name_helpers.allPossibleShowNames(show)): - show_string = sanitizeSceneName(show_name).replace('.','-').lower() + show_string = sanitizeSceneName(show_name).replace('.', '-').lower() search_string.append(show_string) return search_string - + def _get_episode_search_strings(self, episode): return self._get_season_search_strings(episode.show, episode.season) - - def _doSearch(self, search_params, show=None): - -# show_id = self._dtt_show_id(show.name) - params = {"items" : "all"} + def _doSearch(self, search_params, show=None): + + # show_id = self._dtt_show_id(show.name) + + params = {"items": "all"} if sickbeard.DTT_NORAR: - params.update({"norar" : "yes"}) + params.update({"norar": "yes"}) if sickbeard.DTT_SINGLE: - params.update({"single" : "yes"}) + params.update({"single": "yes"}) searchURL = self.url + "rss/show/" + search_params + "?" 
+ urllib.urlencode(params) @@ -89,13 +89,13 @@ class DTTProvider(generic.TorrentProvider): if not data: return [] - + try: parsedXML = parseString(data) items = parsedXML.getElementsByTagName('item') except Exception, e: - logger.log(u"Error trying to load DTT RSS feed: "+ex(e), logger.ERROR) - logger.log(u"RSS data: "+data, logger.DEBUG) + logger.log(u"Error trying to load DTT RSS feed: " + ex(e), logger.ERROR) + logger.log(u"RSS data: " + data, logger.DEBUG) return [] results = [] @@ -114,8 +114,8 @@ class DTTProvider(generic.TorrentProvider): return (title, url) -class DTTCache(tvcache.TVCache): +class DTTCache(tvcache.TVCache): def __init__(self, provider): tvcache.TVCache.__init__(self, provider) @@ -123,23 +123,24 @@ class DTTCache(tvcache.TVCache): self.minTime = 30 def _getRSSData(self): - - params = {"items" : "all"} + + params = {"items": "all"} if sickbeard.DTT_NORAR: - params.update({"norar" : "yes"}) + params.update({"norar": "yes"}) if sickbeard.DTT_SINGLE: - params.update({"single" : "yes"}) + params.update({"single": "yes"}) url = self.provider.url + 'rss/allshows?' + urllib.urlencode(params) - logger.log(u"DTT cache update URL: "+ url, logger.DEBUG) + logger.log(u"DTT cache update URL: " + url, logger.DEBUG) data = self.provider.getURL(url) return data def _parseItem(self, item): title, url = self.provider._get_title_and_url(item) - logger.log(u"Adding item from RSS to cache: "+title, logger.DEBUG) + logger.log(u"Adding item from RSS to cache: " + title, logger.DEBUG) return self._addCacheEntry(title, url) + provider = DTTProvider() \ No newline at end of file diff --git a/sickbeard/providers/ezrss.py b/sickbeard/providers/ezrss.py index b3c1cf22..fe6a2815 100644 --- a/sickbeard/providers/ezrss.py +++ b/sickbeard/providers/ezrss.py @@ -18,6 +18,7 @@ import urllib import re + try: import xml.etree.cElementTree as etree except ImportError: @@ -33,7 +34,6 @@ from sickbeard import helpers class EZRSSProvider(generic.TorrentProvider): - def __init__(self): generic.TorrentProvider.__init__(self, "EZRSS") @@ -52,7 +52,8 @@ class EZRSSProvider(generic.TorrentProvider): def getQuality(self, item): - filename = helpers.get_xml_text(item.find('{http://xmlns.ezrss.it/0.1/}torrent/{http://xmlns.ezrss.it/0.1/}fileName')) + filename = helpers.get_xml_text( + item.find('{http://xmlns.ezrss.it/0.1/}torrent/{http://xmlns.ezrss.it/0.1/}fileName')) quality = Quality.nameQuality(filename) return quality @@ -62,7 +63,8 @@ class EZRSSProvider(generic.TorrentProvider): results = {} if show.air_by_date: - logger.log(self.name + u" doesn't support air-by-date backlog because of limitations on their RSS search.", logger.WARNING) + logger.log(self.name + u" doesn't support air-by-date backlog because of limitations on their RSS search.", + logger.WARNING) return results results = generic.TorrentProvider.findSeasonResults(self, show, season) @@ -134,14 +136,17 @@ class EZRSSProvider(generic.TorrentProvider): logger.log(u"Adding item from RSS to results: " + title, logger.DEBUG) results.append(curItem) else: - logger.log(u"The XML returned from the " + self.name + " RSS feed is incomplete, this result is unusable", logger.ERROR) + logger.log( + u"The XML returned from the " + self.name + " RSS feed is incomplete, this result is unusable", + logger.ERROR) return results def _get_title_and_url(self, item): (title, url) = generic.TorrentProvider._get_title_and_url(self, item) - filename = helpers.get_xml_text(item.find('{http://xmlns.ezrss.it/0.1/}torrent/{http://xmlns.ezrss.it/0.1/}fileName')) + filename 
= helpers.get_xml_text( + item.find('{http://xmlns.ezrss.it/0.1/}torrent/{http://xmlns.ezrss.it/0.1/}fileName')) if filename: new_title = self._extract_name_from_filename(filename) @@ -161,7 +166,6 @@ class EZRSSProvider(generic.TorrentProvider): class EZRSSCache(tvcache.TVCache): - def __init__(self, provider): tvcache.TVCache.__init__(self, provider) @@ -192,7 +196,10 @@ class EZRSSCache(tvcache.TVCache): return self._addCacheEntry(title, url) else: - logger.log(u"The XML returned from the " + self.provider.name + " feed is incomplete, this result is unusable", logger.ERROR) + logger.log( + u"The XML returned from the " + self.provider.name + " feed is incomplete, this result is unusable", + logger.ERROR) return None + provider = EZRSSProvider() diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py index 127e7376..985e7d2b 100644 --- a/sickbeard/providers/generic.py +++ b/sickbeard/providers/generic.py @@ -31,7 +31,7 @@ import collections import sickbeard from sickbeard import helpers, classes, logger, db -from sickbeard.common import Quality, MULTI_EP_RESULT, SEASON_RESULT#, SEED_POLICY_TIME, SEED_POLICY_RATIO +from sickbeard.common import Quality, MULTI_EP_RESULT, SEASON_RESULT #, SEED_POLICY_TIME, SEED_POLICY_RATIO from sickbeard import tvcache from sickbeard import encodingKludge as ek from sickbeard.exceptions import ex @@ -40,8 +40,8 @@ from sickbeard.name_parser.parser import NameParser, InvalidNameException from sickbeard import scene_numbering from sickbeard.common import Quality, Overview -class GenericProvider: +class GenericProvider: NZB = "nzb" TORRENT = "torrent" @@ -61,7 +61,7 @@ class GenericProvider: @staticmethod def makeID(name): - return re.sub("[^\w\d_]", "_", name.strip().lower()) + return re.sub("[^\w\d_]", "_", name.strip().lower()) def imageName(self): return self.getID() + '.png' @@ -94,9 +94,9 @@ class GenericProvider: result = classes.TorrentSearchResult(episodes) else: result = classes.SearchResult(episodes) - - result.provider = self - + + result.provider = self + return result def getURL(self, url, post_data=None, headers=None): @@ -121,7 +121,7 @@ class GenericProvider: Save the result to disk. 
""" - logger.log(u"Downloading a result from " + self.name+" at " + result.url) + logger.log(u"Downloading a result from " + self.name + " at " + result.url) data = self.getURL(result.url) @@ -189,7 +189,7 @@ class GenericProvider: Returns a Quality value obtained from the node's data """ - (title, url) = self._get_title_and_url(item) # @UnusedVariable + (title, url) = self._get_title_and_url(item) # @UnusedVariable quality = Quality.sceneQuality(title) return quality @@ -201,7 +201,7 @@ class GenericProvider: def _get_episode_search_strings(self, ep_obj): return [] - + def _get_title_and_url(self, item): """ Retrieves the title and URL data from the item XML node @@ -217,9 +217,9 @@ class GenericProvider: url = helpers.get_xml_text(item.find('link')) if url: url = url.replace('&', '&') - + return (title, url) - + def findEpisode(self, episode, manualSearch=False): self._checkAuth() @@ -229,7 +229,7 @@ class GenericProvider: sceneEpisode.convertToSceneNumbering() logger.log(u'Searching "%s" for "%s" as "%s"' - % (self.name, episode.prettyName() , sceneEpisode.prettyName())) + % (self.name, episode.prettyName(), sceneEpisode.prettyName())) self.cache.updateCache() results = self.cache.searchCache(episode, manualSearch) @@ -261,16 +261,20 @@ class GenericProvider: if episode.show.air_by_date: if parse_result.air_date != episode.airdate: - logger.log(u"Episode " + title + " didn't air on " + str(episode.airdate) + ", skipping it", logger.DEBUG) + logger.log(u"Episode " + title + " didn't air on " + str(episode.airdate) + ", skipping it", + logger.DEBUG) continue elif parse_result.season_number != episode.season or episode.episode not in parse_result.episode_numbers: - logger.log(u"Episode " + title + " isn't " + str(episode.season) + "x" + str(episode.episode) + ", skipping it", logger.DEBUG) + logger.log(u"Episode " + title + " isn't " + str(episode.season) + "x" + str( + episode.episode) + ", skipping it", logger.DEBUG) continue quality = self.getQuality(item) if not episode.show.wantEpisode(episode.season, episode.episode, quality, manualSearch): - logger.log(u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[quality], logger.DEBUG) + logger.log( + u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[ + quality], logger.DEBUG) continue logger.log(u"Found result " + title + " at " + url, logger.DEBUG) @@ -280,8 +284,8 @@ class GenericProvider: result.name = title result.quality = quality result.provider = self - result.content = None - + result.content = None + results.append(result) return results @@ -298,12 +302,12 @@ class GenericProvider: seasonEp = show.getAllEpisodes(season) wantedEp = [x for x in seasonEp if show.getOverview(x.status) in (Overview.WANTED, Overview.QUAL)] map(lambda x: x.convertToSceneNumbering(), wantedEp) - for x in wantedEp: sceneSeasons.setdefault(x.season,[]).append(x) + for x in wantedEp: sceneSeasons.setdefault(x.season, []).append(x) if wantedEp == seasonEp and not show.air_by_date: searchSeason = True - for sceneSeason,sceneEpisodes in sceneSeasons.iteritems(): + for sceneSeason, sceneEpisodes in sceneSeasons.iteritems(): for curString in self._get_season_search_strings(show, str(sceneSeason), sceneEpisodes, searchSeason): itemList += self._doSearch(curString) @@ -323,8 +327,10 @@ class GenericProvider: if not show.air_by_date: # this check is meaningless for non-season searches - if (parse_result.season_number != None and parse_result.season_number != season) or 
(parse_result.season_number == None and season != 1): - logger.log(u"The result " + title + " doesn't seem to be a valid episode for season " + str(season) + ", ignoring", logger.DEBUG) + if (parse_result.season_number != None and parse_result.season_number != season) or ( + parse_result.season_number == None and season != 1): + logger.log(u"The result " + title + " doesn't seem to be a valid episode for season " + str( + season) + ", ignoring", logger.DEBUG) continue # we just use the existing info for normal searches @@ -333,14 +339,19 @@ class GenericProvider: else: if not parse_result.air_by_date: - logger.log(u"This is supposed to be an air-by-date search but the result "+title+" didn't parse as one, skipping it", logger.DEBUG) + logger.log( + u"This is supposed to be an air-by-date search but the result " + title + " didn't parse as one, skipping it", + logger.DEBUG) continue myDB = db.DBConnection() - sql_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?", [show.indexerid, parse_result.air_date.toordinal()]) + sql_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?", + [show.indexerid, parse_result.air_date.toordinal()]) if len(sql_results) != 1: - logger.log(u"Tried to look up the date for the episode "+title+" but the database didn't give proper results, skipping it", logger.WARNING) + logger.log( + u"Tried to look up the date for the episode " + title + " but the database didn't give proper results, skipping it", + logger.WARNING) continue actual_season = int(sql_results[0]["season"]) @@ -354,7 +365,9 @@ class GenericProvider: break if not wantEp: - logger.log(u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[quality], logger.DEBUG) + logger.log( + u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[ + quality], logger.DEBUG) continue logger.log(u"Found result " + title + " at " + url, logger.DEBUG) @@ -375,7 +388,8 @@ class GenericProvider: epNum = epObj[0].episode elif len(epObj) > 1: epNum = MULTI_EP_RESULT - logger.log(u"Separating multi-episode result to check for later - result contains episodes: " + str(parse_result.episode_numbers), logger.DEBUG) + logger.log(u"Separating multi-episode result to check for later - result contains episodes: " + str( + parse_result.episode_numbers), logger.DEBUG) elif len(epObj) == 0: epNum = SEASON_RESULT result.extraInfo = [show] @@ -396,26 +410,23 @@ class GenericProvider: class NZBProvider(GenericProvider): - def __init__(self, name): - GenericProvider.__init__(self, name) self.providerType = GenericProvider.NZB + class TorrentProvider(GenericProvider): - def __init__(self, name): - GenericProvider.__init__(self, name) self.providerType = GenericProvider.TORRENT - + # self.option = {SEED_POLICY_TIME : '', # SEED_POLICY_RATIO: '', # 'PROCESS_METHOD': '' # } - + # def get_provider_options(self): # pass # diff --git a/sickbeard/providers/hdbits.py b/sickbeard/providers/hdbits.py index 04d062e0..2a1c8353 100644 --- a/sickbeard/providers/hdbits.py +++ b/sickbeard/providers/hdbits.py @@ -30,7 +30,6 @@ except ImportError: class HDBitsProvider(generic.TorrentProvider): - def __init__(self): generic.TorrentProvider.__init__(self, "HDBits") @@ -49,7 +48,7 @@ class HDBitsProvider(generic.TorrentProvider): def _checkAuth(self): - if not sickbeard.HDBITS_USERNAME or not sickbeard.HDBITS_PASSKEY: + if not sickbeard.HDBITS_USERNAME or not sickbeard.HDBITS_PASSKEY: 
raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.") return True @@ -61,8 +60,10 @@ class HDBitsProvider(generic.TorrentProvider): if 'status' in parsedJSON and 'message' in parsedJSON: if parsedJSON.get('status') == 5: - logger.log(u"Incorrect authentication credentials for " + self.name + " : " + parsedJSON['message'], logger.DEBUG) - raise AuthException("Your authentication credentials for " + self.name + " are incorrect, check your config.") + logger.log(u"Incorrect authentication credentials for " + self.name + " : " + parsedJSON['message'], + logger.DEBUG) + raise AuthException( + "Your authentication credentials for " + self.name + " are incorrect, check your config.") return True @@ -115,16 +116,19 @@ class HDBitsProvider(generic.TorrentProvider): if episode.show.air_by_date: if parse_result.air_date != episode.airdate: - logger.log(u"Episode " + title + " didn't air on " + str(episode.airdate) + ", skipping it", logger.DEBUG) + logger.log(u"Episode " + title + " didn't air on " + str(episode.airdate) + ", skipping it", + logger.DEBUG) continue elif parse_result.season_number != episode.season or episode.episode not in parse_result.episode_numbers: - logger.log(u"Episode " + title + " isn't " + str(episode.season) + "x" + str(episode.episode) + ", skipping it", logger.DEBUG) + logger.log(u"Episode " + title + " isn't " + str(episode.season) + "x" + str( + episode.episode) + ", skipping it", logger.DEBUG) continue quality = self.getQuality(item) if not episode.show.wantEpisode(episode.season, episode.episode, quality, manualSearch): - logger.log(u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[quality], logger.DEBUG) + logger.log(u"Ignoring result " + title + " because we don't want an episode that is " + + Quality.qualityStrings[quality], logger.DEBUG) continue logger.log(u"Found result " + title + " at " + url, logger.DEBUG) @@ -170,7 +174,6 @@ class HDBitsProvider(generic.TorrentProvider): class HDBitsCache(tvcache.TVCache): - def __init__(self, provider): tvcache.TVCache.__init__(self, provider) @@ -206,7 +209,8 @@ class HDBitsCache(tvcache.TVCache): if parsedJSON and 'data' in parsedJSON: items = parsedJSON['data'] else: - logger.log(u"Resulting JSON from " + self.provider.name + " isn't correct, not parsing it", logger.ERROR) + logger.log(u"Resulting JSON from " + self.provider.name + " isn't correct, not parsing it", + logger.ERROR) return [] cl = [] @@ -214,13 +218,14 @@ class HDBitsCache(tvcache.TVCache): ci = self._parseItem(item) if ci is not None: cl.append(ci) - + if len(cl) > 0: myDB = self._getDB() myDB.mass_action(cl) else: - raise exceptions.AuthException("Your authentication info for " + self.provider.name + " is incorrect, check your config") + raise exceptions.AuthException( + "Your authentication info for " + self.provider.name + " is incorrect, check your config") else: return [] @@ -236,10 +241,12 @@ class HDBitsCache(tvcache.TVCache): logger.log(u"Adding item to results: " + title, logger.DEBUG) return self._addCacheEntry(title, url) else: - logger.log(u"The data returned from the " + self.provider.name + " is incomplete, this result is unusable", logger.ERROR) + logger.log(u"The data returned from the " + self.provider.name + " is incomplete, this result is unusable", + logger.ERROR) return None def _checkAuth(self, data): return self.provider._checkAuthFromData(data) + provider = HDBitsProvider() diff --git a/sickbeard/providers/hdtorrents.py 
b/sickbeard/providers/hdtorrents.py index 1548441f..00991dc1 100644 --- a/sickbeard/providers/hdtorrents.py +++ b/sickbeard/providers/hdtorrents.py @@ -30,22 +30,22 @@ from sickbeard import db from sickbeard import classes from sickbeard import helpers from sickbeard import show_name_helpers -from sickbeard.common import Overview +from sickbeard.common import Overview from sickbeard.exceptions import ex from sickbeard import clients from lib import requests from bs4 import BeautifulSoup from lib.unidecode import unidecode -class HDTorrentsProvider(generic.TorrentProvider): - urls = {'base_url' : 'https://hdts.ru/index.php', - 'login' : 'https://hdts.ru/login.php', - 'detail' : 'https://www.hdts.ru/details.php?id=%s', - 'search' : 'https://hdts.ru/torrents.php?search=%s&active=1&options=0%s', - 'download' : 'https://www.sceneaccess.eu/%s', - 'home' : 'https://www.hdts.ru/%s' - } +class HDTorrentsProvider(generic.TorrentProvider): + urls = {'base_url': 'https://hdts.ru/index.php', + 'login': 'https://hdts.ru/login.php', + 'detail': 'https://www.hdts.ru/details.php?id=%s', + 'search': 'https://hdts.ru/torrents.php?search=%s&active=1&options=0%s', + 'download': 'https://www.sceneaccess.eu/%s', + 'home': 'https://www.hdts.ru/%s' + } def __init__(self): @@ -60,7 +60,7 @@ class HDTorrentsProvider(generic.TorrentProvider): self.categories = "&category[]=59&category[]=60&category[]=30&category[]=38" self.session = requests.Session() - + self.cookies = None def isEnabled(self): @@ -72,42 +72,42 @@ class HDTorrentsProvider(generic.TorrentProvider): def getQuality(self, item): quality = Quality.sceneQuality(item[0]) - return quality + return quality def _doLogin(self): if any(requests.utils.dict_from_cookiejar(self.session.cookies).values()): return True - + if sickbeard.HDTORRENTS_UID and sickbeard.HDTORRENTS_HASH: - + requests.utils.add_dict_to_cookiejar(self.session.cookies, self.cookies) - - else: + + else: login_params = {'uid': sickbeard.HDTORRENTS_USERNAME, 'pwd': sickbeard.HDTORRENTS_PASSWORD, 'submit': 'Confirm', - } - + } + try: - response = self.session.post(self.urls['login'], data=login_params, timeout=30) + response = self.session.post(self.urls['login'], data=login_params, timeout=30) except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e: logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR) return False - + if re.search('You need cookies enabled to log in.', response.text) \ - or response.status_code == 401: + or response.status_code == 401: logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR) return False - + sickbeard.HDTORRENTS_UID = requests.utils.dict_from_cookiejar(self.session.cookies)['uid'] sickbeard.HDTORRENTS_HASH = requests.utils.dict_from_cookiejar(self.session.cookies)['pass'] - + self.cookies = {'uid': sickbeard.HDTORRENTS_UID, 'pass': sickbeard.HDTORRENTS_HASH - } - + } + return True def _get_season_search_strings(self, show, season, wantedEp, searchSeason=False): @@ -121,7 +121,7 @@ class HDTorrentsProvider(generic.TorrentProvider): if searchSeason: search_string = {'Season': [], 'Episode': []} for show_name in set(show_name_helpers.allPossibleShowNames(show)): - ep_string = show_name +' S%02d' % int(season) #1) ShowName SXX + ep_string = show_name + ' S%02d' % int(season) #1) ShowName SXX search_string['Season'].append(ep_string) for ep_obj in wantedEp: @@ -141,16 +141,17 @@ class HDTorrentsProvider(generic.TorrentProvider): if ep_obj.show.air_by_date: for 
show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)):
-                ep_string = show_name_helpers.sanitizeSceneName(show_name) +' '+ \
-                            str(ep_obj.airdate) +'|'+\
+                ep_string = show_name_helpers.sanitizeSceneName(show_name) + ' ' + \
+                            str(ep_obj.airdate) + '|' + \
                             helpers.custom_strftime('%Y %b {S}', ep_obj.airdate)
                 search_string['Episode'].append(ep_string)
         else:
             for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)):
-                ep_string = show_name_helpers.sanitizeSceneName(show_name) +' '+ \
-                            sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode}
+                ep_string = show_name_helpers.sanitizeSceneName(show_name) + ' ' + \
+                            sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season,
+                                                                  'episodenumber': ep_obj.episode}
 
-                search_string['Episode'].append(re.sub('\s+', ' ', ep_string))
+                search_string['Episode'].append(re.sub('\s+', ' ', ep_string))
 
     return [search_string]
 
@@ -170,17 +171,17 @@ class HDTorrentsProvider(generic.TorrentProvider):
             if search_string == '':
                 continue
 
-            search_string = str(search_string).replace('.',' ')
+            search_string = str(search_string).replace('.', ' ')
             searchURL = self.urls['search'] % (search_string, self.categories)
 
             logger.log(u"Search string: " + searchURL, logger.DEBUG)
-            
+
             data = self.getURL(searchURL)
             if not data:
                 continue
-            
-            
+
+
             # Remove HDTorrents NEW list
             split_data = data.partition('\n\n\n\n')
             data = split_data[2]
@@ -189,10 +190,11 @@ class HDTorrentsProvider(generic.TorrentProvider):
                 html = BeautifulSoup(data, features=["html5lib", "permissive"])
 
                 #Get first entry in table
-                entries = html.find_all('td', attrs={'align' : 'center'})
+                entries = html.find_all('td', attrs={'align': 'center'})
 
                 if not entries:
-                    logger.log(u"The Data returned from " + self.name + " do not contains any torrent", logger.DEBUG)
+                    logger.log(u"The data returned from " + self.name + " does not contain any torrents",
+                               logger.DEBUG)
                     continue
 
                 try:
@@ -206,10 +208,10 @@ class HDTorrentsProvider(generic.TorrentProvider):
                     continue
 
                 if mode != 'RSS' and seeders == 0:
-                    continue 
+                    continue
 
                 if not title or not download_url:
-                    continue 
+                    continue
 
                 item = title, download_url, id, seeders, leechers
                 logger.log(u"Found result: " + title + "(" + searchURL + ")", logger.DEBUG)
 
@@ -217,12 +219,12 @@ class HDTorrentsProvider(generic.TorrentProvider):
                 items[mode].append(item)
 
                 #Now attempt to get any others
-                result_table = html.find('table', attrs = {'class' : 'mainblockcontenttt'})
+                result_table = html.find('table', attrs={'class': 'mainblockcontenttt'})
 
                 if not result_table:
                     continue
 
-                entries = result_table.find_all('td', attrs={'align' : 'center', 'class' : 'listas'})
+                entries = result_table.find_all('td', attrs={'align': 'center', 'class': 'listas'})
 
                 if not entries:
                     continue
 
@@ -232,7 +234,7 @@ class HDTorrentsProvider(generic.TorrentProvider):
                     if not block2:
                         continue
                     cells = block2.find_all('td')
-                    
+
                     try:
                         title = cells[1].find('b').get_text().strip('\t ').replace('Blu-ray', 'bd50')
                         url = self.urls['home'] % cells[4].find('a')['href']
@@ -245,7 +247,7 @@ class HDTorrentsProvider(generic.TorrentProvider):
                         continue
 
                     if mode != 'RSS' and seeders == 0:
-                        continue 
+                        continue
 
                     if not title or not download_url:
                         continue
 
@@ -256,13 +258,13 @@ class HDTorrentsProvider(generic.TorrentProvider):
                     items[mode].append(item)
 
             except Exception, e:
-                logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), 
logger.ERROR) #For each search mode sort all the items by seeders - items[mode].sort(key=lambda tup: tup[3], reverse=True) + items[mode].sort(key=lambda tup: tup[3], reverse=True) + + results += items[mode] - results += items[mode] - return results def _get_title_and_url(self, item): @@ -270,7 +272,7 @@ class HDTorrentsProvider(generic.TorrentProvider): title, url, id, seeders, leechers = item if url: - url = str(url).replace('&','&') + url = str(url).replace('&', '&') return (title, url) @@ -284,15 +286,16 @@ class HDTorrentsProvider(generic.TorrentProvider): try: parsed = list(urlparse.urlparse(url)) - parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one + parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one url = urlparse.urlunparse(parsed) response = self.session.get(url, verify=False) except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e: - logger.log(u"Error loading "+self.name+" URL: " + ex(e), logger.ERROR) + logger.log(u"Error loading " + self.name + " URL: " + ex(e), logger.ERROR) return None if response.status_code != 200: - logger.log(self.name + u" page requested with url " + url +" returned status code is " + str(response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING) + logger.log(self.name + u" page requested with url " + url + " returned status code is " + str( + response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING) return None return response.content @@ -301,12 +304,13 @@ class HDTorrentsProvider(generic.TorrentProvider): results = [] - sqlResults = db.DBConnection().select('SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' + - ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' + - ' WHERE e.airdate >= ' + str(search_date.toordinal()) + - ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' + - ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))' - ) + sqlResults = db.DBConnection().select( + 'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' + + ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' + + ' WHERE e.airdate >= ' + str(search_date.toordinal()) + + ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' + + ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))' + ) if not sqlResults: return [] @@ -323,7 +327,6 @@ class HDTorrentsProvider(generic.TorrentProvider): class HDTorrentsCache(tvcache.TVCache): - def __init__(self, provider): tvcache.TVCache.__init__(self, provider) @@ -338,12 +341,12 @@ class HDTorrentsCache(tvcache.TVCache): search_params = {'RSS': []} rss_results = self.provider._doSearch(search_params) - + if rss_results: self.setLastUpdate() else: return [] - + logger.log(u"Clearing " + self.provider.name + " cache and updating with new information") self._clearCache() @@ -369,4 +372,5 @@ class HDTorrentsCache(tvcache.TVCache): return self._addCacheEntry(title, url) + provider = HDTorrentsProvider() diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py index d19a6a8c..17e847f5 100644 --- a/sickbeard/providers/iptorrents.py +++ b/sickbeard/providers/iptorrents.py @@ -29,66 +29,66 @@ from sickbeard import db from sickbeard import classes from sickbeard import helpers from sickbeard import show_name_helpers -from sickbeard.common import Overview +from sickbeard.common import Overview from 
sickbeard.exceptions import ex from sickbeard import clients from lib import requests from bs4 import BeautifulSoup from lib.unidecode import unidecode -class IPTorrentsProvider(generic.TorrentProvider): - urls = {'base_url' : 'https://www.iptorrents.com', - 'login' : 'https://www.iptorrents.com/torrents/', - 'search' : 'https://www.iptorrents.com/torrents/?%s%s&q=%s&qf=ti', - } +class IPTorrentsProvider(generic.TorrentProvider): + urls = {'base_url': 'https://www.iptorrents.com', + 'login': 'https://www.iptorrents.com/torrents/', + 'search': 'https://www.iptorrents.com/torrents/?%s%s&q=%s&qf=ti', + } def __init__(self): generic.TorrentProvider.__init__(self, "IPTorrents") - + self.supportsBacklog = True self.cache = IPTorrentsCache(self) - + self.url = self.urls['base_url'] - + self.session = None self.categorie = 'l73=1&l78=1&l66=1&l65=1&l79=1&l5=1&l4=1' def isEnabled(self): return sickbeard.IPTORRENTS - + def imageName(self): return 'iptorrents.png' - + def getQuality(self, item): - + quality = Quality.sceneQuality(item[0]) - return quality + return quality def _doLogin(self): login_params = {'username': sickbeard.IPTORRENTS_USERNAME, 'password': sickbeard.IPTORRENTS_PASSWORD, 'login': 'submit', - } - + } + self.session = requests.Session() - + try: response = self.session.post(self.urls['login'], data=login_params, timeout=30) except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e: logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR) return False - + if re.search('tries left', response.text) \ - or re.search('IPT', response.text) \ - or response.status_code == 401: - logger.log(u'Invalid username or password for ' + self.name + ', Check your settings!', logger.ERROR) + or re.search('IPT', response.text) \ + or response.status_code == 401: + logger.log(u'Invalid username or password for ' + self.name + ', Check your settings!', logger.ERROR) return False - + return True def _get_season_search_strings(self, show, season, wantedEp, searchSeason=False): @@ -102,7 +102,7 @@ class IPTorrentsProvider(generic.TorrentProvider): if searchSeason: search_string = {'Season': [], 'Episode': []} for show_name in set(show_name_helpers.allPossibleShowNames(show)): - ep_string = show_name +' S%02d' % int(season) #1) ShowName SXX + ep_string = show_name + ' S%02d' % int(season) #1) ShowName SXX search_string['Season'].append(ep_string) for ep_obj in wantedEp: @@ -116,65 +116,67 @@ class IPTorrentsProvider(generic.TorrentProvider): def _get_episode_search_strings(self, ep_obj, add_string=''): search_string = {'Episode': []} - + if not ep_obj: return [] - + if ep_obj.show.air_by_date: for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)): - ep_string = show_name_helpers.sanitizeSceneName(show_name) +' '+ \ - str(ep_obj.airdate) +'|'+\ + ep_string = show_name_helpers.sanitizeSceneName(show_name) + ' ' + \ + str(ep_obj.airdate) + '|' + \ helpers.custom_strftime('%Y %b {S}', ep_obj.airdate) search_string['Episode'].append(ep_string) else: for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)): - ep_string = show_name_helpers.sanitizeSceneName(show_name) +' '+ \ - sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode} + ' %s' %add_string + ep_string = show_name_helpers.sanitizeSceneName(show_name) + ' ' + \ + sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season, + 'episodenumber': ep_obj.episode} + ' %s' % add_string 
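# A minimal, self-contained sketch of the two episode search-term shapes
# assembled above, assuming a naming pattern equivalent to
# sickbeard.config.naming_ep_type[2]; the show name, date, and numbers are
# hypothetical stand-ins for the runtime values.
import re
import datetime

naming_ep_type_2 = 'S%(seasonnumber)02dE%(episodenumber)02d'  # assumed SxxExx pattern

# Air-by-date shows search on the airdate string...
abd_term = 'Show Name' + ' ' + str(datetime.date(2013, 9, 1))  # -> 'Show Name 2013-09-01'
# ...while numbered shows search on SxxExx, with whitespace runs collapsed.
num_term = re.sub('\s+', ' ',
                  'Show Name' + ' ' + naming_ep_type_2 % {'seasonnumber': 1,
                                                          'episodenumber': 2})  # -> 'Show Name S01E02'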
search_string['Episode'].append(re.sub('\s+', ' ', ep_string)) - + return [search_string] def _doSearch(self, search_params): - + results = [] items = {'Season': [], 'Episode': [], 'RSS': []} freeleech = '&free=on' if sickbeard.IPTORRENTS_FREELEECH else '' - + if not self._doLogin(): - return [] - + return [] + for mode in search_params.keys(): for search_string in search_params[mode]: # URL with 50 tv-show results, or max 150 if adjusted in IPTorrents profile searchURL = self.urls['search'] % (self.categorie, freeleech, unidecode(search_string)) searchURL += ';o=seeders' if mode != 'RSS' else '' - + logger.log(u"" + self.name + " search page URL: " + searchURL, logger.DEBUG) - + data = self.getURL(searchURL) if not data: continue - + try: html = BeautifulSoup(data, features=["html5lib", "permissive"]) if not html: - logger.log(u"Invalid HTML data: " + str(data) , logger.DEBUG) + logger.log(u"Invalid HTML data: " + str(data), logger.DEBUG) continue - + if html.find(text='No Torrents Found!'): logger.log(u"No results found for: " + search_string + " (" + searchURL + ")", logger.DEBUG) continue - - torrent_table = html.find('table', attrs = {'class' : 'torrents'}) + + torrent_table = html.find('table', attrs={'class': 'torrents'}) torrents = torrent_table.find_all('tr') if torrent_table else [] #Continue only if one Release is found - if len(torrents)<2: - logger.log(u"The Data returned from " + self.name + " do not contains any torrent", logger.WARNING) + if len(torrents) < 2: + logger.log(u"The Data returned from " + self.name + " do not contains any torrent", + logger.WARNING) continue for result in torrents[1:]: @@ -184,7 +186,7 @@ class IPTorrentsProvider(generic.TorrentProvider): torrent_name = torrent.string torrent_download_url = self.urls['base_url'] + (result.find_all('td')[3].find('a'))['href'] torrent_details_url = self.urls['base_url'] + torrent['href'] - torrent_seeders = int(result.find('td', attrs = {'class' : 'ac t_seeders'}).string) + torrent_seeders = int(result.find('td', attrs={'class': 'ac t_seeders'}).string) ## Not used, perhaps in the future ## #torrent_id = int(torrent['href'].replace('/details.php?id=', '')) #torrent_leechers = int(result.find('td', attrs = {'class' : 'ac t_leechers'}).string) @@ -194,7 +196,7 @@ class IPTorrentsProvider(generic.TorrentProvider): # Filter unseeded torrent and torrents with no name/url if mode != 'RSS' and torrent_seeders == 0: continue - + if not torrent_name or not torrent_download_url: continue @@ -203,18 +205,18 @@ class IPTorrentsProvider(generic.TorrentProvider): items[mode].append(item) except Exception, e: - logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR) + logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR) + + results += items[mode] - results += items[mode] - return results def _get_title_and_url(self, item): - + title, url = item - + if url: - url = str(url).replace('&','&') + url = str(url).replace('&', '&') return (title, url) @@ -228,7 +230,7 @@ class IPTorrentsProvider(generic.TorrentProvider): try: parsed = list(urlparse.urlparse(url)) - parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one + parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one url = urlparse.urlunparse(parsed) response = self.session.get(url) except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e: @@ -236,7 +238,8 @@ class IPTorrentsProvider(generic.TorrentProvider): return None if 
response.status_code != 200: - logger.log(self.name + u" page requested with url " + url +" returned status code is " + str(response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING) + logger.log(self.name + u" page requested with url " + url + " returned status code is " + str( + response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING) return None return response.content @@ -245,12 +248,13 @@ class IPTorrentsProvider(generic.TorrentProvider): results = [] - sqlResults = db.DBConnection().select('SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' + - ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' + - ' WHERE e.airdate >= ' + str(search_date.toordinal()) + - ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' + - ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))' - ) + sqlResults = db.DBConnection().select( + 'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' + + ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' + + ' WHERE e.airdate >= ' + str(search_date.toordinal()) + + ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' + + ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))' + ) if not sqlResults: return [] @@ -267,7 +271,6 @@ class IPTorrentsProvider(generic.TorrentProvider): class IPTorrentsCache(tvcache.TVCache): - def __init__(self, provider): tvcache.TVCache.__init__(self, provider) @@ -282,12 +285,12 @@ class IPTorrentsCache(tvcache.TVCache): search_params = {'RSS': ['']} rss_results = self.provider._doSearch(search_params) - + if rss_results: self.setLastUpdate() else: return [] - + logger.log(u"Clearing " + self.provider.name + " cache and updating with new information") self._clearCache() @@ -301,7 +304,7 @@ class IPTorrentsCache(tvcache.TVCache): if len(cl) > 0: myDB = self._getDB() myDB.mass_action(cl) - + def _parseItem(self, item): (title, url) = item @@ -311,6 +314,7 @@ class IPTorrentsCache(tvcache.TVCache): logger.log(u"Adding item to cache: " + title, logger.DEBUG) - return self._addCacheEntry(title, url) + return self._addCacheEntry(title, url) + provider = IPTorrentsProvider() diff --git a/sickbeard/providers/kat.py b/sickbeard/providers/kat.py index 801855bc..a3e82536 100644 --- a/sickbeard/providers/kat.py +++ b/sickbeard/providers/kat.py @@ -45,8 +45,8 @@ from lib import requests from bs4 import BeautifulSoup from lib.unidecode import unidecode -class KATProvider(generic.TorrentProvider): +class KATProvider(generic.TorrentProvider): def __init__(self): generic.TorrentProvider.__init__(self, "KickAssTorrents") @@ -57,7 +57,7 @@ class KATProvider(generic.TorrentProvider): self.url = 'http://kickass.to/' - self.searchurl = self.url+'usearch/%s/?field=seeders&sorder=desc' #order by seed + self.searchurl = self.url + 'usearch/%s/?field=seeders&sorder=desc' #order by seed def isEnabled(self): return sickbeard.KAT @@ -95,12 +95,12 @@ class KATProvider(generic.TorrentProvider): return quality_string - def _find_season_quality(self,title, torrent_link, ep_number): + def _find_season_quality(self, title, torrent_link, ep_number): """ Return the modified title of a Season Torrent with the quality found inspecting torrent file list """ mediaExtensions = ['avi', 'mkv', 'wmv', 'divx', 'vob', 'dvr-ms', 'wtv', 'ts' - 'ogv', 'rar', 'zip', 'mp4'] + 'ogv', 'rar', 'zip', 'mp4'] quality = Quality.UNKNOWN @@ -113,18 
+113,21 @@ class KATProvider(generic.TorrentProvider): try: soup = BeautifulSoup(data, features=["html5lib", "permissive"]) - file_table = soup.find('table', attrs = {'class': 'torrentFileList'}) + file_table = soup.find('table', attrs={'class': 'torrentFileList'}) if not file_table: return None - files = [x.text for x in file_table.find_all('td', attrs = {'class' : 'torFileName'} )] + files = [x.text for x in file_table.find_all('td', attrs={'class': 'torFileName'})] videoFiles = filter(lambda x: x.rpartition(".")[2].lower() in mediaExtensions, files) #Filtering SingleEpisode/MultiSeason Torrent - if len(videoFiles) < ep_number or len(videoFiles) > float(ep_number * 1.1 ): - logger.log(u"Result " + title + " have " + str(ep_number) + " episode and episodes retrived in torrent are " + str(len(videoFiles)), logger.DEBUG) - logger.log(u"Result " + title + " Seem to be a Single Episode or MultiSeason torrent, skipping result...", logger.DEBUG) + if len(videoFiles) < ep_number or len(videoFiles) > float(ep_number * 1.1): + logger.log(u"Result " + title + " have " + str( + ep_number) + " episode and episodes retrived in torrent are " + str(len(videoFiles)), logger.DEBUG) + logger.log( + u"Result " + title + " Seem to be a Single Episode or MultiSeason torrent, skipping result...", + logger.DEBUG) return None if Quality.sceneQuality(title) != Quality.UNKNOWN: @@ -134,7 +137,7 @@ class KATProvider(generic.TorrentProvider): quality = Quality.sceneQuality(os.path.basename(fileName)) if quality != Quality.UNKNOWN: break - if fileName!=None and quality == Quality.UNKNOWN: + if fileName != None and quality == Quality.UNKNOWN: quality = Quality.assumeQuality(os.path.basename(fileName)) if quality == Quality.UNKNOWN: @@ -147,15 +150,16 @@ class KATProvider(generic.TorrentProvider): except InvalidNameException: return None - logger.log(u"Season quality for "+title+" is "+Quality.qualityStrings[quality], logger.DEBUG) + logger.log(u"Season quality for " + title + " is " + Quality.qualityStrings[quality], logger.DEBUG) if parse_result.series_name and parse_result.season_number: - title = parse_result.series_name+' S%02d' % int(parse_result.season_number)+' '+self._reverseQuality(quality) + title = parse_result.series_name + ' S%02d' % int( + parse_result.season_number) + ' ' + self._reverseQuality(quality) return title except Exception, e: - logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR) + logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR) def _get_season_search_strings(self, show, season, wantedEp, searchSeason=False): @@ -169,10 +173,11 @@ class KATProvider(generic.TorrentProvider): if searchSeason: search_string = {'Season': [], 'Episode': []} for show_name in set(allPossibleShowNames(show)): - ep_string = show_name +' S%02d' % int(season) + ' -S%02d' % int(season) + 'E' + ' category:tv' #1) ShowName SXX -SXXE + ep_string = show_name + ' S%02d' % int(season) + ' -S%02d' % int( + season) + 'E' + ' category:tv' #1) ShowName SXX -SXXE search_string['Season'].append(ep_string) - ep_string = show_name+' Season '+str(season)+' -Ep*' + ' category:tv' #2) ShowName Season X + ep_string = show_name + ' Season ' + str(season) + ' -Ep*' + ' category:tv' #2) ShowName Season X search_string['Season'].append(ep_string) for ep_obj in wantedEp: @@ -184,7 +189,7 @@ class KATProvider(generic.TorrentProvider): return [search_string] def _get_episode_search_strings(self, ep_obj, add_string=''): - + search_string = {'Episode': 
[]} if not ep_obj: @@ -194,20 +199,22 @@ class KATProvider(generic.TorrentProvider): if ep_obj.show.air_by_date: for show_name in set(allPossibleShowNames(ep_obj.show)): - ep_string = sanitizeSceneName(show_name) +' '+\ - str(ep_obj.airdate) +'|'+\ + ep_string = sanitizeSceneName(show_name) + ' ' + \ + str(ep_obj.airdate) + '|' + \ helpers.custom_strftime('%Y %b {S}', ep_obj.airdate) search_string['Episode'].append(ep_string) else: for show_name in set(allPossibleShowNames(ep_obj.show)): - ep_string = sanitizeSceneName(show_name) +' '+\ - sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode} +'|'+\ - sickbeard.config.naming_ep_type[0] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode} +'|'+\ - sickbeard.config.naming_ep_type[3] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode} + ' %s category:tv' %add_string \ - + ep_string = sanitizeSceneName(show_name) + ' ' + \ + sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season, + 'episodenumber': ep_obj.episode} + '|' + \ + sickbeard.config.naming_ep_type[0] % {'seasonnumber': ep_obj.season, + 'episodenumber': ep_obj.episode} + '|' + \ + sickbeard.config.naming_ep_type[3] % {'seasonnumber': ep_obj.season, + 'episodenumber': ep_obj.episode} + ' %s category:tv' % add_string search_string['Episode'].append(re.sub('\s+', ' ', ep_string)) - + return [search_string] @@ -218,14 +225,14 @@ class KATProvider(generic.TorrentProvider): for mode in search_params.keys(): for search_string in search_params[mode]: - + if mode != 'RSS': - searchURL = self.searchurl %(urllib.quote(unidecode(search_string))) + searchURL = self.searchurl % (urllib.quote(unidecode(search_string))) logger.log(u"Search string: " + searchURL, logger.DEBUG) else: searchURL = self.url + 'tv/?field=time_add&sorder=desc' - logger.log(u"KAT cache update URL: "+ searchURL, logger.DEBUG) - + logger.log(u"KAT cache update URL: " + searchURL, logger.DEBUG) + html = self.getURL(searchURL) if not html: continue @@ -233,14 +240,15 @@ class KATProvider(generic.TorrentProvider): try: soup = BeautifulSoup(html, features=["html5lib", "permissive"]) - torrent_table = soup.find('table', attrs = {'class' : 'data'}) + torrent_table = soup.find('table', attrs={'class': 'data'}) torrent_rows = torrent_table.find_all('tr') if torrent_table else [] #Continue only if one Release is found - if len(torrent_rows)<2: - logger.log(u"The Data returned from " + self.name + " do not contains any torrent", logger.WARNING) + if len(torrent_rows) < 2: + logger.log(u"The Data returned from " + self.name + " do not contains any torrent", + logger.WARNING) continue - + for tr in torrent_rows[1:]: try: @@ -249,17 +257,19 @@ class KATProvider(generic.TorrentProvider): title = (tr.find('div', {'class': 'torrentname'}).find_all('a')[1]).text url = tr.find('a', 'imagnet')['href'] verified = True if tr.find('a', 'iverify') else False - trusted = True if tr.find('img', {'alt': 'verified'}) else False + trusted = True if tr.find('img', {'alt': 'verified'}) else False seeders = int(tr.find_all('td')[-2].text) leechers = int(tr.find_all('td')[-1].text) except (AttributeError, TypeError): continue if mode != 'RSS' and seeders == 0: - continue - + continue + if sickbeard.KAT_VERIFIED and not verified: - logger.log(u"KAT Provider found result "+title+" but that doesn't seem like a verified result so I'm ignoring it",logger.DEBUG) + logger.log( + u"KAT Provider found result " + title + " but that doesn't seem like a verified result so I'm 
ignoring it", + logger.DEBUG) continue #Check number video files = episode in season and find the real Quality for full season torrent analyzing files in torrent @@ -275,21 +285,22 @@ class KATProvider(generic.TorrentProvider): items[mode].append(item) except Exception, e: - logger.log(u"Failed to parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR) - - #For each search mode sort all the items by seeders - items[mode].sort(key=lambda tup: tup[3], reverse=True) + logger.log(u"Failed to parsing " + self.name + " Traceback: " + traceback.format_exc(), + logger.ERROR) + + #For each search mode sort all the items by seeders + items[mode].sort(key=lambda tup: tup[3], reverse=True) + + results += items[mode] - results += items[mode] - return results def _get_title_and_url(self, item): - + title, url, id, seeders, leechers = item - + if url: - url = url.replace('&','&') + url = url.replace('&', '&') return (title, url) @@ -298,53 +309,55 @@ class KATProvider(generic.TorrentProvider): try: # Remove double-slashes from url parsed = list(urlparse.urlparse(url)) - parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one + parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one url = urlparse.urlunparse(parsed) r = requests.get(url) except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e: - logger.log(u"Error loading "+self.name+" URL: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR) + logger.log(u"Error loading " + self.name + " URL: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR) return None - + if r.status_code != 200: - logger.log(self.name + u" page requested with url " + url +" returned status code is " + str(r.status_code) + ': ' + clients.http_error_code[r.status_code], logger.WARNING) + logger.log(self.name + u" page requested with url " + url + " returned status code is " + str( + r.status_code) + ': ' + clients.http_error_code[r.status_code], logger.WARNING) return None - + return r.content def downloadResult(self, result): """ Save the result to disk. """ - + torrent_hash = re.findall('urn:btih:([\w]{32,40})', result.url)[0].upper() - + if not torrent_hash: - logger.log("Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR) - return False - + logger.log("Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR) + return False + try: r = requests.get('http://torcache.net/torrent/' + torrent_hash + '.torrent') except Exception, e: logger.log("Unable to connect to Torcache: " + ex(e), logger.ERROR) return False - + if not r.status_code == 200: return False - - magnetFileName = ek.ek(os.path.join, sickbeard.TORRENT_DIR, helpers.sanitizeFileName(result.name) + '.' + self.providerType) + + magnetFileName = ek.ek(os.path.join, sickbeard.TORRENT_DIR, + helpers.sanitizeFileName(result.name) + '.' 
+ self.providerType) magnetFileContent = r.content - try: + try: with open(magnetFileName, 'wb') as fileOut: fileOut.write(magnetFileContent) - + helpers.chmodAsParent(magnetFileName) - + except EnvironmentError, e: logger.log("Unable to save the file: " + ex(e), logger.ERROR) return False - + logger.log(u"Saved magnet link to " + magnetFileName + " ", logger.MESSAGE) return True @@ -353,15 +366,16 @@ class KATProvider(generic.TorrentProvider): results = [] - sqlResults = db.DBConnection().select('SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate, s.indexer FROM tv_episodes AS e' + - ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' + - ' WHERE e.airdate >= ' + str(search_date.toordinal()) + - ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' + - ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))' - ) + sqlResults = db.DBConnection().select( + 'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate, s.indexer FROM tv_episodes AS e' + + ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' + + ' WHERE e.airdate >= ' + str(search_date.toordinal()) + + ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' + + ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))' + ) if not sqlResults: return [] - + for sqlShow in sqlResults: curShow = helpers.findCertainShow(sickbeard.showList, int(sqlShow["showid"])) curEp = curShow.getEpisode(int(sqlShow["season"]), int(sqlShow["episode"])) @@ -375,7 +389,6 @@ class KATProvider(generic.TorrentProvider): class KATCache(tvcache.TVCache): - def __init__(self, provider): tvcache.TVCache.__init__(self, provider) @@ -390,12 +403,12 @@ class KATCache(tvcache.TVCache): search_params = {'RSS': ['rss']} rss_results = self.provider._doSearch(search_params) - + if rss_results: self.setLastUpdate() else: return [] - + logger.log(u"Clearing " + self.provider.name + " cache and updating with new information") self._clearCache() @@ -420,5 +433,6 @@ class KATCache(tvcache.TVCache): logger.log(u"Adding item to cache: " + title, logger.DEBUG) return self._addCacheEntry(title, url) - + + provider = KATProvider() diff --git a/sickbeard/providers/newzbin.py b/sickbeard/providers/newzbin.py index 32562538..6670ad78 100644 --- a/sickbeard/providers/newzbin.py +++ b/sickbeard/providers/newzbin.py @@ -20,7 +20,7 @@ import os import re import sys import time -import urllib,urlparse +import urllib, urlparse from xml.dom.minidom import parseString from datetime import datetime, timedelta @@ -35,8 +35,8 @@ from sickbeard.common import Quality from sickbeard.exceptions import ex from lib.dateutil.parser import parse as parseDate -class NewzbinDownloader(urllib.FancyURLopener): +class NewzbinDownloader(urllib.FancyURLopener): def __init__(self): urllib.FancyURLopener.__init__(self) @@ -63,8 +63,8 @@ class NewzbinDownloader(urllib.FancyURLopener): raise exceptions.NewzbinAPIThrottled() -class NewzbinProvider(generic.NZBProvider): +class NewzbinProvider(generic.NZBProvider): def __init__(self): generic.NZBProvider.__init__(self, "Newzbin") @@ -92,7 +92,7 @@ class NewzbinProvider(generic.NZBProvider): else: attr_dict[cur_attr].append(cur_attr_value) - logger.log("Finding quality of item based on attributes "+str(attr_dict), logger.DEBUG) + logger.log("Finding quality of item based on attributes " + str(attr_dict), logger.DEBUG) if self._is_SDTV(attr_dict): quality = Quality.SDTV @@ -109,17 +109,18 @@ class NewzbinProvider(generic.NZBProvider): else: 
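# A hedged sketch of the attribute-driven pattern the _is_* helpers follow
# (dispatched above, defined just below): each helper tests the report's
# 'Video Fmt' and 'Source' attribute lists, and getQuality takes the first
# helper that matches. The example attribute values are illustrative, not
# an exhaustive mapping.
def looks_like_720p_bluray(attrs):
    # Video Fmt must include H.264/x264 at 720p...
    fmt_ok = 'Video Fmt' in attrs and 'H.264/x264' in attrs['Video Fmt'] and '720p' in attrs['Video Fmt']
    # ...and Source must be a Blu-ray or HD-DVD rip.
    src_ok = 'Source' in attrs and ('Blu-ray' in attrs['Source'] or 'HD-DVD' in attrs['Source'])
    return fmt_ok and src_ok

assert looks_like_720p_bluray({'Video Fmt': ['H.264/x264', '720p'], 'Source': ['Blu-ray']})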
quality = Quality.UNKNOWN - logger.log("Resulting quality: "+str(quality), logger.DEBUG) + logger.log("Resulting quality: " + str(quality), logger.DEBUG) return quality def _is_SDTV(self, attrs): # Video Fmt: (XviD, DivX, H.264/x264), NOT 720p, NOT 1080p, NOT 1080i - video_fmt = 'Video Fmt' in attrs and ('XviD' in attrs['Video Fmt'] or 'DivX' in attrs['Video Fmt'] or 'H.264/x264' in attrs['Video Fmt']) \ - and ('720p' not in attrs['Video Fmt']) \ - and ('1080p' not in attrs['Video Fmt']) \ - and ('1080i' not in attrs['Video Fmt']) + video_fmt = 'Video Fmt' in attrs and ( + 'XviD' in attrs['Video Fmt'] or 'DivX' in attrs['Video Fmt'] or 'H.264/x264' in attrs['Video Fmt']) \ + and ('720p' not in attrs['Video Fmt']) \ + and ('1080p' not in attrs['Video Fmt']) \ + and ('1080i' not in attrs['Video Fmt']) # Source: TV Cap or HDTV or (None) source = 'Source' not in attrs or 'TV Cap' in attrs['Source'] or 'HDTV' in attrs['Source'] @@ -132,11 +133,12 @@ class NewzbinProvider(generic.NZBProvider): def _is_SDDVD(self, attrs): # Video Fmt: (XviD, DivX, H.264/x264), NOT 720p, NOT 1080p, NOT 1080i - video_fmt = 'Video Fmt' in attrs and ('XviD' in attrs['Video Fmt'] or 'DivX' in attrs['Video Fmt'] or 'H.264/x264' in attrs['Video Fmt']) \ - and ('720p' not in attrs['Video Fmt']) \ - and ('1080p' not in attrs['Video Fmt']) \ - and ('1080i' not in attrs['Video Fmt']) - + video_fmt = 'Video Fmt' in attrs and ( + 'XviD' in attrs['Video Fmt'] or 'DivX' in attrs['Video Fmt'] or 'H.264/x264' in attrs['Video Fmt']) \ + and ('720p' not in attrs['Video Fmt']) \ + and ('1080p' not in attrs['Video Fmt']) \ + and ('1080i' not in attrs['Video Fmt']) + # Source: DVD source = 'Source' in attrs and 'DVD' in attrs['Source'] @@ -148,7 +150,7 @@ class NewzbinProvider(generic.NZBProvider): def _is_HDTV(self, attrs): # Video Fmt: H.264/x264, 720p video_fmt = 'Video Fmt' in attrs and ('H.264/x264' in attrs['Video Fmt']) \ - and ('720p' in attrs['Video Fmt']) + and ('720p' in attrs['Video Fmt']) # Source: TV Cap or HDTV or (None) source = 'Source' not in attrs or 'TV Cap' in attrs['Source'] or 'HDTV' in attrs['Source'] @@ -162,7 +164,7 @@ class NewzbinProvider(generic.NZBProvider): # Video Fmt: H.264/x264, 720p video_fmt = 'Video Fmt' in attrs and ('H.264/x264' in attrs['Video Fmt']) \ - and ('720p' in attrs['Video Fmt']) + and ('720p' in attrs['Video Fmt']) # Source: WEB-DL source = 'Source' in attrs and 'WEB-DL' in attrs['Source'] @@ -176,7 +178,7 @@ class NewzbinProvider(generic.NZBProvider): # Video Fmt: H.264/x264, 720p video_fmt = 'Video Fmt' in attrs and ('H.264/x264' in attrs['Video Fmt']) \ - and ('720p' in attrs['Video Fmt']) + and ('720p' in attrs['Video Fmt']) # Source: Blu-ray or HD-DVD source = 'Source' in attrs and ('Blu-ray' in attrs['Source'] or 'HD-DVD' in attrs['Source']) @@ -187,7 +189,7 @@ class NewzbinProvider(generic.NZBProvider): # Video Fmt: H.264/x264, 1080p video_fmt = 'Video Fmt' in attrs and ('H.264/x264' in attrs['Video Fmt']) \ - and ('1080p' in attrs['Video Fmt']) + and ('1080p' in attrs['Video Fmt']) # Source: Blu-ray or HD-DVD source = 'Source' in attrs and ('Blu-ray' in attrs['Source'] or 'HD-DVD' in attrs['Source']) @@ -207,19 +209,20 @@ class NewzbinProvider(generic.NZBProvider): id = self.getIDFromURL(nzb.url) if not id: - logger.log("Unable to get an ID from "+str(nzb.url)+", can't download from Newzbin's API", logger.ERROR) + logger.log("Unable to get an ID from " + str(nzb.url) + ", can't download from Newzbin's API", logger.ERROR) return False - logger.log("Downloading an NZB from 
newzbin with id "+id) + logger.log("Downloading an NZB from newzbin with id " + id) - fileName = ek.ek(os.path.join, sickbeard.NZB_DIR, helpers.sanitizeFileName(nzb.name)+'.nzb') + fileName = ek.ek(os.path.join, sickbeard.NZB_DIR, helpers.sanitizeFileName(nzb.name) + '.nzb') logger.log("Saving to " + fileName) urllib._urlopener = NewzbinDownloader() - params = urllib.urlencode({"username": sickbeard.NEWZBIN_USERNAME, "password": sickbeard.NEWZBIN_PASSWORD, "reportid": id}) + params = urllib.urlencode( + {"username": sickbeard.NEWZBIN_USERNAME, "password": sickbeard.NEWZBIN_PASSWORD, "reportid": id}) try: - urllib.urlretrieve(self.url+"api/dnzb/", fileName, data=params) + urllib.urlretrieve(self.url + "api/dnzb/", fileName, data=params) except exceptions.NewzbinAPIThrottled: logger.log("Done waiting for Newzbin API throttle limit, starting downloads again") self.downloadResult(nzb) @@ -235,7 +238,7 @@ class NewzbinProvider(generic.NZBProvider): try: # Remove double-slashes from url parsed = list(urlparse.urlparse(url)) - parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one + parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one url = urlparse.urlunparse(parsed) f = myOpener.openit(url) @@ -256,36 +259,36 @@ class NewzbinProvider(generic.NZBProvider): suffix = '' else: suffix = 'x' - searchTerms = ['^"'+x+' - '+str(season)+suffix+'"' for x in nameList] + searchTerms = ['^"' + x + ' - ' + str(season) + suffix + '"' for x in nameList] #searchTerms += ['^"'+x+' - Season '+str(season)+'"' for x in nameList] searchStr = " OR ".join(searchTerms) searchStr += " -subpack -extras" - logger.log("Searching newzbin for string "+searchStr, logger.DEBUG) - + logger.log("Searching newzbin for string " + searchStr, logger.DEBUG) + return [searchStr] def _get_episode_search_strings(self, ep_obj): nameList = set(show_name_helpers.allPossibleShowNames(ep_obj.show)) if not ep_obj.show.air_by_date: - searchStr = " OR ".join(['^"'+x+' - %dx%02d"'%(ep_obj.season, ep_obj.episode) for x in nameList]) + searchStr = " OR ".join(['^"' + x + ' - %dx%02d"' % (ep_obj.season, ep_obj.episode) for x in nameList]) else: - searchStr = " OR ".join(['^"'+x+' - '+str(ep_obj.airdate)+'"' for x in nameList]) + searchStr = " OR ".join(['^"' + x + ' - ' + str(ep_obj.airdate) + '"' for x in nameList]) return [searchStr] def _doSearch(self, searchStr, show=None): data = self._getRSSData(searchStr.encode('utf-8')) - + item_list = [] try: parsedXML = parseString(data) items = parsedXML.getElementsByTagName('item') except Exception, e: - logger.log("Error trying to load Newzbin RSS feed: "+ex(e), logger.ERROR) + logger.log("Error trying to load Newzbin RSS feed: " + ex(e), logger.ERROR) return [] for cur_item in items: @@ -301,7 +304,7 @@ class NewzbinProvider(generic.NZBProvider): post_date = parseDate(dateString).replace(tzinfo=None) retention_date = datetime.now() - timedelta(days=sickbeard.USENET_RETENTION) if post_date < retention_date: - logger.log(u"Date "+str(post_date)+" is out of retention range, skipping", logger.DEBUG) + logger.log(u"Date " + str(post_date) + " is out of retention range, skipping", logger.DEBUG) continue except Exception, e: logger.log("Error parsing date from Newzbin RSS feed: " + str(e), logger.ERROR) @@ -315,21 +318,21 @@ class NewzbinProvider(generic.NZBProvider): def _getRSSData(self, search=None): params = { - 'searchaction': 'Search', - 'fpn': 'p', - 'category': 8, - 'u_nfo_posts_only': 0, - 'u_url_posts_only': 0, - 'u_comment_posts_only': 0, - 
'u_show_passworded': 0, - 'u_v3_retention': 0, - 'ps_rb_video_format': 3082257, - 'ps_rb_language': 4096, - 'sort': 'date', - 'order': 'desc', - 'u_post_results_amt': 50, - 'feed': 'rss', - 'hauth': 1, + 'searchaction': 'Search', + 'fpn': 'p', + 'category': 8, + 'u_nfo_posts_only': 0, + 'u_url_posts_only': 0, + 'u_comment_posts_only': 0, + 'u_show_passworded': 0, + 'u_v3_retention': 0, + 'ps_rb_video_format': 3082257, + 'ps_rb_language': 4096, + 'sort': 'date', + 'order': 'desc', + 'u_post_results_amt': 50, + 'feed': 'rss', + 'hauth': 1, } if search: @@ -350,8 +353,8 @@ class NewzbinProvider(generic.NZBProvider): if sickbeard.NEWZBIN_USERNAME in (None, "") or sickbeard.NEWZBIN_PASSWORD in (None, ""): raise exceptions.AuthException("Newzbin authentication details are empty, check your config") -class NewzbinCache(tvcache.TVCache): +class NewzbinCache(tvcache.TVCache): def __init__(self, provider): tvcache.TVCache.__init__(self, provider) @@ -374,14 +377,16 @@ class NewzbinCache(tvcache.TVCache): raise exceptions.AuthException("Invalid Newzbin username/password") if not title or not url: - logger.log("The XML returned from the "+self.provider.name+" feed is incomplete, this result is unusable", logger.ERROR) + logger.log( + "The XML returned from the " + self.provider.name + " feed is incomplete, this result is unusable", + logger.ERROR) return quality = self.provider.getQuality(item) - logger.log("Found quality "+str(quality), logger.DEBUG) + logger.log("Found quality " + str(quality), logger.DEBUG) - logger.log("Adding item from RSS to cache: "+title, logger.DEBUG) + logger.log("Adding item from RSS to cache: " + title, logger.DEBUG) self._addCacheEntry(title, url, quality=quality) diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py index acb834ca..4f6bc088 100644 --- a/sickbeard/providers/newznab.py +++ b/sickbeard/providers/newznab.py @@ -42,7 +42,6 @@ from sickbeard.exceptions import ex, AuthException class NewznabProvider(generic.NZBProvider): - def __init__(self, name, url, key='', catIDs='5030,5040,5060'): generic.NZBProvider.__init__(self, name) @@ -73,7 +72,8 @@ class NewznabProvider(generic.NZBProvider): return self.name + '|' + self.url + '|' + self.key + '|' + self.catIDs + '|' + str(int(self.enabled)) def imageName(self): - if ek.ek(os.path.isfile, ek.ek(os.path.join, sickbeard.PROG_DIR, 'data', 'images', 'providers', self.getID() + '.png')): + if ek.ek(os.path.isfile, + ek.ek(os.path.join, sickbeard.PROG_DIR, 'data', 'images', 'providers', self.getID() + '.png')): return self.getID() + '.png' return 'newznab.png' @@ -155,7 +155,8 @@ class NewznabProvider(generic.NZBProvider): def _checkAuth(self): if self.needs_auth and not self.key: - logger.log(u"Incorrect authentication credentials for " + self.name + " : " + "API key is missing", logger.DEBUG) + logger.log(u"Incorrect authentication credentials for " + self.name + " : " + "API key is missing", + logger.DEBUG) raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.") return True @@ -173,9 +174,11 @@ class NewznabProvider(generic.NZBProvider): elif code == '101': raise AuthException("Your account on " + self.name + " has been suspended, contact the administrator.") elif code == '102': - raise AuthException("Your account isn't allowed to use the API on " + self.name + ", contact the administrator") + raise AuthException( + "Your account isn't allowed to use the API on " + self.name + ", contact the administrator") else: - logger.log(u"Unknown error 
given from " + self.name + ": " + parsedXML.attrib['description'], logger.ERROR) + logger.log(u"Unknown error given from " + self.name + ": " + parsedXML.attrib['description'], + logger.ERROR) return False return True @@ -237,7 +240,9 @@ class NewznabProvider(generic.NZBProvider): logger.log(u"Adding item from RSS to results: " + title, logger.DEBUG) results.append(curItem) else: - logger.log(u"The XML returned from the " + self.name + " RSS feed is incomplete, this result is unusable", logger.DEBUG) + logger.log( + u"The XML returned from the " + self.name + " RSS feed is incomplete, this result is unusable", + logger.DEBUG) return results @@ -248,7 +253,8 @@ class NewznabProvider(generic.NZBProvider): search_terms = ['.proper.', '.repack.'] cache_results = self.cache.listPropers(search_date) - results = [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time'])) for x in cache_results] + results = [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time'])) for x in + cache_results] for term in search_terms: for item in self._doSearch({'q': term}, max_age=4): @@ -260,7 +266,8 @@ class NewznabProvider(generic.NZBProvider): try: # we could probably do dateStr = descriptionStr but we want date in this format - date_text = re.search('(\w{3}, \d{1,2} \w{3} \d{4} \d\d:\d\d:\d\d) [\+\-]\d{4}', description_text).group(1) + date_text = re.search('(\w{3}, \d{1,2} \w{3} \d{4} \d\d:\d\d:\d\d) [\+\-]\d{4}', + description_text).group(1) except: date_text = None @@ -281,7 +288,6 @@ class NewznabProvider(generic.NZBProvider): class NewznabCache(tvcache.TVCache): - def __init__(self, provider): tvcache.TVCache.__init__(self, provider) @@ -314,4 +320,4 @@ class NewznabCache(tvcache.TVCache): return data def _checkAuth(self, parsedXML): - return self.provider._checkAuthFromData(parsedXML) + return self.provider._checkAuthFromData(parsedXML) diff --git a/sickbeard/providers/nextgen.py b/sickbeard/providers/nextgen.py index 2c18d136..0e3dc9e6 100644 --- a/sickbeard/providers/nextgen.py +++ b/sickbeard/providers/nextgen.py @@ -78,7 +78,7 @@ class NextGenProvider(generic.TorrentProvider): return { 'username': sickbeard.NEXTGEN_USERNAME, 'password': sickbeard.NEXTGEN_PASSWORD, - } + } def loginSuccess(self, output): if "NextGen - Login" in output: @@ -107,12 +107,13 @@ class NextGenProvider(generic.TorrentProvider): try: login_params = self.getLoginParams() self.session = requests.Session() - self.session.headers.update({'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:24.0) Gecko/20130519 Firefox/24.0)'}) + self.session.headers.update( + {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:24.0) Gecko/20130519 Firefox/24.0)'}) data = self.session.get(self.urls['login_page']) bs = BeautifulSoup(data.content.decode('iso-8859-1')) - csrfraw = bs.find('form', attrs = {'id': 'login'})['action'] - output = self.session.post(self.urls['base_url']+csrfraw, data=login_params) - + csrfraw = bs.find('form', attrs={'id': 'login'})['action'] + output = self.session.post(self.urls['base_url'] + csrfraw, data=login_params) + if self.loginSuccess(output): self.last_login_check = now self.login_opener = self.session @@ -138,7 +139,7 @@ class NextGenProvider(generic.TorrentProvider): if searchSeason: search_string = {'Season': [], 'Episode': []} for show_name in set(show_name_helpers.allPossibleShowNames(show)): - ep_string = show_name + ' S%02d' % int(season) #1) ShowName SXX + ep_string = show_name + ' S%02d' % int(season) #1) ShowName SXX 
search_string['Season'].append(ep_string) for ep_obj in wantedEp: @@ -158,8 +159,8 @@ class NextGenProvider(generic.TorrentProvider): if ep_obj.show.air_by_date: for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)): - ep_string = show_name_helpers.sanitizeSceneName(show_name) +' '+ \ - str(ep_obj.airdate) +'|'+\ + ep_string = show_name_helpers.sanitizeSceneName(show_name) + ' ' + \ + str(ep_obj.airdate) + '|' + \ helpers.custom_strftime('%Y %b {S}', ep_obj.airdate) search_string['Episode'].append(ep_string) else: @@ -193,16 +194,17 @@ class NextGenProvider(generic.TorrentProvider): try: html = BeautifulSoup(data.decode('iso-8859-1'), features=["html5lib", "permissive"]) - resultsTable = html.find('div', attrs = {'id' : 'torrent-table-wrapper'}) + resultsTable = html.find('div', attrs={'id': 'torrent-table-wrapper'}) if not resultsTable: - logger.log(u"The Data returned from " + self.name + " do not contains any torrent", logger.DEBUG) + logger.log(u"The Data returned from " + self.name + " do not contains any torrent", + logger.DEBUG) continue # Collecting entries - entries_std = html.find_all('div' , attrs = {'id' : 'torrent-std'}) - entries_sticky = html.find_all('div' , attrs = {'id' : 'torrent-sticky'}) - + entries_std = html.find_all('div', attrs={'id': 'torrent-std'}) + entries_sticky = html.find_all('div', attrs={'id': 'torrent-sticky'}) + entries = entries_std + entries_sticky #Xirg STANDARD TORRENTS @@ -210,10 +212,13 @@ class NextGenProvider(generic.TorrentProvider): if len(entries) > 0: for result in entries: - + try: - torrentName = ((result.find('div', attrs = {'id' :'torrent-udgivelse2-users'})).find('a'))['title'] - torrentId = (((result.find('div', attrs = {'id' :'torrent-download'})).find('a'))['href']).replace('download.php?id=','') + torrentName = \ + ((result.find('div', attrs={'id': 'torrent-udgivelse2-users'})).find('a'))['title'] + torrentId = ( + ((result.find('div', attrs={'id': 'torrent-download'})).find('a'))['href']).replace( + 'download.php?id=', '') torrent_name = str(torrentName) torrent_download_url = (self.urls['download'] % torrentId).encode('utf8') torrent_details_url = (self.urls['detail'] % torrentId).encode('utf8') @@ -223,25 +228,28 @@ class NextGenProvider(generic.TorrentProvider): #torrent_leechers = int(result.find('td', attrs = {'class' : 'ac t_leechers'}).string) except (AttributeError, TypeError): continue - + # Filter unseeded torrent and torrents with no name/url #if mode != 'RSS' and torrent_seeders == 0: # continue - + if not torrent_name or not torrent_download_url: continue - + item = torrent_name, torrent_download_url - logger.log(u"Found result: " + torrent_name + " (" + torrent_details_url + ")", logger.DEBUG) + logger.log(u"Found result: " + torrent_name + " (" + torrent_details_url + ")", + logger.DEBUG) items[mode].append(item) - + else: - logger.log(u"The Data returned from " + self.name + " do not contains any torrent", logger.WARNING) + logger.log(u"The Data returned from " + self.name + " do not contains any torrent", + logger.WARNING) continue except Exception, e: - logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR) + logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), + logger.ERROR) results += items[mode] @@ -267,7 +275,7 @@ class NextGenProvider(generic.TorrentProvider): try: # Remove double-slashes from url parsed = list(urlparse.urlparse(url)) - parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one + 
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one url = urlparse.urlunparse(parsed) response = self.session.get(url) @@ -276,7 +284,8 @@ class NextGenProvider(generic.TorrentProvider): return None if response.status_code != 200: - logger.log(self.name + u" page requested with url " + url +" returned status code is " + str(response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING) + logger.log(self.name + u" page requested with url " + url + " returned status code is " + str( + response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING) return None return response.content @@ -285,12 +294,13 @@ class NextGenProvider(generic.TorrentProvider): results = [] - sqlResults = db.DBConnection().select('SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' + - ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' + - ' WHERE e.airdate >= ' + str(search_date.toordinal()) + - ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' + - ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))' - ) + sqlResults = db.DBConnection().select( + 'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' + + ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' + + ' WHERE e.airdate >= ' + str(search_date.toordinal()) + + ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' + + ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))' + ) if not sqlResults: return [] diff --git a/sickbeard/providers/nyaatorrents.py b/sickbeard/providers/nyaatorrents.py index c8cab2b2..7cc716fe 100644 --- a/sickbeard/providers/nyaatorrents.py +++ b/sickbeard/providers/nyaatorrents.py @@ -36,14 +36,14 @@ from sickbeard import tvcache REMOTE_DBG = False -class NyaaProvider(generic.TorrentProvider): +class NyaaProvider(generic.TorrentProvider): def __init__(self): generic.TorrentProvider.__init__(self, "NyaaTorrents") - + self.supportsBacklog = True - + self.supportsAbsoluteNumbering = True self.cache = NyaaCache(self) @@ -52,22 +52,23 @@ class NyaaProvider(generic.TorrentProvider): def isEnabled(self): return sickbeard.NYAA - + def imageName(self): return 'nyaatorrents.png' - + def getQuality(self, item, anime=False): self.debug() - title = helpers.get_xml_text(item.getElementsByTagName('title')[0]).replace("/"," ") + title = helpers.get_xml_text(item.getElementsByTagName('title')[0]).replace("/", " ") quality = Quality.sceneQuality(title, anime) - return quality - + return quality + def findSeasonResults(self, show, season): results = {} - + results = generic.TorrentProvider.findSeasonResults(self, show, season) - + return results + def _get_season_search_strings(self, show, season, wantedEp, searchSeason=False): names = [] names.extend(show_name_helpers.makeSceneShowSearchStrings(show)) @@ -77,12 +78,12 @@ class NyaaProvider(generic.TorrentProvider): return self._get_season_search_strings(ep_obj.show, ep_obj.season) def _doSearch(self, search_string, show=None): - - params = {"term" : search_string.encode('utf-8'), - "sort" : '2', #Sort Descending By Seeders - } - - searchURL = self.url+'?page=rss&'+urllib.urlencode(params) + + params = {"term": search_string.encode('utf-8'), + "sort": '2', #Sort Descending By Seeders + } + + searchURL = self.url + '?page=rss&' + urllib.urlencode(params) logger.log(u"Search string: " + searchURL, logger.DEBUG) @@ -90,42 +91,44 @@ class 
NyaaProvider(generic.TorrentProvider): if not data: return [] - + try: parsedXML = parseString(data) items = parsedXML.getElementsByTagName('item') except Exception, e: - logger.log(u"Error trying to load NyaaTorrents RSS feed: "+ex(e), logger.ERROR) - logger.log(u"RSS data: "+data, logger.DEBUG) + logger.log(u"Error trying to load NyaaTorrents RSS feed: " + ex(e), logger.ERROR) + logger.log(u"RSS data: " + data, logger.DEBUG) return [] - + results = [] for curItem in items: - + (title, url) = self._get_title_and_url(curItem) - + if not title or not url: - logger.log(u"The XML returned from the NyaaTorrents RSS feed is incomplete, this result is unusable: "+data, logger.ERROR) + logger.log( + u"The XML returned from the NyaaTorrents RSS feed is incomplete, this result is unusable: " + data, + logger.ERROR) continue - + results.append(curItem) - + return results def _get_title_and_url(self, item): return generic.TorrentProvider._get_title_and_url(self, item) - def findEpisode (self, episode, manualSearch=False): + def findEpisode(self, episode, manualSearch=False): self._checkAuth() - logger.log(u"Searching "+self.name+" for " + episode.prettyName()) + logger.log(u"Searching " + self.name + " for " + episode.prettyName()) self.cache.updateCache() results = self.cache.searchCache(episode, manualSearch) - logger.log(u"Cache results: "+str(results), logger.DEBUG) + logger.log(u"Cache results: " + str(results), logger.DEBUG) # if we got some results then use them no matter what. # OR @@ -147,25 +150,31 @@ class NyaaProvider(generic.TorrentProvider): myParser = NameParser(show=episode.show) parse_result = myParser.parse(title) except InvalidNameException: - logger.log(u"Unable to parse the filename "+title+" into a valid episode", logger.WARNING) + logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.WARNING) continue if episode.show.air_by_date: if parse_result.air_date != episode.airdate: - logger.log("Episode "+title+" didn't air on "+str(episode.airdate)+", skipping it", logger.DEBUG) + logger.log("Episode " + title + " didn't air on " + str(episode.airdate) + ", skipping it", + logger.DEBUG) continue elif episode.show.anime and episode.show.absolute_numbering: if episode.absolute_number not in parse_result.ab_episode_numbers: - logger.log("Episode "+title+" isn't "+str(episode.absolute_number)+", skipping it", logger.DEBUG) + logger.log("Episode " + title + " isn't " + str(episode.absolute_number) + ", skipping it", + logger.DEBUG) continue elif parse_result.season_number != episode.season or episode.episode not in parse_result.episode_numbers: - logger.log("Episode "+title+" isn't "+str(episode.season)+"x"+str(episode.episode)+", skipping it", logger.DEBUG) + logger.log( + "Episode " + title + " isn't " + str(episode.season) + "x" + str(episode.episode) + ", skipping it", + logger.DEBUG) continue quality = self.getQuality(item, episode.show.anime) if not episode.show.wantEpisode(episode.season, episode.episode, quality, manualSearch): - logger.log(u"Ignoring result "+title+" because we don't want an episode that is "+Quality.qualityStrings[quality], logger.DEBUG) + logger.log( + u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[ + quality], logger.DEBUG) continue logger.log(u"Found result " + title + " at " + url, logger.DEBUG) @@ -181,17 +190,15 @@ class NyaaProvider(generic.TorrentProvider): def _extract_name_from_filename(self, filename): name_regex = '(.*?)\.?(\[.*]|\d+\.TPB)\.torrent$' - 
logger.log(u"Comparing "+name_regex+" against "+filename, logger.DEBUG) + logger.log(u"Comparing " + name_regex + " against " + filename, logger.DEBUG) match = re.match(name_regex, filename, re.I) if match: return match.group(1) return None - + class NyaaCache(tvcache.TVCache): - def __init__(self, provider): - tvcache.TVCache.__init__(self, provider) # only poll NyaaTorrents every 15 minutes max @@ -199,30 +206,30 @@ class NyaaCache(tvcache.TVCache): def _getRSSData(self): - params = { - "page" : 'rss', # Use RSS page - "order" : '1' #Sort Descending By Date - } - - url = self.provider.url + '?' + urllib.urlencode(params) + "page": 'rss', # Use RSS page + "order": '1' #Sort Descending By Date + } - logger.log(u"NyaaTorrents cache update URL: "+ url, logger.DEBUG) + url = self.provider.url + '?' + urllib.urlencode(params) + + logger.log(u"NyaaTorrents cache update URL: " + url, logger.DEBUG) data = self.provider.getURL(url) return data def _parseItem(self, item): - (title, url) = self.provider._get_title_and_url(item) if not title or not url: - logger.log(u"The XML returned from the NyaaTorrents RSS feed is incomplete, this result is unusable", logger.ERROR) + logger.log(u"The XML returned from the NyaaTorrents RSS feed is incomplete, this result is unusable", + logger.ERROR) return None - logger.log(u"Adding item from RSS to cache: "+title, logger.DEBUG) + logger.log(u"Adding item from RSS to cache: " + title, logger.DEBUG) return self._addCacheEntry(title, url) + provider = NyaaProvider() \ No newline at end of file diff --git a/sickbeard/providers/nzbs_org_old.py b/sickbeard/providers/nzbs_org_old.py index 33db0b65..a53f3373 100644 --- a/sickbeard/providers/nzbs_org_old.py +++ b/sickbeard/providers/nzbs_org_old.py @@ -34,131 +34,135 @@ from sickbeard import exceptions, logger from sickbeard import tvcache from sickbeard.exceptions import ex + class NZBsProvider(generic.NZBProvider): + def __init__(self): - def __init__(self): + generic.NZBProvider.__init__(self, "NZBs.org Old") - generic.NZBProvider.__init__(self, "NZBs.org Old") + self.supportsBacklog = True - self.supportsBacklog = True + self.cache = NZBsCache(self) - self.cache = NZBsCache(self) + self.url = 'https://secure.nzbs.org/' - self.url = 'https://secure.nzbs.org/' + def isEnabled(self): + return sickbeard.NZBS - def isEnabled(self): - return sickbeard.NZBS + def _checkAuth(self): + if sickbeard.NZBS_UID in (None, "") or sickbeard.NZBS_HASH in (None, ""): + raise exceptions.AuthException("NZBs.org authentication details are empty, check your config") - def _checkAuth(self): - if sickbeard.NZBS_UID in (None, "") or sickbeard.NZBS_HASH in (None, ""): - raise exceptions.AuthException("NZBs.org authentication details are empty, check your config") + def _get_season_search_strings(self, show, season, wantedEp, searchSeason=False): + return ['^' + x for x in show_name_helpers.makeSceneSeasonSearchString(show, season)] - def _get_season_search_strings(self, show, season, wantedEp, searchSeason=False): - return ['^'+x for x in show_name_helpers.makeSceneSeasonSearchString(show, season)] + def _get_episode_search_strings(self, ep_obj): + return ['^' + x for x in show_name_helpers.makeSceneSearchString(ep_obj)] - def _get_episode_search_strings(self, ep_obj): - return ['^'+x for x in show_name_helpers.makeSceneSearchString(ep_obj)] + def _doSearch(self, curString, show=None): - def _doSearch(self, curString, show=None): + curString = curString.replace('.', ' ') - curString = curString.replace('.', ' ') + params = {"action": "search", 
+ "q": curString.encode('utf-8'), + "dl": 1, + "i": sickbeard.NZBS_UID, + "h": sickbeard.NZBS_HASH, + "age": sickbeard.USENET_RETENTION, + "num": 100, + "type": 1} - params = {"action": "search", - "q": curString.encode('utf-8'), - "dl": 1, - "i": sickbeard.NZBS_UID, - "h": sickbeard.NZBS_HASH, - "age": sickbeard.USENET_RETENTION, - "num": 100, - "type": 1} + searchURL = self.url + "rss.php?" + urllib.urlencode(params) - searchURL = self.url + "rss.php?" + urllib.urlencode(params) + logger.log(u"Search string: " + searchURL, logger.DEBUG) - logger.log(u"Search string: " + searchURL, logger.DEBUG) + data = self.getURL(searchURL) - data = self.getURL(searchURL) + # Pause to avoid 503's + time.sleep(5) - # Pause to avoid 503's - time.sleep(5) + if data == None: + return [] - if data == None: - return [] + try: + parsedXML = parseString(data) + items = parsedXML.getElementsByTagName('item') + except Exception, e: + logger.log(u"Error trying to load NZBs.org RSS feed: " + ex(e), logger.ERROR) + return [] - try: - parsedXML = parseString(data) - items = parsedXML.getElementsByTagName('item') - except Exception, e: - logger.log(u"Error trying to load NZBs.org RSS feed: "+ex(e), logger.ERROR) - return [] + results = [] - results = [] + for curItem in items: + (title, url) = self._get_title_and_url(curItem) - for curItem in items: - (title, url) = self._get_title_and_url(curItem) + if not title or not url: + logger.log( + u"The XML returned from the NZBs.org RSS feed is incomplete, this result is unusable: " + data, + logger.ERROR) + continue - if not title or not url: - logger.log(u"The XML returned from the NZBs.org RSS feed is incomplete, this result is unusable: "+data, logger.ERROR) - continue + if "&i=" not in url and "&h=" not in url: + raise exceptions.AuthException( + "The NZBs.org result URL has no auth info which means your UID/hash are incorrect, check your config") - if "&i=" not in url and "&h=" not in url: - raise exceptions.AuthException("The NZBs.org result URL has no auth info which means your UID/hash are incorrect, check your config") + results.append(curItem) - results.append(curItem) + return results - return results + def findPropers(self, date=None): - def findPropers(self, date=None): + results = [] - results = [] + for curString in (".PROPER.", ".REPACK."): - for curString in (".PROPER.", ".REPACK."): + for curResult in self._doSearch(curString): - for curResult in self._doSearch(curString): + (title, url) = self._get_title_and_url(curResult) - (title, url) = self._get_title_and_url(curResult) + pubDate_node = curResult.getElementsByTagName('pubDate')[0] + pubDate = helpers.get_xml_text(pubDate_node) - pubDate_node = curResult.getElementsByTagName('pubDate')[0] - pubDate = helpers.get_xml_text(pubDate_node) + match = re.search('(\w{3}, \d{1,2} \w{3} \d{4} \d\d:\d\d:\d\d) [\+\-]\d{4}', pubDate) + if not match: + continue - match = re.search('(\w{3}, \d{1,2} \w{3} \d{4} \d\d:\d\d:\d\d) [\+\-]\d{4}', pubDate) - if not match: - continue + resultDate = datetime.datetime.strptime(match.group(1), "%a, %d %b %Y %H:%M:%S") - resultDate = datetime.datetime.strptime(match.group(1), "%a, %d %b %Y %H:%M:%S") + if date == None or resultDate > date: + results.append(classes.Proper(title, url, resultDate)) - if date == None or resultDate > date: - results.append(classes.Proper(title, url, resultDate)) + return results - return results class NZBsCache(tvcache.TVCache): + def __init__(self, provider): + tvcache.TVCache.__init__(self, provider) - def __init__(self, provider): + # only poll 
NZBs.org every 15 minutes max + self.minTime = 15 - tvcache.TVCache.__init__(self, provider) + def _getRSSData(self): + url = self.provider.url + 'rss.php?' + urlArgs = {'type': 1, + 'dl': 1, + 'num': 100, + 'i': sickbeard.NZBS_UID, + 'h': sickbeard.NZBS_HASH, + 'age': sickbeard.USENET_RETENTION} - # only poll NZBs.org every 15 minutes max - self.minTime = 15 + url += urllib.urlencode(urlArgs) - def _getRSSData(self): - url = self.provider.url + 'rss.php?' - urlArgs = {'type': 1, - 'dl': 1, - 'num': 100, - 'i': sickbeard.NZBS_UID, - 'h': sickbeard.NZBS_HASH, - 'age': sickbeard.USENET_RETENTION} + logger.log(u"NZBs cache update URL: " + url, logger.DEBUG) - url += urllib.urlencode(urlArgs) + data = self.provider.getURL(url) - logger.log(u"NZBs cache update URL: "+ url, logger.DEBUG) + return data - data = self.provider.getURL(url) + def _checkItemAuth(self, title, url): + if "&i=" not in url and "&h=" not in url: + raise exceptions.AuthException( + "The NZBs.org result URL has no auth info which means your UID/hash are incorrect, check your config") - return data - - def _checkItemAuth(self, title, url): - if "&i=" not in url and "&h=" not in url: - raise exceptions.AuthException("The NZBs.org result URL has no auth info which means your UID/hash are incorrect, check your config") provider = NZBsProvider() \ No newline at end of file diff --git a/sickbeard/providers/nzbsrus.py b/sickbeard/providers/nzbsrus.py index ef76ebf8..b02d581e 100644 --- a/sickbeard/providers/nzbsrus.py +++ b/sickbeard/providers/nzbsrus.py @@ -30,7 +30,6 @@ from sickbeard import tvcache, show_name_helpers class NZBsRUSProvider(generic.NZBProvider): - def __init__(self): generic.NZBProvider.__init__(self, "NZBs'R'US") self.cache = NZBsRUSCache(self) @@ -55,12 +54,12 @@ class NZBsRUSProvider(generic.NZBProvider): 'key': sickbeard.NZBSRUS_HASH, 'xml': 1, 'age': sickbeard.USENET_RETENTION, - 'lang0': 1, # English only from CouchPotato + 'lang0': 1, # English only from CouchPotato 'lang1': 1, 'lang3': 1, - 'c91': 1, # TV:HD - 'c104': 1, # TV:SD-x264 - 'c75': 1, # TV:XviD + 'c91': 1, # TV:HD + 'c104': 1, # TV:SD-x264 + 'c75': 1, # TV:XviD 'searchtext': search} if not params['age']: @@ -93,12 +92,11 @@ class NZBsRUSProvider(generic.NZBProvider): nzbID = element.find('id').text key = element.find('key').text url = self.url + 'nzbdownload_rss.php' + '/' + \ - nzbID + '/' + sickbeard.NZBSRUS_UID + '/' + key + '/' + nzbID + '/' + sickbeard.NZBSRUS_UID + '/' + key + '/' return (title, url) class NZBsRUSCache(tvcache.TVCache): - def __init__(self, provider): tvcache.TVCache.__init__(self, provider) # only poll NZBs'R'US every 15 minutes max @@ -119,4 +117,5 @@ class NZBsRUSCache(tvcache.TVCache): def _checkAuth(self, data): return data != 'Invalid Link' + provider = NZBsRUSProvider() diff --git a/sickbeard/providers/omgwtfnzbs.py b/sickbeard/providers/omgwtfnzbs.py index 71ab49ad..4025e7a8 100644 --- a/sickbeard/providers/omgwtfnzbs.py +++ b/sickbeard/providers/omgwtfnzbs.py @@ -40,7 +40,6 @@ except ImportError: class OmgwtfnzbsProvider(generic.NZBProvider): - def __init__(self): generic.NZBProvider.__init__(self, "omgwtfnzbs") self.cache = OmgwtfnzbsCache(self) @@ -52,7 +51,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider): def _checkAuth(self): - if not sickbeard.OMGWTFNZBS_USERNAME or not sickbeard.OMGWTFNZBS_APIKEY: + if not sickbeard.OMGWTFNZBS_USERNAME or not sickbeard.OMGWTFNZBS_APIKEY: raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.") return True @@ -73,8 +72,10 
@@ class OmgwtfnzbsProvider(generic.NZBProvider): description_text = parsedJSON.get('notice') if 'information is incorrect' in parsedJSON.get('notice'): - logger.log(u"Incorrect authentication credentials for " + self.name + " : " + str(description_text), logger.DEBUG) - raise AuthException("Your authentication credentials for " + self.name + " are incorrect, check your config.") + logger.log(u"Incorrect authentication credentials for " + self.name + " : " + str(description_text), + logger.DEBUG) + raise AuthException( + "Your authentication credentials for " + self.name + " are incorrect, check your config.") elif '0 results matched your terms' in parsedJSON.get('notice'): return True @@ -156,7 +157,6 @@ class OmgwtfnzbsProvider(generic.NZBProvider): class OmgwtfnzbsCache(tvcache.TVCache): - def __init__(self, provider): tvcache.TVCache.__init__(self, provider) self.minTime = 20 @@ -180,6 +180,7 @@ class OmgwtfnzbsCache(tvcache.TVCache): return data def _checkAuth(self, parsedXML): - return self.provider._checkAuthFromData(parsedXML) + return self.provider._checkAuthFromData(parsedXML) + provider = OmgwtfnzbsProvider() diff --git a/sickbeard/providers/publichd.py b/sickbeard/providers/publichd.py index 1a6233a1..f299ed17 100644 --- a/sickbeard/providers/publichd.py +++ b/sickbeard/providers/publichd.py @@ -43,8 +43,8 @@ from lib import requests from bs4 import BeautifulSoup from lib.unidecode import unidecode -class PublicHDProvider(generic.TorrentProvider): +class PublicHDProvider(generic.TorrentProvider): def __init__(self): generic.TorrentProvider.__init__(self, "PublicHD") @@ -81,10 +81,10 @@ class PublicHDProvider(generic.TorrentProvider): if searchSeason: search_string = {'Season': [], 'Episode': []} for show_name in set(allPossibleShowNames(show)): - ep_string = show_name +' S%02d' % int(season) #1) ShowName SXX -SXXE + ep_string = show_name + ' S%02d' % int(season) #1) ShowName SXX -SXXE search_string['Season'].append(ep_string) - ep_string = show_name+' Season ' + str(season) #2) ShowName Season X + ep_string = show_name + ' Season ' + str(season) #2) ShowName Season X search_string['Season'].append(ep_string) for ep_obj in wantedEp: @@ -106,17 +106,18 @@ class PublicHDProvider(generic.TorrentProvider): if ep_obj.show.air_by_date: for show_name in set(allPossibleShowNames(ep_obj.show)): - ep_string = sanitizeSceneName(show_name) +' '+ \ - str(ep_obj.airdate) +'|'+\ + ep_string = sanitizeSceneName(show_name) + ' ' + \ + str(ep_obj.airdate) + '|' + \ helpers.custom_strftime('%Y %b {S}', ep_obj.airdate) search_string['Episode'].append(ep_string) else: for show_name in set(allPossibleShowNames(ep_obj.show)): ep_string = sanitizeSceneName(show_name) + ' ' + \ - sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode} - + sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season, + 'episodenumber': ep_obj.episode} + for x in add_string.split('|'): - to_search = re.sub('\s+', ' ', ep_string + ' %s' %x) + to_search = re.sub('\s+', ' ', ep_string + ' %s' % x) search_string['Episode'].append(to_search) return [search_string] @@ -130,10 +131,12 @@ class PublicHDProvider(generic.TorrentProvider): for search_string in search_params[mode]: if mode == 'RSS': - searchURL = self.url + 'index.php?page=torrents&active=1&category=%s' %(';'.join(self.categories[mode])) - logger.log(u"PublicHD cache update URL: "+ searchURL, logger.DEBUG) + searchURL = self.url + 'index.php?page=torrents&active=1&category=%s' % ( + ';'.join(self.categories[mode])) 
+ logger.log(u"PublicHD cache update URL: " + searchURL, logger.DEBUG) else: - searchURL = self.searchurl %(urllib.quote(unidecode(search_string)), ';'.join(self.categories[mode])) + searchURL = self.searchurl % ( + urllib.quote(unidecode(search_string)), ';'.join(self.categories[mode])) logger.log(u"Search string: " + searchURL, logger.DEBUG) html = self.getURL(searchURL) @@ -143,19 +146,20 @@ class PublicHDProvider(generic.TorrentProvider): try: soup = BeautifulSoup(html, features=["html5lib", "permissive"]) - torrent_table = soup.find('table', attrs = {'id' : 'torrbg'}) + torrent_table = soup.find('table', attrs={'id': 'torrbg'}) torrent_rows = torrent_table.find_all('tr') if torrent_table else [] #Continue only if one Release is found - if len(torrent_rows)<2: - logger.log(u"The Data returned from " + self.name + " do not contains any torrent", logger.DEBUG) + if len(torrent_rows) < 2: + logger.log(u"The Data returned from " + self.name + " do not contains any torrent", + logger.DEBUG) continue for tr in torrent_rows[1:]: try: link = self.url + tr.find(href=re.compile('page=torrent-details'))['href'] - title = tr.find(lambda x: x.has_attr('title')).text.replace('_','.') + title = tr.find(lambda x: x.has_attr('title')).text.replace('_', '.') url = tr.find(href=re.compile('magnet+'))['href'] seeders = int(tr.find_all('td', {'class': 'header'})[4].text) leechers = int(tr.find_all('td', {'class': 'header'})[5].text) @@ -173,7 +177,8 @@ class PublicHDProvider(generic.TorrentProvider): items[mode].append(item) except Exception, e: - logger.log(u"Failed to parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR) + logger.log(u"Failed to parsing " + self.name + " Traceback: " + traceback.format_exc(), + logger.ERROR) #For each search mode sort all the items by seeders items[mode].sort(key=lambda tup: tup[3], reverse=True) @@ -187,7 +192,7 @@ class PublicHDProvider(generic.TorrentProvider): title, url, id, seeders, leechers = item if url: - url = url.replace('&','&') + url = url.replace('&', '&') return (title, url) @@ -196,16 +201,17 @@ class PublicHDProvider(generic.TorrentProvider): try: # Remove double-slashes from url parsed = list(urlparse.urlparse(url)) - parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one + parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one url = urlparse.urlunparse(parsed) r = requests.get(url, verify=False) except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e: - logger.log(u"Error loading "+self.name+" URL: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR) + logger.log(u"Error loading " + self.name + " URL: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR) return None if r.status_code != 200: - logger.log(self.name + u" page requested with url " + url +" returned status code is " + str(r.status_code) + ': ' + clients.http_error_code[r.status_code], logger.WARNING) + logger.log(self.name + u" page requested with url " + url + " returned status code is " + str( + r.status_code) + ': ' + clients.http_error_code[r.status_code], logger.WARNING) return None return r.content @@ -214,35 +220,36 @@ class PublicHDProvider(generic.TorrentProvider): """ Save the result to disk. 
""" - + torrent_hash = re.findall('urn:btih:([\w]{32,40})', result.url)[0].upper() - + if not torrent_hash: - logger.log("Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR) - return False - + logger.log("Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR) + return False + try: r = requests.get('http://torcache.net/torrent/' + torrent_hash + '.torrent') except Exception, e: logger.log("Unable to connect to Torcache: " + ex(e), logger.ERROR) return False - + if not r.status_code == 200: return False - - magnetFileName = ek.ek(os.path.join, sickbeard.TORRENT_DIR, helpers.sanitizeFileName(result.name) + '.' + self.providerType) + + magnetFileName = ek.ek(os.path.join, sickbeard.TORRENT_DIR, + helpers.sanitizeFileName(result.name) + '.' + self.providerType) magnetFileContent = r.content - try: + try: with open(magnetFileName, 'wb') as fileOut: fileOut.write(magnetFileContent) - + helpers.chmodAsParent(magnetFileName) - + except EnvironmentError, e: logger.log("Unable to save the file: " + ex(e), logger.ERROR) return False - + logger.log(u"Saved magnet link to " + magnetFileName + " ", logger.MESSAGE) return True @@ -250,12 +257,13 @@ class PublicHDProvider(generic.TorrentProvider): results = [] - sqlResults = db.DBConnection().select('SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' + - ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' + - ' WHERE e.airdate >= ' + str(search_date.toordinal()) + - ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' + - ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))' - ) + sqlResults = db.DBConnection().select( + 'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' + + ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' + + ' WHERE e.airdate >= ' + str(search_date.toordinal()) + + ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' + + ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))' + ) if not sqlResults: return [] @@ -272,7 +280,6 @@ class PublicHDProvider(generic.TorrentProvider): class PublicHDCache(tvcache.TVCache): - def __init__(self, provider): tvcache.TVCache.__init__(self, provider) @@ -318,4 +325,5 @@ class PublicHDCache(tvcache.TVCache): return self._addCacheEntry(title, url) + provider = PublicHDProvider() diff --git a/sickbeard/providers/rsstorrent.py b/sickbeard/providers/rsstorrent.py index 3cb1f2f5..1b21ae90 100644 --- a/sickbeard/providers/rsstorrent.py +++ b/sickbeard/providers/rsstorrent.py @@ -35,8 +35,8 @@ from lib import requests from bs4 import BeautifulSoup from lib.bencode import bdecode -class TorrentRssProvider(generic.TorrentProvider): +class TorrentRssProvider(generic.TorrentProvider): def __init__(self, name, url): generic.TorrentProvider.__init__(self, name) @@ -50,105 +50,106 @@ class TorrentRssProvider(generic.TorrentProvider): return self.name + '|' + self.url + '|' + str(int(self.enabled)) def imageName(self): - if ek.ek(os.path.isfile, ek.ek(os.path.join, sickbeard.PROG_DIR, 'data', 'images', 'providers', self.getID() + '.png')): - return self.getID() + '.png' + if ek.ek(os.path.isfile, + ek.ek(os.path.join, sickbeard.PROG_DIR, 'data', 'images', 'providers', self.getID() + '.png')): + return self.getID() + '.png' return 'torrentrss.png' def isEnabled(self): return self.enabled def _get_title_and_url(self, item): - + title, url = None, None self.cache._remove_namespace(item) 
title = helpers.get_xml_text(item.find('title')) - + attempt_list = [lambda: helpers.get_xml_text(item.find('magnetURI')), - + lambda: item.find('enclosure').get('url'), - + lambda: helpers.get_xml_text(item.find('link'))] - for cur_attempt in attempt_list: try: url = cur_attempt() except: continue - + if title and url: return (title, url) - + return (title, url) def validateRSS(self): - try: - + try: + data = self.cache._getRSSData() - + if not data: return (False, 'No data returned from url: ' + self.url) - + parsedXML = helpers.parse_xml(data) - + if not parsedXML: return (False, 'Unable to parse RSS, is it a real RSS? ') - + items = parsedXML.findall('.//item') - + if not items: return (False, 'No items found in the RSS feed ' + self.url) - + (title, url) = self._get_title_and_url(items[0]) - + if not title: return (False, 'Unable to get title from first item') - + if not url: return (False, 'Unable to get torrent url from first item') - + if url.startswith('magnet:') and re.search('urn:btih:([\w]{32,40})', url): return (True, 'RSS feed Parsed correctly') else: - + torrent_file = self.getURL(url) - try: + try: bdecode(torrent_file) except Exception, e: self.dumpHTML(torrent_file) return (False, 'Torrent link is not a valid torrent file: ' + ex(e)) - + return (True, 'RSS feed Parsed correctly') except Exception, e: return (False, 'Error when trying to load RSS: ' + ex(e)) def getURL(self, url, headers=None): - + if not self.session: self.session = requests.Session() - + try: - url = urljoin(url, urlparse(url).path.replace('//','/')) + url = urljoin(url, urlparse(url).path.replace('//', '/')) response = self.session.get(url, verify=False) except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e: - logger.log(u"Error loading "+self.name+" URL: " + ex(e), logger.ERROR) + logger.log(u"Error loading " + self.name + " URL: " + ex(e), logger.ERROR) return None - + if response.status_code != 200: - logger.log(self.name + u" page requested with url " + url +" returned status code is " + str(response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING) + logger.log(self.name + u" page requested with url " + url + " returned status code is " + str( + response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING) return None return response.content def dumpHTML(self, data): - + dumpName = ek.ek(os.path.join, sickbeard.CACHE_DIR, 'custom_torrent.html') - try: + try: fileOut = open(dumpName, 'wb') fileOut.write(data) fileOut.close() @@ -157,10 +158,10 @@ class TorrentRssProvider(generic.TorrentProvider): logger.log("Unable to save the file: " + ex(e), logger.ERROR) return False logger.log(u"Saved custom_torrent html dump " + dumpName + " ", logger.MESSAGE) - return True + return True + class TorrentRssCache(tvcache.TVCache): - def __init__(self, provider): tvcache.TVCache.__init__(self, provider) @@ -173,12 +174,12 @@ class TorrentRssCache(tvcache.TVCache): return data def _parseItem(self, item): - + (title, url) = self.provider._get_title_and_url(item) if not title or not url: logger.log(u"The XML returned from the RSS feed is incomplete, this result is unusable", logger.ERROR) return None - + logger.log(u"Adding item from RSS to cache: " + title, logger.DEBUG) return self._addCacheEntry(title, url) diff --git a/sickbeard/providers/scc.py b/sickbeard/providers/scc.py index 376b53b7..b87d519c 100644 --- a/sickbeard/providers/scc.py +++ b/sickbeard/providers/scc.py @@ -30,22 +30,22 @@ from sickbeard import db from 
sickbeard import classes from sickbeard import helpers from sickbeard import show_name_helpers -from sickbeard.common import Overview +from sickbeard.common import Overview from sickbeard.exceptions import ex from sickbeard import clients from lib import requests from bs4 import BeautifulSoup from lib.unidecode import unidecode -class SCCProvider(generic.TorrentProvider): - urls = {'base_url' : 'https://sceneaccess.eu', - 'login' : 'https://sceneaccess.eu/login', - 'detail' : 'https://www.sceneaccess.eu/details?id=%s', - 'search' : 'https://sceneaccess.eu/browse?search=%s&method=1&%s', - 'archive' : 'https://sceneaccess.eu/archive?search=%s&method=1&c26=26', - 'download' : 'https://www.sceneaccess.eu/%s', - } +class SCCProvider(generic.TorrentProvider): + urls = {'base_url': 'https://sceneaccess.eu', + 'login': 'https://sceneaccess.eu/login', + 'detail': 'https://www.sceneaccess.eu/details?id=%s', + 'search': 'https://sceneaccess.eu/browse?search=%s&method=1&%s', + 'archive': 'https://sceneaccess.eu/archive?search=%s&method=1&c26=26', + 'download': 'https://www.sceneaccess.eu/%s', + } def __init__(self): @@ -70,27 +70,27 @@ class SCCProvider(generic.TorrentProvider): def getQuality(self, item): quality = Quality.sceneQuality(item[0]) - return quality + return quality def _doLogin(self): login_params = {'username': sickbeard.SCC_USERNAME, 'password': sickbeard.SCC_PASSWORD, 'submit': 'come on in', - } + } self.session = requests.Session() try: response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False) except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e: - logger.log(u'Unable to connect to ' + self.name + ' provider: ' +ex(e), logger.ERROR) + logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR) return False if re.search('Username or password incorrect', response.text) \ - or re.search('SceneAccess \| Login', response.text) \ - or response.status_code == 401: - logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR) + or re.search('SceneAccess \| Login', response.text) \ + or response.status_code == 401: + logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR) return False return True @@ -106,7 +106,7 @@ class SCCProvider(generic.TorrentProvider): if searchSeason: search_string = {'Season': [], 'Episode': []} for show_name in set(show_name_helpers.allPossibleShowNames(show)): - ep_string = show_name +' S%02d' % int(season) #1) ShowName SXX + ep_string = show_name + ' S%02d' % int(season) #1) ShowName SXX search_string['Season'].append(ep_string) for ep_obj in wantedEp: @@ -126,16 +126,17 @@ class SCCProvider(generic.TorrentProvider): if ep_obj.show.air_by_date: for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)): - ep_string = show_name_helpers.sanitizeSceneName(show_name) +' '+ \ - str(ep_obj.airdate) +'|'+\ + ep_string = show_name_helpers.sanitizeSceneName(show_name) + ' ' + \ + str(ep_obj.airdate) + '|' + \ helpers.custom_strftime('%Y %b {S}', ep_obj.airdate) search_string['Episode'].append(ep_string) else: for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)): - ep_string = show_name_helpers.sanitizeSceneName(show_name) +' '+ \ - sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode} + ep_string = show_name_helpers.sanitizeSceneName(show_name) + ' ' + \ + sickbeard.config.naming_ep_type[2] % {'seasonnumber': 
ep_obj.season, + 'episodenumber': ep_obj.episode} - search_string['Episode'].append(re.sub('\s+', ' ', ep_string)) + search_string['Episode'].append(re.sub('\s+', ' ', ep_string)) return [search_string] @@ -152,10 +153,10 @@ class SCCProvider(generic.TorrentProvider): if isinstance(search_string, unicode): search_string = unidecode(search_string) - + if mode == 'Season': searchURL = self.urls['archive'] % (search_string) - else: + else: searchURL = self.urls['search'] % (search_string, self.categories) logger.log(u"Search string: " + searchURL, logger.DEBUG) @@ -167,29 +168,30 @@ class SCCProvider(generic.TorrentProvider): try: html = BeautifulSoup(data, features=["html5lib", "permissive"]) - torrent_table = html.find('table', attrs = {'id' : 'torrents-table'}) + torrent_table = html.find('table', attrs={'id': 'torrents-table'}) torrent_rows = torrent_table.find_all('tr') if torrent_table else [] #Continue only if one Release is found - if len(torrent_rows)<2: - logger.log(u"The Data returned from " + self.name + " do not contains any torrent", logger.DEBUG) + if len(torrent_rows) < 2: + logger.log(u"The Data returned from " + self.name + " do not contains any torrent", + logger.DEBUG) continue for result in torrent_table.find_all('tr')[1:]: try: - link = result.find('td', attrs = {'class' : 'ttr_name'}).find('a') - url = result.find('td', attrs = {'class' : 'td_dl'}).find('a') + link = result.find('td', attrs={'class': 'ttr_name'}).find('a') + url = result.find('td', attrs={'class': 'td_dl'}).find('a') title = link.string download_url = self.urls['download'] % url['href'] id = int(link['href'].replace('details?id=', '')) - seeders = int(result.find('td', attrs = {'class' : 'ttr_seeders'}).string) - leechers = int(result.find('td', attrs = {'class' : 'ttr_leechers'}).string) + seeders = int(result.find('td', attrs={'class': 'ttr_seeders'}).string) + leechers = int(result.find('td', attrs={'class': 'ttr_leechers'}).string) except (AttributeError, TypeError): continue if mode != 'RSS' and seeders == 0: - continue + continue if not title or not download_url: continue @@ -200,13 +202,13 @@ class SCCProvider(generic.TorrentProvider): items[mode].append(item) except Exception, e: - logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR) + logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR) #For each search mode sort all the items by seeders - items[mode].sort(key=lambda tup: tup[3], reverse=True) + items[mode].sort(key=lambda tup: tup[3], reverse=True) + + results += items[mode] - results += items[mode] - return results def _get_title_and_url(self, item): @@ -214,7 +216,7 @@ class SCCProvider(generic.TorrentProvider): title, url, id, seeders, leechers = item if url: - url = str(url).replace('&','&') + url = str(url).replace('&', '&') return (title, url) @@ -229,16 +231,17 @@ class SCCProvider(generic.TorrentProvider): try: # Remove double-slashes from url parsed = list(urlparse.urlparse(url)) - parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one + parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one url = urlparse.urlunparse(parsed) response = self.session.get(url, verify=False) except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e: - logger.log(u"Error loading "+self.name+" URL: " + ex(e), logger.ERROR) + logger.log(u"Error loading " + self.name + " URL: " + ex(e), logger.ERROR) return None if response.status_code != 200: - 
logger.log(self.name + u" page requested with url " + url +" returned status code is " + str(response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING) + logger.log(self.name + u" page requested with url " + url + " returned status code is " + str( + response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING) return None return response.content @@ -247,12 +250,13 @@ class SCCProvider(generic.TorrentProvider): results = [] - sqlResults = db.DBConnection().select('SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' + - ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' + - ' WHERE e.airdate >= ' + str(search_date.toordinal()) + - ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' + - ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))' - ) + sqlResults = db.DBConnection().select( + 'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' + + ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' + + ' WHERE e.airdate >= ' + str(search_date.toordinal()) + + ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' + + ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))' + ) if not sqlResults: return [] @@ -269,7 +273,6 @@ class SCCProvider(generic.TorrentProvider): class SCCCache(tvcache.TVCache): - def __init__(self, provider): tvcache.TVCache.__init__(self, provider) @@ -284,12 +287,12 @@ class SCCCache(tvcache.TVCache): search_params = {'RSS': ['']} rss_results = self.provider._doSearch(search_params) - + if rss_results: self.setLastUpdate() else: return [] - + logger.log(u"Clearing " + self.provider.name + " cache and updating with new information") self._clearCache() @@ -315,4 +318,5 @@ class SCCCache(tvcache.TVCache): return self._addCacheEntry(title, url) + provider = SCCProvider() diff --git a/sickbeard/providers/thepiratebay.py b/sickbeard/providers/thepiratebay.py index 4b8a3bbd..721dc5f3 100644 --- a/sickbeard/providers/thepiratebay.py +++ b/sickbeard/providers/thepiratebay.py @@ -34,52 +34,52 @@ from sickbeard import logger from sickbeard import tvcache from sickbeard import helpers from sickbeard.show_name_helpers import allPossibleShowNames, sanitizeSceneName -from sickbeard.common import Overview +from sickbeard.common import Overview from sickbeard.exceptions import ex from sickbeard import encodingKludge as ek from lib import requests from lib.unidecode import unidecode proxy_dict = { - 'Getprivate.eu (NL)' : 'http://getprivate.eu/', - '15bb51.info (US)' : 'http://15bb51.info/', - 'Hideme.nl (NL)' : 'http://hideme.nl/', - 'Proxite.eu (DE)' :'http://proxite.eu/', - 'Webproxy.cz (CZ)' : 'http://webproxy.cz/', - '2me2u (CZ)' : 'http://2me2u.me/', - 'Interproxy.net (EU)': 'http://interproxy.net/', - 'Unblockersurf.info (DK)' : 'http://unblockersurf.info', - 'Hiload.org (NL)' : 'http://hiload.org', - } + 'Getprivate.eu (NL)': 'http://getprivate.eu/', + '15bb51.info (US)': 'http://15bb51.info/', + 'Hideme.nl (NL)': 'http://hideme.nl/', + 'Proxite.eu (DE)': 'http://proxite.eu/', + 'Webproxy.cz (CZ)': 'http://webproxy.cz/', + '2me2u (CZ)': 'http://2me2u.me/', + 'Interproxy.net (EU)': 'http://interproxy.net/', + 'Unblockersurf.info (DK)': 'http://unblockersurf.info', + 'Hiload.org (NL)': 'http://hiload.org', +} + class ThePirateBayProvider(generic.TorrentProvider): - def __init__(self): generic.TorrentProvider.__init__(self, "ThePirateBay") 
- + self.supportsBacklog = True self.cache = ThePirateBayCache(self) - - self.proxy = ThePirateBayWebproxy() - + + self.proxy = ThePirateBayWebproxy() + self.url = 'http://pirateproxy.net/' self.searchurl = self.url + 'search/%s/0/7/200' # order by seed - self.re_title_url = '/torrent/(?P\d+)/(?P.*?)//1".+?(?P<url>magnet.*?)//1".+?(?P<seeders>\d+)</td>.+?(?P<leechers>\d+)</td>' + self.re_title_url = '/torrent/(?P<id>\d+)/(?P<title>.*?)//1".+?(?P<url>magnet.*?)//1".+?(?P<seeders>\d+)</td>.+?(?P<leechers>\d+)</td>' def isEnabled(self): return sickbeard.THEPIRATEBAY - + def imageName(self): return 'thepiratebay.png' - + def getQuality(self, item): - + quality = Quality.sceneQuality(item[0]) - return quality + return quality def _reverseQuality(self, quality): @@ -88,64 +88,67 @@ class ThePirateBayProvider(generic.TorrentProvider): if quality == Quality.SDTV: quality_string = 'HDTV x264' if quality == Quality.SDDVD: - quality_string = 'DVDRIP' - elif quality == Quality.HDTV: + quality_string = 'DVDRIP' + elif quality == Quality.HDTV: quality_string = '720p HDTV x264' elif quality == Quality.FULLHDTV: - quality_string = '1080p HDTV x264' + quality_string = '1080p HDTV x264' elif quality == Quality.RAWHDTV: quality_string = '1080i HDTV mpeg2' elif quality == Quality.HDWEBDL: quality_string = '720p WEB-DL h264' elif quality == Quality.FULLHDWEBDL: - quality_string = '1080p WEB-DL h264' + quality_string = '1080p WEB-DL h264' elif quality == Quality.HDBLURAY: quality_string = '720p Bluray x264' elif quality == Quality.FULLHDBLURAY: - quality_string = '1080p Bluray x264' - + quality_string = '1080p Bluray x264' + return quality_string - def _find_season_quality(self,title,torrent_id, ep_number): + def _find_season_quality(self, title, torrent_id, ep_number): """ Return the modified title of a Season Torrent with the quality found inspecting torrent file list """ mediaExtensions = ['avi', 'mkv', 'wmv', 'divx', 'vob', 'dvr-ms', 'wtv', 'ts' - 'ogv', 'rar', 'zip', 'mp4'] - - quality = Quality.UNKNOWN - + 'ogv', 'rar', 'zip', 'mp4'] + + quality = Quality.UNKNOWN + fileName = None - + fileURL = self.proxy._buildURL(self.url + 'ajax_details_filelist.php?id=' + str(torrent_id)) - + data = self.getURL(fileURL) - + if not data: return None - - filesList = re.findall('<td.+>(.*?)</td>',data) - - if not filesList: + + filesList = re.findall('<td.+>(.*?)</td>', data) + + if not filesList: logger.log(u"Unable to get the torrent file list for " + title, logger.ERROR) - + videoFiles = filter(lambda x: x.rpartition(".")[2].lower() in mediaExtensions, filesList) #Filtering SingleEpisode/MultiSeason Torrent - if len(videoFiles) < ep_number or len(videoFiles) > float(ep_number * 1.1 ): - logger.log(u"Result " + title + " have " + str(ep_number) + " episode and episodes retrived in torrent are " + str(len(videoFiles)), logger.DEBUG) - logger.log(u"Result " + title + " Seem to be a Single Episode or MultiSeason torrent, skipping result...", logger.DEBUG) + if len(videoFiles) < ep_number or len(videoFiles) > float(ep_number * 1.1): + logger.log( + u"Result " + title + " have " + str(ep_number) + " episode and episodes retrived in torrent are " + str( + len(videoFiles)), logger.DEBUG) + logger.log(u"Result " + title + " Seem to be a Single Episode or MultiSeason torrent, skipping result...", + logger.DEBUG) return None - + if Quality.sceneQuality(title) != Quality.UNKNOWN: return title - + for fileName in videoFiles: quality = Quality.sceneQuality(os.path.basename(fileName)) if quality != Quality.UNKNOWN: break - if 
fileName!=None and quality == Quality.UNKNOWN: - quality = Quality.assumeQuality(os.path.basename(fileName)) + if fileName != None and quality == Quality.UNKNOWN: + quality = Quality.assumeQuality(os.path.basename(fileName)) if quality == Quality.UNKNOWN: logger.log(u"Unable to obtain a Season Quality for " + title, logger.DEBUG) @@ -156,12 +159,13 @@ class ThePirateBayProvider(generic.TorrentProvider): parse_result = myParser.parse(fileName, True) except InvalidNameException: return None - + logger.log(u"Season quality for " + title + " is " + Quality.qualityStrings[quality], logger.DEBUG) - - if parse_result.series_name and parse_result.season_number: - title = parse_result.series_name + ' S%02d' % int(parse_result.season_number) + ' ' + self._reverseQuality(quality) - + + if parse_result.series_name and parse_result.season_number: + title = parse_result.series_name + ' S%02d' % int(parse_result.season_number) + ' ' + self._reverseQuality( + quality) + return title def _get_season_search_strings(self, show, season, wantedEp, searchSeason=False): @@ -176,10 +180,10 @@ class ThePirateBayProvider(generic.TorrentProvider): if searchSeason: search_string = {'Season': [], 'Episode': []} for show_name in set(allPossibleShowNames(show)): - ep_string = show_name + ' S%02d' % int(season) #1) ShowName SXX + ep_string = show_name + ' S%02d' % int(season) #1) ShowName SXX search_string['Season'].append(ep_string) - ep_string = show_name + ' Season ' + str(season) + ' -Ep*' #2) ShowName Season X + ep_string = show_name + ' Season ' + str(season) + ' -Ep*' #2) ShowName Season X search_string['Season'].append(ep_string) for ep_obj in wantedEp: @@ -191,28 +195,31 @@ class ThePirateBayProvider(generic.TorrentProvider): return [search_string] def _get_episode_search_strings(self, ep_obj, add_string=''): - + search_string = {'Episode': []} - + if not ep_obj: return [] - + self.show = ep_obj.show - + if ep_obj.show.air_by_date: for show_name in set(allPossibleShowNames(ep_obj.show)): - ep_string = sanitizeSceneName(show_name) +' '+ \ - str(ep_obj.airdate) +'|'+\ + ep_string = sanitizeSceneName(show_name) + ' ' + \ + str(ep_obj.airdate) + '|' + \ helpers.custom_strftime('%Y %b {S}', ep_obj.airdate) search_string['Episode'].append(ep_string) else: for show_name in set(allPossibleShowNames(ep_obj.show)): ep_string = sanitizeSceneName(show_name) + ' ' + \ - sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode} + '|' + \ - sickbeard.config.naming_ep_type[0] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode} + '|' + \ - sickbeard.config.naming_ep_type[3] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode} + sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season, + 'episodenumber': ep_obj.episode} + '|' + \ + sickbeard.config.naming_ep_type[0] % {'seasonnumber': ep_obj.season, + 'episodenumber': ep_obj.episode} + '|' + \ + sickbeard.config.naming_ep_type[3] % {'seasonnumber': ep_obj.season, + 'episodenumber': ep_obj.episode} - ep_string += ' %s' %add_string + ep_string += ' %s' % add_string search_string['Episode'].append(re.sub('\s+', ' ', ep_string)) @@ -227,7 +234,7 @@ class ThePirateBayProvider(generic.TorrentProvider): for search_string in search_params[mode]: if mode != 'RSS': - searchURL = self.proxy._buildURL(self.searchurl %(urllib.quote(unidecode(search_string)))) + searchURL = self.proxy._buildURL(self.searchurl % (urllib.quote(unidecode(search_string)))) else: searchURL = self.proxy._buildURL(self.url + 
'tv/latest/') @@ -240,10 +247,11 @@ class ThePirateBayProvider(generic.TorrentProvider): re_title_url = self.proxy._buildRE(self.re_title_url) #Extracting torrent information from data returned by searchURL - match = re.compile(re_title_url, re.DOTALL ).finditer(urllib.unquote(data)) + match = re.compile(re_title_url, re.DOTALL).finditer(urllib.unquote(data)) for torrent in match: - title = torrent.group('title').replace('_','.')#Do not know why but SickBeard skip release with '_' in name + title = torrent.group('title').replace('_', + '.') #Do not know why but SickBeard skip release with '_' in name url = torrent.group('url') id = int(torrent.group('id')) seeders = int(torrent.group('seeders')) @@ -251,17 +259,18 @@ class ThePirateBayProvider(generic.TorrentProvider): #Filter unseeded torrent if mode != 'RSS' and seeders == 0: - continue + continue - #Accept Torrent only from Good People for every Episode Search - if sickbeard.THEPIRATEBAY_TRUSTED and re.search('(VIP|Trusted|Helper)',torrent.group(0))== None: - logger.log(u"ThePirateBay Provider found result " + torrent.group('title') + " but that doesn't seem like a trusted result so I'm ignoring it", logger.DEBUG) + #Accept Torrent only from Good People for every Episode Search + if sickbeard.THEPIRATEBAY_TRUSTED and re.search('(VIP|Trusted|Helper)', torrent.group(0)) == None: + logger.log(u"ThePirateBay Provider found result " + torrent.group( + 'title') + " but that doesn't seem like a trusted result so I'm ignoring it", logger.DEBUG) continue #Check number video files = episode in season and find the real Quality for full season torrent analyzing files in torrent if mode == 'Season': ep_number = int(len(search_params['Episode']) / len(set(allPossibleShowNames(self.show)))) - title = self._find_season_quality(title,id, ep_number) + title = self._find_season_quality(title, id, ep_number) if not title or not url: continue @@ -271,18 +280,18 @@ class ThePirateBayProvider(generic.TorrentProvider): items[mode].append(item) #For each search mode sort all the items by seeders - items[mode].sort(key=lambda tup: tup[3], reverse=True) + items[mode].sort(key=lambda tup: tup[3], reverse=True) - results += items[mode] + results += items[mode] return results def _get_title_and_url(self, item): - + title, url, id, seeders, leechers = item - + if url: - url = url.replace('&','&') + url = url.replace('&', '&') return (title, url) @@ -301,11 +310,12 @@ class ThePirateBayProvider(generic.TorrentProvider): try: r = requests.get(url, headers=headers) except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e: - logger.log(u"Error loading "+self.name+" URL: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR) + logger.log(u"Error loading " + self.name + " URL: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR) return None if r.status_code != 200: - logger.log(self.name + u" page requested with url " + url +" returned status code is " + str(r.status_code) + ': ' + requests.clients.http_error_code[r.status_code], logger.WARNING) + logger.log(self.name + u" page requested with url " + url + " returned status code is " + str( + r.status_code) + ': ' + requests.clients.http_error_code[r.status_code], logger.WARNING) return None return r.content @@ -314,35 +324,36 @@ class ThePirateBayProvider(generic.TorrentProvider): """ Save the result to disk. 
""" - + torrent_hash = re.findall('urn:btih:([\w]{32,40})', result.url)[0].upper() - + if not torrent_hash: - logger.log("Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR) - return False - + logger.log("Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR) + return False + try: r = requests.get('http://torcache.net/torrent/' + torrent_hash + '.torrent') except Exception, e: logger.log("Unable to connect to Torcache: " + ex(e), logger.ERROR) return False - + if not r.status_code == 200: return False - - magnetFileName = ek.ek(os.path.join, sickbeard.TORRENT_DIR, helpers.sanitizeFileName(result.name) + '.' + self.providerType) + + magnetFileName = ek.ek(os.path.join, sickbeard.TORRENT_DIR, + helpers.sanitizeFileName(result.name) + '.' + self.providerType) magnetFileContent = r.content - try: + try: with open(magnetFileName, 'wb') as fileOut: fileOut.write(magnetFileContent) - + helpers.chmodAsParent(magnetFileName) - + except EnvironmentError, e: logger.log("Unable to save the file: " + ex(e), logger.ERROR) return False - + logger.log(u"Saved magnet link to " + magnetFileName + " ", logger.MESSAGE) return True @@ -350,12 +361,13 @@ class ThePirateBayProvider(generic.TorrentProvider): results = [] - sqlResults = db.DBConnection().select('SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' + - ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' + - ' WHERE e.airdate >= ' + str(search_date.toordinal()) + - ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' + - ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))' - ) + sqlResults = db.DBConnection().select( + 'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' + + ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' + + ' WHERE e.airdate >= ' + str(search_date.toordinal()) + + ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' + + ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))' + ) if not sqlResults: return [] @@ -372,7 +384,6 @@ class ThePirateBayProvider(generic.TorrentProvider): class ThePirateBayCache(tvcache.TVCache): - def __init__(self, provider): tvcache.TVCache.__init__(self, provider) @@ -387,12 +398,12 @@ class ThePirateBayCache(tvcache.TVCache): search_params = {'RSS': ['rss']} rss_results = self.provider._doSearch(search_params) - + if rss_results: self.setLastUpdate() else: return [] - + logger.log(u"Clearing " + self.provider.name + " cache and updating with new information") self._clearCache() @@ -418,35 +429,36 @@ class ThePirateBayCache(tvcache.TVCache): return self._addCacheEntry(title, url) + class ThePirateBayWebproxy: - def __init__(self): - self.Type = 'GlypeProxy' - self.param = 'browse.php?u=' + self.Type = 'GlypeProxy' + self.param = 'browse.php?u=' self.option = '&b=32' - + def isEnabled(self): - """ Return True if we Choose to call TPB via Proxy """ + """ Return True if we Choose to call TPB via Proxy """ return sickbeard.THEPIRATEBAY_PROXY - + def getProxyURL(self): """ Return the Proxy URL Choosen via Provider Setting """ return str(sickbeard.THEPIRATEBAY_PROXY_URL) - - def _buildURL(self,url): - """ Return the Proxyfied URL of the page """ + + def _buildURL(self, url): + """ Return the Proxyfied URL of the page """ if self.isEnabled(): url = self.getProxyURL() + self.param + url + self.option - - return url - def _buildRE(self,regx): + return url + + def _buildRE(self, 
regx): """ Return the Proxyfied RE string """ if self.isEnabled(): - regx = re.sub('//1',self.option,regx).replace('&','&') + regx = re.sub('//1', self.option, regx).replace('&', '&') else: - regx = re.sub('//1','',regx) + regx = re.sub('//1', '', regx) + + return regx + - return regx - provider = ThePirateBayProvider() diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py index cd23c321..c0a46352 100644 --- a/sickbeard/providers/torrentday.py +++ b/sickbeard/providers/torrentday.py @@ -29,86 +29,87 @@ from sickbeard import db from sickbeard import classes from sickbeard import helpers from sickbeard import show_name_helpers -from sickbeard.common import Overview +from sickbeard.common import Overview from sickbeard.exceptions import ex from sickbeard import clients from lib import requests from bs4 import BeautifulSoup from lib.unidecode import unidecode -class TorrentDayProvider(generic.TorrentProvider): - urls = {'base_url' : 'http://www.torrentday.com', - 'login' : 'http://www.torrentday.com/torrents/', - 'search' : 'http://www.torrentday.com/V3/API/API.php', +class TorrentDayProvider(generic.TorrentProvider): + urls = {'base_url': 'http://www.torrentday.com', + 'login': 'http://www.torrentday.com/torrents/', + 'search': 'http://www.torrentday.com/V3/API/API.php', 'download': 'http://www.torrentday.com/download.php/%s/%s' - } + } def __init__(self): generic.TorrentProvider.__init__(self, "TorrentDay") - + self.supportsBacklog = True self.cache = TorrentDayCache(self) - + self.url = self.urls['base_url'] - + self.session = requests.Session() - + self.cookies = None - self.categories = {'Season': {'c14':1}, 'Episode': {'c2':1, 'c26':1, 'c7':1, 'c24':1}, 'RSS': {'c2':1, 'c26':1, 'c7':1, 'c24':1, 'c14':1}} + self.categories = {'Season': {'c14': 1}, 'Episode': {'c2': 1, 'c26': 1, 'c7': 1, 'c24': 1}, + 'RSS': {'c2': 1, 'c26': 1, 'c7': 1, 'c24': 1, 'c14': 1}} def isEnabled(self): return sickbeard.TORRENTDAY - + def imageName(self): return 'torrentday.png' - + def getQuality(self, item): - + quality = Quality.sceneQuality(item[0]) - return quality + return quality def _doLogin(self): if any(requests.utils.dict_from_cookiejar(self.session.cookies).values()): return True - + if sickbeard.TORRENTDAY_UID and sickbeard.TORRENTDAY_HASH: - + requests.utils.add_dict_to_cookiejar(self.session.cookies, self.cookies) - - else: + + else: login_params = {'username': sickbeard.TORRENTDAY_USERNAME, 'password': sickbeard.TORRENTDAY_PASSWORD, - 'submit.x': 0, + 'submit.x': 0, 'submit.y': 0 - } - + } + try: - response = self.session.post(self.urls['login'], data=login_params, timeout=30) + response = self.session.post(self.urls['login'], data=login_params, timeout=30) except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e: logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR) return False - + if re.search('You tried too often', response.text): logger.log(u'Too many login access for ' + self.name + ', can''t retrive any data', logger.ERROR) return False - + if response.status_code == 401: - logger.log(u'Invalid username or password for ' + self.name + ', Check your settings!', logger.ERROR) + logger.log(u'Invalid username or password for ' + self.name + ', Check your settings!', logger.ERROR) return False - + sickbeard.TORRENTDAY_UID = requests.utils.dict_from_cookiejar(self.session.cookies)['uid'] sickbeard.TORRENTDAY_HASH = requests.utils.dict_from_cookiejar(self.session.cookies)['pass'] - + self.cookies = {'uid': 
sickbeard.TORRENTDAY_UID, 'pass': sickbeard.TORRENTDAY_HASH - } - + } + return True def _get_season_search_strings(self, show, season, wantedEp, searchSeason=False): @@ -123,7 +124,7 @@ class TorrentDayProvider(generic.TorrentProvider): if searchSeason: search_string = {'Season': [], 'Episode': []} for show_name in set(show_name_helpers.allPossibleShowNames(show)): - ep_string = show_name +' S%02d' % int(season) #1) ShowName SXX + ep_string = show_name + ' S%02d' % int(season) #1) ShowName SXX search_string['Season'].append(ep_string) for ep_obj in wantedEp: @@ -135,48 +136,50 @@ class TorrentDayProvider(generic.TorrentProvider): return [search_string] def _get_episode_search_strings(self, ep_obj, add_string=''): - + search_string = {'Episode': []} - + if not ep_obj: return [] - + if ep_obj.show.air_by_date: for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)): - ep_string = show_name_helpers.sanitizeSceneName(show_name) +' '+ \ - str(ep_obj.airdate) +'|'+\ + ep_string = show_name_helpers.sanitizeSceneName(show_name) + ' ' + \ + str(ep_obj.airdate) + '|' + \ helpers.custom_strftime('%Y %b {S}', ep_obj.airdate) search_string['Episode'].append(ep_string) else: for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)): - ep_string = show_name_helpers.sanitizeSceneName(show_name) +' '+ \ - sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode} + ep_string = show_name_helpers.sanitizeSceneName(show_name) + ' ' + \ + sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season, + 'episodenumber': ep_obj.episode} search_string['Episode'].append(re.sub('\s+', ' ', ep_string)) - + return [search_string] def _doSearch(self, search_params): - + results = [] items = {'Season': [], 'Episode': [], 'RSS': []} freeleech = '&free=on' if sickbeard.TORRENTDAY_FREELEECH else '' - + if not self._doLogin(): - return [] - + return [] + for mode in search_params.keys(): for search_string in search_params[mode]: logger.log(u"Search string: " + search_string, logger.DEBUG) - + search_string = '+'.join(search_string.split()) - - post_data = dict({'/browse.php?' 
: None,'cata':'yes','jxt':8,'jxw':'b','search':search_string}, **self.categories[mode]) - + + post_data = dict({'/browse.php?': None, 'cata': 'yes', 'jxt': 8, 'jxw': 'b', 'search': search_string}, + **self.categories[mode]) + if sickbeard.TORRENTDAY_FREELEECH: - post_data.update({'free':'on'}) + post_data.update({'free': 'on'}) data = self.session.post(self.urls['search'], data=post_data).json() @@ -186,43 +189,43 @@ class TorrentDayProvider(generic.TorrentProvider): continue for torrent in torrents: - + title = re.sub(r"\[.*\=.*\].*\[/.*\]", "", torrent['name']) - url = self.urls['download'] %( torrent['id'], torrent['fname'] ) + url = self.urls['download'] % ( torrent['id'], torrent['fname'] ) seeders = int(torrent['seed']) leechers = int(torrent['leech']) - + if mode != 'RSS' and seeders == 0: continue - + if not title or not url: continue item = title, url, seeders, leechers items[mode].append(item) - results += items[mode] - + results += items[mode] + return results def _get_title_and_url(self, item): - + title, url = item[0], item[1] - + if url: - url = str(url).replace('&','&') + url = str(url).replace('&', '&') return (title, url) - def getURL(self, url, headers=None): + def getURL(self, url, headers=None): if not self.session: self._doLogin() - + try: # Remove double-slashes from url parsed = list(urlparse.urlparse(url)) - parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one + parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one url = urlparse.urlunparse(parsed) response = self.session.get(url) @@ -231,7 +234,8 @@ class TorrentDayProvider(generic.TorrentProvider): return None if response.status_code != 200: - logger.log(self.name + u" page requested with url " + url +" returned status code is " + str(response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING) + logger.log(self.name + u" page requested with url " + url + " returned status code is " + str( + response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING) return None return response.content @@ -240,12 +244,13 @@ class TorrentDayProvider(generic.TorrentProvider): results = [] - sqlResults = db.DBConnection().select('SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' + - ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' + - ' WHERE e.airdate >= ' + str(search_date.toordinal()) + - ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' + - ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))' - ) + sqlResults = db.DBConnection().select( + 'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' + + ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' + + ' WHERE e.airdate >= ' + str(search_date.toordinal()) + + ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' + + ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))' + ) if not sqlResults: return [] @@ -262,7 +267,6 @@ class TorrentDayProvider(generic.TorrentProvider): class TorrentDayCache(tvcache.TVCache): - def __init__(self, provider): tvcache.TVCache.__init__(self, provider) @@ -277,12 +281,12 @@ class TorrentDayCache(tvcache.TVCache): search_params = {'RSS': ['']} rss_results = self.provider._doSearch(search_params) - + if rss_results: self.setLastUpdate() else: return [] - + logger.log(u"Clearing " + self.provider.name + " cache and updating with new information") 
self._clearCache() @@ -296,7 +300,7 @@ class TorrentDayCache(tvcache.TVCache): if len(cl) > 0: myDB = self._getDB() myDB.mass_action(cl) - + def _parseItem(self, item): (title, url) = item @@ -306,6 +310,7 @@ class TorrentDayCache(tvcache.TVCache): logger.log(u"Adding item to cache: " + title, logger.DEBUG) - return self._addCacheEntry(title, url) + return self._addCacheEntry(title, url) + provider = TorrentDayProvider() diff --git a/sickbeard/providers/torrentleech.py b/sickbeard/providers/torrentleech.py index 770beaf9..45a095c3 100644 --- a/sickbeard/providers/torrentleech.py +++ b/sickbeard/providers/torrentleech.py @@ -29,46 +29,46 @@ from sickbeard import db from sickbeard import classes from sickbeard import helpers from sickbeard import show_name_helpers -from sickbeard.common import Overview +from sickbeard.common import Overview from sickbeard.exceptions import ex from sickbeard import clients from lib import requests from bs4 import BeautifulSoup from lib.unidecode import unidecode -class TorrentLeechProvider(generic.TorrentProvider): - urls = {'base_url' : 'http://torrentleech.org/', - 'login' : 'http://torrentleech.org/user/account/login/', - 'detail' : 'http://torrentleech.org/torrent/%s', - 'search' : 'http://torrentleech.org/torrents/browse/index/query/%s/categories/%s', - 'download' : 'http://torrentleech.org%s', - } +class TorrentLeechProvider(generic.TorrentProvider): + urls = {'base_url': 'http://torrentleech.org/', + 'login': 'http://torrentleech.org/user/account/login/', + 'detail': 'http://torrentleech.org/torrent/%s', + 'search': 'http://torrentleech.org/torrents/browse/index/query/%s/categories/%s', + 'download': 'http://torrentleech.org%s', + } def __init__(self): generic.TorrentProvider.__init__(self, "TorrentLeech") - + self.supportsBacklog = True self.cache = TorrentLeechCache(self) - + self.url = self.urls['base_url'] - + self.categories = "2,26,27,32" - + self.session = None def isEnabled(self): return sickbeard.TORRENTLEECH - + def imageName(self): return 'torrentleech.png' - + def getQuality(self, item): - + quality = Quality.sceneQuality(item[0]) - return quality + return quality def _doLogin(self): @@ -76,22 +76,22 @@ class TorrentLeechProvider(generic.TorrentProvider): 'password': sickbeard.TORRENTLEECH_PASSWORD, 'remember_me': 'on', 'login': 'submit', - } - + } + self.session = requests.Session() - + try: response = self.session.post(self.urls['login'], data=login_params, timeout=30) except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e: - logger.log(u'Unable to connect to ' + self.name + ' provider: ' +ex(e), logger.ERROR) + logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR) return False - + if re.search('Invalid Username/password', response.text) \ - or re.search('<title>Login :: TorrentLeech.org', response.text) \ - or response.status_code == 401: - logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR) + or re.search('Login :: TorrentLeech.org', response.text) \ + or response.status_code == 401: + logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR) return False - + return True def _get_season_search_strings(self, show, season, wantedEp, searchSeason=False): @@ -105,7 +105,7 @@ class TorrentLeechProvider(generic.TorrentProvider): if searchSeason: search_string = {'Season': [], 'Episode': []} for show_name in set(show_name_helpers.allPossibleShowNames(show)): - ep_string = show_name +' S%02d' % 
int(season) #1) ShowName SXX + ep_string = show_name + ' S%02d' % int(season) #1) ShowName SXX search_string['Season'].append(ep_string) for ep_obj in wantedEp: @@ -117,76 +117,78 @@ class TorrentLeechProvider(generic.TorrentProvider): return [search_string] def _get_episode_search_strings(self, ep_obj, add_string=''): - + search_string = {'Episode': []} - + if not ep_obj: return [] - + if ep_obj.show.air_by_date: for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)): - ep_string = show_name_helpers.sanitizeSceneName(show_name) +' '+ \ - str(ep_obj.airdate) +'|'+\ + ep_string = show_name_helpers.sanitizeSceneName(show_name) + ' ' + \ + str(ep_obj.airdate) + '|' + \ helpers.custom_strftime('%Y %b {S}', ep_obj.airdate) search_string['Episode'].append(ep_string) else: for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)): - ep_string = show_name_helpers.sanitizeSceneName(show_name) +' '+ \ - sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode} - + ep_string = show_name_helpers.sanitizeSceneName(show_name) + ' ' + \ + sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season, + 'episodenumber': ep_obj.episode} + search_string['Episode'].append(re.sub('\s+', ' ', ep_string)) - + return [search_string] def _doSearch(self, search_params, show=None): - + results = [] items = {'Season': [], 'Episode': [], 'RSS': []} - + if not self._doLogin(): return [] - + for mode in search_params.keys(): for search_string in search_params[mode]: - + if isinstance(search_string, unicode): search_string = unidecode(search_string) - + searchURL = self.urls['search'] % (search_string, self.categories) logger.log(u"Search string: " + searchURL, logger.DEBUG) - + data = self.getURL(searchURL) if not data: continue try: html = BeautifulSoup(data, features=["html5lib", "permissive"]) - - torrent_table = html.find('table', attrs = {'id' : 'torrenttable'}) + + torrent_table = html.find('table', attrs={'id': 'torrenttable'}) torrent_rows = torrent_table.find_all('tr') if torrent_table else [] #Continue only if one Release is found - if len(torrent_rows)<2: - logger.log(u"The Data returned from " + self.name + " do not contains any torrent", logger.DEBUG) + if len(torrent_rows) < 2: + logger.log(u"The Data returned from " + self.name + " do not contains any torrent", + logger.DEBUG) continue for result in torrent_table.find_all('tr')[1:]: try: - link = result.find('td', attrs = {'class' : 'name'}).find('a') - url = result.find('td', attrs = {'class' : 'quickdownload'}).find('a') + link = result.find('td', attrs={'class': 'name'}).find('a') + url = result.find('td', attrs={'class': 'quickdownload'}).find('a') title = link.string download_url = self.urls['download'] % url['href'] id = int(link['href'].replace('/torrent/', '')) - seeders = int(result.find('td', attrs = {'class' : 'seeders'}).string) - leechers = int(result.find('td', attrs = {'class' : 'leechers'}).string) + seeders = int(result.find('td', attrs={'class': 'seeders'}).string) + leechers = int(result.find('td', attrs={'class': 'leechers'}).string) except (AttributeError, TypeError): continue #Filter unseeded torrent if mode != 'RSS' and seeders == 0: - continue + continue if not title or not download_url: continue @@ -197,21 +199,21 @@ class TorrentLeechProvider(generic.TorrentProvider): items[mode].append(item) except Exception, e: - logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR) + logger.log(u"Failed parsing " + 
self.name + " Traceback: " + traceback.format_exc(), logger.ERROR) #For each search mode sort all the items by seeders - items[mode].sort(key=lambda tup: tup[3], reverse=True) + items[mode].sort(key=lambda tup: tup[3], reverse=True) + + results += items[mode] - results += items[mode] - return results def _get_title_and_url(self, item): - + title, url, id, seeders, leechers = item - + if url: - url = str(url).replace('&','&') + url = str(url).replace('&', '&') return (title, url) @@ -226,16 +228,17 @@ class TorrentLeechProvider(generic.TorrentProvider): try: # Remove double-slashes from url parsed = list(urlparse.urlparse(url)) - parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one + parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one url = urlparse.urlunparse(parsed) response = self.session.get(url) except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e: - logger.log(u"Error loading "+self.name+" URL: " + ex(e), logger.ERROR) + logger.log(u"Error loading " + self.name + " URL: " + ex(e), logger.ERROR) return None if response.status_code != 200: - logger.log(self.name + u" page requested with url " + url +" returned status code is " + str(response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING) + logger.log(self.name + u" page requested with url " + url + " returned status code is " + str( + response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING) return None return response.content @@ -244,12 +247,13 @@ class TorrentLeechProvider(generic.TorrentProvider): results = [] - sqlResults = db.DBConnection().select('SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' + - ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' + - ' WHERE e.airdate >= ' + str(search_date.toordinal()) + - ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' + - ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))' - ) + sqlResults = db.DBConnection().select( + 'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' + + ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' + + ' WHERE e.airdate >= ' + str(search_date.toordinal()) + + ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' + + ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))' + ) if not sqlResults: return [] @@ -266,7 +270,6 @@ class TorrentLeechProvider(generic.TorrentProvider): class TorrentLeechCache(tvcache.TVCache): - def __init__(self, provider): tvcache.TVCache.__init__(self, provider) @@ -312,4 +315,5 @@ class TorrentLeechCache(tvcache.TVCache): return self._addCacheEntry(title, url) + provider = TorrentLeechProvider() diff --git a/sickbeard/providers/tvtorrents.py b/sickbeard/providers/tvtorrents.py index 0ae55a84..262c4b7e 100644 --- a/sickbeard/providers/tvtorrents.py +++ b/sickbeard/providers/tvtorrents.py @@ -31,7 +31,6 @@ from sickbeard import tvcache class TvTorrentsProvider(generic.TorrentProvider): - def __init__(self): generic.TorrentProvider.__init__(self, "TvTorrents") @@ -63,23 +62,22 @@ class TvTorrentsProvider(generic.TorrentProvider): description_text = helpers.get_xml_text(parsedXML.find('.//channel/description')) if "User can't be found" in description_text or "Invalid Hash" in description_text: - logger.log(u"Incorrect authentication credentials for " + self.name + " : " + str(description_text), 
logger.DEBUG) - raise AuthException(u"Your authentication credentials for " + self.name + " are incorrect, check your config") + logger.log(u"Incorrect authentication credentials for " + self.name + " : " + str(description_text), + logger.DEBUG) + raise AuthException( + u"Your authentication credentials for " + self.name + " are incorrect, check your config") return True class TvTorrentsCache(tvcache.TVCache): - def __init__(self, provider): - tvcache.TVCache.__init__(self, provider) # only poll TvTorrents every 15 minutes max self.minTime = 15 def _getRSSData(self): - # These will be ignored on the serverside. ignore_regex = "all.month|month.of|season[\s\d]*complete" @@ -95,6 +93,7 @@ class TvTorrentsCache(tvcache.TVCache): return data def _checkAuth(self, parsedXML): - return self.provider._checkAuthFromData(parsedXML) + return self.provider._checkAuthFromData(parsedXML) + provider = TvTorrentsProvider() diff --git a/sickbeard/providers/womble.py b/sickbeard/providers/womble.py index 538f933d..89231751 100644 --- a/sickbeard/providers/womble.py +++ b/sickbeard/providers/womble.py @@ -24,7 +24,6 @@ from sickbeard import tvcache class WombleProvider(generic.NZBProvider): - def __init__(self): generic.NZBProvider.__init__(self, "Womble's Index") self.cache = WombleCache(self) @@ -35,7 +34,6 @@ class WombleProvider(generic.NZBProvider): class WombleCache(tvcache.TVCache): - def __init__(self, provider): tvcache.TVCache.__init__(self, provider) # only poll Womble's Index every 15 minutes max @@ -50,4 +48,5 @@ class WombleCache(tvcache.TVCache): def _checkAuth(self, data): return data != 'Invalid Link' + provider = WombleProvider() diff --git a/sickbeard/sab.py b/sickbeard/sab.py index dbb692a1..8d67a2c9 100644 --- a/sickbeard/sab.py +++ b/sickbeard/sab.py @@ -25,6 +25,7 @@ import sickbeard from lib import MultipartPostHandler import urllib2, cookielib + try: import json except ImportError: @@ -34,6 +35,7 @@ from sickbeard.common import USER_AGENT from sickbeard import logger from sickbeard.exceptions import ex + def sendNZB(nzb): """ Sends an NZB to SABnzbd via the API. @@ -84,7 +86,7 @@ def sendNZB(nzb): # if we have the URL to an NZB then we've built up the SAB API URL already so just call it if nzb.resultType == "nzb": f = urllib.urlopen(url) - + # if we are uploading the NZB data to SAB then we need to build a little POST form and send it elif nzb.resultType == "nzbdata": cookies = cookielib.CookieJar() @@ -137,6 +139,7 @@ def sendNZB(nzb): logger.log(u"Unknown failure sending NZB to sab. 
Return text is: " + sabText, logger.ERROR) return False + def _checkSabResponse(f): try: result = f.readlines() @@ -164,6 +167,7 @@ def _checkSabResponse(f): else: return True, sabText + def _sabURLOpenSimple(url): try: f = urllib.urlopen(url) @@ -179,9 +183,10 @@ def _sabURLOpenSimple(url): else: return True, f + def getSabAccesMethod(host=None, username=None, password=None, apikey=None): url = host + "api?mode=auth" - + result, f = _sabURLOpenSimple(url) if not result: return False, f @@ -192,6 +197,7 @@ def getSabAccesMethod(host=None, username=None, password=None, apikey=None): return True, sabText + def testAuthentication(host=None, username=None, password=None, apikey=None): """ Sends a simple API request to SAB to determine if the given connection information is connect @@ -203,7 +209,7 @@ def testAuthentication(host=None, username=None, password=None, apikey=None): Returns: A tuple containing the success boolean and a message """ - + # build up the URL parameters params = {} params['mode'] = 'queue' @@ -212,7 +218,7 @@ def testAuthentication(host=None, username=None, password=None, apikey=None): params['ma_password'] = password params['apikey'] = apikey url = host + "api?" + urllib.urlencode(params) - + # send the test request logger.log(u"SABnzbd test URL: " + url, logger.DEBUG) result, f = _sabURLOpenSimple(url) @@ -223,6 +229,6 @@ def testAuthentication(host=None, username=None, password=None, apikey=None): result, sabText = _checkSabResponse(f) if not result: return False, sabText - + return True, "Success" diff --git a/sickbeard/sbdatetime.py b/sickbeard/sbdatetime.py index fb6e2821..7b20fb10 100644 --- a/sickbeard/sbdatetime.py +++ b/sickbeard/sbdatetime.py @@ -76,11 +76,11 @@ date_presets = ('%Y-%m-%d', '%B %d, %Y', '%a, %B %d, %Y', '%A, %B %d, %Y' - ) +) time_presets = ('%I:%M:%S %p', '%H:%M:%S' - ) +) # helper class class static_or_instance(object): @@ -90,14 +90,14 @@ class static_or_instance(object): def __get__(self, instance, owner): return functools.partial(self.func, instance) + # subclass datetime.datetime to add function to display custom date and time formats class sbdatetime(datetime.datetime): - has_locale = True # display Time in Sickbeard Format @static_or_instance - def sbftime(self, dt = None, show_seconds = False, t_preset = None): + def sbftime(self, dt=None, show_seconds=False, t_preset=None): try: if sbdatetime.has_locale: @@ -131,7 +131,7 @@ class sbdatetime(datetime.datetime): # display Date in Sickbeard Format @static_or_instance - def sbfdate(self, dt = None, d_preset = None): + def sbfdate(self, dt=None, d_preset=None): strd = '' try: @@ -151,7 +151,7 @@ class sbdatetime(datetime.datetime): # display Datetime in Sickbeard Format @static_or_instance - def sbfdatetime(self, dt = None, show_seconds = False, d_preset = None, t_preset = None): + def sbfdatetime(self, dt=None, show_seconds=False, d_preset=None, t_preset=None): strd = '' try: diff --git a/sickbeard/scene_exceptions.py b/sickbeard/scene_exceptions.py index e3257d47..cc17e345 100644 --- a/sickbeard/scene_exceptions.py +++ b/sickbeard/scene_exceptions.py @@ -17,12 +17,14 @@ # along with Sick Beard. If not, see . import re +import sickbeard from sickbeard import helpers from sickbeard import name_cache from sickbeard import logger from sickbeard import db + def get_scene_exceptions(indexer_id): """ Given a indexer_id, return a list of all the scene exceptions. 
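A quick aside before the scene_exceptions.py hunks: the exceptions list fetched by retrieve_exceptions() keeps one show per line in the format indexer_id: 'show name 1', 'show name 2', etc., and the hunk further down splits each line with partition(':'). A rough sketch of that parse, with an invented id and aliases (the quote-stripping here is illustrative, not the patch's exact code):

    cur_line = u"12345: 'Show Name', 'Show Name (US)'"

    indexer_id, sep, aliases = cur_line.partition(':')
    if aliases:
        alias_list = [x.strip().strip("'") for x in aliases.split(',')]
        print int(indexer_id), alias_list  # 12345 [u'Show Name', u'Show Name (US)']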
@@ -42,7 +44,8 @@ def get_scene_exception_by_name(show_name): myDB = db.DBConnection("cache.db") # try the obvious case first - exception_result = myDB.select("SELECT indexer_id FROM scene_exceptions WHERE LOWER(show_name) = ?", [show_name.lower()]) + exception_result = myDB.select("SELECT indexer_id FROM scene_exceptions WHERE LOWER(show_name) = ?", + [show_name.lower()]) if exception_result: return int(exception_result[0]["indexer_id"]) @@ -52,8 +55,9 @@ def get_scene_exception_by_name(show_name): cur_exception_name = cur_exception["show_name"] cur_indexer_id = int(cur_exception["indexer_id"]) - if show_name.lower() in (cur_exception_name.lower(), helpers.sanitizeSceneName(cur_exception_name).lower().replace('.', ' ')): - logger.log(u"Scene exception lookup got indexer id "+str(cur_indexer_id)+u", using that", logger.DEBUG) + if show_name.lower() in ( + cur_exception_name.lower(), helpers.sanitizeSceneName(cur_exception_name).lower().replace('.', ' ')): + logger.log(u"Scene exception lookup got indexer id " + str(cur_indexer_id) + u", using that", logger.DEBUG) return cur_indexer_id return None @@ -66,16 +70,13 @@ def retrieve_exceptions(): """ exception_dict = {} - url_data = '' # exceptions are stored on github pages - url_dict = { - 'TheTVDB': 'http://midgetspy.github.com/sb_tvdb_scene_exceptions/exceptions.txt', - 'TVRage': 'http://raw.github.com/echel0n/sb_tvrage_scene_exceptions/master/exceptions.txt' - } - for indexer, url in url_dict.iteritems(): - logger.log(u"Checking for scene exception updates for " + indexer) + for indexer in sickbeard.indexerApi().indexers: + logger.log(u"Checking for scene exception updates for " + sickbeard.indexerApi(indexer).name + "") + + url = sickbeard.indexerApi(indexer).config['scene_url'] url_data = helpers.getURL(url) @@ -88,7 +89,7 @@ def retrieve_exceptions(): # each exception is on one line with the format indexer_id: 'show name 1', 'show name 2', etc for cur_line in url_data.splitlines(): cur_line = cur_line.decode('utf-8') - indexer_id, sep, aliases = cur_line.partition(':') #@UnusedVariable + indexer_id, sep, aliases = cur_line.partition(':') #@UnusedVariable if not aliases: continue @@ -108,12 +109,14 @@ def retrieve_exceptions(): for cur_indexer_id in exception_dict: # get a list of the existing exceptions for this ID - existing_exceptions = [x["show_name"] for x in myDB.select("SELECT * FROM scene_exceptions WHERE indexer_id = ?", [cur_indexer_id])] + existing_exceptions = [x["show_name"] for x in + myDB.select("SELECT * FROM scene_exceptions WHERE indexer_id = ?", [cur_indexer_id])] for cur_exception in exception_dict[cur_indexer_id]: # if this exception isn't already in the DB then add it if cur_exception not in existing_exceptions: - myDB.action("INSERT INTO scene_exceptions (indexer_id, show_name) VALUES (?,?)", [cur_indexer_id, cur_exception]) + myDB.action("INSERT INTO scene_exceptions (indexer_id, show_name) VALUES (?,?)", + [cur_indexer_id, cur_exception]) changed_exceptions = True # since this could invalidate the results of the cache we clear it out after updating @@ -122,17 +125,18 @@ def retrieve_exceptions(): name_cache.clearCache() else: logger.log(u"No scene exceptions update needed") - + + def update_scene_exceptions(indexer_id, scene_exceptions): """ Given a indexer_id, and a list of all show scene exceptions, update the db. 
""" - + myDB = db.DBConnection("cache.db") - + myDB.action('DELETE FROM scene_exceptions WHERE indexer_id=?', [indexer_id]) - + for cur_exception in scene_exceptions: myDB.action("INSERT INTO scene_exceptions (indexer_id, show_name) VALUES (?,?)", [indexer_id, cur_exception]) - + name_cache.clearCache() diff --git a/sickbeard/scene_numbering.py b/sickbeard/scene_numbering.py index d82e8a79..48056906 100644 --- a/sickbeard/scene_numbering.py +++ b/sickbeard/scene_numbering.py @@ -35,7 +35,8 @@ from sickbeard import helpers from sickbeard.exceptions import ex from lib import requests -MAX_XEM_AGE_SECS = 86400 # 1 day +MAX_XEM_AGE_SECS = 86400 # 1 day + def get_scene_numbering(indexer_id, season, episode, fallback_to_xem=True): """ @@ -62,7 +63,8 @@ def get_scene_numbering(indexer_id, season, episode, fallback_to_xem=True): if xem_result: return xem_result return (season, episode) - + + def find_scene_numbering(indexer_id, season, episode): """ Same as get_scene_numbering(), but returns None if scene numbering is not set @@ -76,10 +78,13 @@ def find_scene_numbering(indexer_id, season, episode): myDB = db.DBConnection() - rows = myDB.select("SELECT scene_season, scene_episode FROM scene_numbering WHERE indexer = ? and indexer_id = ? and season = ? and episode = ?", [indexer, indexer_id, season, episode]) + rows = myDB.select( + "SELECT scene_season, scene_episode FROM scene_numbering WHERE indexer = ? and indexer_id = ? and season = ? and episode = ?", + [indexer, indexer_id, season, episode]) if rows: return (int(rows[0]["scene_season"]), int(rows[0]["scene_episode"])) + def get_indexer_numbering(indexer_id, sceneSeason, sceneEpisode, fallback_to_xem=True): """ Returns a tuple, (season, episode) with the TVDB and TVRAGE numbering for (sceneSeason, sceneEpisode) @@ -93,15 +98,18 @@ def get_indexer_numbering(indexer_id, sceneSeason, sceneEpisode, fallback_to_xem indexer = showObj.indexer myDB = db.DBConnection() - - rows = myDB.select("SELECT season, episode FROM scene_numbering WHERE indexer = ? and indexer_id = ? and scene_season = ? and scene_episode = ?", [indexer, indexer_id, sceneSeason, sceneEpisode]) + + rows = myDB.select( + "SELECT season, episode FROM scene_numbering WHERE indexer = ? and indexer_id = ? and scene_season = ? and scene_episode = ?", + [indexer, indexer_id, sceneSeason, sceneEpisode]) if rows: return (int(rows[0]["season"]), int(rows[0]["episode"])) else: if fallback_to_xem: return get_indexer_numbering_for_xem(indexer_id, sceneSeason, sceneEpisode) return (sceneSeason, sceneEpisode) - + + def get_scene_numbering_for_show(indexer_id): """ Returns a dict of (season, episode) : (sceneSeason, sceneEpisode) mappings @@ -116,16 +124,17 @@ def get_scene_numbering_for_show(indexer_id): indexer = showObj.indexer myDB = db.DBConnection() - - rows = myDB.select('''SELECT season, episode, scene_season, scene_episode - FROM scene_numbering WHERE indexer = ? and indexer_id = ? - ORDER BY season, episode''', [indexer, indexer_id]) + + rows = myDB.select( + 'SELECT season, episode, scene_season, scene_episode FROM scene_numbering WHERE indexer = ? and indexer_id = ? ORDER BY season, episode', + [indexer, indexer_id]) result = {} for row in rows: result[(int(row['season']), int(row['episode']))] = (int(row['scene_season']), int(row['scene_episode'])) - + return result - + + def set_scene_numbering(indexer_id, season, episode, sceneSeason=None, sceneEpisode=None): """ Set scene numbering for a season/episode. 
@@ -140,19 +149,22 @@ def set_scene_numbering(indexer_id, season, episode, sceneSeason=None, sceneEpis indexer = showObj.indexer myDB = db.DBConnection() - + # sanity #if sceneSeason == None: sceneSeason = season #if sceneEpisode == None: sceneEpisode = episode - + # delete any existing record first - myDB.action('DELETE FROM scene_numbering where indexer = ? and indexer_id = ? and season = ? and episode = ?', [indexer, indexer_id, season, episode]) - + myDB.action('DELETE FROM scene_numbering where indexer = ? and indexer_id = ? and season = ? and episode = ?', + [indexer, indexer_id, season, episode]) + # now, if the new numbering is not the default, we save a new record if sceneSeason is not None and sceneEpisode is not None: - myDB.action("INSERT INTO scene_numbering (indexer, indexer_id, season, episode, scene_season, scene_episode) VALUES (?,?,?,?,?,?)", [indexer, indexer_id, season, episode, sceneSeason, sceneEpisode]) - - + myDB.action( + "INSERT INTO scene_numbering (indexer, indexer_id, season, episode, scene_season, scene_episode) VALUES (?,?,?,?,?,?)", + [indexer, indexer_id, season, episode, sceneSeason, sceneEpisode]) + + def find_xem_numbering(indexer_id, season, episode): """ Returns the scene numbering, as retrieved from xem. @@ -172,14 +184,17 @@ def find_xem_numbering(indexer_id, season, episode): if _xem_refresh_needed(indexer_id): _xem_refresh(indexer_id) - + cacheDB = db.DBConnection('cache.db') - rows = cacheDB.select("SELECT scene_season, scene_episode FROM xem_numbering WHERE indexer = ? and indexer_id = ? and season = ? and episode = ?", [indexer, indexer_id, season, episode]) + rows = cacheDB.select( + "SELECT scene_season, scene_episode FROM xem_numbering WHERE indexer = ? and indexer_id = ? and season = ? and episode = ?", + [indexer, indexer_id, season, episode]) if rows: return (int(rows[0]["scene_season"]), int(rows[0]["scene_episode"])) else: return None - + + def get_indexer_numbering_for_xem(indexer_id, sceneSeason, sceneEpisode): """ Reverse of find_xem_numbering: lookup a tvdb season and episode using scene numbering @@ -199,12 +214,15 @@ def get_indexer_numbering_for_xem(indexer_id, sceneSeason, sceneEpisode): if _xem_refresh_needed(indexer_id): _xem_refresh(indexer_id) cacheDB = db.DBConnection('cache.db') - rows = cacheDB.select("SELECT season, episode FROM xem_numbering WHERE indexer = ? and indexer_id = ? and scene_season = ? and scene_episode = ?", [indexer, indexer_id, sceneSeason, sceneEpisode]) + rows = cacheDB.select( + "SELECT season, episode FROM xem_numbering WHERE indexer = ? and indexer_id = ? and scene_season = ? and scene_episode = ?", + [indexer, indexer_id, sceneSeason, sceneEpisode]) if rows: return (int(rows[0]["season"]), int(rows[0]["episode"])) else: return (sceneSeason, sceneEpisode) - + + def _xem_refresh_needed(indexer_id): """ Is a refresh needed on a show? @@ -220,12 +238,14 @@ def _xem_refresh_needed(indexer_id): indexer = showObj.indexer cacheDB = db.DBConnection('cache.db') - rows = cacheDB.select("SELECT last_refreshed FROM xem_refresh WHERE indexer = ? and indexer_id = ?", [indexer, indexer_id]) + rows = cacheDB.select("SELECT last_refreshed FROM xem_refresh WHERE indexer = ? 
and indexer_id = ?", + [indexer, indexer_id]) if rows: return time.time() > (int(rows[0]['last_refreshed']) + MAX_XEM_AGE_SECS) else: return True - + + def _xem_refresh(indexer_id): """ Refresh data from xem for a tv show @@ -240,54 +260,55 @@ def _xem_refresh(indexer_id): indexer = showObj.indexer try: - logger.log(u'Looking up XEM scene mapping for show %s on %s' % (indexer_id, indexer,), logger.DEBUG) - data = None - if 'Tvdb' in indexer: - data = requests.get('http://thexem.de/map/all?id=%s&origin=tvdb&destination=scene' % (indexer_id,)).json() - elif 'TVRage' in indexer: - data = requests.get('http://thexem.de/map/all?id=%s&origin=rage&destination=scene' % (indexer_id,)).json() + logger.log( + u'Looking up XEM scene mapping for show %s on %s' % (indexer_id, sickbeard.indexerApi(indexer).name,), + logger.DEBUG) + data = requests.get("http://thexem.de/map/all?id=%s&origin=%s&destination=scene" % ( + indexer_id, sickbeard.indexerApi(indexer).config['xem_origin'],)).json() if data is None or data == '': - logger.log(u'No XEN data for show "%s on %s", trying TVTumbler' % (indexer_id, indexer,), logger.MESSAGE) - if 'Tvdb' in indexer: - data = requests.get('http://show-api.tvtumbler.com/api/thexem/all?id=%s&origin=tvdb&destination=scene' % (indexer_id,)).json() - elif 'TVRage' in indexer: - data = requests.get('http://show-api.tvtumbler.com/api/thexem/all?id=%s&origin=rage&destination=scene' % (indexer_id,)).json() - + logger.log(u'No XEN data for show "%s on %s", trying TVTumbler' % ( + indexer_id, sickbeard.indexerApi(indexer).name,), logger.MESSAGE) + data = requests.get("http://show-api.tvtumbler.com/api/thexem/all?id=%s&origin=%s&destination=scene" % ( + indexer_id, sickbeard.indexerApi(indexer).config['xem_origin'],)).json() if data is None or data == '': - logger.log(u'TVTumbler also failed for show "%s on %s". giving up.' % (indexer_id, indexer,), logger.MESSAGE) + logger.log(u'TVTumbler also failed for show "%s on %s". giving up.' % (indexer_id, indexer,), + logger.MESSAGE) return None result = data if result: cacheDB = db.DBConnection('cache.db') - cacheDB.action("INSERT OR REPLACE INTO xem_refresh (indexer, indexer_id, last_refreshed) VALUES (?,?,?)", [indexer, indexer_id, time.time()]) + cacheDB.action("INSERT OR REPLACE INTO xem_refresh (indexer, indexer_id, last_refreshed) VALUES (?,?,?)", + [indexer, indexer_id, time.time()]) if 'success' in result['result']: cacheDB.action("DELETE FROM xem_numbering where indexer = ? 
and indexer_id = ?", [indexer, indexer_id]) for entry in result['data']: if 'scene' in entry: - if 'Tvdb' in indexer: - cacheDB.action("INSERT INTO xem_numbering (indexer, indexer_id, season, episode, scene_season, scene_episode) VALUES (?,?,?,?,?,?)", - [indexer, indexer_id, entry['tvdb']['season'], entry['tvdb']['episode'], entry['scene']['season'], entry['scene']['episode'] ]) - elif 'TVRage' in indexer: - cacheDB.action("INSERT INTO xem_numbering (indexer, indexer_id, season, episode, scene_season, scene_episode) VALUES (?,?,?,?,?,?)", - [indexer, indexer_id, entry['rage']['season'], entry['rage']['episode'], entry['scene']['season'], entry['scene']['episode'] ]) - if 'scene_2' in entry: # for doubles - if 'Tvdb' in indexer: - cacheDB.action("INSERT INTO xem_numbering (indexer, indexer_id, season, episode, scene_season, scene_episode) VALUES (?,?,?,?,?,?)", - [indexer, indexer_id, entry['tvdb']['season'], entry['tvdb']['episode'], entry['scene_2']['season'], entry['scene_2']['episode'] ]) - elif 'TVRage' in indexer: - cacheDB.action("INSERT INTO xem_numbering (indexer, indexer_id, season, episode, scene_season, scene_episode) VALUES (?,?,?,?,?,?)", - [indexer, indexer_id, entry['rage']['season'], entry['rage']['episode'], entry['scene_2']['season'], entry['scene_2']['episode'] ]) + cacheDB.action( + "INSERT INTO xem_numbering (indexer, indexer_id, season, episode, scene_season, scene_episode) VALUES (?,?,?,?,?,?)", + [indexer, indexer_id, entry[sickbeard.indexerApi(indexer).config['xem_origin']]['season'], + entry[sickbeard.indexerApi(indexer).config['xem_origin']]['episode'], + entry['scene']['season'], entry['scene']['episode']]) + if 'scene_2' in entry: # for doubles + cacheDB.action( + "INSERT INTO xem_numbering (indexer, indexer_id, season, episode, scene_season, scene_episode) VALUES (?,?,?,?,?,?)", + [indexer, indexer_id, entry[sickbeard.indexerApi(indexer).config['xem_origin']]['season'], + entry[sickbeard.indexerApi(indexer).config['xem_origin']]['episode'], + entry['scene_2']['season'], entry['scene_2']['episode']]) else: - logger.log(u'Failed to get XEM scene data for show %s from %s because "%s"' % (indexer_id, indexer, result['message']), logger.MESSAGE) + logger.log(u'Failed to get XEM scene data for show %s from %s because "%s"' % ( + indexer_id, sickbeard.indexerApi(indexer).name, result['message']), logger.MESSAGE) else: - logger.log(u"Empty lookup result - no XEM data for show %s on %s" % (indexer_id, indexer,), logger.MESSAGE) + logger.log(u"Empty lookup result - no XEM data for show %s on %s" % ( + indexer_id, sickbeard.indexerApi(indexer).name,), logger.MESSAGE) except Exception, e: - logger.log(u"Exception while refreshing XEM data for show " + str(indexer_id) + " on " + indexer + ": " + ex(e), logger.WARNING) + logger.log(u"Exception while refreshing XEM data for show " + str(indexer_id) + " on " + sickbeard.indexerApi( + indexer).name + ": " + ex(e), logger.WARNING) logger.log(traceback.format_exc(), logger.DEBUG) return None - + + def get_xem_numbering_for_show(indexer_id): """ Returns a dict of (season, episode) : (sceneSeason, sceneEpisode) mappings @@ -303,17 +324,20 @@ def get_xem_numbering_for_show(indexer_id): if _xem_refresh_needed(indexer_id): _xem_refresh(indexer_id) + cacheDB = db.DBConnection('cache.db') - - rows = cacheDB.select('''SELECT season, episode, scene_season, scene_episode - FROM xem_numbering WHERE indexer = ? and indexer_id = ? 
- ORDER BY season, episode''', [indexer, indexer_id])
+
+    rows = cacheDB.select(
+        'SELECT season, episode, scene_season, scene_episode FROM xem_numbering WHERE indexer = ? and indexer_id = ? ORDER BY season, episode',
+        [indexer, indexer_id])
+
     result = {}
     for row in rows:
         result[(int(row['season']), int(row['episode']))] = (int(row['scene_season']), int(row['scene_episode']))
-
+
     return result

+
 def get_xem_numbering_for_season(indexer_id, season):
     """
     Returns a dict of (season, episode) : (sceneSeason, sceneEpisode) mappings
@@ -332,14 +356,14 @@ def get_xem_numbering_for_season(indexer_id, season):

     cacheDB = db.DBConnection('cache.db')

-    rows = cacheDB.select('''SELECT season, scene_season
-                             FROM xem_numbering WHERE indexer = ? and indexer_id = ? AND season = ?
-                             ORDER BY season''', [indexer, indexer_id, season])
+    rows = cacheDB.select(
+        'SELECT season, scene_season FROM xem_numbering WHERE indexer = ? and indexer_id = ? AND season = ? ORDER BY season',
+        [indexer, indexer_id, season])
+
     result = {}
     if rows:
         for row in rows:
-            result.setdefault(int(row['season']),[]).append(int(row['scene_season']))
+            result.setdefault(int(row['season']), []).append(int(row['scene_season']))
     else:
-        result.setdefault(int(season),[]).append(int(season))
+        result.setdefault(int(season), []).append(int(season))

     return result
\ No newline at end of file
diff --git a/sickbeard/scheduler.py b/sickbeard/scheduler.py
index af9e9924..29cae776 100644
--- a/sickbeard/scheduler.py
+++ b/sickbeard/scheduler.py
@@ -24,9 +24,10 @@ import traceback
 from sickbeard import logger
 from sickbeard.exceptions import ex

-class Scheduler:
-    def __init__(self, action, cycleTime=datetime.timedelta(minutes=10), runImmediately=True, threadName="ScheduledThread", silent=False):
 
+class Scheduler:
+    def __init__(self, action, cycleTime=datetime.timedelta(minutes=10), runImmediately=True,
+                 threadName="ScheduledThread", silent=False):
         if runImmediately:
             self.lastRun = datetime.datetime.fromordinal(1)
@@ -67,10 +68,10 @@ class Scheduler:
             self.lastRun = currentTime
             try:
                 if not self.silent:
-                    logger.log(u"Starting new thread: "+self.threadName, logger.DEBUG)
+                    logger.log(u"Starting new thread: " + self.threadName, logger.DEBUG)
                 self.action.run()
             except Exception, e:
-                logger.log(u"Exception generated in thread "+self.threadName+": " + ex(e), logger.ERROR)
+                logger.log(u"Exception generated in thread " + self.threadName + ": " + ex(e), logger.ERROR)
                 logger.log(repr(traceback.format_exc()), logger.DEBUG)

             if self.abort:
diff --git a/sickbeard/search.py b/sickbeard/search.py
index b48a1bce..0fd473f9 100644
--- a/sickbeard/search.py
+++ b/sickbeard/search.py
@@ -40,6 +40,7 @@ from sickbeard import failed_history
 from sickbeard.exceptions import ex
 from sickbeard.providers.generic import GenericProvider

+
 def _downloadResult(result):
     """
     Downloads a result to the appropriate black hole folder. 
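One convention worth spelling out, since the one-lined get_xem_numbering_for_season() query a few hunks above depends on it: DBConnection.select() takes the SQL string and the substitution values as two separate arguments, so the parameter list must stay outside the quoted SQL. A minimal sketch of the call shape (example values only):

    from sickbeard import db

    cacheDB = db.DBConnection('cache.db')
    rows = cacheDB.select(
        'SELECT season, scene_season FROM xem_numbering WHERE indexer = ? and indexer_id = ? AND season = ? ORDER BY season',
        [1, 12345, 2])  # indexer, indexer_id, season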
@@ -90,10 +91,12 @@ def _downloadResult(result): return False if newResult and sickbeard.USE_FAILED_DOWNLOADS: - ui.notifications.message('Episode snatched', '%s snatched from %s' % (result.name, resProvider.name)) + ui.notifications.message('Episode snatched', + '%s snatched from %s' % (result.name, resProvider.name)) return newResult + def snatchEpisode(result, endStatus=SNATCHED): """ Contains the internal logic necessary to actually "snatch" a result that @@ -107,7 +110,7 @@ def snatchEpisode(result, endStatus=SNATCHED): if result is None: return False - result.priority = 0 # -1 = low, 0 = normal, 1 = high + result.priority = 0 # -1 = low, 0 = normal, 1 = high if sickbeard.ALLOW_HIGH_PRIORITY: # if it aired recently make it high priority for curEp in result.episodes: @@ -167,8 +170,8 @@ def snatchEpisode(result, endStatus=SNATCHED): return True -def searchForNeededEpisodes(): +def searchForNeededEpisodes(): logger.log(u"Searching all providers for any needed episodes") foundResults = {} @@ -199,7 +202,8 @@ def searchForNeededEpisodes(): for curEp in curFoundResults: if curEp.show.paused: - logger.log(u"Show " + curEp.show.name + " is paused, ignoring all RSS items for " + curEp.prettyName(), logger.DEBUG) + logger.log(u"Show " + curEp.show.name + " is paused, ignoring all RSS items for " + curEp.prettyName(), + logger.DEBUG) continue # find the best result for the current episode @@ -222,13 +226,13 @@ def searchForNeededEpisodes(): foundResults[curEp] = bestResult if not didSearch: - logger.log(u"No NZB/Torrent providers found or enabled in the sickbeard config. Please check your settings.", logger.ERROR) + logger.log(u"No NZB/Torrent providers found or enabled in the sickbeard config. Please check your settings.", + logger.ERROR) return foundResults.values() def pickBestResult(results, quality_list=None): - logger.log(u"Picking the best result out of " + str([x.name for x in results]), logger.DEBUG) # find the best result for the current episode @@ -240,7 +244,8 @@ def pickBestResult(results, quality_list=None): logger.log(cur_result.name + " is a quality we know we don't want, rejecting it", logger.DEBUG) continue - if sickbeard.USE_FAILED_DOWNLOADS and failed_history.hasFailed(cur_result.name, cur_result.size, cur_result.provider.name): + if sickbeard.USE_FAILED_DOWNLOADS and failed_history.hasFailed(cur_result.name, cur_result.size, + cur_result.provider.name): logger.log(cur_result.name + u" has previously failed, rejecting it") continue @@ -259,6 +264,7 @@ def pickBestResult(results, quality_list=None): return bestResult + def isFinalResult(result): """ Checks if the given result is good enough quality that we can stop searching for other ones. @@ -296,12 +302,14 @@ def isFinalResult(result): else: return False + def isFirstBestMatch(result): """ Checks if the given result is a best quality match and if we want to archive the episode on first match. 
""" - logger.log(u"Checking if we should archive our first best quality match for for episode " + result.name, logger.DEBUG) + logger.log(u"Checking if we should archive our first best quality match for for episode " + result.name, + logger.DEBUG) show_obj = result.episodes[0].show @@ -313,8 +321,8 @@ def isFirstBestMatch(result): return False -def findEpisode(episode, manualSearch=False): +def findEpisode(episode, manualSearch=False): logger.log(u"Searching for " + episode.prettyName()) foundResults = [] @@ -339,13 +347,17 @@ def findEpisode(episode, manualSearch=False): didSearch = True # skip non-tv crap - curFoundResults = filter(lambda x: show_name_helpers.filterBadReleases(x.name) and show_name_helpers.isGoodResult(x.name, episode.show), curFoundResults) + curFoundResults = filter( + lambda x: show_name_helpers.filterBadReleases(x.name) and show_name_helpers.isGoodResult(x.name, + episode.show), + curFoundResults) # loop all results and see if any of them are good enough that we can stop searching done_searching = False for cur_result in curFoundResults: done_searching = isFinalResult(cur_result) - logger.log(u"Should we stop searching after finding " + cur_result.name + ": " + str(done_searching), logger.DEBUG) + logger.log(u"Should we stop searching after finding " + cur_result.name + ": " + str(done_searching), + logger.DEBUG) if done_searching: break @@ -356,14 +368,15 @@ def findEpisode(episode, manualSearch=False): break if not didSearch: - logger.log(u"No NZB/Torrent providers found or enabled in the sickbeard config. Please check your settings.", logger.ERROR) + logger.log(u"No NZB/Torrent providers found or enabled in the sickbeard config. Please check your settings.", + logger.ERROR) bestResult = pickBestResult(foundResults) return bestResult -def findSeason(show, season): +def findSeason(show, season): logger.log(u"Searching for stuff we need from " + show.name + " season " + str(season)) foundResults = {} @@ -382,7 +395,10 @@ def findSeason(show, season): for curEp in curResults: # skip non-tv crap - curResults[curEp] = filter(lambda x: show_name_helpers.filterBadReleases(x.name) and show_name_helpers.isGoodResult(x.name, show), curResults[curEp]) + curResults[curEp] = filter( + lambda x: show_name_helpers.filterBadReleases(x.name) and show_name_helpers.isGoodResult(x.name, + show), + curResults[curEp]) if curEp in foundResults: foundResults[curEp] += curResults[curEp] @@ -400,7 +416,8 @@ def findSeason(show, season): didSearch = True if not didSearch: - logger.log(u"No NZB/Torrent providers found or enabled in the sickbeard config. Please check your settings.", logger.ERROR) + logger.log(u"No NZB/Torrent providers found or enabled in the sickbeard config. Please check your settings.", + logger.ERROR) finalResults = [] @@ -424,10 +441,14 @@ def findSeason(show, season): # get the quality of the season nzb seasonQual = Quality.sceneQuality(bestSeasonNZB.name) seasonQual = bestSeasonNZB.quality - logger.log(u"The quality of the season " + bestSeasonNZB.provider.providerType + " is " + Quality.qualityStrings[seasonQual], logger.DEBUG) + logger.log( + u"The quality of the season " + bestSeasonNZB.provider.providerType + " is " + Quality.qualityStrings[ + seasonQual], logger.DEBUG) myDB = db.DBConnection() - allEps = [int(x["episode"]) for x in myDB.select("SELECT episode FROM tv_episodes WHERE showid = ? AND season = ?", [show.indexerid, season])] + allEps = [int(x["episode"]) for x in + myDB.select("SELECT episode FROM tv_episodes WHERE showid = ? 
AND season = ?", + [show.indexerid, season])] logger.log(u"Episode list: " + str(allEps), logger.DEBUG) allWanted = True @@ -440,7 +461,8 @@ def findSeason(show, season): # if we need every ep in the season and there's nothing better then just download this and be done with it if allWanted and bestSeasonNZB.quality == highest_quality_overall: - logger.log(u"Every ep in this season is needed, downloading the whole " + bestSeasonNZB.provider.providerType + " " + bestSeasonNZB.name) + logger.log( + u"Every ep in this season is needed, downloading the whole " + bestSeasonNZB.provider.providerType + " " + bestSeasonNZB.name) epObjs = [] for curEpNum in allEps: @@ -449,7 +471,9 @@ def findSeason(show, season): return [bestSeasonNZB] elif not anyWanted: - logger.log(u"No eps from this season are wanted at this quality, ignoring the result of " + bestSeasonNZB.name, logger.DEBUG) + logger.log( + u"No eps from this season are wanted at this quality, ignoring the result of " + bestSeasonNZB.name, + logger.DEBUG) else: @@ -459,7 +483,10 @@ def findSeason(show, season): # if not, break it apart and add them as the lowest priority results individualResults = nzbSplitter.splitResult(bestSeasonNZB) - individualResults = filter(lambda x: show_name_helpers.filterBadReleases(x.name) and show_name_helpers.isGoodResult(x.name, show), individualResults) + individualResults = filter( + lambda x: show_name_helpers.filterBadReleases(x.name) and show_name_helpers.isGoodResult(x.name, + show), + individualResults) for curResult in individualResults: if len(curResult.episodes) == 1: @@ -476,7 +503,8 @@ def findSeason(show, season): else: # Season result from Torrent Provider must be a full-season torrent, creating multi-ep result for it. - logger.log(u"Adding multi-ep result for full-season torrent. Set the episodes you don't want to 'don't download' in your torrent client if desired!") + logger.log( + u"Adding multi-ep result for full-season torrent. 
Set the episodes you don't want to 'don't download' in your torrent client if desired!") epObjs = [] for curEpNum in allEps: epObjs.append(show.getEpisode(season, curEpNum)) @@ -495,7 +523,8 @@ def findSeason(show, season): logger.log(u"Seeing if we want to bother with multi-episode result " + multiResult.name, logger.DEBUG) - if sickbeard.USE_FAILED_DOWNLOADS and failed_history.hasFailed(multiResult.name, multiResult.size, multiResult.provider.name): + if sickbeard.USE_FAILED_DOWNLOADS and failed_history.hasFailed(multiResult.name, multiResult.size, + multiResult.provider.name): logger.log(multiResult.name + u" has previously failed, rejecting this multi-ep result") continue @@ -515,10 +544,13 @@ def findSeason(show, season): else: neededEps.append(epNum) - logger.log(u"Single-ep check result is neededEps: " + str(neededEps) + ", notNeededEps: " + str(notNeededEps), logger.DEBUG) + logger.log( + u"Single-ep check result is neededEps: " + str(neededEps) + ", notNeededEps: " + str(notNeededEps), + logger.DEBUG) if not neededEps: - logger.log(u"All of these episodes were covered by single nzbs, ignoring this multi-ep result", logger.DEBUG) + logger.log(u"All of these episodes were covered by single nzbs, ignoring this multi-ep result", + logger.DEBUG) continue # check if these eps are already covered by another multi-result @@ -531,10 +563,14 @@ def findSeason(show, season): else: multiNeededEps.append(epNum) - logger.log(u"Multi-ep check result is multiNeededEps: " + str(multiNeededEps) + ", multiNotNeededEps: " + str(multiNotNeededEps), logger.DEBUG) + logger.log( + u"Multi-ep check result is multiNeededEps: " + str(multiNeededEps) + ", multiNotNeededEps: " + str( + multiNotNeededEps), logger.DEBUG) if not multiNeededEps: - logger.log(u"All of these episodes were covered by another multi-episode nzbs, ignoring this multi-ep result", logger.DEBUG) + logger.log( + u"All of these episodes were covered by another multi-episode nzbs, ignoring this multi-ep result", + logger.DEBUG) continue # if we're keeping this multi-result then remember it @@ -545,7 +581,8 @@ def findSeason(show, season): for epObj in multiResult.episodes: epNum = epObj.episode if epNum in foundResults: - logger.log(u"A needed multi-episode result overlaps with a single-episode result for ep #" + str(epNum) + ", removing the single-episode results from the list", logger.DEBUG) + logger.log(u"A needed multi-episode result overlaps with a single-episode result for ep #" + str( + epNum) + ", removing the single-episode results from the list", logger.DEBUG) del foundResults[epNum] finalResults += set(multiResults.values()) diff --git a/sickbeard/searchBacklog.py b/sickbeard/searchBacklog.py index f63d64ad..6a1f75da 100644 --- a/sickbeard/searchBacklog.py +++ b/sickbeard/searchBacklog.py @@ -30,7 +30,6 @@ from sickbeard import ui #from sickbeard.common import * class BacklogSearchScheduler(scheduler.Scheduler): - def forceSearch(self): self.action._set_lastBacklog(1) self.lastRun = datetime.datetime.fromordinal(1) @@ -41,8 +40,8 @@ class BacklogSearchScheduler(scheduler.Scheduler): else: return datetime.date.fromordinal(self.action._lastBacklog + self.action.cycleTime) -class BacklogSearcher: +class BacklogSearcher: def __init__(self): self._lastBacklog = self._get_lastBacklog() @@ -65,7 +64,7 @@ class BacklogSearcher: return None def am_running(self): - logger.log(u"amWaiting: "+str(self.amWaiting)+", amActive: "+str(self.amActive), logger.DEBUG) + logger.log(u"amWaiting: " + str(self.amWaiting) + ", amActive: " + 
str(self.amActive), logger.DEBUG) return (not self.amWaiting) and self.amActive def searchBacklog(self, which_shows=None): @@ -101,9 +100,9 @@ class BacklogSearcher: # figure out how many segments of air by date shows we're going to do air_by_date_segments = [] for cur_id in [x.indexerid for x in air_by_date_shows]: - air_by_date_segments += self._get_air_by_date_segments(cur_id, fromDate) + air_by_date_segments += self._get_air_by_date_segments(cur_id, fromDate) - logger.log(u"Air-by-date segments: "+str(air_by_date_segments), logger.DEBUG) + logger.log(u"Air-by-date segments: " + str(air_by_date_segments), logger.DEBUG) #totalSeasons = float(len(numSeasonResults) + len(air_by_date_segments)) #numSeasonsDone = 0.0 @@ -121,12 +120,14 @@ class BacklogSearcher: for cur_segment in segments: - self.currentSearchInfo = {'title': curShow.name + " Season "+str(cur_segment)} + self.currentSearchInfo = {'title': curShow.name + " Season " + str(cur_segment)} backlog_queue_item = search_queue.BacklogQueueItem(curShow, cur_segment) if not backlog_queue_item.wantSeason: - logger.log(u"Nothing in season "+str(cur_segment)+" needs to be downloaded, skipping this season", logger.DEBUG) + logger.log( + u"Nothing in season " + str(cur_segment) + " needs to be downloaded, skipping this season", + logger.DEBUG) else: sickbeard.searchQueueScheduler.action.add_item(backlog_queue_item) #@UndefinedVariable @@ -159,14 +160,17 @@ class BacklogSearcher: def _get_season_segments(self, indexer_id, fromDate): myDB = db.DBConnection() - sqlResults = myDB.select("SELECT DISTINCT(season) as season FROM tv_episodes WHERE showid = ? AND season > 0 and airdate > ?", [indexer_id, fromDate.toordinal()]) + sqlResults = myDB.select( + "SELECT DISTINCT(season) as season FROM tv_episodes WHERE showid = ? AND season > 0 and airdate > ?", + [indexer_id, fromDate.toordinal()]) return [int(x["season"]) for x in sqlResults] def _get_air_by_date_segments(self, indexer_id, fromDate): # query the DB for all dates for this show myDB = db.DBConnection() - num_air_by_date_results = myDB.select("SELECT airdate, showid FROM tv_episodes ep, tv_shows show WHERE season != 0 AND ep.showid = show.indexer_id AND show.paused = 0 ANd ep.airdate > ? AND ep.showid = ?", - [fromDate.toordinal(), indexer_id]) + num_air_by_date_results = myDB.select( + "SELECT airdate, showid FROM tv_episodes ep, tv_shows show WHERE season != 0 AND ep.showid = show.indexer_id AND show.paused = 0 ANd ep.airdate > ? 
AND ep.showid = ?", + [fromDate.toordinal(), indexer_id]) # break them apart into month/year strings air_by_date_segments = [] @@ -174,11 +178,11 @@ class BacklogSearcher: cur_date = datetime.date.fromordinal(int(cur_result["airdate"])) cur_date_str = str(cur_date)[:7] cur_indexer_id = int(cur_result["showid"]) - + cur_result_tuple = (cur_indexer_id, cur_date_str) if cur_result_tuple not in air_by_date_segments: air_by_date_segments.append(cur_result_tuple) - + return air_by_date_segments def _set_lastBacklog(self, when): diff --git a/sickbeard/searchCurrent.py b/sickbeard/searchCurrent.py index acf85274..c146dcce 100644 --- a/sickbeard/searchCurrent.py +++ b/sickbeard/searchCurrent.py @@ -24,8 +24,8 @@ from sickbeard import search_queue import threading -class CurrentSearcher(): +class CurrentSearcher(): def __init__(self): self.lock = threading.Lock() @@ -33,4 +33,4 @@ class CurrentSearcher(): def run(self): search_queue_item = search_queue.RSSSearchQueueItem() - sickbeard.searchQueueScheduler.action.add_item(search_queue_item) #@UndefinedVariable + sickbeard.searchQueueScheduler.action.add_item(search_queue_item) #@UndefinedVariable diff --git a/sickbeard/search_queue.py b/sickbeard/search_queue.py index eede681d..46f61072 100644 --- a/sickbeard/search_queue.py +++ b/sickbeard/search_queue.py @@ -33,7 +33,6 @@ MANUAL_SEARCH = 30 class SearchQueue(generic_queue.GenericQueue): - def __init__(self): generic_queue.GenericQueue.__init__(self) self.queue_name = "SEARCHQUEUE" @@ -79,6 +78,7 @@ class SearchQueue(generic_queue.GenericQueue): else: logger.log(u"Not adding item, it's already in the queue", logger.DEBUG) + class ManualSearchQueueItem(generic_queue.QueueItem): def __init__(self, ep_obj): generic_queue.QueueItem.__init__(self, 'Manual Search', MANUAL_SEARCH) @@ -97,7 +97,8 @@ class ManualSearchQueueItem(generic_queue.QueueItem): result = False if not foundEpisode: - ui.notifications.message('No downloads were found', "Couldn't find a download for %s" % self.ep_obj.prettyName()) + ui.notifications.message('No downloads were found', + "Couldn't find a download for %s" % self.ep_obj.prettyName()) logger.log(u"Unable to find a download for " + self.ep_obj.prettyName()) else: @@ -109,7 +110,7 @@ class ManualSearchQueueItem(generic_queue.QueueItem): providerModule = foundEpisode.provider if not result: - ui.notifications.error('Error while attempting to snatch ' + foundEpisode.name+', check your logs') + ui.notifications.error('Error while attempting to snatch ' + foundEpisode.name + ', check your logs') elif providerModule == None: ui.notifications.error('Provider is configured incorrectly, unable to download') @@ -121,6 +122,7 @@ class ManualSearchQueueItem(generic_queue.QueueItem): self.success = False generic_queue.QueueItem.finish(self) + class RSSSearchQueueItem(generic_queue.QueueItem): def __init__(self): generic_queue.QueueItem.__init__(self, 'RSS Search', RSS_SEARCH) @@ -150,18 +152,20 @@ class RSSSearchQueueItem(generic_queue.QueueItem): curDate = datetime.date.today().toordinal() myDB = db.DBConnection() - sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE status = ? AND airdate < ?", [common.UNAIRED, curDate]) + sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE status = ? 
AND airdate < ?", + [common.UNAIRED, curDate]) for sqlEp in sqlResults: try: show = helpers.findCertainShow(sickbeard.showList, int(sqlEp["showid"])) except exceptions.MultipleShowObjectsException: - logger.log(u"ERROR: expected to find a single show matching " + sqlEp["showid"]) + logger.log(u"ERROR: expected to find a single show matching " + str(sqlEp["showid"])) return None if show == None: - logger.log(u"Unable to find the show with ID " + str(sqlEp["showid"]) + " in your show list! DB value was " + str(sqlEp), logger.ERROR) + logger.log(u"Unable to find the show with ID " + str( + sqlEp["showid"]) + " in your show list! DB value was " + str(sqlEp), logger.ERROR) return None ep = show.getEpisode(sqlEp["season"], sqlEp["episode"]) @@ -172,6 +176,7 @@ class RSSSearchQueueItem(generic_queue.QueueItem): ep.status = common.WANTED ep.saveToDB() + class BacklogQueueItem(generic_queue.QueueItem): def __init__(self, show, segment): generic_queue.QueueItem.__init__(self, 'Backlog', BACKLOG_SEARCH) @@ -187,7 +192,8 @@ class BacklogQueueItem(generic_queue.QueueItem): # see if there is anything in this season worth searching for if not self.show.air_by_date: - statusResults = myDB.select("SELECT status FROM tv_episodes WHERE showid = ? AND season = ?", [self.show.indexerid, self.segment]) + statusResults = myDB.select("SELECT status FROM tv_episodes WHERE showid = ? AND season = ?", + [self.show.indexerid, self.segment]) else: segment_year, segment_month = map(int, self.segment.split('-')) min_date = datetime.date(segment_year, segment_month, 1) @@ -198,10 +204,11 @@ class BacklogQueueItem(generic_queue.QueueItem): else: max_date = datetime.date(segment_year, segment_month + 1, 1) - datetime.timedelta(days=1) - statusResults = myDB.select("SELECT status FROM tv_episodes WHERE showid = ? AND airdate >= ? AND airdate <= ?", - [self.show.indexerid, min_date.toordinal(), max_date.toordinal()]) + statusResults = myDB.select( + "SELECT status FROM tv_episodes WHERE showid = ? AND airdate >= ? 
AND airdate <= ?", + [self.show.indexerid, min_date.toordinal(), max_date.toordinal()]) - anyQualities, bestQualities = common.Quality.splitQuality(self.show.quality) #@UnusedVariable + anyQualities, bestQualities = common.Quality.splitQuality(self.show.quality) #@UnusedVariable self.wantSeason = self._need_any_episodes(statusResults, bestQualities) def execute(self): @@ -232,14 +239,15 @@ class BacklogQueueItem(generic_queue.QueueItem): highestBestQuality = 0 # if we need a better one then say yes - if (curStatus in (common.DOWNLOADED, common.SNATCHED, common.SNATCHED_PROPER, common.SNATCHED_BEST) and curQuality < highestBestQuality) or curStatus == common.WANTED: + if (curStatus in (common.DOWNLOADED, common.SNATCHED, common.SNATCHED_PROPER, + common.SNATCHED_BEST) and curQuality < highestBestQuality) or curStatus == common.WANTED: wantSeason = True break return wantSeason -class FailedQueueItem(generic_queue.QueueItem): +class FailedQueueItem(generic_queue.QueueItem): def __init__(self, show, segment): generic_queue.QueueItem.__init__(self, 'Retry', MANUAL_SEARCH) self.priority = generic_queue.QueuePriorities.HIGH diff --git a/sickbeard/showUpdater.py b/sickbeard/showUpdater.py index 46b4d0a3..77fffa3d 100644 --- a/sickbeard/showUpdater.py +++ b/sickbeard/showUpdater.py @@ -28,10 +28,8 @@ from sickbeard.exceptions import ex from sickbeard import encodingKludge as ek from sickbeard import db -from indexers.indexer_api import indexerApi class ShowUpdater(): - def __init__(self): self.updateInterval = datetime.timedelta(hours=1) @@ -55,8 +53,8 @@ class ShowUpdater(): # clean out cache directory, remove everything > 12 hours old if sickbeard.CACHE_DIR: - for indexer in indexerApi().indexers: - cache_dir = indexerApi(indexer=indexer).cache + for indexer in sickbeard.indexerApi().indexers: + cache_dir = sickbeard.indexerApi(indexer).config['api_params']['cache'] logger.log(u"Trying to clean cache folder " + cache_dir) # Does our cache_dir exists @@ -71,13 +69,15 @@ class ShowUpdater(): cache_file_path = ek.ek(os.path.join, cache_dir, cache_file) if ek.ek(os.path.isfile, cache_file_path): - cache_file_modified = datetime.datetime.fromtimestamp(ek.ek(os.path.getmtime, cache_file_path)) + cache_file_modified = datetime.datetime.fromtimestamp( + ek.ek(os.path.getmtime, cache_file_path)) if update_datetime - cache_file_modified > max_age: try: ek.ek(os.remove, cache_file_path) except OSError, e: - logger.log(u"Unable to clean " + cache_dir + ": " + repr(e) + " / " + str(e), logger.WARNING) + logger.log(u"Unable to clean " + cache_dir + ": " + repr(e) + " / " + str(e), + logger.WARNING) break # select 10 'Ended' tv_shows updated more than 90 days ago to include in this update @@ -86,10 +86,12 @@ class ShowUpdater(): myDB = db.DBConnection() # last_update_date <= 90 days, sorted ASC because dates are ordinal - sql_result = myDB.select("SELECT indexer_id FROM tv_shows WHERE status = 'Ended' AND last_update_indexer <= ? ORDER BY last_update_indexer ASC LIMIT 10;", [stale_update_date]) + sql_result = myDB.select( + "SELECT indexer_id FROM tv_shows WHERE status = 'Ended' AND last_update_indexer <= ? 
ORDER BY last_update_indexer ASC LIMIT 10;", + [stale_update_date]) for cur_result in sql_result: - stale_should_update.append(cur_result['indexer_id']) + stale_should_update.append(int(cur_result['indexer_id'])) # start update process piList = [] @@ -100,7 +102,9 @@ class ShowUpdater(): if curShow.should_update(update_date=update_date) or curShow.indexerid in stale_should_update: curQueueItem = sickbeard.showQueueScheduler.action.updateShow(curShow, True) # @UndefinedVariable else: - logger.log(u"Not updating episodes for show " + curShow.name + " because it's marked as ended and last/next episode is not within the grace period.", logger.DEBUG) + logger.log( + u"Not updating episodes for show " + curShow.name + " because it's marked as ended and last/next episode is not within the grace period.", + logger.DEBUG) curQueueItem = sickbeard.showQueueScheduler.action.refreshShow(curShow, True) # @UndefinedVariable piList.append(curQueueItem) diff --git a/sickbeard/show_name_helpers.py b/sickbeard/show_name_helpers.py index cdefe2f4..2460c166 100644 --- a/sickbeard/show_name_helpers.py +++ b/sickbeard/show_name_helpers.py @@ -33,9 +33,10 @@ from lib.unidecode import unidecode resultFilters = ["sub(pack|s|bed)", "swesub(bed)?", - "(dir|sample|sub|nfo)fix", "sample", "(dvd)?extras", + "(dir|sample|sub|nfo)fix", "sample", "(dvd)?extras", "dub(bed)?"] + def filterBadReleases(name): """ Filters out non-english and just all-around stupid releases by comparing them @@ -75,6 +76,7 @@ def filterBadReleases(name): return True + def sceneToNormalShowNames(name): """ Takes a show name from a scene dirname and converts it to a more "human-readable" format. @@ -88,7 +90,7 @@ def sceneToNormalShowNames(name): return [] name_list = [name] - + # use both and and & new_name = re.sub('(?i)([\. ])and([\. ])', '\\1&\\2', name, re.I) if new_name not in name_list: @@ -99,7 +101,7 @@ def sceneToNormalShowNames(name): for cur_name in name_list: # add brackets around the year results.append(re.sub('(\D)(\d{4})$', '\\1(\\2)', cur_name)) - + # add brackets around the country country_match_str = '|'.join(countryList.values()) results.append(re.sub('(?i)([. _-])(' + country_match_str + ')$', '\\1(\\2)', cur_name)) @@ -108,8 +110,8 @@ def sceneToNormalShowNames(name): return list(set(results)) -def makeSceneShowSearchStrings(show): +def makeSceneShowSearchStrings(show): showNames = allPossibleShowNames(show) # scenify the names @@ -117,17 +119,18 @@ def makeSceneShowSearchStrings(show): def makeSceneSeasonSearchString(show, segment, extraSearchType=None): - myDB = db.DBConnection() if show.air_by_date: numseasons = 0 - + # the search string for air by date shows is just seasonStrings = [segment] - + else: - numseasonsSQlResult = myDB.select("SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? and season != 0", [show.indexerid]) + numseasonsSQlResult = myDB.select( + "SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? and season != 0", + [show.indexerid]) numseasons = int(numseasonsSQlResult[0][0]) seasonStrings = ["S%02d" % segment] @@ -147,16 +150,19 @@ def makeSceneSeasonSearchString(show, segment, extraSearchType=None): else: for cur_season in seasonStrings: toReturn.append(curShow + "." + cur_season) - + return toReturn def makeSceneSearchString(episode): - myDB = db.DBConnection() - numseasonsSQlResult = myDB.select("SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? 
and season != 0", [episode.show.indexerid]) + numseasonsSQlResult = myDB.select( + "SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? and season != 0", + [episode.show.indexerid]) numseasons = int(numseasonsSQlResult[0][0]) - numepisodesSQlResult = myDB.select("SELECT COUNT(episode) as numepisodes FROM tv_episodes WHERE showid = ? and season != 0", [episode.show.indexerid]) + numepisodesSQlResult = myDB.select( + "SELECT COUNT(episode) as numepisodes FROM tv_episodes WHERE showid = ? and season != 0", + [episode.show.indexerid]) numepisodes = int(numepisodesSQlResult[0][0]) # see if we should use dates instead of episodes @@ -164,7 +170,7 @@ def makeSceneSearchString(episode): epStrings = [str(episode.airdate)] else: epStrings = ["S%02iE%02i" % (int(episode.season), int(episode.episode)), - "%ix%02i" % (int(episode.season), int(episode.episode))] + "%ix%02i" % (int(episode.season), int(episode.episode))] # for single-season shows just search for the show name -- if total ep count (exclude s0) is less than 11 # due to the amount of qualities and releases, it is easy to go over the 50 result limit on rss feeds otherwise @@ -181,6 +187,7 @@ def makeSceneSearchString(episode): return toReturn + def isGoodResult(name, show, log=True): """ Use an automatically-created regex to make sure the result actually is the show it claims to be @@ -188,7 +195,7 @@ def isGoodResult(name, show, log=True): all_show_names = allPossibleShowNames(show) showNames = map(sanitizeSceneName, all_show_names) + all_show_names - showNames += map(unidecode, all_show_names) + showNames += map(unidecode, all_show_names) for curName in set(showNames): escaped_name = re.sub('\\\\[\\s.-]', '\W+', re.escape(curName)) @@ -205,9 +212,11 @@ def isGoodResult(name, show, log=True): return True if log: - logger.log(u"Provider gave result " + name + " but that doesn't seem like a valid result for "+show.name+" so I'm ignoring it") + logger.log( + u"Provider gave result " + name + " but that doesn't seem like a valid result for " + show.name + " so I'm ignoring it") return False + def allPossibleShowNames(show): """ Figures out every possible variation of the name for a particular show. 
Includes TVDB name, TVRage name, @@ -259,7 +268,8 @@ def determineReleaseName(dir_name=None, nzb_name=None): for search in file_types: reg_expr = re.compile(fnmatch.translate(search), re.IGNORECASE) - files = [file_name for file_name in ek.ek(os.listdir, dir_name) if ek.ek(os.path.isfile, ek.ek(os.path.join, dir_name, file_name))] + files = [file_name for file_name in ek.ek(os.listdir, dir_name) if + ek.ek(os.path.isfile, ek.ek(os.path.join, dir_name, file_name))] results = filter(reg_expr.search, files) if len(results) == 1: diff --git a/sickbeard/show_queue.py b/sickbeard/show_queue.py index c40b79ec..437d4bae 100644 --- a/sickbeard/show_queue.py +++ b/sickbeard/show_queue.py @@ -30,10 +30,8 @@ from sickbeard import generic_queue from sickbeard import name_cache from sickbeard.exceptions import ex -from sickbeard.indexers import indexer_api, indexer_exceptions class ShowQueue(generic_queue.GenericQueue): - def __init__(self): generic_queue.GenericQueue.__init__(self) @@ -45,7 +43,7 @@ class ShowQueue(generic_queue.GenericQueue): def _isBeingSomethinged(self, show, actions): return self.currentItem != None and show == self.currentItem.show and \ - self.currentItem.action_id in actions + self.currentItem.action_id in actions def isInUpdateQueue(self, show): return self._isInQueue(show, (ShowQueueActions.UPDATE, ShowQueueActions.FORCEUPDATE)) @@ -55,7 +53,7 @@ class ShowQueue(generic_queue.GenericQueue): def isInRenameQueue(self, show): return self._isInQueue(show, (ShowQueueActions.RENAME,)) - + def isInSubtitleQueue(self, show): return self._isInQueue(show, (ShowQueueActions.SUBTITLE,)) @@ -70,7 +68,7 @@ class ShowQueue(generic_queue.GenericQueue): def isBeingRenamed(self, show): return self._isBeingSomethinged(show, (ShowQueueActions.RENAME,)) - + def isBeingSubtitled(self, show): return self._isBeingSomethinged(show, (ShowQueueActions.SUBTITLE,)) @@ -82,13 +80,16 @@ class ShowQueue(generic_queue.GenericQueue): def updateShow(self, show, force=False): if self.isBeingAdded(show): - raise exceptions.CantUpdateException("Show is still being added, wait until it is finished before you update.") + raise exceptions.CantUpdateException( + "Show is still being added, wait until it is finished before you update.") if self.isBeingUpdated(show): - raise exceptions.CantUpdateException("This show is already being updated, can't update again until it's done.") + raise exceptions.CantUpdateException( + "This show is already being updated, can't update again until it's done.") if self.isInUpdateQueue(show): - raise exceptions.CantUpdateException("This show is already being updated, can't update again until it's done.") + raise exceptions.CantUpdateException( + "This show is already being updated, can't update again until it's done.") if not force: queueItemObj = QueueItemUpdate(show) @@ -105,11 +106,13 @@ class ShowQueue(generic_queue.GenericQueue): raise exceptions.CantRefreshException("This show is already being refreshed, not refreshing again.") if (self.isBeingUpdated(show) or self.isInUpdateQueue(show)) and not force: - logger.log(u"A refresh was attempted but there is already an update queued or in progress. Since updates do a refres at the end anyway I'm skipping this request.", logger.DEBUG) + logger.log( + u"A refresh was attempted but there is already an update queued or in progress. 
Since updates do a refresh at the end anyway I'm skipping this request.", +                logger.DEBUG)             return          queueItemObj = QueueItemRefresh(show) -         +         self.add_item(queueItemObj)          return queueItemObj @@ -121,7 +124,7 @@ class ShowQueue(generic_queue.GenericQueue):         self.add_item(queueItemObj)          return queueItemObj -     +     def downloadSubtitles(self, show, force=False):          queueItemObj = QueueItemSubtitle(show) @@ -130,28 +133,32 @@ class ShowQueue(generic_queue.GenericQueue):          return queueItemObj -    def addShow(self, indexer, indexer_id, showDir, default_status=None, quality=None, flatten_folders=None, subtitles=None, lang="en", refresh=False): -        queueItemObj = QueueItemAdd(indexer, indexer_id, showDir, default_status, quality, flatten_folders, lang, subtitles, refresh) -         +    def addShow(self, indexer, indexer_id, showDir, default_status=None, quality=None, flatten_folders=None, +                subtitles=None, lang="en", refresh=False): +        queueItemObj = QueueItemAdd(indexer, indexer_id, showDir, default_status, quality, flatten_folders, lang, +                                    subtitles, refresh) +         self.add_item(queueItemObj)          return queueItemObj +  class ShowQueueActions: -    REFRESH=1 -    ADD=2 -    UPDATE=3 -    FORCEUPDATE=4 -    RENAME=5 -    SUBTITLE=6 -     +    REFRESH = 1 +    ADD = 2 +    UPDATE = 3 +    FORCEUPDATE = 4 +    RENAME = 5 +    SUBTITLE = 6 +     names = {REFRESH: 'Refresh', -             ADD: 'Add', -             UPDATE: 'Update', -             FORCEUPDATE: 'Force Update', -             RENAME: 'Rename', -             SUBTITLE: 'Subtitle', -             } +             ADD: 'Add', +             UPDATE: 'Update', +             FORCEUPDATE: 'Force Update', +             RENAME: 'Rename', +             SUBTITLE: 'Subtitle', +             } +  class ShowQueueItem(generic_queue.QueueItem):     """ @@ -164,12 +171,14 @@ class ShowQueueItem(generic_queue.QueueItem):         - show being force updated         - show being subtitled     """ +     def __init__(self, action_id, show):         generic_queue.QueueItem.__init__(self, ShowQueueActions.names[action_id], action_id)         self.show = show -     +     def isInQueue(self): -        return self in sickbeard.showQueueScheduler.action.queue + [sickbeard.showQueueScheduler.action.currentItem] #@UndefinedVariable +        return self in sickbeard.showQueueScheduler.action.queue + [ sickbeard.showQueueScheduler.action.currentItem]  #@UndefinedVariable      def _getName(self):         return str(self.show.indexerid) @@ -183,7 +192,8 @@ class ShowQueueItem(generic_queue.QueueItem):  class QueueItemAdd(ShowQueueItem): -    def __init__(self, indexer, indexer_id, showDir, default_status, quality, flatten_folders, lang, subtitles, refresh): +    def __init__(self, indexer, indexer_id, showDir, default_status, quality, flatten_folders, lang, subtitles, +                 refresh):          self.indexer = indexer         self.indexer_id = indexer_id @@ -199,7 +209,7 @@ class QueueItemAdd(ShowQueueItem):          # this will initialize self.show to None         ShowQueueItem.__init__(self, ShowQueueActions.ADD, self.show) -     +     def _getName(self):         """         Returns the show name if there is a show object created, if not returns @@ -235,26 +245,36 @@ class QueueItemAdd(ShowQueueItem):                 if self.lang:                     lINDEXER_API_PARMS['language'] = self.lang -                logger.log(u"" + self.indexer + ": " + repr(lINDEXER_API_PARMS)) +                logger.log(u"" + sickbeard.indexerApi(self.indexer).name + ": " + repr(lINDEXER_API_PARMS)) -                t = indexer_api.indexerApi(**lINDEXER_API_PARMS) +                t = sickbeard.indexerApi(**lINDEXER_API_PARMS)                 s = t[self.indexer_id]                  # this usually only happens if they have an NFO in their show dir which gave us a indexer ID that has no proper english version of the show                 if getattr(s, 'seriesname', None) is None: -                    logger.log(u"Show in " + self.showDir + " has no name on " + self.indexer + ", probably the wrong language used to search with.", 
logger.ERROR) -                    ui.notifications.error("Unable to add show", "Show in " + self.showDir + " has no name on " + self.indexer + ", probably the wrong language. Delete .nfo and add manually in the correct language.") +                    logger.log(u"Show in " + self.showDir + " has no name on " + sickbeard.indexerApi( +                        self.indexer).name + ", probably the wrong language used to search with.", logger.ERROR) +                    ui.notifications.error("Unable to add show", +                                           "Show in " + self.showDir + " has no name on " + sickbeard.indexerApi( +                                               self.indexer).name + ", probably the wrong language. Delete .nfo and add manually in the correct language.")                     self._finishEarly()                     return                  # if the show has no episodes/seasons                 if not s: -                    logger.log(u"Show " + str(s['seriesname']) + " is on " + self.indexer + " but contains no season/episode data.", logger.ERROR) -                    ui.notifications.error("Unable to add show", "Show " + str(s['seriesname']) + " is on " + self.indexer + " but contains no season/episode data.") +                    logger.log(u"Show " + str(s['seriesname']) + " is on " + sickbeard.indexerApi( +                        self.indexer).name + " but contains no season/episode data.", logger.ERROR) +                    ui.notifications.error("Unable to add show", +                                           "Show " + str(s['seriesname']) + " is on " + sickbeard.indexerApi( +                                               self.indexer).name + " but contains no season/episode data.")                     self._finishEarly()                     return             except Exception, e: -                logger.log(u"Unable to find show ID:" + self.indexer_id + "on Indexer: " + self.indexer, logger.ERROR) -                ui.notifications.error("Unable to add show", "Unable to look up the show in " + self.showDir + " on " + self.indexer + " using ID " + self.indexer_id + ", not using the NFO. Delete .nfo and try adding manually again.") +                logger.log(u"Unable to find show ID: " + str(self.indexer_id) + " on Indexer: " + sickbeard.indexerApi( +                    self.indexer).name, logger.ERROR) +                ui.notifications.error("Unable to add show", +                                       "Unable to look up the show in " + self.showDir + " on " + sickbeard.indexerApi( +                                           self.indexer).name + " using ID " + str( +                                           self.indexer_id) + ", not using the NFO. 
Delete .nfo and try adding manually again.") self._finishEarly() return @@ -273,17 +293,22 @@ class QueueItemAdd(ShowQueueItem): self.show.quality = self.quality if self.quality else sickbeard.QUALITY_DEFAULT self.show.flatten_folders = self.flatten_folders if self.flatten_folders != None else sickbeard.FLATTEN_FOLDERS_DEFAULT self.show.paused = False - + # be smartish about this if self.show.genre and "talk show" in self.show.genre.lower() or "sports" in self.show.classification.lower(): self.show.air_by_date = 1 - except indexer_exceptions.indexer_exception, e: - logger.log(u"Unable to add show due to an error with " + self.indexer + ": " + ex(e), logger.ERROR) + except sickbeard.indexer_exception, e: + logger.log( + u"Unable to add show due to an error with " + sickbeard.indexerApi(self.indexer).name + ": " + ex(e), + logger.ERROR) if self.show: - ui.notifications.error("Unable to add " + str(self.show.name) + " due to an error with " + self.indexer + "") + ui.notifications.error( + "Unable to add " + str(self.show.name) + " due to an error with " + sickbeard.indexerApi( + self.indexer).name + "") else: - ui.notifications.error("Unable to add show due to an error with " + self.indexer + "") + ui.notifications.error( + "Unable to add show due to an error with " + sickbeard.indexerApi(self.indexer).name + "") self._finishEarly() return @@ -294,7 +319,7 @@ class QueueItemAdd(ShowQueueItem): return except Exception, e: - logger.log(u"Error trying to add show: "+ex(e), logger.ERROR) + logger.log(u"Error trying to add show: " + ex(e), logger.ERROR) logger.log(traceback.format_exc(), logger.DEBUG) self._finishEarly() raise @@ -307,7 +332,6 @@ class QueueItemAdd(ShowQueueItem): logger.log(u" Something wrong on IMDb api: " + ex(e), logger.WARNING) except Exception, e: logger.log(u"Error loading IMDb info: " + ex(e), logger.ERROR) - logger.log(traceback.format_exc(), logger.DEBUG) try: self.show.saveToDB() @@ -316,14 +340,16 @@ class QueueItemAdd(ShowQueueItem): logger.log(traceback.format_exc(), logger.DEBUG) self._finishEarly() raise - + # add it to the show list - sickbeard.showList.append(self.show) + sickbeard.showList.append(self.show) try: self.show.loadEpisodesFromIndexer() except Exception, e: - logger.log(u"Error with " + self.show.indexer + ", not creating episode list: " + ex(e), logger.ERROR) + logger.log( + u"Error with " + sickbeard.indexerApi(self.show.indexer).name + ", not creating episode list: " + ex(e), + logger.ERROR) logger.log(traceback.format_exc(), logger.DEBUG) try: @@ -336,12 +362,13 @@ class QueueItemAdd(ShowQueueItem): if self.default_status != SKIPPED: logger.log(u"Setting all episodes to the specified default status: " + str(self.default_status)) myDB = db.DBConnection() - myDB.action("UPDATE tv_episodes SET status = ? WHERE status = ? AND showid = ? AND season != 0", [self.default_status, SKIPPED, self.show.indexerid]) + myDB.action("UPDATE tv_episodes SET status = ? WHERE status = ? AND showid = ? 
AND season != 0", + [self.default_status, SKIPPED, self.show.indexerid]) # if they started with WANTED eps then run the backlog if self.default_status == WANTED: logger.log(u"Launching backlog for this show since its episodes are WANTED") - sickbeard.backlogSearchScheduler.action.searchBacklog([self.show]) #@UndefinedVariable + sickbeard.backlogSearchScheduler.action.searchBacklog([self.show]) #@UndefinedVariable self.show.writeMetadata(force=self.refresh) self.show.populateCache() @@ -368,7 +395,6 @@ class QueueItemRefresh(ShowQueueItem): self.priority = generic_queue.QueuePriorities.HIGH def execute(self): - ShowQueueItem.execute(self) logger.log(u"Performing refresh on " + self.show.name) @@ -420,15 +446,15 @@ class QueueItemRename(ShowQueueItem): self.inProgress = False + class QueueItemSubtitle(ShowQueueItem): def __init__(self, show=None): ShowQueueItem.__init__(self, ShowQueueActions.SUBTITLE, show) def execute(self): - ShowQueueItem.execute(self) - logger.log(u"Downloading subtitles for "+self.show.name) + logger.log(u"Downloading subtitles for " + self.show.name) self.show.downloadSubtitles() @@ -446,14 +472,16 @@ class QueueItemUpdate(ShowQueueItem): logger.log(u"Beginning update of " + self.show.name) - logger.log(u"Retrieving show info from " + self.show.indexer + "", logger.DEBUG) + logger.log(u"Retrieving show info from " + sickbeard.indexerApi(self.show.indexer).name + "", logger.DEBUG) try: - self.show.loadFromIndexer(cache=self.force) - except indexer_exceptions.indexer_error, e: - logger.log(u"Unable to contact " + self.show.indexer + ", aborting: " + ex(e), logger.WARNING) + self.show.loadFromIndexer(cache=not self.force) + except sickbeard.indexer_error, e: + logger.log(u"Unable to contact " + sickbeard.indexerApi(self.show.indexer).name + ", aborting: " + ex(e), + logger.WARNING) return - except indexer_exceptions.indexer_attributenotfound, e: - logger.log(u"Data retrieved from " + self.show.indexer + " was incomplete, aborting: " + ex(e), logger.ERROR) + except sickbeard.indexer_attributenotfound, e: + logger.log(u"Data retrieved from " + sickbeard.indexerApi( + self.show.indexer).name + " was incomplete, aborting: " + ex(e), logger.ERROR) return logger.log(u"Retrieving show info from IMDb", logger.DEBUG) @@ -464,48 +492,53 @@ class QueueItemUpdate(ShowQueueItem): except Exception, e: logger.log(u"Error loading IMDb info: " + ex(e), logger.ERROR) logger.log(traceback.format_exc(), logger.DEBUG) - + try: self.show.saveToDB() except Exception, e: logger.log(u"Error saving the episode to the database: " + ex(e), logger.ERROR) logger.log(traceback.format_exc(), logger.DEBUG) - + # get episode list from DB logger.log(u"Loading all episodes from the database", logger.DEBUG) DBEpList = self.show.loadEpisodesFromDB() # get episode list from TVDB - logger.log(u"Loading all episodes from " + self.show.indexer + "", logger.DEBUG) + logger.log(u"Loading all episodes from " + sickbeard.indexerApi(self.show.indexer).name + "", logger.DEBUG) try: IndexerEpList = self.show.loadEpisodesFromIndexer(cache=not self.force) - except indexer_exceptions.indexer_exception, e: - logger.log(u"Unable to get info from " + self.show.indexer + ", the show info will not be refreshed: " + ex(e), logger.ERROR) + except sickbeard.indexer_exception, e: + logger.log(u"Unable to get info from " + sickbeard.indexerApi( + self.show.indexer).name + ", the show info will not be refreshed: " + ex(e), logger.ERROR) IndexerEpList = None if IndexerEpList == None: - logger.log(u"No data returned from " + 
self.show.indexer + ", unable to update this show", logger.ERROR) + logger.log(u"No data returned from " + sickbeard.indexerApi( + self.show.indexer).name + ", unable to update this show", logger.ERROR) else: # for each ep we found on TVDB delete it from the DB list for curSeason in IndexerEpList: for curEpisode in IndexerEpList[curSeason]: - logger.log(u"Removing " + str(curSeason) + "x" + str(curEpisode) + " from the DB list", logger.DEBUG) + logger.log(u"Removing " + str(curSeason) + "x" + str(curEpisode) + " from the DB list", + logger.DEBUG) if curSeason in DBEpList and curEpisode in DBEpList[curSeason]: del DBEpList[curSeason][curEpisode] # for the remaining episodes in the DB list just delete them from the DB for curSeason in DBEpList: for curEpisode in DBEpList[curSeason]: - logger.log(u"Permanently deleting episode " + str(curSeason) + "x" + str(curEpisode) + " from the database", logger.MESSAGE) + logger.log(u"Permanently deleting episode " + str(curSeason) + "x" + str( + curEpisode) + " from the database", logger.MESSAGE) curEp = self.show.getEpisode(curSeason, curEpisode) try: curEp.deleteEpisode() except exceptions.EpisodeDeletedException: pass - sickbeard.showQueueScheduler.action.refreshShow(self.show, True) #@UndefinedVariable + sickbeard.showQueueScheduler.action.refreshShow(self.show, True) #@UndefinedVariable + class QueueItemForceUpdate(QueueItemUpdate): def __init__(self, show=None): diff --git a/sickbeard/subtitles.py b/sickbeard/subtitles.py index e4b0bfd5..e765268c 100644 --- a/sickbeard/subtitles.py +++ b/sickbeard/subtitles.py @@ -28,6 +28,8 @@ from sickbeard import history from lib import subliminal SINGLE = 'und' + + def sortedServiceList(): servicesMapping = dict([(x.lower(), x) for x in subliminal.core.SERVICES]) @@ -37,33 +39,50 @@ def sortedServiceList(): curIndex = 0 for curService in sickbeard.SUBTITLES_SERVICES_LIST: if curService in servicesMapping: - curServiceDict = {'id': curService, 'image': curService+'.png', 'name': servicesMapping[curService], 'enabled': sickbeard.SUBTITLES_SERVICES_ENABLED[curIndex] == 1, 'api_based': __import__('lib.subliminal.services.' + curService, globals=globals(), locals=locals(), fromlist=['Service'], level=-1).Service.api_based, 'url': __import__('lib.subliminal.services.' + curService, globals=globals(), locals=locals(), fromlist=['Service'], level=-1).Service.site_url} + curServiceDict = {'id': curService, 'image': curService + '.png', 'name': servicesMapping[curService], + 'enabled': sickbeard.SUBTITLES_SERVICES_ENABLED[curIndex] == 1, + 'api_based': __import__('lib.subliminal.services.' + curService, globals=globals(), + locals=locals(), fromlist=['Service'], + level=-1).Service.api_based, + 'url': __import__('lib.subliminal.services.' + curService, globals=globals(), + locals=locals(), fromlist=['Service'], level=-1).Service.site_url} newList.append(curServiceDict) curIndex += 1 # add any services that are missing from that list for curService in servicesMapping.keys(): if curService not in [x['id'] for x in newList]: - curServiceDict = {'id': curService, 'image': curService+'.png', 'name': servicesMapping[curService], 'enabled': False, 'api_based': __import__('lib.subliminal.services.' + curService, globals=globals(), locals=locals(), fromlist=['Service'], level=-1).Service.api_based, 'url': __import__('lib.subliminal.services.' 
+ curService, globals=globals(), locals=locals(), fromlist=['Service'], level=-1).Service.site_url} + curServiceDict = {'id': curService, 'image': curService + '.png', 'name': servicesMapping[curService], + 'enabled': False, + 'api_based': __import__('lib.subliminal.services.' + curService, globals=globals(), + locals=locals(), fromlist=['Service'], + level=-1).Service.api_based, + 'url': __import__('lib.subliminal.services.' + curService, globals=globals(), + locals=locals(), fromlist=['Service'], level=-1).Service.site_url} newList.append(curServiceDict) return newList - + + def getEnabledServiceList(): return [x['name'] for x in sortedServiceList() if x['enabled']] - + + def isValidLanguage(language): return subliminal.language.language_list(language) + def getLanguageName(selectLang): return subliminal.language.Language(selectLang).name -def wantedLanguages(sqlLike = False): + +def wantedLanguages(sqlLike=False): wantedLanguages = sorted(sickbeard.SUBTITLES_LANGUAGES) if sqlLike: return '%' + ','.join(wantedLanguages) + '%' return wantedLanguages + def subtitlesLanguages(video_path): """Return a list detected subtitles for the given video file""" video = subliminal.videos.Video.from_path(video_path) @@ -76,22 +95,27 @@ def subtitlesLanguages(video_path): languages.add(SINGLE) return list(languages) + # Return a list with languages that have alpha2 code def subtitleLanguageFilter(): return [language for language in subliminal.language.LANGUAGES if language[2] != ""] + class SubtitlesFinder(): """ The SubtitlesFinder will be executed every hour but will not necessarly search and download subtitles. Only if the defined rule is true """ + def run(self): # TODO: Put that in the __init__ before starting the thread? if not sickbeard.USE_SUBTITLES: logger.log(u'Subtitles support disabled', logger.DEBUG) return if len(sickbeard.subtitles.getEnabledServiceList()) < 1: - logger.log(u'Not enough services selected. At least 1 service is required to search subtitles in the background', logger.ERROR) + logger.log( + u'Not enough services selected. At least 1 service is required to search subtitles in the background', + logger.ERROR) return logger.log(u'Checking for subtitles', logger.MESSAGE) @@ -102,40 +126,51 @@ class SubtitlesFinder(): # - episode subtitles != config wanted languages or SINGLE (depends on config multi) # - search count < 2 and diff(airdate, now) > 1 week : now -> 1d # - search count < 7 and diff(airdate, now) <= 1 week : now -> 4h -> 8h -> 16h -> 1d -> 1d -> 1d - + myDB = db.DBConnection() today = datetime.date.today().toordinal() # you have 5 minutes to understand that one. Good luck - sqlResults = myDB.select('SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.subtitles, e.subtitles_searchcount AS searchcount, e.subtitles_lastsearch AS lastsearch, e.location, (? - e.airdate) AS airdate_daydiff FROM tv_episodes AS e INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id) WHERE s.subtitles = 1 AND e.subtitles NOT LIKE (?) AND ((e.subtitles_searchcount <= 2 AND (? - e.airdate) > 7) OR (e.subtitles_searchcount <= 7 AND (? - e.airdate) <= 7)) AND (e.status IN ('+','.join([str(x) for x in Quality.DOWNLOADED])+') OR (e.status IN ('+','.join([str(x) for x in Quality.SNATCHED + Quality.SNATCHED_PROPER])+') AND e.location != ""))', [today, wantedLanguages(True), today, today]) + sqlResults = myDB.select( + 'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.subtitles, e.subtitles_searchcount AS searchcount, e.subtitles_lastsearch AS lastsearch, e.location, (? 
- e.airdate) AS airdate_daydiff FROM tv_episodes AS e INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id) WHERE s.subtitles = 1 AND e.subtitles NOT LIKE (?) AND ((e.subtitles_searchcount <= 2 AND (? - e.airdate) > 7) OR (e.subtitles_searchcount <= 7 AND (? - e.airdate) <= 7)) AND (e.status IN (' + ','.join( + [str(x) for x in Quality.DOWNLOADED]) + ') OR (e.status IN (' + ','.join( + [str(x) for x in Quality.SNATCHED + Quality.SNATCHED_PROPER]) + ') AND e.location != ""))', + [today, wantedLanguages(True), today, today]) if len(sqlResults) == 0: logger.log('No subtitles to download', logger.MESSAGE) return - + rules = self._getRules() - now = datetime.datetime.now(); + now = datetime.datetime.now() for epToSub in sqlResults: if not ek.ek(os.path.isfile, epToSub['location']): - logger.log('Episode file does not exist, cannot download subtitles for episode %dx%d of show %s' % (epToSub['season'], epToSub['episode'], epToSub['show_name']), logger.DEBUG) + logger.log('Episode file does not exist, cannot download subtitles for episode %dx%d of show %s' % ( + epToSub['season'], epToSub['episode'], epToSub['show_name']), logger.DEBUG) continue - + # Old shows rule - if ((epToSub['airdate_daydiff'] > 7 and epToSub['searchcount'] < 2 and now - datetime.datetime.strptime(epToSub['lastsearch'], '%Y-%m-%d %H:%M:%S') > datetime.timedelta(hours=rules['old'][epToSub['searchcount']])) or + if ((epToSub['airdate_daydiff'] > 7 and epToSub['searchcount'] < 2 and now - datetime.datetime.strptime( + epToSub['lastsearch'], '%Y-%m-%d %H:%M:%S') > datetime.timedelta( + hours=rules['old'][epToSub['searchcount']])) or # Recent shows rule - (epToSub['airdate_daydiff'] <= 7 and epToSub['searchcount'] < 7 and now - datetime.datetime.strptime(epToSub['lastsearch'], '%Y-%m-%d %H:%M:%S') > datetime.timedelta(hours=rules['new'][epToSub['searchcount']]))): - logger.log('Downloading subtitles for episode %dx%d of show %s' % (epToSub['season'], epToSub['episode'], epToSub['show_name']), logger.DEBUG) - + (epToSub['airdate_daydiff'] <= 7 and epToSub[ + 'searchcount'] < 7 and now - datetime.datetime.strptime(epToSub['lastsearch'], + '%Y-%m-%d %H:%M:%S') > datetime.timedelta( + hours=rules['new'][epToSub['searchcount']]))): + logger.log('Downloading subtitles for episode %dx%d of show %s' % ( + epToSub['season'], epToSub['episode'], epToSub['show_name']), logger.DEBUG) + showObj = helpers.findCertainShow(sickbeard.showList, int(epToSub['showid'])) if not showObj: logger.log(u'Show not found', logger.DEBUG) return - + epObj = showObj.getEpisode(int(epToSub["season"]), int(epToSub["episode"])) if isinstance(epObj, str): logger.log(u'Episode not found', logger.DEBUG) return - + previous_subtitles = epObj.subtitles - + try: subtitles = epObj.downloadSubtitles() diff --git a/sickbeard/traktWatchListChecker.py b/sickbeard/traktWatchListChecker.py index 26f1a3e8..74d105be 100644 --- a/sickbeard/traktWatchListChecker.py +++ b/sickbeard/traktWatchListChecker.py @@ -26,6 +26,7 @@ from sickbeard import search_queue from sickbeard.common import SNATCHED, SNATCHED_PROPER, DOWNLOADED, SKIPPED, UNAIRED, IGNORED, ARCHIVED, WANTED, UNKNOWN from lib.trakt import * + class TraktChecker(): def __init__(self): self.todoWanted = [] @@ -43,7 +44,8 @@ class TraktChecker(): def updateShows(self): logger.log(u"Starting trakt show watchlist check", logger.DEBUG) - watchlist = TraktCall("user/watchlist/shows.json/%API%/" + sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_API, sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD) + watchlist = 
TraktCall("user/watchlist/shows.json/%API%/" + sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_API, + sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD) if watchlist is None: logger.log(u"Could not connect to trakt service, aborting watchlist update", logger.ERROR) return @@ -60,19 +62,20 @@ class TraktChecker(): self.startBacklog(newShow) else: self.todoWanted.append((int(show["indexer_id"]), 1, 1)) - self.todoWanted.append((int(show["indexer_id"]), -1, -1)) #used to pause new shows if the settings say to + self.todoWanted.append((int(show["indexer_id"]), -1, -1)) #used to pause new shows if the settings say to def updateEpisodes(self): """ Sets episodes to wanted that are in trakt watchlist """ logger.log(u"Starting trakt episode watchlist check", logger.DEBUG) - watchlist = TraktCall("user/watchlist/episodes.json/%API%/" + sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_API, sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD) + watchlist = TraktCall("user/watchlist/episodes.json/%API%/" + sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_API, + sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD) if watchlist is None: logger.log(u"Could not connect to trakt service, aborting watchlist update", logger.ERROR) return for show in watchlist: - self.addDefaultShow(show["indexer_id"], show["title"], SKIPPED) + self.addDefaultShow(int(show["indexer_id"]), show["title"], SKIPPED) newShow = helpers.findCertainShow(sickbeard.showList, int(show["indexer_id"])) for episode in show["episodes"]: if newShow is not None: @@ -88,7 +91,7 @@ class TraktChecker(): showObj = helpers.findCertainShow(sickbeard.showList, int(indexerid)) if showObj != None: return - logger.log(u"Adding show " + indexerid) + logger.log(u"Adding show " + str(indexerid)) root_dirs = sickbeard.ROOT_DIRS.split('|') location = root_dirs[int(root_dirs[0]) + 1] @@ -99,7 +102,9 @@ class TraktChecker(): return else: helpers.chmodAsParent(showPath) - sickbeard.showQueueScheduler.action.addShow(int(indexerid), showPath, status, int(sickbeard.QUALITY_DEFAULT), int(sickbeard.FLATTEN_FOLDERS_DEFAULT)) + sickbeard.showQueueScheduler.action.addShow(int(showObj.indexer), int(indexerid), showPath, status, + int(sickbeard.QUALITY_DEFAULT), + int(sickbeard.FLATTEN_FOLDERS_DEFAULT)) def setEpisodeToWanted(self, show, s, e): """ @@ -111,7 +116,7 @@ class TraktChecker(): with epObj.lock: if epObj.status != SKIPPED: return - logger.log(u"Setting episode s"+str(s)+"e"+str(e)+" of show " + show.name + " to wanted") + logger.log(u"Setting episode s" + str(s) + "e" + str(e) + " of show " + show.name + " to wanted") # figure out what segment the episode is in and remember it so we can backlog it if epObj.show.air_by_date: ep_segment = str(epObj.airdate)[:7] @@ -121,7 +126,7 @@ class TraktChecker(): epObj.status = WANTED epObj.saveToDB() backlog = (show, ep_segment) - if self.todoBacklog.count(backlog)==0: + if self.todoBacklog.count(backlog) == 0: self.todoBacklog.append(backlog) @@ -140,7 +145,8 @@ class TraktChecker(): for segment in segments: cur_backlog_queue_item = search_queue.BacklogQueueItem(show, segment[1]) sickbeard.searchQueueScheduler.action.add_item(cur_backlog_queue_item) - logger.log(u"Starting backlog for " + show.name + " season " + str(segment[1]) + " because some eps were set to wanted") + logger.log(u"Starting backlog for " + show.name + " season " + str( + segment[1]) + " because some eps were set to wanted") self.todoBacklog.remove(segment) diff --git a/sickbeard/tv.py b/sickbeard/tv.py index 2a69b8d0..5d6ace9b 100644 --- a/sickbeard/tv.py +++ 
b/sickbeard/tv.py @@ -33,9 +33,6 @@ from name_parser.parser import NameParser, InvalidNameException from lib import subliminal - -from sickbeard.indexers import indexer_api, indexer_exceptions - from lib.imdb import imdb from sickbeard import db @@ -50,16 +47,17 @@ from sickbeard import history from sickbeard import encodingKludge as ek from common import Quality, Overview, statusStrings -from common import DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, ARCHIVED, IGNORED, UNAIRED, WANTED, SKIPPED, UNKNOWN, FAILED -from common import NAMING_DUPLICATE, NAMING_EXTEND, NAMING_LIMITED_EXTEND, NAMING_SEPARATED_REPEAT, NAMING_LIMITED_EXTEND_E_PREFIXED +from common import DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, ARCHIVED, IGNORED, UNAIRED, WANTED, SKIPPED, \ + UNKNOWN, FAILED +from common import NAMING_DUPLICATE, NAMING_EXTEND, NAMING_LIMITED_EXTEND, NAMING_SEPARATED_REPEAT, \ + NAMING_LIMITED_EXTEND_E_PREFIXED class TVShow(object): - def __init__(self, indexer, indexerid, lang=""): - self.indexerid = indexerid - self.indexer = indexer + self.indexerid = int(indexerid) + self.indexer = int(indexer) self.name = "" self._location = "" self.imdbid = "" @@ -142,14 +140,15 @@ class TVShow(object): else: segment_year, segment_month = map(int, season.split('-')) min_date = datetime.date(segment_year, segment_month, 1) - + # it's easier to just hard code this than to worry about rolling the year over or making a month length map if segment_month == 12: max_date = datetime.date(segment_year, 12, 31) else: max_date = datetime.date(segment_year, segment_month + 1, 1) - datetime.timedelta(days=1) - sql_selection = sql_selection + " AND airdate >= " + str(min_date.toordinal()) + " AND airdate <= " + str(max_date.toordinal()) + sql_selection = sql_selection + " AND airdate >= " + str( + min_date.toordinal()) + " AND airdate <= " + str(max_date.toordinal()) if has_location: sql_selection = sql_selection + " AND location != '' " @@ -167,7 +166,9 @@ class TVShow(object): if cur_ep.location: # if there is a location, check if it's a multi-episode (share_location > 0) and put them in relatedEps if cur_result["share_location"] > 0: - related_eps_result = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND season = ? AND location = ? AND episode != ? ORDER BY episode ASC", [self.indexerid, cur_ep.season, cur_ep.location, cur_ep.episode]) + related_eps_result = myDB.select( + "SELECT * FROM tv_episodes WHERE showid = ? AND season = ? AND location = ? AND episode != ? ORDER BY episode ASC", + [self.indexerid, cur_ep.season, cur_ep.location, cur_ep.episode]) for cur_related_ep in related_eps_result: related_ep = self.getEpisode(int(cur_related_ep["season"]), int(cur_related_ep["episode"])) if related_ep not in cur_ep.relatedEps: @@ -190,7 +191,8 @@ class TVShow(object): if noCreate: return None - logger.log(str(self.indexerid) + u": An object for episode " + str(season) + "x" + str(episode) + " didn't exist in the cache, trying to create it", logger.DEBUG) + logger.log(str(self.indexerid) + u": An object for episode " + str(season) + "x" + str( + episode) + " didn't exist in the cache, trying to create it", logger.DEBUG) if file != None: ep = TVEpisode(self, season, episode, file) @@ -217,7 +219,9 @@ class TVShow(object): last_airdate = datetime.date.fromordinal(1) # get latest aired episode to compare against today - graceperiod and today + graceperiod - sql_result = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? 
AND season > '0' AND airdate > '1' AND status > '1' ORDER BY airdate DESC LIMIT 1", [cur_indexerid]) + sql_result = myDB.select( + "SELECT * FROM tv_episodes WHERE showid = ? AND season > '0' AND airdate > '1' AND status > '1' ORDER BY airdate DESC LIMIT 1", + [cur_indexerid]) if sql_result: last_airdate = datetime.date.fromordinal(sql_result[0]['airdate']) @@ -225,7 +229,9 @@ class TVShow(object): return True # get next upcoming UNAIRED episode to compare against today + graceperiod - sql_result = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND season > '0' AND airdate > '1' AND status = '1' ORDER BY airdate ASC LIMIT 1", [cur_indexerid]) + sql_result = myDB.select( + "SELECT * FROM tv_episodes WHERE showid = ? AND season > '0' AND airdate > '1' AND status = '1' ORDER BY airdate ASC LIMIT 1", + [cur_indexerid]) if sql_result: next_airdate = datetime.date.fromordinal(sql_result[0]['airdate']) @@ -235,7 +241,8 @@ class TVShow(object): last_update_indexer = datetime.date.fromordinal(self.last_update_indexer) # in the first year after ended (last airdate), update every 30 days - if (update_date - last_airdate) < datetime.timedelta(days=450) and (update_date - last_update_indexer) > datetime.timedelta(days=30): + if (update_date - last_airdate) < datetime.timedelta(days=450) and ( + update_date - last_update_indexer) > datetime.timedelta(days=30): return True return False @@ -247,7 +254,7 @@ class TVShow(object): if not ek.ek(os.path.isdir, self._location): logger.log(str(self.indexerid) + u": Show dir doesn't exist, skipping NFO generation") return False - + logger.log(str(self.indexerid) + u": Writing NFOs for show") for cur_provider in sickbeard.metadata_provider_dict.values(): result = cur_provider.create_show_metadata(self, force) or result @@ -279,13 +286,14 @@ class TVShow(object): sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? 
AND location != ''", [self.indexerid]) for epResult in sqlResults: - logger.log(str(self.indexerid) + u": Retrieving/creating episode " + str(epResult["season"]) + "x" + str(epResult["episode"]), logger.DEBUG) + logger.log(str(self.indexerid) + u": Retrieving/creating episode " + str(epResult["season"]) + "x" + str( + epResult["episode"]), logger.DEBUG) curEp = self.getEpisode(epResult["season"], epResult["episode"]) curEp.createMetaFiles(force) # find all media files in the show folder and create episodes for as many as possible - def loadEpisodesFromDir (self): + def loadEpisodesFromDir(self): if not ek.ek(os.path.isdir, self._location): logger.log(str(self.indexerid) + u": Show dir doesn't exist, not loading episodes from disk") @@ -325,7 +333,9 @@ class TVShow(object): pass if not ' ' in ep_file_name and parse_result and parse_result.release_group: - logger.log(u"Name " + ep_file_name + u" gave release group of " + parse_result.release_group + ", seems valid", logger.DEBUG) + logger.log( + u"Name " + ep_file_name + u" gave release group of " + parse_result.release_group + ", seems valid", + logger.DEBUG) curEpisode.release_name = ep_file_name # store the reference in the show @@ -337,7 +347,7 @@ class TVShow(object): logger.log(str(self.indexerid) + ": Could not refresh subtitles", logger.ERROR) logger.log(traceback.format_exc(), logger.DEBUG) curEpisode.saveToDB() - + def loadEpisodesFromDB(self): logger.log(u"Loading all episodes from the DB") @@ -356,7 +366,7 @@ class TVShow(object): if self.dvdorder != 0: lINDEXER_API_PARMS['dvdorder'] = True - t = indexer_api.indexerApi(**lINDEXER_API_PARMS) + t = sickbeard.indexerApi(**lINDEXER_API_PARMS) cachedShow = t[self.indexerid] cachedSeasons = {} @@ -370,8 +380,9 @@ class TVShow(object): if curSeason not in cachedSeasons: try: cachedSeasons[curSeason] = cachedShow[curSeason] - except indexer_exceptions.indexer_seasonnotfound, e: - logger.log(u"Error when trying to load the episode from " + self.indexer + ": " + e.message, logger.WARNING) + except sickbeard.indexer_seasonnotfound, e: + logger.log(u"Error when trying to load the episode from " + sickbeard.indexerApi( + self.indexer).name + ": " + e.message, logger.WARNING) deleteEp = True if not curSeason in scannedEps: @@ -385,12 +396,13 @@ class TVShow(object): # if we found out that the ep is no longer on TVDB then delete it from our database too if deleteEp: curEp.deleteEpisode() - + curEp.loadFromDB(curSeason, curEpisode) curEp.loadFromIndexer(tvapi=t, cachedSeason=cachedSeasons[curSeason]) scannedEps[curSeason][curEpisode] = True except exceptions.EpisodeDeletedException: - logger.log(u"Tried loading an episode from the DB that should have been deleted, skipping it", logger.DEBUG) + logger.log(u"Tried loading an episode from the DB that should have been deleted, skipping it", + logger.DEBUG) continue return scannedEps @@ -409,13 +421,16 @@ class TVShow(object): lINDEXER_API_PARMS['dvdorder'] = True try: - t = indexer_api.indexerApi(**lINDEXER_API_PARMS) + t = sickbeard.indexerApi(**lINDEXER_API_PARMS) showObj = t[self.indexerid] - except indexer_exceptions.indexer_error: - logger.log(u"" + self.indexer + " timed out, unable to update episodes from " + self.indexer, logger.ERROR) + except sickbeard.indexer_error: + logger.log(u"" + sickbeard.indexerApi( + self.indexer).name + " timed out, unable to update episodes from " + sickbeard.indexerApi( + self.indexer).name, logger.ERROR) return None - logger.log(str(self.indexerid) + u": Loading all episodes from " + self.indexer + "..") + 
logger.log( + str(self.indexerid) + u": Loading all episodes from " + sickbeard.indexerApi(self.indexer).name + "..") scannedEps = {} @@ -427,10 +442,11 @@ class TVShow(object): if episode == 0: continue try: - #ep = TVEpisode(self, season, episode) ep = self.getEpisode(season, episode) except exceptions.EpisodeNotFoundException: - logger.log(str(self.indexerid) + ": " + self.indexer + " object for " + str(season) + "x" + str(episode) + " is incomplete, skipping this episode") + logger.log( + str(self.indexerid) + ": " + sickbeard.indexerApi(self.indexer).name + " object for " + str( + season) + "x" + str(episode) + " is incomplete, skipping this episode") continue else: try: @@ -440,7 +456,8 @@ class TVShow(object): continue with ep.lock: - logger.log(str(self.indexerid) + u": Loading info from " + self.indexer + " for episode " + str(season) + "x" + str(episode), logger.DEBUG) + logger.log(str(self.indexerid) + u": Loading info from " + sickbeard.indexerApi( + self.indexer).name + " for episode " + str(season) + "x" + str(episode), logger.DEBUG) ep.loadFromIndexer(season, episode, tvapi=t) if ep.dirty: sql_l.append(ep.get_sql()) @@ -513,23 +530,27 @@ class TVShow(object): if self.dvdorder != 0: lINDEXER_API_PARMS['dvdorder'] = True - t = indexer_api.indexerApi(**lINDEXER_API_PARMS) + t = sickbeard.indexerApi(**lINDEXER_API_PARMS) epObj = t[self.indexerid].airedOn(parse_result.air_date)[0] season = int(epObj["seasonnumber"]) episodes = [int(epObj["episodenumber"])] - except indexer_exceptions.indexer_episodenotfound: - logger.log(u"Unable to find episode with date " + str(parse_result.air_date) + " for show " + self.name + ", skipping", logger.WARNING) + except sickbeard.indexer_episodenotfound: + logger.log(u"Unable to find episode with date " + str( + parse_result.air_date) + " for show " + self.name + ", skipping", logger.WARNING) return None - except indexer_exceptions.indexer_error, e: - logger.log(u"Unable to contact " + self.indexer + ": " + ex(e), logger.WARNING) + except sickbeard.indexer_error, e: + logger.log(u"Unable to contact " + sickbeard.indexerApi(self.indexer).name + ": " + ex(e), + logger.WARNING) return None for curEpNum in episodes: episode = int(curEpNum) - logger.log(str(self.indexerid) + ": " + file + " parsed to " + self.name + " " + str(season) + "x" + str(episode), logger.DEBUG) + logger.log( + str(self.indexerid) + ": " + file + " parsed to " + self.name + " " + str(season) + "x" + str(episode), + logger.DEBUG) checkQualityAgain = False same_file = False @@ -539,13 +560,16 @@ class TVShow(object): try: curEp = self.getEpisode(season, episode, file) except exceptions.EpisodeNotFoundException: - logger.log(str(self.indexerid) + u": Unable to figure out what this file is, skipping", logger.ERROR) + logger.log(str(self.indexerid) + u": Unable to figure out what this file is, skipping", + logger.ERROR) continue else: # if there is a new file associated with this ep then re-check the quality if curEp.location and ek.ek(os.path.normpath, curEp.location) != ek.ek(os.path.normpath, file): - logger.log(u"The old episode had a different file associated with it, I will re-check the quality based on the new filename " + file, logger.DEBUG) + logger.log( + u"The old episode had a different file associated with it, I will re-check the quality based on the new filename " + file, + logger.DEBUG) checkQualityAgain = True with curEp.lock: @@ -572,13 +596,15 @@ class TVShow(object): # if they replace a file on me I'll make some attempt at re-checking the quality unless I know it's 
the same file if checkQualityAgain and not same_file: newQuality = Quality.nameQuality(file) - logger.log(u"Since this file has been renamed, I checked " + file + " and found quality " + Quality.qualityStrings[newQuality], logger.DEBUG) + logger.log(u"Since this file has been renamed, I checked " + file + " and found quality " + + Quality.qualityStrings[newQuality], logger.DEBUG) if newQuality != Quality.UNKNOWN: curEp.status = Quality.compositeStatus(DOWNLOADED, newQuality) # check for status/quality changes as long as it's a new file - elif not same_file and sickbeard.helpers.isMediaFile(file) and curEp.status not in Quality.DOWNLOADED + [ARCHIVED, IGNORED]: + elif not same_file and sickbeard.helpers.isMediaFile(file) and curEp.status not in Quality.DOWNLOADED + [ + ARCHIVED, IGNORED]: oldStatus, oldQuality = Quality.splitCompositeStatus(curEp.status) newQuality = Quality.nameQuality(file) @@ -589,12 +615,16 @@ class TVShow(object): # if it was snatched and now exists then set the status correctly if oldStatus == SNATCHED and oldQuality <= newQuality: - logger.log(u"STATUS: this ep used to be snatched with quality " + Quality.qualityStrings[oldQuality] + u" but a file exists with quality " + Quality.qualityStrings[newQuality] + u" so I'm setting the status to DOWNLOADED", logger.DEBUG) + logger.log(u"STATUS: this ep used to be snatched with quality " + Quality.qualityStrings[ + oldQuality] + u" but a file exists with quality " + Quality.qualityStrings[ + newQuality] + u" so I'm setting the status to DOWNLOADED", logger.DEBUG) newStatus = DOWNLOADED # if it was snatched proper and we found a higher quality one then allow the status change elif oldStatus == SNATCHED_PROPER and oldQuality < newQuality: - logger.log(u"STATUS: this ep used to be snatched proper with quality " + Quality.qualityStrings[oldQuality] + u" but a file exists with quality " + Quality.qualityStrings[newQuality] + u" so I'm setting the status to DOWNLOADED", logger.DEBUG) + logger.log(u"STATUS: this ep used to be snatched proper with quality " + Quality.qualityStrings[ + oldQuality] + u" but a file exists with quality " + Quality.qualityStrings[ + newQuality] + u" so I'm setting the status to DOWNLOADED", logger.DEBUG) newStatus = DOWNLOADED elif oldStatus not in (SNATCHED, SNATCHED_PROPER): @@ -602,7 +632,8 @@ class TVShow(object): if newStatus != None: with curEp.lock: - logger.log(u"STATUS: we have an associated file, so setting the status from " + str(curEp.status) + u" to DOWNLOADED/" + str(Quality.statusFromName(file)), logger.DEBUG) + logger.log(u"STATUS: we have an associated file, so setting the status from " + str( + curEp.status) + u" to DOWNLOADED/" + str(Quality.statusFromName(file)), logger.DEBUG) curEp.status = Quality.compositeStatus(newStatus, newQuality) with curEp.lock: @@ -630,7 +661,7 @@ class TVShow(object): return else: if not self.indexer: - self.indexer = sqlResults[0]["indexer"] + self.indexer = int(sqlResults[0]["indexer"]) if not self.name: self.name = sqlResults[0]["show_name"] if not self.network: @@ -655,12 +686,12 @@ class TVShow(object): self.air_by_date = sqlResults[0]["air_by_date"] if not self.air_by_date: self.air_by_date = 0 - + self.subtitles = sqlResults[0]["subtitles"] if self.subtitles: self.subtitles = 1 else: - self.subtitles = 0 + self.subtitles = 0 self.dvdorder = sqlResults[0]["dvdorder"] if not self.dvdorder: @@ -682,9 +713,9 @@ class TVShow(object): self.last_update_indexer = sqlResults[0]["last_update_indexer"] if not self.imdbid: - self.imdbid = 
sqlResults[0]["imdb_id"] + self.imdbid = sqlResults[0]["imdb_id"] - #Get IMDb_info from database + #Get IMDb_info from database sqlResults = myDB.select("SELECT * FROM imdb_info WHERE indexer_id = ?", [self.indexerid]) if len(sqlResults) == 0: @@ -695,7 +726,7 @@ class TVShow(object): def loadFromIndexer(self, cache=True, tvapi=None, cachedSeason=None): - logger.log(str(self.indexerid) + u": Loading show info from " + self.indexer) + logger.log(str(self.indexerid) + u": Loading show info from " + sickbeard.indexerApi(self.indexer).name) # There's gotta be a better way of doing this but we don't wanna # change the cache value elsewhere @@ -711,7 +742,7 @@ class TVShow(object): if self.dvdorder != 0: lINDEXER_API_PARMS['dvdorder'] = True - t = indexer_api.indexerApi(**lINDEXER_API_PARMS) + t = sickbeard.indexerApi(**lINDEXER_API_PARMS) else: t = tvapi @@ -722,7 +753,8 @@ class TVShow(object): if getattr(myEp, 'seriesname', None) is not None: self.name = myEp['seriesname'].strip() except AttributeError: - raise indexer_exceptions.indexer_attributenotfound("Found %s, but attribute 'seriesname' was empty." % (self.indexerid)) + raise sickbeard.indexer_attributenotfound( + "Found %s, but attribute 'seriesname' was empty." % (self.indexerid)) self.classification = getattr(myEp, 'classification', 'Scripted') self.genre = getattr(myEp, 'genre', '') @@ -741,71 +773,72 @@ class TVShow(object): def loadIMDbInfo(self, imdbapi=None): - imdb_info = {'imdb_id' : self.imdbid, - 'title' : '', - 'year' : '', - 'akas' : [], - 'runtimes' : '', - 'genres' : [], - 'countries' : '', - 'country_codes' : '', - 'certificates' : [], - 'rating' : '', + imdb_info = {'imdb_id': self.imdbid, + 'title': '', + 'year': '', + 'akas': [], + 'runtimes': '', + 'genres': [], + 'countries': '', + 'country_codes': '', + 'certificates': [], + 'rating': '', 'votes': '', 'last_update': '' - } - + } + if self.imdbid: logger.log(str(self.indexerid) + u": Loading show info from IMDb") - + i = imdb.IMDb() imdbTv = i.get_movie(str(re.sub("[^0-9]", "", self.imdbid))) - + for key in filter(lambda x: x in imdbTv.keys(), imdb_info.keys()): # Store only the first value for string type if type(imdb_info[key]) == type('') and type(imdbTv.get(key)) == type([]): imdb_info[key] = imdbTv.get(key)[0] else: imdb_info[key] = imdbTv.get(key) - + #Filter only the value if imdb_info['runtimes']: - imdb_info['runtimes'] = re.search('\d+',imdb_info['runtimes']).group(0) + imdb_info['runtimes'] = re.search('\d+', imdb_info['runtimes']).group(0) else: - imdb_info['runtimes'] = self.runtime - + imdb_info['runtimes'] = self.runtime + if imdb_info['akas']: imdb_info['akas'] = '|'.join(imdb_info['akas']) else: - imdb_info['akas'] = '' - - #Join all genres in a string + imdb_info['akas'] = '' + + #Join all genres in a string if imdb_info['genres']: imdb_info['genres'] = '|'.join(imdb_info['genres']) else: - imdb_info['genres'] = '' - - #Get only the production country certificate if any + imdb_info['genres'] = '' + + #Get only the production country certificate if any if imdb_info['certificates'] and imdb_info['countries']: dct = {} try: for item in imdb_info['certificates']: dct[item.split(':')[0]] = item.split(':')[1] - + imdb_info['certificates'] = dct[imdb_info['countries']] except: - imdb_info['certificates'] = '' - + imdb_info['certificates'] = '' + else: - imdb_info['certificates'] = '' - + imdb_info['certificates'] = '' + imdb_info['last_update'] = datetime.date.today().toordinal() - + #Rename dict keys without spaces for DB upsert - self.imdb_info = 
dict((k.replace(' ', '_'),float(v) if hasattr(v,'keys') else v) for k,v in imdb_info.items()) - - logger.log(str(self.indexerid) + u": Obtained info from IMDb ->" + str(self.imdb_info), logger.DEBUG) - + self.imdb_info = dict( + (k.replace(' ', '_'), float(v) if hasattr(v, 'keys') else v) for k, v in imdb_info.items()) + + logger.log(str(self.indexerid) + u": Obtained info from IMDb ->" + str(self.imdb_info), logger.DEBUG) + def nextEpisode(self): logger.log(str(self.indexerid) + ": Finding the episode which airs next", logger.DEBUG) @@ -821,7 +854,8 @@ class TVShow(object): logger.log(str(self.indexerid) + u": No episode found... need to implement show status", logger.DEBUG) return [] else: - logger.log(str(self.indexerid) + u": Found episode " + str(sqlResults[0]["season"]) + "x" + str(sqlResults[0]["episode"]), logger.DEBUG) + logger.log(str(self.indexerid) + u": Found episode " + str(sqlResults[0]["season"]) + "x" + str( + sqlResults[0]["episode"]), logger.DEBUG) foundEps = [] for sqlEp in sqlResults: curEp = self.getEpisode(int(sqlEp["season"]), int(sqlEp["episode"])) @@ -833,13 +867,13 @@ class TVShow(object): myDB = db.DBConnection() sql_l = [["DELETE FROM tv_episodes WHERE showid = ?", [self.indexerid]], - ["DELETE FROM tv_shows WHERE indexer_id = ?", [self.indexerid]], - ["DELETE FROM imdb_info WHERE indexer_id = ?", [self.indexerid]]] + ["DELETE FROM tv_shows WHERE indexer_id = ?", [self.indexerid]], + ["DELETE FROM imdb_info WHERE indexer_id = ?", [self.indexerid]]] myDB.mass_action(sql_l) - + # remove self from show list - sickbeard.showList = [x for x in sickbeard.showList if x.indexerid != self.indexerid] + sickbeard.showList = [x for x in sickbeard.showList if int(x.indexerid) != self.indexerid] # clear the cache image_cache_dir = ek.ek(os.path.join, sickbeard.CACHE_DIR, 'images') @@ -849,7 +883,7 @@ class TVShow(object): def populateCache(self): cache_inst = image_cache.ImageCache() - + logger.log(u"Checking & filling cache for show " + self.name) cache_inst.fill_cache(self) @@ -876,16 +910,19 @@ class TVShow(object): try: curEp = self.getEpisode(season, episode) except exceptions.EpisodeDeletedException: - logger.log(u"The episode was deleted while we were refreshing it, moving on to the next one", logger.DEBUG) + logger.log(u"The episode was deleted while we were refreshing it, moving on to the next one", + logger.DEBUG) continue # if the path doesn't exist or if it's not in our show dir - if not ek.ek(os.path.isfile, curLoc) or not os.path.normpath(curLoc).startswith(os.path.normpath(self.location)): + if not ek.ek(os.path.isfile, curLoc) or not os.path.normpath(curLoc).startswith( + os.path.normpath(self.location)): with curEp.lock: # if it used to have a file associated with it and it doesn't anymore then set it to IGNORED if curEp.location and curEp.status in Quality.DOWNLOADED: - logger.log(str(self.indexerid) + u": Location for " + str(season) + "x" + str(episode) + " doesn't exist, removing it and changing our status to IGNORED", logger.DEBUG) + logger.log(str(self.indexerid) + u": Location for " + str(season) + "x" + str( + episode) + " doesn't exist, removing it and changing our status to IGNORED", logger.DEBUG) curEp.status = IGNORED curEp.subtitles = list() curEp.subtitles_searchcount = 0 @@ -903,9 +940,11 @@ class TVShow(object): logger.log(str(self.indexerid) + ": Show dir doesn't exist, can't download subtitles", logger.DEBUG) return logger.log(str(self.indexerid) + ": Downloading subtitles", logger.DEBUG) - + try: - episodes = 
db.DBConnection().select("SELECT location FROM tv_episodes WHERE showid = ? AND location NOT LIKE '' ORDER BY season DESC, episode DESC", [self.indexerid]) + episodes = db.DBConnection().select( + "SELECT location FROM tv_episodes WHERE showid = ? AND location NOT LIKE '' ORDER BY season DESC, episode DESC", + [self.indexerid]) for episodeLoc in episodes: episode = self.makeEpFromFile(episodeLoc['location']) subtitles = episode.downloadSubtitles(force=force) @@ -940,13 +979,13 @@ class TVShow(object): "lang": self.lang, "imdb_id": self.imdbid, "last_update_indexer": self.last_update_indexer - } + } myDB.upsert("tv_shows", newValueDict, controlValueDict) if self.imdbid: controlValueDict = {"indexer_id": self.indexerid} newValueDict = self.imdb_info - + myDB.upsert("imdb_info", newValueDict, controlValueDict) def __str__(self): @@ -971,18 +1010,21 @@ class TVShow(object): def wantEpisode(self, season, episode, quality, manualSearch=False): - logger.log(u"Checking if found episode " + str(season) + "x" + str(episode) + " is wanted at quality " + Quality.qualityStrings[quality], logger.DEBUG) + logger.log(u"Checking if found episode " + str(season) + "x" + str(episode) + " is wanted at quality " + + Quality.qualityStrings[quality], logger.DEBUG) # if the quality isn't one we want under any circumstances then just say no anyQualities, bestQualities = Quality.splitQuality(self.quality) - logger.log(u"any,best = " + str(anyQualities) + " " + str(bestQualities) + " and found " + str(quality), logger.DEBUG) + logger.log(u"any,best = " + str(anyQualities) + " " + str(bestQualities) + " and found " + str(quality), + logger.DEBUG) if quality not in anyQualities + bestQualities: logger.log(u"Don't want this quality, ignoring found episode", logger.DEBUG) return False myDB = db.DBConnection() - sqlResults = myDB.select("SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", [self.indexerid, season, episode]) + sqlResults = myDB.select("SELECT status FROM tv_episodes WHERE showid = ? AND season = ? 
AND episode = ?", + [self.indexerid, season, episode]) if not sqlResults or not len(sqlResults): logger.log(u"Unable to find a matching episode in database, ignoring found episode", logger.DEBUG) @@ -1004,19 +1046,24 @@ class TVShow(object): logger.log(u"Existing episode status is wanted/unaired/skipped, getting found episode", logger.DEBUG) return True elif manualSearch: - logger.log(u"Usually ignoring found episode, but forced search allows the quality, getting found episode", logger.DEBUG) + logger.log( + u"Usually ignoring found episode, but forced search allows the quality, getting found episode", + logger.DEBUG) return True else: - logger.log(u"Quality is on wanted list, need to check if it's better than existing quality", logger.DEBUG) + logger.log(u"Quality is on wanted list, need to check if it's better than existing quality", + logger.DEBUG) curStatus, curQuality = Quality.splitCompositeStatus(epStatus) # if we are re-downloading then we only want it if it's in our bestQualities list and better than what we have if curStatus in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST and quality in bestQualities and quality > curQuality: - logger.log(u"Episode already exists but the found episode has better quality, getting found episode", logger.DEBUG) + logger.log(u"Episode already exists but the found episode has better quality, getting found episode", + logger.DEBUG) return True else: - logger.log(u"Episode already exists and the found episode has same/lower quality, ignoring found episode", logger.DEBUG) + logger.log(u"Episode already exists and the found episode has same/lower quality, ignoring found episode", + logger.DEBUG) logger.log(u"None of the conditions were met, ignoring found episode", logger.DEBUG) return False @@ -1055,16 +1102,17 @@ class TVShow(object): else: return Overview.GOOD + def dirty_setter(attr_name): def wrapper(self, val): if getattr(self, attr_name) != val: setattr(self, attr_name, val) self.dirty = True + return wrapper class TVEpisode(object): - def __init__(self, show, season, episode, file=""): self._name = "" @@ -1088,7 +1136,7 @@ class TVEpisode(object): self.show = show - self._indexer = self.show.indexer + self._indexer = int(self.show.indexer) self._location = file @@ -1135,25 +1183,30 @@ class TVEpisode(object): """Look for subtitles files and refresh the subtitles property""" self.subtitles = subtitles.subtitlesLanguages(self.location) - def downloadSubtitles(self,force=False): + def downloadSubtitles(self, force=False): #TODO: Add support for force option if not ek.ek(os.path.isfile, self.location): - logger.log(str(self.show.indexerid) + ": Episode file doesn't exist, can't download subtitles for episode " + str(self.season) + "x" + str(self.episode), logger.DEBUG) + logger.log( + str(self.show.indexerid) + ": Episode file doesn't exist, can't download subtitles for episode " + str( + self.season) + "x" + str(self.episode), logger.DEBUG) return - logger.log(str(self.show.indexerid) + ": Downloading subtitles for episode " + str(self.season) + "x" + str(self.episode), logger.DEBUG) + logger.log(str(self.show.indexerid) + ": Downloading subtitles for episode " + str(self.season) + "x" + str( + self.episode), logger.DEBUG) previous_subtitles = self.subtitles try: need_languages = set(sickbeard.SUBTITLES_LANGUAGES) - set(self.subtitles) - subtitles = subliminal.download_subtitles([self.location], languages=need_languages, services=sickbeard.subtitles.getEnabledServiceList(), force=force, multi=True, 
cache_dir=sickbeard.CACHE_DIR)              if sickbeard.SUBTITLES_DIR:                 for video in subtitles:                     subs_new_path = ek.ek(os.path.join, os.path.dirname(video.path), sickbeard.SUBTITLES_DIR)                     dir_exists = helpers.makeDir(subs_new_path)                     if not dir_exists: -                        logger.log(u"Unable to create subtitles folder "+subs_new_path, logger.ERROR) +                        logger.log(u"Unable to create subtitles folder " + subs_new_path, logger.ERROR)                     else:                         helpers.chmodAsParent(subs_new_path) @@ -1171,7 +1224,7 @@ class TVEpisode(object):             return          self.refreshSubtitles() -        self.subtitles_searchcount = self.subtitles_searchcount + 1 if self.subtitles_searchcount else 1 #added the if because sometime it raise an error +        self.subtitles_searchcount = self.subtitles_searchcount + 1 if self.subtitles_searchcount else 1  #added the if because sometimes it raises an error         self.subtitles_lastsearch = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")         self.saveToDB() @@ -1179,12 +1232,15 @@ class TVEpisode(object):          if newsubtitles:             subtitleList = ", ".join(subliminal.language.Language(x).name for x in newsubtitles) -            logger.log(str(self.show.indexerid) + u": Downloaded " + subtitleList + " subtitles for episode " + str(self.season) + "x" + str(self.episode), logger.DEBUG) +            logger.log(str(self.show.indexerid) + u": Downloaded " + subtitleList + " subtitles for episode " + str( +                self.season) + "x" + str(self.episode), logger.DEBUG)              notifiers.notify_subtitle_download(self.prettyName(), subtitleList)          else: -            logger.log(str(self.show.indexerid) + u": No subtitles downloaded for episode " + str(self.season) + "x" + str(self.episode), logger.DEBUG) +            logger.log( +                str(self.show.indexerid) + u": No subtitles downloaded for episode " + str(self.season) + "x" + str( +                    self.episode), logger.DEBUG)          if sickbeard.SUBTITLES_HISTORY:             for video in subtitles: @@ -1232,7 +1288,8 @@ class TVEpisode(object):                 try:                     self.loadFromNFO(self.location)                 except exceptions.NoNFOException: -                    logger.log(str(self.show.indexerid) + u": There was an error loading the NFO for episode " + str(season) + "x" + str(episode), logger.ERROR) +                    logger.log(str(self.show.indexerid) + u": There was an error loading the NFO for episode " + str( +                        season) + "x" + str(episode), logger.ERROR)                     pass                  # if we tried loading it from NFO and didn't find the NFO, try the Indexers @@ -1244,19 +1301,24 @@ class TVEpisode(object):                     # if we failed SQL *and* NFO, Indexers then fail                     if result == False: -                        raise exceptions.EpisodeNotFoundException("Couldn't find episode " + str(season) + "x" + str(episode)) +                        raise exceptions.EpisodeNotFoundException( +                            "Couldn't find episode " + str(season) + "x" + str(episode))      def loadFromDB(self, season, episode): -        logger.log(str(self.show.indexerid) + u": Loading episode details from DB for episode " + str(season) + "x" + str(episode), logger.DEBUG) +        logger.log( +            str(self.show.indexerid) + u": Loading episode details from DB for episode " + str(season) + "x" + str( +                episode), logger.DEBUG)          myDB = db.DBConnection() -        sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", [self.show.indexerid, season, episode]) +        sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND season = ? 
AND episode = ?", + [self.show.indexerid, season, episode]) if len(sqlResults) > 1: raise exceptions.MultipleDBEpisodesException("Your DB has two records for the same show somehow.") elif len(sqlResults) == 0: - logger.log(str(self.show.indexerid) + u": Episode " + str(self.season) + "x" + str(self.episode) + " not found in the database", logger.DEBUG) + logger.log(str(self.show.indexerid) + u": Episode " + str(self.season) + "x" + str( + self.episode) + " not found in the database", logger.DEBUG) return False else: #NAMEIT logger.log(u"AAAAA from" + str(self.season)+"x"+str(self.episode) + " -" + self.name + " to " + str(sqlResults[0]["name"])) @@ -1285,7 +1347,7 @@ class TVEpisode(object): self.file_size = 0 self.indexerid = int(sqlResults[0]["indexerid"]) - self.indexer = sqlResults[0]["indexer"] + self.indexer = int(sqlResults[0]["indexer"]) if sqlResults[0]["release_name"] is not None: self.release_name = sqlResults[0]["release_name"] @@ -1303,7 +1365,8 @@ class TVEpisode(object): if episode is None: episode = self.episode - logger.log(str(self.show.indexerid) + u": Loading episode details from " + self.show.indexer + " for episode " + str(season) + "x" + str(episode), logger.DEBUG) + logger.log(str(self.show.indexerid) + u": Loading episode details from " + sickbeard.indexerApi( + self.show.indexer).name + " for episode " + str(season) + "x" + str(episode), logger.DEBUG) indexer_lang = self.show.lang @@ -1321,31 +1384,36 @@ class TVEpisode(object): if self.show.dvdorder != 0: lINDEXER_API_PARMS['dvdorder'] = True - t = indexer_api.indexerApi(**lINDEXER_API_PARMS) + t = sickbeard.indexerApi(**lINDEXER_API_PARMS) else: t = tvapi myEp = t[self.show.indexerid][season][episode] else: myEp = cachedSeason[episode] - except (indexer_exceptions.indexer_error, IOError), e: - logger.log(u"" + self.indexer + " threw up an error: " + ex(e), logger.DEBUG) + except (sickbeard.indexer_error, IOError), e: + logger.log(u"" + sickbeard.indexerApi(self.indexer).name + " threw up an error: " + ex(e), logger.DEBUG) # if the episode is already valid just log it, if not throw it up if self.name: - logger.log(u"" + self.indexer + " timed out but we have enough info from other sources, allowing the error", logger.DEBUG) + logger.log(u"" + sickbeard.indexerApi( + self.indexer).name + " timed out but we have enough info from other sources, allowing the error", + logger.DEBUG) return else: - logger.log(u"" + self.indexer + " timed out, unable to create the episode", logger.ERROR) + logger.log(u"" + sickbeard.indexerApi(self.indexer).name + " timed out, unable to create the episode", + logger.ERROR) return False - except (indexer_exceptions.indexer_episodenotfound, indexer_exceptions.indexer_seasonnotfound): - logger.log(u"Unable to find the episode on " + self.indexer + "... has it been removed? Should I delete from db?", logger.DEBUG) + except (sickbeard.indexer_episodenotfound, sickbeard.indexer_seasonnotfound): + logger.log(u"Unable to find the episode on " + sickbeard.indexerApi( + self.indexer).name + "... has it been removed? 
Should I delete from db?", logger.DEBUG) # if I'm no longer on the Indexers but I once was then delete myself from the DB if self.indexerid != -1: self.deleteEpisode() return if getattr(myEp, 'episodename', None) is None: - logger.log(u"This episode (" + self.show.name + " - " + str(season) + "x" + str(episode) + ") has no name on " + self.indexer + "") + logger.log(u"This episode (" + self.show.name + " - " + str(season) + "x" + str( + episode) + ") has no name on " + sickbeard.indexerApi(self.indexer).name + "") # if I'm incomplete on TVDB but I once was complete then just delete myself from the DB for now if self.indexerid != -1: self.deleteEpisode() @@ -1355,10 +1423,9 @@ class TVEpisode(object): self.season = season self.episode = episode - self.description = getattr(myEp, 'overview', "") - firstaired = getattr(myEp, 'firstaired', None) + firstaired = getattr(myEp, 'firstaired', None) if firstaired is None or firstaired is "0000-00-00": firstaired = str(datetime.date.fromordinal(1)) rawAirdate = [int(x) for x in firstaired.split("-")] @@ -1366,33 +1433,39 @@ class TVEpisode(object): try: self.airdate = datetime.date(rawAirdate[0], rawAirdate[1], rawAirdate[2]) except ValueError: - logger.log(u"Malformed air date retrieved from " + self.indexer + " ("+self.show.name+" - "+str(season)+"x"+str(episode)+")", logger.ERROR) + logger.log(u"Malformed air date retrieved from " + sickbeard.indexerApi( + self.indexer).name + " (" + self.show.name + " - " + str(season) + "x" + str(episode) + ")", + logger.ERROR) # if I'm incomplete on TVDB but I once was complete then just delete myself from the DB for now if self.indexerid != -1: self.deleteEpisode() return False #early conversion to int so that episode doesn't get marked dirty - self.indexerid = getattr(myEp, 'id', None) + self.indexerid = getattr(myEp, 'id', None) if self.indexerid is None: - logger.log(u"Failed to retrieve ID from " + self.indexer, logger.ERROR) + logger.log(u"Failed to retrieve ID from " + sickbeard.indexerApi(self.indexer).name, logger.ERROR) if self.indexerid != -1: self.deleteEpisode() return False #don't update show status if show dir is missing, unless missing show dirs are created during post-processing if not ek.ek(os.path.isdir, self.show._location) and not sickbeard.CREATE_MISSING_SHOW_DIRS: - logger.log(u"The show dir is missing, not bothering to change the episode statuses since it'd probably be invalid") + logger.log( + u"The show dir is missing, not bothering to change the episode statuses since it'd probably be invalid") return - logger.log(str(self.show.indexerid) + u": Setting status for " + str(season) + "x" + str(episode) + " based on status " + str(self.status) + " and existence of " + self.location, logger.DEBUG) + logger.log(str(self.show.indexerid) + u": Setting status for " + str(season) + "x" + str( + episode) + " based on status " + str(self.status) + " and existence of " + self.location, logger.DEBUG) if not ek.ek(os.path.isfile, self.location): # if we don't have the file - if self.airdate >= datetime.date.today() and self.status not in Quality.SNATCHED + Quality.SNATCHED_PROPER : + if self.airdate >= datetime.date.today() and self.status not in Quality.SNATCHED + Quality.SNATCHED_PROPER: # and it hasn't aired yet set the status to UNAIRED - logger.log(u"Episode airs in the future, changing status from " + str(self.status) + " to " + str(UNAIRED), logger.DEBUG) + logger.log( + u"Episode airs in the future, changing status from " + str(self.status) + " to " + str(UNAIRED), + logger.DEBUG) 
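# --- editor's note (annotation, not part of the patch) ----------------------
# The hunk above reflows TVEpisode.loadFromIndexer's status logic: with no
# media file on disk, a future airdate maps to UNAIRED, a missing airdate to
# SKIPPED, and a past airdate falls through to the wanted/skipped branches.
# A minimal standalone sketch of that decision rule; the status constants and
# the helper name below are illustrative, not SickBeard's real API:
import datetime

UNAIRED, WANTED, SKIPPED = 1, 3, 5  # illustrative status codes

def pick_status(airdate, has_file, today=None):
    today = today or datetime.date.today()
    if has_file:
        return WANTED  # the real code derives a status from the file's quality
    if airdate >= today:
        return UNAIRED  # not aired yet
    if airdate == datetime.date.fromordinal(1):
        return SKIPPED  # no airdate on record
    return WANTED

assert pick_status(datetime.date.max, has_file=False) == UNAIRED
# ----------------------------------------------------------------------------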
self.status = UNAIRED # if there's no airdate then set it to skipped (and respect ignored) elif self.airdate == datetime.date.fromordinal(1): @@ -1411,13 +1484,17 @@ class TVEpisode(object): self.status = SKIPPED else: - logger.log(u"Not touching status because we have no ep file, the airdate is in the past, and the status is "+str(self.status), logger.DEBUG) + logger.log( + u"Not touching status because we have no ep file, the airdate is in the past, and the status is " + str( + self.status), logger.DEBUG) # if we have a media file then it's downloaded elif sickbeard.helpers.isMediaFile(self.location): # leave propers alone, you have to either post-process them or manually change them back if self.status not in Quality.SNATCHED_PROPER + Quality.DOWNLOADED + Quality.SNATCHED + [ARCHIVED]: - logger.log(u"5 Status changes from " + str(self.status) + " to " + str(Quality.statusFromName(self.location)), logger.DEBUG) + logger.log( + u"5 Status changes from " + str(self.status) + " to " + str(Quality.statusFromName(self.location)), + logger.DEBUG) self.status = Quality.statusFromName(self.location) # shouldn't get here probably @@ -1428,10 +1505,13 @@ class TVEpisode(object): def loadFromNFO(self, location): if not ek.ek(os.path.isdir, self.show._location): - logger.log(str(self.show.indexerid) + u": The show dir is missing, not bothering to try loading the episode NFO") + logger.log( + str(self.show.indexerid) + u": The show dir is missing, not bothering to try loading the episode NFO") return - logger.log(str(self.show.indexerid) + u": Loading episode details from the NFO file associated with " + location, logger.DEBUG) + logger.log( + str(self.show.indexerid) + u": Loading episode details from the NFO file associated with " + location, + logger.DEBUG) self.location = location @@ -1439,7 +1519,8 @@ class TVEpisode(object): if self.status == UNKNOWN: if sickbeard.helpers.isMediaFile(self.location): - logger.log(u"7 Status changes from " + str(self.status) + " to " + str(Quality.statusFromName(self.location)), logger.DEBUG) + logger.log(u"7 Status changes from " + str(self.status) + " to " + str( + Quality.statusFromName(self.location)), logger.DEBUG) self.status = Quality.statusFromName(self.location) nfoFile = sickbeard.helpers.replaceExtension(self.location, "nfo") @@ -1449,17 +1530,24 @@ class TVEpisode(object): try: showXML = etree.ElementTree(file=nfoFile) except (SyntaxError, ValueError), e: - logger.log(u"Error loading the NFO, backing up the NFO and skipping for now: " + ex(e), logger.ERROR) #TODO: figure out what's wrong and fix it + logger.log(u"Error loading the NFO, backing up the NFO and skipping for now: " + ex(e), + logger.ERROR) #TODO: figure out what's wrong and fix it try: ek.ek(os.rename, nfoFile, nfoFile + ".old") except Exception, e: - logger.log(u"Failed to rename your episode's NFO file - you need to delete it or fix it: " + ex(e), logger.ERROR) + logger.log( + u"Failed to rename your episode's NFO file - you need to delete it or fix it: " + ex(e), + logger.ERROR) raise exceptions.NoNFOException("Error in NFO format") for epDetails in showXML.getiterator('episodedetails'): if epDetails.findtext('season') is None or int(epDetails.findtext('season')) != self.season or \ - epDetails.findtext('episode') is None or int(epDetails.findtext('episode')) != self.episode: - logger.log(str(self.show.indexerid) + u": NFO has an block for a different episode - wanted " + str(self.season) + "x" + str(self.episode) + " but got " + str(epDetails.findtext('season')) + "x" + 
str(epDetails.findtext('episode')), logger.DEBUG) + epDetails.findtext('episode') is None or int( + epDetails.findtext('episode')) != self.episode: + logger.log(str( + self.show.indexerid) + u": NFO has an block for a different episode - wanted " + str( + self.season) + "x" + str(self.episode) + " but got " + str( + epDetails.findtext('season')) + "x" + str(epDetails.findtext('episode')), logger.DEBUG) continue if epDetails.findtext('title') is None or epDetails.findtext('aired') is None: @@ -1491,7 +1579,8 @@ class TVEpisode(object): def __str__(self): toReturn = "" - toReturn += str(self.show.name) + " - " + str(self.season) + "x" + str(self.episode) + " - " + str(self.name) + "\n" + toReturn += str(self.show.name) + " - " + str(self.season) + "x" + str(self.episode) + " - " + str( + self.name) + "\n" toReturn += "location: " + str(self.location) + "\n" toReturn += "description: " + str(self.description) + "\n" toReturn += "subtitles: " + str(",".join(self.subtitles)) + "\n" @@ -1535,7 +1624,8 @@ class TVEpisode(object): def deleteEpisode(self): - logger.log(u"Deleting " + self.show.name + " " + str(self.season) + "x" + str(self.episode) + " from the DB", logger.DEBUG) + logger.log(u"Deleting " + self.show.name + " " + str(self.season) + "x" + str(self.episode) + " from the DB", + logger.DEBUG) # remove myself from the show dictionary if self.show.getEpisode(self.season, self.episode, noCreate=True) == self: @@ -1545,7 +1635,8 @@ class TVEpisode(object): # delete myself from the DB logger.log(u"Deleting myself from the database", logger.DEBUG) myDB = db.DBConnection() - sql = "DELETE FROM tv_episodes WHERE showid=" + str(self.show.indexerid) + " AND season=" + str(self.season) + " AND episode=" + str(self.episode) + sql = "DELETE FROM tv_episodes WHERE showid=" + str(self.show.indexerid) + " AND season=" + str( + self.season) + " AND episode=" + str(self.episode) myDB.action(sql) raise exceptions.EpisodeDeletedException() @@ -1563,8 +1654,12 @@ class TVEpisode(object): return # use a custom update/insert method to get the data into the DB - return ["INSERT OR REPLACE INTO tv_episodes (episode_id, indexerid, indexer, name, description, subtitles, subtitles_searchcount, subtitles_lastsearch, airdate, hasnfo, hastbn, status, location, file_size, release_name, is_proper, showid, season, episode) VALUES ((SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?),?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);", - [self.show.indexerid, self.season, self.episode, self.indexerid, self.indexer, self.name, self.description, ",".join([sub for sub in self.subtitles]), self.subtitles_searchcount, self.subtitles_lastsearch, self.airdate.toordinal(), self.hasnfo, self.hastbn, self.status, self.location, self.file_size, self.release_name, self.is_proper, self.show.indexerid, self.season, self.episode]] + return [ + "INSERT OR REPLACE INTO tv_episodes (episode_id, indexerid, indexer, name, description, subtitles, subtitles_searchcount, subtitles_lastsearch, airdate, hasnfo, hastbn, status, location, file_size, release_name, is_proper, showid, season, episode) VALUES ((SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? 
AND episode = ?),?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);", + [self.show.indexerid, self.season, self.episode, self.indexerid, self.indexer, self.name, self.description, + ",".join([sub for sub in self.subtitles]), self.subtitles_searchcount, self.subtitles_lastsearch, + self.airdate.toordinal(), self.hasnfo, self.hastbn, self.status, self.location, self.file_size, + self.release_name, self.is_proper, self.show.indexerid, self.season, self.episode]] def saveToDB(self, forceSave=False): """ @@ -1708,32 +1803,32 @@ class TVEpisode(object): show_name = self.show.name return { - '%SN': show_name, - '%S.N': dot(show_name), - '%S_N': us(show_name), - '%EN': ep_name, - '%E.N': dot(ep_name), - '%E_N': us(ep_name), - '%QN': Quality.qualityStrings[epQual], - '%Q.N': dot(Quality.qualityStrings[epQual]), - '%Q_N': us(Quality.qualityStrings[epQual]), - '%S': str(self.season), - '%0S': '%02d' % self.season, - '%E': str(self.episode), - '%0E': '%02d' % self.episode, - '%RN': release_name(self.release_name), - '%RG': release_group(self.release_name), - '%AD': str(self.airdate).replace('-', ' '), - '%A.D': str(self.airdate).replace('-', '.'), - '%A_D': us(str(self.airdate)), - '%A-D': str(self.airdate), - '%Y': str(self.airdate.year), - '%M': str(self.airdate.month), - '%D': str(self.airdate.day), - '%0M': '%02d' % self.airdate.month, - '%0D': '%02d' % self.airdate.day, - '%RT': "PROPER" if self.is_proper else "", - } + '%SN': show_name, + '%S.N': dot(show_name), + '%S_N': us(show_name), + '%EN': ep_name, + '%E.N': dot(ep_name), + '%E_N': us(ep_name), + '%QN': Quality.qualityStrings[epQual], + '%Q.N': dot(Quality.qualityStrings[epQual]), + '%Q_N': us(Quality.qualityStrings[epQual]), + '%S': str(self.season), + '%0S': '%02d' % self.season, + '%E': str(self.episode), + '%0E': '%02d' % self.episode, + '%RN': release_name(self.release_name), + '%RG': release_group(self.release_name), + '%AD': str(self.airdate).replace('-', ' '), + '%A.D': str(self.airdate).replace('-', '.'), + '%A_D': us(str(self.airdate)), + '%A-D': str(self.airdate), + '%Y': str(self.airdate.year), + '%M': str(self.airdate.month), + '%D': str(self.airdate.day), + '%0M': '%02d' % self.airdate.month, + '%0D': '%02d' % self.airdate.day, + '%RT': "PROPER" if self.is_proper else "", + } def _format_string(self, pattern, replace_map): """ @@ -1745,7 +1840,8 @@ class TVEpisode(object): # do the replacements for cur_replacement in sorted(replace_map.keys(), reverse=True): result_name = result_name.replace(cur_replacement, helpers.sanitizeFileName(replace_map[cur_replacement])) - result_name = result_name.replace(cur_replacement.lower(), helpers.sanitizeFileName(replace_map[cur_replacement].lower())) + result_name = result_name.replace(cur_replacement.lower(), + helpers.sanitizeFileName(replace_map[cur_replacement].lower())) return result_name @@ -1835,7 +1931,8 @@ class TVEpisode(object): for other_ep in self.relatedEps: # for limited extend we only append the last ep - if multi in (NAMING_LIMITED_EXTEND, NAMING_LIMITED_EXTEND_E_PREFIXED) and other_ep != self.relatedEps[-1]: + if multi in (NAMING_LIMITED_EXTEND, NAMING_LIMITED_EXTEND_E_PREFIXED) and other_ep != self.relatedEps[ + -1]: continue elif multi == NAMING_DUPLICATE: @@ -1946,17 +2043,21 @@ class TVEpisode(object): if absolute_current_path_no_ext.startswith(self.show.location): current_path = absolute_current_path_no_ext[len(self.show.location):] - logger.log(u"Renaming/moving episode from the base path " + self.location + " to " + absolute_proper_path, logger.DEBUG) + 
logger.log(u"Renaming/moving episode from the base path " + self.location + " to " + absolute_proper_path, + logger.DEBUG) # if it's already named correctly then don't do anything if proper_path == current_path: - logger.log(str(self.indexerid) + u": File " + self.location + " is already named correctly, skipping", logger.DEBUG) + logger.log(str(self.indexerid) + u": File " + self.location + " is already named correctly, skipping", + logger.DEBUG) return - related_files = postProcessor.PostProcessor(self.location, indexer=self.indexer).list_associated_files(self.location) + related_files = postProcessor.PostProcessor(self.location, indexer=self.indexer).list_associated_files( + self.location) if self.show.subtitles and sickbeard.SUBTITLES_DIR != '': - related_subs = postProcessor.PostProcessor(self.location, indexer=self.indexer).list_associated_files(sickbeard.SUBTITLES_DIR, subtitles_only=True) + related_subs = postProcessor.PostProcessor(self.location, indexer=self.indexer).list_associated_files( + sickbeard.SUBTITLES_DIR, subtitles_only=True) absolute_proper_subs_path = ek.ek(os.path.join, sickbeard.SUBTITLES_DIR, self.formatted_filename()) logger.log(u"Files associated to " + self.location + ": " + str(related_files), logger.DEBUG) @@ -1966,12 +2067,14 @@ class TVEpisode(object): # move related files for cur_related_file in related_files: - cur_result = helpers.rename_ep_file(cur_related_file, absolute_proper_path, absolute_current_path_no_ext_length) + cur_result = helpers.rename_ep_file(cur_related_file, absolute_proper_path, + absolute_current_path_no_ext_length) if cur_result == False: logger.log(str(self.indexerid) + u": Unable to rename file " + cur_related_file, logger.ERROR) for cur_related_sub in related_subs: - cur_result = helpers.rename_ep_file(cur_related_sub, absolute_proper_subs_path, absolute_current_path_no_ext_length) + cur_result = helpers.rename_ep_file(cur_related_sub, absolute_proper_subs_path, + absolute_current_path_no_ext_length) if cur_result == False: logger.log(str(self.indexerid) + u": Unable to rename file " + cur_related_sub, logger.ERROR) @@ -1995,19 +2098,21 @@ class TVEpisode(object): def convertToSceneNumbering(self): if self.show.air_by_date: return - if self.season is None: return # can't work without a season - if self.episode is None: return # need to know the episode + if self.season is None: return # can't work without a season + if self.episode is None: return # need to know the episode indexer_id = self.show.indexerid - (self.season, self.episode) = sickbeard.scene_numbering.get_scene_numbering(indexer_id, self.season, self.episode) + (self.season, self.episode) = sickbeard.scene_numbering.get_scene_numbering(indexer_id, self.season, + self.episode) def convertToIndexer(self): if self.show.air_by_date: return - if self.season is None: return # can't work without a season - if self.episode is None: return # need to know the episode + if self.season is None: return # can't work without a season + if self.episode is None: return # need to know the episode indexer_id = self.show.indexerid - (self.season, self.episode) = sickbeard.scene_numbering.get_indexer_numbering(indexer_id, self.season, self.episode) + (self.season, self.episode) = sickbeard.scene_numbering.get_indexer_numbering(indexer_id, self.season, + self.episode) diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index 5652c9c8..9ed68de8 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -34,16 +34,12 @@ from sickbeard.exceptions import ex, AuthException try: import 
xml.etree.cElementTree as etree except ImportError: - import elementtree.ElementTree as etree - - -from sickbeard.indexers import indexer_api, indexer_exceptions + import elementtree.ElementTree as etree from name_parser.parser import NameParser, InvalidNameException class CacheDBConnection(db.DBConnection): - def __init__(self, providerName): db.DBConnection.__init__(self, "cache.db") @@ -65,8 +61,8 @@ class CacheDBConnection(db.DBConnection): if str(e) != "table lastUpdate already exists": raise -class TVCache(): +class TVCache(): def __init__(self, provider): self.provider = provider @@ -108,26 +104,26 @@ class TVCache(): self.setLastUpdate() else: return [] - + # now that we've loaded the current RSS feed lets delete the old cache logger.log(u"Clearing " + self.provider.name + " cache and updating with new information") self._clearCache() - + parsedXML = helpers.parse_xml(data) - + if parsedXML is None: logger.log(u"Error trying to load " + self.provider.name + " RSS feed", logger.ERROR) return [] - + if self._checkAuth(parsedXML): - + if parsedXML.tag == 'rss': items = parsedXML.findall('.//item') - + else: logger.log(u"Resulting XML from " + self.provider.name + " isn't RSS, not parsing it", logger.ERROR) return [] - + cl = [] for item in items: ci = self._parseItem(item) @@ -137,14 +133,15 @@ class TVCache(): if len(cl) > 0: myDB = self._getDB() myDB.mass_action(cl) - + else: - raise AuthException(u"Your authentication credentials for " + self.provider.name + " are incorrect, check your config") - + raise AuthException( + u"Your authentication credentials for " + self.provider.name + " are incorrect, check your config") + return [] def _translateTitle(self, title): - return title.replace(' ', '.') + return title.replace(' ', '.') def _translateLinkURL(self, url): return url.replace('&', '&') @@ -153,19 +150,21 @@ class TVCache(): title = helpers.get_xml_text(item.find('title')) url = helpers.get_xml_text(item.find('link')) - + self._checkItemAuth(title, url) if title and url: title = self._translateTitle(title) url = self._translateLinkURL(url) - + logger.log(u"Adding item from RSS to cache: " + title, logger.DEBUG) return self._addCacheEntry(title, url) - + else: - logger.log(u"The XML returned from the " + self.provider.name + " feed is incomplete, this result is unusable", logger.DEBUG) - return None + logger.log( + u"The XML returned from the " + self.provider.name + " feed is incomplete, this result is unusable", + logger.DEBUG) + return None def _getLastUpdate(self): @@ -196,7 +195,8 @@ class TVCache(): def shouldUpdate(self): # if we've updated recently then skip the update if datetime.datetime.today() - self.lastUpdate < datetime.timedelta(minutes=self.minTime): - logger.log(u"Last update was too soon, using old cache: today()-" + str(self.lastUpdate) + "<" + str(datetime.timedelta(minutes=self.minTime)), logger.DEBUG) + logger.log(u"Last update was too soon, using old cache: today()-" + str(self.lastUpdate) + "<" + str( + datetime.timedelta(minutes=self.minTime)), logger.DEBUG) return False return True @@ -217,7 +217,7 @@ class TVCache(): continue if not parse_result: - logger.log(u"Giving up because I'm unable to parse this name: "+name, logger.DEBUG) + logger.log(u"Giving up because I'm unable to parse this name: " + name, logger.DEBUG) return None if not parse_result.series_name: @@ -242,9 +242,11 @@ class TVCache(): else: # check the name cache and see if we already know what show this is - logger.log(u"Checking the cache to see if we already know the indexer id of 
"+parse_result.series_name, logger.DEBUG) + logger.log( + u"Checking the cache to see if we already know the indexer id of " + parse_result.series_name, + logger.DEBUG) indexer_id = name_cache.retrieveNameFromCache(parse_result.series_name) - + # remember if the cache lookup worked or not so we know whether we should bother updating it later if indexer_id == None: logger.log(u"No cache results returned, continuing on with the search", logger.DEBUG) @@ -252,21 +254,25 @@ class TVCache(): else: logger.log(u"Cache lookup found " + repr(indexer_id) + ", using that", logger.DEBUG) from_cache = True - + # if the cache failed, try looking up the show name in the database if indexer_id == None: logger.log(u"Trying to look the show up in the show database", logger.DEBUG) showResult = helpers.searchDBForShow(parse_result.series_name) if showResult: - logger.log(u"" + parse_result.series_name + " was found to be show " + showResult[2] + " (" + str(showResult[1]) + ") in our DB.", logger.DEBUG) + logger.log( + u"" + parse_result.series_name + " was found to be show " + showResult[2] + " (" + str( + showResult[1]) + ") in our DB.", logger.DEBUG) indexer_id = showResult[1] # if the DB lookup fails then do a comprehensive regex search if indexer_id == None: - logger.log(u"Couldn't figure out a show name straight from the DB, trying a regex search instead", logger.DEBUG) + logger.log(u"Couldn't figure out a show name straight from the DB, trying a regex search instead", + logger.DEBUG) for curShow in sickbeard.showList: if show_name_helpers.isGoodResult(name, curShow, False): - logger.log(u"Successfully matched " + name + " to " + curShow.name + " with regex", logger.DEBUG) + logger.log(u"Successfully matched " + name + " to " + curShow.name + " with regex", + logger.DEBUG) indexer_id = curShow.indexerid indexer_lang = curShow.lang break @@ -303,19 +309,21 @@ class TVCache(): if not (indexer_lang == "" or indexer_lang == "en" or indexer_lang == None): lINDEXER_API_PARMS['language'] = indexer_lang - t = indexer_api.indexerApi(**lINDEXER_API_PARMS) + t = sickbeard.indexerApi(**lINDEXER_API_PARMS) epObj = t[indexer_id].airedOn(parse_result.air_date)[0] season = int(epObj["seasonnumber"]) episodes = [int(epObj["episodenumber"])] - except indexer_exceptions.indexer_episodenotfound: - logger.log(u"Unable to find episode with date " + str(parse_result.air_date) + " for show " + parse_result.series_name+", skipping", logger.WARNING) + except sickbeard.indexer_episodenotfound: + logger.log(u"Unable to find episode with date " + str( + parse_result.air_date) + " for show " + parse_result.series_name + ", skipping", logger.WARNING) return None - except indexer_exceptions.indexer_error, e: - logger.log(u"Unable to contact " + self.indexer + ": " + ex(e), logger.WARNING) + except sickbeard.indexer_error, e: + logger.log(u"Unable to contact " + sickbeard.indexerApi(self.indexer).name + ": " + ex(e), + logger.WARNING) return None - episodeText = "|"+"|".join(map(str, episodes))+"|" + episodeText = "|" + "|".join(map(str, episodes)) + "|" # get the current timestamp curTimestamp = int(time.mktime(datetime.datetime.today().timetuple())) @@ -326,8 +334,9 @@ class TVCache(): if not isinstance(name, unicode): name = unicode(name, 'utf-8') - myDB.action("INSERT INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality) VALUES (?,?,?,?,?,?,?)", - [name, season, episodeText, indexer_id, url, curTimestamp, quality]) + myDB.action( + "INSERT INTO [" + self.providerID + "] (name, season, episodes, 
indexerid, url, time, quality) VALUES (?,?,?,?,?,?,?)", + [name, season, episodeText, indexer_id, url, curTimestamp, quality]) def searchCache(self, episode, manualSearch=False): @@ -357,7 +366,9 @@ class TVCache(): if not episode: sqlResults = myDB.select("SELECT * FROM [" + self.providerID + "]") else: - sqlResults = myDB.select("SELECT * FROM [" + self.providerID + "] WHERE indexerid = ? AND season = ? AND episodes LIKE ?", [episode.show.indexerid, episode.season, "%|" + str(episode.episode) + "|%"]) + sqlResults = myDB.select( + "SELECT * FROM [" + self.providerID + "] WHERE indexerid = ? AND season = ? AND episodes LIKE ?", + [episode.show.indexerid, episode.season, "%|" + str(episode.episode) + "|%"]) # for each cache entry for curResult in sqlResults: @@ -386,7 +397,8 @@ class TVCache(): # if the show says we want that episode then add it to the list if not showObj.wantEpisode(curSeason, curEp, curQuality, manualSearch): - logger.log(u"Skipping " + curResult["name"] + " because we don't want an episode that's " + Quality.qualityStrings[curQuality], logger.DEBUG) + logger.log(u"Skipping " + curResult["name"] + " because we don't want an episode that's " + + Quality.qualityStrings[curQuality], logger.DEBUG) else: @@ -406,8 +418,8 @@ class TVCache(): result.name = title result.quality = curQuality result.content = self.provider.getURL(url) \ - if self.provider.providerType == sickbeard.providers.generic.GenericProvider.TORRENT \ - and not url.startswith('magnet') else None + if self.provider.providerType == sickbeard.providers.generic.GenericProvider.TORRENT \ + and not url.startswith('magnet') else None # add it to the list if epObj not in neededEps: diff --git a/sickbeard/tvtumbler.py b/sickbeard/tvtumbler.py index d5afd3d7..4adafa7b 100644 --- a/sickbeard/tvtumbler.py +++ b/sickbeard/tvtumbler.py @@ -26,7 +26,7 @@ def show_info(indexer_id): if time.time() < (cachedResult['mtime'] + UPDATE_INTERVAL): # cached result is still considered current, use it return cachedResult['response'] - # otherwise we just fall through to lookup + # otherwise we just fall through to lookup except KeyError: pass # no cached value, just fall through to lookup @@ -42,6 +42,6 @@ def show_info(indexer_id): # result is good, store it for later _tvtumber_cache[str(indexer_id)] = {'mtime': time.time(), - 'response': result['show']} + 'response': result['show']} return result['show'] diff --git a/sickbeard/ui.py b/sickbeard/ui.py index b8e1b3d3..15f088df 100644 --- a/sickbeard/ui.py +++ b/sickbeard/ui.py @@ -23,14 +23,16 @@ import sickbeard MESSAGE = 'notice' ERROR = 'error' + class Notifications(object): """ A queue of Notification objects. """ + def __init__(self): self._messages = [] self._errors = [] - + def message(self, title, message=''): """ Add a regular notification to the queue @@ -56,27 +58,28 @@ class Notifications(object): Returns: A list of Notification objects """ - + # filter out expired notifications self._errors = [x for x in self._errors if not x.is_expired()] self._messages = [x for x in self._messages if not x.is_expired()] - + # return any notifications that haven't been shown to the client already return [x.see() for x in self._errors + self._messages if x.is_new()] # static notification queue object notifications = Notifications() - + class Notification(object): """ Represents a single notification. Tracks its own timeout and a list of which clients have seen it before. 
""" + def __init__(self, title, message='', type=None, timeout=None): self.title = title self.message = message - + self._when = datetime.datetime.now() self._seen = [] @@ -84,7 +87,7 @@ class Notification(object): self.type = type else: self.type = MESSAGE - + if timeout: self._timeout = timeout else: @@ -95,14 +98,14 @@ class Notification(object): Returns True if the notification hasn't been displayed to the current client (aka IP address). """ return cherrypy.request.remote.ip not in self._seen - + def is_expired(self): """ Returns True if the notification is older than the specified timeout value. """ return datetime.datetime.now() - self._when > self._timeout - + def see(self): """ Returns this notification object and marks it as seen by the client ip @@ -110,17 +113,18 @@ class Notification(object): self._seen.append(cherrypy.request.remote.ip) return self -class ProgressIndicator(): +class ProgressIndicator(): def __init__(self, percentComplete=0, currentStatus={'title': ''}): self.percentComplete = percentComplete self.currentStatus = currentStatus + class ProgressIndicators(): _pi = {'massUpdate': [], 'massAdd': [], 'dailyUpdate': [] - } + } @staticmethod def getIndicator(name): @@ -139,10 +143,12 @@ class ProgressIndicators(): def setIndicator(name, indicator): ProgressIndicators._pi[name].append(indicator) + class QueueProgressIndicator(): """ A class used by the UI to show the progress of the queue or a part of it. """ + def __init__(self, name, queueItemList): self.queueItemList = queueItemList self.name = name @@ -157,7 +163,8 @@ class QueueProgressIndicator(): return len([x for x in self.queueItemList if x.isInQueue()]) def nextName(self): - for curItem in [sickbeard.showQueueScheduler.action.currentItem]+sickbeard.showQueueScheduler.action.queue: #@UndefinedVariable + for curItem in [ + sickbeard.showQueueScheduler.action.currentItem] + sickbeard.showQueueScheduler.action.queue: #@UndefinedVariable if curItem in self.queueItemList: return curItem.name @@ -170,7 +177,8 @@ class QueueProgressIndicator(): if numTotal == 0: return 0 else: - return int(float(numFinished)/float(numTotal)*100) + return int(float(numFinished) / float(numTotal) * 100) + class LoadingTVShow(): def __init__(self, dir): diff --git a/sickbeard/versionChecker.py b/sickbeard/versionChecker.py index a60ca71f..dc1e931d 100644 --- a/sickbeard/versionChecker.py +++ b/sickbeard/versionChecker.py @@ -120,7 +120,6 @@ class CheckVersion(): class UpdateManager(): - def get_github_repo_user(self): return 'echel0n' @@ -132,7 +131,6 @@ class UpdateManager(): class WindowsUpdateManager(UpdateManager): - def __init__(self): self.github_repo_user = self.get_github_repo_user() self.github_repo = self.get_github_repo() @@ -199,7 +197,8 @@ class WindowsUpdateManager(UpdateManager): if not self._cur_version: newest_text = "Unknown SickBeard Windows binary version. Not updating with original version." 
else: - newest_text = 'There is a newer version available (build ' + str(self._newest_version) + ')' + newest_text = 'There is a newer version available (build ' + str( + self._newest_version) + ')' newest_text += "— Update Now" sickbeard.NEWEST_VERSION_STRING = newest_text @@ -248,10 +247,12 @@ class WindowsUpdateManager(UpdateManager): os.remove(zip_download_path) # find update dir name - update_dir_contents = [x for x in os.listdir(sb_update_dir) if os.path.isdir(os.path.join(sb_update_dir, x))] + update_dir_contents = [x for x in os.listdir(sb_update_dir) if + os.path.isdir(os.path.join(sb_update_dir, x))] if len(update_dir_contents) != 1: - logger.log(u"Invalid update data, update failed. Maybe try deleting your sb-update folder?", logger.ERROR) + logger.log(u"Invalid update data, update failed. Maybe try deleting your sb-update folder?", + logger.ERROR) return False content_dir = os.path.join(sb_update_dir, update_dir_contents[0]) @@ -268,7 +269,6 @@ class WindowsUpdateManager(UpdateManager): class GitUpdateManager(UpdateManager): - def __init__(self): self._git_path = self._find_working_git() self.github_repo_user = self.get_github_repo_user() @@ -345,7 +345,8 @@ class GitUpdateManager(UpdateManager): try: logger.log(u"Executing " + cmd + " with your shell in " + sickbeard.PROG_DIR, logger.DEBUG) - p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True, cwd=sickbeard.PROG_DIR) + p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, + shell=True, cwd=sickbeard.PROG_DIR) output, err = p.communicate() exit_status = p.returncode @@ -397,7 +398,7 @@ class GitUpdateManager(UpdateManager): return False def _find_git_branch(self): - branch_info, err, exit_status = self._run_git(self._git_path, 'symbolic-ref -q HEAD') # @UnusedVariable + branch_info, err, exit_status = self._run_git(self._git_path, 'symbolic-ref -q HEAD') # @UnusedVariable if exit_status == 0 and branch_info: branch = branch_info.strip().replace('refs/heads/', '', 1) if branch: @@ -451,7 +452,8 @@ class GitUpdateManager(UpdateManager): return logger.log(u"cur_commit = " + str(self._cur_commit_hash) + u", newest_commit = " + str(self._newest_commit_hash) - + u", num_commits_behind = " + str(self._num_commits_behind) + u", num_commits_ahead = " + str(self._num_commits_ahead), logger.DEBUG) + + u", num_commits_behind = " + str(self._num_commits_behind) + u", num_commits_ahead = " + str( + self._num_commits_ahead), logger.DEBUG) def set_newest_text(self): @@ -515,7 +517,6 @@ class GitUpdateManager(UpdateManager): class SourceUpdateManager(UpdateManager): - def __init__(self): self.github_repo_user = self.get_github_repo_user() self.github_repo = self.get_github_repo() @@ -532,7 +533,7 @@ class SourceUpdateManager(UpdateManager): if not os.path.isfile(version_file): self._cur_commit_hash = None return - + try: with open(version_file, 'r') as fp: self._cur_commit_hash = fp.read().strip(' \n\r') @@ -610,17 +611,17 @@ class SourceUpdateManager(UpdateManager): newest_text += "— Update Now" elif self._num_commits_behind > 0: - base_url = 'http://github.com/' + self.github_repo_user + '/' + self.github_repo - if self._newest_commit_hash: - url = base_url + '/compare/' + self._cur_commit_hash + '...' + self._newest_commit_hash - else: - url = base_url + '/commits/' + base_url = 'http://github.com/' + self.github_repo_user + '/' + self.github_repo + if self._newest_commit_hash: + url = base_url + '/compare/' + self._cur_commit_hash + '...' 
+ self._newest_commit_hash
+            else:
+                url = base_url + '/commits/'

-            newest_text = 'There is a newer version available'
-            newest_text += " (you're " + str(self._num_commits_behind) + " commit"
-            if self._num_commits_behind > 1:
-                newest_text += "s"
-            newest_text += " behind)" + "— Update Now"
+            newest_text = 'There is a newer version available'
+            newest_text += " (you're " + str(self._num_commits_behind) + " commit"
+            if self._num_commits_behind > 1:
+                newest_text += "s"
+            newest_text += " behind)" + "— Update Now"

        else:
            return

@@ -669,7 +670,8 @@ class SourceUpdateManager(UpdateManager):
        os.remove(tar_download_path)

        # find update dir name
-        update_dir_contents = [x for x in os.listdir(sb_update_dir) if os.path.isdir(os.path.join(sb_update_dir, x))]
+        update_dir_contents = [x for x in os.listdir(sb_update_dir) if
+                               os.path.isdir(os.path.join(sb_update_dir, x))]
        if len(update_dir_contents) != 1:
            logger.log(u"Invalid update data, update failed: " + str(update_dir_contents), logger.ERROR)
            return False
diff --git a/sickbeard/webapi.py b/sickbeard/webapi.py
index 683821fa..2d52a670 100644
--- a/sickbeard/webapi.py
+++ b/sickbeard/webapi.py
@@ -37,9 +37,8 @@ from sickbeard import search_queue
from sickbeard.common import SNATCHED, SNATCHED_PROPER, DOWNLOADED, SKIPPED, UNAIRED, IGNORED, ARCHIVED, WANTED, UNKNOWN
from common import Quality, qualityPresetStrings, statusStrings
from sickbeard import image_cache
-from sickbeard.common import indexerStrings
-from sickbeard.indexers import indexer_api, indexer_exceptions
+
try:
    import json
except ImportError:
@@ -52,26 +51,25 @@ from lib import subliminal
dateFormat = "%Y-%m-%d"
dateTimeFormat = "%Y-%m-%d %H:%M"
-
-RESULT_SUCCESS = 10 # only use inside the run methods
-RESULT_FAILURE = 20 # only use inside the run methods
-RESULT_TIMEOUT = 30 # not used yet :(
-RESULT_ERROR = 40 # only use outside of the run methods !
-RESULT_FATAL = 50 # only use in Api.default() ! this is the "we encountered an internal error" error
-RESULT_DENIED = 60 # only use in Api.default() ! this is the acces denied error
+RESULT_SUCCESS = 10  # only use inside the run methods
+RESULT_FAILURE = 20  # only use inside the run methods
+RESULT_TIMEOUT = 30  # not used yet :(
+RESULT_ERROR = 40  # only use outside of the run methods !
+RESULT_FATAL = 50  # only use in Api.default() ! this is the "we encountered an internal error" error
+RESULT_DENIED = 60  # only use in Api.default() ! this is the access denied error
result_type_map = {RESULT_SUCCESS: "success",
-                   RESULT_FAILURE: "failure",
-                   RESULT_TIMEOUT: "timeout",
-                   RESULT_ERROR: "error",
-                   RESULT_FATAL: "fatal",
-                   RESULT_DENIED: "denied",
-                   }
+                   RESULT_FAILURE: "failure",
+                   RESULT_TIMEOUT: "timeout",
+                   RESULT_ERROR: "error",
+                   RESULT_FATAL: "fatal",
+                   RESULT_DENIED: "denied",
+}
# basically everything except RESULT_SUCCESS / success is bad

class Api:
    """ api class that returns json results """
-    version = 4 # use an int since float-point is unpredictible
+    version = 4  # use an int since floating-point is unpredictable
    intent = 4

    @cherrypy.expose
@@ -84,7 +82,7 @@ class Api:
        # default json
        outputCallbackDict = {'default': self._out_as_json,
                              'image': lambda x: x['image'],
-                              }
+        }

        # do we have acces ?
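# --- editor's note (annotation, not part of the patch) ----------------------
# Api.default() above routes every response through outputCallbackDict: the
# dispatcher returns a plain dict, and an optional 'outputType' key picks the
# serializer (JSON by default, raw bytes for images). A small sketch of that
# dispatch pattern; the names here are illustrative, not SickBeard's:
import json

def render(out_dict):
    callbacks = {
        'default': lambda d: json.dumps(d, indent=4, sort_keys=True),
        'image': lambda d: d['image'],  # raw payload, bypasses JSON encoding
    }
    key = out_dict.get('outputType', 'default')
    return callbacks[key if key in callbacks else 'default'](out_dict)

print(render({'result': 'success', 'data': {}}))
# ----------------------------------------------------------------------------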
if access: @@ -98,24 +96,26 @@ class Api: # if profile was set wrap "_call_dispatcher" in the profile function if 'profile' in kwargs: from lib.profilehooks import profile + _call_dispatcher = profile(_call_dispatcher, immediate=True) del kwargs["profile"] # if debug was set call the "_call_dispatcher" if 'debug' in kwargs: - outDict = _call_dispatcher(args, kwargs) # this way we can debug the cherry.py traceback in the browser + outDict = _call_dispatcher(args, kwargs) # this way we can debug the cherry.py traceback in the browser del kwargs["debug"] - else:# if debug was not set we wrap the "call_dispatcher" in a try block to assure a json output + else: # if debug was not set we wrap the "call_dispatcher" in a try block to assure a json output try: outDict = _call_dispatcher(args, kwargs) - except cherrypy.HTTPRedirect: # seams like cherrypy uses exceptions for redirecting apparently this can happen when requesting images but it is ok so lets re raise it + except cherrypy.HTTPRedirect: # seams like cherrypy uses exceptions for redirecting apparently this can happen when requesting images but it is ok so lets re raise it raise - except Exception, e: # real internal error oohhh nooo :( + except Exception, e: # real internal error oohhh nooo :( logger.log(u"API :: " + ex(e), logger.ERROR) errorData = {"error_msg": ex(e), "args": args, "kwargs": kwargs} - outDict = _responds(RESULT_FATAL, errorData, "SickBeard encountered an internal error! Please report to the Devs") + outDict = _responds(RESULT_FATAL, errorData, + "SickBeard encountered an internal error! Please report to the Devs") if 'outputType' in outDict: outputCallback = outputCallbackDict[outDict['outputType']] @@ -147,10 +147,13 @@ class Api: episodeSQLResults = {} for curShow in t.sortedShowList: - seasonSQLResults[curShow.indexerid] = myDB.select("SELECT DISTINCT season FROM tv_episodes WHERE showid = ? ORDER BY season DESC", [curShow.indexerid]) + seasonSQLResults[curShow.indexerid] = myDB.select( + "SELECT DISTINCT season FROM tv_episodes WHERE showid = ? ORDER BY season DESC", [curShow.indexerid]) for curShow in t.sortedShowList: - episodeSQLResults[curShow.indexerid] = myDB.select("SELECT DISTINCT season,episode FROM tv_episodes WHERE showid = ? ORDER BY season DESC, episode DESC", [curShow.indexerid]) + episodeSQLResults[curShow.indexerid] = myDB.select( + "SELECT DISTINCT season,episode FROM tv_episodes WHERE showid = ? 
ORDER BY season DESC, episode DESC", + [curShow.indexerid]) t.seasonSQLResults = seasonSQLResults t.episodeSQLResults = episodeSQLResults @@ -172,10 +175,11 @@ class Api: out = json.dumps(dict, indent=self.intent, sort_keys=True) callback = request.params.get('callback') or request.params.get('jsonp') if callback != None: - out = callback + '(' + out + ');' # wrap with JSONP call if requested - except Exception, e: # if we fail to generate the output fake an error + out = callback + '(' + out + ');' # wrap with JSONP call if requested + except Exception, e: # if we fail to generate the output fake an error logger.log(u"API :: " + traceback.format_exc(), logger.DEBUG) - out = '{"result":"' + result_type_map[RESULT_ERROR] + '", "message": "error while composing output: "' + ex(e) + '"}' + out = '{"result":"' + result_type_map[RESULT_ERROR] + '", "message": "error while composing output: "' + ex( + e) + '"}' return out def _grand_access(self, realKey, args, kwargs): @@ -183,9 +187,9 @@ class Api: remoteIp = cherrypy.request.remote.ip apiKey = kwargs.get("apikey", None) if not apiKey: - if args: # if we have keyless vars we assume first one is the api key, always ! + if args: # if we have keyless vars we assume first one is the api key, always ! apiKey = args[0] - args = args[1:] # remove the apikey from the args tuple + args = args[1:] # remove the apikey from the args tuple else: del kwargs["apikey"] @@ -229,28 +233,29 @@ def call_dispatcher(args, kwargs): for cmd in cmds: curArgs, curKwargs = filter_params(cmd, args, kwargs) cmdIndex = None - if len(cmd.split("_")) > 1: # was a index used for this cmd ? - cmd, cmdIndex = cmd.split("_") # this gives us the clear cmd and the index + if len(cmd.split("_")) > 1: # was a index used for this cmd ? + cmd, cmdIndex = cmd.split("_") # this gives us the clear cmd and the index logger.log(u"API :: " + cmd + ": curKwargs " + str(curKwargs), logger.DEBUG) - if not (multiCmds and cmd in ('show.getposter', 'show.getbanner')): # skip these cmd while chaining + if not (multiCmds and cmd in ('show.getposter', 'show.getbanner')): # skip these cmd while chaining try: if cmd in _functionMaper: - curOutDict = _functionMaper.get(cmd)(curArgs, curKwargs).run() # get the cmd class, init it and run() + curOutDict = _functionMaper.get(cmd)(curArgs, + curKwargs).run() # get the cmd class, init it and run() elif _is_int(cmd): curOutDict = TVDBShorthandWrapper(curArgs, curKwargs, cmd).run() else: curOutDict = _responds(RESULT_ERROR, "No such cmd: '" + cmd + "'") - except ApiError, e: # Api errors that we raised, they are harmless + except ApiError, e: # Api errors that we raised, they are harmless curOutDict = _responds(RESULT_ERROR, msg=ex(e)) - else: # if someone chained one of the forbiden cmds they will get an error for this one cmd + else: # if someone chained one of the forbiden cmds they will get an error for this one cmd curOutDict = _responds(RESULT_ERROR, msg="The cmd '" + cmd + "' is not supported while chaining") if multiCmds: # note: if multiple same cmds are issued but one has not an index defined it will override all others # or the other way around, this depends on the order of the cmds # this is not a bug - if cmdIndex is None: # do we need a index dict for this cmd ? + if cmdIndex is None: # do we need a index dict for this cmd ? 
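# --- editor's note (annotation, not part of the patch) ----------------------
# call_dispatcher() above supports chained cmds such as "show_1|show_2|sb": a
# trailing "_<index>" is split off the cmd name so the same cmd may be issued
# several times, and indexed results are nested one level deeper in the
# response dict. A reduced sketch of that bookkeeping (not the real dispatcher):
def file_result(out_dict, cmd, cmd_index, result):
    if cmd_index is None:  # plain cmd: store the result directly
        out_dict[cmd] = result
    else:                  # indexed cmd: nest the result under its index
        out_dict.setdefault(cmd, {})[cmd_index] = result

outDict = {}
for raw_cmd in "show_1|show_2|sb".split("|"):
    cmd, _, idx = raw_cmd.partition("_")
    file_result(outDict, cmd, idx or None, {"ok": True})
# outDict == {'show': {'1': {'ok': True}, '2': {'ok': True}}, 'sb': {'ok': True}}
# ----------------------------------------------------------------------------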
outDict[cmd] = curOutDict else: if not cmd in outDict: @@ -259,9 +264,9 @@ def call_dispatcher(args, kwargs): else: outDict = curOutDict - if multiCmds: # if we had multiple cmds we have to wrap it in a response dict + if multiCmds: # if we had multiple cmds we have to wrap it in a response dict outDict = _responds(RESULT_SUCCESS, outDict) - else: # index / no cmd given + else: # index / no cmd given outDict = CMD_SickBeard(args, kwargs).run() return outDict @@ -296,7 +301,7 @@ def filter_params(cmd, args, kwargs): if kwarg.find(cmd + ".") == 0: cleanKey = kwarg.rpartition(".")[2] curKwargs[cleanKey] = kwargs[kwarg].lower() - elif not "." in kwarg: # the kwarg was not namespaced therefore a "global" + elif not "." in kwarg: # the kwarg was not namespaced therefore a "global" curKwargs[kwarg] = kwargs[kwarg] return curArgs, curKwargs @@ -332,7 +337,7 @@ class ApiCall(object): self._optionalParams = [] for paramDict, type in [(self._requiredParams, "requiredParameters"), - (self._optionalParams, "optionalParameters")]: + (self._optionalParams, "optionalParameters")]: if type in self._help: for paramName in paramDict: @@ -442,11 +447,14 @@ class ApiCall(object): elif type == "ignore": pass else: - logger.log(u"API :: Invalid param type set " + str(type) + " can not check or convert ignoring it", logger.ERROR) + logger.log(u"API :: Invalid param type set " + str(type) + " can not check or convert ignoring it", + logger.ERROR) if error: # this is a real ApiError !! - raise ApiError(u"param: '" + str(name) + "' with given value: '" + str(value) + "' could not be parsed into '" + str(type) + "'") + raise ApiError( + u"param: '" + str(name) + "' with given value: '" + str(value) + "' could not be parsed into '" + str( + type) + "'") return value @@ -467,7 +475,8 @@ class ApiCall(object): if error: # this is kinda a ApiError but raising an error is the only way of quitting here - raise ApiError(u"param: '" + str(name) + "' with given value: '" + str(value) + "' is out of allowed range '" + str(allowedValues) + "'") + raise ApiError(u"param: '" + str(name) + "' with given value: '" + str( + value) + "' is out of allowed range '" + str(allowedValues) + "'") class TVDBShorthandWrapper(ApiCall): @@ -667,14 +676,15 @@ class ApiError(Exception): class IntParseError(Exception): "A value could not be parsed into a int. 
But should be parsable to a int " + #-------------------------------------------------------------------------------------# class CMD_Help(ApiCall): _help = {"desc": "display help information for a given subject/command", "optionalParameters": {"subject": {"desc": "command - the top level command"}, - } } + } def __init__(self, args, kwargs): # required @@ -694,17 +704,20 @@ class CMD_Help(ApiCall): class CMD_ComingEpisodes(ApiCall): _help = {"desc": "display the coming episodes", "optionalParameters": {"sort": {"desc": "change the sort order"}, - "type": {"desc": "one or more of allowedValues separated by |"}, - "paused": {"desc": "0 to exclude paused shows, 1 to include them, or omitted to use the SB default"}, - } + "type": {"desc": "one or more of allowedValues separated by |"}, + "paused": { + "desc": "0 to exclude paused shows, 1 to include them, or omitted to use the SB default"}, } + } def __init__(self, args, kwargs): # required # optional self.sort, args = self.check_params(args, kwargs, "sort", "date", False, "string", ["date", "show", "network"]) - self.type, args = self.check_params(args, kwargs, "type", "today|missed|soon|later", False, "list", ["missed", "later", "today", "soon"]) - self.paused, args = self.check_params(args, kwargs, "paused", sickbeard.COMING_EPS_DISPLAY_PAUSED, False, "int", [0, 1]) + self.type, args = self.check_params(args, kwargs, "type", "today|missed|soon|later", False, "list", + ["missed", "later", "today", "soon"]) + self.paused, args = self.check_params(args, kwargs, "paused", sickbeard.COMING_EPS_DISPLAY_PAUSED, False, "int", + [0, 1]) # super, missing, help ApiCall.__init__(self, args, kwargs) @@ -718,14 +731,23 @@ class CMD_ComingEpisodes(ApiCall): qualList = Quality.DOWNLOADED + Quality.SNATCHED + [ARCHIVED, IGNORED] myDB = db.DBConnection(row_type="dict") - sql_results = myDB.select("SELECT airdate, airs, episode, name AS 'ep_name', description AS 'ep_plot', network, season, showid AS 'indexerid', show_name, tv_shows.quality AS quality, tv_shows.status AS 'show_status', tv_shows.paused AS 'paused' FROM tv_episodes, tv_shows WHERE season != 0 AND airdate >= ? AND airdate < ? AND tv_shows.indexer_id = tv_episodes.showid AND tv_episodes.status NOT IN (" + ','.join(['?'] * len(qualList)) + ")", [today, next_week] + qualList) + sql_results = myDB.select( + "SELECT airdate, airs, episode, name AS 'ep_name', description AS 'ep_plot', network, season, showid AS 'indexerid', show_name, tv_shows.quality AS quality, tv_shows.status AS 'show_status', tv_shows.paused AS 'paused' FROM tv_episodes, tv_shows WHERE season != 0 AND airdate >= ? AND airdate < ? AND tv_shows.indexer_id = tv_episodes.showid AND tv_episodes.status NOT IN (" + ','.join( + ['?'] * len(qualList)) + ")", [today, next_week] + qualList) for cur_result in sql_results: done_show_list.append(int(cur_result["indexerid"])) - more_sql_results = myDB.select("SELECT airdate, airs, episode, name AS 'ep_name', description AS 'ep_plot', network, season, showid AS 'indexerid', show_name, tv_shows.quality AS quality, tv_shows.status AS 'show_status', tv_shows.paused AS 'paused' FROM tv_episodes outer_eps, tv_shows WHERE season != 0 AND showid NOT IN (" + ','.join(['?'] * len(done_show_list)) + ") AND tv_shows.indexer_id = outer_eps.showid AND airdate = (SELECT airdate FROM tv_episodes inner_eps WHERE inner_eps.season != 0 AND inner_eps.showid = outer_eps.showid AND inner_eps.airdate >= ? 
ORDER BY inner_eps.airdate ASC LIMIT 1) AND outer_eps.status NOT IN (" + ','.join(['?'] * len(Quality.DOWNLOADED + Quality.SNATCHED)) + ")", done_show_list + [next_week] + Quality.DOWNLOADED + Quality.SNATCHED) + more_sql_results = myDB.select( + "SELECT airdate, airs, episode, name AS 'ep_name', description AS 'ep_plot', network, season, showid AS 'indexerid', show_name, tv_shows.quality AS quality, tv_shows.status AS 'show_status', tv_shows.paused AS 'paused' FROM tv_episodes outer_eps, tv_shows WHERE season != 0 AND showid NOT IN (" + ','.join( + ['?'] * len( + done_show_list)) + ") AND tv_shows.indexer_id = outer_eps.showid AND airdate = (SELECT airdate FROM tv_episodes inner_eps WHERE inner_eps.season != 0 AND inner_eps.showid = outer_eps.showid AND inner_eps.airdate >= ? ORDER BY inner_eps.airdate ASC LIMIT 1) AND outer_eps.status NOT IN (" + ','.join( + ['?'] * len(Quality.DOWNLOADED + Quality.SNATCHED)) + ")", + done_show_list + [next_week] + Quality.DOWNLOADED + Quality.SNATCHED) sql_results += more_sql_results - more_sql_results = myDB.select("SELECT airdate, airs, episode, name AS 'ep_name', description AS 'ep_plot', network, season, showid AS 'indexerid', show_name, tv_shows.quality AS quality, tv_shows.status AS 'show_status', tv_shows.paused AS 'paused' FROM tv_episodes, tv_shows WHERE season != 0 AND tv_shows.indexer_id = tv_episodes.showid AND airdate < ? AND airdate >= ? AND tv_episodes.status = ? AND tv_episodes.status NOT IN (" + ','.join(['?'] * len(qualList)) + ")", [today, recently, WANTED] + qualList) + more_sql_results = myDB.select( + "SELECT airdate, airs, episode, name AS 'ep_name', description AS 'ep_plot', network, season, showid AS 'indexerid', show_name, tv_shows.quality AS quality, tv_shows.status AS 'show_status', tv_shows.paused AS 'paused' FROM tv_episodes, tv_shows WHERE season != 0 AND tv_shows.indexer_id = tv_episodes.showid AND airdate < ? AND airdate >= ? AND tv_episodes.status = ? AND tv_episodes.status NOT IN (" + ','.join( + ['?'] * len(qualList)) + ")", [today, recently, WANTED] + qualList) sql_results += more_sql_results # sort by air date @@ -790,12 +812,13 @@ class CMD_ComingEpisodes(ApiCall): class CMD_Episode(ApiCall): _help = {"desc": "display detailed info about an episode", "requiredParameters": {"indexerid": {"desc": "thetvdb.com unique id of a show"}, - "season": {"desc": "the season number"}, - "episode": {"desc": "the episode number"} - }, - "optionalParameters": {"full_path": {"desc": "show the full absolute path (if valid) instead of a relative path for the episode location"} - } + "season": {"desc": "the season number"}, + "episode": {"desc": "the episode number"} + }, + "optionalParameters": {"full_path": { + "desc": "show the full absolute path (if valid) instead of a relative path for the episode location"} } + } def __init__(self, args, kwargs): # required @@ -814,7 +837,9 @@ class CMD_Episode(ApiCall): return _responds(RESULT_FAILURE, msg="Show not found") myDB = db.DBConnection(row_type="dict") - sqlResults = myDB.select("SELECT name, description, airdate, status, location, file_size, release_name, subtitles FROM tv_episodes WHERE showid = ? AND episode = ? AND season = ?", [self.indexerid, self.e, self.s]) + sqlResults = myDB.select( + "SELECT name, description, airdate, status, location, file_size, release_name, subtitles FROM tv_episodes WHERE showid = ? AND episode = ? 
AND season = ?", + [self.indexerid, self.e, self.s]) if not len(sqlResults) == 1: raise ApiError("Episode not found") episode = sqlResults[0] @@ -830,9 +855,9 @@ class CMD_Episode(ApiCall): pass elif bool(self.fullPath) == False and showPath: # using the length because lstrip removes to much - showPathLength = len(showPath) + 1 # the / or \ yeah not that nice i know + showPathLength = len(showPath) + 1 # the / or \ yeah not that nice i know episode["location"] = episode["location"][showPathLength:] - elif not showPath: # show dir is broken ... episode path will be empty + elif not showPath: # show dir is broken ... episode path will be empty episode["location"] = "" # convert stuff to human form episode["airdate"] = _ordinal_to_dateForm(episode["airdate"]) @@ -848,10 +873,10 @@ class CMD_Episode(ApiCall): class CMD_EpisodeSearch(ApiCall): _help = {"desc": "search for an episode. the response might take some time", "requiredParameters": {"indexerid": {"desc": "thetvdb.com unique id of a show"}, - "season": {"desc": "the season number"}, - "episode": {"desc": "the episode number"} - } + "season": {"desc": "the season number"}, + "episode": {"desc": "the episode number"} } + } def __init__(self, args, kwargs): # required @@ -875,17 +900,18 @@ class CMD_EpisodeSearch(ApiCall): # make a queue item for it and put it on the queue ep_queue_item = search_queue.ManualSearchQueueItem(epObj) - sickbeard.searchQueueScheduler.action.add_item(ep_queue_item) #@UndefinedVariable + sickbeard.searchQueueScheduler.action.add_item(ep_queue_item) #@UndefinedVariable # wait until the queue item tells us whether it worked or not - while ep_queue_item.success == None: #@UndefinedVariable + while ep_queue_item.success == None: #@UndefinedVariable time.sleep(1) # return the correct json value if ep_queue_item.success: - status, quality = Quality.splitCompositeStatus(epObj.status) #@UnusedVariable + status, quality = Quality.splitCompositeStatus(epObj.status) #@UnusedVariable # TODO: split quality and status? 
- return _responds(RESULT_SUCCESS, {"quality": _get_quality_string(quality)}, "Snatched (" + _get_quality_string(quality) + ")") + return _responds(RESULT_SUCCESS, {"quality": _get_quality_string(quality)}, + "Snatched (" + _get_quality_string(quality) + ")") return _responds(RESULT_FAILURE, msg='Unable to find episode') @@ -893,19 +919,20 @@ class CMD_EpisodeSearch(ApiCall): class CMD_EpisodeSetStatus(ApiCall): _help = {"desc": "set status of an episode or season (when no ep is provided)", "requiredParameters": {"indexerid": {"desc": "thetvdb.com unique id of a show"}, - "season": {"desc": "the season number"}, - "status": {"desc": "the status values: wanted, skipped, archived, ignored, failed"} - }, + "season": {"desc": "the season number"}, + "status": {"desc": "the status values: wanted, skipped, archived, ignored, failed"} + }, "optionalParameters": {"episode": {"desc": "the episode number"}, "force": {"desc": "should we replace existing (downloaded) episodes or not"} - } } + } def __init__(self, args, kwargs): # required self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", []) self.s, args = self.check_params(args, kwargs, "season", None, True, "int", []) - self.status, args = self.check_params(args, kwargs, "status", None, True, "string", ["wanted", "skipped", "archived", "ignored", "failed"]) + self.status, args = self.check_params(args, kwargs, "status", None, True, "string", + ["wanted", "skipped", "archived", "ignored", "failed"]) # optional self.e, args = self.check_params(args, kwargs, "episode", None, False, "int", []) self.force, args = self.check_params(args, kwargs, "force", 0, False, "bool", []) @@ -923,7 +950,7 @@ class CMD_EpisodeSetStatus(ApiCall): if str(statusStrings[status]).lower() == str(self.status).lower(): self.status = status break - else: # if we dont break out of the for loop we got here. + else: # if we dont break out of the for loop we got here. # the allowed values has at least one item that could not be matched against the internal status strings raise ApiError("The status string could not be matched to a status. 
Report to Devs!") @@ -938,7 +965,8 @@ class CMD_EpisodeSetStatus(ApiCall): ep_list = showObj.getAllEpisodes(season=self.s) def _epResult(result_code, ep, msg=""): - return {'season': ep.season, 'episode': ep.episode, 'status': _get_status_Strings(ep.status), 'result': result_type_map[result_code], 'message': msg} + return {'season': ep.season, 'episode': ep.episode, 'status': _get_status_Strings(ep.status), + 'result': result_type_map[result_code], 'message': msg} ep_results = [] failure = False @@ -955,14 +983,16 @@ class CMD_EpisodeSetStatus(ApiCall): with epObj.lock: # don't let them mess up UNAIRED episodes if epObj.status == UNAIRED: - if self.e != None: # setting the status of a unaired is only considert a failure if we directly wanted this episode, but is ignored on a season request - ep_results.append(_epResult(RESULT_FAILURE, epObj, "Refusing to change status because it is UNAIRED")) + if self.e != None: # setting the status of a unaired is only considert a failure if we directly wanted this episode, but is ignored on a season request + ep_results.append( + _epResult(RESULT_FAILURE, epObj, "Refusing to change status because it is UNAIRED")) failure = True continue # allow the user to force setting the status for an already downloaded episode if epObj.status in Quality.DOWNLOADED and not self.force: - ep_results.append(_epResult(RESULT_FAILURE, epObj, "Refusing to change status because it is already marked as DOWNLOADED")) + ep_results.append(_epResult(RESULT_FAILURE, epObj, + "Refusing to change status because it is already marked as DOWNLOADED")) failure = True continue @@ -976,8 +1006,9 @@ class CMD_EpisodeSetStatus(ApiCall): extra_msg = "" if start_backlog: cur_backlog_queue_item = search_queue.BacklogQueueItem(showObj, ep_segment) - sickbeard.searchQueueScheduler.action.add_item(cur_backlog_queue_item) #@UndefinedVariable - logger.log(u"API :: Starting backlog for " + showObj.name + " season " + str(ep_segment) + " because some episodes were set to WANTED") + sickbeard.searchQueueScheduler.action.add_item(cur_backlog_queue_item) #@UndefinedVariable + logger.log(u"API :: Starting backlog for " + showObj.name + " season " + str( + ep_segment) + " because some episodes were set to WANTED") extra_msg = " Backlog started" if failure: @@ -985,13 +1016,14 @@ class CMD_EpisodeSetStatus(ApiCall): else: return _responds(RESULT_SUCCESS, msg='All status set successfully.' + extra_msg) + class CMD_SubtitleSearch(ApiCall): _help = {"desc": "search episode subtitles. 
the response might take some time", "requiredParameters": {"indexerid": {"desc": "thetvdb.com unique id of a show"}, - "season": {"desc": "the season number"}, - "episode": {"desc": "the episode number"} - } + "season": {"desc": "the season number"}, + "episode": {"desc": "the episode number"} } + } def __init__(self, args, kwargs): # required @@ -1012,33 +1044,36 @@ class CMD_SubtitleSearch(ApiCall): epObj = showObj.getEpisode(int(self.s), int(self.e)) if isinstance(epObj, str): return _responds(RESULT_FAILURE, msg="Episode not found") - + # try do download subtitles for that episode previous_subtitles = epObj.subtitles - + try: subtitles = epObj.downloadSubtitles() except: return _responds(RESULT_FAILURE, msg='Unable to find subtitles') - + # return the correct json value if previous_subtitles != epObj.subtitles: - status = 'New subtitles downloaded: %s' % ' '.join([""+subliminal.language.Language(x).name+"" for x in sorted(list(set(epObj.subtitles).difference(previous_subtitles)))]) + status = 'New subtitles downloaded: %s' % ' '.join([ + "" + subliminal.language.Language(x).name + "" for x in + sorted(list(set(epObj.subtitles).difference(previous_subtitles)))]) response = _responds(RESULT_SUCCESS, msg='New subtitles found') else: status = 'No subtitles downloaded' response = _responds(RESULT_FAILURE, msg='Unable to find subtitles') - + ui.notifications.message('Subtitles Search', status) - + return response class CMD_Exceptions(ApiCall): _help = {"desc": "display scene exceptions for all or a given show", "optionalParameters": {"indexerid": {"desc": "thetvdb.com unique id of a show"}, - } } + } def __init__(self, args, kwargs): # required @@ -1065,7 +1100,9 @@ class CMD_Exceptions(ApiCall): if not showObj: return _responds(RESULT_FAILURE, msg="Show not found") - sqlResults = myDB.select("SELECT show_name, indexer_id AS 'indexerid' FROM scene_exceptions WHERE indexer_id = ?", [self.indexerid]) + sqlResults = myDB.select( + "SELECT show_name, indexer_id AS 'indexerid' FROM scene_exceptions WHERE indexer_id = ?", + [self.indexerid]) scene_exceptions = [] for row in sqlResults: scene_exceptions.append(row["show_name"]) @@ -1078,8 +1115,8 @@ class CMD_History(ApiCall): _help = {"desc": "display sickbeard downloaded/snatched history", "optionalParameters": {"limit": {"desc": "limit returned results"}, "type": {"desc": "only show a specific type of results"}, - } } + } def __init__(self, args, kwargs): # required @@ -1106,9 +1143,13 @@ class CMD_History(ApiCall): ulimit = min(int(self.limit), 100) if ulimit == 0: - sqlResults = myDB.select("SELECT h.*, show_name FROM history h, tv_shows s WHERE h.showid=s.indexer_id AND action in (" + ','.join(['?'] * len(typeCodes)) + ") ORDER BY date DESC", typeCodes) + sqlResults = myDB.select( + "SELECT h.*, show_name FROM history h, tv_shows s WHERE h.showid=s.indexer_id AND action in (" + ','.join( + ['?'] * len(typeCodes)) + ") ORDER BY date DESC", typeCodes) else: - sqlResults = myDB.select("SELECT h.*, show_name FROM history h, tv_shows s WHERE h.showid=s.indexer_id AND action in (" + ','.join(['?'] * len(typeCodes)) + ") ORDER BY date DESC LIMIT ?", typeCodes + [ulimit]) + sqlResults = myDB.select( + "SELECT h.*, show_name FROM history h, tv_shows s WHERE h.showid=s.indexer_id AND action in (" + ','.join( + ['?'] * len(typeCodes)) + ") ORDER BY date DESC LIMIT ?", typeCodes + [ulimit]) results = [] for row in sqlResults: @@ -1131,7 +1172,7 @@ class CMD_History(ApiCall): class CMD_HistoryClear(ApiCall): _help = {"desc": "clear sickbeard's history", - 
} + } def __init__(self, args, kwargs): # required @@ -1150,7 +1191,7 @@ class CMD_HistoryTrim(ApiCall): _help = {"desc": "trim sickbeard's history by removing entries greater than 30 days old" - } + } def __init__(self, args, kwargs): # required @@ -1161,7 +1202,8 @@ def run(self): """ trim sickbeard's history """ myDB = db.DBConnection() - myDB.action("DELETE FROM history WHERE date < " + str((datetime.datetime.today() - datetime.timedelta(days=30)).strftime(history.dateFormat))) + myDB.action("DELETE FROM history WHERE date < " + str( + (datetime.datetime.today() - datetime.timedelta(days=30)).strftime(history.dateFormat))) myDB.connection.close() return _responds(RESULT_SUCCESS, msg="Removed history entries greater than 30 days old") @@ -1169,13 +1211,15 @@ class CMD_Logs(ApiCall): _help = {"desc": "view sickbeard's log", - "optionalParameters": {"min_level ": {"desc": "the minimum level classification of log entries to show, with each level inherting its above level"} } - } + "optionalParameters": {"min_level ": { + "desc": "the minimum level classification of log entries to show, with each level inheriting its above level"}} + } def __init__(self, args, kwargs): # required # optional - self.min_level, args = self.check_params(args, kwargs, "min_level", "error", False, "string", ["error", "warning", "info", "debug"]) + self.min_level, args = self.check_params(args, kwargs, "min_level", "error", False, "string", + ["error", "warning", "info", "debug"]) # super, missing, help ApiCall.__init__(self, args, kwargs) @@ -1237,17 +1281,18 @@ class CMD_SickBeard(ApiCall): def run(self): """ display misc sickbeard related information """ - data = {"sb_version": sickbeard.version.SICKBEARD_VERSION, "api_version": Api.version, "api_commands": sorted(_functionMaper.keys())} + data = {"sb_version": sickbeard.version.SICKBEARD_VERSION, "api_version": Api.version, + "api_commands": sorted(_functionMaper.keys())} return _responds(RESULT_SUCCESS, data) class CMD_SickBeardAddRootDir(ApiCall): _help = {"desc": "add a sickbeard user's parent directory", "requiredParameters": {"location": {"desc": "the full path to root (parent) directory"} - }, + }, "optionalParameters": {"default": {"desc": "make the location passed the default root (parent) directory"} - } } + } def __init__(self, args, kwargs): # required @@ -1278,13 +1323,13 @@ class CMD_SickBeardAddRootDir(ApiCall): # clean up the list - replace %xx escapes by their single-character equivalent root_dirs = [urllib.unquote_plus(x) for x in root_dirs] for x in root_dirs: - if(x == self.location): + if (x == self.location): location_matched = 1 if (self.default == 1): index = root_dirs.index(self.location) break - if(location_matched == 0): + if (location_matched == 0): if (self.default == 1): index = 0 root_dirs.insert(0, self.location) @@ -1313,21 +1358,23 @@ class CMD_SickBeardCheckScheduler(ApiCall): myDB = db.DBConnection() sqlResults = myDB.select("SELECT last_backlog FROM info") - backlogPaused = sickbeard.searchQueueScheduler.action.is_backlog_paused() #@UndefinedVariable - backlogRunning = sickbeard.searchQueueScheduler.action.is_backlog_in_progress() #@UndefinedVariable - searchStatus = sickbeard.currentSearchScheduler.action.amActive #@UndefinedVariable + backlogPaused = sickbeard.searchQueueScheduler.action.is_backlog_paused() #@UndefinedVariable + backlogRunning = sickbeard.searchQueueScheduler.action.is_backlog_in_progress() #@UndefinedVariable + 
searchStatus = sickbeard.currentSearchScheduler.action.amActive #@UndefinedVariable nextSearch = str(sickbeard.currentSearchScheduler.timeLeft()).split('.')[0] nextBacklog = sickbeard.backlogSearchScheduler.nextRun().strftime(dateFormat).decode(sickbeard.SYS_ENCODING) myDB.connection.close() - data = {"backlog_is_paused": int(backlogPaused), "backlog_is_running": int(backlogRunning), "last_backlog": _ordinal_to_dateForm(sqlResults[0]["last_backlog"]), "search_is_running": int(searchStatus), "next_search": nextSearch, "next_backlog": nextBacklog} + data = {"backlog_is_paused": int(backlogPaused), "backlog_is_running": int(backlogRunning), + "last_backlog": _ordinal_to_dateForm(sqlResults[0]["last_backlog"]), + "search_is_running": int(searchStatus), "next_search": nextSearch, "next_backlog": nextBacklog} return _responds(RESULT_SUCCESS, data) class CMD_SickBeardDeleteRootDir(ApiCall): _help = {"desc": "delete a sickbeard user's parent directory", - "requiredParameters": {"location": {"desc": "the full path to root (parent) directory"} } - } + "requiredParameters": {"location": {"desc": "the full path to root (parent) directory"}} + } def __init__(self, args, kwargs): # required @@ -1371,7 +1418,7 @@ class CMD_SickBeardDeleteRootDir(ApiCall): class CMD_SickBeardForceSearch(ApiCall): _help = {"desc": "force the episode search early" - } + } def __init__(self, args, kwargs): # required @@ -1404,7 +1451,9 @@ class CMD_SickBeardGetDefaults(ApiCall): anyQualities, bestQualities = _mapQuality(sickbeard.QUALITY_DEFAULT) - data = {"status": statusStrings[sickbeard.STATUS_DEFAULT].lower(), "flatten_folders": int(sickbeard.FLATTEN_FOLDERS_DEFAULT), "initial": anyQualities, "archive": bestQualities, "future_show_paused": int(sickbeard.COMING_EPS_DISPLAY_PAUSED) } + data = {"status": statusStrings[sickbeard.STATUS_DEFAULT].lower(), + "flatten_folders": int(sickbeard.FLATTEN_FOLDERS_DEFAULT), "initial": anyQualities, + "archive": bestQualities, "future_show_paused": int(sickbeard.COMING_EPS_DISPLAY_PAUSED)} return _responds(RESULT_SUCCESS, data) @@ -1421,8 +1470,8 @@ class CMD_SickBeardGetMessages(ApiCall): messages = [] for cur_notification in ui.notifications.get_notifications(): messages.append({"title": cur_notification.title, - "message": cur_notification.message, - "type": cur_notification.type}) + "message": cur_notification.message, + "type": cur_notification.type}) return _responds(RESULT_SUCCESS, messages) @@ -1443,8 +1492,8 @@ class CMD_SickBeardGetRootDirs(ApiCall): class CMD_SickBeardPauseBacklog(ApiCall): _help = {"desc": "pause the backlog search", - "optionalParameters": {"pause ": {"desc": "pause or unpause the global backlog"} } - } + "optionalParameters": {"pause ": {"desc": "pause or unpause the global backlog"}} + } def __init__(self, args, kwargs): # required @@ -1456,10 +1505,10 @@ class CMD_SickBeardPauseBacklog(ApiCall): def run(self): """ pause the backlog search """ if self.pause == True: - sickbeard.searchQueueScheduler.action.pause_backlog() #@UndefinedVariable + sickbeard.searchQueueScheduler.action.pause_backlog() #@UndefinedVariable return _responds(RESULT_SUCCESS, msg="Backlog paused") else: - sickbeard.searchQueueScheduler.action.unpause_backlog() #@UndefinedVariable + sickbeard.searchQueueScheduler.action.unpause_backlog() #@UndefinedVariable return _responds(RESULT_SUCCESS, msg="Backlog unpaused") @@ -1499,16 +1548,16 @@ class CMD_SickBeardRestart(ApiCall): class CMD_SickBeardSearchTVDB(ApiCall): _help = {"desc": "search for show at tvdb with a given string and 
language", "optionalParameters": {"name": {"desc": "name of the show you want to search for"}, - "indexerid": {"desc": "thetvdb.com unique id of a show"}, - "lang": {"desc": "the 2 letter abbreviation lang id"} - } + "indexerid": {"desc": "thetvdb.com unique id of a show"}, + "lang": {"desc": "the 2 letter abbreviation lang id"} } + } valid_languages = { - 'el': 20, 'en': 7, 'zh': 27, 'it': 15, 'cs': 28, 'es': 16, 'ru': 22, - 'nl': 13, 'pt': 26, 'no': 9, 'tr': 21, 'pl': 18, 'fr': 17, 'hr': 31, - 'de': 14, 'da': 10, 'fi': 11, 'hu': 19, 'ja': 25, 'he': 24, 'ko': 32, - 'sv': 8, 'sl': 30} + 'el': 20, 'en': 7, 'zh': 27, 'it': 15, 'cs': 28, 'es': 16, 'ru': 22, + 'nl': 13, 'pt': 26, 'no': 9, 'tr': 21, 'pl': 18, 'fr': 17, 'hr': 31, + 'de': 14, 'da': 10, 'fi': 11, 'hu': 19, 'ja': 25, 'he': 24, 'ko': 32, + 'sv': 8, 'sl': 30} def __init__(self, args, kwargs): # required @@ -1523,7 +1572,7 @@ class CMD_SickBeardSearchTVDB(ApiCall): def run(self): """ search for show at tvdb with a given string and language """ - if self.name and not self.indexerid: # only name was given + if self.name and not self.indexerid: # only name was given baseURL = "http://thetvdb.com/api/GetSeries.php?" params = {"seriesname": str(self.name).encode('utf-8'), 'language': self.lang} finalURL = baseURL + urllib.urlencode(params) @@ -1535,7 +1584,8 @@ class CMD_SickBeardSearchTVDB(ApiCall): try: seriesXML = etree.ElementTree(etree.XML(urlData)) except Exception, e: - logger.log(u"API :: Unable to parse XML for some reason: " + ex(e) + " from XML: " + urlData, logger.ERROR) + logger.log(u"API :: Unable to parse XML for some reason: " + ex(e) + " from XML: " + urlData, + logger.ERROR) return _responds(RESULT_FAILURE, msg="Unable to read result from tvdb") series = seriesXML.getiterator('Series') @@ -1555,21 +1605,23 @@ class CMD_SickBeardSearchTVDB(ApiCall): if self.lang and not self.lang == 'en': lINDEXER_API_PARMS['language'] = self.lang - t = indexer_api.indexerApi(actors=False, **lINDEXER_API_PARMS) + t = sickbeard.indexerApi(actors=False, **lINDEXER_API_PARMS) try: myShow = t[int(self.indexerid)] - except (indexer_exceptions.indexer_shownotfound, indexer_exceptions.indexer_error): + except (sickbeard.indexer_shownotfound, sickbeard.indexer_error): logger.log(u"API :: Unable to find show with id " + str(self.indexerid), logger.WARNING) return _responds(RESULT_SUCCESS, {"results": [], "langid": lang_id}) if not myShow.data['seriesname']: - logger.log(u"API :: Found show with indexerid " + str(self.indexerid) + ", however it contained no show name", logger.DEBUG) + logger.log( + u"API :: Found show with indexerid " + str(self.indexerid) + ", however it contained no show name", + logger.DEBUG) return _responds(RESULT_FAILURE, msg="Show contains no name, invalid result") showOut = [{"indexerid": self.indexerid, - "name": unicode(myShow.data['seriesname']), - "first_aired": myShow.data['firstaired']}] + "name": unicode(myShow.data['seriesname']), + "first_aired": myShow.data['firstaired']}] return _responds(RESULT_SUCCESS, {"results": showOut, "langid": lang_id}) else: @@ -1582,17 +1634,22 @@ class CMD_SickBeardSetDefaults(ApiCall): "archive": {"desc": "archive quality for the show"}, "flatten_folders": {"desc": "flatten subfolders within the show directory"}, "status": {"desc": "status of missing episodes"} - } } + } def __init__(self, args, kwargs): # required # optional - self.initial, args = self.check_params(args, kwargs, "initial", None, False, "list", ["sdtv", "sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", "fullhdwebdl", 
"hdbluray", "fullhdbluray", "unknown"]) - self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list", ["sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", "fullhdwebdl", "hdbluray", "fullhdbluray"]) + self.initial, args = self.check_params(args, kwargs, "initial", None, False, "list", + ["sdtv", "sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", + "fullhdwebdl", "hdbluray", "fullhdbluray", "unknown"]) + self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list", + ["sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", "fullhdwebdl", + "hdbluray", "fullhdbluray"]) self.future_show_paused, args = self.check_params(args, kwargs, "future_show_paused", None, False, "bool", []) self.flatten_folders, args = self.check_params(args, kwargs, "flatten_folders", None, False, "bool", []) - self.status, args = self.check_params(args, kwargs, "status", None, False, "string", ["wanted", "skipped", "archived", "ignored"]) + self.status, args = self.check_params(args, kwargs, "status", None, False, "string", + ["wanted", "skipped", "archived", "ignored"]) # super, missing, help ApiCall.__init__(self, args, kwargs) @@ -1664,8 +1721,8 @@ class CMD_SickBeardShutdown(ApiCall): class CMD_Show(ApiCall): _help = {"desc": "display information for a given show", "requiredParameters": {"indexerid": {"desc": "thetvdb.com unique id of a show"}, - } } + } def __init__(self, args, kwargs): # required @@ -1726,23 +1783,29 @@ class CMD_ShowAddExisting(ApiCall): _help = {"desc": "add a show in sickbeard with an existing folder", "requiredParameters": {"indexerid": {"desc": "thetvdb.com unique id of a show"}, "location": {"desc": "full path to the existing folder for the show"} - }, + }, "optionalParameters": {"initial": {"desc": "initial quality for the show"}, "archive": {"desc": "archive quality for the show"}, "flatten_folders": {"desc": "flatten subfolders for the show"}, "subtitles": {"desc": "allow search episode subtitle"} - } } + } def __init__(self, args, kwargs): # required self.location, args = self.check_params(args, kwargs, "location", None, True, "string", []) self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", []) # optional - self.initial, args = self.check_params(args, kwargs, "initial", None, False, "list", ["sdtv", "sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", "fullhdwebdl", "hdbluray", "fullhdbluray", "unknown"]) - self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list", ["sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", "fullhdwebdl", "hdbluray", "fullhdbluray"]) - self.flatten_folders, args = self.check_params(args, kwargs, "flatten_folders", str(sickbeard.FLATTEN_FOLDERS_DEFAULT), False, "bool", []) - self.subtitles, args = self.check_params(args, kwargs, "subtitles", int(sickbeard.USE_SUBTITLES), False, "int", []) + self.initial, args = self.check_params(args, kwargs, "initial", None, False, "list", + ["sdtv", "sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", + "fullhdwebdl", "hdbluray", "fullhdbluray", "unknown"]) + self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list", + ["sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", "fullhdwebdl", + "hdbluray", "fullhdbluray"]) + self.flatten_folders, args = self.check_params(args, kwargs, "flatten_folders", + str(sickbeard.FLATTEN_FOLDERS_DEFAULT), False, "bool", []) + self.subtitles, args = self.check_params(args, kwargs, "subtitles", int(sickbeard.USE_SUBTITLES), False, "int", + []) # super, missing, help 
ApiCall.__init__(self, args, kwargs) @@ -1793,14 +1856,15 @@ class CMD_ShowAddExisting(ApiCall): if iqualityID or aqualityID: newQuality = Quality.combineQualities(iqualityID, aqualityID) - sickbeard.showQueueScheduler.action.addShow(int(self.indexerid), self.location, SKIPPED, newQuality, int(self.flatten_folders)) #@UndefinedVariable + sickbeard.showQueueScheduler.action.addShow(int(self.indexerid), self.location, SKIPPED, newQuality, + int(self.flatten_folders)) #@UndefinedVariable return _responds(RESULT_SUCCESS, {"name": tvdbName}, tvdbName + " has been queued to be added") class CMD_ShowAddNew(ApiCall): _help = {"desc": "add a new show to sickbeard", "requiredParameters": {"indexerid": {"desc": "thetvdb.com unique id of a show"} - }, + }, "optionalParameters": {"initial": {"desc": "initial quality for the show"}, "location": {"desc": "base path for where the show folder is to be created"}, "archive": {"desc": "archive quality for the show"}, @@ -1808,26 +1872,33 @@ class CMD_ShowAddNew(ApiCall): "status": {"desc": "status of missing episodes"}, "lang": {"desc": "the 2 letter lang abbreviation id"}, "subtitles": {"desc": "allow search episode subtitle"} - } } + } valid_languages = { - 'el': 20, 'en': 7, 'zh': 27, 'it': 15, 'cs': 28, 'es': 16, 'ru': 22, - 'nl': 13, 'pt': 26, 'no': 9, 'tr': 21, 'pl': 18, 'fr': 17, 'hr': 31, - 'de': 14, 'da': 10, 'fi': 11, 'hu': 19, 'ja': 25, 'he': 24, 'ko': 32, - 'sv': 8, 'sl': 30} + 'el': 20, 'en': 7, 'zh': 27, 'it': 15, 'cs': 28, 'es': 16, 'ru': 22, + 'nl': 13, 'pt': 26, 'no': 9, 'tr': 21, 'pl': 18, 'fr': 17, 'hr': 31, + 'de': 14, 'da': 10, 'fi': 11, 'hu': 19, 'ja': 25, 'he': 24, 'ko': 32, + 'sv': 8, 'sl': 30} def __init__(self, args, kwargs): # required self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", []) # optional self.location, args = self.check_params(args, kwargs, "location", None, False, "string", []) - self.initial, args = self.check_params(args, kwargs, "initial", None, False, "list", ["sdtv", "sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", "fullhdwebdl", "hdbluray", "fullhdbluray", "unknown"]) - self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list", ["sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", "fullhdwebdl", "hdbluray", "fullhdbluray"]) - self.flatten_folders, args = self.check_params(args, kwargs, "flatten_folders", str(sickbeard.FLATTEN_FOLDERS_DEFAULT), False, "bool", []) - self.status, args = self.check_params(args, kwargs, "status", None, False, "string", ["wanted", "skipped", "archived", "ignored"]) + self.initial, args = self.check_params(args, kwargs, "initial", None, False, "list", + ["sdtv", "sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", + "fullhdwebdl", "hdbluray", "fullhdbluray", "unknown"]) + self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list", + ["sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", "fullhdwebdl", + "hdbluray", "fullhdbluray"]) + self.flatten_folders, args = self.check_params(args, kwargs, "flatten_folders", + str(sickbeard.FLATTEN_FOLDERS_DEFAULT), False, "bool", []) + self.status, args = self.check_params(args, kwargs, "status", None, False, "string", + ["wanted", "skipped", "archived", "ignored"]) self.lang, args = self.check_params(args, kwargs, "lang", "en", False, "string", self.valid_languages.keys()) - self.subtitles, args = self.check_params(args, kwargs, "subtitles", int(sickbeard.USE_SUBTITLES), False, "int", []) + self.subtitles, args = self.check_params(args, kwargs, "subtitles", 
int(sickbeard.USE_SUBTITLES), False, "int", + []) # super, missing, help ApiCall.__init__(self, args, kwargs) @@ -1913,19 +1984,22 @@ class CMD_ShowAddNew(ApiCall): dir_exists = helpers.makeDir(showPath) if not dir_exists: logger.log(u"API :: Unable to create the folder " + showPath + ", can't add the show", logger.ERROR) - return _responds(RESULT_FAILURE, {"path": showPath}, "Unable to create the folder " + showPath + ", can't add the show") + return _responds(RESULT_FAILURE, {"path": showPath}, + "Unable to create the folder " + showPath + ", can't add the show") else: helpers.chmodAsParent(showPath) - sickbeard.showQueueScheduler.action.addShow(int(self.indexerid), showPath, newStatus, newQuality, int(self.flatten_folders), self.subtitles, self.lang) #@UndefinedVariable + sickbeard.showQueueScheduler.action.addShow(int(self.indexerid), showPath, newStatus, newQuality, + int(self.flatten_folders), self.subtitles, + self.lang) #@UndefinedVariable return _responds(RESULT_SUCCESS, {"name": tvdbName}, tvdbName + " has been queued to be added") class CMD_ShowCache(ApiCall): _help = {"desc": "check sickbeard's cache to see if the banner or poster image for a show is valid", "requiredParameters": {"indexerid": {"desc": "thetvdb.com unique id of a show"} - } } + } def __init__(self, args, kwargs): # required @@ -1959,8 +2033,8 @@ class CMD_ShowCache(ApiCall): class CMD_ShowDelete(ApiCall): _help = {"desc": "delete a show in sickbeard", "requiredParameters": {"indexerid": {"desc": "thetvdb.com unique id of a show"}, - } } + } def __init__(self, args, kwargs): # required @@ -1975,7 +2049,8 @@ class CMD_ShowDelete(ApiCall): if not showObj: return _responds(RESULT_FAILURE, msg="Show not found") - if sickbeard.showQueueScheduler.action.isBeingAdded(showObj) or sickbeard.showQueueScheduler.action.isBeingUpdated(showObj): #@UndefinedVariable + if sickbeard.showQueueScheduler.action.isBeingAdded( + showObj) or sickbeard.showQueueScheduler.action.isBeingUpdated(showObj): #@UndefinedVariable return _responds(RESULT_FAILURE, msg="Show can not be deleted while being added or updated") showObj.deleteShow() @@ -1985,8 +2060,8 @@ class CMD_ShowDelete(ApiCall): class CMD_ShowGetQuality(ApiCall): _help = {"desc": "get quality setting for a show in sickbeard", "requiredParameters": {"indexerid": {"desc": "thetvdb.com unique id of a show"} - } } + } def __init__(self, args, kwargs): # required @@ -2009,8 +2084,8 @@ class CMD_ShowGetQuality(ApiCall): class CMD_ShowGetPoster(ApiCall): _help = {"desc": "get the poster stored for a show in sickbeard", "requiredParameters": {"indexerid": {"desc": "thetvdb.com unique id of a show"} - } } + } def __init__(self, args, kwargs): # required @@ -2027,8 +2102,8 @@ class CMD_ShowGetPoster(ApiCall): class CMD_ShowGetBanner(ApiCall): _help = {"desc": "get the banner stored for a show in sickbeard", "requiredParameters": {"indexerid": {"desc": "thetvdb.com unique id of a show"} - } } + } def __init__(self, args, kwargs): # required @@ -2045,10 +2120,10 @@ class CMD_ShowGetBanner(ApiCall): class CMD_ShowPause(ApiCall): _help = {"desc": "set a show's paused state in sickbeard", "requiredParameters": {"indexerid": {"desc": "thetvdb.com unique id of a show"}, - }, + }, "optionalParameters": {"pause": {"desc": "set the pause state of the show"} - } } + } def __init__(self, args, kwargs): # required @@ -2077,8 +2152,8 @@ class CMD_ShowPause(ApiCall): class CMD_ShowRefresh(ApiCall): _help = {"desc": "refresh a show in sickbeard", "requiredParameters": {"indexerid": {"desc": "thetvdb.com 
unique id of a show"}, - } } + } def __init__(self, args, kwargs): # required @@ -2094,7 +2169,7 @@ class CMD_ShowRefresh(ApiCall): return _responds(RESULT_FAILURE, msg="Show not found") try: - sickbeard.showQueueScheduler.action.refreshShow(showObj) #@UndefinedVariable + sickbeard.showQueueScheduler.action.refreshShow(showObj) #@UndefinedVariable return _responds(RESULT_SUCCESS, msg=str(showObj.name) + " has queued to be refreshed") except exceptions.CantRefreshException: # TODO: log the excption @@ -2104,16 +2179,17 @@ class CMD_ShowRefresh(ApiCall): class CMD_ShowSeasonList(ApiCall): _help = {"desc": "display the season list for a given show", "requiredParameters": {"indexerid": {"desc": "thetvdb.com unique id of a show"}, - }, + }, "optionalParameters": {"sort": {"desc": "change the sort order from descending to ascending"} - } } + } def __init__(self, args, kwargs): # required self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", []) # optional - self.sort, args = self.check_params(args, kwargs, "sort", "desc", False, "string", ["asc", "desc"]) # "asc" and "desc" default and fallback is "desc" + self.sort, args = self.check_params(args, kwargs, "sort", "desc", False, "string", + ["asc", "desc"]) # "asc" and "desc" default and fallback is "desc" # super, missing, help ApiCall.__init__(self, args, kwargs) @@ -2125,10 +2201,12 @@ class CMD_ShowSeasonList(ApiCall): myDB = db.DBConnection(row_type="dict") if self.sort == "asc": - sqlResults = myDB.select("SELECT DISTINCT season FROM tv_episodes WHERE showid = ? ORDER BY season ASC", [self.indexerid]) + sqlResults = myDB.select("SELECT DISTINCT season FROM tv_episodes WHERE showid = ? ORDER BY season ASC", + [self.indexerid]) else: - sqlResults = myDB.select("SELECT DISTINCT season FROM tv_episodes WHERE showid = ? ORDER BY season DESC", [self.indexerid]) - seasonList = [] # a list with all season numbers + sqlResults = myDB.select("SELECT DISTINCT season FROM tv_episodes WHERE showid = ? ORDER BY season DESC", + [self.indexerid]) + seasonList = [] # a list with all season numbers for row in sqlResults: seasonList.append(int(row["season"])) @@ -2139,10 +2217,10 @@ class CMD_ShowSeasonList(ApiCall): class CMD_ShowSeasons(ApiCall): _help = {"desc": "display a listing of episodes for all or a given season", "requiredParameters": {"indexerid": {"desc": "thetvdb.com unique id of a show"}, - }, + }, "optionalParameters": {"season": {"desc": "the season number"}, - } } + } def __init__(self, args, kwargs): # required @@ -2161,7 +2239,8 @@ class CMD_ShowSeasons(ApiCall): myDB = db.DBConnection(row_type="dict") if self.season == None: - sqlResults = myDB.select("SELECT name, episode, airdate, status, season FROM tv_episodes WHERE showid = ?", [self.indexerid]) + sqlResults = myDB.select("SELECT name, episode, airdate, status, season FROM tv_episodes WHERE showid = ?", + [self.indexerid]) seasons = {} for row in sqlResults: status, quality = Quality.splitCompositeStatus(int(row["status"])) @@ -2177,7 +2256,9 @@ class CMD_ShowSeasons(ApiCall): seasons[curSeason][curEpisode] = row else: - sqlResults = myDB.select("SELECT name, episode, airdate, status FROM tv_episodes WHERE showid = ? AND season = ?", [self.indexerid, self.season]) + sqlResults = myDB.select( + "SELECT name, episode, airdate, status FROM tv_episodes WHERE showid = ? 
AND season = ?", + [self.indexerid, self.season]) if len(sqlResults) is 0: return _responds(RESULT_FAILURE, msg="Season not found") seasons = {} @@ -2197,13 +2278,14 @@ class CMD_ShowSeasons(ApiCall): class CMD_ShowSetQuality(ApiCall): - _help = {"desc": "set desired quality of a show in sickbeard. if neither initial or archive are provided then the config default quality will be used", - "requiredParameters": {"indexerid": {"desc": "thetvdb.com unique id of a show"} - }, - "optionalParameters": {"initial": {"desc": "initial quality for the show"}, - "archive": {"desc": "archive quality for the show"} - } - } + _help = { + "desc": "set desired quality of a show in sickbeard. if neither initial or archive are provided then the config default quality will be used", + "requiredParameters": {"indexerid": {"desc": "thetvdb.com unique id of a show"} + }, + "optionalParameters": {"initial": {"desc": "initial quality for the show"}, + "archive": {"desc": "archive quality for the show"} + } + } def __init__(self, args, kwargs): # required @@ -2211,8 +2293,12 @@ class CMD_ShowSetQuality(ApiCall): # optional # this for whatever reason removes hdbluray not sdtv... which is just wrong. reverting to previous code.. plus we didnt use the new code everywhere. # self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list", _getQualityMap().values()[1:]) - self.initial, args = self.check_params(args, kwargs, "initial", None, False, "list", ["sdtv", "sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", "fullhdwebdl", "hdbluray", "fullhdbluray", "unknown"]) - self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list", ["sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", "fullhdwebdl", "hdbluray", "fullhdbluray"]) + self.initial, args = self.check_params(args, kwargs, "initial", None, False, "list", + ["sdtv", "sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", + "fullhdwebdl", "hdbluray", "fullhdbluray", "unknown"]) + self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list", + ["sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", "fullhdwebdl", + "hdbluray", "fullhdbluray"]) # super, missing, help ApiCall.__init__(self, args, kwargs) @@ -2251,14 +2337,15 @@ class CMD_ShowSetQuality(ApiCall): newQuality = Quality.combineQualities(iqualityID, aqualityID) showObj.quality = newQuality - return _responds(RESULT_SUCCESS, msg=showObj.name + " quality has been changed to " + _get_quality_string(showObj.quality)) + return _responds(RESULT_SUCCESS, + msg=showObj.name + " quality has been changed to " + _get_quality_string(showObj.quality)) class CMD_ShowStats(ApiCall): _help = {"desc": "display episode statistics for a given show", "requiredParameters": {"indexerid": {"desc": "thetvdb.com unique id of a show"}, - } } + } def __init__(self, args, kwargs): # required @@ -2300,7 +2387,8 @@ class CMD_ShowStats(ApiCall): episode_qualities_counts_snatch[statusCode] = 0 myDB = db.DBConnection(row_type="dict") - sqlResults = myDB.select("SELECT status, season FROM tv_episodes WHERE season != 0 AND showid = ?", [self.indexerid]) + sqlResults = myDB.select("SELECT status, season FROM tv_episodes WHERE season != 0 AND showid = ?", + [self.indexerid]) # the main loop that goes through all episodes for row in sqlResults: status, quality = Quality.splitCompositeStatus(int(row["status"])) @@ -2313,7 +2401,7 @@ class CMD_ShowStats(ApiCall): elif status in Quality.SNATCHED + Quality.SNATCHED_PROPER: episode_qualities_counts_snatch["total"] += 1 
episode_qualities_counts_snatch[int(row["status"])] += 1 - elif status == 0: # we dont count NONE = 0 = N/A + elif status == 0: # we dont count NONE = 0 = N/A pass else: episode_status_counts_total[status] += 1 @@ -2350,7 +2438,8 @@ class CMD_ShowStats(ApiCall): episodes_stats["total"] = episode_status_counts_total[statusCode] continue status, quality = Quality.splitCompositeStatus(int(statusCode)) - statusString = statusStrings.statusStrings[statusCode].lower().replace(" ", "_").replace("(", "").replace(")", "") + statusString = statusStrings.statusStrings[statusCode].lower().replace(" ", "_").replace("(", "").replace( + ")", "") episodes_stats[statusString] = episode_status_counts_total[statusCode] myDB.connection.close() @@ -2360,8 +2449,8 @@ class CMD_ShowStats(ApiCall): class CMD_ShowUpdate(ApiCall): _help = {"desc": "update a show in sickbeard", "requiredParameters": {"indexerid": {"desc": "thetvdb.com unique id of a show"}, - } } + } def __init__(self, args, kwargs): # required @@ -2377,7 +2466,7 @@ class CMD_ShowUpdate(ApiCall): return _responds(RESULT_FAILURE, msg="Show not found") try: - sickbeard.showQueueScheduler.action.updateShow(showObj, True) #@UndefinedVariable + sickbeard.showQueueScheduler.action.updateShow(showObj, True) #@UndefinedVariable return _responds(RESULT_SUCCESS, msg=str(showObj.name) + " has queued to be updated") except exceptions.CantUpdateException, e: logger.log(u"API:: Unable to update " + str(showObj.name) + ". " + str(ex(e)), logger.ERROR) @@ -2388,8 +2477,8 @@ class CMD_Shows(ApiCall): _help = {"desc": "display all shows in sickbeard", "optionalParameters": {"sort": {"desc": "sort the list of shows by show name instead of indexerid"}, "paused": {"desc": "only show the shows that are set to paused"}, - }, - } + }, + } def __init__(self, args, kwargs): # required @@ -2432,7 +2521,7 @@ class CMD_Shows(ApiCall): class CMD_ShowsStats(ApiCall): _help = {"desc": "display the global shows and episode stats" - } + } def __init__(self, args, kwargs): # required @@ -2447,9 +2536,16 @@ class CMD_ShowsStats(ApiCall): myDB = db.DBConnection() today = str(datetime.date.today().toordinal()) stats["shows_total"] = len(sickbeard.showList) - stats["shows_active"] = len([show for show in sickbeard.showList if show.paused == 0 and show.status != "Ended"]) - stats["ep_downloaded"] = myDB.select("SELECT COUNT(*) FROM tv_episodes WHERE status IN (" + ",".join([str(show) for show in Quality.DOWNLOADED + [ARCHIVED]]) + ") AND season != 0 and episode != 0 AND airdate <= " + today + "")[0][0] - stats["ep_total"] = myDB.select("SELECT COUNT(*) FROM tv_episodes WHERE season != 0 AND episode != 0 AND (airdate != 1 OR status IN (" + ",".join([str(show) for show in (Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_PROPER) + [ARCHIVED]]) + ")) AND airdate <= " + today + " AND status != " + str(IGNORED) + "")[0][0] + stats["shows_active"] = len( + [show for show in sickbeard.showList if show.paused == 0 and show.status != "Ended"]) + stats["ep_downloaded"] = myDB.select("SELECT COUNT(*) FROM tv_episodes WHERE status IN (" + ",".join( + [str(show) for show in + Quality.DOWNLOADED + [ARCHIVED]]) + ") AND season != 0 and episode != 0 AND airdate <= " + today + "")[0][ + 0] + stats["ep_total"] = myDB.select( + "SELECT COUNT(*) FROM tv_episodes WHERE season != 0 AND episode != 0 AND (airdate != 1 OR status IN (" + ",".join( + [str(show) for show in (Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_PROPER) + [ + ARCHIVED]]) + ")) AND airdate <= " + today + " AND status != " + 
str(IGNORED) + "")[0][0] myDB.connection.close() return _responds(RESULT_SUCCESS, stats) @@ -2499,5 +2595,5 @@ _functionMaper = {"help": CMD_Help, "show.stats": CMD_ShowStats, "show.update": CMD_ShowUpdate, "shows": CMD_Shows, - "shows.stats": CMD_ShowsStats - } + "shows.stats": CMD_ShowsStats +} diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py index 737e07b0..56ee8b14 100644 --- a/sickbeard/webserve.py +++ b/sickbeard/webserve.py @@ -28,6 +28,8 @@ import datetime import random import locale import logging +import itertools +import string from Cheetah.Template import Template import cherrypy.lib @@ -53,15 +55,13 @@ from sickbeard.providers import newznab, rsstorrent from sickbeard.common import Quality, Overview, statusStrings, qualityPresetStrings from sickbeard.common import SNATCHED, SKIPPED, UNAIRED, IGNORED, ARCHIVED, WANTED, FAILED from sickbeard.common import SD, HD720p, HD1080p -from sickbeard.common import indexerStrings from sickbeard.exceptions import ex from sickbeard.webapi import Api from sickbeard.scene_exceptions import get_scene_exceptions -from sickbeard.scene_numbering import get_scene_numbering, set_scene_numbering, get_scene_numbering_for_show, get_xem_numbering_for_show +from sickbeard.scene_numbering import get_scene_numbering, set_scene_numbering, get_scene_numbering_for_show, \ + get_xem_numbering_for_show from sickbeard.providers.generic import TorrentProvider -from sickbeard.indexers.indexer_api import indexerApi -from sickbeard.indexers.indexer_exceptions import indexer_shownotfound from lib.dateutil import tz from lib.unrar2 import RarFile, RarInfo @@ -80,24 +80,26 @@ except ImportError: from sickbeard import browser -class PageTemplate (Template): +class PageTemplate(Template): def __init__(self, *args, **KWs): -# KWs['file'] = os.path.join(sickbeard.PROG_DIR, "data/interfaces/default/", KWs['file']) - KWs['file'] = os.path.join(sickbeard.PROG_DIR, "gui/" + sickbeard.GUI_NAME + "/interfaces/default/", KWs['file']) + KWs['file'] = os.path.join(sickbeard.PROG_DIR, "gui/" + sickbeard.GUI_NAME + "/interfaces/default/", + KWs['file']) super(PageTemplate, self).__init__(*args, **KWs) self.sbRoot = sickbeard.WEB_ROOT self.sbHttpPort = sickbeard.WEB_PORT self.sbHttpsPort = sickbeard.WEB_PORT self.sbHttpsEnabled = sickbeard.ENABLE_HTTPS if cherrypy.request.headers['Host'][0] == '[': - self.sbHost = re.match("^\[.*\]", cherrypy.request.headers['Host'], re.X|re.M|re.S).group(0) + self.sbHost = re.match("^\[.*\]", cherrypy.request.headers['Host'], re.X | re.M | re.S).group(0) else: - self.sbHost = re.match("^[^:]+", cherrypy.request.headers['Host'], re.X|re.M|re.S).group(0) + self.sbHost = re.match("^[^:]+", cherrypy.request.headers['Host'], re.X | re.M | re.S).group(0) self.projectHomePage = "http://code.google.com/p/sickbeard/" if sickbeard.NZBS and sickbeard.NZBS_UID and sickbeard.NZBS_HASH: - logger.log(u"NZBs.org has been replaced, please check the config to configure the new provider!", logger.ERROR) - ui.notifications.error("NZBs.org Config Update", "NZBs.org has a new site. Please update your config with the api key from http://nzbs.org and then disable the old NZBs.org provider.") + logger.log(u"NZBs.org has been replaced, please check the config to configure the new provider!", + logger.ERROR) + ui.notifications.error("NZBs.org Config Update", + "NZBs.org has a new site. 
Please update your config with the api key from http://nzbs.org and then disable the old NZBs.org provider.") if "X-Forwarded-Host" in cherrypy.request.headers: self.sbHost = cherrypy.request.headers['X-Forwarded-Host'] @@ -113,12 +115,12 @@ class PageTemplate (Template): self.logPageTitle = logPageTitle self.sbPID = str(sickbeard.PID) self.menu = [ - { 'title': 'Home', 'key': 'home' }, - { 'title': 'Coming Episodes', 'key': 'comingEpisodes' }, - { 'title': 'History', 'key': 'history' }, - { 'title': 'Manage', 'key': 'manage' }, - { 'title': 'Config', 'key': 'config' }, - { 'title': logPageTitle, 'key': 'errorlogs' }, + {'title': 'Home', 'key': 'home'}, + {'title': 'Coming Episodes', 'key': 'comingEpisodes'}, + {'title': 'History', 'key': 'history'}, + {'title': 'Manage', 'key': 'manage'}, + {'title': 'Config', 'key': 'config'}, + {'title': logPageTitle, 'key': 'errorlogs'}, ] @@ -133,7 +135,6 @@ class IndexerWebUI: self.log = log def selectSeries(self, allSeries): - searchList = ",".join([x['id'] for x in allSeries]) showDirList = "" for curShowDir in self.config['_showDir']: @@ -152,54 +153,53 @@ def _genericMessage(subject, message): t.message = message return _munge(t) -def _getEpisode(show, season, episode): - if show == None or season == None or episode == None: +def _getEpisode(show, season, episode): + if show is None or season is None or episode is None: return "Invalid parameters" showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show)) - if showObj == None: + if showObj is None: return "Show not in show list" epObj = showObj.getEpisode(int(season), int(episode)) - if epObj == None: + if epObj is None: return "Episode couldn't be retrieved" return epObj + def ManageMenu(): - manageMenu = [ - { 'title': 'Backlog Overview', 'path': 'manage/backlogOverview/' }, - { 'title': 'Manage Searches', 'path': 'manage/manageSearches/' }, - { 'title': 'Episode Status Management', 'path': 'manage/episodeStatuses/' },] + {'title': 'Backlog Overview', 'path': 'manage/backlogOverview/'}, + {'title': 'Manage Searches', 'path': 'manage/manageSearches/'}, + {'title': 'Episode Status Management', 'path': 'manage/episodeStatuses/'}, ] if sickbeard.USE_TORRENTS and sickbeard.TORRENT_METHOD != 'blackhole' \ - and (sickbeard.ENABLE_HTTPS and sickbeard.TORRENT_HOST[:5] == 'https' \ - or not sickbeard.ENABLE_HTTPS and sickbeard.TORRENT_HOST[:5] == 'http:'): - manageMenu.append({ 'title': 'Manage Torrents', 'path': 'manage/manageTorrents/'}) + and (sickbeard.ENABLE_HTTPS and sickbeard.TORRENT_HOST[:5] == 'https' + or not sickbeard.ENABLE_HTTPS and sickbeard.TORRENT_HOST[:5] == 'http:'): + manageMenu.append({'title': 'Manage Torrents', 'path': 'manage/manageTorrents/'}) if sickbeard.USE_SUBTITLES: - manageMenu.append({ 'title': 'Missed Subtitle Management', 'path': 'manage/subtitleMissed/' }) - + manageMenu.append({'title': 'Missed Subtitle Management', 'path': 'manage/subtitleMissed/'}) + if sickbeard.USE_FAILED_DOWNLOADS: - manageMenu.append({ 'title': 'Failed Downloads', 'path': 'manage/failedDownloads/' }) + manageMenu.append({'title': 'Failed Downloads', 'path': 'manage/failedDownloads/'}) return manageMenu class ManageSearches: - @cherrypy.expose def index(self): t = PageTemplate(file="manage_manageSearches.tmpl") #t.backlogPI = sickbeard.backlogSearchScheduler.action.getProgressIndicator() - t.backlogPaused = sickbeard.searchQueueScheduler.action.is_backlog_paused() # @UndefinedVariable - t.backlogRunning = sickbeard.searchQueueScheduler.action.is_backlog_in_progress() # 
@UndefinedVariable - t.searchStatus = sickbeard.currentSearchScheduler.action.amActive # @UndefinedVariable - + t.backlogPaused = sickbeard.searchQueueScheduler.action.is_backlog_paused() # @UndefinedVariable + t.backlogRunning = sickbeard.searchQueueScheduler.action.is_backlog_in_progress() # @UndefinedVariable + t.searchStatus = sickbeard.currentSearchScheduler.action.amActive # @UndefinedVariable + t.submenu = ManageMenu() return _munge(t) @@ -212,16 +212,16 @@ class ManageSearches: if result: logger.log(u"Search forced") ui.notifications.message('Episode search started', - 'Note: RSS feeds may not be updated if retrieved recently') + 'Note: RSS feeds may not be updated if retrieved recently') redirect("/manage/manageSearches/") @cherrypy.expose def pauseBacklog(self, paused=None): if paused == "1": - sickbeard.searchQueueScheduler.action.pause_backlog() # @UndefinedVariable + sickbeard.searchQueueScheduler.action.pause_backlog() # @UndefinedVariable else: - sickbeard.searchQueueScheduler.action.unpause_backlog() # @UndefinedVariable + sickbeard.searchQueueScheduler.action.unpause_backlog() # @UndefinedVariable redirect("/manage/manageSearches/") @@ -229,7 +229,7 @@ class ManageSearches: def forceVersionCheck(self): # force a check to see if there is a new version - result = sickbeard.versionCheckScheduler.action.check_for_new_version(force=True) # @UndefinedVariable + result = sickbeard.versionCheckScheduler.action.check_for_new_version(force=True) # @UndefinedVariable if result: logger.log(u"Forcing version check") @@ -237,7 +237,6 @@ class ManageSearches: class Manage: - manageSearches = ManageSearches() @cherrypy.expose @@ -245,7 +244,7 @@ class Manage: t = PageTemplate(file="manage.tmpl") t.submenu = ManageMenu() return _munge(t) - + @cherrypy.expose def showEpisodeStatuses(self, indexer_id, whichStatus): myDB = db.DBConnection() @@ -254,7 +253,9 @@ class Manage: if status_list[0] == SNATCHED: status_list = Quality.SNATCHED + Quality.SNATCHED_PROPER - cur_show_results = myDB.select("SELECT season, episode, name FROM tv_episodes WHERE showid = ? AND season != 0 AND status IN (" + ','.join(['?']*len(status_list)) + ")", [int(indexer_id)] + status_list) + cur_show_results = myDB.select( + "SELECT season, episode, name FROM tv_episodes WHERE showid = ? 
AND season != 0 AND status IN (" + ','.join( + ['?'] * len(status_list)) + ")", [int(indexer_id)] + status_list) result = {} for cur_result in cur_show_results: @@ -288,7 +289,11 @@ class Manage: return _munge(t) myDB = db.DBConnection() - status_results = myDB.select("SELECT show_name, tv_shows.indexer_id as indexer_id FROM tv_episodes, tv_shows WHERE tv_episodes.status IN (" + ','.join(['?']*len(status_list)) + ") AND season != 0 AND tv_episodes.showid = tv_shows.indexer_id ORDER BY show_name", status_list) + status_results = myDB.select( + "SELECT show_name, tv_shows.indexer_id as indexer_id FROM tv_episodes, tv_shows WHERE tv_episodes.status IN (" + ','.join( + ['?'] * len( + status_list)) + ") AND season != 0 AND tv_episodes.showid = tv_shows.indexer_id ORDER BY show_name", + status_list) ep_counts = {} show_names = {} @@ -337,8 +342,9 @@ class Manage: # get a list of all the eps we want to change if they just said "all" if 'all' in to_change[cur_indexer_id]: - all_eps_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE status IN (" + ','.join(['?']*len(status_list)) + ") AND season != 0 AND showid = ?", status_list + [cur_indexer_id]) - all_eps = [str(x["season"])+'x' + str(x["episode"]) for x in all_eps_results] + all_eps_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE status IN (" + ','.join( + ['?'] * len(status_list)) + ") AND season != 0 AND showid = ?", status_list + [cur_indexer_id]) + all_eps = [str(x["season"]) + 'x' + str(x["episode"]) for x in all_eps_results] to_change[cur_indexer_id] = all_eps Home().setStatus(cur_indexer_id, '|'.join(to_change[cur_indexer_id]), newStatus, direct=True) @@ -349,12 +355,15 @@ class Manage: def showSubtitleMissed(self, indexer_id, whichSubs): myDB = db.DBConnection() - cur_show_results = myDB.select("SELECT season, episode, name, subtitles FROM tv_episodes WHERE showid = ? AND season != 0 AND status LIKE '%4'", [int(indexer_id)]) + cur_show_results = myDB.select( + "SELECT season, episode, name, subtitles FROM tv_episodes WHERE showid = ? 
AND season != 0 AND status LIKE '%4'", + [int(indexer_id)]) result = {} for cur_result in cur_show_results: if whichSubs == 'all': - if len(set(cur_result["subtitles"].split(',')).intersection(set(subtitles.wantedLanguages()))) >= len(subtitles.wantedLanguages()): + if len(set(cur_result["subtitles"].split(',')).intersection(set(subtitles.wantedLanguages()))) >= len( + subtitles.wantedLanguages()): continue elif whichSubs in cur_result["subtitles"].split(','): continue @@ -370,7 +379,9 @@ class Manage: result[cur_season][cur_episode]["name"] = cur_result["name"] - result[cur_season][cur_episode]["subtitles"] = ",".join(subliminal.language.Language(subtitle).alpha2 for subtitle in cur_result["subtitles"].split(',')) if not cur_result["subtitles"] == '' else '' + result[cur_season][cur_episode]["subtitles"] = ",".join( + subliminal.language.Language(subtitle).alpha2 for subtitle in cur_result["subtitles"].split(',')) if not \ + cur_result["subtitles"] == '' else '' return json.dumps(result) @@ -385,14 +396,16 @@ class Manage: return _munge(t) myDB = db.DBConnection() - status_results = myDB.select("SELECT show_name, tv_shows.indexer_id as indexer_id, tv_episodes.subtitles subtitles FROM tv_episodes, tv_shows WHERE tv_shows.subtitles = 1 AND tv_episodes.status LIKE '%4' AND tv_episodes.season != 0 AND tv_episodes.showid = tv_shows.indexer_id ORDER BY show_name") + status_results = myDB.select( + "SELECT show_name, tv_shows.indexer_id as indexer_id, tv_episodes.subtitles subtitles FROM tv_episodes, tv_shows WHERE tv_shows.subtitles = 1 AND tv_episodes.status LIKE '%4' AND tv_episodes.season != 0 AND tv_episodes.showid = tv_shows.indexer_id ORDER BY show_name") ep_counts = {} show_names = {} sorted_show_ids = [] for cur_status_result in status_results: if whichSubs == 'all': - if len(set(cur_status_result["subtitles"].split(',')).intersection(set(subtitles.wantedLanguages()))) >= len(subtitles.wantedLanguages()): + if len(set(cur_status_result["subtitles"].split(',')).intersection( + set(subtitles.wantedLanguages()))) >= len(subtitles.wantedLanguages()): continue elif whichSubs in cur_status_result["subtitles"].split(','): continue @@ -434,7 +447,9 @@ class Manage: # get a list of all the eps we want to download subtitles if they just said "all" if 'all' in to_download[cur_indexer_id]: myDB = db.DBConnection() - all_eps_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE status LIKE '%4' AND season != 0 AND showid = ?", [cur_indexer_id]) + all_eps_results = myDB.select( + "SELECT season, episode FROM tv_episodes WHERE status LIKE '%4' AND season != 0 AND showid = ?", + [cur_indexer_id]) to_download[cur_indexer_id] = [str(x["season"]) + 'x' + str(x["episode"]) for x in all_eps_results] for epResult in to_download[cur_indexer_id]: @@ -451,7 +466,7 @@ class Manage: show_obj = helpers.findCertainShow(sickbeard.showList, int(indexer_id)) if show_obj: - sickbeard.backlogSearchScheduler.action.searchBacklog([show_obj]) # @UndefinedVariable + sickbeard.backlogSearchScheduler.action.searchBacklog([show_obj]) # @UndefinedVariable redirect("/manage/backlogOverview/") @@ -478,10 +493,10 @@ class Manage: epCounts[Overview.UNAIRED] = 0 epCounts[Overview.SNATCHED] = 0 - sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? ORDER BY season DESC, episode DESC", [curShow.indexerid]) + sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? 
ORDER BY season DESC, episode DESC", + [curShow.indexerid]) for curResult in sqlResults: - curEpCat = curShow.getOverview(int(curResult["status"])) epCats[str(curResult["season"]) + "x" + str(curResult["episode"])] = curEpCat epCounts[curEpCat] += 1 @@ -592,7 +607,8 @@ class Manage: cur_show_dir = ek.ek(os.path.basename, showObj._location) if cur_root_dir in dir_map and cur_root_dir != dir_map[cur_root_dir]: new_show_dir = ek.ek(os.path.join, dir_map[cur_root_dir], cur_show_dir) - logger.log(u"For show " + showObj.name + " changing dir from " + showObj._location + " to " + new_show_dir) + logger.log( + u"For show " + showObj.name + " changing dir from " + showObj._location + " to " + new_show_dir) else: new_show_dir = showObj._location @@ -620,47 +636,49 @@ class Manage: exceptions_list = [] - curErrors += Home().editShow(curShow, new_show_dir, anyQualities, bestQualities, exceptions_list, new_flatten_folders, new_paused, subtitles=new_subtitles, directCall=True) + curErrors += Home().editShow(curShow, new_show_dir, anyQualities, bestQualities, exceptions_list, + new_flatten_folders, new_paused, subtitles=new_subtitles, directCall=True) if curErrors: logger.log(u"Errors: " + str(curErrors), logger.ERROR) - errors.append('%s:\n
<ul>' % showObj.name + ' '.join(['<li>%s</li>' % error for error in curErrors]) + "</ul>") + errors.append('%s:\n<ul>' % showObj.name + ' '.join( + ['<li>%s</li>' % error for error in curErrors]) + "</ul>
") if len(errors) > 0: ui.notifications.error('%d error%s while saving changes:' % (len(errors), "" if len(errors) == 1 else "s"), - " ".join(errors)) + " ".join(errors)) redirect("/manage/") @cherrypy.expose def massUpdate(self, toUpdate=None, toRefresh=None, toRename=None, toDelete=None, toMetadata=None, toSubtitle=None): - if toUpdate != None: + if toUpdate is not None: toUpdate = toUpdate.split('|') else: toUpdate = [] - if toRefresh != None: + if toRefresh is not None: toRefresh = toRefresh.split('|') else: toRefresh = [] - if toRename != None: + if toRename is not None: toRename = toRename.split('|') else: toRename = [] - if toSubtitle != None: + if toSubtitle is not None: toSubtitle = toSubtitle.split('|') else: toSubtitle = [] - if toDelete != None: + if toDelete is not None: toDelete = toDelete.split('|') else: toDelete = [] - if toMetadata != None: + if toMetadata is not None: toMetadata = toMetadata.split('|') else: toMetadata = [] @@ -678,7 +696,7 @@ class Manage: showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(curShowID)) - if showObj == None: + if showObj is None: continue if curShowID in toDelete: @@ -688,7 +706,7 @@ class Manage: if curShowID in toUpdate: try: - sickbeard.showQueueScheduler.action.updateShow(showObj, True) # @UndefinedVariable + sickbeard.showQueueScheduler.action.updateShow(showObj, True) # @UndefinedVariable updates.append(showObj.name) except exceptions.CantUpdateException, e: errors.append("Unable to update show " + showObj.name + ": " + ex(e)) @@ -696,22 +714,22 @@ class Manage: # don't bother refreshing shows that were updated anyway if curShowID in toRefresh and curShowID not in toUpdate: try: - sickbeard.showQueueScheduler.action.refreshShow(showObj) # @UndefinedVariable + sickbeard.showQueueScheduler.action.refreshShow(showObj) # @UndefinedVariable refreshes.append(showObj.name) except exceptions.CantRefreshException, e: errors.append("Unable to refresh show " + showObj.name + ": " + ex(e)) if curShowID in toRename: - sickbeard.showQueueScheduler.action.renameShowEpisodes(showObj) # @UndefinedVariable + sickbeard.showQueueScheduler.action.renameShowEpisodes(showObj) # @UndefinedVariable renames.append(showObj.name) if curShowID in toSubtitle: - sickbeard.showQueueScheduler.action.downloadSubtitles(showObj) # @UndefinedVariable + sickbeard.showQueueScheduler.action.downloadSubtitles(showObj) # @UndefinedVariable subtitles.append(showObj.name) if len(errors) > 0: ui.notifications.error("Errors encountered", - '
<br />\n'.join(errors))
+                              '<br />
\n'.join(errors)) messageDetail = "" @@ -737,7 +755,7 @@ class Manage: if len(updates + refreshes + renames + subtitles) > 0: ui.notifications.message("The following actions were queued:", - messageDetail) + messageDetail) redirect("/manage/") @@ -777,7 +795,7 @@ class Manage: else: sqlResults = myDB.select("SELECT * FROM failed LIMIT ?", [limit]) - toRemove = toRemove.split("|") if toRemove != None else [] + toRemove = toRemove.split("|") if toRemove is not None else [] for release in toRemove: myDB.action('DELETE FROM failed WHERE release = ?', [release]) @@ -792,41 +810,45 @@ class Manage: return _munge(t) -class History: +class History: @cherrypy.expose def index(self, limit=100): myDB = db.DBConnection() -# sqlResults = myDB.select("SELECT h.*, show_name, name FROM history h, tv_shows s, tv_episodes e WHERE h.showid=s.indexer_id AND h.showid=e.showid AND h.season=e.season AND h.episode=e.episode ORDER BY date DESC LIMIT "+str(numPerPage*(p-1))+", "+str(numPerPage)) + # sqlResults = myDB.select("SELECT h.*, show_name, name FROM history h, tv_shows s, tv_episodes e WHERE h.showid=s.indexer_id AND h.showid=e.showid AND h.season=e.season AND h.episode=e.episode ORDER BY date DESC LIMIT "+str(numPerPage*(p-1))+", "+str(numPerPage)) if limit == "0": - sqlResults = myDB.select("SELECT h.*, show_name FROM history h, tv_shows s WHERE h.showid=s.indexer_id ORDER BY date DESC") + sqlResults = myDB.select( + "SELECT h.*, show_name FROM history h, tv_shows s WHERE h.showid=s.indexer_id ORDER BY date DESC") else: - sqlResults = myDB.select("SELECT h.*, show_name FROM history h, tv_shows s WHERE h.showid=s.indexer_id ORDER BY date DESC LIMIT ?", [limit]) + sqlResults = myDB.select( + "SELECT h.*, show_name FROM history h, tv_shows s WHERE h.showid=s.indexer_id ORDER BY date DESC LIMIT ?", + [limit]) + + history = {'show_id': 0, 'season': 0, 'episode': 0, 'quality': 0, + 'actions': [{'time': '', 'action': '', 'provider': ''}]} + compact = [] - history = { 'show_id': 0, 'season': 0, 'episode': 0, 'quality': 0, 'actions': [{'time': '', 'action': '', 'provider': ''}]} - compact=[] - for sql_result in sqlResults: - - if not any((history['show_id'] == sql_result['showid'] \ - and history['season'] == sql_result['season'] \ - and history['episode'] == sql_result['episode'] \ - and history['quality'] == sql_result['quality'])\ - for history in compact): - history = {} + if not any((history['show_id'] == sql_result['showid'] \ + and history['season'] == sql_result['season'] \ + and history['episode'] == sql_result['episode'] \ + and history['quality'] == sql_result['quality']) \ + for history in compact): + + history = {} history['show_id'] = sql_result['showid'] history['season'] = sql_result['season'] history['episode'] = sql_result['episode'] history['quality'] = sql_result['quality'] history['show_name'] = sql_result['show_name'] history['resource'] = sql_result['resource'] - + action = {} history['actions'] = [] - + action['time'] = sql_result['date'] action['action'] = sql_result['action'] action['provider'] = sql_result['provider'] @@ -836,11 +858,11 @@ class History: compact.append(history) else: index = [i for i, dict in enumerate(compact) \ - if dict['show_id'] == sql_result['showid'] \ - and dict['season'] == sql_result['season'] \ - and dict['episode'] == sql_result['episode'] - and dict['quality'] == sql_result['quality']][0] - + if dict['show_id'] == sql_result['showid'] \ + and dict['season'] == sql_result['season'] \ + and dict['episode'] == sql_result['episode'] + and dict['quality'] == 
sql_result['quality']][0] + action = {} history = compact[index] @@ -856,8 +878,8 @@ class History: t.compactResults = compact t.limit = limit t.submenu = [ - { 'title': 'Clear History', 'path': 'history/clearHistory' }, - { 'title': 'Trim History', 'path': 'history/trimHistory' }, + {'title': 'Clear History', 'path': 'history/clearHistory'}, + {'title': 'Trim History', 'path': 'history/trimHistory'}, ] return _munge(t) @@ -876,21 +898,23 @@ class History: def trimHistory(self): myDB = db.DBConnection() - myDB.action("DELETE FROM history WHERE date < "+str((datetime.datetime.today()-datetime.timedelta(days=30)).strftime(history.dateFormat))) + myDB.action("DELETE FROM history WHERE date < " + str( + (datetime.datetime.today() - datetime.timedelta(days=30)).strftime(history.dateFormat))) ui.notifications.message('Removed history entries greater than 30 days old') redirect("/history/") + ConfigMenu = [ - { 'title': 'General', 'path': 'config/general/' }, - { 'title': 'Search Settings', 'path': 'config/search/' }, - { 'title': 'Search Providers', 'path': 'config/providers/' }, - { 'title': 'Subtitles Settings','path': 'config/subtitles/' }, - { 'title': 'Post Processing', 'path': 'config/postProcessing/' }, - { 'title': 'Notifications', 'path': 'config/notifications/' }, + {'title': 'General', 'path': 'config/general/'}, + {'title': 'Search Settings', 'path': 'config/search/'}, + {'title': 'Search Providers', 'path': 'config/providers/'}, + {'title': 'Subtitles Settings', 'path': 'config/subtitles/'}, + {'title': 'Post Processing', 'path': 'config/postProcessing/'}, + {'title': 'Notifications', 'path': 'config/notifications/'}, ] -class ConfigGeneral: +class ConfigGeneral: @cherrypy.expose def index(self): @@ -950,7 +974,8 @@ class ConfigGeneral: @cherrypy.expose def saveGeneral(self, log_dir=None, web_port=None, web_log=None, encryption_version=None, web_ipv6=None, update_shows_on_start=None, launch_browser=None, web_username=None, use_api=None, api_key=None, - web_password=None, version_notify=None, enable_https=None, https_cert=None, https_key=None, sort_article=None, + web_password=None, version_notify=None, enable_https=None, https_cert=None, https_key=None, + sort_article=None, anon_redirect=None, git_path=None, calendar_unprotected=None, date_preset=None, time_preset=None): results = [] @@ -977,15 +1002,14 @@ class ConfigGeneral: if date_preset: sickbeard.DATE_PRESET = date_preset - + if time_preset: sickbeard.TIME_PRESET_W_SECONDS = time_preset - sickbeard.TIME_PRESET = sickbeard.TIME_PRESET_W_SECONDS.replace(u":%S",u"") + sickbeard.TIME_PRESET = sickbeard.TIME_PRESET_W_SECONDS.replace(u":%S", u"") if not config.change_LOG_DIR(log_dir, web_log): results += ["Unable to create directory " + os.path.normpath(log_dir) + ", log directory not changed."] - sickbeard.USE_API = config.checkbox_to_value(use_api) sickbeard.API_KEY = api_key @@ -997,10 +1021,12 @@ class ConfigGeneral: sickbeard.ENABLE_HTTPS = config.checkbox_to_value(enable_https) if not config.change_HTTPS_CERT(https_cert): - results += ["Unable to create directory " + os.path.normpath(https_cert) + ", https cert directory not changed."] + results += [ + "Unable to create directory " + os.path.normpath(https_cert) + ", https cert directory not changed."] if not config.change_HTTPS_KEY(https_key): - results += ["Unable to create directory " + os.path.normpath(https_key) + ", https key directory not changed."] + results += [ + "Unable to create directory " + os.path.normpath(https_key) + ", https key directory not changed."] 
sickbeard.save_config() @@ -1008,7 +1034,7 @@ class ConfigGeneral: for x in results: logger.log(x, logger.ERROR) ui.notifications.error('Error(s) Saving Configuration', - '
<br />\n'.join(results))
+                                   '<br />
\n'.join(results)) else: ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickbeard.CONFIG_FILE)) @@ -1016,7 +1042,6 @@ class ConfigGeneral: class ConfigSearch: - @cherrypy.expose def index(self): @@ -1026,10 +1051,13 @@ class ConfigSearch: @cherrypy.expose def saveSearch(self, use_nzbs=None, use_torrents=None, nzb_dir=None, sab_username=None, sab_password=None, - sab_apikey=None, sab_category=None, sab_host=None, nzbget_username=None, nzbget_password=None, nzbget_category=None, nzbget_host=None, - nzb_method=None, torrent_method=None, usenet_retention=None, search_frequency=None, download_propers=None, allow_high_priority=None, - torrent_dir=None, torrent_username=None, torrent_password=None, torrent_host=None, torrent_label=None, torrent_path=None, - torrent_ratio=None, torrent_paused=None, torrent_high_bandwidth=None, ignore_words=None): + sab_apikey=None, sab_category=None, sab_host=None, nzbget_username=None, nzbget_password=None, + nzbget_category=None, nzbget_host=None, + nzb_method=None, torrent_method=None, usenet_retention=None, search_frequency=None, + download_propers=None, allow_high_priority=None, + torrent_dir=None, torrent_username=None, torrent_password=None, torrent_host=None, + torrent_label=None, torrent_path=None, + torrent_ratio=None, torrent_paused=None, torrent_high_bandwidth=None, ignore_words=None): results = [] @@ -1084,14 +1112,14 @@ class ConfigSearch: for x in results: logger.log(x, logger.ERROR) ui.notifications.error('Error(s) Saving Configuration', - '
<br />\n'.join(results))
+                                   '<br />
\n'.join(results)) else: ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickbeard.CONFIG_FILE)) redirect("/config/search/") -class ConfigPostProcessing: +class ConfigPostProcessing: @cherrypy.expose def index(self): @@ -1101,17 +1129,19 @@ class ConfigPostProcessing: @cherrypy.expose def savePostProcessing(self, naming_pattern=None, naming_multi_ep=None, - xbmc_data=None, xbmc_12plus_data=None, mediabrowser_data=None, sony_ps3_data=None, wdtv_data=None, tivo_data=None, - keep_processed_dir=None, process_method=None, process_automatically=None, rename_episodes=None, unpack=None, - move_associated_files=None, tv_download_dir=None, naming_custom_abd=None, naming_abd_pattern=None, naming_strip_year=None, use_failed_downloads=None, - delete_failed=None, extra_scripts=None): + xbmc_data=None, xbmc_12plus_data=None, mediabrowser_data=None, sony_ps3_data=None, + wdtv_data=None, tivo_data=None, + keep_processed_dir=None, process_method=None, process_automatically=None, + rename_episodes=None, unpack=None, + move_associated_files=None, tv_download_dir=None, naming_custom_abd=None, + naming_abd_pattern=None, naming_strip_year=None, use_failed_downloads=None, + delete_failed=None, extra_scripts=None): results = [] if not config.change_TV_DOWNLOAD_DIR(tv_download_dir): results += ["Unable to create directory " + os.path.normpath(tv_download_dir) + ", dir not changed."] - sickbeard.PROCESS_AUTOMATICALLY = config.checkbox_to_value(process_automatically) if sickbeard.PROCESS_AUTOMATICALLY: @@ -1162,7 +1192,8 @@ class ConfigPostProcessing: if self.isNamingValid(naming_abd_pattern, None, True) != "invalid": sickbeard.NAMING_ABD_PATTERN = naming_abd_pattern else: - results.append("You tried saving an invalid air-by-date naming config, not saving your air-by-date settings") + results.append( + "You tried saving an invalid air-by-date naming config, not saving your air-by-date settings") sickbeard.save_config() @@ -1170,7 +1201,7 @@ class ConfigPostProcessing: for x in results: logger.log(x, logger.ERROR) ui.notifications.error('Error(s) Saving Configuration', - '
<br />\n'.join(results))
+                                   '<br />
\n'.join(results)) else: ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickbeard.CONFIG_FILE)) @@ -1179,7 +1210,7 @@ class ConfigPostProcessing: @cherrypy.expose def testNaming(self, pattern=None, multi=None, abd=False): - if multi != None: + if multi is not None: multi = int(multi) result = naming.test_name(pattern, multi, abd) @@ -1190,7 +1221,7 @@ class ConfigPostProcessing: @cherrypy.expose def isNamingValid(self, pattern=None, multi=None, abd=False): - if pattern == None: + if pattern is None: return "invalid" # air by date shows just need one check, we don't need to worry about season folders @@ -1222,7 +1253,7 @@ class ConfigPostProcessing: try: rar_path = os.path.join(sickbeard.PROG_DIR, 'lib', 'unrar2', 'test.rar') testing = RarFile(rar_path).read_files('*test.txt') - if testing[0][1]=='This is only a test.': + if testing[0][1] == 'This is only a test.': return 'supported' logger.log(u'Rar Not Supported: Can not read the content of test file', logger.ERROR) return 'not supported' @@ -1230,8 +1261,8 @@ class ConfigPostProcessing: logger.log(u'Rar Not Supported: ' + ex(e), logger.ERROR) return 'not supported' -class ConfigProviders: +class ConfigProviders: @cherrypy.expose def index(self): t = PageTemplate(file="config_providers.tmpl") @@ -1303,7 +1334,8 @@ class ConfigProviders: if not name: return json.dumps({'error': 'Invalid name specified'}) - providerDict = dict(zip([x.getID() for x in sickbeard.torrentRssProviderList], sickbeard.torrentRssProviderList)) + providerDict = dict( + zip([x.getID() for x in sickbeard.torrentRssProviderList], sickbeard.torrentRssProviderList)) tempProvider = rsstorrent.TorrentRssProvider(name, url) @@ -1314,7 +1346,7 @@ class ConfigProviders: if succ: return json.dumps({'success': tempProvider.getID()}) else: - return json.dumps({'error': errMsg }) + return json.dumps({'error': errMsg}) @cherrypy.expose def saveTorrentRssProvider(self, name, url): @@ -1339,7 +1371,8 @@ class ConfigProviders: @cherrypy.expose def deleteTorrentRssProvider(self, id): - providerDict = dict(zip([x.getID() for x in sickbeard.torrentRssProviderList], sickbeard.torrentRssProviderList)) + providerDict = dict( + zip([x.getID() for x in sickbeard.torrentRssProviderList], sickbeard.torrentRssProviderList)) if id not in providerDict: return '0' @@ -1360,12 +1393,12 @@ class ConfigProviders: thepiratebay_trusted=None, thepiratebay_proxy=None, thepiratebay_proxy_url=None, torrentleech_username=None, torrentleech_password=None, iptorrents_username=None, iptorrents_password=None, iptorrents_freeleech=None, - kat_trusted = None, kat_verified = None, + kat_trusted=None, kat_verified=None, scc_username=None, scc_password=None, hdtorrents_username=None, hdtorrents_password=None, torrentday_username=None, torrentday_password=None, torrentday_freeleech=None, hdbits_username=None, hdbits_passkey=None, - nextgen_username=None, nextgen_password=None, + nextgen_username=None, nextgen_password=None, newzbin_username=None, newzbin_password=None, provider_order=None): @@ -1374,7 +1407,8 @@ class ConfigProviders: provider_str_list = provider_order.split() provider_list = [] - newznabProviderDict = dict(zip([x.getID() for x in sickbeard.newznabProviderList], sickbeard.newznabProviderList)) + newznabProviderDict = dict( + zip([x.getID() for x in sickbeard.newznabProviderList], sickbeard.newznabProviderList)) finishedNames = [] @@ -1387,7 +1421,7 @@ class ConfigProviders: cur_name, cur_url, cur_key = curNewznabProviderStr.split('|') cur_url = config.clean_url(cur_url) - + 
newProvider = newznab.NewznabProvider(cur_name, cur_url, key=cur_key) cur_id = newProvider.getID() @@ -1418,7 +1452,8 @@ class ConfigProviders: if curProvider.getID() not in finishedNames: sickbeard.newznabProviderList.remove(curProvider) - torrentRssProviderDict = dict(zip([x.getID() for x in sickbeard.torrentRssProviderList], sickbeard.torrentRssProviderList)) + torrentRssProviderDict = dict( + zip([x.getID() for x in sickbeard.torrentRssProviderList], sickbeard.torrentRssProviderList)) finishedNames = [] if torrentrss_string: @@ -1429,7 +1464,7 @@ class ConfigProviders: curName, curURL = curTorrentRssProviderStr.split('|') curURL = config.clean_url(curURL) - + newProvider = rsstorrent.TorrentRssProvider(curName, curURL) curID = newProvider.getID() @@ -1495,7 +1530,7 @@ class ConfigProviders: elif curProvider == 'hdbits': sickbeard.HDBITS = curEnabled elif curProvider == 'nextgen': - sickbeard.NEXTGEN = curEnabled + sickbeard.NEXTGEN = curEnabled elif curProvider in newznabProviderDict: newznabProviderDict[curProvider].enabled = bool(curEnabled) elif curProvider in torrentRssProviderDict: @@ -1549,7 +1584,7 @@ class ConfigProviders: sickbeard.NEXTGEN_USERNAME = nextgen_username.strip() sickbeard.NEXTGEN_PASSWORD = nextgen_password.strip() - + sickbeard.NEWZNAB_DATA = '!!!'.join([x.configStr() for x in sickbeard.newznabProviderList]) sickbeard.PROVIDER_ORDER = provider_list @@ -1559,7 +1594,7 @@ class ConfigProviders: for x in results: logger.log(x, logger.ERROR) ui.notifications.error('Error(s) Saving Configuration', - '
<br />\n'.join(results))
+                                   '<br />
\n'.join(results)) else: ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickbeard.CONFIG_FILE)) @@ -1567,7 +1602,6 @@ class ConfigProviders: class ConfigNotifications: - @cherrypy.expose def index(self): t = PageTemplate(file="config_notifications.tmpl") @@ -1575,27 +1609,46 @@ class ConfigNotifications: return _munge(t) @cherrypy.expose - def saveNotifications(self, use_xbmc=None, xbmc_notify_onsnatch=None, xbmc_notify_ondownload=None, xbmc_notify_onsubtitledownload=None, xbmc_update_onlyfirst=None, - xbmc_update_library=None, xbmc_update_full=None, xbmc_host=None, xbmc_username=None, xbmc_password=None, - use_plex=None, plex_notify_onsnatch=None, plex_notify_ondownload=None, plex_notify_onsubtitledownload=None, plex_update_library=None, + def saveNotifications(self, use_xbmc=None, xbmc_notify_onsnatch=None, xbmc_notify_ondownload=None, + xbmc_notify_onsubtitledownload=None, xbmc_update_onlyfirst=None, + xbmc_update_library=None, xbmc_update_full=None, xbmc_host=None, xbmc_username=None, + xbmc_password=None, + use_plex=None, plex_notify_onsnatch=None, plex_notify_ondownload=None, + plex_notify_onsubtitledownload=None, plex_update_library=None, plex_server_host=None, plex_host=None, plex_username=None, plex_password=None, - use_growl=None, growl_notify_onsnatch=None, growl_notify_ondownload=None, growl_notify_onsubtitledownload=None, growl_host=None, growl_password=None, - use_prowl=None, prowl_notify_onsnatch=None, prowl_notify_ondownload=None, prowl_notify_onsubtitledownload=None, prowl_api=None, prowl_priority=0, - use_twitter=None, twitter_notify_onsnatch=None, twitter_notify_ondownload=None, twitter_notify_onsubtitledownload=None, - use_boxcar=None, boxcar_notify_onsnatch=None, boxcar_notify_ondownload=None, boxcar_notify_onsubtitledownload=None, boxcar_username=None, - use_pushover=None, pushover_notify_onsnatch=None, pushover_notify_ondownload=None, pushover_notify_onsubtitledownload=None, pushover_userkey=None, - use_libnotify=None, libnotify_notify_onsnatch=None, libnotify_notify_ondownload=None, libnotify_notify_onsubtitledownload=None, + use_growl=None, growl_notify_onsnatch=None, growl_notify_ondownload=None, + growl_notify_onsubtitledownload=None, growl_host=None, growl_password=None, + use_prowl=None, prowl_notify_onsnatch=None, prowl_notify_ondownload=None, + prowl_notify_onsubtitledownload=None, prowl_api=None, prowl_priority=0, + use_twitter=None, twitter_notify_onsnatch=None, twitter_notify_ondownload=None, + twitter_notify_onsubtitledownload=None, + use_boxcar=None, boxcar_notify_onsnatch=None, boxcar_notify_ondownload=None, + boxcar_notify_onsubtitledownload=None, boxcar_username=None, + use_pushover=None, pushover_notify_onsnatch=None, pushover_notify_ondownload=None, + pushover_notify_onsubtitledownload=None, pushover_userkey=None, + use_libnotify=None, libnotify_notify_onsnatch=None, libnotify_notify_ondownload=None, + libnotify_notify_onsubtitledownload=None, use_nmj=None, nmj_host=None, nmj_database=None, nmj_mount=None, use_synoindex=None, use_nmjv2=None, nmjv2_host=None, nmjv2_dbloc=None, nmjv2_database=None, - use_trakt=None, trakt_username=None, trakt_password=None, trakt_api=None, trakt_remove_watchlist=None, trakt_use_watchlist=None, trakt_method_add=None, trakt_start_paused=None, - use_synologynotifier=None, synologynotifier_notify_onsnatch=None, synologynotifier_notify_ondownload=None, synologynotifier_notify_onsubtitledownload=None, - use_pytivo=None, pytivo_notify_onsnatch=None, pytivo_notify_ondownload=None, 
pytivo_notify_onsubtitledownload=None, pytivo_update_library=None, + use_trakt=None, trakt_username=None, trakt_password=None, trakt_api=None, + trakt_remove_watchlist=None, trakt_use_watchlist=None, trakt_method_add=None, + trakt_start_paused=None, + use_synologynotifier=None, synologynotifier_notify_onsnatch=None, + synologynotifier_notify_ondownload=None, synologynotifier_notify_onsubtitledownload=None, + use_pytivo=None, pytivo_notify_onsnatch=None, pytivo_notify_ondownload=None, + pytivo_notify_onsubtitledownload=None, pytivo_update_library=None, pytivo_host=None, pytivo_share_name=None, pytivo_tivo_name=None, - use_nma=None, nma_notify_onsnatch=None, nma_notify_ondownload=None, nma_notify_onsubtitledownload=None, nma_api=None, nma_priority=0, - use_pushalot=None, pushalot_notify_onsnatch=None, pushalot_notify_ondownload=None, pushalot_notify_onsubtitledownload=None, pushalot_authorizationtoken=None, - use_pushbullet=None, pushbullet_notify_onsnatch=None, pushbullet_notify_ondownload=None, pushbullet_notify_onsubtitledownload=None, pushbullet_api=None, pushbullet_device=None, pushbullet_device_list=None, - use_email=None, email_notify_onsnatch=None, email_notify_ondownload=None, email_notify_onsubtitledownload=None, email_host=None, email_port=25, email_from=None, - email_tls=None, email_user=None, email_password=None, email_list=None, email_show_list=None, email_show=None ): + use_nma=None, nma_notify_onsnatch=None, nma_notify_ondownload=None, + nma_notify_onsubtitledownload=None, nma_api=None, nma_priority=0, + use_pushalot=None, pushalot_notify_onsnatch=None, pushalot_notify_ondownload=None, + pushalot_notify_onsubtitledownload=None, pushalot_authorizationtoken=None, + use_pushbullet=None, pushbullet_notify_onsnatch=None, pushbullet_notify_ondownload=None, + pushbullet_notify_onsubtitledownload=None, pushbullet_api=None, pushbullet_device=None, + pushbullet_device_list=None, + use_email=None, email_notify_onsnatch=None, email_notify_ondownload=None, + email_notify_onsubtitledownload=None, email_host=None, email_port=25, email_from=None, + email_tls=None, email_user=None, email_password=None, email_list=None, email_show_list=None, + email_show=None): results = [] @@ -1671,7 +1724,8 @@ class ConfigNotifications: sickbeard.USE_SYNOLOGYNOTIFIER = config.checkbox_to_value(use_synologynotifier) sickbeard.SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH = config.checkbox_to_value(synologynotifier_notify_onsnatch) sickbeard.SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(synologynotifier_notify_ondownload) - sickbeard.SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value(synologynotifier_notify_onsubtitledownload) + sickbeard.SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value( + synologynotifier_notify_onsubtitledownload) sickbeard.USE_TRAKT = config.checkbox_to_value(use_trakt) sickbeard.TRAKT_USERNAME = trakt_username @@ -1734,14 +1788,14 @@ class ConfigNotifications: for x in results: logger.log(x, logger.ERROR) ui.notifications.error('Error(s) Saving Configuration', - '
<br />\n'.join(results))
+                                   '<br />
\n'.join(results)) else: ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickbeard.CONFIG_FILE)) redirect("/config/notifications/") -class ConfigSubtitles: +class ConfigSubtitles: @cherrypy.expose def index(self): t = PageTemplate(file="config_subtitles.tmpl") @@ -1749,14 +1803,15 @@ class ConfigSubtitles: return _munge(t) @cherrypy.expose - def saveSubtitles(self, use_subtitles=None, subtitles_plugins=None, subtitles_languages=None, subtitles_dir=None, service_order=None, subtitles_history=None, subtitles_finder_frequency=None): + def saveSubtitles(self, use_subtitles=None, subtitles_plugins=None, subtitles_languages=None, subtitles_dir=None, + service_order=None, subtitles_history=None, subtitles_finder_frequency=None): results = [] if subtitles_finder_frequency == '' or subtitles_finder_frequency is None: subtitles_finder_frequency = 1 if use_subtitles == "on": - if sickbeard.subtitlesFinderScheduler.thread == None or not sickbeard.subtitlesFinderScheduler.thread.isAlive(): + if sickbeard.subtitlesFinderScheduler.thread is None or not sickbeard.subtitlesFinderScheduler.thread.isAlive(): sickbeard.subtitlesFinderScheduler.silent = False sickbeard.subtitlesFinderScheduler.initThread() else: @@ -1769,7 +1824,8 @@ class ConfigSubtitles: pass sickbeard.USE_SUBTITLES = config.checkbox_to_value(use_subtitles) - sickbeard.SUBTITLES_LANGUAGES = [lang.alpha2 for lang in subtitles.isValidLanguage(subtitles_languages.replace(' ', '').split(','))] if subtitles_languages != '' else '' + sickbeard.SUBTITLES_LANGUAGES = [lang.alpha2 for lang in subtitles.isValidLanguage( + subtitles_languages.replace(' ', '').split(','))] if subtitles_languages != '' else '' sickbeard.SUBTITLES_DIR = subtitles_dir sickbeard.SUBTITLES_HISTORY = config.checkbox_to_value(subtitles_history) sickbeard.SUBTITLES_FINDER_FREQUENCY = config.to_int(subtitles_finder_frequency, default=1) @@ -1792,17 +1848,16 @@ class ConfigSubtitles: for x in results: logger.log(x, logger.ERROR) ui.notifications.error('Error(s) Saving Configuration', - '
<br />\n'.join(results))
+                                   '<br />
\n'.join(results)) else: ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickbeard.CONFIG_FILE)) redirect("/config/subtitles/") -class Config: +class Config: @cherrypy.expose def index(self): - t = PageTemplate(file="config.tmpl") t.submenu = ConfigMenu @@ -1820,24 +1875,27 @@ class Config: subtitles = ConfigSubtitles() + def haveXBMC(): return sickbeard.USE_XBMC and sickbeard.XBMC_UPDATE_LIBRARY + def havePLEX(): return sickbeard.USE_PLEX and sickbeard.PLEX_UPDATE_LIBRARY + def HomeMenu(): return [ - { 'title': 'Add Shows', 'path': 'home/addShows/', }, - { 'title': 'Manual Post-Processing', 'path': 'home/postprocess/' }, - { 'title': 'Update XBMC', 'path': 'home/updateXBMC/', 'requires': haveXBMC }, - { 'title': 'Update Plex', 'path': 'home/updatePLEX/', 'requires': havePLEX }, - { 'title': 'Restart', 'path': 'home/restart/?pid=' + str(sickbeard.PID), 'confirm': True }, - { 'title': 'Shutdown', 'path': 'home/shutdown/?pid=' + str(sickbeard.PID), 'confirm': True }, - ] + {'title': 'Add Shows', 'path': 'home/addShows/', }, + {'title': 'Manual Post-Processing', 'path': 'home/postprocess/'}, + {'title': 'Update XBMC', 'path': 'home/updateXBMC/', 'requires': haveXBMC}, + {'title': 'Update Plex', 'path': 'home/updatePLEX/', 'requires': havePLEX}, + {'title': 'Restart', 'path': 'home/restart/?pid=' + str(sickbeard.PID), 'confirm': True}, + {'title': 'Shutdown', 'path': 'home/shutdown/?pid=' + str(sickbeard.PID), 'confirm': True}, + ] + class HomePostProcess: - @cherrypy.expose def index(self): @@ -1846,7 +1904,8 @@ class HomePostProcess: return _munge(t) @cherrypy.expose - def processEpisode(self, dir=None, dirName=None, nzbName=None, jobName=None, quiet=None, process_method=None, force=None, is_priority=None, failed="0", type="auto", indexer="auto"): + def processEpisode(self, dir=None, dirName=None, nzbName=None, jobName=None, quiet=None, process_method=None, + force=None, is_priority=None, failed="0", type="auto", indexer="auto"): # auto-detect dirParam style dirParam = dir if dir is not None else dirName if not None else redirect("/home/postprocess/") @@ -1856,26 +1915,26 @@ class HomePostProcess: else: failed = True - if force=="on": - force=True + if force == "on": + force = True else: - force=False + force = False - if is_priority =="on": + if is_priority == "on": is_priority = True else: is_priority = False - result = processTV.processDir(dirParam, nzbName, process_method=process_method, force=force, is_priority=is_priority, failed=failed, type=type, indexer=indexer) - if quiet != None and int(quiet) == 1: + result = processTV.processDir(dirParam, nzbName, process_method=process_method, force=force, + is_priority=is_priority, failed=failed, type=type, indexer=indexer) + if quiet is not None and int(quiet) == 1: return result - result = result.replace("\n","
\n") + result = result.replace("\n", "
\n") return _genericMessage("Postprocessing results", result) class NewHomeAddShows: - @cherrypy.expose def index(self): @@ -1885,7 +1944,7 @@ class NewHomeAddShows: @cherrypy.expose def getIndexerLanguages(self): - result = indexerApi().config['valid_languages'] + result = sickbeard.indexerApi().config['valid_languages'] # Make sure list is sorted alphabetically but 'en' is in front if 'en' in result: @@ -1904,8 +1963,8 @@ class NewHomeAddShows: if not lang or lang == 'null': lang = "en" - baseURL_TVDB = "http://thetvdb.com/api/GetSeries.php" - baseURL_TVRAGE = "http://services.tvrage.com/feeds/search.php" + results = [] + nameUTF8 = name.encode('utf-8') # Use each word in the show's name as a possible search term @@ -1916,76 +1975,56 @@ class NewHomeAddShows: if len(keywords) > 1: keywords.insert(0, nameUTF8) - # Query the TVDB for each search term and build the list of results - results = [] - for searchTerm in keywords: - paramsTVDB = {'seriesname': searchTerm, - 'language': lang} + # check for indexer preset + indexers = [int(indexer)] + if 0 in indexers: + indexers = sickbeard.indexerApi().indexers - paramsTVRAGE = {'show': searchTerm} + # Query Indexers for each search term and build the list of results + for i in indexers: + def searchShows(): + results = [] - urlDataTVDB = None - if indexer is None or indexer in 'Tvdb': - urlDataTVDB = helpers.getURL(baseURL_TVDB, params=paramsTVDB) + lINDEXER_API_PARMS = {'indexer': i, 'custom_ui': classes.AllShowsListUI} + t = sickbeard.indexerApi(**lINDEXER_API_PARMS) - urlDataTVRAGE = None - if indexer is None or indexer in 'TVRage': - urlDataTVRAGE = helpers.getURL(baseURL_TVRAGE, params=paramsTVRAGE) + for searchTerm in keywords: + try: + search = t[searchTerm] + if isinstance(search, dict): + search = [search] - if urlDataTVDB is None and urlDataTVRAGE is None: - # When urlData is None, trouble connecting to TVDB and TVRage, don't try the rest of the keywords - logger.log(u"Unable to get list of shows", logger.ERROR) - break + # add search results + result = [[t.name, t.config['id'], t.config["show_url"], int(x['id']), x['seriesname'], + x['firstaired']] for x in search if nameUTF8.lower() in x['seriesname'].lower()] - if urlDataTVDB is not None: - indexer = "Tvdb" + # see if we have any matches + if len(result) > 0: + # add result to list of found shows + results += result - logger.log(u"Trying to find Show on " + indexer + ".com with: " + nameUTF8.decode('utf-8'), logger.DEBUG) + # search through result to see if we have a exact match + for show in result: + # cleanup the series name + seriesname = show[4].encode('utf-8').translate(None, string.punctuation) - try: - seriesXML_TVDB = etree.ElementTree(etree.XML(urlDataTVDB)) - seriesTVDB = seriesXML_TVDB.getiterator('Series') + # check if we got a exact match + if nameUTF8.lower() == seriesname.lower(): + return results - # add each TVDB result to our list - if seriesTVDB: - for curSeries in seriesTVDB: - indexer_id = int(curSeries.findtext('seriesid')) + except Exception, e: + continue - # don't add duplicates - if indexer_id in [x[0] for x in results]: - continue + # finished searching a indexer so return the results + return results - results.append((indexer, indexer_id, curSeries.findtext('SeriesName'), curSeries.findtext('FirstAired'))) + # search indexers for shows + results += searchShows() - except Exception, e: - # use finalURL in log, because urlData can be too much information - logger.log(u"Unable to parse XML from " + indexer + " for some reason: " + ex(e), logger.ERROR) - - 
if urlDataTVRAGE is not None: - indexer = "TVRage" - - logger.log(u"Trying to find Show on " + indexer + ".com with: " + nameUTF8.decode('utf-8'), logger.DEBUG) - - try: - seriesXML_TVRAGE = etree.ElementTree(etree.XML(urlDataTVRAGE)) - seriesTVRAGE = seriesXML_TVRAGE.getiterator('show') - - # add each TVRAGE result to our list - for curSeries in seriesTVRAGE: - indexer_id = int(curSeries.findtext('showid')) - - # don't add duplicates - if indexer_id in [x[0] for x in results]: - continue - - results.append((indexer, indexer_id, curSeries.findtext('name'), curSeries.findtext('started'))) - except Exception, e: - # use finalURL in log, because urlData can be too much information - logger.log(u"Unable to parse XML from " + indexer + " for some reason: " + ex(e), logger.ERROR) - - - lang_id = indexerApi().config['langabbv_to_id'][lang] + # remove duplicates + results = list(results for results, _ in itertools.groupby(results)) + lang_id = sickbeard.indexerApi().config['langabbv_to_id'][lang] return json.dumps({'results': results, 'langid': lang_id}) @cherrypy.expose @@ -2030,9 +2069,10 @@ class NewHomeAddShows: continue cur_dir = { - 'dir': cur_path, - 'display_dir': '' + ek.ek(os.path.dirname, cur_path) + os.sep + '' + ek.ek(os.path.basename, cur_path), - } + 'dir': cur_path, + 'display_dir': '' + ek.ek(os.path.dirname, cur_path) + os.sep + '' + ek.ek(os.path.basename, + cur_path), + } # see if the folder is in XBMC already dirResults = myDB.select("SELECT * FROM tv_shows WHERE location = ?", [cur_path]) @@ -2044,19 +2084,22 @@ class NewHomeAddShows: dir_list.append(cur_dir) - indexer_id = '' - indexer = '' - show_name = '' + indexer_id = show_name = indexer = None for cur_provider in sickbeard.metadata_provider_dict.values(): (indexer_id, show_name, indexer) = cur_provider.retrieveShowMetadata(cur_path) - if indexer_id and show_name: - break + if show_name: break # default to TVDB if indexer was not detected - if indexer is None and show_name: - found_info = helpers.searchIndexersForShow(show_name) - if found_info is not None: - indexer = found_info + if show_name and (indexer is None or indexer_id is None): + found_info = helpers.searchIndexersForShow(show_name, indexer_id) + + if found_info: + # set indexer and indexer_id from found info + if indexer is None: + indexer = found_info[0] + + if indexer_id is None: + indexer_id = found_info[1] cur_dir['existing_info'] = (indexer_id, show_name, indexer) @@ -2090,7 +2133,7 @@ class NewHomeAddShows: if not show_dir: t.default_show_name = '' elif not show_name: - t.default_show_name = ek.ek(os.path.basename, ek.ek(os.path.normpath, show_dir)).replace('.',' ') + t.default_show_name = ek.ek(os.path.basename, ek.ek(os.path.normpath, show_dir)).replace('.', ' ') else: t.default_show_name = show_name @@ -2101,12 +2144,12 @@ class NewHomeAddShows: other_shows = [other_shows] if use_provided_info: - t.provided_indexer_id = indexer_id + t.provided_indexer_id = int(indexer_id or 0) t.provided_indexer_name = show_name t.provided_show_dir = show_dir t.other_shows = other_shows - t.provided_indexer = indexer + t.provided_indexer = int(indexer or 0) return _munge(t) @@ -2153,16 +2196,17 @@ class NewHomeAddShows: # sanity check on our inputs if (not rootDir and not fullShowPath) or not whichSeries: - return "Missing params, no indexer id or folder:" + repr(whichSeries) + " and " + repr(rootDir)+"/" + repr(fullShowPath) + return "Missing params, no indexer id or folder:" + repr(whichSeries) + " and " + repr( + rootDir) + "/" + repr(fullShowPath) # figure out 
what show we're adding and where series_pieces = whichSeries.split('|') - if len(series_pieces) < 4: + if len(series_pieces) < 6: return "Error with show selection." - indexer = series_pieces[0] - indexer_id = int(series_pieces[1]) - show_name = series_pieces[2] + indexer = int(series_pieces[1]) + indexer_id = int(series_pieces[3]) + show_name = series_pieces[4] # use the whole path if it's given, or else append the show name to the root dir to get the full show path if fullShowPath: @@ -2182,7 +2226,8 @@ class NewHomeAddShows: dir_exists = helpers.makeDir(show_dir) if not dir_exists: logger.log(u"Unable to create the folder " + show_dir + ", can't add the show", logger.ERROR) - ui.notifications.error("Unable to add show", "Unable to create the folder " + show_dir + ", can't add the show") + ui.notifications.error("Unable to add show", + "Unable to create the folder " + show_dir + ", can't add the show") redirect("/home/") else: helpers.chmodAsParent(show_dir) @@ -2202,7 +2247,8 @@ class NewHomeAddShows: newQuality = Quality.combineQualities(map(int, anyQualities), map(int, bestQualities)) # add the show - sickbeard.showQueueScheduler.action. addShow(indexer, indexer_id, show_dir, int(defaultStatus), newQuality, flatten_folders, subtitles, indexerLang) # @UndefinedVariable + sickbeard.showQueueScheduler.action.addShow(indexer, indexer_id, show_dir, int(defaultStatus), newQuality, + flatten_folders, subtitles, indexerLang) # @UndefinedVariable ui.notifications.message('Show added', 'Adding the specified show into ' + show_dir) return finishAddShow() @@ -2251,9 +2297,11 @@ class NewHomeAddShows: dirs_only.append(cur_dir) else: indexer, show_dir, indexer_id, show_name = self.split_extra_show(cur_dir) + if not show_dir or not indexer_id or not show_name: continue - indexer_id_given.append((indexer, show_dir, int(indexer_id), show_name)) + + indexer_id_given.append((int(indexer), show_dir, int(indexer_id), show_name)) # if they want me to prompt for settings then I will just carry on to the newShow page @@ -2267,11 +2315,15 @@ class NewHomeAddShows: if indexer is not None and indexer_id is not None: # add the show - sickbeard.showQueueScheduler.action.addShow(indexer, indexer_id, show_dir, SKIPPED, sickbeard.QUALITY_DEFAULT, sickbeard.FLATTEN_FOLDERS_DEFAULT, sickbeard.SUBTITLES_DEFAULT, refresh=True) + sickbeard.showQueueScheduler.action.addShow(indexer, indexer_id, show_dir, SKIPPED, + sickbeard.QUALITY_DEFAULT, + sickbeard.FLATTEN_FOLDERS_DEFAULT, + sickbeard.SUBTITLES_DEFAULT, refresh=True) num_added += 1 if num_added: - ui.notifications.message("Shows Added", "Automatically added " + str(num_added) + " from their existing metadata files") + ui.notifications.message("Shows Added", + "Automatically added " + str(num_added) + " from their existing metadata files") # if we're done then go home if not dirs_only: @@ -2280,14 +2332,14 @@ class NewHomeAddShows: # for the remaining shows we need to prompt for each one, so forward this on to the newShow page return self.newShow(dirs_only[0], dirs_only[1:]) + ErrorLogsMenu = [ - { 'title': 'Clear Errors', 'path': 'errorlogs/clearerrors/' }, + {'title': 'Clear Errors', 'path': 'errorlogs/clearerrors/'}, #{ 'title': 'View Log', 'path': 'errorlogs/viewlog' }, ] class ErrorLogs: - @cherrypy.expose def index(self): @@ -2358,7 +2410,6 @@ class ErrorLogs: class Home: - @cherrypy.expose def is_alive(self, *args, **kwargs): if 'callback' in kwargs and '_' in kwargs: @@ -2383,17 +2434,16 @@ class Home: return _munge(t) addShows = NewHomeAddShows() - 
postprocess = HomePostProcess() @cherrypy.expose def testSABnzbd(self, host=None, username=None, password=None, apikey=None): host = config.clean_url(host) - + connection, accesMsg = sab.getSabAccesMethod(host, username, password, apikey) if connection: - authed, authMsg = sab.testAuthentication(host, username, password, apikey) #@UnusedVariable + authed, authMsg = sab.testAuthentication(host, username, password, apikey) #@UnusedVariable if authed: return "Success. Connected and authenticated" else: @@ -2403,7 +2453,7 @@ class Home: @cherrypy.expose def testTorrent(self, torrent_method=None, host=None, username=None, password=None): - + host = config.clean_url(host) client = clients.getClientIstance(torrent_method) @@ -2415,11 +2465,11 @@ class Home: @cherrypy.expose def testGrowl(self, host=None, password=None): cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store" - + host = config.clean_host(host, default_port=23053) - + result = notifiers.growl_notifier.test_notify(host, password) - if password == None or password == '': + if password is None or password == '': pw_append = '' else: pw_append = " with password: " + password @@ -2490,7 +2540,7 @@ class Home: def testXBMC(self, host=None, username=None, password=None): cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store" - host = config.clean_hosts(host) + host = config.clean_hosts(host) finalResult = '' for curHost in [x.strip() for x in host.split(",")]: curResult = notifiers.xbmc_notifier.test_notify(urllib.unquote_plus(curHost), username, password) @@ -2540,18 +2590,19 @@ class Home: @cherrypy.expose def settingsNMJ(self, host=None): cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store" - + host = config.clean_host(host) result = notifiers.nmj_notifier.notify_settings(urllib.unquote_plus(host)) if result: - return '{"message": "Got settings from %(host)s", "database": "%(database)s", "mount": "%(mount)s"}' % {"host": host, "database": sickbeard.NMJ_DATABASE, "mount": sickbeard.NMJ_MOUNT} + return '{"message": "Got settings from %(host)s", "database": "%(database)s", "mount": "%(mount)s"}' % { + "host": host, "database": sickbeard.NMJ_DATABASE, "mount": sickbeard.NMJ_MOUNT} else: return '{"message": "Failed! Make sure your Popcorn is on and NMJ is running. (see Log & Errors -> Debug for detailed info)", "database": "", "mount": ""}' @cherrypy.expose def testNMJv2(self, host=None): cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store" - + host = config.clean_host(host) result = notifiers.nmjv2_notifier.test_notify(urllib.unquote_plus(host)) if result: @@ -2560,15 +2611,17 @@ class Home: return "Test notice failed to " + urllib.unquote_plus(host) @cherrypy.expose - def settingsNMJv2(self, host=None, dbloc=None,instance=None): + def settingsNMJv2(self, host=None, dbloc=None, instance=None): cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store" host = config.clean_host(host) result = notifiers.nmjv2_notifier.notify_settings(urllib.unquote_plus(host), dbloc, instance) if result: - return '{"message": "NMJ Database found at: %(host)s", "database": "%(database)s"}' % {"host": host, "database": sickbeard.NMJv2_DATABASE} + return '{"message": "NMJ Database found at: %(host)s", "database": "%(database)s"}' % {"host": host, + "database": sickbeard.NMJv2_DATABASE} else: - return '{"message": "Unable to find NMJ Database at location: %(dbloc)s. 
Is the right location selected and PCH running?", "database": ""}' % {"dbloc": dbloc} + return '{"message": "Unable to find NMJ Database at location: %(dbloc)s. Is the right location selected and PCH running?", "database": ""}' % { + "dbloc": dbloc} @cherrypy.expose def testTrakt(self, api=None, username=None, password=None): @@ -2678,7 +2731,7 @@ class Home: if str(pid) != str(sickbeard.PID): redirect("/home/") - updated = sickbeard.versionCheckScheduler.action.update() # @UndefinedVariable + updated = sickbeard.versionCheckScheduler.action.update() # @UndefinedVariable if updated: # do a hard restart @@ -2686,17 +2739,18 @@ class Home: t = PageTemplate(file="restart_bare.tmpl") return _munge(t) else: - return _genericMessage("Update Failed","Update wasn't successful, not restarting. Check your log for more information.") + return _genericMessage("Update Failed", + "Update wasn't successful, not restarting. Check your log for more information.") @cherrypy.expose def displayShow(self, show=None): - if show == None: + if show is None: return _genericMessage("Error", "Invalid show ID") else: showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show)) - if showObj == None: + if showObj is None: return _genericMessage("Error", "Show not in show list") showObj.exceptions = scene_exceptions.get_scene_exceptions(showObj.indexerid) @@ -2714,7 +2768,7 @@ class Home: ) t = PageTemplate(file="displayShow.tmpl") - t.submenu = [ { 'title': 'Edit', 'path': 'home/editShow?show=%d'%showObj.indexerid } ] + t.submenu = [{'title': 'Edit', 'path': 'home/editShow?show=%d' % showObj.indexerid}] try: t.showLoc = (showObj.location, True) @@ -2723,36 +2777,42 @@ class Home: show_message = '' - if sickbeard.showQueueScheduler.action.isBeingAdded(showObj): # @UndefinedVariable + if sickbeard.showQueueScheduler.action.isBeingAdded(showObj): # @UndefinedVariable show_message = 'This show is in the process of being downloaded - the info below is incomplete.' - elif sickbeard.showQueueScheduler.action.isBeingUpdated(showObj): # @UndefinedVariable + elif sickbeard.showQueueScheduler.action.isBeingUpdated(showObj): # @UndefinedVariable show_message = 'The information below is in the process of being updated.' - elif sickbeard.showQueueScheduler.action.isBeingRefreshed(showObj): # @UndefinedVariable + elif sickbeard.showQueueScheduler.action.isBeingRefreshed(showObj): # @UndefinedVariable show_message = 'The episodes below are currently being refreshed from disk' - elif sickbeard.showQueueScheduler.action.isBeingSubtitled(showObj): # @UndefinedVariable + elif sickbeard.showQueueScheduler.action.isBeingSubtitled(showObj): # @UndefinedVariable show_message = 'Currently downloading subtitles for this show' - elif sickbeard.showQueueScheduler.action.isInRefreshQueue(showObj): # @UndefinedVariable + elif sickbeard.showQueueScheduler.action.isInRefreshQueue(showObj): # @UndefinedVariable show_message = 'This show is queued to be refreshed.' - elif sickbeard.showQueueScheduler.action.isInUpdateQueue(showObj): # @UndefinedVariable + elif sickbeard.showQueueScheduler.action.isInUpdateQueue(showObj): # @UndefinedVariable show_message = 'This show is queued and awaiting an update.' - elif sickbeard.showQueueScheduler.action.isInSubtitleQueue(showObj): # @UndefinedVariable + elif sickbeard.showQueueScheduler.action.isInSubtitleQueue(showObj): # @UndefinedVariable show_message = 'This show is queued and awaiting subtitles download.' 
- if not sickbeard.showQueueScheduler.action.isBeingAdded(showObj): # @UndefinedVariable - if not sickbeard.showQueueScheduler.action.isBeingUpdated(showObj): # @UndefinedVariable - t.submenu.append({ 'title': 'Delete', 'path': 'home/deleteShow?show=%d'%showObj.indexerid, 'confirm': True }) - t.submenu.append({ 'title': 'Re-scan files', 'path': 'home/refreshShow?show=%d'%showObj.indexerid }) - t.submenu.append({ 'title': 'Force Full Update', 'path': 'home/updateShow?show=%d&force=1'%showObj.indexerid }) - t.submenu.append({ 'title': 'Update show in XBMC', 'path': 'home/updateXBMC?showName=%s'%urllib.quote_plus(showObj.name.encode('utf-8')), 'requires': haveXBMC }) - t.submenu.append({ 'title': 'Preview Rename', 'path': 'home/testRename?show=%d'%showObj.indexerid }) - if sickbeard.USE_SUBTITLES and not sickbeard.showQueueScheduler.action.isBeingSubtitled(showObj) and showObj.subtitles: - t.submenu.append({ 'title': 'Download Subtitles', 'path': 'home/subtitleShow?show=%d'%showObj.indexerid }) + if not sickbeard.showQueueScheduler.action.isBeingAdded(showObj): # @UndefinedVariable + if not sickbeard.showQueueScheduler.action.isBeingUpdated(showObj): # @UndefinedVariable + t.submenu.append( + {'title': 'Delete', 'path': 'home/deleteShow?show=%d' % showObj.indexerid, 'confirm': True}) + t.submenu.append({'title': 'Re-scan files', 'path': 'home/refreshShow?show=%d' % showObj.indexerid}) + t.submenu.append( + {'title': 'Force Full Update', 'path': 'home/updateShow?show=%d&force=1' % showObj.indexerid}) + t.submenu.append({'title': 'Update show in XBMC', + 'path': 'home/updateXBMC?showName=%s' % urllib.quote_plus( + showObj.name.encode('utf-8')), 'requires': haveXBMC}) + t.submenu.append({'title': 'Preview Rename', 'path': 'home/testRename?show=%d' % showObj.indexerid}) + if sickbeard.USE_SUBTITLES and not sickbeard.showQueueScheduler.action.isBeingSubtitled( + showObj) and showObj.subtitles: + t.submenu.append( + {'title': 'Download Subtitles', 'path': 'home/subtitleShow?show=%d' % showObj.indexerid}) t.show = showObj t.sqlResults = sqlResults @@ -2769,13 +2829,12 @@ class Home: epCounts[Overview.SNATCHED] = 0 for curResult in sqlResults: - curEpCat = showObj.getOverview(int(curResult["status"])) epCats[str(curResult["season"]) + "x" + str(curResult["episode"])] = curEpCat epCounts[curEpCat] += 1 def titler(x): - if not x or sickbeard.SORT_ARTICLE: + if not x or sickbeard.SORT_ARTICLE: return x if x.lower().startswith('a '): x = x[2:] @@ -2784,11 +2843,12 @@ class Home: elif x.lower().startswith('the '): x = x[4:] return x + t.sortedShowList = sorted(sickbeard.showList, lambda x, y: cmp(titler(x.name), titler(y.name))) t.epCounts = epCounts t.epCats = epCats - + #t.all_scene_exceptions = list(set((get_scene_exceptions(showObj.indexerid) or []) + (get_custom_exceptions(showObj.indexerid) or []))) t.all_scene_exceptions = get_scene_exceptions(showObj.indexerid) t.scene_numbering = get_scene_numbering_for_show(showObj.indexerid) @@ -2798,13 +2858,17 @@ class Home: @cherrypy.expose def plotDetails(self, show, season, episode): - result = db.DBConnection().action("SELECT description FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", (show, season, episode)).fetchone() + result = db.DBConnection().action( + "SELECT description FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", + (show, season, episode)).fetchone() return result['description'] if result else 'Episode not found.' 
@cherrypy.expose - def editShow(self, show=None, location=None, anyQualities=[], bestQualities=[], exceptions_list=[], flatten_folders=None, paused=None, directCall=False, air_by_date=None, dvdorder=None, indexerLang=None, subtitles=None, archive_firstmatch=None): + def editShow(self, show=None, location=None, anyQualities=[], bestQualities=[], exceptions_list=[], + flatten_folders=None, paused=None, directCall=False, air_by_date=None, dvdorder=None, indexerLang=None, + subtitles=None, archive_firstmatch=None): - if show == None: + if show is None: errString = "Invalid show ID: " + str(show) if directCall: return [errString] @@ -2813,7 +2877,7 @@ class Home: showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show)) - if showObj == None: + if showObj is None: errString = "Unable to find the specified show: " + str(show) if directCall: return [errString] @@ -2823,19 +2887,18 @@ class Home: showObj.exceptions = scene_exceptions.get_scene_exceptions(showObj.indexerid) if not location and not anyQualities and not bestQualities and not flatten_folders: - t = PageTemplate(file="editShow.tmpl") t.submenu = HomeMenu() with showObj.lock: t.show = showObj - + t.scene_exceptions = get_scene_exceptions(showObj.indexerid) return _munge(t) flatten_folders = config.checkbox_to_value(flatten_folders) logger.log(u"flatten folders: " + str(flatten_folders)) - + dvdorder = config.checkbox_to_value(dvdorder) archive_firstmatch = config.checkbox_to_value(archive_firstmatch) paused = config.checkbox_to_value(paused) @@ -2877,7 +2940,7 @@ class Home: if bool(showObj.flatten_folders) != bool(flatten_folders): showObj.flatten_folders = flatten_folders try: - sickbeard.showQueueScheduler.action.refreshShow(showObj) # @UndefinedVariable + sickbeard.showQueueScheduler.action.refreshShow(showObj) # @UndefinedVariable except exceptions.CantRefreshException, e: errors.append("Unable to refresh this show: " + ex(e)) @@ -2900,14 +2963,15 @@ class Home: try: showObj.location = location try: - sickbeard.showQueueScheduler.action.refreshShow(showObj) # @UndefinedVariable + sickbeard.showQueueScheduler.action.refreshShow(showObj) # @UndefinedVariable except exceptions.CantRefreshException, e: errors.append("Unable to refresh this show:" + ex(e)) - # grab updated info from TVDB - #showObj.loadEpisodesFromIndexer() - # rescan the episodes in the new folder + # grab updated info from TVDB + #showObj.loadEpisodesFromIndexer() + # rescan the episodes in the new folder except exceptions.NoNFOException: - errors.append("The folder at %s doesn't contain a tvshow.nfo - copy your files to that folder before you change the directory in Sick Beard." % location) + errors.append( + "The folder at %s doesn't contain a tvshow.nfo - copy your files to that folder before you change the directory in Sick Beard." 
% location) # save it to the DB showObj.saveToDB() @@ -2915,14 +2979,14 @@ class Home: # force the update if do_update: try: - sickbeard.showQueueScheduler.action.updateShow(showObj, True) # @UndefinedVariable + sickbeard.showQueueScheduler.action.updateShow(showObj, True) # @UndefinedVariable time.sleep(1) except exceptions.CantUpdateException, e: errors.append("Unable to force an update on the show.") if do_update_exceptions: try: - scene_exceptions.update_scene_exceptions(showObj.indexerid, exceptions_list) # @UndefinedVariable + scene_exceptions.update_scene_exceptions(showObj.indexerid, exceptions_list) # @UndefinedVariable time.sleep(1) except exceptions.CantUpdateException, e: errors.append("Unable to force an update on scene exceptions of the show.") @@ -2932,22 +2996,23 @@ class Home: if len(errors) > 0: ui.notifications.error('%d error%s while saving changes:' % (len(errors), "" if len(errors) == 1 else "s"), - '
<ul>' + '\n'.join(['<li>%s</li>' % error for error in errors]) + "</ul>")
+                                      '<ul>' + '\n'.join(['<li>%s</li>' % error for error in errors]) + "</ul>
") redirect("/home/displayShow?show=" + show) @cherrypy.expose def deleteShow(self, show=None): - if show == None: + if show is None: return _genericMessage("Error", "Invalid show ID") showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show)) - if showObj == None: + if showObj is None: return _genericMessage("Error", "Unable to find the specified show") - if sickbeard.showQueueScheduler.action.isBeingAdded(showObj) or sickbeard.showQueueScheduler.action.isBeingUpdated(showObj): # @UndefinedVariable + if sickbeard.showQueueScheduler.action.isBeingAdded( + showObj) or sickbeard.showQueueScheduler.action.isBeingUpdated(showObj): # @UndefinedVariable return _genericMessage("Error", "Shows can't be deleted while they're being added or updated.") showObj.deleteShow() @@ -2958,20 +3023,20 @@ class Home: @cherrypy.expose def refreshShow(self, show=None): - if show == None: + if show is None: return _genericMessage("Error", "Invalid show ID") showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show)) - if showObj == None: + if showObj is None: return _genericMessage("Error", "Unable to find the specified show") # force the update from the DB try: - sickbeard.showQueueScheduler.action.refreshShow(showObj) # @UndefinedVariable + sickbeard.showQueueScheduler.action.refreshShow(showObj) # @UndefinedVariable except exceptions.CantRefreshException, e: ui.notifications.error("Unable to refresh this show.", - ex(e)) + ex(e)) time.sleep(3) @@ -2980,20 +3045,20 @@ class Home: @cherrypy.expose def updateShow(self, show=None, force=0): - if show == None: + if show is None: return _genericMessage("Error", "Invalid show ID") showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show)) - if showObj == None: + if showObj is None: return _genericMessage("Error", "Unable to find the specified show") # force the update try: - sickbeard.showQueueScheduler.action.updateShow(showObj, bool(force)) # @UndefinedVariable + sickbeard.showQueueScheduler.action.updateShow(showObj, bool(force)) # @UndefinedVariable except exceptions.CantUpdateException, e: ui.notifications.error("Unable to update this show.", - ex(e)) + ex(e)) # just give it some time time.sleep(3) @@ -3003,16 +3068,16 @@ class Home: @cherrypy.expose def subtitleShow(self, show=None, force=0): - if show == None: + if show is None: return _genericMessage("Error", "Invalid show ID") showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show)) - if showObj == None: + if showObj is None: return _genericMessage("Error", "Unable to find the specified show") # search and download subtitles - sickbeard.showQueueScheduler.action.downloadSubtitles(showObj, bool(force)) # @UndefinedVariable + sickbeard.showQueueScheduler.action.downloadSubtitles(showObj, bool(force)) # @UndefinedVariable time.sleep(3) @@ -3038,7 +3103,8 @@ class Home: @cherrypy.expose def updatePLEX(self): if notifiers.plex_notifier.update_library(): - ui.notifications.message("Library update command sent to Plex Media Server host: " + sickbeard.PLEX_SERVER_HOST) + ui.notifications.message( + "Library update command sent to Plex Media Server host: " + sickbeard.PLEX_SERVER_HOST) else: ui.notifications.error("Unable to contact Plex Media Server host: " + sickbeard.PLEX_SERVER_HOST) redirect('/home/') @@ -3046,7 +3112,7 @@ class Home: @cherrypy.expose def setStatus(self, show=None, eps=None, status=None, direct=False): - if show == None or eps == None or status == None: + if show is None or eps is None or status is None: errMsg = "You must specify 
a show and at least one episode" if direct: ui.notifications.error('Error', errMsg) @@ -3064,7 +3130,7 @@ class Home: showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show)) - if showObj == None: + if showObj is None: errMsg = "Error", "Show not in show list" if direct: ui.notifications.error('Error', errMsg) @@ -3075,7 +3141,7 @@ class Home: wanted_segments = [] failed_segments = {} - if eps != None: + if eps is not None: sql_l = [] for curEp in eps.split('|'): @@ -3086,7 +3152,7 @@ class Home: epObj = showObj.getEpisode(int(epInfo[0]), int(epInfo[1])) - if epObj == None: + if epObj is None: return _genericMessage("Error", "Episode couldn't be retrieved") if int(status) in (WANTED, FAILED): @@ -3097,7 +3163,7 @@ class Home: wanted_segments.append(epObj.season) # figure out what episodes failed so we can retry them - failed_segments.setdefault(epObj.season,[]).append(epObj.episode) + failed_segments.setdefault(epObj.season, []).append(epObj.episode) with epObj.lock: # don't let them mess up UNAIRED episodes @@ -3105,14 +3171,21 @@ class Home: logger.log(u"Refusing to change status of " + curEp + " because it is UNAIRED", logger.ERROR) continue - if int(status) in Quality.DOWNLOADED and epObj.status not in Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.DOWNLOADED + [IGNORED] and not ek.ek(os.path.isfile, epObj.location): - logger.log(u"Refusing to change status of " + curEp + " to DOWNLOADED because it's not SNATCHED/DOWNLOADED", logger.ERROR) + if int( + status) in Quality.DOWNLOADED and epObj.status not in Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.DOWNLOADED + [ + IGNORED] and not ek.ek(os.path.isfile, epObj.location): + logger.log( + u"Refusing to change status of " + curEp + " to DOWNLOADED because it's not SNATCHED/DOWNLOADED", + logger.ERROR) continue - if int(status) == FAILED and epObj.status not in Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.DOWNLOADED: - logger.log(u"Refusing to change status of " + curEp + " to FAILED because it's not SNATCHED/DOWNLOADED", logger.ERROR) + if int( + status) == FAILED and epObj.status not in Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.DOWNLOADED: + logger.log( + u"Refusing to change status of " + curEp + " to FAILED because it's not SNATCHED/DOWNLOADED", + logger.ERROR) continue - + epObj.status = int(status) sql_l.append(epObj.get_sql()) @@ -3120,13 +3193,14 @@ class Home: myDB = db.DBConnection() myDB.mass_action(sql_l) - if int(status) == WANTED: + if int(status) == WANTED: msg = "Backlog was automatically started for the following seasons of " + showObj.name + ":
    " for cur_segment in wanted_segments: msg += "
  • Season " + str(cur_segment) + "
  • " - logger.log(u"Sending backlog for " + showObj.name + " season " + str(cur_segment) + " because some eps were set to wanted") + logger.log(u"Sending backlog for " + showObj.name + " season " + str( + cur_segment) + " because some eps were set to wanted") cur_backlog_queue_item = search_queue.BacklogQueueItem(showObj, cur_segment) - sickbeard.searchQueueScheduler.action.add_item(cur_backlog_queue_item) # @UndefinedVariable + sickbeard.searchQueueScheduler.action.add_item(cur_backlog_queue_item) # @UndefinedVariable msg += "
" if wanted_segments: @@ -3136,14 +3210,15 @@ class Home: msg = "Retrying Search was automatically started for the following season of " + showObj.name + ":
" for cur_segment in failed_segments: msg += "
  • Season " + str(cur_segment) + "
  • " - logger.log(u"Retrying Search for " + showObj.name + " season " + str(cur_segment) + " because some eps were set to failed") + logger.log(u"Retrying Search for " + showObj.name + " season " + str( + cur_segment) + " because some eps were set to failed") cur_failed_queue_item = search_queue.FailedQueueItem(showObj, cur_segment) - sickbeard.searchQueueScheduler.action.add_item(cur_failed_queue_item) # @UndefinedVariable + sickbeard.searchQueueScheduler.action.add_item(cur_failed_queue_item) # @UndefinedVariable msg += "" if failed_segments: ui.notifications.message("Retry Search started", msg) - + if direct: return json.dumps({'result': 'success'}) else: @@ -3152,16 +3227,16 @@ class Home: @cherrypy.expose def testRename(self, show=None): - if show == None: + if show is None: return _genericMessage("Error", "You must specify a show") showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show)) - if showObj == None: + if showObj is None: return _genericMessage("Error", "Show not in show list") try: - show_loc = showObj.location #@UnusedVariable + show_loc = showObj.location #@UnusedVariable except exceptions.ShowDirNotFoundException: return _genericMessage("Error", "Can't rename episodes when the show dir is missing.") @@ -3198,24 +3273,24 @@ class Home: @cherrypy.expose def doRename(self, show=None, eps=None): - if show == None or eps == None: + if show is None or eps is None: errMsg = "You must specify a show and at least one episode" return _genericMessage("Error", errMsg) show_obj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show)) - if show_obj == None: + if show_obj is None: errMsg = "Error", "Show not in show list" return _genericMessage("Error", errMsg) try: - show_loc = show_obj.location #@UnusedVariable + show_loc = show_obj.location #@UnusedVariable except exceptions.ShowDirNotFoundException: return _genericMessage("Error", "Can't rename episodes when the show dir is missing.") myDB = db.DBConnection() - if eps == None: + if eps is None: redirect("/home/displayShow?show=" + show) for curEp in eps.split('|'): @@ -3223,11 +3298,13 @@ class Home: epInfo = curEp.split('x') # this is probably the worst possible way to deal with double eps but I've kinda painted myself into a corner here with this stupid database - ep_result = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ? AND 5=5", [show, epInfo[0], epInfo[1]]) + ep_result = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ? AND 5=5", + [show, epInfo[0], epInfo[1]]) if not ep_result: logger.log(u"Unable to find an episode for " + curEp + ", skipping", logger.WARNING) continue - related_eps_result = myDB.select("SELECT * FROM tv_episodes WHERE location = ? AND episode != ?", [ep_result[0]["location"], epInfo[1]]) + related_eps_result = myDB.select("SELECT * FROM tv_episodes WHERE location = ? 
AND episode != ?", + [ep_result[0]["location"], epInfo[1]]) root_ep_obj = show_obj.getEpisode(int(epInfo[0]), int(epInfo[1])) root_ep_obj.relatedEps = [] @@ -3252,10 +3329,10 @@ class Home: # make a queue item for it and put it on the queue ep_queue_item = search_queue.ManualSearchQueueItem(ep_obj) - sickbeard.searchQueueScheduler.action.add_item(ep_queue_item) # @UndefinedVariable + sickbeard.searchQueueScheduler.action.add_item(ep_queue_item) # @UndefinedVariable # wait until the queue item tells us whether it worked or not - while ep_queue_item.success == None: # @UndefinedVariable + while ep_queue_item.success is None: # @UndefinedVariable time.sleep(1) # return the correct json value @@ -3270,7 +3347,7 @@ class Home: return json.dumps({'result': statusStrings[ep_obj.status], 'quality': quality_class - }) + }) return json.dumps({'result': 'failure'}) @@ -3291,7 +3368,10 @@ class Home: # return the correct json value if previous_subtitles != ep_obj.subtitles: - status = 'New subtitles downloaded: %s' % ' '.join(["" + subliminal.language.Language(x).name + "" for x in sorted(list(set(ep_obj.subtitles).difference(previous_subtitles)))]) + status = 'New subtitles downloaded: %s' % ' '.join([ + "" + subliminal.language.Language(x).name + "" for x in + sorted(list(set(ep_obj.subtitles).difference(previous_subtitles)))]) else: status = 'No subtitles downloaded' ui.notifications.message('Subtitles Search', status) @@ -3303,29 +3383,29 @@ class Home: # sanitize: if sceneSeason in ['null', '']: sceneSeason = None if sceneEpisode in ['null', '']: sceneEpisode = None - - result = { + + result = { 'success': True, 'forSeason': forSeason, 'forEpisode': forEpisode, } - - # retrieve the episode object and fail if we can't get one + + # retrieve the episode object and fail if we can't get one ep_obj = _getEpisode(show, forSeason, forEpisode) if isinstance(ep_obj, str): result['success'] = False result['errorMessage'] = ep_obj else: - logger.log(u"setEpisodeSceneNumbering for %s from %sx%s to %sx%s" % + logger.log(u"setEpisodeSceneNumbering for %s from %sx%s to %sx%s" % (show, forSeason, forEpisode, sceneSeason, sceneEpisode), logger.DEBUG) - + forSeason = int(forSeason) forEpisode = int(forEpisode) if sceneSeason is not None: sceneSeason = int(sceneSeason) if sceneEpisode is not None: sceneEpisode = int(sceneEpisode) - + set_scene_numbering(show, forSeason, forEpisode, sceneSeason, sceneEpisode) - + sn = get_scene_numbering(show, forSeason, forEpisode) if sn: (result['sceneSeason'], result['sceneEpisode']) = sn @@ -3333,7 +3413,7 @@ class Home: (result['sceneSeason'], result['sceneEpisode']) = (None, None) return json.dumps(result) - + @cherrypy.expose def retryEpisode(self, show, season, episode): @@ -3350,10 +3430,10 @@ class Home: # make a queue item for it and put it on the queue ep_queue_item = search_queue.FailedQueueItem(ep_obj.show, {ep_obj.season: ep_obj.episode}) - sickbeard.searchQueueScheduler.action.add_item(ep_queue_item) # @UndefinedVariable + sickbeard.searchQueueScheduler.action.add_item(ep_queue_item) # @UndefinedVariable # wait until the queue item tells us whether it worked or not - while ep_queue_item.success == None: # @UndefinedVariable + while ep_queue_item.success is None: # @UndefinedVariable time.sleep(1) # return the correct json value @@ -3368,10 +3448,11 @@ class Home: return json.dumps({'result': statusStrings[ep_obj.status], 'quality': quality_class - }) + }) return json.dumps({'result': 'failure'}) + # try: # # @@ -3394,10 +3475,8 @@ class Home: # return 
json.dumps({'result': 'success'}) class UI: - @cherrypy.expose def add_message(self): - ui.notifications.message('Test 1', 'This is test number 1') ui.notifications.error('Test 2', 'This is test number 2') @@ -3409,15 +3488,14 @@ class UI: cur_notification_num = 1 for cur_notification in ui.notifications.get_notifications(): messages['notification-' + str(cur_notification_num)] = {'title': cur_notification.title, - 'message': cur_notification.message, - 'type': cur_notification.type} + 'message': cur_notification.message, + 'type': cur_notification.type} cur_notification_num += 1 return json.dumps(messages) class WebInterface: - @cherrypy.expose def robots_txt(self): """ Keep web crawlers out """ @@ -3528,14 +3606,23 @@ class WebInterface: done_show_list = [] qualList = Quality.DOWNLOADED + Quality.SNATCHED + [ARCHIVED, IGNORED] - sql_results = myDB.select("SELECT *, tv_shows.status as show_status FROM tv_episodes, tv_shows WHERE season != 0 AND airdate >= ? AND airdate < ? AND tv_shows.indexer_id = tv_episodes.showid AND tv_episodes.status NOT IN (" + ','.join(['?'] * len(qualList)) + ")", [today, next_week] + qualList) + sql_results = myDB.select( + "SELECT *, tv_shows.status as show_status FROM tv_episodes, tv_shows WHERE season != 0 AND airdate >= ? AND airdate < ? AND tv_shows.indexer_id = tv_episodes.showid AND tv_episodes.status NOT IN (" + ','.join( + ['?'] * len(qualList)) + ")", [today, next_week] + qualList) for cur_result in sql_results: done_show_list.append(int(cur_result["showid"])) - more_sql_results = myDB.select("SELECT *, tv_shows.status as show_status FROM tv_episodes outer_eps, tv_shows WHERE season != 0 AND showid NOT IN (" + ','.join(['?'] * len(done_show_list)) + ") AND tv_shows.indexer_id = outer_eps.showid AND airdate = (SELECT airdate FROM tv_episodes inner_eps WHERE inner_eps.season != 0 AND inner_eps.showid = outer_eps.showid AND inner_eps.airdate >= ? ORDER BY inner_eps.airdate ASC LIMIT 1) AND outer_eps.status NOT IN (" + ','.join(['?'] * len(Quality.DOWNLOADED + Quality.SNATCHED)) + ")", done_show_list + [next_week] + Quality.DOWNLOADED + Quality.SNATCHED) + more_sql_results = myDB.select( + "SELECT *, tv_shows.status as show_status FROM tv_episodes outer_eps, tv_shows WHERE season != 0 AND showid NOT IN (" + ','.join( + ['?'] * len( + done_show_list)) + ") AND tv_shows.indexer_id = outer_eps.showid AND airdate = (SELECT airdate FROM tv_episodes inner_eps WHERE inner_eps.season != 0 AND inner_eps.showid = outer_eps.showid AND inner_eps.airdate >= ? ORDER BY inner_eps.airdate ASC LIMIT 1) AND outer_eps.status NOT IN (" + ','.join( + ['?'] * len(Quality.DOWNLOADED + Quality.SNATCHED)) + ")", + done_show_list + [next_week] + Quality.DOWNLOADED + Quality.SNATCHED) sql_results += more_sql_results - more_sql_results = myDB.select("SELECT *, tv_shows.status as show_status FROM tv_episodes, tv_shows WHERE season != 0 AND tv_shows.indexer_id = tv_episodes.showid AND airdate < ? AND airdate >= ? AND tv_episodes.status = ? AND tv_episodes.status NOT IN (" + ','.join(['?'] * len(qualList)) + ")", [today, recently, WANTED] + qualList) + more_sql_results = myDB.select( + "SELECT *, tv_shows.status as show_status FROM tv_episodes, tv_shows WHERE season != 0 AND tv_shows.indexer_id = tv_episodes.showid AND airdate < ? AND airdate >= ? AND tv_episodes.status = ? 
AND tv_episodes.status NOT IN (" + ','.join( + ['?'] * len(qualList)) + ")", [today, recently, WANTED] + qualList) sql_results += more_sql_results # sort by localtime @@ -3550,33 +3637,35 @@ class WebInterface: # add localtime to the dict for index, item in enumerate(sql_results): - sql_results[index]['localtime'] = network_timezones.parse_date_time(item['airdate'],item['airs'],item['network']) + sql_results[index]['localtime'] = network_timezones.parse_date_time(item['airdate'], item['airs'], + item['network']) #Normalize/Format the Airing Time try: locale.setlocale(locale.LC_TIME, 'us_US') sql_results[index]['localtime_string'] = sql_results[index]['localtime'].strftime("%A %H:%M %p") - locale.setlocale(locale.LC_ALL, '') #Reseting to default locale + locale.setlocale(locale.LC_ALL, '') #Reseting to default locale except: sql_results[index]['localtime_string'] = sql_results[index]['localtime'].strftime("%A %H:%M %p") sql_results.sort(sorts[sickbeard.COMING_EPS_SORT]) t = PageTemplate(file="comingEpisodes.tmpl") -# paused_item = { 'title': '', 'path': 'toggleComingEpsDisplayPaused' } -# paused_item['title'] = 'Hide Paused' if sickbeard.COMING_EPS_DISPLAY_PAUSED else 'Show Paused' - paused_item = { 'title': 'View Paused:', 'path': {'': ''} } - paused_item['path'] = {'Hide': 'toggleComingEpsDisplayPaused'} if sickbeard.COMING_EPS_DISPLAY_PAUSED else {'Show': 'toggleComingEpsDisplayPaused'} + # paused_item = { 'title': '', 'path': 'toggleComingEpsDisplayPaused' } + # paused_item['title'] = 'Hide Paused' if sickbeard.COMING_EPS_DISPLAY_PAUSED else 'Show Paused' + paused_item = {'title': 'View Paused:', 'path': {'': ''}} + paused_item['path'] = {'Hide': 'toggleComingEpsDisplayPaused'} if sickbeard.COMING_EPS_DISPLAY_PAUSED else { + 'Show': 'toggleComingEpsDisplayPaused'} t.submenu = [ - { 'title': 'Sort by:', 'path': {'Date': 'setComingEpsSort/?sort=date', - 'Show': 'setComingEpsSort/?sort=show', - 'Network': 'setComingEpsSort/?sort=network', - }}, + {'title': 'Sort by:', 'path': {'Date': 'setComingEpsSort/?sort=date', + 'Show': 'setComingEpsSort/?sort=show', + 'Network': 'setComingEpsSort/?sort=network', + }}, - { 'title': 'Layout:', 'path': {'Banner': 'setComingEpsLayout/?layout=banner', - 'Poster': 'setComingEpsLayout/?layout=poster', - 'List': 'setComingEpsLayout/?layout=list', - }}, + {'title': 'Layout:', 'path': {'Banner': 'setComingEpsLayout/?layout=banner', + 'Poster': 'setComingEpsLayout/?layout=poster', + 'List': 'setComingEpsLayout/?layout=list', + }}, paused_item, ] @@ -3590,7 +3679,6 @@ class WebInterface: else: t.layout = sickbeard.COMING_EPS_LAYOUT - return _munge(t) # Raw iCalendar implementation by Pedro Jose Pereira Vieito (@pvieito). 
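# --- Illustrative sketch only, not part of the patch: the setlocale/strftime dance in
# comingEpisodes() above amounts to "format the air time in a US locale when available,
# otherwise fall back to whatever locale is active". 'us_US' is copied verbatim from the
# code above; the helper name and the use of locale.Error are assumptions for clarity.
import locale

def format_airtime(local_dt):
    try:
        locale.setlocale(locale.LC_TIME, 'us_US')
        formatted = local_dt.strftime("%A %H:%M %p")
        locale.setlocale(locale.LC_ALL, '')  # reset back to the default locale
    except locale.Error:
        # locale not installed on this system; format with the currently active one
        formatted = local_dt.strftime("%A %H:%M %p")
    return formatted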
@@ -3608,7 +3696,7 @@ class WebInterface: time_re = re.compile('([0-9]{1,2})\:([0-9]{2})(\ |)([AM|am|PM|pm]{2})') - # Create a iCal string + # Create a iCal string ical = 'BEGIN:VCALENDAR\r\n' ical += 'VERSION:2.0\r\n' ical += 'X-WR-CALNAME:Sick Beard\r\n' @@ -3623,25 +3711,32 @@ class WebInterface: future_date = (datetime.date.today() + datetime.timedelta(weeks=52)).toordinal() # Get all the shows that are not paused and are currently on air (from kjoconnor Fork) - calendar_shows = myDB.select("SELECT show_name, indexer_id, network, airs, runtime FROM tv_shows WHERE status = 'Continuing' OR status = 'Returning Series' AND paused != '1'") + calendar_shows = myDB.select( + "SELECT show_name, indexer_id, network, airs, runtime FROM tv_shows WHERE status = 'Continuing' OR status = 'Returning Series' AND paused != '1'") for show in calendar_shows: # Get all episodes of this show airing between today and next month - episode_list = myDB.select("SELECT indexerid, name, season, episode, description, airdate FROM tv_episodes WHERE airdate >= ? AND airdate < ? AND showid = ?", (past_date, future_date, int(show["indexer_id"]))) + episode_list = myDB.select( + "SELECT indexerid, name, season, episode, description, airdate FROM tv_episodes WHERE airdate >= ? AND airdate < ? AND showid = ?", + (past_date, future_date, int(show["indexer_id"]))) for episode in episode_list: air_date_time = network_timezones.parse_date_time(episode['airdate'], show["airs"], show['network']) - + # Create event for episode ical = ical + 'BEGIN:VEVENT\r\n' ical = ical + 'DTSTART;VALUE=DATE:' + str(air_date_time.date()).replace("-", "") + '\r\n' ical = ical + 'SUMMARY:' + show['show_name'] + ': ' + episode['name'] + '\r\n' - ical = ical + 'UID:Sick-Beard-' + str(datetime.date.today().isoformat()) + '-' + show['show_name'].replace(" ", "-") + '-E' + str(episode['episode']) + 'S' + str(episode['season']) + '\r\n' + ical = ical + 'UID:Sick-Beard-' + str(datetime.date.today().isoformat()) + '-' + show[ + 'show_name'].replace(" ", "-") + '-E' + str(episode['episode']) + 'S' + str( + episode['season']) + '\r\n' if (episode['description'] != ''): - ical = ical + 'DESCRIPTION:' + show['airs'] + ' on ' + show['network'] + '\\n\\n' + episode['description'].splitlines()[0] + '\r\n' + ical = ical + 'DESCRIPTION:' + show['airs'] + ' on ' + show['network'] + '\\n\\n' + \ + episode['description'].splitlines()[0] + '\r\n' else: ical = ical + 'DESCRIPTION:' + show['airs'] + ' on ' + show['network'] + '\r\n' - ical = ical + 'LOCATION:' + 'Episode ' + str(episode['episode']) + ' - Season ' + str(episode['season']) + '\r\n' + ical = ical + 'LOCATION:' + 'Episode ' + str(episode['episode']) + ' - Season ' + str( + episode['season']) + '\r\n' ical = ical + 'END:VEVENT\r\n' # Ending the iCal @@ -3663,4 +3758,4 @@ class WebInterface: errorlogs = ErrorLogs() - ui = UI() + ui = UI() \ No newline at end of file diff --git a/sickbeard/webserveInit.py b/sickbeard/webserveInit.py index 130be29f..7c508e40 100644 --- a/sickbeard/webserveInit.py +++ b/sickbeard/webserveInit.py @@ -27,22 +27,23 @@ from sickbeard.webserve import WebInterface from sickbeard.helpers import create_https_certificates -def initWebServer(options = {}): - options.setdefault('port', 8081) - options.setdefault('host', '0.0.0.0') - options.setdefault('log_dir', None) - options.setdefault('username', '') - options.setdefault('password', '') - options.setdefault('web_root', '/') - assert isinstance(options['port'], int) - assert 'data_root' in options - def http_error_401_hander(status, 
message, traceback, version): - """ Custom handler for 401 error """ - if status != "401 Unauthorized": - logger.log(u"CherryPy caught an error: %s %s" % (status, message), logger.ERROR) - logger.log(traceback, logger.DEBUG) - return r''' +def initWebServer(options={}): + options.setdefault('port', 8081) + options.setdefault('host', '0.0.0.0') + options.setdefault('log_dir', None) + options.setdefault('username', '') + options.setdefault('password', '') + options.setdefault('web_root', '/') + assert isinstance(options['port'], int) + assert 'data_root' in options + + def http_error_401_hander(status, message, traceback, version): + """ Custom handler for 401 error """ + if status != "401 Unauthorized": + logger.log(u"CherryPy caught an error: %s %s" % (status, message), logger.ERROR) + logger.log(traceback, logger.DEBUG) + return r''' %s @@ -54,9 +55,9 @@ def initWebServer(options = {}): ''' % ('Access denied', status) - def http_error_404_hander(status, message, traceback, version): - """ Custom handler for 404 error, redirect back to main page """ - return r''' + def http_error_404_hander(status, message, traceback, version): + """ Custom handler for 404 error, redirect back to main page """ + return r''' 404 @@ -72,125 +73,124 @@ def initWebServer(options = {}): ''' % options['web_root'] - # cherrypy setup - enable_https = options['enable_https'] - https_cert = options['https_cert'] - https_key = options['https_key'] + # cherrypy setup + enable_https = options['enable_https'] + https_cert = options['https_cert'] + https_key = options['https_key'] - if enable_https: - # If either the HTTPS certificate or key do not exist, make some self-signed ones. - if not (https_cert and os.path.exists(https_cert)) or not (https_key and os.path.exists(https_key)): - if not create_https_certificates(https_cert, https_key): - logger.log(u"Unable to create CERT/KEY files, disabling HTTPS") - sickbeard.ENABLE_HTTPS = False - enable_https = False - - if not (os.path.exists(https_cert) and os.path.exists(https_key)): - logger.log(u"Disabled HTTPS because of missing CERT and KEY files", logger.WARNING) + if enable_https: + # If either the HTTPS certificate or key do not exist, make some self-signed ones. 
+ if not (https_cert and os.path.exists(https_cert)) or not (https_key and os.path.exists(https_key)): + if not create_https_certificates(https_cert, https_key): + logger.log(u"Unable to create CERT/KEY files, disabling HTTPS") sickbeard.ENABLE_HTTPS = False enable_https = False - mime_gzip = ('text/html', - 'text/plain', - 'text/css', - 'text/javascript', - 'application/javascript', - 'text/x-javascript', - 'application/x-javascript', - 'text/x-json', - 'application/json' - ) + if not (os.path.exists(https_cert) and os.path.exists(https_key)): + logger.log(u"Disabled HTTPS because of missing CERT and KEY files", logger.WARNING) + sickbeard.ENABLE_HTTPS = False + enable_https = False - options_dict = { - 'server.socket_port': options['port'], - 'server.socket_host': options['host'], - 'log.screen': False, - 'engine.autoreload.on': False, - 'engine.autoreload.frequency': 100, - 'engine.reexec_retry': 100, - 'tools.gzip.on': True, - 'tools.gzip.mime_types': mime_gzip, - 'error_page.401': http_error_401_hander, - 'error_page.404': http_error_404_hander, - } + mime_gzip = ('text/html', + 'text/plain', + 'text/css', + 'text/javascript', + 'application/javascript', + 'text/x-javascript', + 'application/x-javascript', + 'text/x-json', + 'application/json' + ) - if enable_https: - options_dict['server.ssl_certificate'] = https_cert - options_dict['server.ssl_private_key'] = https_key - protocol = "https" + options_dict = { + 'server.socket_port': options['port'], + 'server.socket_host': options['host'], + 'log.screen': False, + 'engine.autoreload.on': False, + 'engine.autoreload.frequency': 100, + 'engine.reexec_retry': 100, + 'tools.gzip.on': True, + 'tools.gzip.mime_types': mime_gzip, + 'error_page.401': http_error_401_hander, + 'error_page.404': http_error_404_hander, + } + + if enable_https: + options_dict['server.ssl_certificate'] = https_cert + options_dict['server.ssl_private_key'] = https_key + protocol = "https" + else: + protocol = "http" + + logger.log(u"Starting Sick Beard on " + protocol + "://" + str(options['host']) + ":" + str(options['port']) + "/") + cherrypy.config.update(options_dict) + + # setup cherrypy logging + if options['log_dir'] and os.path.isdir(options['log_dir']): + cherrypy.config.update({'log.access_file': os.path.join(options['log_dir'], "cherrypy.log")}) + logger.log('Using %s for cherrypy log' % cherrypy.config['log.access_file']) + + conf = { + '/': { + 'tools.staticdir.root': options['data_root'], + 'tools.encode.on': True, + 'tools.encode.encoding': 'utf-8', + }, + '/images': { + 'tools.staticdir.on': True, + 'tools.staticdir.dir': 'images' + }, + '/js': { + 'tools.staticdir.on': True, + 'tools.staticdir.dir': 'js' + }, + '/css': { + 'tools.staticdir.on': True, + 'tools.staticdir.dir': 'css' + }, + } + app = cherrypy.tree.mount(WebInterface(), options['web_root'], conf) + + # auth + if options['username'] != "" and options['password'] != "": + if sickbeard.CALENDAR_UNPROTECTED: + checkpassword = cherrypy.lib.auth_basic.checkpassword_dict({options['username']: options['password']}) + app.merge({ + '/': { + 'tools.auth_basic.on': True, + 'tools.auth_basic.realm': 'SickBeard', + 'tools.auth_basic.checkpassword': checkpassword + }, + '/api': { + 'tools.auth_basic.on': False + }, + '/calendar': { + 'tools.auth_basic.on': False + }, + '/api/builder': { + 'tools.auth_basic.on': True, + 'tools.auth_basic.realm': 'SickBeard', + 'tools.auth_basic.checkpassword': checkpassword + } + }) else: - protocol = "http" + checkpassword = 
cherrypy.lib.auth_basic.checkpassword_dict({options['username']: options['password']}) + app.merge({ + '/': { + 'tools.auth_basic.on': True, + 'tools.auth_basic.realm': 'SickBeard', + 'tools.auth_basic.checkpassword': checkpassword + }, + '/api': { + 'tools.auth_basic.on': False + }, + '/api/builder': { + 'tools.auth_basic.on': True, + 'tools.auth_basic.realm': 'SickBeard', + 'tools.auth_basic.checkpassword': checkpassword + } + }) - logger.log(u"Starting Sick Beard on "+protocol+"://" + str(options['host']) + ":" + str(options['port']) + "/") - cherrypy.config.update(options_dict) - - # setup cherrypy logging - if options['log_dir'] and os.path.isdir(options['log_dir']): - cherrypy.config.update({ 'log.access_file': os.path.join(options['log_dir'], "cherrypy.log") }) - logger.log('Using %s for cherrypy log' % cherrypy.config['log.access_file']) - - conf = { - '/': { - 'tools.staticdir.root': options['data_root'], - 'tools.encode.on': True, - 'tools.encode.encoding': 'utf-8', - }, - '/images': { - 'tools.staticdir.on': True, - 'tools.staticdir.dir': 'images' - }, - '/js': { - 'tools.staticdir.on': True, - 'tools.staticdir.dir': 'js' - }, - '/css': { - 'tools.staticdir.on': True, - 'tools.staticdir.dir': 'css' - }, - } - app = cherrypy.tree.mount(WebInterface(), options['web_root'], conf) - - # auth - if options['username'] != "" and options['password'] != "": - if sickbeard.CALENDAR_UNPROTECTED: - checkpassword = cherrypy.lib.auth_basic.checkpassword_dict({options['username']: options['password']}) - app.merge({ - '/': { - 'tools.auth_basic.on': True, - 'tools.auth_basic.realm': 'SickBeard', - 'tools.auth_basic.checkpassword': checkpassword - }, - '/api':{ - 'tools.auth_basic.on': False - }, - '/calendar':{ - 'tools.auth_basic.on': False - }, - '/api/builder':{ - 'tools.auth_basic.on': True, - 'tools.auth_basic.realm': 'SickBeard', - 'tools.auth_basic.checkpassword': checkpassword - } - }) - else: - checkpassword = cherrypy.lib.auth_basic.checkpassword_dict({options['username']: options['password']}) - app.merge({ - '/': { - 'tools.auth_basic.on': True, - 'tools.auth_basic.realm': 'SickBeard', - 'tools.auth_basic.checkpassword': checkpassword - }, - '/api':{ - 'tools.auth_basic.on': False - }, - '/api/builder':{ - 'tools.auth_basic.on': True, - 'tools.auth_basic.realm': 'SickBeard', - 'tools.auth_basic.checkpassword': checkpassword - } - }) - - - cherrypy.server.start() - cherrypy.server.wait() + cherrypy.server.start() + cherrypy.server.wait()
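# --- Illustrative usage sketch for the reindented initWebServer() above; the option
# values here are assumptions, not taken from the patch. 'data_root' is asserted to be
# present and 'port' must be an int; host/port/log_dir/username/password/web_root get
# defaults via setdefault(), while enable_https/https_cert/https_key are read
# unconditionally and therefore must be supplied by the caller.
from sickbeard.webserveInit import initWebServer

initWebServer({
    'port': 8081,
    'host': '0.0.0.0',
    'data_root': '/opt/sickbeard/gui/slick',  # hypothetical path to the bundled GUI data
    'log_dir': None,
    'username': '',
    'password': '',
    'web_root': '/',
    'enable_https': False,
    'https_cert': 'server.crt',
    'https_key': 'server.key',
})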