Assorted code upgrades and bug fixes

echel0n 2014-03-20 01:15:22 -07:00
parent 4b1dc8a63e
commit ce5053f25d
12 changed files with 152 additions and 94 deletions

View file

@@ -163,7 +163,7 @@ BACKLOG_SEARCH_FREQUENCY = 21
MIN_SEARCH_FREQUENCY = 10
DEFAULT_SEARCH_FREQUENCY = 60
DEFAULT_SEARCH_FREQUENCY = 40
EZRSS = False

View file

@@ -297,7 +297,7 @@ class StatusStrings:
else:
return self.statusStrings[status] + " (" + Quality.qualityStrings[quality] + ")"
else:
return self.statusStrings[name]
return self.statusStrings[name] if self.statusStrings.has_key(name) else ''
def has_key(self, name):
return name in self.statusStrings or name in Quality.DOWNLOADED or name in Quality.SNATCHED or name in Quality.SNATCHED_PROPER or name in Quality.SNATCHED_BEST
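For reference, a minimal standalone sketch of the guarded lookup introduced above, assuming a plain dict of status names (the mapping below is a made-up example, not the real statusStrings table):

statusStrings = {2: "Snatched", 4: "Downloaded"}  # example mapping only

def status_name(status):
    # Mirror the has_key guard above: unknown keys yield '' instead of a KeyError.
    return statusStrings[status] if status in statusStrings else ''

print(status_name(4))   # Downloaded
print(status_name(99))  # (empty string)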

View file

@@ -18,92 +18,127 @@
from sickbeard.encodingKludge import fixStupidEncodings
def ex(e):
"""
Returns a unicode string from the exception text if it exists.
"""
# sanity check
if not e.args or not e.args[0]:
return ""
e_message = fixStupidEncodings(e.args[0], True)
# if fixStupidEncodings doesn't fix it then maybe it's not a string, in which case we'll try printing it anyway
if not e_message:
try:
e_message = str(e.args[0])
except:
e_message = ""
return e_message
def ex(e):
"""
Returns a unicode string from the exception text if it exists.
"""
e_message = u""
if not e or not e.args:
return e_message
for arg in e.args:
if arg is not None:
if isinstance(arg, (str, unicode)):
fixed_arg = fixStupidEncodings(arg, True)
else:
try:
fixed_arg = u"error " + fixStupidEncodings(str(arg), True)
except:
fixed_arg = None
if fixed_arg:
if not e_message:
e_message = fixed_arg
else:
e_message = e_message + " : " + fixed_arg
return e_message
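A short usage sketch of the reworked ex() above: every exception argument is folded into one message, with non-string args rendered through str(). The import path is assumed, and the output shown assumes fixStupidEncodings returns plain ASCII input unchanged.

from sickbeard.exceptions import ex  # assumed module path for the helper above

try:
    raise IOError(13, "Permission denied")
except IOError as e:
    # e.args == (13, 'Permission denied'); the int arg becomes "error 13"
    # and the parts are joined with " : ", giving roughly:
    # u"error 13 : Permission denied"
    print(ex(e))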
class SickBeardException(Exception):
"Generic SickBeard Exception - should never be thrown, only subclassed"
"Generic SickBeard Exception - should never be thrown, only subclassed"
class ConfigErrorException(SickBeardException):
"Error in the config file"
"Error in the config file"
class LaterException(SickBeardException):
"Something bad happened that I'll make a real exception for later"
"Something bad happened that I'll make a real exception for later"
class NoNFOException(SickBeardException):
"No NFO was found!"
"No NFO was found!"
class NoShowDirException(SickBeardException):
"Unable to find the show's directory"
"Unable to find the show's directory"
class FileNotFoundException(SickBeardException):
"The specified file doesn't exist"
"The specified file doesn't exist"
class MultipleDBEpisodesException(SickBeardException):
"Found multiple episodes in the DB! Must fix DB first"
"Found multiple episodes in the DB! Must fix DB first"
class MultipleDBShowsException(SickBeardException):
"Found multiple shows in the DB! Must fix DB first"
"Found multiple shows in the DB! Must fix DB first"
class MultipleShowObjectsException(SickBeardException):
"Found multiple objects for the same show! Something is very wrong"
"Found multiple objects for the same show! Something is very wrong"
class WrongShowException(SickBeardException):
"The episode doesn't belong to the same show as its parent folder"
"The episode doesn't belong to the same show as its parent folder"
class ShowNotFoundException(SickBeardException):
"The show wasn't found in the indexer's listings"
"The show wasn't found on the Indexer"
class EpisodeNotFoundException(SickBeardException):
"The episode wasn't found in the indexer's listings"
"The episode wasn't found on the Indexer"
class NewzbinAPIThrottled(SickBeardException):
"Newzbin has throttled us, deal with it"
"Newzbin has throttled us, deal with it"
class TVRageException(SickBeardException):
"TVRage API did something bad"
"TVRage API did something bad"
class ShowDirNotFoundException(SickBeardException):
"The show dir doesn't exist"
"The show dir doesn't exist"
class AuthException(SickBeardException):
"Your authentication information is incorrect"
"Your authentication information is incorrect"
class EpisodeDeletedException(SickBeardException):
"This episode has been deleted"
"This episode has been deleted"
class CantRefreshException(SickBeardException):
"The show can't be refreshed right now"
"The show can't be refreshed right now"
class CantUpdateException(SickBeardException):
"The show can't be updated right now"
"The show can't be updated right now"
class PostProcessingFailed(SickBeardException):
"Post-processing the episode failed"
"Post-processing the episode failed"
class FailedProcessingFailed(SickBeardException):
"Post-processing the failed release failed"
class FailedHistoryMultiSnatchException(SickBeardException):
"Episode was snatched again before the first one was done"
class FailedHistoryNotFoundException(SickBeardException):
"The release was not found in the failed download history tracker"

View file

@@ -198,7 +198,7 @@ Returns a byte-string retrieved from the url provider.
def _remove_file_failed(file):
try:
os.remove(file)
ek.ek(os.remove,file)
except:
pass
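os.remove is now routed through ek.ek so the filename is encoded consistently before hitting the filesystem. A rough approximation of what such a wrapper does (a sketch only, not the actual sickbeard.encodingKludge implementation):

import sys

def ek_sketch(fn, *args, **kwargs):
    # Approximation only: encode unicode arguments to the filesystem
    # encoding before calling fn (Python 2 semantics).
    encoding = sys.getfilesystemencoding() or 'utf-8'
    fixed = [a.encode(encoding) if isinstance(a, unicode) else a for a in args]
    return fn(*fixed, **kwargs)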
@@ -687,8 +687,8 @@ def parse_json(data):
try:
parsedJSON = json.loads(data)
except ValueError:
logger.log(u"Error trying to decode json data:" + data, logger.ERROR)
except ValueError, e:
logger.log(u"Error trying to decode json data. Error: " + ex(e), logger.DEBUG)
return None
return parsedJSON
@@ -710,7 +710,7 @@ def parse_xml(data, del_xmlns=False):
try:
parsedXML = etree.fromstring(data)
except Exception, e:
logger.log(u"Error trying to parse xml data: " + data + " to Elementtree, Error: " + ex(e), logger.DEBUG)
logger.log(u"Error trying to parse xml data. Error: " + ex(e), logger.DEBUG)
parsedXML = None
return parsedXML
@@ -940,4 +940,22 @@ def suffix(d):
return 'th' if 11<=d<=13 else {1:'st',2:'nd',3:'rd'}.get(d%10, 'th')
def custom_strftime(format, t):
return t.strftime(format).replace('{S}', str(t.day) + suffix(t.day))
return t.strftime(format).replace('{S}', str(t.day) + suffix(t.day))
def is_hidden_folder(folder):
"""
Returns True if folder is hidden.
On Linux based systems hidden folders start with . (dot)
folder: Full path of folder to check
"""
if ek.ek(os.path.isdir, folder):
if ek.ek(os.path.basename, folder).startswith('.'):
return True
return False
def real_path(path):
"""
Returns: the canonicalized absolute pathname. The resulting path will have no symbolic link, '/./' or '/../' components.
"""
return ek.ek(os.path.normpath, ek.ek(os.path.normcase, ek.ek(os.path.realpath, path)))
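A brief usage sketch for the two helpers added above; the paths are made-up examples and the import assumes they live in sickbeard.helpers as the hunk context suggests.

from sickbeard import helpers  # assumed location of the new helpers

print(helpers.is_hidden_folder('/home/user/tv/.hidden_show'))  # True if that dir exists
print(helpers.is_hidden_folder('/home/user/tv/Show Name'))     # False

# real_path canonicalizes case, symlinks, '.' and '..', so two spellings of
# the same location compare equal:
print(helpers.real_path('/opt/sickbeard/lib/dateutil/zoneinfo/../zoneinfo')
      == helpers.real_path('/opt/sickbeard/lib/dateutil/zoneinfo'))  # True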

View file

@@ -172,7 +172,7 @@ class XBMC_12PlusMetadata(generic.GenericMetadata):
genre = etree.SubElement(tv_node, "genre")
if getattr(myShow, 'genre', None) is not None:
if isinstance(myShow["genre"], basestring):
genre.text = " / ".join(x.strip() for x in myShow["genre"].split('|') if x.strip())
genre.text = " / ".join(x.strip() for x in myShow["genre"].split('|') if x.strip())
premiered = etree.SubElement(tv_node, "premiered")
if getattr(myShow, 'firstaired', None) is not None:
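For illustration, the genre join above turns the indexer's pipe-delimited genre field into a ' / ' separated string (the input below is a made-up example):

genre_field = "|Comedy|Drama|"
print(" / ".join(x.strip() for x in genre_field.split('|') if x.strip()))
# Comedy / Drama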

View file

@@ -22,7 +22,7 @@ from sickbeard import db
from sickbeard import helpers
from sickbeard import logger
from sickbeard import encodingKludge as ek
from os.path import basename, realpath, join, isfile
from os.path import basename, join, isfile
import os
import re
import datetime
@@ -50,9 +50,9 @@ def _remove_old_zoneinfo():
else:
return
cur_file = ek.ek(realpath, u'lib/dateutil/zoneinfo/' + cur_zoneinfo)
cur_file = helpers.real_path(ek.ek(join,ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), cur_zoneinfo))
for (path, dirs, files) in ek.ek(os.walk,ek.ek(realpath,u'lib/dateutil/zoneinfo/')):
for (path, dirs, files) in ek.ek(os.walk,helpers.real_path(ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__))):
for filename in files:
if filename.endswith('.tar.gz'):
file_w_path = ek.ek(join,path,filename)
@@ -90,7 +90,8 @@ def _update_zoneinfo():
# now load the new zoneinfo
url_tar = u'https://github.com/Prinz23/sb_network_timezones/raw/master/' + new_zoneinfo
zonefile = ek.ek(realpath, u'lib/dateutil/zoneinfo/' + new_zoneinfo)
zonefile = helpers.real_path(ek.ek(join,ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), new_zoneinfo))
zonefile_tmp = re.sub(r"\.tar\.gz$",'.tmp', zonefile)
if (ek.ek(os.path.exists,zonefile_tmp)):
@@ -103,6 +104,10 @@ def _update_zoneinfo():
if not helpers.download_file(url_tar, zonefile_tmp):
return
if not ek.ek(os.path.exists,zonefile_tmp):
logger.log(u"Download of " + zonefile_tmp + " failed.",logger.ERROR)
return
new_hash = str(helpers.md5_for_file(zonefile_tmp))
if (zoneinfo_md5.upper() == new_hash.upper()):
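The added existence check plus the md5 comparison follow a plain download-then-verify pattern; a generic sketch (verify_download is not a SickBeard function, and the hash handling mirrors the upper-cased comparison above):

import hashlib
import os

def verify_download(path, expected_md5):
    # Bail out if the download never arrived, otherwise compare its md5
    # against the published hash, case-insensitively.
    if not os.path.exists(path):
        return False
    md5 = hashlib.md5()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(8192), b''):
            md5.update(chunk)
    return md5.hexdigest().upper() == expected_md5.upper()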
@@ -110,7 +115,7 @@ def _update_zoneinfo():
try:
# remove the old zoneinfo file
if (cur_zoneinfo is not None):
old_file = ek.ek(realpath, u'lib/dateutil/zoneinfo/' + cur_zoneinfo)
old_file = helpers.real_path(ek.ek(join,ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), cur_zoneinfo))
if (ek.ek(os.path.exists,old_file)):
ek.ek(os.remove,old_file)
# rename downloaded file
@@ -201,7 +206,11 @@ def get_network_timezone(network, network_dict):
try:
if lib.dateutil.zoneinfo.ZONEINFOFILE is not None:
return tz.gettz(network_dict[network])
n_t = tz.gettz(network_dict[network])
if n_t is not None:
return n_t
else:
return sb_timezone
else:
return sb_timezone
except:
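The change above guards against tz.gettz() returning None for an unrecognized zone name; a minimal sketch of the same fallback pattern (using the system dateutil rather than the bundled lib.dateutil, with tzlocal() standing in for sb_timezone):

from dateutil import tz

sb_timezone = tz.tzlocal()  # stand-in for the real sb_timezone

def safe_gettz(name):
    # Fall back to sb_timezone when the zone name is unknown, mirroring
    # the None check added above.
    zone = tz.gettz(name)
    return zone if zone is not None else sb_timezone

print(safe_gettz('America/New_York'))  # tzfile on most systems
print(safe_gettz('Not/AZone'))         # falls back to sb_timezone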

View file

@@ -42,7 +42,7 @@ from sickbeard import clients
from sickbeard import tv
from lib import requests
from bs4 import BeautifulSoup
from lib.bs4 import BeautifulSoup
from lib.unidecode import unidecode
class KATProvider(generic.TorrentProvider):
@@ -201,11 +201,7 @@ class KATProvider(generic.TorrentProvider):
search_string['Episode'].append(ep_string)
else:
for show_name in set(allPossibleShowNames(ep_obj.show)):
ep_string = sanitizeSceneName(show_name) +' '+ \
sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode} +'|'+\
sickbeard.config.naming_ep_type[0] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode} +'|'+\
sickbeard.config.naming_ep_type[3] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode} + ' %s category:tv' %add_string \
ep_string = sanitizeSceneName(show_name) +' '+'season:'+str(ep_obj.season)+' episode:'+str(ep_obj.episode)
search_string['Episode'].append(re.sub('\s+', ' ', ep_string))
return [search_string]
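For illustration, the simplified KAT episode search string built above comes out roughly like this (show name and numbering are made up; sanitizeSceneName is assumed to have already cleaned the name):

import re

show_name = "Show Name"  # hypothetical sanitizeSceneName() result
season, episode = 3, 8
ep_string = show_name + ' ' + 'season:' + str(season) + ' episode:' + str(episode)
print(re.sub(r'\s+', ' ', ep_string))
# Show Name season:3 episode:8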
@@ -341,7 +337,7 @@ class KATProvider(generic.TorrentProvider):
helpers.chmodAsParent(magnetFileName)
except EnvironmentError:
except EnvironmentError, e:
logger.log("Unable to save the file: " + ex(e), logger.ERROR)
return False
@@ -364,7 +360,7 @@ class KATProvider(generic.TorrentProvider):
for sqlShow in sqlResults:
curShow = helpers.findCertainShow(sickbeard.showList, int(sqlShow["showid"]))
curEp = curShow.getEpisode(int(sqlShow["season"]),int(sqlShow["episode"]))
curEp = curShow.getEpisode(int(sqlShow["season"]), int(sqlShow["episode"]))
searchString = self._get_episode_search_strings(curEp, add_string='PROPER|REPACK')
for item in self._doSearch(searchString[0]):

View file

@@ -21,7 +21,7 @@ from __future__ import with_statement
import sys
import os
import traceback
import urllib, urllib2, urlparse
import urllib, urlparse
import re
import datetime
@@ -239,7 +239,7 @@ class PublicHDProvider(generic.TorrentProvider):
helpers.chmodAsParent(magnetFileName)
except EnvironmentError:
except EnvironmentError, e:
logger.log("Unable to save the file: " + ex(e), logger.ERROR)
return False

View file

@@ -41,7 +41,7 @@ class TorrentRssProvider(generic.TorrentProvider):
generic.TorrentProvider.__init__(self, name)
self.cache = TorrentRssCache(self)
self.url = url
self.url = re.sub('\/$', '', url)
self.enabled = True
self.supportsBacklog = False
self.session = None
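The provider URL is now normalized by stripping any trailing slash; a quick illustration with a made-up feed URL:

import re

url = 'http://example.com/rss/feed/'
print(re.sub(r'\/$', '', url))  # http://example.com/rss/feed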

View file

@@ -289,27 +289,26 @@ class ThePirateBayProvider(generic.TorrentProvider):
def getURL(self, url, headers=None):
if not headers:
headers = []
headers = {}
# Glype proxies do not support direct linking.
# We have to fake a search on the proxy site to get data
if self.proxy.isEnabled():
headers.append(('Referer', self.proxy.getProxyURL()))
headers.update({'referer': self.proxy.getProxyURL()})
result = None
try:
# Remove double-slashes from url
parsed = list(urlparse.urlparse(url))
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
url = urlparse.urlunparse(parsed)
result = helpers.getURL(url, headers=headers)
except (urllib2.HTTPError, IOError), e:
logger.log(u"Error loading " + self.name + " URL: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR)
r = requests.get(url, headers=headers)
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
logger.log(u"Error loading "+self.name+" URL: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR)
return None
return result
if r.status_code != 200:
logger.log(self.name + u" page requested with url " + url + " returned status code " + str(r.status_code) + ': ' + clients.http_error_code[r.status_code], logger.WARNING)
return None
return r.content
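A standalone sketch of the requests-based fetch pattern used in the rewritten getURL above: optional proxy referer header, connection/HTTP errors swallowed to None, and non-200 responses rejected (the function name and the simplified error handling are not from the codebase):

import requests

def fetch(url, referer=None):
    headers = {}
    if referer:
        headers.update({'referer': referer})
    try:
        r = requests.get(url, headers=headers)
    except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError):
        return None
    if r.status_code != 200:
        # The provider logs a warning here; the sketch just gives up.
        return None
    return r.content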
def downloadResult(self, result):
"""
@@ -340,7 +339,7 @@ class ThePirateBayProvider(generic.TorrentProvider):
helpers.chmodAsParent(magnetFileName)
except EnvironmentError:
except EnvironmentError, e:
logger.log("Unable to save the file: " + ex(e), logger.ERROR)
return False

View file

@@ -49,7 +49,7 @@ from sickbeard import history
from sickbeard import encodingKludge as ek
from common import Quality, Overview
from common import Quality, Overview, statusStrings
from common import DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, ARCHIVED, IGNORED, UNAIRED, WANTED, SKIPPED, UNKNOWN, FAILED
from common import NAMING_DUPLICATE, NAMING_EXTEND, NAMING_LIMITED_EXTEND, NAMING_SEPARATED_REPEAT, NAMING_LIMITED_EXTEND_E_PREFIXED
@@ -93,8 +93,6 @@ class TVShow(object):
self.loadFromDB()
self.saveToDB()
def _getLocation(self):
# no dir check needed if missing show dirs are created during post-processing
if sickbeard.CREATE_MISSING_SHOW_DIRS:
@@ -965,51 +963,54 @@
def wantEpisode(self, season, episode, quality, manualSearch=False):
logger.log(u"Checking if we want episode " + str(season) + "x" + str(episode) + " at quality " + Quality.qualityStrings[quality], logger.DEBUG)
logger.log(u"Checking if found episode " + str(season) + "x" + str(episode) + " is wanted at quality " + Quality.qualityStrings[quality], logger.DEBUG)
# if the quality isn't one we want under any circumstances then just say no
anyQualities, bestQualities = Quality.splitQuality(self.quality)
logger.log(u"any,best = " + str(anyQualities) + " " + str(bestQualities) + " and we are " + str(quality), logger.DEBUG)
logger.log(u"any,best = " + str(anyQualities) + " " + str(bestQualities) + " and found " + str(quality), logger.DEBUG)
if quality not in anyQualities + bestQualities:
logger.log(u"I know for sure I don't want this episode, saying no", logger.DEBUG)
logger.log(u"Don't want this quality, ignoring found episode", logger.DEBUG)
return False
myDB = db.DBConnection()
sqlResults = myDB.select("SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", [self.indexerid, season, episode])
if not sqlResults or not len(sqlResults):
logger.log(u"Unable to find the episode", logger.DEBUG)
logger.log(u"Unable to find a matching episode in database, ignoring found episode", logger.DEBUG)
return False
epStatus = int(sqlResults[0]["status"])
epStatus_text = statusStrings[epStatus]
logger.log(u"current episode status: " + str(epStatus), logger.DEBUG)
logger.log(u"Existing episode status: " + str(epStatus) + " (" + epStatus_text + ")", logger.DEBUG)
# if we know we don't want it then just say no
if epStatus in (SKIPPED, IGNORED, ARCHIVED) and not manualSearch:
logger.log(u"Ep is skipped or marked as archived, not bothering", logger.DEBUG)
logger.log(u"Existing episode status is skipped/ignored/archived, ignoring found episode", logger.DEBUG)
return False
# if it's one of these then we want it as long as it's in our allowed initial qualities
if quality in anyQualities + bestQualities:
if epStatus in (WANTED, UNAIRED, SKIPPED):
logger.log(u"Ep is wanted/unaired/skipped, definitely get it", logger.DEBUG)
logger.log(u"Existing episode status is wanted/unaired/skipped, getting found episode", logger.DEBUG)
return True
elif manualSearch:
logger.log(u"Usually I would ignore this ep but because you forced the search I'm overriding the default and allowing the quality", logger.DEBUG)
logger.log(u"Usually ignoring found episode, but forced search allows the quality, getting found episode", logger.DEBUG)
return True
else:
logger.log(u"This quality looks like something we might want but I don't know for sure yet", logger.DEBUG)
logger.log(u"Quality is on wanted list, need to check if it's better than existing quality", logger.DEBUG)
curStatus, curQuality = Quality.splitCompositeStatus(epStatus)
# if we are re-downloading then we only want it if it's in our bestQualities list and better than what we have
if curStatus in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_PROPER and quality in bestQualities and quality > curQuality:
logger.log(u"We already have this ep but the new one is better quality, saying yes", logger.DEBUG)
if curStatus in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST and quality in bestQualities and quality > curQuality:
logger.log(u"Episode already exists but the found episode has better quality, getting found episode", logger.DEBUG)
return True
else:
logger.log(u"Episode already exists and the found episode has same/lower quality, ignoring found episode", logger.DEBUG)
logger.log(u"None of the conditions were met so I'm just saying no", logger.DEBUG)
logger.log(u"None of the conditions were met, ignoring found episode", logger.DEBUG)
return False
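The re-download branch above now also counts SNATCHED_BEST as an existing snatch and only upgrades when the found quality is on the "best" list and strictly higher. A reduced sketch of that comparison (the quality values are stand-ins, not Quality.* composites):

HDTV, HD720, HD1080 = 1, 2, 3          # stand-in quality values for illustration
bestQualities = [HD720, HD1080]

def want_redownload(cur_quality, found_quality):
    # Upgrade only when the found quality is wanted as a "best" quality and
    # strictly better than what is already snatched/downloaded.
    return found_quality in bestQualities and found_quality > cur_quality

print(want_redownload(HDTV, HD1080))   # True
print(want_redownload(HD1080, HD720))  # False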
def getOverview(self, epStatus):

View file

@@ -258,7 +258,7 @@ class TVCache():
logger.log(u"Trying to look the show up in the show database", logger.DEBUG)
showResult = helpers.searchDBForShow(parse_result.series_name)
if showResult:
logger.log(parse_result.series_name + " was found to be show " + showResult[2] + " ("+str(showResult[1])+") in our DB.", logger.DEBUG)
logger.log(u"" + parse_result.series_name + " was found to be show " + showResult[2] + " (" + str(showResult[1]) + ") in our DB.", logger.DEBUG)
indexer_id = showResult[1]
# if the DB lookup fails then do a comprehensive regex search