Merge pull request #399 from adam111316/feature/AddPy23Compat

Change py2 exception clauses to py2/3 compatible clauses
Author: adam111316 (2015-06-08 23:02:40 +08:00)
Commit: 785a2451f4
64 changed files with 237 additions and 207 deletions
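The hunks below apply one pattern throughout: the Python 2-only exception clause spelling "except SomeError, e:" is replaced with "except SomeError as e:", which is accepted by both Python 2.6+ and Python 3. A minimal standalone sketch of the two spellings (illustrative only, not taken from the diff; the helper name is hypothetical):

# Py2-only spelling, a SyntaxError on Python 3:
#     except OSError, e:
#         print 'failed: %s' % e.strerror
#
# Py2/3-compatible spelling used throughout this change set:
import os

def remove_quietly(path):
    # hypothetical helper, for illustration only
    try:
        os.remove(path)
    except OSError as e:  # 'as' binds the exception object on Python 2.6+ and Python 3
        print('failed to remove %s: %s' % (path, e.strerror))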


@@ -29,6 +29,8 @@
 * Add coverage testing and coveralls support
 * Update feedparser library 5.1.3 to 5.2.0 (8c62940)
 * Remove feedcache implementation and library
+* Change py2 exception clauses to py2/3 compatible clauses
+* Add py2/3 regression testing for exception clauses
 [develop changelog]
 * Update Requests library 2.7.0 (ab1f493) to 2.7.0 (8b5e457)
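The changelog above also notes py2/3 regression testing for exception clauses. The test added by this PR is not shown in this excerpt; as a rough illustration of the idea (a hedged sketch, not the project's actual test, and the 'sickbeard' path is an assumption), such a check could scan the source tree for the old comma-style clause:

# Hypothetical regression check: flag Python 2-only "except X, e:" clauses.
# Note: this simple regex deliberately skips parenthesized forms like "except (A, B), e:".
import os
import re

PY2_EXCEPT = re.compile(r'^\s*except\s+[^,(]+,\s*\w+\s*:')

def find_py2_except_clauses(root='sickbeard'):
    hits = []
    for dirpath, _dirs, files in os.walk(root):
        for name in files:
            if name.endswith('.py'):
                path = os.path.join(dirpath, name)
                with open(path) as fh:
                    for lineno, line in enumerate(fh, 1):
                        if PY2_EXCEPT.match(line):
                            hits.append((path, lineno, line.strip()))
    return hits

if __name__ == '__main__':
    for path, lineno, line in find_py2_except_clauses():
        print('%s:%d: %s' % (path, lineno, line))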


@@ -387,7 +387,7 @@ class SickGear(object):
 pid = os.fork() # @UndefinedVariable - only available in UNIX
 if pid != 0:
 os._exit(0)
-except OSError, e:
+except OSError as e:
 sys.stderr.write('fork #1 failed: %d (%s)\n' % (e.errno, e.strerror))
 sys.exit(1)
@@ -402,7 +402,7 @@ class SickGear(object):
 pid = os.fork() # @UndefinedVariable - only available in UNIX
 if pid != 0:
 os._exit(0)
-except OSError, e:
+except OSError as e:
 sys.stderr.write('fork #2 failed: %d (%s)\n' % (e.errno, e.strerror))
 sys.exit(1)
@@ -412,7 +412,7 @@ class SickGear(object):
 logger.log(u'Writing PID: %s to %s' % (pid, self.PIDFILE))
 try:
 file(self.PIDFILE, 'w').write('%s\n' % pid)
-except IOError, e:
+except IOError as e:
 logger.log_error_and_exit(
 u'Unable to write PID file: %s Error: %s [%s]' % (self.PIDFILE, e.strerror, e.errno))
@@ -456,7 +456,7 @@ class SickGear(object):
 curShow = TVShow(int(sqlShow['indexer']), int(sqlShow['indexer_id']))
 curShow.nextEpisode()
 sickbeard.showList.append(curShow)
-except Exception, e:
+except Exception as e:
 logger.log(
 u'There was an error creating the show in %s: %s' % (sqlShow['location'], str(e).decode('utf-8',
 'replace')),


@@ -22,7 +22,7 @@ try:
 fp = open(configFilename, 'r')
 config.readfp(fp)
 fp.close()
-except IOError, e:
+except IOError as e:
 print 'Could not find/read Sickbeard config.ini: ' + str(e)
 print 'Possibly wrong mediaToSickbeard.py location. Ensure the file is in the autoProcessTV subdir of your Sickbeard installation'
 time.sleep(3)
@@ -180,7 +180,7 @@ def main():
 sess = requests.Session()
 sess.post(login_url, data={'username': username, 'password': password}, stream=True, verify=False)
 response = sess.get(url, auth=(username, password), params=params, verify=False, allow_redirects=False)
-except Exception, e:
+except Exception as e:
 scriptlogger.error(u': Unknown exception raised when opening url: ' + str(e))
 time.sleep(3)
 sys.exit()


@@ -84,7 +84,7 @@ def foldersAtPath(path, includeParent=False, includeFiles=False):
 try:
 fileList = [{'name': filename, 'path': ek.ek(os.path.join, path, filename)} for filename in
 ek.ek(os.listdir, path)]
-except OSError, e:
+except OSError as e:
 logger.log(u'Unable to open %s: %r / %s' % (path, e, e), logger.WARNING)
 fileList = [{'name': filename, 'path': ek.ek(os.path.join, parentPath, filename)} for filename in
 ek.ek(os.listdir, parentPath)]


@@ -53,19 +53,19 @@ class GenericClient(object):
 try:
 response = self.session.__getattribute__(method)(self.url, params=params, data=data, files=files,
 timeout=120, verify=False)
-except requests.exceptions.ConnectionError, e:
+except requests.exceptions.ConnectionError as e:
 logger.log(self.name + u': Unable to connect ' + ex(e), logger.ERROR)
 return False
 except (requests.exceptions.MissingSchema, requests.exceptions.InvalidURL):
 logger.log(self.name + u': Invalid Host', logger.ERROR)
 return False
-except requests.exceptions.HTTPError, e:
+except requests.exceptions.HTTPError as e:
 logger.log(self.name + u': Invalid HTTP Request ' + ex(e), logger.ERROR)
 return False
-except requests.exceptions.Timeout, e:
+except requests.exceptions.Timeout as e:
 logger.log(self.name + u': Connection Timeout ' + ex(e), logger.ERROR)
 return False
-except Exception, e:
+except Exception as e:
 logger.log(self.name + u': Unknown exception raised when sending torrent to ' + self.name + ': ' + ex(e),
 logger.ERROR)
 return False
@@ -199,7 +199,7 @@ class GenericClient(object):
 if result.priority != 0 and not self._set_torrent_priority(result):
 logger.log(self.name + u': Unable to set priority for Torrent', logger.ERROR)
-except Exception, e:
+except Exception as e:
 logger.log(self.name + u': Failed Sending Torrent: ' + result.name + ' - ' + result.hash, logger.ERROR)
 logger.log(self.name + u': Exception raised when sending torrent: ' + ex(e), logger.DEBUG)
 return r_code


@@ -95,7 +95,7 @@ class DBConnection(object):
 self.connection.commit()
 logger.log(u'Transaction with ' + str(len(querylist)) + u' queries executed', logger.DEBUG)
 return sqlResult
-except sqlite3.OperationalError, e:
+except sqlite3.OperationalError as e:
 sqlResult = []
 if self.connection:
 self.connection.rollback()
@@ -106,7 +106,7 @@ class DBConnection(object):
 else:
 logger.log(u'DB error: ' + ex(e), logger.ERROR)
 raise
-except sqlite3.DatabaseError, e:
+except sqlite3.DatabaseError as e:
 if self.connection:
 self.connection.rollback()
 logger.log(u'Fatal error executing query: ' + ex(e), logger.ERROR)
@@ -135,7 +135,7 @@ class DBConnection(object):
 self.connection.commit()
 # get out of the connection attempt loop since we were successful
 break
-except sqlite3.OperationalError, e:
+except sqlite3.OperationalError as e:
 if 'unable to open database file' in e.args[0] or 'database is locked' in e.args[0]:
 logger.log(u'DB error: ' + ex(e), logger.WARNING)
 attempt += 1
@@ -143,7 +143,7 @@ class DBConnection(object):
 else:
 logger.log(u'DB error: ' + ex(e), logger.ERROR)
 raise
-except sqlite3.DatabaseError, e:
+except sqlite3.DatabaseError as e:
 logger.log(u'Fatal error executing query: ' + ex(e), logger.ERROR)
 raise
@@ -252,7 +252,7 @@ def _processUpgrade(connection, upgradeClass):
 logger.log(u'Database upgrade required: %s' % prettyName(upgradeClass.__name__), logger.MESSAGE)
 try:
 instance.execute()
-except sqlite3.DatabaseError, e:
+except sqlite3.DatabaseError as e:
 # attemping to restore previous DB backup and perform upgrade
 try:
 instance.execute()
@@ -442,7 +442,7 @@ def MigrationCode(myDB):
 try:
 update = schema[db_version](myDB)
 db_version = update.execute()
-except Exception, e:
+except Exception as e:
 myDB.close()
 logger.log(u'Failed to update database with error: %s attempting recovery...' % ex(e), logger.ERROR)


@@ -132,7 +132,7 @@ def revertEpisode(epObj):
 epObj.status = WANTED
 epObj.saveToDB()
-except EpisodeNotFoundException, e:
+except EpisodeNotFoundException as e:
 logger.log(u"Unable to create episode, please set its status manually: " + ex(e),
 logger.WARNING)
@@ -146,7 +146,7 @@ def markFailed(epObj):
 epObj.status = Quality.compositeStatus(FAILED, quality)
 epObj.saveToDB()
-except EpisodeNotFoundException, e:
+except EpisodeNotFoundException as e:
 logger.log(u"Unable to get episode, please set its status manually: " + ex(e), logger.WARNING)
 return log_str


@@ -286,7 +286,7 @@ def hardlinkFile(srcFile, destFile):
 try:
 ek.ek(link, srcFile, destFile)
 fixSetGroupID(destFile)
-except Exception, e:
+except Exception as e:
 logger.log(u"Failed to create hardlink of " + srcFile + " at " + destFile + ": " + ex(e) + ". Copying instead",
 logger.ERROR)
 copyFile(srcFile, destFile)
@@ -326,7 +326,7 @@ def make_dirs(path):
 try:
 logger.log(u"Folder " + path + " doesn't exist, creating it", logger.DEBUG)
 ek.ek(os.makedirs, path)
-except (OSError, IOError), e:
+except (OSError, IOError) as e:
 logger.log(u"Failed creating " + path + " : " + ex(e), logger.ERROR)
 return False
@@ -350,7 +350,7 @@ def make_dirs(path):
 chmodAsParent(ek.ek(os.path.normpath, sofar))
 # do the library update for synoindex
 notifiers.synoindex_notifier.addFolder(sofar)
-except (OSError, IOError), e:
+except (OSError, IOError) as e:
 logger.log(u"Failed creating " + sofar + " : " + ex(e), logger.ERROR)
 return False
@@ -397,7 +397,7 @@ def rename_ep_file(cur_path, new_path, old_path_length=0):
 try:
 logger.log(u"Renaming file from " + cur_path + " to " + new_path)
 ek.ek(os.rename, cur_path, new_path)
-except (OSError, IOError), e:
+except (OSError, IOError) as e:
 logger.log(u"Failed renaming " + cur_path + " to " + new_path + ": " + ex(e), logger.ERROR)
 return False
@@ -433,7 +433,7 @@ def delete_empty_folders(check_empty_dir, keep_dir=None):
 ek.ek(os.rmdir, check_empty_dir)
 # do the library update for synoindex
 notifiers.synoindex_notifier.deleteFolder(check_empty_dir)
-except OSError, e:
+except OSError as e:
 logger.log(u"Unable to delete " + check_empty_dir + ": " + repr(e) + " / " + str(e), logger.WARNING)
 break
 check_empty_dir = ek.ek(os.path.dirname, check_empty_dir)
@@ -596,7 +596,7 @@ def create_https_certificates(ssl_cert, ssl_key):
 from OpenSSL import crypto # @UnresolvedImport
 from lib.certgen import createKeyPair, createCertRequest, createCertificate, TYPE_RSA, \
 serial # @UnresolvedImport
-except Exception, e:
+except Exception as e:
 logger.log(u"pyopenssl module missing, please install for https access", logger.WARNING)
 return False
@@ -642,7 +642,7 @@ def parse_xml(data, del_xmlns=False):
 try:
 parsedXML = etree.fromstring(data)
-except Exception, e:
+except Exception as e:
 logger.log(u"Error trying to parse xml data. Error: " + ex(e), logger.DEBUG)
 parsedXML = None
@@ -664,7 +664,7 @@ def backupVersionedFile(old_file, version):
 shutil.copy(old_file, new_file)
 logger.log(u'Backup done', logger.DEBUG)
 break
-except Exception, e:
+except Exception as e:
 logger.log(u'Error while trying to back up %s to %s : %s' % (old_file, new_file, ex(e)), logger.WARNING)
 num_tries += 1
 time.sleep(3)
@@ -692,7 +692,7 @@ def restoreVersionedFile(backup_file, version):
 u"Trying to backup " + new_file + " to " + new_file + "." + "r" + str(version) + " before restoring backup",
 logger.DEBUG)
 shutil.move(new_file, new_file + '.' + 'r' + str(version))
-except Exception, e:
+except Exception as e:
 logger.log(
 u"Error while trying to backup DB file " + restore_file + " before proceeding with restore: " + ex(e),
 logger.WARNING)
@@ -708,7 +708,7 @@ def restoreVersionedFile(backup_file, version):
 shutil.copy(restore_file, new_file)
 logger.log(u"Restore done", logger.DEBUG)
 break
-except Exception, e:
+except Exception as e:
 logger.log(u"Error while trying to restore " + restore_file + ": " + ex(e), logger.WARNING)
 numTries += 1
 time.sleep(1)
@@ -946,14 +946,14 @@ def set_up_anidb_connection():
 auth = False
 try:
 auth = sickbeard.ADBA_CONNECTION.authed()
-except Exception, e:
+except Exception as e:
 logger.log(u'exception msg: ' + str(e))
 pass
 if not auth:
 try:
 sickbeard.ADBA_CONNECTION.auth(sickbeard.ANIDB_USERNAME, sickbeard.ANIDB_PASSWORD)
-except Exception, e:
+except Exception as e:
 logger.log(u'exception msg: ' + str(e))
 return False
 else:
@@ -1164,16 +1164,16 @@ def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=N
 % (url, resp.status_code, http_err_text), logger.DEBUG)
 return
-except requests.exceptions.HTTPError, e:
+except requests.exceptions.HTTPError as e:
 logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
 return
-except requests.exceptions.ConnectionError, e:
+except requests.exceptions.ConnectionError as e:
 logger.log(u"Connection error " + str(e.message) + " while loading URL " + url, logger.WARNING)
 return
-except requests.exceptions.Timeout, e:
+except requests.exceptions.Timeout as e:
 logger.log(u"Connection timed out " + str(e.message) + " while loading URL " + url, logger.WARNING)
 return
-except requests.exceptions.ReadTimeout, e:
+except requests.exceptions.ReadTimeout as e:
 logger.log(u'Read timed out ' + str(e.message) + ' while loading URL ' + url, logger.WARNING)
 return
 except Exception:
@@ -1230,19 +1230,19 @@ def download_file(url, filename, session=None):
 fp.flush()
 chmodAsParent(filename)
-except requests.exceptions.HTTPError, e:
+except requests.exceptions.HTTPError as e:
 _remove_file_failed(filename)
 logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
 return False
-except requests.exceptions.ConnectionError, e:
+except requests.exceptions.ConnectionError as e:
 _remove_file_failed(filename)
 logger.log(u"Connection error " + str(e.message) + " while loading URL " + url, logger.WARNING)
 return False
-except requests.exceptions.Timeout, e:
+except requests.exceptions.Timeout as e:
 _remove_file_failed(filename)
 logger.log(u"Connection timed out " + str(e.message) + " while loading URL " + url, logger.WARNING)
 return False
-except EnvironmentError, e:
+except EnvironmentError as e:
 _remove_file_failed(filename)
 logger.log(u"Unable to save the file: " + ex(e), logger.ERROR)
 return False
@@ -1282,7 +1282,7 @@ def clearCache(force=False):
 if force or (update_datetime - cache_file_modified > max_age):
 try:
 ek.ek(os.remove, cache_file)
-except OSError, e:
+except OSError as e:
 logger.log(u"Unable to clean " + cache_root + ": " + repr(e) + " / " + str(e),
 logger.WARNING)
 break


@@ -305,7 +305,7 @@ class GenericMetadata():
 helpers.chmodAsParent(nfo_file_path)
 return True
-except IOError, e:
+except IOError as e:
 logger.log(
 u"Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e),
 logger.ERROR)
@@ -432,7 +432,7 @@ class GenericMetadata():
 data.write(nfo_file, encoding="utf-8")
 nfo_file.close()
 helpers.chmodAsParent(nfo_file_path)
-except IOError, e:
+except IOError as e:
 logger.log(u"Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e),
 logger.ERROR)
 return False
@@ -477,7 +477,7 @@ class GenericMetadata():
 data.write(nfo_file, encoding="utf-8")
 nfo_file.close()
 helpers.chmodAsParent(nfo_file_path)
-except IOError, e:
+except IOError as e:
 logger.log(u"Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e),
 logger.ERROR)
 return False
@@ -725,7 +725,7 @@ class GenericMetadata():
 outFile.write(image_data)
 outFile.close()
 helpers.chmodAsParent(image_path)
-except IOError, e:
+except IOError as e:
 logger.log(
 u"Unable to write image to " + image_path + " - are you sure the show folder is writable? " + ex(e),
 logger.ERROR)
@@ -761,7 +761,7 @@ class GenericMetadata():
 t = sickbeard.indexerApi(show_obj.indexer).indexer(**lINDEXER_API_PARMS)
 indexer_show_obj = t[show_obj.indexerid]
-except (sickbeard.indexer_error, IOError), e:
+except (sickbeard.indexer_error, IOError) as e:
 logger.log(u"Unable to look up show on " + sickbeard.indexerApi(
 show_obj.indexer).name + ", not downloading images: " + ex(e), logger.ERROR)
 return None
@@ -822,7 +822,7 @@ class GenericMetadata():
 t = sickbeard.indexerApi(show_obj.indexer).indexer(**lINDEXER_API_PARMS)
 indexer_show_obj = t[show_obj.indexerid]
-except (sickbeard.indexer_error, IOError), e:
+except (sickbeard.indexer_error, IOError) as e:
 logger.log(u"Unable to look up show on " + sickbeard.indexerApi(
 show_obj.indexer).name + ", not downloading images: " + ex(e), logger.ERROR)
 return result
@@ -875,7 +875,7 @@ class GenericMetadata():
 t = sickbeard.indexerApi(show_obj.indexer).indexer(**lINDEXER_API_PARMS)
 indexer_show_obj = t[show_obj.indexerid]
-except (sickbeard.indexer_error, IOError), e:
+except (sickbeard.indexer_error, IOError) as e:
 logger.log(u"Unable to look up show on " + sickbeard.indexerApi(
 show_obj.indexer).name + ", not downloading images: " + ex(e), logger.ERROR)
 return result
@@ -953,7 +953,7 @@ class GenericMetadata():
 logger.log(u"Invalid Indexer ID (" + str(indexer_id) + "), not using metadata file", logger.WARNING)
 return empty_return
-except Exception, e:
+except Exception as e:
 logger.log(
 u"There was an error parsing your existing metadata file: '" + metadata_path + "' error: " + ex(e),
 logger.WARNING)
@@ -984,7 +984,7 @@ class GenericMetadata():
 elif poster and result['poster_path']:
 return "{0}{1}{2}".format(base_url, max_size, result['poster_path'])
-except Exception, e:
+except Exception as e:
 pass
 logger.log(u"Could not find any posters or background for " + show.name, logger.DEBUG)


@@ -234,9 +234,9 @@ class KODIMetadata(generic.GenericMetadata):
 try:
 t = sickbeard.indexerApi(ep_obj.show.indexer).indexer(**lINDEXER_API_PARMS)
 myShow = t[ep_obj.show.indexerid]
-except sickbeard.indexer_shownotfound, e:
+except sickbeard.indexer_shownotfound as e:
 raise exceptions.ShowNotFoundException(e.message)
-except sickbeard.indexer_error, e:
+except sickbeard.indexer_error as e:
 logger.log(u'Unable to connect to ' + sickbeard.indexerApi(
 ep_obj.show.indexer).name + ' while creating meta files - skipping - ' + ex(e), logger.ERROR)
 return


@@ -236,9 +236,9 @@ class Mede8erMetadata(mediabrowser.MediaBrowserMetadata):
 t = sickbeard.indexerApi(ep_obj.show.indexer).indexer(**lINDEXER_API_PARMS)
 myShow = t[ep_obj.show.indexerid]
-except sickbeard.indexer_shownotfound, e:
+except sickbeard.indexer_shownotfound as e:
 raise exceptions.ShowNotFoundException(e.message)
-except sickbeard.indexer_error, e:
+except sickbeard.indexer_error as e:
 logger.log(u"Unable to connect to TVDB while creating meta files - skipping - " + ex(e), logger.ERROR)
 return False


@@ -408,9 +408,9 @@ class MediaBrowserMetadata(generic.GenericMetadata):
 t = sickbeard.indexerApi(ep_obj.show.indexer).indexer(**lINDEXER_API_PARMS)
 myShow = t[ep_obj.show.indexerid]
-except sickbeard.indexer_shownotfound, e:
+except sickbeard.indexer_shownotfound as e:
 raise exceptions.ShowNotFoundException(e.message)
-except sickbeard.indexer_error, e:
+except sickbeard.indexer_error as e:
 logger.log(u"Unable to connect to " + sickbeard.indexerApi(
 ep_obj.show.indexer).name + " while creating meta files - skipping - " + ex(e), logger.ERROR)
 return False


@@ -181,9 +181,9 @@ class TIVOMetadata(generic.GenericMetadata):
 t = sickbeard.indexerApi(ep_obj.show.indexer).indexer(**lINDEXER_API_PARMS)
 myShow = t[ep_obj.show.indexerid]
-except sickbeard.indexer_shownotfound, e:
+except sickbeard.indexer_shownotfound as e:
 raise exceptions.ShowNotFoundException(str(e))
-except sickbeard.indexer_error, e:
+except sickbeard.indexer_error as e:
 logger.log(u"Unable to connect to " + sickbeard.indexerApi(
 ep_obj.show.indexer).name + " while creating meta files - skipping - " + str(e), logger.ERROR)
 return False
@@ -327,7 +327,7 @@ class TIVOMetadata(generic.GenericMetadata):
 helpers.chmodAsParent(nfo_file_path)
-except EnvironmentError, e:
+except EnvironmentError as e:
 logger.log(u"Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e),
 logger.ERROR)
 return False


@@ -192,9 +192,9 @@ class WDTVMetadata(generic.GenericMetadata):
 t = sickbeard.indexerApi(ep_obj.show.indexer).indexer(**lINDEXER_API_PARMS)
 myShow = t[ep_obj.show.indexerid]
-except sickbeard.indexer_shownotfound, e:
+except sickbeard.indexer_shownotfound as e:
 raise exceptions.ShowNotFoundException(e.message)
-except sickbeard.indexer_error, e:
+except sickbeard.indexer_error as e:
 logger.log(u"Unable to connect to " + sickbeard.indexerApi(
 ep_obj.show.indexer).name + " while creating meta files - skipping - " + ex(e), logger.ERROR)
 return False


@@ -234,9 +234,9 @@ class XBMC_12PlusMetadata(generic.GenericMetadata):
 try:
 t = sickbeard.indexerApi(ep_obj.show.indexer).indexer(**lINDEXER_API_PARMS)
 myShow = t[ep_obj.show.indexerid]
-except sickbeard.indexer_shownotfound, e:
+except sickbeard.indexer_shownotfound as e:
 raise exceptions.ShowNotFoundException(e.message)
-except sickbeard.indexer_error, e:
+except sickbeard.indexer_error as e:
 logger.log(u"Unable to connect to " + sickbeard.indexerApi(
 ep_obj.show.indexer).name + " while creating meta files - skipping - " + ex(e), logger.ERROR)
 return


@@ -95,7 +95,7 @@ class NameParser(object):
 for cur_pattern_num, (cur_pattern_name, cur_pattern) in enumerate(regexItem):
 try:
 cur_regex = re.compile(cur_pattern, re.VERBOSE | re.IGNORECASE)
-except re.error, errormsg:
+except re.error as errormsg:
 logger.log(u"WARNING: Invalid episode_pattern, %s. %s" % (errormsg, cur_pattern))
 else:
 self.compiled_regexes[index].append([cur_pattern_num, cur_pattern_name, cur_regex])
@@ -166,7 +166,7 @@ class NameParser(object):
 day = tmp_month
 try:
 result.air_date = datetime.date(year, month, day)
-except ValueError, e:
+except ValueError as e:
 raise InvalidNameException(ex(e))
 if 'extra_info' in named_groups:
@@ -257,7 +257,7 @@ class NameParser(object):
 except sickbeard.indexer_episodenotfound:
 logger.log(u"Unable to find episode with date " + str(bestResult.air_date) + " for show " + bestResult.show.name + ", skipping", logger.WARNING)
 episode_numbers = []
-except sickbeard.indexer_error, e:
+except sickbeard.indexer_error as e:
 logger.log(u"Unable to contact " + sickbeard.indexerApi(bestResult.show.indexer).name + ": " + ex(e), logger.WARNING)
 episode_numbers = []


@@ -188,7 +188,7 @@ def validate_name(pattern, multi=None, anime_type=None, file_only=False, abd=Fal
 try:
 result = parser.parse(new_name)
-except Exception, e:
+except Exception as e:
 logger.log(u"Unable to parse " + new_name + ", not valid", logger.DEBUG)
 return False


@@ -62,7 +62,7 @@ class Boxcar2Notifier:
 handle = urllib2.urlopen(req, data)
 handle.close()
-except urllib2.URLError, e:
+except urllib2.URLError as e:
 # if we get an error back that doesn't have an error code then who knows what's really happening
 if not hasattr(e, 'code'):
 logger.log(u'BOXCAR2: Notification failed.' + ex(e), logger.ERROR)


@@ -142,7 +142,7 @@ class GrowlNotifier:
 return self._send_growl(opts, message)
 else:
 return False
-except Exception, e:
+except Exception as e:
 logger.log(u"GROWL: Unable to send growl to " + opts['host'] + ":" + str(opts['port']) + " - " + ex(e), logger.WARNING)
 return False
@@ -186,7 +186,7 @@ class GrowlNotifier:
 try:
 return self._send(opts['host'], opts['port'], register.encode(), opts['debug'])
-except Exception, e:
+except Exception as e:
 logger.log(u"GROWL: Unable to send growl to " + opts['host'] + ":" + str(opts['port']) + " - " + ex(e), logger.WARNING)
 return False


@@ -102,7 +102,7 @@ class KODINotifier:
 try:
 response = urllib2.urlopen(req)
-except urllib2.URLError, e:
+except urllib2.URLError as e:
 logger.log(u'KODI: Warning: Couldn\'t contact Kodi at ' + host + '- ' + ex(e), logger.WARNING)
 return False
@@ -112,11 +112,11 @@ class KODINotifier:
 response.close()
 logger.log(u'KODI: JSON response: ' + str(result), logger.DEBUG)
 return result # need to return response for parsing
-except ValueError, e:
+except ValueError as e:
 logger.log(u'KODI: Unable to decode JSON response: ' + response, logger.WARNING)
 return False
-except IOError, e:
+except IOError as e:
 logger.log(u'KODI: Warning: Couldn\'t contact Kodi at ' + host + ' - ' + ex(e), logger.WARNING)
 return False


@@ -44,13 +44,13 @@ def diagnose():
 else:
 try:
 bus = dbus.SessionBus()
-except dbus.DBusException, e:
+except dbus.DBusException as e:
 return (u"<p>Error: unable to connect to D-Bus session bus: <code>%s</code>."
 u"<p>Are you running SickGear in a desktop session?") % (cgi.escape(e),)
 try:
 bus.get_object('org.freedesktop.Notifications',
 '/org/freedesktop/Notifications')
-except dbus.DBusException, e:
+except dbus.DBusException as e:
 return (u"<p>Error: there doesn't seem to be a notification daemon available: <code>%s</code> "
 u"<p>Try installing notification-daemon or notify-osd.") % (cgi.escape(e),)
 return u"<p>Error: Unable to send notification."


@@ -121,13 +121,13 @@ class NMJNotifier:
 req = urllib2.Request(mount)
 logger.log(u"Try to mount network drive via url: %s" % (mount), logger.DEBUG)
 handle = urllib2.urlopen(req)
-except IOError, e:
+except IOError as e:
 if hasattr(e, 'reason'):
 logger.log(u"NMJ: Could not contact Popcorn Hour on host %s: %s" % (host, e.reason), logger.WARNING)
 elif hasattr(e, 'code'):
 logger.log(u"NMJ: Problem with Popcorn Hour on host %s: %s" % (host, e.code), logger.WARNING)
 return False
-except Exception, e:
+except Exception as e:
 logger.log(u"NMJ: Unknown exception: " + ex(e), logger.ERROR)
 return False
@@ -148,13 +148,13 @@ class NMJNotifier:
 logger.log(u"Sending NMJ scan update command via url: %s" % (updateUrl), logger.DEBUG)
 handle = urllib2.urlopen(req)
 response = handle.read()
-except IOError, e:
+except IOError as e:
 if hasattr(e, 'reason'):
 logger.log(u"NMJ: Could not contact Popcorn Hour on host %s: %s" % (host, e.reason), logger.WARNING)
 elif hasattr(e, 'code'):
 logger.log(u"NMJ: Problem with Popcorn Hour on host %s: %s" % (host, e.code), logger.WARNING)
 return False
-except Exception, e:
+except Exception as e:
 logger.log(u"NMJ: Unknown exception: " + ex(e), logger.ERROR)
 return False
@@ -162,7 +162,7 @@ class NMJNotifier:
 try:
 et = etree.fromstring(response)
 result = et.findtext("returnValue")
-except SyntaxError, e:
+except SyntaxError as e:
 logger.log(u"Unable to parse XML returned from the Popcorn Hour: %s" % (e), logger.ERROR)
 return False


@@ -90,7 +90,7 @@ class NMJv2Notifier:
 sickbeard.NMJv2_DATABASE = DB_path
 return True
-except IOError, e:
+except IOError as e:
 logger.log(u"Warning: Couldn't contact popcorn hour on host %s: %s" % (host, e), logger.WARNING)
 return False
 return False
@@ -119,19 +119,19 @@ class NMJv2Notifier:
 time.sleep(300.0 / 1000.0)
 handle2 = urllib2.urlopen(req)
 response2 = handle2.read()
-except IOError, e:
+except IOError as e:
 logger.log(u"Warning: Couldn't contact popcorn hour on host %s: %s" % (host, e), logger.WARNING)
 return False
 try:
 et = etree.fromstring(response1)
 result1 = et.findtext("returnValue")
-except SyntaxError, e:
+except SyntaxError as e:
 logger.log(u"Unable to parse XML returned from the Popcorn Hour: update_scandir, %s" % (e), logger.ERROR)
 return False
 try:
 et = etree.fromstring(response2)
 result2 = et.findtext("returnValue")
-except SyntaxError, e:
+except SyntaxError as e:
 logger.log(u"Unable to parse XML returned from the Popcorn Hour: scanner_start, %s" % (e), logger.ERROR)
 return False


@@ -88,7 +88,7 @@ class PLEXNotifier:
 # could return result response = re.compile('<html><li>(.+\w)</html>').findall(result)
 return 'OK'
-except (urllib2.URLError, IOError), e:
+except (urllib2.URLError, IOError) as e:
 logger.log(u'PLEX: Warning: Couldn\'t contact Plex at ' + fixStupidEncodings(url) + ' ' + ex(e), logger.WARNING)
 return False
@@ -219,7 +219,7 @@ class PLEXNotifier:
 try:
 xml_tree = etree.parse(urllib.urlopen(url))
 media_container = xml_tree.getroot()
-except IOError, e:
+except IOError as e:
 logger.log(u'PLEX: Error while trying to contact Plex Media Server: ' + ex(e), logger.ERROR)
 hosts_failed.append(cur_host)
 continue
@@ -255,7 +255,7 @@ class PLEXNotifier:
 try:
 force and urllib.urlopen(url)
 host_list.append(cur_host)
-except Exception, e:
+except Exception as e:
 logger.log(u'PLEX: Error updating library section for Plex Media Server: ' + ex(e), logger.ERROR)
 hosts_failed.append(cur_host)


@@ -74,7 +74,7 @@ class PushbulletNotifier:
 handle.close()
 except socket.timeout:
 return False
-except urllib2.URLError, e:
+except urllib2.URLError as e:
 if e.code == 404:
 logger.log(u'PUSHBULLET: Access token is wrong/not associated to a device.', logger.ERROR)


@@ -97,7 +97,7 @@ class PushoverNotifier:
 handle = urllib2.urlopen(req, data)
 handle.close()
-except urllib2.URLError, e:
+except urllib2.URLError as e:
 # HTTP status 404 if the provided email address isn't a Pushover user.
 if e.code == 404:
 logger.log(u'PUSHOVER: Username is wrong/not a Pushover email. Pushover will send an email to it', logger.WARNING)


@@ -91,14 +91,14 @@ class pyTivoNotifier:
 try:
 response = urlopen(request) #@UnusedVariable
-except HTTPError , e:
+except HTTPError as e:
 if hasattr(e, 'reason'):
 logger.log(u"pyTivo notification: Error, failed to reach a server - " + e.reason, logger.ERROR)
 return False
 elif hasattr(e, 'code'):
 logger.log(u"pyTivo notification: Error, the server couldn't fulfill the request - " + e.code, logger.ERROR)
 return False
-except Exception, e:
+except Exception as e:
 logger.log(u"PYTIVO: Unknown exception: " + ex(e), logger.ERROR)
 return False
 else:


@@ -58,7 +58,7 @@ class synoIndexNotifier:
 cwd=sickbeard.PROG_DIR)
 out, err = p.communicate() #@UnusedVariable
 logger.log(u"Script result: " + str(out), logger.DEBUG)
-except OSError, e:
+except OSError as e:
 logger.log(u"Unable to run synoindex: " + ex(e), logger.ERROR)
 def deleteFolder(self, cur_path):
@@ -83,7 +83,7 @@ class synoIndexNotifier:
 cwd=sickbeard.PROG_DIR)
 out, err = p.communicate() #@UnusedVariable
 logger.log(u"Script result: " + str(out), logger.DEBUG)
-except OSError, e:
+except OSError as e:
 logger.log(u"Unable to run synoindex: " + ex(e), logger.ERROR)


@@ -56,7 +56,7 @@ class synologyNotifier:
 cwd=sickbeard.PROG_DIR)
 out, err = p.communicate() #@UnusedVariable
 logger.log(u"Script result: " + str(out), logger.DEBUG)
-except OSError, e:
+except OSError as e:
 logger.log(u"Unable to run synodsmnotify: " + ex(e))


@@ -129,7 +129,7 @@ class TwitterNotifier:
 try:
 api.PostUpdate(message.encode('utf8'))
-except Exception, e:
+except Exception as e:
 logger.log(u"Error Sending Tweet: " + ex(e), logger.ERROR)
 return False


@@ -247,7 +247,7 @@ class XBMCNotifier:
 logger.log(u"XBMC HTTP response: " + result.replace('\n', ''), logger.DEBUG)
 return result
-except (urllib2.URLError, IOError), e:
+except (urllib2.URLError, IOError) as e:
 logger.log(u"Warning: Couldn't contact XBMC HTTP at " + fixStupidEncodings(url) + " " + ex(e),
 logger.WARNING)
 return False
@@ -304,7 +304,7 @@ class XBMCNotifier:
 encSqlXML = urllib.quote(sqlXML, ':\\/<>')
 try:
 et = etree.fromstring(encSqlXML)
-except SyntaxError, e:
+except SyntaxError as e:
 logger.log(u"Unable to parse XML returned from XBMC: " + ex(e), logger.ERROR)
 return False
@@ -385,7 +385,7 @@ class XBMCNotifier:
 try:
 response = urllib2.urlopen(req)
-except urllib2.URLError, e:
+except urllib2.URLError as e:
 logger.log(u"Error while trying to retrieve XBMC API version for " + host + ": " + ex(e),
 logger.WARNING)
 return False
@@ -396,11 +396,11 @@ class XBMCNotifier:
 response.close()
 logger.log(u"XBMC JSON response: " + str(result), logger.DEBUG)
 return result # need to return response for parsing
-except ValueError, e:
+except ValueError as e:
 logger.log(u"Unable to decode JSON: " + response, logger.WARNING)
 return False
-except IOError, e:
+except IOError as e:
 logger.log(u"Warning: Couldn't contact XBMC JSON API at " + fixStupidEncodings(url) + " " + ex(e),
 logger.WARNING)
 return False


@@ -92,7 +92,7 @@ def saveNZB(nzbName, nzbString):
 with ek.ek(open, nzbName + ".nzb", 'w') as nzb_fh:
 nzb_fh.write(nzbString)
-except EnvironmentError, e:
+except EnvironmentError as e:
 logger.log(u"Unable to save NZB: " + ex(e), logger.ERROR)


@@ -63,7 +63,7 @@ def sendNZB(nzb, proper=False):
 logger.ERROR)
 return False
-except xmlrpclib.ProtocolError, e:
+except xmlrpclib.ProtocolError as e:
 if (e.errmsg == "Unauthorized"):
 logger.log(u"NZBget username or password is incorrect.", logger.ERROR)
 else:


@@ -234,7 +234,7 @@ class PostProcessor(object):
 ek.ek(send2trash, cur_file)
 else:
 ek.ek(os.remove, cur_file)
-except OSError, e:
+except OSError as e:
 self._log(u'Unable to delete file %s: %s' % (cur_file, str(e.strerror)), logger.DEBUG)
 if True is not ek.ek(os.path.isfile, cur_file):
@@ -329,7 +329,7 @@ class PostProcessor(object):
 helpers.moveFile(cur_file_path, new_file_path)
 helpers.chmodAsParent(new_file_path)
 self._log(u'Moved file from' + (success_tmpl % (cur_file_path, new_file_path)), logger.DEBUG)
-except (IOError, OSError), e:
+except (IOError, OSError) as e:
 self._log(u'Unable to move file %s<br />.. %s' % (success_tmpl % (cur_file_path, new_file_path), str(e)), logger.ERROR)
 raise e
@@ -350,7 +350,7 @@ class PostProcessor(object):
 helpers.copyFile(cur_file_path, new_file_path)
 helpers.chmodAsParent(new_file_path)
 self._log(u'Copied file from' + (success_tmpl % (cur_file_path, new_file_path)), logger.DEBUG)
-except (IOError, OSError), e:
+except (IOError, OSError) as e:
 self._log(u'Unable to copy %s<br />.. %s' % (success_tmpl % (cur_file_path, new_file_path), str(e)), logger.ERROR)
 raise e
@@ -371,7 +371,7 @@ class PostProcessor(object):
 helpers.hardlinkFile(cur_file_path, new_file_path)
 helpers.chmodAsParent(new_file_path)
 self._log(u'Hard linked file from' + (success_tmpl % (cur_file_path, new_file_path)), logger.DEBUG)
-except (IOError, OSError), e:
+except (IOError, OSError) as e:
 self._log(u'Unable to link file %s<br />.. %s' % (success_tmpl % (cur_file_path, new_file_path), str(e)), logger.ERROR)
 raise e
@@ -393,7 +393,7 @@ class PostProcessor(object):
 helpers.chmodAsParent(new_file_path)
 self._log(u'Moved then symbolic linked file from' + (success_tmpl % (cur_file_path, new_file_path)),
 logger.DEBUG)
-except (IOError, OSError), e:
+except (IOError, OSError) as e:
 self._log(u'Unable to link file %s<br />.. %s' % (success_tmpl % (cur_file_path, new_file_path), str(e)), logger.ERROR)
 raise e
@@ -543,7 +543,7 @@ class PostProcessor(object):
 try:
 (cur_show, cur_season, cur_episodes, cur_quality) = cur_attempt()
-except (InvalidNameException, InvalidShowException), e:
+except (InvalidNameException, InvalidShowException) as e:
 logger.log(u'Unable to parse, skipping: ' + ex(e), logger.DEBUG)
 continue
@@ -621,7 +621,7 @@ class PostProcessor(object):
 # now that we've figured out which episode this file is just load it manually
 try:
 cur_ep = show.getEpisode(season, episode)
-except exceptions.EpisodeNotFoundException, e:
+except exceptions.EpisodeNotFoundException as e:
 self._log(u'Unable to create episode: ' + ex(e), logger.DEBUG)
 raise exceptions.PostProcessingFailed()
@@ -706,10 +706,10 @@ class PostProcessor(object):
 out, err = p.communicate() # @UnusedVariable
 self._log(u'Script result: ' + str(out), logger.DEBUG)
-except OSError, e:
+except OSError as e:
 self._log(u'Unable to run extra_script: ' + ex(e))
-except Exception, e:
+except Exception as e:
 self._log(u'Unable to run extra_script: ' + ex(e))
 def _safe_replace(self, ep_obj, new_ep_quality):
@@ -1048,5 +1048,5 @@ class PostProcessor(object):
 self._log(u'Adding the file to the anidb mylist', logger.DEBUG)
 try:
 self.anidbEpisode.add_to_mylist(status=1) # status = 1 sets the status of the file to "internal HDD"
-except Exception, e:
+except Exception as e:
 self._log(u'exception msg: ' + str(e))


@@ -97,7 +97,7 @@ class ProcessTVShow(object):
 # try deleting folder
 try:
 shutil.rmtree(folder)
-except (OSError, IOError), e:
+except (OSError, IOError) as e:
 logger.log(u'Warning: unable to delete folder: %s: %s' % (folder, ex(e)), logger.WARNING)
 return False
@@ -128,14 +128,14 @@ class ProcessTVShow(object):
 self._log_helper(u'Changing ReadOnly flag for file ' + cur_file)
 try:
 ek.ek(os.chmod, cur_file_path, stat.S_IWRITE)
-except OSError, e:
+except OSError as e:
 self._log_helper(u'Cannot change permissions of %s: %s' % (cur_file_path, str(e.strerror)))
 try:
 if use_trash:
 ek.ek(send2trash, cur_file_path)
 else:
 ek.ek(os.remove, cur_file_path)
-except OSError, e:
+except OSError as e:
 self._log_helper(u'Unable to delete file %s: %s' % (cur_file, str(e.strerror)))
 if True is not ek.ek(os.path.isfile, cur_file_path):
@@ -401,7 +401,7 @@ class ProcessTVShow(object):
 rar_handle.extract(path=path, withSubpath=False, overwrite=False)
 unpacked_files += [os.path.basename(x.filename) for x in rar_handle.infolist() if not x.isdir]
 del rar_handle
-except Exception, e:
+except Exception as e:
 self._log_helper(u'Failed to unpack archive %s: %s' % (archive, ex(e)), logger.ERROR)
 self._set_process_success(False)
 continue
@@ -509,7 +509,7 @@ class ProcessTVShow(object):
 processor = postProcessor.PostProcessor(cur_video_file_path, nzb_name, process_method, force_replace, use_trash=use_trash)
 file_success = processor.process()
 process_fail_message = ''
-except exceptions.PostProcessingFailed, e:
+except exceptions.PostProcessingFailed as e:
 file_success = False
 process_fail_message = '<br />.. ' + ex(e)
@@ -560,7 +560,7 @@ class ProcessTVShow(object):
 processor = failedProcessor.FailedProcessor(dir_name, nzb_name)
 self._set_process_success(processor.process())
 process_fail_message = ''
-except exceptions.FailedProcessingFailed, e:
+except exceptions.FailedProcessingFailed as e:
 self._set_process_success(False)
 process_fail_message = ex(e)

View file

@ -74,10 +74,10 @@ def _getProperList():
try: try:
curPropers = curProvider.findPropers(search_date) curPropers = curProvider.findPropers(search_date)
except exceptions.AuthException, e: except exceptions.AuthException as e:
logger.log(u'Authentication error: ' + ex(e), logger.ERROR) logger.log(u'Authentication error: ' + ex(e), logger.ERROR)
continue continue
except Exception, e: except Exception as e:
logger.log(u'Error while searching ' + curProvider.name + ', skipping: ' + ex(e), logger.ERROR) logger.log(u'Error while searching ' + curProvider.name + ', skipping: ' + ex(e), logger.ERROR)
logger.log(traceback.format_exc(), logger.DEBUG) logger.log(traceback.format_exc(), logger.DEBUG)
continue continue

View file

@ -74,7 +74,7 @@ class BitSoupProvider(generic.TorrentProvider):
try: try:
response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False) response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e: except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError) as e:
logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR) logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
return False return False
@ -195,7 +195,7 @@ class BitSoupProvider(generic.TorrentProvider):
items[mode].append(item) items[mode].append(item)
except Exception, e: except Exception as e:
logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR) logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
#For each search mode sort all the items by seeders #For each search mode sort all the items by seeders

View file

@ -123,7 +123,7 @@ class BTNProvider(generic.TorrentProvider):
try: try:
parsedJSON = server.getTorrents(apikey, params, int(results_per_page), int(offset)) parsedJSON = server.getTorrents(apikey, params, int(results_per_page), int(offset))
except jsonrpclib.jsonrpc.ProtocolError, error: except jsonrpclib.jsonrpc.ProtocolError as error:
logger.log(u"JSON-RPC protocol error while accessing " + self.name + ": " + ex(error), logger.ERROR) logger.log(u"JSON-RPC protocol error while accessing " + self.name + ": " + ex(error), logger.ERROR)
parsedJSON = {'api-error': ex(error)} parsedJSON = {'api-error': ex(error)}
return parsedJSON return parsedJSON
@ -131,11 +131,11 @@ class BTNProvider(generic.TorrentProvider):
except socket.timeout: except socket.timeout:
logger.log(u"Timeout while accessing " + self.name, logger.WARNING) logger.log(u"Timeout while accessing " + self.name, logger.WARNING)
except socket.error, error: except socket.error as error:
# Note that sometimes timeouts are thrown as socket errors # Note that sometimes timeouts are thrown as socket errors
logger.log(u"Socket error while accessing " + self.name + ": " + error[1], logger.ERROR) logger.log(u"Socket error while accessing " + self.name + ": " + error[1], logger.ERROR)
except Exception, error: except Exception as error:
errorstring = str(error) errorstring = str(error)
if (errorstring.startswith('<') and errorstring.endswith('>')): if (errorstring.startswith('<') and errorstring.endswith('>')):
errorstring = errorstring[1:-1] errorstring = errorstring[1:-1]

View file

@ -83,7 +83,7 @@ class FreshOnTVProvider(generic.TorrentProvider):
try: try:
response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False) response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e: except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError) as e:
logger.log(u'Unable to connect to %s provider: %s' % (self.name, ex(e)), logger.ERROR) logger.log(u'Unable to connect to %s provider: %s' % (self.name, ex(e)), logger.ERROR)
return False return False
@ -232,7 +232,7 @@ class FreshOnTVProvider(generic.TorrentProvider):
items[mode].append(item) items[mode].append(item)
except Exception, e: except Exception as e:
logger.log(u'Failed parsing %s Traceback: %s' % (self.name, traceback.format_exc()), logger.ERROR) logger.log(u'Failed parsing %s Traceback: %s' % (self.name, traceback.format_exc()), logger.ERROR)
# For each search mode sort all the items by seeders # For each search mode sort all the items by seeders

View file

@ -86,7 +86,7 @@ class HDTorrentsProvider(generic.TorrentProvider):
try: try:
response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False) response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e: except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError) as e:
logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR) logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
return False return False
@ -255,7 +255,7 @@ class HDTorrentsProvider(generic.TorrentProvider):
items[mode].append(item) items[mode].append(item)
except Exception, e: except Exception as e:
logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR) logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
#For each search mode sort all the items by seeders #For each search mode sort all the items by seeders

View file

@ -70,7 +70,7 @@ class IPTorrentsProvider(generic.TorrentProvider):
try: try:
response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False) response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e: except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError) as e:
logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR) logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
return False return False
@ -198,7 +198,7 @@ class IPTorrentsProvider(generic.TorrentProvider):
logger.log(u"Found result: " + torrent_name + " (" + torrent_details_url + ")", logger.DEBUG) logger.log(u"Found result: " + torrent_name + " (" + torrent_details_url + ")", logger.DEBUG)
items[mode].append(item) items[mode].append(item)
except Exception, e: except Exception as e:
logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR) logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
results += items[mode] results += items[mode]

View file

@ -139,7 +139,7 @@ class KATProvider(generic.TorrentProvider):
return title return title
except Exception, e: except Exception as e:
logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR) logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
@ -267,7 +267,7 @@ class KATProvider(generic.TorrentProvider):
items[mode].append(item) items[mode].append(item)
except Exception, e: except Exception as e:
logger.log(u"Failed to parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.log(u"Failed to parsing " + self.name + " Traceback: " + traceback.format_exc(),
logger.ERROR) logger.ERROR)

View file

@ -230,7 +230,7 @@ class NextGenProvider(generic.TorrentProvider):
logger.WARNING) logger.WARNING)
continue continue
except Exception, e: except Exception as e:
logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(),
logger.ERROR) logger.ERROR)

View file

@ -116,13 +116,13 @@ class TorrentRssProvider(generic.TorrentProvider):
torrent_file = self.getURL(url) torrent_file = self.getURL(url)
try: try:
bdecode(torrent_file) bdecode(torrent_file)
except Exception, e: except Exception as e:
self.dumpHTML(torrent_file) self.dumpHTML(torrent_file)
return (False, 'Torrent link is not a valid torrent file: ' + ex(e)) return (False, 'Torrent link is not a valid torrent file: ' + ex(e))
return (True, 'RSS feed Parsed correctly') return (True, 'RSS feed Parsed correctly')
except Exception, e: except Exception as e:
return (False, 'Error when trying to load RSS: ' + ex(e)) return (False, 'Error when trying to load RSS: ' + ex(e))
def dumpHTML(self, data): def dumpHTML(self, data):
@ -134,7 +134,7 @@ class TorrentRssProvider(generic.TorrentProvider):
fileOut.write(data) fileOut.write(data)
fileOut.close() fileOut.close()
helpers.chmodAsParent(dumpName) helpers.chmodAsParent(dumpName)
except IOError, e: except IOError as e:
logger.log("Unable to save the file: " + ex(e), logger.ERROR) logger.log("Unable to save the file: " + ex(e), logger.ERROR)
return False return False
logger.log(u"Saved custom_torrent html dump " + dumpName + " ", logger.MESSAGE) logger.log(u"Saved custom_torrent html dump " + dumpName + " ", logger.MESSAGE)

View file

@ -68,7 +68,7 @@ class SCCProvider(generic.TorrentProvider):
try: try:
response = self.session.post(self.urls['login'], data=login_params, headers=self.headers, timeout=30, verify=False) response = self.session.post(self.urls['login'], data=login_params, headers=self.headers, timeout=30, verify=False)
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e: except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError) as e:
logger.log(u'Unable to connect to %s provider: %s' % (self.name, ex(e)), logger.ERROR) logger.log(u'Unable to connect to %s provider: %s' % (self.name, ex(e)), logger.ERROR)
return False return False
@ -210,7 +210,7 @@ class SCCProvider(generic.TorrentProvider):
items[mode].append(item) items[mode].append(item)
except Exception, e: except Exception as e:
logger.log(u'Failed parsing %s Traceback: %s' % (self.name, traceback.format_exc()), logger.ERROR) logger.log(u'Failed parsing %s Traceback: %s' % (self.name, traceback.format_exc()), logger.ERROR)
# For each search mode sort all the items by seeders # For each search mode sort all the items by seeders

View file

@ -62,7 +62,7 @@ class SpeedCDProvider(generic.TorrentProvider):
try: try:
response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False) response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e: except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError) as e:
logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR) logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
return False return False

View file

@ -101,7 +101,7 @@ class TokyoToshokanProvider(generic.TorrentProvider):
item = title.lstrip(), url item = title.lstrip(), url
results.append(item) results.append(item)
except Exception, e: except Exception as e:
logger.log(u"Failed to parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR) logger.log(u"Failed to parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)

View file

@ -65,7 +65,7 @@ class TorrentBytesProvider(generic.TorrentProvider):
try: try:
response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False) response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e: except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError) as e:
logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR) logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
return False return False
@ -190,7 +190,7 @@ class TorrentBytesProvider(generic.TorrentProvider):
items[mode].append(item) items[mode].append(item)
except Exception, e: except Exception as e:
logger.log(u'Failed parsing ' + self.name + ' Traceback: ' + traceback.format_exc(), logger.ERROR) logger.log(u'Failed parsing ' + self.name + ' Traceback: ' + traceback.format_exc(), logger.ERROR)
# For each search mode sort all the items by seeders # For each search mode sort all the items by seeders

View file

@ -76,7 +76,7 @@ class TorrentDayProvider(generic.TorrentProvider):
try: try:
response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False) response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e: except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError) as e:
logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR) logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
return False return False

View file

@ -69,7 +69,7 @@ class TorrentLeechProvider(generic.TorrentProvider):
try: try:
response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False) response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e: except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError) as e:
logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR) logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
return False return False
@ -190,7 +190,7 @@ class TorrentLeechProvider(generic.TorrentProvider):
items[mode].append(item) items[mode].append(item)
except Exception, e: except Exception as e:
logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR) logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
#For each search mode sort all the items by seeders #For each search mode sort all the items by seeders

View file

@ -99,11 +99,11 @@ def sendNZB(nzb):
f = opener.open(req) f = opener.open(req)
except (EOFError, IOError), e: except (EOFError, IOError) as e:
logger.log(u"Unable to connect to SABnzbd: " + ex(e), logger.ERROR) logger.log(u"Unable to connect to SABnzbd: " + ex(e), logger.ERROR)
return False return False
except httplib.InvalidURL, e: except httplib.InvalidURL as e:
logger.log(u"Invalid SABnzbd host, check your config: " + ex(e), logger.ERROR) logger.log(u"Invalid SABnzbd host, check your config: " + ex(e), logger.ERROR)
return False return False
@ -115,7 +115,7 @@ def sendNZB(nzb):
# if we opened the URL connection then read the result from SAB # if we opened the URL connection then read the result from SAB
try: try:
result = f.readlines() result = f.readlines()
except Exception, e: except Exception as e:
logger.log(u"Error trying to get result from SABnzbd, NZB not sent: " + ex(e), logger.ERROR) logger.log(u"Error trying to get result from SABnzbd, NZB not sent: " + ex(e), logger.ERROR)
return False return False
@ -144,7 +144,7 @@ def sendNZB(nzb):
def _checkSabResponse(f): def _checkSabResponse(f):
try: try:
result = f.readlines() result = f.readlines()
except Exception, e: except Exception as e:
logger.log(u"Error trying to get result from SABnzbd" + ex(e), logger.ERROR) logger.log(u"Error trying to get result from SABnzbd" + ex(e), logger.ERROR)
return False, "Error from SABnzbd" return False, "Error from SABnzbd"
@ -156,7 +156,7 @@ def _checkSabResponse(f):
sabJson = {} sabJson = {}
try: try:
sabJson = json.loads(sabText) sabJson = json.loads(sabText)
except ValueError, e: except ValueError as e:
pass pass
if sabText == "Missing authentication": if sabText == "Missing authentication":
@ -172,10 +172,10 @@ def _checkSabResponse(f):
def _sabURLOpenSimple(url): def _sabURLOpenSimple(url):
try: try:
f = urllib.urlopen(url) f = urllib.urlopen(url)
except (EOFError, IOError), e: except (EOFError, IOError) as e:
logger.log(u"Unable to connect to SABnzbd: " + ex(e), logger.ERROR) logger.log(u"Unable to connect to SABnzbd: " + ex(e), logger.ERROR)
return False, "Unable to connect" return False, "Unable to connect"
except httplib.InvalidURL, e: except httplib.InvalidURL as e:
logger.log(u"Invalid SABnzbd host, check your config: " + ex(e), logger.ERROR) logger.log(u"Invalid SABnzbd host, check your config: " + ex(e), logger.ERROR)
return False, "Invalid SABnzbd host" return False, "Invalid SABnzbd host"
if f is None: if f is None:

View file

@ -526,7 +526,7 @@ def xem_refresh(indexer_id, indexer, force=False):
else: else:
logger.log(u"Empty lookup result - no XEM data for show %s on %s" % ( logger.log(u"Empty lookup result - no XEM data for show %s on %s" % (
indexer_id, sickbeard.indexerApi(indexer).name,), logger.DEBUG) indexer_id, sickbeard.indexerApi(indexer).name,), logger.DEBUG)
except Exception, e: except Exception as e:
logger.log( logger.log(
u"Exception while refreshing XEM data for show " + str(indexer_id) + " on " + sickbeard.indexerApi( u"Exception while refreshing XEM data for show " + str(indexer_id) + " on " + sickbeard.indexerApi(
indexer).name + ": " + ex(e), logger.WARNING) indexer).name + ": " + ex(e), logger.WARNING)

View file

@ -88,7 +88,7 @@ class Scheduler(threading.Thread):
logger.log(u"Starting new thread: " + self.name, logger.DEBUG) logger.log(u"Starting new thread: " + self.name, logger.DEBUG)
self.action.run() self.action.run()
except Exception, e: except Exception as e:
logger.log(u"Exception generated in thread " + self.name + ": " + ex(e), logger.ERROR) logger.log(u"Exception generated in thread " + self.name + ": " + ex(e), logger.ERROR)
logger.log(repr(traceback.format_exc()), logger.DEBUG) logger.log(repr(traceback.format_exc()), logger.DEBUG)

View file

@ -78,7 +78,7 @@ def _downloadResult(result):
helpers.chmodAsParent(fileName) helpers.chmodAsParent(fileName)
except EnvironmentError, e: except EnvironmentError as e:
logger.log(u"Error trying to save NZB to black hole: " + ex(e), logger.ERROR) logger.log(u"Error trying to save NZB to black hole: " + ex(e), logger.ERROR)
newResult = False newResult = False
elif resProvider.providerType == "torrent": elif resProvider.providerType == "torrent":
@ -483,10 +483,10 @@ def searchProviders(show, episodes, manualSearch=False):
try: try:
curProvider.cache._clearCache() curProvider.cache._clearCache()
searchResults = curProvider.findSearchResults(show, episodes, search_mode, manualSearch) searchResults = curProvider.findSearchResults(show, episodes, search_mode, manualSearch)
except exceptions.AuthException, e: except exceptions.AuthException as e:
logger.log(u"Authentication error: " + ex(e), logger.ERROR) logger.log(u"Authentication error: " + ex(e), logger.ERROR)
break break
except Exception, e: except Exception as e:
logger.log(u"Error while searching " + curProvider.name + ", skipping: " + ex(e), logger.ERROR) logger.log(u"Error while searching " + curProvider.name + ", skipping: " + ex(e), logger.ERROR)
logger.log(traceback.format_exc(), logger.DEBUG) logger.log(traceback.format_exc(), logger.DEBUG)
break break

View file

@ -94,7 +94,7 @@ class ShowUpdater():
piList.append(curQueueItem) piList.append(curQueueItem)
except (exceptions.CantUpdateException, exceptions.CantRefreshException), e: except (exceptions.CantUpdateException, exceptions.CantRefreshException) as e:
logger.log(u'Automatic update failed: ' + ex(e), logger.ERROR) logger.log(u'Automatic update failed: ' + ex(e), logger.ERROR)
ui.ProgressIndicators.setIndicator('dailyUpdate', ui.QueueProgressIndicator('Daily Update', piList)) ui.ProgressIndicators.setIndicator('dailyUpdate', ui.QueueProgressIndicator('Daily Update', piList))

View file

@ -308,7 +308,7 @@ class QueueItemAdd(ShowQueueItem):
self.indexer).name) + ' but contains no season/episode data.') self.indexer).name) + ' but contains no season/episode data.')
self._finishEarly() self._finishEarly()
return return
except Exception, e: except Exception as e:
logger.log(u'Unable to find show ID:' + str(self.indexer_id) + ' on Indexer: ' + str( logger.log(u'Unable to find show ID:' + str(self.indexer_id) + ' on Indexer: ' + str(
sickbeard.indexerApi(self.indexer).name), logger.ERROR) sickbeard.indexerApi(self.indexer).name), logger.ERROR)
ui.notifications.error('Unable to add show', ui.notifications.error('Unable to add show',
@ -349,7 +349,7 @@ class QueueItemAdd(ShowQueueItem):
if self.show.classification and 'sports' in self.show.classification.lower(): if self.show.classification and 'sports' in self.show.classification.lower():
self.show.sports = 1 self.show.sports = 1
except sickbeard.indexer_exception, e: except sickbeard.indexer_exception as e:
logger.log( logger.log(
u'Unable to add show due to an error with ' + sickbeard.indexerApi(self.indexer).name + ': ' + ex(e), u'Unable to add show due to an error with ' + sickbeard.indexerApi(self.indexer).name + ': ' + ex(e),
logger.ERROR) logger.ERROR)
@ -369,7 +369,7 @@ class QueueItemAdd(ShowQueueItem):
self._finishEarly() self._finishEarly()
return return
except Exception, e: except Exception as e:
logger.log(u'Error trying to add show: ' + ex(e), logger.ERROR) logger.log(u'Error trying to add show: ' + ex(e), logger.ERROR)
logger.log(traceback.format_exc(), logger.DEBUG) logger.log(traceback.format_exc(), logger.DEBUG)
self._finishEarly() self._finishEarly()
@ -379,7 +379,7 @@ class QueueItemAdd(ShowQueueItem):
try: try:
self.show.saveToDB() self.show.saveToDB()
except Exception, e: except Exception as e:
logger.log(u'Error saving the show to the database: ' + ex(e), logger.ERROR) logger.log(u'Error saving the show to the database: ' + ex(e), logger.ERROR)
logger.log(traceback.format_exc(), logger.DEBUG) logger.log(traceback.format_exc(), logger.DEBUG)
self._finishEarly() self._finishEarly()
@ -390,7 +390,7 @@ class QueueItemAdd(ShowQueueItem):
try: try:
self.show.loadEpisodesFromIndexer() self.show.loadEpisodesFromIndexer()
except Exception, e: except Exception as e:
logger.log( logger.log(
u'Error with ' + sickbeard.indexerApi(self.show.indexer).name + ', not creating episode list: ' + ex(e), u'Error with ' + sickbeard.indexerApi(self.show.indexer).name + ', not creating episode list: ' + ex(e),
logger.ERROR) logger.ERROR)
@ -398,7 +398,7 @@ class QueueItemAdd(ShowQueueItem):
try: try:
self.show.loadEpisodesFromDir() self.show.loadEpisodesFromDir()
except Exception, e: except Exception as e:
logger.log(u'Error searching directory for episodes: ' + ex(e), logger.ERROR) logger.log(u'Error searching directory for episodes: ' + ex(e), logger.ERROR)
logger.log(traceback.format_exc(), logger.DEBUG) logger.log(traceback.format_exc(), logger.DEBUG)
@ -587,11 +587,11 @@ class QueueItemUpdate(ShowQueueItem):
result = self.show.loadFromIndexer(cache=not self.force) result = self.show.loadFromIndexer(cache=not self.force)
if None is not result: if None is not result:
return return
except sickbeard.indexer_error, e: except sickbeard.indexer_error as e:
logger.log(u'Unable to contact ' + sickbeard.indexerApi(self.show.indexer).name + ', aborting: ' + ex(e), logger.log(u'Unable to contact ' + sickbeard.indexerApi(self.show.indexer).name + ', aborting: ' + ex(e),
logger.WARNING) logger.WARNING)
return return
except sickbeard.indexer_attributenotfound, e: except sickbeard.indexer_attributenotfound as e:
logger.log(u'Data retrieved from ' + sickbeard.indexerApi( logger.log(u'Data retrieved from ' + sickbeard.indexerApi(
self.show.indexer).name + ' was incomplete, aborting: ' + ex(e), logger.ERROR) self.show.indexer).name + ' was incomplete, aborting: ' + ex(e), logger.ERROR)
return return
@ -601,7 +601,7 @@ class QueueItemUpdate(ShowQueueItem):
try: try:
self.show.saveToDB() self.show.saveToDB()
except Exception, e: except Exception as e:
logger.log(u'Error saving the episode to the database: ' + ex(e), logger.ERROR) logger.log(u'Error saving the episode to the database: ' + ex(e), logger.ERROR)
logger.log(traceback.format_exc(), logger.DEBUG) logger.log(traceback.format_exc(), logger.DEBUG)
@ -613,7 +613,7 @@ class QueueItemUpdate(ShowQueueItem):
logger.log(u'Loading all episodes from ' + sickbeard.indexerApi(self.show.indexer).name + '', logger.DEBUG) logger.log(u'Loading all episodes from ' + sickbeard.indexerApi(self.show.indexer).name + '', logger.DEBUG)
try: try:
IndexerEpList = self.show.loadEpisodesFromIndexer(cache=not self.force) IndexerEpList = self.show.loadEpisodesFromIndexer(cache=not self.force)
except sickbeard.indexer_exception, e: except sickbeard.indexer_exception as e:
logger.log(u'Unable to get info from ' + sickbeard.indexerApi( logger.log(u'Unable to get info from ' + sickbeard.indexerApi(
self.show.indexer).name + ', the show info will not be refreshed: ' + ex(e), logger.ERROR) self.show.indexer).name + ', the show info will not be refreshed: ' + ex(e), logger.ERROR)
IndexerEpList = None IndexerEpList = None

View file

@ -412,7 +412,7 @@ class TVShow(object):
logger.log(str(self.indexerid) + u": Creating episode from " + mediaFile, logger.DEBUG) logger.log(str(self.indexerid) + u": Creating episode from " + mediaFile, logger.DEBUG)
try: try:
curEpisode = self.makeEpFromFile(ek.ek(os.path.join, self._location, mediaFile)) curEpisode = self.makeEpFromFile(ek.ek(os.path.join, self._location, mediaFile))
except (exceptions.ShowNotFoundException, exceptions.EpisodeNotFoundException), e: except (exceptions.ShowNotFoundException, exceptions.EpisodeNotFoundException) as e:
logger.log(u"Episode " + mediaFile + " returned an exception: " + ex(e), logger.ERROR) logger.log(u"Episode " + mediaFile + " returned an exception: " + ex(e), logger.ERROR)
continue continue
except exceptions.EpisodeDeletedException: except exceptions.EpisodeDeletedException:
@ -493,7 +493,7 @@ class TVShow(object):
if curSeason not in cachedSeasons: if curSeason not in cachedSeasons:
try: try:
cachedSeasons[curSeason] = cachedShow[curSeason] cachedSeasons[curSeason] = cachedShow[curSeason]
except sickbeard.indexer_seasonnotfound, e: except sickbeard.indexer_seasonnotfound as e:
logger.log(u"Error when trying to load the episode from " + sickbeard.indexerApi( logger.log(u"Error when trying to load the episode from " + sickbeard.indexerApi(
self.indexer).name + ": " + e.message, logger.WARNING) self.indexer).name + ": " + e.message, logger.WARNING)
deleteEp = True deleteEp = True
@ -921,9 +921,9 @@ class TVShow(object):
logger.log(u'Retrieving show info from IMDb', logger.DEBUG) logger.log(u'Retrieving show info from IMDb', logger.DEBUG)
try: try:
self._get_imdb_info() self._get_imdb_info()
except imdb_exceptions.IMDbError, e: except imdb_exceptions.IMDbError as e:
logger.log(u'Something is wrong with IMDb api: ' + ex(e), logger.WARNING) logger.log(u'Something is wrong with IMDb api: ' + ex(e), logger.WARNING)
except Exception, e: except Exception as e:
logger.log(u'Error loading IMDb info: ' + ex(e), logger.ERROR) logger.log(u'Error loading IMDb info: ' + ex(e), logger.ERROR)
logger.log(u'' + traceback.format_exc(), logger.DEBUG) logger.log(u'' + traceback.format_exc(), logger.DEBUG)
@ -1051,7 +1051,7 @@ class TVShow(object):
else: else:
os.remove(cache_file) os.remove(cache_file)
except OSError, e: except OSError as e:
logger.log(u'Unable to %s %s: %s / %s' % (action, cache_file, repr(e), str(e)), logger.WARNING) logger.log(u'Unable to %s %s: %s / %s' % (action, cache_file, repr(e), str(e)), logger.WARNING)
# remove entire show folder # remove entire show folder
@ -1079,7 +1079,7 @@ class TVShow(object):
except exceptions.ShowDirNotFoundException: except exceptions.ShowDirNotFoundException:
logger.log(u"Show folder does not exist, no need to %s %s" % (action, self._location), logger.WARNING) logger.log(u"Show folder does not exist, no need to %s %s" % (action, self._location), logger.WARNING)
except OSError, e: except OSError as e:
logger.log(u'Unable to %s %s: %s / %s' % (action, self._location, repr(e), str(e)), logger.WARNING) logger.log(u'Unable to %s %s: %s / %s' % (action, self._location, repr(e), str(e)), logger.WARNING)
def populateCache(self): def populateCache(self):
@ -1681,7 +1681,7 @@ class TVEpisode(object):
else: else:
myEp = cachedSeason[episode] myEp = cachedSeason[episode]
except (sickbeard.indexer_error, IOError), e: except (sickbeard.indexer_error, IOError) as e:
logger.log(u"" + sickbeard.indexerApi(self.indexer).name + " threw up an error: " + ex(e), logger.DEBUG) logger.log(u"" + sickbeard.indexerApi(self.indexer).name + " threw up an error: " + ex(e), logger.DEBUG)
# if the episode is already valid just log it, if not throw it up # if the episode is already valid just log it, if not throw it up
if self.name: if self.name:
@ -1846,12 +1846,12 @@ class TVEpisode(object):
if ek.ek(os.path.isfile, nfoFile): if ek.ek(os.path.isfile, nfoFile):
try: try:
showXML = etree.ElementTree(file=nfoFile) showXML = etree.ElementTree(file=nfoFile)
except (SyntaxError, ValueError), e: except (SyntaxError, ValueError) as e:
logger.log(u"Error loading the NFO, backing up the NFO and skipping for now: " + ex(e), logger.log(u"Error loading the NFO, backing up the NFO and skipping for now: " + ex(e),
logger.ERROR) # TODO: figure out what's wrong and fix it logger.ERROR) # TODO: figure out what's wrong and fix it
try: try:
ek.ek(os.rename, nfoFile, nfoFile + ".old") ek.ek(os.rename, nfoFile, nfoFile + ".old")
except Exception, e: except Exception as e:
logger.log( logger.log(
u"Failed to rename your episode's NFO file - you need to delete it or fix it: " + ex(e), u"Failed to rename your episode's NFO file - you need to delete it or fix it: " + ex(e),
logger.ERROR) logger.ERROR)
@ -2175,7 +2175,7 @@ class TVEpisode(object):
try: try:
np = NameParser(name, showObj=show, naming_pattern=True) np = NameParser(name, showObj=show, naming_pattern=True)
parse_result = np.parse(name) parse_result = np.parse(name)
except (InvalidNameException, InvalidShowException), e: except (InvalidNameException, InvalidShowException) as e:
logger.log(u"Unable to get parse release_group: " + ex(e), logger.DEBUG) logger.log(u"Unable to get parse release_group: " + ex(e), logger.DEBUG)
return '' return ''

View file

@ -42,7 +42,7 @@ class CacheDBConnection(db.DBConnection):
try: try:
if not self.hasTable('lastUpdate'): if not self.hasTable('lastUpdate'):
self.action('CREATE TABLE lastUpdate (provider TEXT, time NUMERIC)') self.action('CREATE TABLE lastUpdate (provider TEXT, time NUMERIC)')
except Exception, e: except Exception as e:
if str(e) != 'table lastUpdate already exists': if str(e) != 'table lastUpdate already exists':
raise raise

View file

@ -379,7 +379,7 @@ class GitUpdateManager(UpdateManager):
else: else:
try: try:
self._check_github_for_update() self._check_github_for_update()
except Exception, e: except Exception as e:
logger.log(u"Unable to contact github, can't check for update: " + repr(e), logger.ERROR) logger.log(u"Unable to contact github, can't check for update: " + repr(e), logger.ERROR)
return False return False
@ -464,7 +464,7 @@ class SourceUpdateManager(UpdateManager):
# need this to run first to set self._newest_commit_hash # need this to run first to set self._newest_commit_hash
try: try:
self._check_github_for_update() self._check_github_for_update()
except Exception, e: except Exception as e:
logger.log(u"Unable to contact github, can't check for update: " + repr(e), logger.ERROR) logger.log(u"Unable to contact github, can't check for update: " + repr(e), logger.ERROR)
return False return False
@ -611,7 +611,7 @@ class SourceUpdateManager(UpdateManager):
os.chmod(new_path, stat.S_IWRITE) os.chmod(new_path, stat.S_IWRITE)
os.remove(new_path) os.remove(new_path)
os.renames(old_path, new_path) os.renames(old_path, new_path)
except Exception, e: except Exception as e:
logger.log(u"Unable to update " + new_path + ': ' + ex(e), logger.DEBUG) logger.log(u"Unable to update " + new_path + ': ' + ex(e), logger.DEBUG)
os.remove(old_path) # Trash the updated file without moving in new path os.remove(old_path) # Trash the updated file without moving in new path
continue continue
@ -623,7 +623,7 @@ class SourceUpdateManager(UpdateManager):
sickbeard.CUR_COMMIT_HASH = self._newest_commit_hash sickbeard.CUR_COMMIT_HASH = self._newest_commit_hash
sickbeard.CUR_COMMIT_BRANCH = self.branch sickbeard.CUR_COMMIT_BRANCH = self.branch
except Exception, e: except Exception as e:
logger.log(u"Error while trying to update: " + ex(e), logger.ERROR) logger.log(u"Error while trying to update: " + ex(e), logger.ERROR)
logger.log(u"Traceback: " + traceback.format_exc(), logger.DEBUG) logger.log(u"Traceback: " + traceback.format_exc(), logger.DEBUG)
return False return False

View file

@ -119,7 +119,7 @@ class Api(webserve.BaseHandler):
else: # if debug was not set we wrap the "call_dispatcher" in a try block to assure a json output else: # if debug was not set we wrap the "call_dispatcher" in a try block to assure a json output
try: try:
outDict = _call_dispatcher(self, args, kwargs) outDict = _call_dispatcher(self, args, kwargs)
except Exception, e: # real internal error oohhh nooo :( except Exception as e: # real internal error oohhh nooo :(
logger.log(u"API :: " + ex(e), logger.ERROR) logger.log(u"API :: " + ex(e), logger.ERROR)
errorData = {"error_msg": ex(e), errorData = {"error_msg": ex(e),
"args": args, "args": args,
@ -140,7 +140,7 @@ class Api(webserve.BaseHandler):
if 'jsonp' in self.request.query_arguments: if 'jsonp' in self.request.query_arguments:
out = self.request.arguments['jsonp'] + '(' + out + ');' # wrap with JSONP call if requested out = self.request.arguments['jsonp'] + '(' + out + ');' # wrap with JSONP call if requested
except Exception, e: # if we fail to generate the output fake an error except Exception as e: # if we fail to generate the output fake an error
logger.log(u'API :: ' + traceback.format_exc(), logger.DEBUG) logger.log(u'API :: ' + traceback.format_exc(), logger.DEBUG)
out = '{"result":"' + result_type_map[RESULT_ERROR] + '", "message": "error while composing output: "' + ex( out = '{"result":"' + result_type_map[RESULT_ERROR] + '", "message": "error while composing output: "' + ex(
e) + '"}' e) + '"}'
@ -205,7 +205,7 @@ def call_dispatcher(handler, args, kwargs):
curOutDict = TVDBShorthandWrapper(handler, curArgs, curKwargs, cmd).run() curOutDict = TVDBShorthandWrapper(handler, curArgs, curKwargs, cmd).run()
else: else:
curOutDict = _responds(RESULT_ERROR, "No such cmd: '" + cmd + "'") curOutDict = _responds(RESULT_ERROR, "No such cmd: '" + cmd + "'")
except ApiError, e: # Api errors that we raised, they are harmless except ApiError as e: # Api errors that we raised, they are harmless
curOutDict = _responds(RESULT_ERROR, msg=ex(e)) curOutDict = _responds(RESULT_ERROR, msg=ex(e))
else: # if someone chained one of the forbiden cmds they will get an error for this one cmd else: # if someone chained one of the forbiden cmds they will get an error for this one cmd
curOutDict = _responds(RESULT_ERROR, msg="The cmd '" + cmd + "' is not supported while chaining") curOutDict = _responds(RESULT_ERROR, msg="The cmd '" + cmd + "' is not supported while chaining")
@ -1609,7 +1609,7 @@ class CMD_SickBeardSearchIndexers(ApiCall):
try: try:
apiData = t[str(self.name).encode()] apiData = t[str(self.name).encode()]
except Exception, e: except Exception as e:
pass pass
if not apiData: if not apiData:
@ -2537,7 +2537,7 @@ class CMD_ShowUpdate(ApiCall):
try: try:
sickbeard.showQueueScheduler.action.updateShow(showObj, True) #@UndefinedVariable sickbeard.showQueueScheduler.action.updateShow(showObj, True) #@UndefinedVariable
return _responds(RESULT_SUCCESS, msg=str(showObj.name) + " has queued to be updated") return _responds(RESULT_SUCCESS, msg=str(showObj.name) + " has queued to be updated")
except exceptions.CantUpdateException, e: except exceptions.CantUpdateException as e:
logger.log(u"API:: Unable to update " + str(showObj.name) + ". " + str(ex(e)), logger.ERROR) logger.log(u"API:: Unable to update " + str(showObj.name) + ". " + str(ex(e)), logger.ERROR)
return _responds(RESULT_FAILURE, msg="Unable to update " + str(showObj.name)) return _responds(RESULT_FAILURE, msg="Unable to update " + str(showObj.name))

View file

@ -1262,7 +1262,7 @@ class Home(MainHandler):
try: try:
anime = adba.Anime(sickbeard.ADBA_CONNECTION, name=showObj.name) anime = adba.Anime(sickbeard.ADBA_CONNECTION, name=showObj.name)
t.groups = anime.get_groups() t.groups = anime.get_groups()
except Exception, e: except Exception as e:
t.groups.append(dict([('name', 'Fail:AniDB connect. Restart sg else check debug log'), ('rating', ''), ('range', '')])) t.groups.append(dict([('name', 'Fail:AniDB connect. Restart sg else check debug log'), ('rating', ''), ('range', '')]))
else: else:
t.groups.append(dict([('name', 'Did not initialise AniDB. Check debug log if reqd.'), ('rating', ''), ('range', '')])) t.groups.append(dict([('name', 'Did not initialise AniDB. Check debug log if reqd.'), ('rating', ''), ('range', '')]))
@ -1344,7 +1344,7 @@ class Home(MainHandler):
showObj.flatten_folders = flatten_folders showObj.flatten_folders = flatten_folders
try: try:
sickbeard.showQueueScheduler.action.refreshShow(showObj) # @UndefinedVariable sickbeard.showQueueScheduler.action.refreshShow(showObj) # @UndefinedVariable
except exceptions.CantRefreshException, e: except exceptions.CantRefreshException as e:
errors.append('Unable to refresh this show: ' + ex(e)) errors.append('Unable to refresh this show: ' + ex(e))
showObj.paused = paused showObj.paused = paused
@ -1374,7 +1374,7 @@ class Home(MainHandler):
showObj.location = location showObj.location = location
try: try:
sickbeard.showQueueScheduler.action.refreshShow(showObj) # @UndefinedVariable sickbeard.showQueueScheduler.action.refreshShow(showObj) # @UndefinedVariable
except exceptions.CantRefreshException, e: except exceptions.CantRefreshException as e:
errors.append('Unable to refresh this show:' + ex(e)) errors.append('Unable to refresh this show:' + ex(e))
# grab updated info from TVDB # grab updated info from TVDB
# showObj.loadEpisodesFromIndexer() # showObj.loadEpisodesFromIndexer()
@ -1391,7 +1391,7 @@ class Home(MainHandler):
try: try:
sickbeard.showQueueScheduler.action.updateShow(showObj, True) # @UndefinedVariable sickbeard.showQueueScheduler.action.updateShow(showObj, True) # @UndefinedVariable
time.sleep(cpu_presets[sickbeard.CPU_PRESET]) time.sleep(cpu_presets[sickbeard.CPU_PRESET])
except exceptions.CantUpdateException, e: except exceptions.CantUpdateException as e:
errors.append('Unable to force an update on the show.') errors.append('Unable to force an update on the show.')
if do_update_exceptions: if do_update_exceptions:
@ -1399,14 +1399,14 @@ class Home(MainHandler):
scene_exceptions.update_scene_exceptions(showObj.indexerid, exceptions_list) # @UndefinedVdexerid) scene_exceptions.update_scene_exceptions(showObj.indexerid, exceptions_list) # @UndefinedVdexerid)
buildNameCache(showObj) buildNameCache(showObj)
time.sleep(cpu_presets[sickbeard.CPU_PRESET]) time.sleep(cpu_presets[sickbeard.CPU_PRESET])
except exceptions.CantUpdateException, e: except exceptions.CantUpdateException as e:
errors.append('Unable to force an update on scene exceptions of the show.') errors.append('Unable to force an update on scene exceptions of the show.')
if do_update_scene_numbering: if do_update_scene_numbering:
try: try:
sickbeard.scene_numbering.xem_refresh(showObj.indexerid, showObj.indexer) # @UndefinedVariable sickbeard.scene_numbering.xem_refresh(showObj.indexerid, showObj.indexer) # @UndefinedVariable
time.sleep(cpu_presets[sickbeard.CPU_PRESET]) time.sleep(cpu_presets[sickbeard.CPU_PRESET])
except exceptions.CantUpdateException, e: except exceptions.CantUpdateException as e:
errors.append('Unable to force an update on scene numbering of the show.') errors.append('Unable to force an update on scene numbering of the show.')
if directCall: if directCall:
@ -1456,7 +1456,7 @@ class Home(MainHandler):
# force the update from the DB # force the update from the DB
try: try:
sickbeard.showQueueScheduler.action.refreshShow(showObj) # @UndefinedVariable sickbeard.showQueueScheduler.action.refreshShow(showObj) # @UndefinedVariable
except exceptions.CantRefreshException, e: except exceptions.CantRefreshException as e:
ui.notifications.error('Unable to refresh this show.', ui.notifications.error('Unable to refresh this show.',
ex(e)) ex(e))
@ -1477,7 +1477,7 @@ class Home(MainHandler):
# force the update # force the update
try: try:
sickbeard.showQueueScheduler.action.updateShow(showObj, bool(force), bool(web)) sickbeard.showQueueScheduler.action.updateShow(showObj, bool(force), bool(web))
except exceptions.CantUpdateException, e: except exceptions.CantUpdateException as e:
ui.notifications.error('Unable to update this show.', ui.notifications.error('Unable to update this show.',
ex(e)) ex(e))
@ -1985,7 +1985,7 @@ class Home(MainHandler):
try: try:
anime = adba.Anime(sickbeard.ADBA_CONNECTION, name=show_name) anime = adba.Anime(sickbeard.ADBA_CONNECTION, name=show_name)
groups = anime.get_groups() groups = anime.get_groups()
except Exception, e: except Exception as e:
logger.log(u'exception msg: ' + str(e), logger.DEBUG) logger.log(u'exception msg: ' + str(e), logger.DEBUG)
return json.dumps({'result': 'fail', 'resp': 'connect'}) return json.dumps({'result': 'fail', 'resp': 'connect'})
@ -2062,7 +2062,7 @@ class NewHomeAddShows(Home):
try: try:
# add search results # add search results
results.setdefault(indexer, []).extend(t[search_term]) results.setdefault(indexer, []).extend(t[search_term])
except Exception, e: except Exception as e:
continue continue
map(final_results.extend, map(final_results.extend,
@ -3117,7 +3117,7 @@ class Manage(MainHandler):
try: try:
sickbeard.showQueueScheduler.action.updateShow(showObj, True, True) # @UndefinedVariable sickbeard.showQueueScheduler.action.updateShow(showObj, True, True) # @UndefinedVariable
updates.append(showObj.name) updates.append(showObj.name)
except exceptions.CantUpdateException, e: except exceptions.CantUpdateException as e:
errors.append('Unable to update show ' + showObj.name + ': ' + ex(e)) errors.append('Unable to update show ' + showObj.name + ': ' + ex(e))
# don't bother refreshing shows that were updated anyway # don't bother refreshing shows that were updated anyway
@ -3125,7 +3125,7 @@ class Manage(MainHandler):
try: try:
sickbeard.showQueueScheduler.action.refreshShow(showObj) # @UndefinedVariable sickbeard.showQueueScheduler.action.refreshShow(showObj) # @UndefinedVariable
refreshes.append(showObj.name) refreshes.append(showObj.name)
except exceptions.CantRefreshException, e: except exceptions.CantRefreshException as e:
errors.append('Unable to refresh show ' + showObj.name + ': ' + ex(e)) errors.append('Unable to refresh show ' + showObj.name + ': ' + ex(e))
if curShowID in toRename: if curShowID in toRename:
@ -3612,7 +3612,7 @@ class ConfigGeneral(Config):
try: try:
pulls = sickbeard.versionCheckScheduler.action.list_remote_pulls() pulls = sickbeard.versionCheckScheduler.action.list_remote_pulls()
return json.dumps({'result': 'success', 'pulls': pulls}) return json.dumps({'result': 'success', 'pulls': pulls})
except Exception, e: except Exception as e:
logger.log(u'exception msg: ' + str(e), logger.DEBUG) logger.log(u'exception msg: ' + str(e), logger.DEBUG)
return json.dumps({'result': 'fail'}) return json.dumps({'result': 'fail'})
@ -3621,7 +3621,7 @@ class ConfigGeneral(Config):
try: try:
branches = sickbeard.versionCheckScheduler.action.list_remote_branches() branches = sickbeard.versionCheckScheduler.action.list_remote_branches()
return json.dumps({'result': 'success', 'branches': branches}) return json.dumps({'result': 'success', 'branches': branches})
except Exception, e: except Exception as e:
logger.log(u'exception msg: ' + str(e), logger.DEBUG) logger.log(u'exception msg: ' + str(e), logger.DEBUG)
return json.dumps({'result': 'fail'}) return json.dumps({'result': 'fail'})
@ -3905,7 +3905,7 @@ class ConfigPostProcessing(Config):
return 'supported' return 'supported'
logger.log(u'Rar Not Supported: Can not read the content of test file', logger.ERROR) logger.log(u'Rar Not Supported: Can not read the content of test file', logger.ERROR)
return 'not supported' return 'not supported'
except Exception, e: except Exception as e:
logger.log(u'Rar Not Supported: ' + ex(e), logger.ERROR) logger.log(u'Rar Not Supported: ' + ex(e), logger.ERROR)
return 'not supported' return 'not supported'

View file

@ -0,0 +1,28 @@
import unittest
import subprocess
import os
class CompatibilityTests(unittest.TestCase):
def test_except(self):
path = os.path.abspath('..')
pyfiles = []
for rootdir in ['sickbeard', 'tests']:
for dirpath, subdirs, files in os.walk(os.path.join(path, rootdir)):
for x in files:
if x.endswith('.py'):
pyfiles.append(os.path.join(dirpath, x))
pyfiles.append(os.path.join(path,'SickBeard.py'))
output = subprocess.Popen('2to3 -f except %s' % ' '.join(pyfiles), shell=True, stdout=subprocess.PIPE,
stderr=subprocess.PIPE).communicate()[0]
if output:
print('Changes to be made for Python 2/3 compatibility as follows:')
print(output)
self.fail('Python 2/3 incompatibility detected')
if __name__ == '__main__':
    suite = unittest.TestLoader().loadTestsFromTestCase(CompatibilityTests)
    unittest.TextTestRunner(verbosity=2).run(suite)
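The new regression test above collects every .py file under sickbeard and tests plus SickBeard.py, shells out to 2to3 with only the except fixer enabled, and fails if 2to3 would still rewrite anything. A rough stand-alone equivalent is sketched below, assuming it is run from the repository root and that 2to3 is on the PATH (the exact invocation is illustrative, not part of the test):

import subprocess

# Report, without rewriting, any remaining py2-only except clauses.
proc = subprocess.Popen('2to3 -f except sickbeard tests SickBeard.py', shell=True,
                        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output = proc.communicate()[0]
print(output.decode('utf-8', 'replace') or 'No py2-only except clauses found')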

View file

@ -149,7 +149,7 @@ class TestCacheDBConnection(TestDBConnection, object):
sql = "CREATE TABLE " + providerName + " (name TEXT, season NUMERIC, episodes TEXT, indexerid NUMERIC, url TEXT, time NUMERIC, quality TEXT);" sql = "CREATE TABLE " + providerName + " (name TEXT, season NUMERIC, episodes TEXT, indexerid NUMERIC, url TEXT, time NUMERIC, quality TEXT);"
self.connection.execute(sql) self.connection.execute(sql)
self.connection.commit() self.connection.commit()
except sqlite3.OperationalError, e: except sqlite3.OperationalError as e:
if str(e) != "table " + providerName + " already exists": if str(e) != "table " + providerName + " already exists":
raise raise
@ -158,7 +158,7 @@ class TestCacheDBConnection(TestDBConnection, object):
sql = "CREATE TABLE lastUpdate (provider TEXT, time NUMERIC);" sql = "CREATE TABLE lastUpdate (provider TEXT, time NUMERIC);"
self.connection.execute(sql) self.connection.execute(sql)
self.connection.commit() self.connection.commit()
except sqlite3.OperationalError, e: except sqlite3.OperationalError as e:
if str(e) != "table lastUpdate already exists": if str(e) != "table lastUpdate already exists":
raise raise

View file

@ -61,7 +61,7 @@ class XEMBasicTests(test.SickbeardTestDBCase):
try: try:
curShow = TVShow(int(sqlShow["indexer"]), int(sqlShow["indexer_id"])) curShow = TVShow(int(sqlShow["indexer"]), int(sqlShow["indexer_id"]))
sickbeard.showList.append(curShow) sickbeard.showList.append(curShow)
except Exception, e: except Exception as e:
print "There was an error creating the show" print "There was an error creating the show"
def test_formating(self): def test_formating(self):