Mirror of https://github.com/SickGear/SickGear.git, synced 2025-01-05 17:43:37 +00:00

Merge pull request #399 from adam111316/feature/AddPy23Compat

Change py2 exception clauses to py2/3 compatible clauses

This commit is contained in: commit 785a2451f4

64 changed files with 237 additions and 207 deletions

@@ -29,6 +29,8 @@
 * Add coverage testing and coveralls support
 * Update feedparser library 5.1.3 to 5.2.0 (8c62940)
 * Remove feedcache implementation and library
+* Change py2 exception clauses to py2/3 compatible clauses
+* Add py2/3 regression testing for exception clauses

 [develop changelog]
 * Update Requests library 2.7.0 (ab1f493) to 2.7.0 (8b5e457)

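For readers skimming the hunks that follow: the old `except ExcType, e:` spelling is Python 2-only and is a SyntaxError under Python 3, whereas `except ExcType as e:` is accepted by Python 2.6+ and Python 3 alike, so every hunk below makes the same one-line substitution. A minimal before/after sketch (the filename and message are invented for illustration, not taken from this commit):

    # Python 2-only spelling - a SyntaxError on Python 3
    try:
        fh = open('config.ini')
    except IOError, e:
        print 'could not read config: %s' % e

    # Python 2.6+/3 compatible spelling, as applied throughout this commit
    try:
        fh = open('config.ini')
    except IOError as e:
        print('could not read config: %s' % e)

Where several exception types are caught together, the parenthesized tuple form `except (OSError, IOError) as e:` is the py2/3-compatible equivalent, as several hunks below show.
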
@@ -387,7 +387,7 @@ class SickGear(object):
 pid = os.fork()  # @UndefinedVariable - only available in UNIX
 if pid != 0:
 os._exit(0)
-except OSError, e:
+except OSError as e:
 sys.stderr.write('fork #1 failed: %d (%s)\n' % (e.errno, e.strerror))
 sys.exit(1)

@@ -402,7 +402,7 @@ class SickGear(object):
 pid = os.fork()  # @UndefinedVariable - only available in UNIX
 if pid != 0:
 os._exit(0)
-except OSError, e:
+except OSError as e:
 sys.stderr.write('fork #2 failed: %d (%s)\n' % (e.errno, e.strerror))
 sys.exit(1)

@@ -412,7 +412,7 @@ class SickGear(object):
 logger.log(u'Writing PID: %s to %s' % (pid, self.PIDFILE))
 try:
 file(self.PIDFILE, 'w').write('%s\n' % pid)
-except IOError, e:
+except IOError as e:
 logger.log_error_and_exit(
 u'Unable to write PID file: %s Error: %s [%s]' % (self.PIDFILE, e.strerror, e.errno))

@@ -456,7 +456,7 @@ class SickGear(object):
 curShow = TVShow(int(sqlShow['indexer']), int(sqlShow['indexer_id']))
 curShow.nextEpisode()
 sickbeard.showList.append(curShow)
-except Exception, e:
+except Exception as e:
 logger.log(
 u'There was an error creating the show in %s: %s' % (sqlShow['location'], str(e).decode('utf-8',
 'replace')),

@@ -22,7 +22,7 @@ try:
 fp = open(configFilename, 'r')
 config.readfp(fp)
 fp.close()
-except IOError, e:
+except IOError as e:
 print 'Could not find/read Sickbeard config.ini: ' + str(e)
 print 'Possibly wrong mediaToSickbeard.py location. Ensure the file is in the autoProcessTV subdir of your Sickbeard installation'
 time.sleep(3)

@@ -180,7 +180,7 @@ def main():
 sess = requests.Session()
 sess.post(login_url, data={'username': username, 'password': password}, stream=True, verify=False)
 response = sess.get(url, auth=(username, password), params=params, verify=False, allow_redirects=False)
-except Exception, e:
+except Exception as e:
 scriptlogger.error(u': Unknown exception raised when opening url: ' + str(e))
 time.sleep(3)
 sys.exit()

@@ -84,7 +84,7 @@ def foldersAtPath(path, includeParent=False, includeFiles=False):
 try:
 fileList = [{'name': filename, 'path': ek.ek(os.path.join, path, filename)} for filename in
 ek.ek(os.listdir, path)]
-except OSError, e:
+except OSError as e:
 logger.log(u'Unable to open %s: %r / %s' % (path, e, e), logger.WARNING)
 fileList = [{'name': filename, 'path': ek.ek(os.path.join, parentPath, filename)} for filename in
 ek.ek(os.listdir, parentPath)]

@@ -53,19 +53,19 @@ class GenericClient(object):
 try:
 response = self.session.__getattribute__(method)(self.url, params=params, data=data, files=files,
 timeout=120, verify=False)
-except requests.exceptions.ConnectionError, e:
+except requests.exceptions.ConnectionError as e:
 logger.log(self.name + u': Unable to connect ' + ex(e), logger.ERROR)
 return False
 except (requests.exceptions.MissingSchema, requests.exceptions.InvalidURL):
 logger.log(self.name + u': Invalid Host', logger.ERROR)
 return False
-except requests.exceptions.HTTPError, e:
+except requests.exceptions.HTTPError as e:
 logger.log(self.name + u': Invalid HTTP Request ' + ex(e), logger.ERROR)
 return False
-except requests.exceptions.Timeout, e:
+except requests.exceptions.Timeout as e:
 logger.log(self.name + u': Connection Timeout ' + ex(e), logger.ERROR)
 return False
-except Exception, e:
+except Exception as e:
 logger.log(self.name + u': Unknown exception raised when sending torrent to ' + self.name + ': ' + ex(e),
 logger.ERROR)
 return False

@@ -199,7 +199,7 @@ class GenericClient(object):
 if result.priority != 0 and not self._set_torrent_priority(result):
 logger.log(self.name + u': Unable to set priority for Torrent', logger.ERROR)

-except Exception, e:
+except Exception as e:
 logger.log(self.name + u': Failed Sending Torrent: ' + result.name + ' - ' + result.hash, logger.ERROR)
 logger.log(self.name + u': Exception raised when sending torrent: ' + ex(e), logger.DEBUG)
 return r_code

@@ -95,7 +95,7 @@ class DBConnection(object):
 self.connection.commit()
 logger.log(u'Transaction with ' + str(len(querylist)) + u' queries executed', logger.DEBUG)
 return sqlResult
-except sqlite3.OperationalError, e:
+except sqlite3.OperationalError as e:
 sqlResult = []
 if self.connection:
 self.connection.rollback()

@@ -106,7 +106,7 @@ class DBConnection(object):
 else:
 logger.log(u'DB error: ' + ex(e), logger.ERROR)
 raise
-except sqlite3.DatabaseError, e:
+except sqlite3.DatabaseError as e:
 if self.connection:
 self.connection.rollback()
 logger.log(u'Fatal error executing query: ' + ex(e), logger.ERROR)

@@ -135,7 +135,7 @@ class DBConnection(object):
 self.connection.commit()
 # get out of the connection attempt loop since we were successful
 break
-except sqlite3.OperationalError, e:
+except sqlite3.OperationalError as e:
 if 'unable to open database file' in e.args[0] or 'database is locked' in e.args[0]:
 logger.log(u'DB error: ' + ex(e), logger.WARNING)
 attempt += 1

@@ -143,7 +143,7 @@ class DBConnection(object):
 else:
 logger.log(u'DB error: ' + ex(e), logger.ERROR)
 raise
-except sqlite3.DatabaseError, e:
+except sqlite3.DatabaseError as e:
 logger.log(u'Fatal error executing query: ' + ex(e), logger.ERROR)
 raise

@@ -252,7 +252,7 @@ def _processUpgrade(connection, upgradeClass):
 logger.log(u'Database upgrade required: %s' % prettyName(upgradeClass.__name__), logger.MESSAGE)
 try:
 instance.execute()
-except sqlite3.DatabaseError, e:
+except sqlite3.DatabaseError as e:
 # attemping to restore previous DB backup and perform upgrade
 try:
 instance.execute()

@@ -442,7 +442,7 @@ def MigrationCode(myDB):
 try:
 update = schema[db_version](myDB)
 db_version = update.execute()
-except Exception, e:
+except Exception as e:
 myDB.close()
 logger.log(u'Failed to update database with error: %s attempting recovery...' % ex(e), logger.ERROR)

@@ -132,7 +132,7 @@ def revertEpisode(epObj):
 epObj.status = WANTED
 epObj.saveToDB()

-except EpisodeNotFoundException, e:
+except EpisodeNotFoundException as e:
 logger.log(u"Unable to create episode, please set its status manually: " + ex(e),
 logger.WARNING)

@@ -146,7 +146,7 @@ def markFailed(epObj):
 epObj.status = Quality.compositeStatus(FAILED, quality)
 epObj.saveToDB()

-except EpisodeNotFoundException, e:
+except EpisodeNotFoundException as e:
 logger.log(u"Unable to get episode, please set its status manually: " + ex(e), logger.WARNING)

 return log_str

@@ -286,7 +286,7 @@ def hardlinkFile(srcFile, destFile):
 try:
 ek.ek(link, srcFile, destFile)
 fixSetGroupID(destFile)
-except Exception, e:
+except Exception as e:
 logger.log(u"Failed to create hardlink of " + srcFile + " at " + destFile + ": " + ex(e) + ". Copying instead",
 logger.ERROR)
 copyFile(srcFile, destFile)

@@ -326,7 +326,7 @@ def make_dirs(path):
 try:
 logger.log(u"Folder " + path + " doesn't exist, creating it", logger.DEBUG)
 ek.ek(os.makedirs, path)
-except (OSError, IOError), e:
+except (OSError, IOError) as e:
 logger.log(u"Failed creating " + path + " : " + ex(e), logger.ERROR)
 return False

@@ -350,7 +350,7 @@ def make_dirs(path):
 chmodAsParent(ek.ek(os.path.normpath, sofar))
 # do the library update for synoindex
 notifiers.synoindex_notifier.addFolder(sofar)
-except (OSError, IOError), e:
+except (OSError, IOError) as e:
 logger.log(u"Failed creating " + sofar + " : " + ex(e), logger.ERROR)
 return False

@@ -397,7 +397,7 @@ def rename_ep_file(cur_path, new_path, old_path_length=0):
 try:
 logger.log(u"Renaming file from " + cur_path + " to " + new_path)
 ek.ek(os.rename, cur_path, new_path)
-except (OSError, IOError), e:
+except (OSError, IOError) as e:
 logger.log(u"Failed renaming " + cur_path + " to " + new_path + ": " + ex(e), logger.ERROR)
 return False

@@ -433,7 +433,7 @@ def delete_empty_folders(check_empty_dir, keep_dir=None):
 ek.ek(os.rmdir, check_empty_dir)
 # do the library update for synoindex
 notifiers.synoindex_notifier.deleteFolder(check_empty_dir)
-except OSError, e:
+except OSError as e:
 logger.log(u"Unable to delete " + check_empty_dir + ": " + repr(e) + " / " + str(e), logger.WARNING)
 break
 check_empty_dir = ek.ek(os.path.dirname, check_empty_dir)

@@ -596,7 +596,7 @@ def create_https_certificates(ssl_cert, ssl_key):
 from OpenSSL import crypto  # @UnresolvedImport
 from lib.certgen import createKeyPair, createCertRequest, createCertificate, TYPE_RSA, \
 serial  # @UnresolvedImport
-except Exception, e:
+except Exception as e:
 logger.log(u"pyopenssl module missing, please install for https access", logger.WARNING)
 return False

@@ -642,7 +642,7 @@ def parse_xml(data, del_xmlns=False):

 try:
 parsedXML = etree.fromstring(data)
-except Exception, e:
+except Exception as e:
 logger.log(u"Error trying to parse xml data. Error: " + ex(e), logger.DEBUG)
 parsedXML = None

@@ -664,7 +664,7 @@ def backupVersionedFile(old_file, version):
 shutil.copy(old_file, new_file)
 logger.log(u'Backup done', logger.DEBUG)
 break
-except Exception, e:
+except Exception as e:
 logger.log(u'Error while trying to back up %s to %s : %s' % (old_file, new_file, ex(e)), logger.WARNING)
 num_tries += 1
 time.sleep(3)

@@ -692,7 +692,7 @@ def restoreVersionedFile(backup_file, version):
 u"Trying to backup " + new_file + " to " + new_file + "." + "r" + str(version) + " before restoring backup",
 logger.DEBUG)
 shutil.move(new_file, new_file + '.' + 'r' + str(version))
-except Exception, e:
+except Exception as e:
 logger.log(
 u"Error while trying to backup DB file " + restore_file + " before proceeding with restore: " + ex(e),
 logger.WARNING)

@@ -708,7 +708,7 @@ def restoreVersionedFile(backup_file, version):
 shutil.copy(restore_file, new_file)
 logger.log(u"Restore done", logger.DEBUG)
 break
-except Exception, e:
+except Exception as e:
 logger.log(u"Error while trying to restore " + restore_file + ": " + ex(e), logger.WARNING)
 numTries += 1
 time.sleep(1)

@@ -946,14 +946,14 @@ def set_up_anidb_connection():
 auth = False
 try:
 auth = sickbeard.ADBA_CONNECTION.authed()
-except Exception, e:
+except Exception as e:
 logger.log(u'exception msg: ' + str(e))
 pass

 if not auth:
 try:
 sickbeard.ADBA_CONNECTION.auth(sickbeard.ANIDB_USERNAME, sickbeard.ANIDB_PASSWORD)
-except Exception, e:
+except Exception as e:
 logger.log(u'exception msg: ' + str(e))
 return False
 else:

@@ -1164,16 +1164,16 @@ def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=N
 % (url, resp.status_code, http_err_text), logger.DEBUG)
 return

-except requests.exceptions.HTTPError, e:
+except requests.exceptions.HTTPError as e:
 logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
 return
-except requests.exceptions.ConnectionError, e:
+except requests.exceptions.ConnectionError as e:
 logger.log(u"Connection error " + str(e.message) + " while loading URL " + url, logger.WARNING)
 return
-except requests.exceptions.Timeout, e:
+except requests.exceptions.Timeout as e:
 logger.log(u"Connection timed out " + str(e.message) + " while loading URL " + url, logger.WARNING)
 return
-except requests.exceptions.ReadTimeout, e:
+except requests.exceptions.ReadTimeout as e:
 logger.log(u'Read timed out ' + str(e.message) + ' while loading URL ' + url, logger.WARNING)
 return
 except Exception:

@@ -1230,19 +1230,19 @@ def download_file(url, filename, session=None):
 fp.flush()

 chmodAsParent(filename)
-except requests.exceptions.HTTPError, e:
+except requests.exceptions.HTTPError as e:
 _remove_file_failed(filename)
 logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
 return False
-except requests.exceptions.ConnectionError, e:
+except requests.exceptions.ConnectionError as e:
 _remove_file_failed(filename)
 logger.log(u"Connection error " + str(e.message) + " while loading URL " + url, logger.WARNING)
 return False
-except requests.exceptions.Timeout, e:
+except requests.exceptions.Timeout as e:
 _remove_file_failed(filename)
 logger.log(u"Connection timed out " + str(e.message) + " while loading URL " + url, logger.WARNING)
 return False
-except EnvironmentError, e:
+except EnvironmentError as e:
 _remove_file_failed(filename)
 logger.log(u"Unable to save the file: " + ex(e), logger.ERROR)
 return False

@@ -1282,7 +1282,7 @@ def clearCache(force=False):
 if force or (update_datetime - cache_file_modified > max_age):
 try:
 ek.ek(os.remove, cache_file)
-except OSError, e:
+except OSError as e:
 logger.log(u"Unable to clean " + cache_root + ": " + repr(e) + " / " + str(e),
 logger.WARNING)
 break

@@ -305,7 +305,7 @@ class GenericMetadata():
 helpers.chmodAsParent(nfo_file_path)

 return True
-except IOError, e:
+except IOError as e:
 logger.log(
 u"Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e),
 logger.ERROR)

@@ -432,7 +432,7 @@ class GenericMetadata():
 data.write(nfo_file, encoding="utf-8")
 nfo_file.close()
 helpers.chmodAsParent(nfo_file_path)
-except IOError, e:
+except IOError as e:
 logger.log(u"Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e),
 logger.ERROR)
 return False

@@ -477,7 +477,7 @@ class GenericMetadata():
 data.write(nfo_file, encoding="utf-8")
 nfo_file.close()
 helpers.chmodAsParent(nfo_file_path)
-except IOError, e:
+except IOError as e:
 logger.log(u"Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e),
 logger.ERROR)
 return False

@@ -725,7 +725,7 @@ class GenericMetadata():
 outFile.write(image_data)
 outFile.close()
 helpers.chmodAsParent(image_path)
-except IOError, e:
+except IOError as e:
 logger.log(
 u"Unable to write image to " + image_path + " - are you sure the show folder is writable? " + ex(e),
 logger.ERROR)

@@ -761,7 +761,7 @@ class GenericMetadata():

 t = sickbeard.indexerApi(show_obj.indexer).indexer(**lINDEXER_API_PARMS)
 indexer_show_obj = t[show_obj.indexerid]
-except (sickbeard.indexer_error, IOError), e:
+except (sickbeard.indexer_error, IOError) as e:
 logger.log(u"Unable to look up show on " + sickbeard.indexerApi(
 show_obj.indexer).name + ", not downloading images: " + ex(e), logger.ERROR)
 return None

@@ -822,7 +822,7 @@ class GenericMetadata():

 t = sickbeard.indexerApi(show_obj.indexer).indexer(**lINDEXER_API_PARMS)
 indexer_show_obj = t[show_obj.indexerid]
-except (sickbeard.indexer_error, IOError), e:
+except (sickbeard.indexer_error, IOError) as e:
 logger.log(u"Unable to look up show on " + sickbeard.indexerApi(
 show_obj.indexer).name + ", not downloading images: " + ex(e), logger.ERROR)
 return result

@@ -875,7 +875,7 @@ class GenericMetadata():

 t = sickbeard.indexerApi(show_obj.indexer).indexer(**lINDEXER_API_PARMS)
 indexer_show_obj = t[show_obj.indexerid]
-except (sickbeard.indexer_error, IOError), e:
+except (sickbeard.indexer_error, IOError) as e:
 logger.log(u"Unable to look up show on " + sickbeard.indexerApi(
 show_obj.indexer).name + ", not downloading images: " + ex(e), logger.ERROR)
 return result

@@ -953,7 +953,7 @@ class GenericMetadata():
 logger.log(u"Invalid Indexer ID (" + str(indexer_id) + "), not using metadata file", logger.WARNING)
 return empty_return

-except Exception, e:
+except Exception as e:
 logger.log(
 u"There was an error parsing your existing metadata file: '" + metadata_path + "' error: " + ex(e),
 logger.WARNING)

@@ -984,7 +984,7 @@ class GenericMetadata():
 elif poster and result['poster_path']:
 return "{0}{1}{2}".format(base_url, max_size, result['poster_path'])

-except Exception, e:
+except Exception as e:
 pass

 logger.log(u"Could not find any posters or background for " + show.name, logger.DEBUG)

@@ -234,9 +234,9 @@ class KODIMetadata(generic.GenericMetadata):
 try:
 t = sickbeard.indexerApi(ep_obj.show.indexer).indexer(**lINDEXER_API_PARMS)
 myShow = t[ep_obj.show.indexerid]
-except sickbeard.indexer_shownotfound, e:
+except sickbeard.indexer_shownotfound as e:
 raise exceptions.ShowNotFoundException(e.message)
-except sickbeard.indexer_error, e:
+except sickbeard.indexer_error as e:
 logger.log(u'Unable to connect to ' + sickbeard.indexerApi(
 ep_obj.show.indexer).name + ' while creating meta files - skipping - ' + ex(e), logger.ERROR)
 return

@@ -236,9 +236,9 @@ class Mede8erMetadata(mediabrowser.MediaBrowserMetadata):

 t = sickbeard.indexerApi(ep_obj.show.indexer).indexer(**lINDEXER_API_PARMS)
 myShow = t[ep_obj.show.indexerid]
-except sickbeard.indexer_shownotfound, e:
+except sickbeard.indexer_shownotfound as e:
 raise exceptions.ShowNotFoundException(e.message)
-except sickbeard.indexer_error, e:
+except sickbeard.indexer_error as e:
 logger.log(u"Unable to connect to TVDB while creating meta files - skipping - " + ex(e), logger.ERROR)
 return False

@@ -408,9 +408,9 @@ class MediaBrowserMetadata(generic.GenericMetadata):
 t = sickbeard.indexerApi(ep_obj.show.indexer).indexer(**lINDEXER_API_PARMS)

 myShow = t[ep_obj.show.indexerid]
-except sickbeard.indexer_shownotfound, e:
+except sickbeard.indexer_shownotfound as e:
 raise exceptions.ShowNotFoundException(e.message)
-except sickbeard.indexer_error, e:
+except sickbeard.indexer_error as e:
 logger.log(u"Unable to connect to " + sickbeard.indexerApi(
 ep_obj.show.indexer).name + " while creating meta files - skipping - " + ex(e), logger.ERROR)
 return False

@@ -181,9 +181,9 @@ class TIVOMetadata(generic.GenericMetadata):

 t = sickbeard.indexerApi(ep_obj.show.indexer).indexer(**lINDEXER_API_PARMS)
 myShow = t[ep_obj.show.indexerid]
-except sickbeard.indexer_shownotfound, e:
+except sickbeard.indexer_shownotfound as e:
 raise exceptions.ShowNotFoundException(str(e))
-except sickbeard.indexer_error, e:
+except sickbeard.indexer_error as e:
 logger.log(u"Unable to connect to " + sickbeard.indexerApi(
 ep_obj.show.indexer).name + " while creating meta files - skipping - " + str(e), logger.ERROR)
 return False

@@ -327,7 +327,7 @@ class TIVOMetadata(generic.GenericMetadata):

 helpers.chmodAsParent(nfo_file_path)

-except EnvironmentError, e:
+except EnvironmentError as e:
 logger.log(u"Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e),
 logger.ERROR)
 return False

@@ -192,9 +192,9 @@ class WDTVMetadata(generic.GenericMetadata):

 t = sickbeard.indexerApi(ep_obj.show.indexer).indexer(**lINDEXER_API_PARMS)
 myShow = t[ep_obj.show.indexerid]
-except sickbeard.indexer_shownotfound, e:
+except sickbeard.indexer_shownotfound as e:
 raise exceptions.ShowNotFoundException(e.message)
-except sickbeard.indexer_error, e:
+except sickbeard.indexer_error as e:
 logger.log(u"Unable to connect to " + sickbeard.indexerApi(
 ep_obj.show.indexer).name + " while creating meta files - skipping - " + ex(e), logger.ERROR)
 return False

@@ -234,9 +234,9 @@ class XBMC_12PlusMetadata(generic.GenericMetadata):
 try:
 t = sickbeard.indexerApi(ep_obj.show.indexer).indexer(**lINDEXER_API_PARMS)
 myShow = t[ep_obj.show.indexerid]
-except sickbeard.indexer_shownotfound, e:
+except sickbeard.indexer_shownotfound as e:
 raise exceptions.ShowNotFoundException(e.message)
-except sickbeard.indexer_error, e:
+except sickbeard.indexer_error as e:
 logger.log(u"Unable to connect to " + sickbeard.indexerApi(
 ep_obj.show.indexer).name + " while creating meta files - skipping - " + ex(e), logger.ERROR)
 return

@@ -95,7 +95,7 @@ class NameParser(object):
 for cur_pattern_num, (cur_pattern_name, cur_pattern) in enumerate(regexItem):
 try:
 cur_regex = re.compile(cur_pattern, re.VERBOSE | re.IGNORECASE)
-except re.error, errormsg:
+except re.error as errormsg:
 logger.log(u"WARNING: Invalid episode_pattern, %s. %s" % (errormsg, cur_pattern))
 else:
 self.compiled_regexes[index].append([cur_pattern_num, cur_pattern_name, cur_regex])

@@ -166,7 +166,7 @@ class NameParser(object):
 day = tmp_month
 try:
 result.air_date = datetime.date(year, month, day)
-except ValueError, e:
+except ValueError as e:
 raise InvalidNameException(ex(e))

 if 'extra_info' in named_groups:

@@ -257,7 +257,7 @@ class NameParser(object):
 except sickbeard.indexer_episodenotfound:
 logger.log(u"Unable to find episode with date " + str(bestResult.air_date) + " for show " + bestResult.show.name + ", skipping", logger.WARNING)
 episode_numbers = []
-except sickbeard.indexer_error, e:
+except sickbeard.indexer_error as e:
 logger.log(u"Unable to contact " + sickbeard.indexerApi(bestResult.show.indexer).name + ": " + ex(e), logger.WARNING)
 episode_numbers = []

@@ -188,7 +188,7 @@ def validate_name(pattern, multi=None, anime_type=None, file_only=False, abd=Fal

 try:
 result = parser.parse(new_name)
-except Exception, e:
+except Exception as e:
 logger.log(u"Unable to parse " + new_name + ", not valid", logger.DEBUG)
 return False

@@ -62,7 +62,7 @@ class Boxcar2Notifier:
 handle = urllib2.urlopen(req, data)
 handle.close()

-except urllib2.URLError, e:
+except urllib2.URLError as e:
 # if we get an error back that doesn't have an error code then who knows what's really happening
 if not hasattr(e, 'code'):
 logger.log(u'BOXCAR2: Notification failed.' + ex(e), logger.ERROR)

@@ -142,7 +142,7 @@ class GrowlNotifier:
 return self._send_growl(opts, message)
 else:
 return False
-except Exception, e:
+except Exception as e:
 logger.log(u"GROWL: Unable to send growl to " + opts['host'] + ":" + str(opts['port']) + " - " + ex(e), logger.WARNING)
 return False

@@ -186,7 +186,7 @@ class GrowlNotifier:

 try:
 return self._send(opts['host'], opts['port'], register.encode(), opts['debug'])
-except Exception, e:
+except Exception as e:
 logger.log(u"GROWL: Unable to send growl to " + opts['host'] + ":" + str(opts['port']) + " - " + ex(e), logger.WARNING)
 return False

@@ -102,7 +102,7 @@ class KODINotifier:

 try:
 response = urllib2.urlopen(req)
-except urllib2.URLError, e:
+except urllib2.URLError as e:
 logger.log(u'KODI: Warning: Couldn\'t contact Kodi at ' + host + '- ' + ex(e), logger.WARNING)
 return False

@@ -112,11 +112,11 @@ class KODINotifier:
 response.close()
 logger.log(u'KODI: JSON response: ' + str(result), logger.DEBUG)
 return result  # need to return response for parsing
-except ValueError, e:
+except ValueError as e:
 logger.log(u'KODI: Unable to decode JSON response: ' + response, logger.WARNING)
 return False

-except IOError, e:
+except IOError as e:
 logger.log(u'KODI: Warning: Couldn\'t contact Kodi at ' + host + ' - ' + ex(e), logger.WARNING)
 return False

@@ -44,13 +44,13 @@ def diagnose():
 else:
 try:
 bus = dbus.SessionBus()
-except dbus.DBusException, e:
+except dbus.DBusException as e:
 return (u"<p>Error: unable to connect to D-Bus session bus: <code>%s</code>."
 u"<p>Are you running SickGear in a desktop session?") % (cgi.escape(e),)
 try:
 bus.get_object('org.freedesktop.Notifications',
 '/org/freedesktop/Notifications')
-except dbus.DBusException, e:
+except dbus.DBusException as e:
 return (u"<p>Error: there doesn't seem to be a notification daemon available: <code>%s</code> "
 u"<p>Try installing notification-daemon or notify-osd.") % (cgi.escape(e),)
 return u"<p>Error: Unable to send notification."

@@ -121,13 +121,13 @@ class NMJNotifier:
 req = urllib2.Request(mount)
 logger.log(u"Try to mount network drive via url: %s" % (mount), logger.DEBUG)
 handle = urllib2.urlopen(req)
-except IOError, e:
+except IOError as e:
 if hasattr(e, 'reason'):
 logger.log(u"NMJ: Could not contact Popcorn Hour on host %s: %s" % (host, e.reason), logger.WARNING)
 elif hasattr(e, 'code'):
 logger.log(u"NMJ: Problem with Popcorn Hour on host %s: %s" % (host, e.code), logger.WARNING)
 return False
-except Exception, e:
+except Exception as e:
 logger.log(u"NMJ: Unknown exception: " + ex(e), logger.ERROR)
 return False

@@ -148,13 +148,13 @@ class NMJNotifier:
 logger.log(u"Sending NMJ scan update command via url: %s" % (updateUrl), logger.DEBUG)
 handle = urllib2.urlopen(req)
 response = handle.read()
-except IOError, e:
+except IOError as e:
 if hasattr(e, 'reason'):
 logger.log(u"NMJ: Could not contact Popcorn Hour on host %s: %s" % (host, e.reason), logger.WARNING)
 elif hasattr(e, 'code'):
 logger.log(u"NMJ: Problem with Popcorn Hour on host %s: %s" % (host, e.code), logger.WARNING)
 return False
-except Exception, e:
+except Exception as e:
 logger.log(u"NMJ: Unknown exception: " + ex(e), logger.ERROR)
 return False

@@ -162,7 +162,7 @@ class NMJNotifier:
 try:
 et = etree.fromstring(response)
 result = et.findtext("returnValue")
-except SyntaxError, e:
+except SyntaxError as e:
 logger.log(u"Unable to parse XML returned from the Popcorn Hour: %s" % (e), logger.ERROR)
 return False

@@ -90,7 +90,7 @@ class NMJv2Notifier:
 sickbeard.NMJv2_DATABASE = DB_path
 return True

-except IOError, e:
+except IOError as e:
 logger.log(u"Warning: Couldn't contact popcorn hour on host %s: %s" % (host, e), logger.WARNING)
 return False
 return False

@@ -119,19 +119,19 @@ class NMJv2Notifier:
 time.sleep(300.0 / 1000.0)
 handle2 = urllib2.urlopen(req)
 response2 = handle2.read()
-except IOError, e:
+except IOError as e:
 logger.log(u"Warning: Couldn't contact popcorn hour on host %s: %s" % (host, e), logger.WARNING)
 return False
 try:
 et = etree.fromstring(response1)
 result1 = et.findtext("returnValue")
-except SyntaxError, e:
+except SyntaxError as e:
 logger.log(u"Unable to parse XML returned from the Popcorn Hour: update_scandir, %s" % (e), logger.ERROR)
 return False
 try:
 et = etree.fromstring(response2)
 result2 = et.findtext("returnValue")
-except SyntaxError, e:
+except SyntaxError as e:
 logger.log(u"Unable to parse XML returned from the Popcorn Hour: scanner_start, %s" % (e), logger.ERROR)
 return False

@@ -88,7 +88,7 @@ class PLEXNotifier:
 # could return result response = re.compile('<html><li>(.+\w)</html>').findall(result)
 return 'OK'

-except (urllib2.URLError, IOError), e:
+except (urllib2.URLError, IOError) as e:
 logger.log(u'PLEX: Warning: Couldn\'t contact Plex at ' + fixStupidEncodings(url) + ' ' + ex(e), logger.WARNING)
 return False

@@ -219,7 +219,7 @@ class PLEXNotifier:
 try:
 xml_tree = etree.parse(urllib.urlopen(url))
 media_container = xml_tree.getroot()
-except IOError, e:
+except IOError as e:
 logger.log(u'PLEX: Error while trying to contact Plex Media Server: ' + ex(e), logger.ERROR)
 hosts_failed.append(cur_host)
 continue

@@ -255,7 +255,7 @@ class PLEXNotifier:
 try:
 force and urllib.urlopen(url)
 host_list.append(cur_host)
-except Exception, e:
+except Exception as e:
 logger.log(u'PLEX: Error updating library section for Plex Media Server: ' + ex(e), logger.ERROR)
 hosts_failed.append(cur_host)

@@ -74,7 +74,7 @@ class PushbulletNotifier:
 handle.close()
 except socket.timeout:
 return False
-except urllib2.URLError, e:
+except urllib2.URLError as e:

 if e.code == 404:
 logger.log(u'PUSHBULLET: Access token is wrong/not associated to a device.', logger.ERROR)

@@ -97,7 +97,7 @@ class PushoverNotifier:
 handle = urllib2.urlopen(req, data)
 handle.close()

-except urllib2.URLError, e:
+except urllib2.URLError as e:
 # HTTP status 404 if the provided email address isn't a Pushover user.
 if e.code == 404:
 logger.log(u'PUSHOVER: Username is wrong/not a Pushover email. Pushover will send an email to it', logger.WARNING)

@@ -91,14 +91,14 @@ class pyTivoNotifier:

 try:
 response = urlopen(request) #@UnusedVariable
-except HTTPError , e:
+except HTTPError as e:
 if hasattr(e, 'reason'):
 logger.log(u"pyTivo notification: Error, failed to reach a server - " + e.reason, logger.ERROR)
 return False
 elif hasattr(e, 'code'):
 logger.log(u"pyTivo notification: Error, the server couldn't fulfill the request - " + e.code, logger.ERROR)
 return False
-except Exception, e:
+except Exception as e:
 logger.log(u"PYTIVO: Unknown exception: " + ex(e), logger.ERROR)
 return False
 else:

@@ -58,7 +58,7 @@ class synoIndexNotifier:
 cwd=sickbeard.PROG_DIR)
 out, err = p.communicate() #@UnusedVariable
 logger.log(u"Script result: " + str(out), logger.DEBUG)
-except OSError, e:
+except OSError as e:
 logger.log(u"Unable to run synoindex: " + ex(e), logger.ERROR)

 def deleteFolder(self, cur_path):

@@ -83,7 +83,7 @@ class synoIndexNotifier:
 cwd=sickbeard.PROG_DIR)
 out, err = p.communicate() #@UnusedVariable
 logger.log(u"Script result: " + str(out), logger.DEBUG)
-except OSError, e:
+except OSError as e:
 logger.log(u"Unable to run synoindex: " + ex(e), logger.ERROR)

@@ -56,7 +56,7 @@ class synologyNotifier:
 cwd=sickbeard.PROG_DIR)
 out, err = p.communicate() #@UnusedVariable
 logger.log(u"Script result: " + str(out), logger.DEBUG)
-except OSError, e:
+except OSError as e:
 logger.log(u"Unable to run synodsmnotify: " + ex(e))

@@ -129,7 +129,7 @@ class TwitterNotifier:

 try:
 api.PostUpdate(message.encode('utf8'))
-except Exception, e:
+except Exception as e:
 logger.log(u"Error Sending Tweet: " + ex(e), logger.ERROR)
 return False

@@ -247,7 +247,7 @@ class XBMCNotifier:
 logger.log(u"XBMC HTTP response: " + result.replace('\n', ''), logger.DEBUG)
 return result

-except (urllib2.URLError, IOError), e:
+except (urllib2.URLError, IOError) as e:
 logger.log(u"Warning: Couldn't contact XBMC HTTP at " + fixStupidEncodings(url) + " " + ex(e),
 logger.WARNING)
 return False

@@ -304,7 +304,7 @@ class XBMCNotifier:
 encSqlXML = urllib.quote(sqlXML, ':\\/<>')
 try:
 et = etree.fromstring(encSqlXML)
-except SyntaxError, e:
+except SyntaxError as e:
 logger.log(u"Unable to parse XML returned from XBMC: " + ex(e), logger.ERROR)
 return False

@@ -385,7 +385,7 @@ class XBMCNotifier:

 try:
 response = urllib2.urlopen(req)
-except urllib2.URLError, e:
+except urllib2.URLError as e:
 logger.log(u"Error while trying to retrieve XBMC API version for " + host + ": " + ex(e),
 logger.WARNING)
 return False

@@ -396,11 +396,11 @@ class XBMCNotifier:
 response.close()
 logger.log(u"XBMC JSON response: " + str(result), logger.DEBUG)
 return result  # need to return response for parsing
-except ValueError, e:
+except ValueError as e:
 logger.log(u"Unable to decode JSON: " + response, logger.WARNING)
 return False

-except IOError, e:
+except IOError as e:
 logger.log(u"Warning: Couldn't contact XBMC JSON API at " + fixStupidEncodings(url) + " " + ex(e),
 logger.WARNING)
 return False

@@ -92,7 +92,7 @@ def saveNZB(nzbName, nzbString):
 with ek.ek(open, nzbName + ".nzb", 'w') as nzb_fh:
 nzb_fh.write(nzbString)

-except EnvironmentError, e:
+except EnvironmentError as e:
 logger.log(u"Unable to save NZB: " + ex(e), logger.ERROR)

@@ -63,7 +63,7 @@ def sendNZB(nzb, proper=False):
 logger.ERROR)
 return False

-except xmlrpclib.ProtocolError, e:
+except xmlrpclib.ProtocolError as e:
 if (e.errmsg == "Unauthorized"):
 logger.log(u"NZBget username or password is incorrect.", logger.ERROR)
 else:

@@ -234,7 +234,7 @@ class PostProcessor(object):
 ek.ek(send2trash, cur_file)
 else:
 ek.ek(os.remove, cur_file)
-except OSError, e:
+except OSError as e:
 self._log(u'Unable to delete file %s: %s' % (cur_file, str(e.strerror)), logger.DEBUG)

 if True is not ek.ek(os.path.isfile, cur_file):

@@ -329,7 +329,7 @@ class PostProcessor(object):
 helpers.moveFile(cur_file_path, new_file_path)
 helpers.chmodAsParent(new_file_path)
 self._log(u'Moved file from' + (success_tmpl % (cur_file_path, new_file_path)), logger.DEBUG)
-except (IOError, OSError), e:
+except (IOError, OSError) as e:
 self._log(u'Unable to move file %s<br />.. %s' % (success_tmpl % (cur_file_path, new_file_path), str(e)), logger.ERROR)
 raise e

@@ -350,7 +350,7 @@ class PostProcessor(object):
 helpers.copyFile(cur_file_path, new_file_path)
 helpers.chmodAsParent(new_file_path)
 self._log(u'Copied file from' + (success_tmpl % (cur_file_path, new_file_path)), logger.DEBUG)
-except (IOError, OSError), e:
+except (IOError, OSError) as e:
 self._log(u'Unable to copy %s<br />.. %s' % (success_tmpl % (cur_file_path, new_file_path), str(e)), logger.ERROR)
 raise e

@@ -371,7 +371,7 @@ class PostProcessor(object):
 helpers.hardlinkFile(cur_file_path, new_file_path)
 helpers.chmodAsParent(new_file_path)
 self._log(u'Hard linked file from' + (success_tmpl % (cur_file_path, new_file_path)), logger.DEBUG)
-except (IOError, OSError), e:
+except (IOError, OSError) as e:
 self._log(u'Unable to link file %s<br />.. %s' % (success_tmpl % (cur_file_path, new_file_path), str(e)), logger.ERROR)
 raise e

@@ -393,7 +393,7 @@ class PostProcessor(object):
 helpers.chmodAsParent(new_file_path)
 self._log(u'Moved then symbolic linked file from' + (success_tmpl % (cur_file_path, new_file_path)),
 logger.DEBUG)
-except (IOError, OSError), e:
+except (IOError, OSError) as e:
 self._log(u'Unable to link file %s<br />.. %s' % (success_tmpl % (cur_file_path, new_file_path), str(e)), logger.ERROR)
 raise e

@@ -543,7 +543,7 @@ class PostProcessor(object):

 try:
 (cur_show, cur_season, cur_episodes, cur_quality) = cur_attempt()
-except (InvalidNameException, InvalidShowException), e:
+except (InvalidNameException, InvalidShowException) as e:
 logger.log(u'Unable to parse, skipping: ' + ex(e), logger.DEBUG)
 continue

@@ -621,7 +621,7 @@ class PostProcessor(object):
 # now that we've figured out which episode this file is just load it manually
 try:
 cur_ep = show.getEpisode(season, episode)
-except exceptions.EpisodeNotFoundException, e:
+except exceptions.EpisodeNotFoundException as e:
 self._log(u'Unable to create episode: ' + ex(e), logger.DEBUG)
 raise exceptions.PostProcessingFailed()

@@ -706,10 +706,10 @@ class PostProcessor(object):
 out, err = p.communicate()  # @UnusedVariable
 self._log(u'Script result: ' + str(out), logger.DEBUG)

-except OSError, e:
+except OSError as e:
 self._log(u'Unable to run extra_script: ' + ex(e))

-except Exception, e:
+except Exception as e:
 self._log(u'Unable to run extra_script: ' + ex(e))

 def _safe_replace(self, ep_obj, new_ep_quality):

@@ -1048,5 +1048,5 @@ class PostProcessor(object):
 self._log(u'Adding the file to the anidb mylist', logger.DEBUG)
 try:
 self.anidbEpisode.add_to_mylist(status=1)  # status = 1 sets the status of the file to "internal HDD"
-except Exception, e:
+except Exception as e:
 self._log(u'exception msg: ' + str(e))

@@ -97,7 +97,7 @@ class ProcessTVShow(object):
 # try deleting folder
 try:
 shutil.rmtree(folder)
-except (OSError, IOError), e:
+except (OSError, IOError) as e:
 logger.log(u'Warning: unable to delete folder: %s: %s' % (folder, ex(e)), logger.WARNING)
 return False

@@ -128,14 +128,14 @@ class ProcessTVShow(object):
 self._log_helper(u'Changing ReadOnly flag for file ' + cur_file)
 try:
 ek.ek(os.chmod, cur_file_path, stat.S_IWRITE)
-except OSError, e:
+except OSError as e:
 self._log_helper(u'Cannot change permissions of %s: %s' % (cur_file_path, str(e.strerror)))
 try:
 if use_trash:
 ek.ek(send2trash, cur_file_path)
 else:
 ek.ek(os.remove, cur_file_path)
-except OSError, e:
+except OSError as e:
 self._log_helper(u'Unable to delete file %s: %s' % (cur_file, str(e.strerror)))

 if True is not ek.ek(os.path.isfile, cur_file_path):

@@ -401,7 +401,7 @@ class ProcessTVShow(object):
 rar_handle.extract(path=path, withSubpath=False, overwrite=False)
 unpacked_files += [os.path.basename(x.filename) for x in rar_handle.infolist() if not x.isdir]
 del rar_handle
-except Exception, e:
+except Exception as e:
 self._log_helper(u'Failed to unpack archive %s: %s' % (archive, ex(e)), logger.ERROR)
 self._set_process_success(False)
 continue

@@ -509,7 +509,7 @@ class ProcessTVShow(object):
 processor = postProcessor.PostProcessor(cur_video_file_path, nzb_name, process_method, force_replace, use_trash=use_trash)
 file_success = processor.process()
 process_fail_message = ''
-except exceptions.PostProcessingFailed, e:
+except exceptions.PostProcessingFailed as e:
 file_success = False
 process_fail_message = '<br />.. ' + ex(e)

@@ -560,7 +560,7 @@ class ProcessTVShow(object):
 processor = failedProcessor.FailedProcessor(dir_name, nzb_name)
 self._set_process_success(processor.process())
 process_fail_message = ''
-except exceptions.FailedProcessingFailed, e:
+except exceptions.FailedProcessingFailed as e:
 self._set_process_success(False)
 process_fail_message = ex(e)

@@ -74,10 +74,10 @@ def _getProperList():

 try:
 curPropers = curProvider.findPropers(search_date)
-except exceptions.AuthException, e:
+except exceptions.AuthException as e:
 logger.log(u'Authentication error: ' + ex(e), logger.ERROR)
 continue
-except Exception, e:
+except Exception as e:
 logger.log(u'Error while searching ' + curProvider.name + ', skipping: ' + ex(e), logger.ERROR)
 logger.log(traceback.format_exc(), logger.DEBUG)
 continue

@@ -74,7 +74,7 @@ class BitSoupProvider(generic.TorrentProvider):

 try:
 response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
-except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
+except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError) as e:
 logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
 return False

@@ -195,7 +195,7 @@ class BitSoupProvider(generic.TorrentProvider):

 items[mode].append(item)

-except Exception, e:
+except Exception as e:
 logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)

 #For each search mode sort all the items by seeders

@@ -123,7 +123,7 @@ class BTNProvider(generic.TorrentProvider):
 try:
 parsedJSON = server.getTorrents(apikey, params, int(results_per_page), int(offset))

-except jsonrpclib.jsonrpc.ProtocolError, error:
+except jsonrpclib.jsonrpc.ProtocolError as error:
 logger.log(u"JSON-RPC protocol error while accessing " + self.name + ": " + ex(error), logger.ERROR)
 parsedJSON = {'api-error': ex(error)}
 return parsedJSON

@@ -131,11 +131,11 @@ class BTNProvider(generic.TorrentProvider):
 except socket.timeout:
 logger.log(u"Timeout while accessing " + self.name, logger.WARNING)

-except socket.error, error:
+except socket.error as error:
 # Note that sometimes timeouts are thrown as socket errors
 logger.log(u"Socket error while accessing " + self.name + ": " + error[1], logger.ERROR)

-except Exception, error:
+except Exception as error:
 errorstring = str(error)
 if (errorstring.startswith('<') and errorstring.endswith('>')):
 errorstring = errorstring[1:-1]

@@ -83,7 +83,7 @@ class FreshOnTVProvider(generic.TorrentProvider):

 try:
 response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
-except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
+except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError) as e:
 logger.log(u'Unable to connect to %s provider: %s' % (self.name, ex(e)), logger.ERROR)
 return False

@@ -232,7 +232,7 @@ class FreshOnTVProvider(generic.TorrentProvider):

 items[mode].append(item)

-except Exception, e:
+except Exception as e:
 logger.log(u'Failed parsing %s Traceback: %s' % (self.name, traceback.format_exc()), logger.ERROR)

 # For each search mode sort all the items by seeders

@@ -86,7 +86,7 @@ class HDTorrentsProvider(generic.TorrentProvider):

 try:
 response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
-except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
+except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError) as e:
 logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
 return False

@@ -255,7 +255,7 @@ class HDTorrentsProvider(generic.TorrentProvider):

 items[mode].append(item)

-except Exception, e:
+except Exception as e:
 logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)

 #For each search mode sort all the items by seeders

@@ -70,7 +70,7 @@ class IPTorrentsProvider(generic.TorrentProvider):

 try:
 response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
-except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
+except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError) as e:
 logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
 return False

@@ -198,7 +198,7 @@ class IPTorrentsProvider(generic.TorrentProvider):
 logger.log(u"Found result: " + torrent_name + " (" + torrent_details_url + ")", logger.DEBUG)
 items[mode].append(item)

-except Exception, e:
+except Exception as e:
 logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)

 results += items[mode]

@@ -139,7 +139,7 @@ class KATProvider(generic.TorrentProvider):

 return title

-except Exception, e:
+except Exception as e:
 logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)

@@ -267,7 +267,7 @@ class KATProvider(generic.TorrentProvider):

 items[mode].append(item)

-except Exception, e:
+except Exception as e:
 logger.log(u"Failed to parsing " + self.name + " Traceback: " + traceback.format_exc(),
 logger.ERROR)

@@ -230,7 +230,7 @@ class NextGenProvider(generic.TorrentProvider):
 logger.WARNING)
 continue

-except Exception, e:
+except Exception as e:
 logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(),
 logger.ERROR)

@@ -116,13 +116,13 @@ class TorrentRssProvider(generic.TorrentProvider):
 torrent_file = self.getURL(url)
 try:
 bdecode(torrent_file)
-except Exception, e:
+except Exception as e:
 self.dumpHTML(torrent_file)
 return (False, 'Torrent link is not a valid torrent file: ' + ex(e))

 return (True, 'RSS feed Parsed correctly')

-except Exception, e:
+except Exception as e:
 return (False, 'Error when trying to load RSS: ' + ex(e))

 def dumpHTML(self, data):

@@ -134,7 +134,7 @@ class TorrentRssProvider(generic.TorrentProvider):
 fileOut.write(data)
 fileOut.close()
 helpers.chmodAsParent(dumpName)
-except IOError, e:
+except IOError as e:
 logger.log("Unable to save the file: " + ex(e), logger.ERROR)
 return False
 logger.log(u"Saved custom_torrent html dump " + dumpName + " ", logger.MESSAGE)

@@ -68,7 +68,7 @@ class SCCProvider(generic.TorrentProvider):

 try:
 response = self.session.post(self.urls['login'], data=login_params, headers=self.headers, timeout=30, verify=False)
-except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
+except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError) as e:
 logger.log(u'Unable to connect to %s provider: %s' % (self.name, ex(e)), logger.ERROR)
 return False

@@ -210,7 +210,7 @@ class SCCProvider(generic.TorrentProvider):

 items[mode].append(item)

-except Exception, e:
+except Exception as e:
 logger.log(u'Failed parsing %s Traceback: %s' % (self.name, traceback.format_exc()), logger.ERROR)

 # For each search mode sort all the items by seeders

@@ -62,7 +62,7 @@ class SpeedCDProvider(generic.TorrentProvider):

 try:
 response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
-except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
+except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError) as e:
 logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
 return False

@@ -101,7 +101,7 @@ class TokyoToshokanProvider(generic.TorrentProvider):
 item = title.lstrip(), url
 results.append(item)

-except Exception, e:
+except Exception as e:
 logger.log(u"Failed to parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)

@@ -65,7 +65,7 @@ class TorrentBytesProvider(generic.TorrentProvider):

 try:
 response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
-except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
+except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError) as e:
 logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
 return False

@@ -190,7 +190,7 @@ class TorrentBytesProvider(generic.TorrentProvider):

 items[mode].append(item)

-except Exception, e:
+except Exception as e:
 logger.log(u'Failed parsing ' + self.name + ' Traceback: ' + traceback.format_exc(), logger.ERROR)

 # For each search mode sort all the items by seeders

@@ -76,7 +76,7 @@ class TorrentDayProvider(generic.TorrentProvider):

 try:
 response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
-except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
+except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError) as e:
 logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
 return False

@@ -69,7 +69,7 @@ class TorrentLeechProvider(generic.TorrentProvider):

 try:
 response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
-except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
+except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError) as e:
 logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
 return False

@@ -190,7 +190,7 @@ class TorrentLeechProvider(generic.TorrentProvider):

 items[mode].append(item)

-except Exception, e:
+except Exception as e:
 logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)

 #For each search mode sort all the items by seeders

@@ -99,11 +99,11 @@ def sendNZB(nzb):

 f = opener.open(req)

-except (EOFError, IOError), e:
+except (EOFError, IOError) as e:
 logger.log(u"Unable to connect to SABnzbd: " + ex(e), logger.ERROR)
 return False

-except httplib.InvalidURL, e:
+except httplib.InvalidURL as e:
 logger.log(u"Invalid SABnzbd host, check your config: " + ex(e), logger.ERROR)
 return False

@@ -115,7 +115,7 @@ def sendNZB(nzb):
 # if we opened the URL connection then read the result from SAB
 try:
 result = f.readlines()
-except Exception, e:
+except Exception as e:
 logger.log(u"Error trying to get result from SABnzbd, NZB not sent: " + ex(e), logger.ERROR)
 return False

@@ -144,7 +144,7 @@ def sendNZB(nzb):
 def _checkSabResponse(f):
 try:
 result = f.readlines()
-except Exception, e:
+except Exception as e:
 logger.log(u"Error trying to get result from SABnzbd" + ex(e), logger.ERROR)
 return False, "Error from SABnzbd"

@@ -156,7 +156,7 @@ def _checkSabResponse(f):
 sabJson = {}
 try:
 sabJson = json.loads(sabText)
-except ValueError, e:
+except ValueError as e:
 pass

 if sabText == "Missing authentication":

@@ -172,10 +172,10 @@ def _checkSabResponse(f):
 def _sabURLOpenSimple(url):
 try:
 f = urllib.urlopen(url)
-except (EOFError, IOError), e:
+except (EOFError, IOError) as e:
 logger.log(u"Unable to connect to SABnzbd: " + ex(e), logger.ERROR)
 return False, "Unable to connect"
-except httplib.InvalidURL, e:
+except httplib.InvalidURL as e:
 logger.log(u"Invalid SABnzbd host, check your config: " + ex(e), logger.ERROR)
 return False, "Invalid SABnzbd host"
 if f is None:

@@ -526,7 +526,7 @@ def xem_refresh(indexer_id, indexer, force=False):
 else:
 logger.log(u"Empty lookup result - no XEM data for show %s on %s" % (
 indexer_id, sickbeard.indexerApi(indexer).name,), logger.DEBUG)
-except Exception, e:
+except Exception as e:
 logger.log(
 u"Exception while refreshing XEM data for show " + str(indexer_id) + " on " + sickbeard.indexerApi(
 indexer).name + ": " + ex(e), logger.WARNING)

@@ -88,7 +88,7 @@ class Scheduler(threading.Thread):
 logger.log(u"Starting new thread: " + self.name, logger.DEBUG)

 self.action.run()
-except Exception, e:
+except Exception as e:
 logger.log(u"Exception generated in thread " + self.name + ": " + ex(e), logger.ERROR)
 logger.log(repr(traceback.format_exc()), logger.DEBUG)

@@ -78,7 +78,7 @@ def _downloadResult(result):

 helpers.chmodAsParent(fileName)

-except EnvironmentError, e:
+except EnvironmentError as e:
 logger.log(u"Error trying to save NZB to black hole: " + ex(e), logger.ERROR)
 newResult = False
 elif resProvider.providerType == "torrent":

@@ -483,10 +483,10 @@ def searchProviders(show, episodes, manualSearch=False):
 try:
 curProvider.cache._clearCache()
 searchResults = curProvider.findSearchResults(show, episodes, search_mode, manualSearch)
-except exceptions.AuthException, e:
+except exceptions.AuthException as e:
 logger.log(u"Authentication error: " + ex(e), logger.ERROR)
 break
-except Exception, e:
+except Exception as e:
 logger.log(u"Error while searching " + curProvider.name + ", skipping: " + ex(e), logger.ERROR)
 logger.log(traceback.format_exc(), logger.DEBUG)
 break

@ -94,7 +94,7 @@ class ShowUpdater():

                piList.append(curQueueItem)

            except (exceptions.CantUpdateException, exceptions.CantRefreshException), e:
            except (exceptions.CantUpdateException, exceptions.CantRefreshException) as e:
                logger.log(u'Automatic update failed: ' + ex(e), logger.ERROR)

        ui.ProgressIndicators.setIndicator('dailyUpdate', ui.QueueProgressIndicator('Daily Update', piList))

@ -308,7 +308,7 @@ class QueueItemAdd(ShowQueueItem):
                    self.indexer).name) + ' but contains no season/episode data.')
                self._finishEarly()
                return
        except Exception, e:
        except Exception as e:
            logger.log(u'Unable to find show ID:' + str(self.indexer_id) + ' on Indexer: ' + str(
                sickbeard.indexerApi(self.indexer).name), logger.ERROR)
            ui.notifications.error('Unable to add show',

@ -349,7 +349,7 @@ class QueueItemAdd(ShowQueueItem):
            if self.show.classification and 'sports' in self.show.classification.lower():
                self.show.sports = 1

        except sickbeard.indexer_exception, e:
        except sickbeard.indexer_exception as e:
            logger.log(
                u'Unable to add show due to an error with ' + sickbeard.indexerApi(self.indexer).name + ': ' + ex(e),
                logger.ERROR)

@ -369,7 +369,7 @@ class QueueItemAdd(ShowQueueItem):
            self._finishEarly()
            return

        except Exception, e:
        except Exception as e:
            logger.log(u'Error trying to add show: ' + ex(e), logger.ERROR)
            logger.log(traceback.format_exc(), logger.DEBUG)
            self._finishEarly()

@ -379,7 +379,7 @@ class QueueItemAdd(ShowQueueItem):

        try:
            self.show.saveToDB()
        except Exception, e:
        except Exception as e:
            logger.log(u'Error saving the show to the database: ' + ex(e), logger.ERROR)
            logger.log(traceback.format_exc(), logger.DEBUG)
            self._finishEarly()

@ -390,7 +390,7 @@ class QueueItemAdd(ShowQueueItem):

        try:
            self.show.loadEpisodesFromIndexer()
        except Exception, e:
        except Exception as e:
            logger.log(
                u'Error with ' + sickbeard.indexerApi(self.show.indexer).name + ', not creating episode list: ' + ex(e),
                logger.ERROR)

@ -398,7 +398,7 @@ class QueueItemAdd(ShowQueueItem):

        try:
            self.show.loadEpisodesFromDir()
        except Exception, e:
        except Exception as e:
            logger.log(u'Error searching directory for episodes: ' + ex(e), logger.ERROR)
            logger.log(traceback.format_exc(), logger.DEBUG)

@ -587,11 +587,11 @@ class QueueItemUpdate(ShowQueueItem):
            result = self.show.loadFromIndexer(cache=not self.force)
            if None is not result:
                return
        except sickbeard.indexer_error, e:
        except sickbeard.indexer_error as e:
            logger.log(u'Unable to contact ' + sickbeard.indexerApi(self.show.indexer).name + ', aborting: ' + ex(e),
                       logger.WARNING)
            return
        except sickbeard.indexer_attributenotfound, e:
        except sickbeard.indexer_attributenotfound as e:
            logger.log(u'Data retrieved from ' + sickbeard.indexerApi(
                self.show.indexer).name + ' was incomplete, aborting: ' + ex(e), logger.ERROR)
            return

@ -601,7 +601,7 @@ class QueueItemUpdate(ShowQueueItem):

        try:
            self.show.saveToDB()
        except Exception, e:
        except Exception as e:
            logger.log(u'Error saving the episode to the database: ' + ex(e), logger.ERROR)
            logger.log(traceback.format_exc(), logger.DEBUG)

@ -613,7 +613,7 @@ class QueueItemUpdate(ShowQueueItem):
        logger.log(u'Loading all episodes from ' + sickbeard.indexerApi(self.show.indexer).name + '', logger.DEBUG)
        try:
            IndexerEpList = self.show.loadEpisodesFromIndexer(cache=not self.force)
        except sickbeard.indexer_exception, e:
        except sickbeard.indexer_exception as e:
            logger.log(u'Unable to get info from ' + sickbeard.indexerApi(
                self.show.indexer).name + ', the show info will not be refreshed: ' + ex(e), logger.ERROR)
            IndexerEpList = None

@ -412,7 +412,7 @@ class TVShow(object):
            logger.log(str(self.indexerid) + u": Creating episode from " + mediaFile, logger.DEBUG)
            try:
                curEpisode = self.makeEpFromFile(ek.ek(os.path.join, self._location, mediaFile))
            except (exceptions.ShowNotFoundException, exceptions.EpisodeNotFoundException), e:
            except (exceptions.ShowNotFoundException, exceptions.EpisodeNotFoundException) as e:
                logger.log(u"Episode " + mediaFile + " returned an exception: " + ex(e), logger.ERROR)
                continue
            except exceptions.EpisodeDeletedException:

@ -493,7 +493,7 @@ class TVShow(object):
                if curSeason not in cachedSeasons:
                    try:
                        cachedSeasons[curSeason] = cachedShow[curSeason]
                    except sickbeard.indexer_seasonnotfound, e:
                    except sickbeard.indexer_seasonnotfound as e:
                        logger.log(u"Error when trying to load the episode from " + sickbeard.indexerApi(
                            self.indexer).name + ": " + e.message, logger.WARNING)
                        deleteEp = True

@ -921,9 +921,9 @@ class TVShow(object):
        logger.log(u'Retrieving show info from IMDb', logger.DEBUG)
        try:
            self._get_imdb_info()
        except imdb_exceptions.IMDbError, e:
        except imdb_exceptions.IMDbError as e:
            logger.log(u'Something is wrong with IMDb api: ' + ex(e), logger.WARNING)
        except Exception, e:
        except Exception as e:
            logger.log(u'Error loading IMDb info: ' + ex(e), logger.ERROR)
            logger.log(u'' + traceback.format_exc(), logger.DEBUG)

@ -1051,7 +1051,7 @@ class TVShow(object):
                else:
                    os.remove(cache_file)

            except OSError, e:
            except OSError as e:
                logger.log(u'Unable to %s %s: %s / %s' % (action, cache_file, repr(e), str(e)), logger.WARNING)

        # remove entire show folder

@ -1079,7 +1079,7 @@ class TVShow(object):

        except exceptions.ShowDirNotFoundException:
            logger.log(u"Show folder does not exist, no need to %s %s" % (action, self._location), logger.WARNING)
        except OSError, e:
        except OSError as e:
            logger.log(u'Unable to %s %s: %s / %s' % (action, self._location, repr(e), str(e)), logger.WARNING)

    def populateCache(self):

@ -1681,7 +1681,7 @@ class TVEpisode(object):
                else:
                    myEp = cachedSeason[episode]

            except (sickbeard.indexer_error, IOError), e:
            except (sickbeard.indexer_error, IOError) as e:
                logger.log(u"" + sickbeard.indexerApi(self.indexer).name + " threw up an error: " + ex(e), logger.DEBUG)
                # if the episode is already valid just log it, if not throw it up
                if self.name:

@ -1846,12 +1846,12 @@ class TVEpisode(object):
        if ek.ek(os.path.isfile, nfoFile):
            try:
                showXML = etree.ElementTree(file=nfoFile)
            except (SyntaxError, ValueError), e:
            except (SyntaxError, ValueError) as e:
                logger.log(u"Error loading the NFO, backing up the NFO and skipping for now: " + ex(e),
                           logger.ERROR)  # TODO: figure out what's wrong and fix it
                try:
                    ek.ek(os.rename, nfoFile, nfoFile + ".old")
                except Exception, e:
                except Exception as e:
                    logger.log(
                        u"Failed to rename your episode's NFO file - you need to delete it or fix it: " + ex(e),
                        logger.ERROR)

@ -2175,7 +2175,7 @@ class TVEpisode(object):
        try:
            np = NameParser(name, showObj=show, naming_pattern=True)
            parse_result = np.parse(name)
        except (InvalidNameException, InvalidShowException), e:
        except (InvalidNameException, InvalidShowException) as e:
            logger.log(u"Unable to get parse release_group: " + ex(e), logger.DEBUG)
            return ''

@ -42,7 +42,7 @@ class CacheDBConnection(db.DBConnection):
        try:
            if not self.hasTable('lastUpdate'):
                self.action('CREATE TABLE lastUpdate (provider TEXT, time NUMERIC)')
        except Exception, e:
        except Exception as e:
            if str(e) != 'table lastUpdate already exists':
                raise

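The cache code above, and the test fixtures further down, share the same idiom: attempt the CREATE TABLE, swallow only the expected "already exists" message, and re-raise anything else. A self-contained sketch of that idiom (an in-memory database is used here purely for illustration):

    import sqlite3

    con = sqlite3.connect(':memory:')
    con.execute('CREATE TABLE lastUpdate (provider TEXT, time NUMERIC)')
    try:
        # creating the same table a second time triggers the expected error
        con.execute('CREATE TABLE lastUpdate (provider TEXT, time NUMERIC)')
    except sqlite3.OperationalError as e:
        # sqlite reports the duplicate as 'table lastUpdate already exists';
        # any other OperationalError is unexpected and is re-raised
        if str(e) != 'table lastUpdate already exists':
            raise
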
@ -379,7 +379,7 @@ class GitUpdateManager(UpdateManager):
        else:
            try:
                self._check_github_for_update()
            except Exception, e:
            except Exception as e:
                logger.log(u"Unable to contact github, can't check for update: " + repr(e), logger.ERROR)
                return False

@ -464,7 +464,7 @@ class SourceUpdateManager(UpdateManager):
        # need this to run first to set self._newest_commit_hash
        try:
            self._check_github_for_update()
        except Exception, e:
        except Exception as e:
            logger.log(u"Unable to contact github, can't check for update: " + repr(e), logger.ERROR)
            return False

@ -611,7 +611,7 @@ class SourceUpdateManager(UpdateManager):
                        os.chmod(new_path, stat.S_IWRITE)
                        os.remove(new_path)
                        os.renames(old_path, new_path)
                except Exception, e:
                except Exception as e:
                    logger.log(u"Unable to update " + new_path + ': ' + ex(e), logger.DEBUG)
                    os.remove(old_path)  # Trash the updated file without moving in new path
                    continue

@ -623,7 +623,7 @@ class SourceUpdateManager(UpdateManager):
            sickbeard.CUR_COMMIT_HASH = self._newest_commit_hash
            sickbeard.CUR_COMMIT_BRANCH = self.branch

        except Exception, e:
        except Exception as e:
            logger.log(u"Error while trying to update: " + ex(e), logger.ERROR)
            logger.log(u"Traceback: " + traceback.format_exc(), logger.DEBUG)
            return False

@ -119,7 +119,7 @@ class Api(webserve.BaseHandler):
        else:  # if debug was not set we wrap the "call_dispatcher" in a try block to assure a json output
            try:
                outDict = _call_dispatcher(self, args, kwargs)
            except Exception, e:  # real internal error oohhh nooo :(
            except Exception as e:  # real internal error oohhh nooo :(
                logger.log(u"API :: " + ex(e), logger.ERROR)
                errorData = {"error_msg": ex(e),
                             "args": args,

@ -140,7 +140,7 @@ class Api(webserve.BaseHandler):
            if 'jsonp' in self.request.query_arguments:
                out = self.request.arguments['jsonp'] + '(' + out + ');'  # wrap with JSONP call if requested
        except Exception, e:  # if we fail to generate the output fake an error
        except Exception as e:  # if we fail to generate the output fake an error
            logger.log(u'API :: ' + traceback.format_exc(), logger.DEBUG)
            out = '{"result":"' + result_type_map[RESULT_ERROR] + '", "message": "error while composing output: "' + ex(
                e) + '"}'

@ -205,7 +205,7 @@ def call_dispatcher(handler, args, kwargs):
                    curOutDict = TVDBShorthandWrapper(handler, curArgs, curKwargs, cmd).run()
                else:
                    curOutDict = _responds(RESULT_ERROR, "No such cmd: '" + cmd + "'")
            except ApiError, e:  # Api errors that we raised, they are harmless
            except ApiError as e:  # Api errors that we raised, they are harmless
                curOutDict = _responds(RESULT_ERROR, msg=ex(e))
        else:  # if someone chained one of the forbiden cmds they will get an error for this one cmd
            curOutDict = _responds(RESULT_ERROR, msg="The cmd '" + cmd + "' is not supported while chaining")

@ -1609,7 +1609,7 @@ class CMD_SickBeardSearchIndexers(ApiCall):

        try:
            apiData = t[str(self.name).encode()]
        except Exception, e:
        except Exception as e:
            pass

        if not apiData:

@ -2537,7 +2537,7 @@ class CMD_ShowUpdate(ApiCall):
        try:
            sickbeard.showQueueScheduler.action.updateShow(showObj, True) #@UndefinedVariable
            return _responds(RESULT_SUCCESS, msg=str(showObj.name) + " has queued to be updated")
        except exceptions.CantUpdateException, e:
        except exceptions.CantUpdateException as e:
            logger.log(u"API:: Unable to update " + str(showObj.name) + ". " + str(ex(e)), logger.ERROR)
            return _responds(RESULT_FAILURE, msg="Unable to update " + str(showObj.name))

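The Api handler hunks above convert any uncaught dispatcher exception into a JSON error payload rather than letting the web framework emit an HTML traceback. A stripped-down sketch of that pattern (the function and key names here are illustrative, not the project's actual API):

    import json
    import traceback

    def dispatch_safely(func, *args, **kwargs):
        # wrap a dispatcher call so the caller always receives JSON
        try:
            return json.dumps({'result': 'success', 'data': func(*args, **kwargs)})
        except Exception as e:  # the py2/3-compatible clause this commit standardises on
            return json.dumps({'result': 'error', 'error_msg': str(e),
                               'trace': traceback.format_exc()})
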
@ -1262,7 +1262,7 @@ class Home(MainHandler):
            try:
                anime = adba.Anime(sickbeard.ADBA_CONNECTION, name=showObj.name)
                t.groups = anime.get_groups()
            except Exception, e:
            except Exception as e:
                t.groups.append(dict([('name', 'Fail:AniDB connect. Restart sg else check debug log'), ('rating', ''), ('range', '')]))
        else:
            t.groups.append(dict([('name', 'Did not initialise AniDB. Check debug log if reqd.'), ('rating', ''), ('range', '')]))

@ -1344,7 +1344,7 @@ class Home(MainHandler):
            showObj.flatten_folders = flatten_folders
            try:
                sickbeard.showQueueScheduler.action.refreshShow(showObj)  # @UndefinedVariable
            except exceptions.CantRefreshException, e:
            except exceptions.CantRefreshException as e:
                errors.append('Unable to refresh this show: ' + ex(e))

        showObj.paused = paused

@ -1374,7 +1374,7 @@ class Home(MainHandler):
            showObj.location = location
            try:
                sickbeard.showQueueScheduler.action.refreshShow(showObj)  # @UndefinedVariable
            except exceptions.CantRefreshException, e:
            except exceptions.CantRefreshException as e:
                errors.append('Unable to refresh this show:' + ex(e))
            # grab updated info from TVDB
            # showObj.loadEpisodesFromIndexer()

@ -1391,7 +1391,7 @@ class Home(MainHandler):
            try:
                sickbeard.showQueueScheduler.action.updateShow(showObj, True)  # @UndefinedVariable
                time.sleep(cpu_presets[sickbeard.CPU_PRESET])
            except exceptions.CantUpdateException, e:
            except exceptions.CantUpdateException as e:
                errors.append('Unable to force an update on the show.')

        if do_update_exceptions:

@ -1399,14 +1399,14 @@ class Home(MainHandler):
                scene_exceptions.update_scene_exceptions(showObj.indexerid, exceptions_list)  # @UndefinedVdexerid)
                buildNameCache(showObj)
                time.sleep(cpu_presets[sickbeard.CPU_PRESET])
            except exceptions.CantUpdateException, e:
            except exceptions.CantUpdateException as e:
                errors.append('Unable to force an update on scene exceptions of the show.')

        if do_update_scene_numbering:
            try:
                sickbeard.scene_numbering.xem_refresh(showObj.indexerid, showObj.indexer)  # @UndefinedVariable
                time.sleep(cpu_presets[sickbeard.CPU_PRESET])
            except exceptions.CantUpdateException, e:
            except exceptions.CantUpdateException as e:
                errors.append('Unable to force an update on scene numbering of the show.')

        if directCall:

@ -1456,7 +1456,7 @@ class Home(MainHandler):
        # force the update from the DB
        try:
            sickbeard.showQueueScheduler.action.refreshShow(showObj)  # @UndefinedVariable
        except exceptions.CantRefreshException, e:
        except exceptions.CantRefreshException as e:
            ui.notifications.error('Unable to refresh this show.',
                                   ex(e))

@ -1477,7 +1477,7 @@ class Home(MainHandler):
        # force the update
        try:
            sickbeard.showQueueScheduler.action.updateShow(showObj, bool(force), bool(web))
        except exceptions.CantUpdateException, e:
        except exceptions.CantUpdateException as e:
            ui.notifications.error('Unable to update this show.',
                                   ex(e))

@ -1985,7 +1985,7 @@ class Home(MainHandler):
        try:
            anime = adba.Anime(sickbeard.ADBA_CONNECTION, name=show_name)
            groups = anime.get_groups()
        except Exception, e:
        except Exception as e:
            logger.log(u'exception msg: ' + str(e), logger.DEBUG)
            return json.dumps({'result': 'fail', 'resp': 'connect'})

@ -2062,7 +2062,7 @@ class NewHomeAddShows(Home):
                try:
                    # add search results
                    results.setdefault(indexer, []).extend(t[search_term])
                except Exception, e:
                except Exception as e:
                    continue

        map(final_results.extend,

@ -3117,7 +3117,7 @@ class Manage(MainHandler):
                try:
                    sickbeard.showQueueScheduler.action.updateShow(showObj, True, True)  # @UndefinedVariable
                    updates.append(showObj.name)
                except exceptions.CantUpdateException, e:
                except exceptions.CantUpdateException as e:
                    errors.append('Unable to update show ' + showObj.name + ': ' + ex(e))

            # don't bother refreshing shows that were updated anyway

@ -3125,7 +3125,7 @@ class Manage(MainHandler):
                try:
                    sickbeard.showQueueScheduler.action.refreshShow(showObj)  # @UndefinedVariable
                    refreshes.append(showObj.name)
                except exceptions.CantRefreshException, e:
                except exceptions.CantRefreshException as e:
                    errors.append('Unable to refresh show ' + showObj.name + ': ' + ex(e))

            if curShowID in toRename:

@ -3612,7 +3612,7 @@ class ConfigGeneral(Config):
        try:
            pulls = sickbeard.versionCheckScheduler.action.list_remote_pulls()
            return json.dumps({'result': 'success', 'pulls': pulls})
        except Exception, e:
        except Exception as e:
            logger.log(u'exception msg: ' + str(e), logger.DEBUG)
            return json.dumps({'result': 'fail'})

@ -3621,7 +3621,7 @@ class ConfigGeneral(Config):
        try:
            branches = sickbeard.versionCheckScheduler.action.list_remote_branches()
            return json.dumps({'result': 'success', 'branches': branches})
        except Exception, e:
        except Exception as e:
            logger.log(u'exception msg: ' + str(e), logger.DEBUG)
            return json.dumps({'result': 'fail'})

@ -3905,7 +3905,7 @@ class ConfigPostProcessing(Config):
                    return 'supported'
                logger.log(u'Rar Not Supported: Can not read the content of test file', logger.ERROR)
                return 'not supported'
            except Exception, e:
            except Exception as e:
                logger.log(u'Rar Not Supported: ' + ex(e), logger.ERROR)
                return 'not supported'

tests/compatibility_tests.py (new file, 28 additions)

@ -0,0 +1,28 @@
import unittest

import subprocess
import os


class CompatibilityTests(unittest.TestCase):

    def test_except(self):
        path = os.path.abspath('..')
        pyfiles = []
        for rootdir in ['sickbeard', 'tests']:
            for dirpath, subdirs, files in os.walk(os.path.join(path, rootdir)):
                for x in files:
                    if x.endswith('.py'):
                        pyfiles.append(os.path.join(dirpath, x))

        pyfiles.append(os.path.join(path, 'SickBeard.py'))

        # run 2to3 with only the 'except' fixer; any proposed diff on stdout
        # means a py2-only exception clause remains somewhere in the tree
        output = subprocess.Popen('2to3 -f except %s' % ' '.join(pyfiles), shell=True, stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE).communicate()[0]
        if output:
            print('Changes to be made for Python 2/3 compatibility as follows:')
            print(output)
            self.fail('Python 2/3 incompatibility detected')

if __name__ == '__main__':
    suite = unittest.TestLoader().loadTestsFromTestCase(CompatibilityTests)
    unittest.TextTestRunner(verbosity=2).run(suite)

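The regression test above shells out to 2to3 with only its `except` fixer enabled: 2to3 prints a proposed diff for any file that still uses the py2-only comma syntax, so any stdout output at all fails the test. Because the project root is resolved as the parent of the current directory (`os.path.abspath('..')`), the test is intended to be run from inside tests/. Equivalent manual checks might look like this (the file path is illustrative):

    cd tests
    python -m unittest compatibility_tests

    # or inspect a single file directly; no diff output means the file is clean
    2to3 -f except ../sickbeard/scheduler.py
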
@ -149,7 +149,7 @@ class TestCacheDBConnection(TestDBConnection, object):
                sql = "CREATE TABLE " + providerName + " (name TEXT, season NUMERIC, episodes TEXT, indexerid NUMERIC, url TEXT, time NUMERIC, quality TEXT);"
                self.connection.execute(sql)
                self.connection.commit()
            except sqlite3.OperationalError, e:
            except sqlite3.OperationalError as e:
                if str(e) != "table " + providerName + " already exists":
                    raise

@ -158,7 +158,7 @@ class TestCacheDBConnection(TestDBConnection, object):
                sql = "CREATE TABLE lastUpdate (provider TEXT, time NUMERIC);"
                self.connection.execute(sql)
                self.connection.commit()
            except sqlite3.OperationalError, e:
            except sqlite3.OperationalError as e:
                if str(e) != "table lastUpdate already exists":
                    raise

@ -61,7 +61,7 @@ class XEMBasicTests(test.SickbeardTestDBCase):
            try:
                curShow = TVShow(int(sqlShow["indexer"]), int(sqlShow["indexer_id"]))
                sickbeard.showList.append(curShow)
            except Exception, e:
            except Exception as e:
                print "There was an error creating the show"

    def test_formating(self):