Mirror of https://github.com/SickGear/SickGear.git (synced 2025-01-05 17:43:37 +00:00)
Change catch show update task errors.
Change traceback logging from DEBUG to ERROR. Add scene_exceptions error handling for missing data. Add exception catching and traceback logging in show_updater.
parent d2b19c2975
commit 266c87f5f1
12 changed files with 69 additions and 29 deletions
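The pattern this commit applies in show_updater (see the @@ -36,36 +37,68 @@ hunk below) is to give every maintenance task its own try/except so that one failing task no longer aborts the rest of the update run, while a short description plus the full traceback are logged at ERROR. A minimal standalone sketch of that pattern follows; the run_task helper and the stdlib logging module are illustrative assumptions only, the commit itself inlines try/except around each call and logs through sickbeard's own logger.

# Minimal, runnable sketch of the per-task error handling added in show_updater.
# run_task and the stdlib logging module are assumptions for illustration only.
import logging
import traceback

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('show_updater_sketch')


def run_task(description, task):
    # Run one maintenance task; log and swallow any failure so the
    # remaining tasks still execute.
    try:
        task()
    except Exception:
        logger.error('%s error', description)
        logger.error(traceback.format_exc())


def update_network_dict():
    pass  # stand-in for network_timezones.update_network_dict()


def retrieve_exceptions():
    raise IOError('simulated failure while fetching scene exceptions')


run_task('network timezone update', update_network_dict)
run_task('scene exceptions update', retrieve_exceptions)  # logged, run continues
run_task('cache dir cleanup', lambda: None)               # still executed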
@@ -59,6 +59,7 @@
* Change cosmetic title on shutdown
* Change use TVDb API v2
* Change improve search for PROPERS
+* Change catch show update task errors

[develop changelog]
@@ -264,7 +264,7 @@ class Quality:
logger.log(msg % (filename, e.text), logger.WARNING)
except Exception as e:
logger.log(msg % (filename, ex(e)), logger.ERROR)
-logger.log(traceback.format_exc(), logger.DEBUG)
+logger.log(traceback.format_exc(), logger.ERROR)

if parser:
if '.avi' == filename[-4::].lower():
@@ -91,7 +91,7 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
continue
except Exception as e:
logger.log(u'Error while searching ' + cur_provider.name + ', skipping: ' + ex(e), logger.ERROR)
-logger.log(traceback.format_exc(), logger.DEBUG)
+logger.log(traceback.format_exc(), logger.ERROR)
continue
finally:
threading.currentThread().name = orig_thread_name
@@ -21,6 +21,7 @@ import time
import threading
import datetime
import sickbeard
+import traceback

from collections import defaultdict
from lib import adba
@@ -210,7 +211,12 @@ def retrieve_exceptions():
continue

for cur_exception_dict in exception_dict[cur_indexer_id]:
-cur_exception, cur_season = cur_exception_dict.items()[0]
+try:
+cur_exception, cur_season = cur_exception_dict.items()[0]
+except Exception:
+logger.log('scene exception error', logger.ERROR)
+logger.log(traceback.format_exc(), logger.ERROR)
+continue

# if this exception isn't already in the DB then add it
if cur_exception not in existing_exceptions:
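The guard added above matters because an entry in exception_dict can be empty or otherwise malformed when the remote exception data is incomplete; on this Python 2 codebase dict.items() returns a list, so subscripting the items of an empty entry raises IndexError and the exception would propagate out of retrieve_exceptions. A small standalone illustration of that failure mode follows; the sample exception_dict is invented for this sketch, and list(...) is used only so it also runs on Python 3.

# Illustration of the malformed-entry case the new try/except absorbs.
# The exception_dict sample below is invented for this sketch.
import traceback

exception_dict = {
    101: [{'Some Show (US)': -1}],  # well formed: scene name -> season
    102: [{}],                      # malformed: empty entry, nothing to unpack
}

for cur_indexer_id in exception_dict:
    for cur_exception_dict in exception_dict[cur_indexer_id]:
        try:
            # Raises IndexError for the empty entry (items()[0] on Python 2;
            # list(...)[0] keeps this runnable on Python 3 as well).
            cur_exception, cur_season = list(cur_exception_dict.items())[0]
        except Exception:
            print('scene exception error, skipping malformed entry')
            print(traceback.format_exc())
            continue
        print('would add exception %r for season %s' % (cur_exception, cur_season))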
@@ -529,7 +529,7 @@ def xem_refresh(indexer_id, indexer, force=False):
logger.log(
u'Exception while refreshing XEM data for show ' + str(indexer_id) + ' on ' + sickbeard.indexerApi(
indexer).name + ': ' + ex(e), logger.WARNING)
-logger.log(traceback.format_exc(), logger.DEBUG)
+logger.log(traceback.format_exc(), logger.ERROR)


def fix_xem_numbering(indexer_id, indexer):
@@ -90,7 +90,7 @@ class Scheduler(threading.Thread):
self.action.run()
except Exception as e:
logger.log(u"Exception generated in thread " + self.name + ": " + ex(e), logger.ERROR)
-logger.log(repr(traceback.format_exc()), logger.DEBUG)
+logger.log(repr(traceback.format_exc()), logger.ERROR)

finally:
if self.force:
@@ -530,7 +530,7 @@ def search_providers(show, episodes, manual_search=False, torrent_only=False, tr
break
except Exception as e:
logger.log(u'Error while searching %s, skipping: %s' % (cur_provider.name, ex(e)), logger.ERROR)
-logger.log(traceback.format_exc(), logger.DEBUG)
+logger.log(traceback.format_exc(), logger.ERROR)
break
finally:
threading.currentThread().name = orig_thread_name
@@ -249,7 +249,7 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
helpers.cpu_sleep()

except (StandardError, Exception):
-logger.log(traceback.format_exc(), logger.DEBUG)
+logger.log(traceback.format_exc(), logger.ERROR)

if None is self.success:
self.success = False
@@ -398,7 +398,7 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
logger.log(u'Unable to find a download for: [%s]' % self.segment.prettyName())

except (StandardError, Exception):
-logger.log(traceback.format_exc(), logger.DEBUG)
+logger.log(traceback.format_exc(), logger.ERROR)

finally:
# Keep a list with the 100 last executed searches
@@ -445,7 +445,7 @@ class BacklogQueueItem(generic_queue.QueueItem):
logger.log(u'No needed episodes found during backlog search for: [%s]' % self.show.name)
except (StandardError, Exception):
is_error = True
-logger.log(traceback.format_exc(), logger.DEBUG)
+logger.log(traceback.format_exc(), logger.ERROR)

finally:
logger.log('Completed backlog search %sfor: [%s]' % (('', 'with a debug error ')[is_error], self.show.name))
@@ -496,7 +496,7 @@ class FailedQueueItem(generic_queue.QueueItem):
pass
# logger.log(u'No valid episode found to retry for: [%s]' % self.segment.prettyName())
except (StandardError, Exception):
-logger.log(traceback.format_exc(), logger.DEBUG)
+logger.log(traceback.format_exc(), logger.ERROR)

finally:
# Keep a list with the 100 last executed searches
@@ -369,7 +369,7 @@ class QueueItemAdd(ShowQueueItem):

except Exception as e:
logger.log('Error trying to add show: %s' % ex(e), logger.ERROR)
-logger.log(traceback.format_exc(), logger.DEBUG)
+logger.log(traceback.format_exc(), logger.ERROR)
self._finishEarly()
raise
@@ -379,7 +379,7 @@ class QueueItemAdd(ShowQueueItem):
self.show.saveToDB()
except Exception as e:
logger.log('Error saving the show to the database: %s' % ex(e), logger.ERROR)
-logger.log(traceback.format_exc(), logger.DEBUG)
+logger.log(traceback.format_exc(), logger.ERROR)
self._finishEarly()
raise
@@ -392,13 +392,13 @@ class QueueItemAdd(ShowQueueItem):
logger.log(
'Error with %s, not creating episode list: %s' % (sickbeard.indexerApi(self.show.indexer).name, ex(e)),
logger.ERROR)
-logger.log(traceback.format_exc(), logger.DEBUG)
+logger.log(traceback.format_exc(), logger.ERROR)

try:
self.show.loadEpisodesFromDir()
except Exception as e:
logger.log('Error searching directory for episodes: %s' % ex(e), logger.ERROR)
-logger.log(traceback.format_exc(), logger.DEBUG)
+logger.log(traceback.format_exc(), logger.ERROR)

# if they gave a custom status then change all the eps to it
my_db = db.DBConnection()
@@ -616,7 +616,7 @@ class QueueItemUpdate(ShowQueueItem):
self.show.saveToDB()
except Exception as e:
logger.log('Error saving the episode to the database: %s' % ex(e), logger.ERROR)
-logger.log(traceback.format_exc(), logger.DEBUG)
+logger.log(traceback.format_exc(), logger.ERROR)

# get episode list from DB
logger.log('Loading all episodes from the database', logger.DEBUG)
@@ -17,6 +17,7 @@
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.

import datetime
+import traceback

import sickbeard
from sickbeard import logger, exceptions, ui, db, network_timezones, failed_history
@@ -36,36 +37,68 @@ class ShowUpdater:
update_date = update_datetime.date()

# refresh network timezones
-network_timezones.update_network_dict()
+try:
+network_timezones.update_network_dict()
+except Exception:
+logger.log('network timezone update error', logger.ERROR)
+logger.log(traceback.format_exc(), logger.ERROR)

# update xem id lists
-sickbeard.scene_exceptions.get_xem_ids()
+try:
+sickbeard.scene_exceptions.get_xem_ids()
+except Exception:
+logger.log('xem id list update error', logger.ERROR)
+logger.log(traceback.format_exc(), logger.ERROR)

# update scene exceptions
-sickbeard.scene_exceptions.retrieve_exceptions()
+try:
+sickbeard.scene_exceptions.retrieve_exceptions()
+except Exception:
+logger.log('scene exceptions update error', logger.ERROR)
+logger.log(traceback.format_exc(), logger.ERROR)

# sure, why not?
if sickbeard.USE_FAILED_DOWNLOADS:
-failed_history.remove_old_history()
+try:
+failed_history.remove_old_history()
+except Exception:
+logger.log('Failed History cleanup error', logger.ERROR)
+logger.log(traceback.format_exc(), logger.ERROR)

# clear the data of unused providers
-sickbeard.helpers.clear_unused_providers()
+try:
+sickbeard.helpers.clear_unused_providers()
+except Exception:
+logger.log('unused provider cleanup error', logger.ERROR)
+logger.log(traceback.format_exc(), logger.ERROR)

# cleanup image cache
-sickbeard.helpers.cleanup_cache()
+try:
+sickbeard.helpers.cleanup_cache()
+except Exception:
+logger.log('image cache cleanup error', logger.ERROR)
+logger.log(traceback.format_exc(), logger.ERROR)

# add missing mapped ids
if not sickbeard.background_mapping_task.is_alive():
logger.log(u'Updating the Indexer mappings')
import threading
-sickbeard.background_mapping_task = threading.Thread(
-name='LOAD-MAPPINGS', target=sickbeard.indexermapper.load_mapped_ids, kwargs={'update': True})
-sickbeard.background_mapping_task.start()
+try:
+sickbeard.background_mapping_task = threading.Thread(
+name='LOAD-MAPPINGS', target=sickbeard.indexermapper.load_mapped_ids, kwargs={'update': True})
+sickbeard.background_mapping_task.start()
+except Exception:
+logger.log('missing mapped ids update error', logger.ERROR)
+logger.log(traceback.format_exc(), logger.ERROR)

logger.log(u'Doing full update on all shows')

# clean out cache directory, remove everything > 12 hours old
-sickbeard.helpers.clearCache()
+try:
+sickbeard.helpers.clearCache()
+except Exception:
+logger.log('cache dir cleanup error', logger.ERROR)
+logger.log(traceback.format_exc(), logger.ERROR)

# select 10 'Ended' tv_shows updated more than 90 days ago
# and all shows not updated more then 180 days ago to include in this update
@@ -471,7 +471,7 @@ class TVShow(object):
curEpisode.refreshSubtitles()
except:
logger.log('%s: Could not refresh subtitles' % self.indexerid, logger.ERROR)
-logger.log(traceback.format_exc(), logger.DEBUG)
+logger.log(traceback.format_exc(), logger.ERROR)

result = curEpisode.get_sql()
if None is not result:
@@ -953,7 +953,7 @@ class TVShow(object):
logger.log('Something is wrong with IMDb api: %s' % ex(e), logger.WARNING)
except Exception as e:
logger.log('Error loading IMDb info: %s' % ex(e), logger.ERROR)
-logger.log('%s' % traceback.format_exc(), logger.DEBUG)
+logger.log('%s' % traceback.format_exc(), logger.ERROR)

def _get_imdb_info(self):
@@ -1203,7 +1203,7 @@ class TVShow(object):
episode = self.makeEpFromFile(episodeLoc['location'])
subtitles = episode.downloadSubtitles(force=force)
except Exception as e:
-logger.log('Error occurred when downloading subtitles: %s' % traceback.format_exc(), logger.DEBUG)
+logger.log('Error occurred when downloading subtitles: %s' % traceback.format_exc(), logger.ERROR)
return

def switchIndexer(self, old_indexer, old_indexerid, pausestatus_after=None):
@@ -144,7 +144,7 @@ class Api(webserve.BaseHandler):
out = '%s(%s);' % (callback, out) # wrap with JSONP call if requested

except Exception as e: # if we fail to generate the output fake an error
-logger.log(u'API :: ' + traceback.format_exc(), logger.DEBUG)
+logger.log(u'API :: ' + traceback.format_exc(), logger.ERROR)
out = '{"result":"' + result_type_map[RESULT_ERROR] + '", "message": "error while composing output: "' + ex(
e) + '"}'