Merge pull request #835 from JackDandy/feature/ChangeLogger

Change logger to properly cleanup used resources.
JackDandy 2016-12-09 22:37:22 +00:00 committed by GitHub
commit 191986db58
6 changed files with 177 additions and 180 deletions


@@ -200,6 +200,7 @@
 * Change only display Search Settings/"Usenet retention" if Search NZBs is enabled
 * Change sab API request to prevent naming mismatch
 * Change update rTorrent systems
+* Change logger to properly cleanup used resources

 [develop changelog]
 * Change send nzb data to NZBGet for Anizb instead of url
@@ -242,6 +243,7 @@
 * Change only update enabled torrent providers
 * Fix restart when switching from master to develop
 * Change update IMDb show parser
+* Fix rTorrent auth and magnet issue

 ### 0.11.16 (2016-10-16 17:30:00 UTC)


@@ -1138,6 +1138,12 @@ def initialize(console_logging=True):
         save_config()

         # start up all the threads
+        old_log = os.path.join(LOG_DIR, 'sickbeard.log')
+        if os.path.isfile(old_log):
+            try:
+                os.rename(old_log, os.path.join(LOG_DIR, logger.sb_log_instance.log_file))
+            except (StandardError, Exception):
+                pass
         logger.sb_log_instance.init_logging(console_logging=console_logging)

         # initialize the main SB database
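The hunk above migrates any leftover `sickbeard.log` to the new log file name before logging is initialised, so existing log data is carried over to the renamed file. A minimal sketch of that pattern, with illustrative path and file names (the real code uses `LOG_DIR` and `logger.sb_log_instance.log_file`):

```python
import os


def migrate_old_log(log_dir, old_name='sickbeard.log', new_name='sickgear.log'):
    """Best-effort rename of a legacy log file before the logger opens it."""
    old_log = os.path.join(log_dir, old_name)
    if os.path.isfile(old_log):
        try:
            os.rename(old_log, os.path.join(log_dir, new_name))
        except OSError:
            # if the rename fails (file locked, permissions), just start up anyway
            pass
```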


@@ -25,7 +25,7 @@ from lib.rtorrent import RTorrent
 class RtorrentAPI(GenericClient):
     def __init__(self, host=None, username=None, password=None):

-        if host and host.startswith('scgi:') and any([username, password]):
+        if host and host.startswith('scgi:'):
             username = password = None

         super(RtorrentAPI, self).__init__('rTorrent', host, username, password)
@@ -37,7 +37,7 @@ class RtorrentAPI(GenericClient):
         self.auth = None
         if self.host:
             try:
-                if self.host and self.host.startswith('scgi:') and any([self.username, self.password]):
+                if self.host and self.host.startswith('scgi:'):
                     self.username = self.password = None

                 self.auth = RTorrent(self.host, self.username, self.password, True)
             except (AssertionError, xmlrpclib.ProtocolError) as e:
@@ -54,7 +54,7 @@ class RtorrentAPI(GenericClient):
             if 'file' == cmd:
                 torrent = self.auth.load_torrent(kwargs['file'])
             elif 'magnet' == cmd:
-                torrent = self.auth.load_magnet(kwargs['url'], kwargs['btih'])
+                torrent = self.auth.load_magnet(kwargs['uri'], kwargs['btih'])

             if torrent:
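Both rTorrent changes above are behavioural fixes: credentials are now always dropped for `scgi:` hosts, and the magnet link is read from kwargs as `uri` rather than `url`. A hedged sketch of the host normalisation only; `normalize_rtorrent_auth` is an illustrative helper, not part of the project:

```python
def normalize_rtorrent_auth(host, username, password):
    """Drop username/password for scgi: connections, which do not use HTTP auth."""
    if host and host.startswith('scgi:'):
        username = password = None
    return host, username, password


# normalize_rtorrent_auth('scgi://127.0.0.1:5000', 'user', 'pass')
# -> ('scgi://127.0.0.1:5000', None, None)
```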


@@ -511,10 +511,10 @@ class AddShowidTvdbidIndex(db.SchemaUpgrade):
 class AddLastUpdateTVDB(db.SchemaUpgrade):
     # Adding column last_update_tvdb to tv_shows for controlling nightly updates
     def execute(self):
-        db.backup_database('sickbeard.db', self.checkDBVersion())
         if not self.hasColumn('tv_shows', 'last_update_tvdb'):
             logger.log(u'Adding column last_update_tvdb to tv_shows')
+            db.backup_database('sickbeard.db', self.checkDBVersion())
             self.addColumn('tv_shows', 'last_update_tvdb', default=1)

         self.incDBVersion()
@@ -534,14 +534,21 @@ class AddDBIncreaseTo15(db.SchemaUpgrade):
 # 15 -> 16
 class AddIMDbInfo(db.SchemaUpgrade):
     def execute(self):
-        db.backup_database('sickbeard.db', self.checkDBVersion())
-        logger.log(u'Creating IMDb table imdb_info')
-        self.connection.action(
-            'CREATE TABLE imdb_info (tvdb_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC)')
+        db_backed_up = False
+        if not self.hasTable('imdb_info'):
+            logger.log(u'Creating IMDb table imdb_info')
+            db.backup_database('sickbeard.db', self.checkDBVersion())
+            db_backed_up = True
+            self.connection.action(
+                'CREATE TABLE imdb_info (tvdb_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC)')

         if not self.hasColumn('tv_shows', 'imdb_id'):
             logger.log(u'Adding IMDb column imdb_id to tv_shows')
+            if not db_backed_up:
+                db.backup_database('sickbeard.db', self.checkDBVersion())
             self.addColumn('tv_shows', 'imdb_id')

         self.incDBVersion()
@@ -551,7 +558,6 @@ class AddIMDbInfo(db.SchemaUpgrade):
 # 16 -> 17
 class AddProperNamingSupport(db.SchemaUpgrade):
     def execute(self):
-        db.backup_database('sickbeard.db', self.checkDBVersion())
         if not self.hasColumn('tv_shows', 'imdb_id')\
                 and self.hasColumn('tv_shows', 'rls_require_words')\
@@ -561,6 +567,7 @@ class AddProperNamingSupport(db.SchemaUpgrade):
         if not self.hasColumn('tv_episodes', 'is_proper'):
             logger.log(u'Adding column is_proper to tv_episodes')
+            db.backup_database('sickbeard.db', self.checkDBVersion())
             self.addColumn('tv_episodes', 'is_proper')

         self.incDBVersion()
@@ -570,7 +577,6 @@ class AddProperNamingSupport(db.SchemaUpgrade):
 # 17 -> 18
 class AddEmailSubscriptionTable(db.SchemaUpgrade):
     def execute(self):
-        db.backup_database('sickbeard.db', self.checkDBVersion())
         if not self.hasColumn('tv_episodes', 'is_proper')\
                 and self.hasColumn('tv_shows', 'rls_require_words')\
@@ -581,6 +587,7 @@ class AddEmailSubscriptionTable(db.SchemaUpgrade):
         if not self.hasColumn('tv_shows', 'notify_list'):
             logger.log(u'Adding column notify_list to tv_shows')
+            db.backup_database('sickbeard.db', self.checkDBVersion())
             self.addColumn('tv_shows', 'notify_list', 'TEXT', None)

         self.incDBVersion()
@@ -590,7 +597,9 @@ class AddEmailSubscriptionTable(db.SchemaUpgrade):
 # 18 -> 19
 class AddProperSearch(db.SchemaUpgrade):
     def execute(self):
-        db.backup_database('sickbeard.db', self.checkDBVersion())
+        if not self.hasColumn('tv_episodes', 'is_proper'):
+            self.setDBVersion(12)
+            return self.checkDBVersion()
         if not self.hasColumn('tv_shows', 'notify_list')\
                 and self.hasColumn('tv_shows', 'rls_require_words')\
@@ -602,6 +611,7 @@ class AddProperSearch(db.SchemaUpgrade):
         if not self.hasColumn('info', 'last_proper_search'):
             logger.log(u'Adding column last_proper_search to info')
+            db.backup_database('sickbeard.db', self.checkDBVersion())
             self.addColumn('info', 'last_proper_search', default=1)

         self.incDBVersion()
@@ -611,10 +621,9 @@ class AddProperSearch(db.SchemaUpgrade):
 # 19 -> 20
 class AddDvdOrderOption(db.SchemaUpgrade):
     def execute(self):
-        db.backup_database('sickbeard.db', self.checkDBVersion())
         if not self.hasColumn('tv_shows', 'dvdorder'):
             logger.log(u'Adding column dvdorder to tv_shows')
+            db.backup_database('sickbeard.db', self.checkDBVersion())
             self.addColumn('tv_shows', 'dvdorder', 'NUMERIC', '0')

         self.incDBVersion()
@@ -624,10 +633,9 @@ class AddDvdOrderOption(db.SchemaUpgrade):
 # 20 -> 21
 class AddSubtitlesSupport(db.SchemaUpgrade):
     def execute(self):
-        db.backup_database('sickbeard.db', self.checkDBVersion())
         if not self.hasColumn('tv_shows', 'subtitles'):
             logger.log(u'Adding subtitles to tv_shows and tv_episodes')
+            db.backup_database('sickbeard.db', self.checkDBVersion())
             self.addColumn('tv_shows', 'subtitles')
             self.addColumn('tv_episodes', 'subtitles', 'TEXT', '')
             self.addColumn('tv_episodes', 'subtitles_searchcount')
@@ -753,7 +761,6 @@ class AddArchiveFirstMatchOption(db.SchemaUpgrade):
 # 26 -> 27
 class AddSceneNumbering(db.SchemaUpgrade):
     def execute(self):
         db.backup_database('sickbeard.db', self.checkDBVersion())
@@ -791,20 +798,22 @@ class ConvertIndexerToInteger(db.SchemaUpgrade):
 # 28 -> 29
 class AddRequireAndIgnoreWords(db.SchemaUpgrade):
     # Adding column rls_require_words and rls_ignore_words to tv_shows
     def execute(self):
         if self.hasColumn('tv_shows', 'rls_require_words') and self.hasColumn('tv_shows', 'rls_ignore_words'):
             self.incDBVersion()
             return self.checkDBVersion()

-        db.backup_database('sickbeard.db', self.checkDBVersion())
+        db_backed_up = False
         if not self.hasColumn('tv_shows', 'rls_require_words'):
             logger.log(u'Adding column rls_require_words to tv_shows')
+            db.backup_database('sickbeard.db', self.checkDBVersion())
+            db_backed_up = True
             self.addColumn('tv_shows', 'rls_require_words', 'TEXT', '')

         if not self.hasColumn('tv_shows', 'rls_ignore_words'):
             logger.log(u'Adding column rls_ignore_words to tv_shows')
+            if not db_backed_up:
+                db.backup_database('sickbeard.db', self.checkDBVersion())
             self.addColumn('tv_shows', 'rls_ignore_words', 'TEXT', '')

         self.incDBVersion()
@@ -814,15 +823,18 @@ class AddRequireAndIgnoreWords(db.SchemaUpgrade):
 # 29 -> 30
 class AddSportsOption(db.SchemaUpgrade):
     def execute(self):
-        db.backup_database('sickbeard.db', self.checkDBVersion())
+        db_backed_up = False
         if not self.hasColumn('tv_shows', 'sports'):
             logger.log(u'Adding column sports to tv_shows')
+            db.backup_database('sickbeard.db', self.checkDBVersion())
+            db_backed_up = True
             self.addColumn('tv_shows', 'sports', 'NUMERIC', '0')

         if self.hasColumn('tv_shows', 'air_by_date') and self.hasColumn('tv_shows', 'sports'):
             # update sports column
             logger.log(u'[4/4] Updating tv_shows to reflect the correct sports value...', logger.MESSAGE)
+            if not db_backed_up:
+                db.backup_database('sickbeard.db', self.checkDBVersion())
             cl = []
             history_quality = self.connection.select(
                 'SELECT * FROM tv_shows WHERE LOWER(classification) = "sports" AND air_by_date = 1 AND sports = 0')
@@ -891,9 +903,8 @@ class AddAnimeBlacklistWhitelist(db.SchemaUpgrade):
     def execute(self):
         db.backup_database('sickbeard.db', self.checkDBVersion())

-        cl = []
-        cl.append(['CREATE TABLE blacklist (show_id INTEGER, range TEXT, keyword TEXT)'])
-        cl.append(['CREATE TABLE whitelist (show_id INTEGER, range TEXT, keyword TEXT)'])
+        cl = [['CREATE TABLE blacklist (show_id INTEGER, range TEXT, keyword TEXT)'],
+              ['CREATE TABLE whitelist (show_id INTEGER, range TEXT, keyword TEXT)']]
         logger.log(u'Creating table blacklist whitelist')
         self.connection.mass_action(cl)
@@ -995,21 +1006,28 @@ class Migrate41(db.SchemaUpgrade):
 # 43,44 -> 10001
 class Migrate43(db.SchemaUpgrade):
     def execute(self):
-        db.backup_database('sickbeard.db', self.checkDBVersion())
+        db_backed_up = False
         db_chg = None
         table = 'tmdb_info'
         if self.hasTable(table):
+            db.backup_database('sickbeard.db', self.checkDBVersion())
+            db_backed_up = True
             logger.log(u'Dropping redundant table tmdb_info')
             self.connection.action('DROP TABLE [%s]' % table)
             db_chg = True

         if self.hasColumn('tv_shows', 'tmdb_id'):
+            if not db_backed_up:
+                db.backup_database('sickbeard.db', self.checkDBVersion())
+                db_backed_up = True
             logger.log(u'Dropping redundant tmdb_info refs')
             self.dropColumn('tv_shows', 'tmdb_id')
             db_chg = True

         if not self.hasTable('db_version'):
+            if not db_backed_up:
+                db.backup_database('sickbeard.db', self.checkDBVersion())
             self.connection.action('PRAGMA user_version = 0')
             self.connection.action('CREATE TABLE db_version (db_version INTEGER);')
             self.connection.action('INSERT INTO db_version (db_version) VALUES (0);')
@@ -1092,10 +1110,9 @@ class RemoveMinorDBVersion(db.SchemaUpgrade):
 # 10003 -> 10002
 class RemoveMetadataSub(db.SchemaUpgrade):
     def execute(self):
-        db.backup_database('sickbeard.db', self.checkDBVersion())
         if self.hasColumn('tv_shows', 'sub_use_sr_metadata'):
             logger.log(u'Dropping redundant column metadata sub')
+            db.backup_database('sickbeard.db', self.checkDBVersion())
             self.dropColumn('tv_shows', 'sub_use_sr_metadata')

         self.setDBVersion(10002)
@@ -1119,10 +1136,9 @@ class DBIncreaseTo20001(db.SchemaUpgrade):
 # 20001 -> 20002
 class AddTvShowOverview(db.SchemaUpgrade):
     def execute(self):
-        db.backup_database('sickbeard.db', self.checkDBVersion())
         if not self.hasColumn('tv_shows', 'overview'):
             logger.log(u'Adding column overview to tv_shows')
+            db.backup_database('sickbeard.db', self.checkDBVersion())
             self.addColumn('tv_shows', 'overview', 'TEXT', '')

         self.setDBVersion(20002)
@@ -1132,15 +1148,16 @@ class AddTvShowOverview(db.SchemaUpgrade):
 # 20002 -> 20003
 class AddTvShowTags(db.SchemaUpgrade):
     def execute(self):
-        db.backup_database('sickbeard.db', self.checkDBVersion())
         if not self.hasColumn('tv_shows', 'tag'):
             logger.log(u'Adding tag to tv_shows')
+            db.backup_database('sickbeard.db', self.checkDBVersion())
             self.addColumn('tv_shows', 'tag', 'TEXT', 'Show List')

         self.setDBVersion(20003)
         return self.checkDBVersion()


 # 20003 -> 20004
 class ChangeMapIndexer(db.SchemaUpgrade):
     def execute(self):
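The schema-upgrade edits above all follow one pattern: `db.backup_database` is no longer called unconditionally at the top of `execute`, but only when an upgrade is actually about to change something, and a `db_backed_up` flag ensures at most one backup per migration. A generic sketch of that pattern; the callables are illustrative stand-ins, not the project's API:

```python
def run_upgrade(column_specs, has_column, add_column, backup_database):
    """Apply missing columns, backing up the database once and only if needed.

    column_specs: iterable of (table, column) pairs to ensure.
    has_column/add_column/backup_database: illustrative callables standing in
    for the SchemaUpgrade helpers used in the diff above.
    """
    db_backed_up = False
    for table, column in column_specs:
        if not has_column(table, column):
            if not db_backed_up:
                backup_database()  # defer the backup until a change is imminent
                db_backed_up = True
            add_column(table, column)
    return db_backed_up
```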


@@ -1,7 +1,5 @@
-# Author: Nic Wolfe <nic@wolfeden.ca>
-# URL: http://code.google.com/p/sickbeard/
 #
 # This file is part of SickGear.
 #
 # SickGear is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -18,14 +16,15 @@
 from __future__ import with_statement

-import time
-import os
-import sys
-import threading
-import zipfile
+import codecs
 import logging
 import glob
-import codecs
+import os
+import re
+import sys
+import threading
+import time
+import zipfile

 from logging.handlers import TimedRotatingFileHandler
@@ -44,14 +43,10 @@ MESSAGE = logging.INFO
 DEBUG = logging.DEBUG
 DB = 5

-reverseNames = {u'ERROR': ERROR,
-                u'WARNING': WARNING,
-                u'INFO': MESSAGE,
-                u'DEBUG': DEBUG,
-                u'DB': DB}
+reverseNames = {u'ERROR': ERROR, u'WARNING': WARNING, u'INFO': MESSAGE, u'DEBUG': DEBUG, u'DB': DB}

-# send logging to null
+# suppress output with this handler
 class NullHandler(logging.Handler):
     def emit(self, record):
         pass
@@ -61,141 +56,92 @@ class SBRotatingLogHandler(object):
     def __init__(self, log_file):
         self.log_file = log_file
         self.log_file_path = log_file
-        self.cur_handler = None
+        self.h_file = None
+        self.h_console = None
         self.console_logging = False
         self.log_lock = threading.Lock()
+        self.log_types = ['sickbeard', 'tornado.application', 'tornado.general', 'imdbpy', 'subliminal']
+        self.log_types_null = ['tornado.access']

     def __del__(self):
         pass

     def close_log(self, handler=None):
+        handlers = []
         if not handler:
-            handler = self.cur_handler
-
-        if handler:
-            sb_logger = logging.getLogger('sickbeard')
-            sub_logger = logging.getLogger('subliminal')
-            imdb_logger = logging.getLogger('imdbpy')
-            tornado_logger = logging.getLogger('tornado')
-            feedcache_logger = logging.getLogger('feedcache')
-
-            sb_logger.removeHandler(handler)
-            sub_logger.removeHandler(handler)
-            imdb_logger.removeHandler(handler)
-            tornado_logger.removeHandler(handler)
-            feedcache_logger.removeHandler(handler)
+            handlers = [self.h_file]
+            if None is not self.h_console:
+                handlers += [self.h_console]
+        elif not isinstance(handler, list):
+            handlers = [handler]
+
+        for handler in handlers:
+            for logger_name in self.log_types + self.log_types_null:
+                logging.getLogger(logger_name).removeHandler(handler)

             handler.flush()
             handler.close()

     def init_logging(self, console_logging=False):
-        if console_logging:
-            self.console_logging = console_logging
-
-        old_handler = None
-
-        # get old handler in case we want to close it
-        if self.cur_handler:
-            old_handler = self.cur_handler
-        else:
-            # add a new logging level DB
-            logging.addLevelName(5, 'DB')
-
-            # only start console_logging on first initialize
-            if self.console_logging:
-                # define a Handler which writes INFO messages or higher to the sys.stderr
-                console = logging.StreamHandler()
-                console.setLevel(logging.INFO)
-                if sickbeard.DEBUG:
-                    console.setLevel(logging.DEBUG)
-
-                # set a format which is simpler for console use
-                console.setFormatter(DispatchingFormatter(
-                    {'sickbeard': logging.Formatter(
-                        '%(asctime)s %(levelname)s::%(message)s', '%H:%M:%S'),
-                     'subliminal': logging.Formatter(
-                         '%(asctime)s %(levelname)s::SUBLIMINAL :: %(message)s', '%H:%M:%S'),
-                     'imdbpy': logging.Formatter(
-                         '%(asctime)s %(levelname)s::IMDBPY :: %(message)s', '%H:%M:%S'),
-                     'tornado.general': logging.Formatter(
-                         '%(asctime)s %(levelname)s::TORNADO :: %(message)s', '%H:%M:%S'),
-                     'tornado.application': logging.Formatter(
-                         '%(asctime)s %(levelname)s::TORNADO :: %(message)s', '%H:%M:%S'),
-                     'feedcache.cache': logging.Formatter(
-                         '%(asctime)s %(levelname)s::FEEDCACHE :: %(message)s', '%H:%M:%S')},
-                    logging.Formatter('%(message)s'), ))
-
-                # add the handler to the root logger
-                logging.getLogger('sickbeard').addHandler(console)
-                logging.getLogger('tornado.general').addHandler(console)
-                logging.getLogger('tornado.application').addHandler(console)
-                logging.getLogger('subliminal').addHandler(console)
-                logging.getLogger('imdbpy').addHandler(console)
-                logging.getLogger('feedcache').addHandler(console)
+        self.console_logging |= console_logging

         self.log_file_path = os.path.join(sickbeard.LOG_DIR, self.log_file)

-        self.cur_handler = self._config_handler()
-        logging.getLogger('sickbeard').addHandler(self.cur_handler)
-        logging.getLogger('tornado.access').addHandler(NullHandler())
-        logging.getLogger('tornado.general').addHandler(self.cur_handler)
-        logging.getLogger('tornado.application').addHandler(self.cur_handler)
-        logging.getLogger('subliminal').addHandler(self.cur_handler)
-        logging.getLogger('imdbpy').addHandler(self.cur_handler)
-        logging.getLogger('feedcache').addHandler(self.cur_handler)
+        # get old handler for post switch-over closure
+        old_h_file = old_h_console = None
+        if self.h_file or self.h_console:
+            if self.h_file:
+                old_h_file = self.h_file
+            if self.h_console:
+                old_h_console = self.h_console
+
+        # add a new logging level DB
+        logging.addLevelName(5, 'DB')
+
+        if self.console_logging:
+            # get a console handler to output INFO or higher messages to sys.stderr
+            h_console = logging.StreamHandler()
+            h_console.setLevel((logging.INFO, logging.DEBUG)[sickbeard.DEBUG])
+            h_console.setFormatter(DispatchingFormatter(self._formatters(), logging.Formatter('%(message)s'), ))
+            self.h_console = h_console
+
+            # add the handler to the root logger
+            for logger_name in self.log_types:
+                logging.getLogger(logger_name).addHandler(h_console)
+
+        for logger_name in self.log_types_null:
+            logging.getLogger(logger_name).addHandler(NullHandler())
+
+        h_file = TimedCompressedRotatingFileHandler(self.log_file_path, logger=self)
+        h_file.setLevel(reverseNames[sickbeard.FILE_LOGGING_PRESET])
+        h_file.setFormatter(DispatchingFormatter(self._formatters(False), logging.Formatter('%(message)s'), ))
+        self.h_file = h_file
+
+        for logger_name in self.log_types:
+            logging.getLogger(logger_name).addHandler(h_file)
+
+        log_level = (logging.WARNING, logging.DEBUG)[sickbeard.DEBUG]
+        for logger_name in [x for x in self.log_types if 'sickbeard' != x]:
+            logging.getLogger(logger_name).setLevel(log_level)

         logging.getLogger('sickbeard').setLevel(DB)

-        log_level = logging.WARNING
-        if sickbeard.DEBUG:
-            log_level = logging.DEBUG
-
-        logging.getLogger('tornado.general').setLevel(log_level)
-        logging.getLogger('tornado.application').setLevel(log_level)
-        logging.getLogger('subliminal').setLevel(log_level)
-        logging.getLogger('imdbpy').setLevel(log_level)
-        logging.getLogger('feedcache').setLevel(log_level)
-
-        # already logging in new log folder, close the old handler
-        if old_handler:
-            self.close_log(old_handler)
-            # old_handler.flush()
-            # old_handler.close()
-            # sb_logger = logging.getLogger('sickbeard')
-            # sub_logger = logging.getLogger('subliminal')
-            # imdb_logger = logging.getLogger('imdbpy')
-            # sb_logger.removeHandler(old_handler)
-            # subli_logger.removeHandler(old_handler)
-            # imdb_logger.removeHandler(old_handler)
-
-    def _config_handler(self):
-        """
-        Configure a file handler to log at file_name and return it.
-        """
-        file_handler = TimedCompressedRotatingFileHandler(self.log_file_path, when='midnight',
-                                                          backupCount=16, encoding='utf-8')
-
-        file_handler.setLevel(reverseNames[sickbeard.FILE_LOGGING_PRESET])
-        file_handler.setFormatter(DispatchingFormatter(
-            {'sickbeard': logging.Formatter(
-                '%(asctime)s %(levelname)-8s %(message)s', '%Y-%m-%d %H:%M:%S'),
-             'subliminal': logging.Formatter(
-                 '%(asctime)s %(levelname)-8s SUBLIMINAL :: %(message)s', '%Y-%m-%d %H:%M:%S'),
-             'imdbpy': logging.Formatter(
-                 '%(asctime)s %(levelname)-8s IMDBPY :: %(message)s', '%Y-%m-%d %H:%M:%S'),
-             'tornado.general': logging.Formatter(
-                 '%(asctime)s %(levelname)-8s TORNADO :: %(message)s', '%Y-%m-%d %H:%M:%S'),
-             'tornado.application': logging.Formatter(
-                 '%(asctime)s %(levelname)-8s TORNADO :: %(message)s', '%Y-%m-%d %H:%M:%S'),
-             'feedcache.cache': logging.Formatter(
-                 '%(asctime)s %(levelname)-8s FEEDCACHE :: %(message)s', '%Y-%m-%d %H:%M:%S')},
-            logging.Formatter('%(message)s'), ))
-
-        return file_handler
+        # as now logging in new log folder, close old handlers
+        if old_h_file:
+            self.close_log(old_h_file)
+        if old_h_console:
+            self.close_log(old_h_console)
+
+    def _formatters(self, log_simple=True):
+        fmt = {}
+        for logger_name in self.log_types:
+            source = (re.sub('(.*\.\w\w\w).*$', r'\1', logger_name).upper() + ' :: ', '')['sickbeard' == logger_name]
+            fmt.setdefault(logger_name, logging.Formatter(
+                '%(asctime)s %(levelname)' + ('-8', '')[log_simple] + 's ' + source
+                + '%(message)s', ('%Y-%m-%d ', '')[log_simple] + '%H:%M:%S'))
+
+        return fmt

     def log(self, to_log, log_level=MESSAGE):
@@ -209,7 +155,6 @@ class SBRotatingLogHandler(object):
         # sub_logger = logging.getLogger('subliminal')
         # imdb_logger = logging.getLogger('imdbpy')
         # tornado_logger = logging.getLogger('tornado')
-        # feedcache_logger = logging.getLogger('feedcache')

         try:
             if DEBUG == log_level:
@@ -252,11 +197,27 @@ class DispatchingFormatter:
 class TimedCompressedRotatingFileHandler(TimedRotatingFileHandler):
+    def __init__(self, log_file_path, logger=None, when='midnight', interval=1,
+                 backup_count=16, encoding='utf-8', delay=False, utc=False):
+        super(TimedCompressedRotatingFileHandler, self).__init__(log_file_path, when, interval,
+                                                                 backup_count, encoding, delay, utc)
+        self.logger_instance = logger
+
+    def doRollover(self):
+        """
+        example:
+        logger.TimedCompressedRotatingFileHandler(sickbeard.logger.sb_log_instance.log_file_path, when='M', interval=2,
+                                                  logger=sickbeard.logger.sb_log_instance).doRollover()
+        """
+        if self.logger_instance:
+            with self.logger_instance.log_lock:
+                self._do_rollover()
+
     """
     Extended version of TimedRotatingFileHandler that compress logs on rollover.
     by Angel Freire <cuerty at gmail dot com>
     """
-    def doRollover(self):
+    def _do_rollover(self):
         """
         do a rollover; in this case, a date/time stamp is appended to the filename
         when the rollover happens. However, you want the file to be named for the
@@ -268,35 +229,46 @@ class TimedCompressedRotatingFileHandler(TimedRotatingFileHandler):
         """
         self.stream.close()
-        # get the time that this sequence started at and make it a TimeTuple
+        # get the time that this sequence started at
         t = self.rolloverAt - self.interval
-        time_tuple = time.localtime(t)
+        start_time = time.localtime(t)
         file_name = self.baseFilename.rpartition('.')[0]
-        dfn = '%s_%s.log' % (file_name, time.strftime(self.suffix, time_tuple))
+        dfn = '%s_%s.log' % (file_name, time.strftime(self.suffix, start_time))
         self.delete_logfile(dfn)

+        self.logger_instance.close_log()
+        self.logger_instance.h_file = self.logger_instance.h_console = None
+
+        from sickbeard import encodingKludge
         try:
-            ek.ek(os.rename, self.baseFilename, dfn)
+            encodingKludge.ek(os.rename, self.baseFilename, dfn)
         except (StandardError, Exception):
             pass

-        if 0 < self.backupCount:
-            # find the oldest log file and delete it
-            s = glob.glob(file_name + '_*')
-            if len(s) > self.backupCount:
-                s.sort()
-                self.delete_logfile(s[0])
-
-        # print "%s -> %s" % (self.baseFilename, dfn)
+        self.logger_instance.init_logging()

         if self.encoding:
             self.stream = codecs.open(self.baseFilename, 'w', self.encoding)
         else:
             self.stream = open(self.baseFilename, 'w')
-        self.rolloverAt = self.rolloverAt + self.interval

         zip_name = '%s.zip' % dfn.rpartition('.')[0]
         self.delete_logfile(zip_name)
-        zip_fh = zipfile.ZipFile(zip_name, 'w')
-        zip_fh.write(dfn, os.path.basename(dfn), zipfile.ZIP_DEFLATED)
-        zip_fh.close()
+        with zipfile.ZipFile(zip_name, 'w', zipfile.ZIP_DEFLATED) as zip_fh:
+            zip_fh.write(dfn, os.path.basename(dfn))
         self.delete_logfile(dfn)

+        if 0 < self.backupCount:
+            # find the oldest log file and delete it
+            all_names = encodingKludge.ek(glob.glob, file_name + '_*')
+            if len(all_names) > self.backupCount:
+                all_names.sort()
+                self.delete_logfile(all_names[0])
+
+        self.rolloverAt = self.rolloverAt + self.interval

     @staticmethod
     def delete_logfile(filepath):
         if os.path.exists(filepath):
@@ -306,7 +278,7 @@ class TimedCompressedRotatingFileHandler(TimedRotatingFileHandler):
             sickbeard.helpers.remove_file_failed(filepath)

-sb_log_instance = SBRotatingLogHandler('sickbeard.log')
+sb_log_instance = SBRotatingLogHandler('sickgear.log')

 def log(to_log, log_level=MESSAGE):
@@ -322,8 +294,8 @@ def close():
 def log_set_level():
-    if sb_log_instance.cur_handler:
-        sb_log_instance.cur_handler.setLevel(reverseNames[sickbeard.FILE_LOGGING_PRESET])
+    if sb_log_instance.h_file:
+        sb_log_instance.h_file.setLevel(reverseNames[sickbeard.FILE_LOGGING_PRESET])

 def current_log_file():
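Two themes run through the logger rewrite above: handlers are tracked per role (`h_file`, `h_console`) and attached or removed by iterating the `log_types` list, and the rotated log is compressed inside a `with zipfile.ZipFile(...)` block so the archive is always closed. A hedged, standalone sketch of the compression step only; `compress_rotated_log` is an illustrative helper, not the project's handler:

```python
import os
import zipfile


def compress_rotated_log(rotated_path):
    """Zip a rotated log file and delete the original, as the rollover above does.

    The real handler also re-opens its stream, re-inits logging and prunes
    archives beyond backupCount; this only shows the compression step.
    """
    zip_name = '%s.zip' % rotated_path.rpartition('.')[0]
    if os.path.exists(zip_name):
        os.remove(zip_name)  # replace any stale archive from a previous run
    with zipfile.ZipFile(zip_name, 'w', zipfile.ZIP_DEFLATED) as zip_fh:
        zip_fh.write(rotated_path, os.path.basename(rotated_path))
    os.remove(rotated_path)
```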


@@ -54,8 +54,8 @@ FILEPATH = os.path.join(FILEDIR, FILENAME)
 SHOWDIR = os.path.join(TESTDIR, SHOWNAME + ' final')

-#sickbeard.logger.sb_log_instance = sickbeard.logger.SBRotatingLogHandler(os.path.join(TESTDIR, 'sickbeard.log'), sickbeard.logger.NUM_LOGS, sickbeard.logger.LOG_SIZE)
-sickbeard.logger.SBRotatingLogHandler.log_file = os.path.join(os.path.join(TESTDIR, 'Logs'), 'test_sickbeard.log')
+#sickbeard.logger.sb_log_instance = sickbeard.logger.SBRotatingLogHandler(os.path.join(TESTDIR, 'sickgear.log'), sickbeard.logger.NUM_LOGS, sickbeard.logger.LOG_SIZE)
+sickbeard.logger.SBRotatingLogHandler.log_file = os.path.join(os.path.join(TESTDIR, 'Logs'), 'test_sickgear.log')

 #=================