Merge pull request #828 from JackDandy/feature/FixUnknownQualitySearch

Feature/fix unknown quality search
JackDandy 2016-11-16 22:33:10 +00:00 committed by GitHub
commit 266a512876
21 changed files with 203 additions and 141 deletions

View file

@@ -193,6 +193,8 @@
* Fix button "Checkout branch" when stuck on disabled
* Add 'Download Log' to 'Logs & Errors' page
* Change consolidate shutdown with restart, improve systemd support, bring order to on-init globals
+* Change speed improvement in finding needed categories/qualities (sd, hd, uhd)
+* Change add guidance when using the "unknown" quality selection
[develop changelog]
* Change send nzb data to NZBGet for Anizb instead of url
@@ -229,6 +231,9 @@
* Change remove deprecated providers being saved to config
* Change prevent a missing slash typo and correct develop typo after a network outage
* Change send download logfile as stream
+* Fix launch browser during startup
+* Change don't exclude "unknown" from search
+* Fix UHD category select in Recent Search
### 0.11.16 (2016-10-16 17:30:00 UTC)

View file

@@ -220,7 +220,7 @@ class SickGear(object):
# Run as a double forked daemon
if o in ('-d', '--daemon'):
self.run_as_daemon = True
-# When running as daemon disable consoleLogging and don't start browser
+# When running as daemon disable console_logging and don't start browser
self.console_logging = False
self.no_launch = True

View file

@@ -1257,8 +1257,8 @@ div.formpaginate .prev, div.formpaginate .next{
background:#2265A1
}
-#customQualityWrapper div.component-group-desc p{
+#customQualityWrapper .tip-text p{
-color:#666
+color:#888
}
/* =======================================================================

View file

@@ -1212,7 +1212,7 @@ div.formpaginate .prev, div.formpaginate .next{
background:#57442b
}
-#customQualityWrapper div.component-group-desc p{
+#customQualityWrapper .tip-text p{
color:#666
}

View file

@@ -966,11 +966,17 @@ div.formpaginate{
margin-right:6px
}
-#edit-show #customQualityWrapper div.component-group-desc p,
-#addShowForm #customQualityWrapper div.component-group-desc p{
+#edit-show #customQualityWrapper .tip-text p,
+#addShowForm #customQualityWrapper .tip-text p,
+#edit-show #customQualityWrapper .tip-text em,
+#addShowForm #customQualityWrapper .tip-text em{
font-size:14px
}
+#addShowForm .stepDiv #customQuality.show-if-quality-custom span.component-desc p{
+font-size:12px
+}
#addShowForm #nameToSearch{
width:460px;
margin-top:0

View file

@@ -21,30 +21,40 @@
<div id="customQualityWrapper">
<div id="customQuality" class="show-if-quality-custom" style="display:none">
-<div class="component-group-desc">
+<div class="component-group-desc tip-text">
<p>An <em>Initial</em> quality episode must be found before an <em>Upgrade to</em> selection is considered.</p>
<p>Upgrades continue until the highest selected of <em>Upgrade to</em> is matched.</p>
</div>
<span class="component-desc">
-<div style="float:left;padding-right:40px">
+<div style="float:left;padding-right:28px">
<h4 class="jumbo">Initial</h4>
#set $anyQualityList = filter(lambda x: x > $Quality.NONE, $Quality.qualityStrings)
<select id="anyQualities" name="anyQualities" multiple="multiple" size="$len($anyQualityList)" class="form-control form-control-inline input-sm">
+#set $has_unknown = False
#for $curQuality in sorted($anyQualityList):
+#set $has_unknown |= ($Quality.UNKNOWN == $curQuality and $curQuality in $anyQualities)
<option value="$curQuality"#echo ('', $html_selected)[$curQuality in $anyQualities]#>$Quality.qualityStrings[$curQuality]</option>
#end for
</select>
</div>
-<div style="float:left">
+<div style="float:left;padding-right:20px">
<h4 class="jumbo">Upgrade to</h4>
#set $bestQualityList = filter(lambda x: x > $Quality.SDTV and x < $Quality.UNKNOWN, $Quality.qualityStrings)
<select id="bestQualities" name="bestQualities" multiple="multiple" size="$len($bestQualityList)" class="form-control form-control-inline input-sm">
#for $curQuality in sorted($bestQualityList):
<option value="$curQuality"#echo ('', $html_selected)[$curQuality in $bestQualities]#>$Quality.qualityStrings[$curQuality]</option>
#end for
-</select>
+</select><br />
+<span>Ctrl + Click = toggle a quality</span>
+</div>
+<div style="line-height:normal;padding-top:50px" id="quality-notes" class="tip-text">
+<p id="unknown"#if not $has_unknown# style="display:none"#end if#>
+<em class="highlight-text">Note:</em> Temporarily use 'Unknown' for releases with no recognised quality.
+Full-time use risks snatching bad releases and wastes API hits.
+</p>
</div>
</span>
</div>
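Note on the template change above: it tracks whether 'Unknown' is both offered and currently selected as an Initial quality, and only then reveals the guidance paragraph (the chooser script below tests option value 32768 for the same purpose). A minimal Python sketch of that flag logic, using illustrative stand-ins rather than the real template variables:

    # Sketch of the template's has_unknown flag (names here are illustrative, not SickGear API).
    UNKNOWN = 32768  # the quality value the chooser script also tests for

    def show_unknown_tip(any_quality_list, selected_initial):
        """True when 'Unknown' is both offered and currently selected as an Initial quality."""
        has_unknown = False
        for cur_quality in sorted(any_quality_list):
            has_unknown |= (UNKNOWN == cur_quality and cur_quality in selected_initial)
        return has_unknown

    print(show_unknown_tip([1, 2, UNKNOWN], {UNKNOWN}))  # True  -> reveal the note
    print(show_unknown_tip([1, 2, UNKNOWN], {2}))        # False -> keep it hidden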

View file

@@ -19,6 +19,17 @@ function setFromPresets (preset) {
});
} else
elCustomQuality.fadeIn('fast', 'linear');
+presentTips();
+}
+function presentTips() {
+var tip$ = $('#unknown');
+if (/undefined/i.test($('#anyQualities').find('option[value="32768"]').attr('selected'))) {
+tip$.fadeOut('fast', 'linear');
+} else {
+tip$.fadeIn('fast', 'linear');
+}
}
$(document).ready(function() {
@@ -30,4 +41,8 @@ $(document).ready(function() {
});
setFromPresets(elQualityPreset.find(selected).val());
+$('#anyQualities').change(function() {
+presentTips();
+});
});

View file

@@ -517,7 +517,7 @@ class Tvdb:
self.config['url_artworkPrefix'] = u'%(base_url)s/banners/%%s' % self.config
def log(self, msg, log_level=logger.DEBUG):
-logger.log('TVDB_API :: %s' % (msg.replace(self.config['apikey'], '<apikey>')), logLevel=log_level)
+logger.log('TVDB_API :: %s' % (msg.replace(self.config['apikey'], '<apikey>')), log_level=log_level)
@staticmethod
def _get_temp_dir():

View file

@@ -1138,7 +1138,7 @@ def initialize(console_logging=True):
save_config()
# start up all the threads
-logger.sb_log_instance.initLogging(consoleLogging=console_logging)
+logger.sb_log_instance.init_logging(console_logging=console_logging)
# initialize the main SB database
my_db = db.DBConnection()
@@ -1800,7 +1800,7 @@ def save_config():
def launch_browser(start_port=None):
if not start_port:
start_port = WEB_PORT
-browser_url = 'http%s://localhost:%d%s' % (('s' or '')[not ENABLE_HTTPS], start_port, WEB_ROOT)
+browser_url = 'http%s://localhost:%d%s' % (('s', '')[not ENABLE_HTTPS], start_port, WEB_ROOT)
try:
webbrowser.open(browser_url, 2, 1)
except (StandardError, Exception):
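This hunk is the "Fix launch browser during startup" changelog entry: ('s' or '') evaluates to the one-character string 's', so indexing it with the boolean not ENABLE_HTTPS raises IndexError whenever HTTPS is disabled and the browser never opens. A small sketch of the failure and the corrected tuple form (the port value is illustrative):

    # Why the old expression failed and how the tuple fixes it (illustrative values).
    ENABLE_HTTPS = False
    WEB_PORT, WEB_ROOT = 8081, ''

    try:
        scheme = ('s' or '')[not ENABLE_HTTPS]   # old code: ('s' or '') is just 's'; index 1 -> IndexError
    except IndexError:
        print('old form raises IndexError when HTTPS is off')

    scheme = ('s', '')[not ENABLE_HTTPS]         # fixed: a real 2-tuple indexed by the boolean
    print('http%s://localhost:%d%s' % (scheme, WEB_PORT, WEB_ROOT))   # -> http://localhost:8081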

View file

@@ -89,7 +89,7 @@ def change_LOG_DIR(log_dir, web_log):
sickbeard.ACTUAL_LOG_DIR = os.path.normpath(log_dir)
sickbeard.LOG_DIR = abs_log_dir
-logger.sb_log_instance.initLogging()
+logger.sb_log_instance.init_logging()
logger.log(u'Initialized new log file in %s' % sickbeard.LOG_DIR)
log_dir_changed = True

View file

@@ -172,10 +172,10 @@ def sanitizeFileName(name):
return name
-def _remove_file_failed(file):
+def remove_file_failed(filename):
try:
-ek.ek(os.remove, file)
+ek.ek(os.remove, filename)
-except:
+except (StandardError, Exception):
pass
@@ -323,7 +323,7 @@ def link(src, dst):
if ctypes.windll.kernel32.CreateHardLinkW(unicode(dst), unicode(src), 0) == 0: raise ctypes.WinError()
else:
-os.link(src, dst)
+ek.ek(os.link, src, dst)
def hardlinkFile(srcFile, destFile):
@@ -340,10 +340,11 @@ def symlink(src, dst):
if os.name == 'nt':
import ctypes
-if ctypes.windll.kernel32.CreateSymbolicLinkW(unicode(dst), unicode(src), 1 if os.path.isdir(src) else 0) in [0,
-1280]: raise ctypes.WinError()
+if ctypes.windll.kernel32.CreateSymbolicLinkW(
+unicode(dst), unicode(src), 1 if ek.ek(os.path.isdir, src) else 0) in [0, 1280]:
+raise ctypes.WinError()
else:
-os.symlink(src, dst)
+ek.ek(os.symlink, src, dst)
def moveAndSymlinkFile(srcFile, destFile):
@@ -411,11 +412,11 @@ def rename_ep_file(cur_path, new_path, old_path_length=0):
old_path_length: The length of media file path (old name) WITHOUT THE EXTENSION
"""
-new_dest_dir, new_dest_name = os.path.split(new_path)  # @UnusedVariable
+new_dest_dir, new_dest_name = ek.ek(os.path.split, new_path)  # @UnusedVariable
if old_path_length == 0 or old_path_length > len(cur_path):
# approach from the right
-cur_file_name, cur_file_ext = os.path.splitext(cur_path)  # @UnusedVariable
+cur_file_name, cur_file_ext = ek.ek(os.path.splitext, cur_path)  # @UnusedVariable
else:
# approach from the left
cur_file_ext = cur_path[old_path_length:]
@@ -423,7 +424,7 @@ def rename_ep_file(cur_path, new_path, old_path_length=0):
if cur_file_ext[1:] in subtitleExtensions:
# Extract subtitle language from filename
-sublang = os.path.splitext(cur_file_name)[1][1:]
+sublang = ek.ek(os.path.splitext, cur_file_name)[1][1:]
# Check if the language extracted from filename is a valid language
try:
@@ -435,7 +436,7 @@ def rename_ep_file(cur_path, new_path, old_path_length=0):
# put the extension on the incoming file
new_path += cur_file_ext
-make_dirs(os.path.dirname(new_path))
+make_dirs(ek.ek(os.path.dirname, new_path))
# move the file
try:
@@ -724,7 +725,7 @@ def backupVersionedFile(old_file, version):
def restoreVersionedFile(backup_file, version):
numTries = 0
-new_file, backup_version = os.path.splitext(backup_file)
+new_file, backup_version = ek.ek(os.path.splitext, backup_file)
restore_file = new_file + '.' + 'v' + str(version)
if not ek.ek(os.path.isfile, new_file):
@@ -1007,7 +1008,7 @@ def touchFile(fname, atime=None):
if None != atime:
try:
with open(fname, 'a'):
-os.utime(fname, (atime, atime))
+ek.ek(os.utime, fname, (atime, atime))
return True
except:
logger.log(u"File air date stamping not available on your OS", logger.DEBUG)
@@ -1027,9 +1028,9 @@ def _getTempDir():
try:
uid = getpass.getuser()
except ImportError:
-return os.path.join(tempfile.gettempdir(), "SickGear")
+return ek.ek(os.path.join, tempfile.gettempdir(), "SickGear")
-return os.path.join(tempfile.gettempdir(), "SickGear-%s" % (uid))
+return ek.ek(os.path.join, tempfile.gettempdir(), "SickGear-%s" % (uid))
def proxy_setting(proxy_setting, request_url, force=False):
@@ -1098,7 +1099,7 @@ def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=N
if not kwargs.get('nocache'):
cache_dir = sickbeard.CACHE_DIR or _getTempDir()
-session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(cache_dir, 'sessions')))
+session = CacheControl(sess=session, cache=caches.FileCache(ek.ek(os.path.join, cache_dir, 'sessions')))
else:
del(kwargs['nocache'])
@@ -1221,7 +1222,7 @@ def download_file(url, filename, session=None):
if None is session:
session = requests.session()
cache_dir = sickbeard.CACHE_DIR or _getTempDir()
-session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(cache_dir, 'sessions')))
+session = CacheControl(sess=session, cache=caches.FileCache(ek.ek(os.path.join, cache_dir, 'sessions')))
# request session headers
session.headers.update({'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'})
@@ -1258,27 +1259,27 @@ def download_file(url, filename, session=None):
if chunk:
fp.write(chunk)
fp.flush()
-os.fsync(fp.fileno())
+ek.ek(os.fsync, fp.fileno())
chmodAsParent(filename)
except requests.exceptions.HTTPError as e:
-_remove_file_failed(filename)
+remove_file_failed(filename)
logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
return False
except requests.exceptions.ConnectionError as e:
-_remove_file_failed(filename)
+remove_file_failed(filename)
logger.log(u"Connection error " + str(e.message) + " while loading URL " + url, logger.WARNING)
return False
except requests.exceptions.Timeout as e:
-_remove_file_failed(filename)
+remove_file_failed(filename)
logger.log(u"Connection timed out " + str(e.message) + " while loading URL " + url, logger.WARNING)
return False
except EnvironmentError as e:
-_remove_file_failed(filename)
+remove_file_failed(filename)
logger.log(u"Unable to save the file: " + ex(e), logger.ERROR)
return False
except Exception:
-_remove_file_failed(filename)
+remove_file_failed(filename)
logger.log(u"Unknown exception while loading URL " + url + ": " + traceback.format_exc(), logger.WARNING)
return False
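The recurring edit in this file routes direct os.* calls through ek.ek, the project's encodingKludge helper, so filesystem arguments get consistent unicode handling on Python 2. The wrapper itself is not part of this diff; the following is only a simplified sketch of the idea, not the real sickbeard.encodingKludge implementation:

    # Simplified sketch of an encoding-kludge call wrapper (illustrative only).
    import os
    import sys

    SYS_ENCODING = sys.getfilesystemencoding() or 'utf-8'

    def ek(func, *args, **kwargs):
        """Call func after encoding unicode args for the local filesystem (Python 2 era)."""
        if 2 == sys.version_info[0]:
            args = tuple(a.encode(SYS_ENCODING) if isinstance(a, unicode) else a  # noqa: F821
                         for a in args)
        return func(*args, **kwargs)

    # usage mirrors the diff: ek(os.path.join, cache_dir, 'sessions'), ek(os.remove, filename), ...
    print(ek(os.path.join, 'tmp', 'sessions'))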

View file

@@ -50,11 +50,13 @@ reverseNames = {u'ERROR': ERROR,
u'DEBUG': DEBUG,
u'DB': DB}
+# send logging to null
class NullHandler(logging.Handler):
def emit(self, record):
pass
class SBRotatingLogHandler(object):
def __init__(self, log_file):
self.log_file = log_file
@@ -87,10 +89,10 @@ class SBRotatingLogHandler(object):
handler.flush()
handler.close()
-def initLogging(self, consoleLogging=False):
+def init_logging(self, console_logging=False):
-if consoleLogging:
+if console_logging:
-self.console_logging = consoleLogging
+self.console_logging = console_logging
old_handler = None
@@ -99,10 +101,10 @@ class SBRotatingLogHandler(object):
old_handler = self.cur_handler
else:
-#Add a new logging level DB
+# add a new logging level DB
logging.addLevelName(5, 'DB')
-# only start consoleLogging on first initialize
+# only start console_logging on first initialize
if self.console_logging:
# define a Handler which writes INFO messages or higher to the sys.stderr
console = logging.StreamHandler()
@@ -113,15 +115,18 @@ class SBRotatingLogHandler(object):
# set a format which is simpler for console use
console.setFormatter(DispatchingFormatter(
-{'sickbeard': logging.Formatter('%(asctime)s %(levelname)s::%(message)s', '%H:%M:%S'),
-'subliminal': logging.Formatter('%(asctime)s %(levelname)s::SUBLIMINAL :: %(message)s',
-'%H:%M:%S'),
-'imdbpy': logging.Formatter('%(asctime)s %(levelname)s::IMDBPY :: %(message)s', '%H:%M:%S'),
-'tornado.general': logging.Formatter('%(asctime)s %(levelname)s::TORNADO :: %(message)s', '%H:%M:%S'),
-'tornado.application': logging.Formatter('%(asctime)s %(levelname)s::TORNADO :: %(message)s', '%H:%M:%S'),
-'feedcache.cache': logging.Formatter('%(asctime)s %(levelname)s::FEEDCACHE :: %(message)s',
-'%H:%M:%S')
-},
+{'sickbeard': logging.Formatter(
+'%(asctime)s %(levelname)s::%(message)s', '%H:%M:%S'),
+'subliminal': logging.Formatter(
+'%(asctime)s %(levelname)s::SUBLIMINAL :: %(message)s', '%H:%M:%S'),
+'imdbpy': logging.Formatter(
+'%(asctime)s %(levelname)s::IMDBPY :: %(message)s', '%H:%M:%S'),
+'tornado.general': logging.Formatter(
+'%(asctime)s %(levelname)s::TORNADO :: %(message)s', '%H:%M:%S'),
+'tornado.application': logging.Formatter(
+'%(asctime)s %(levelname)s::TORNADO :: %(message)s', '%H:%M:%S'),
+'feedcache.cache': logging.Formatter(
+'%(asctime)s %(levelname)s::FEEDCACHE :: %(message)s', '%H:%M:%S')},
logging.Formatter('%(message)s'), ))
# add the handler to the root logger
@@ -155,7 +160,6 @@ class SBRotatingLogHandler(object):
logging.getLogger('imdbpy').setLevel(log_level)
logging.getLogger('feedcache').setLevel(log_level)
-# already logging in new log folder, close the old handler
if old_handler:
self.close_log(old_handler)
@@ -173,54 +177,55 @@ class SBRotatingLogHandler(object):
Configure a file handler to log at file_name and return it.
"""
-file_handler = TimedCompressedRotatingFileHandler(self.log_file_path, when='midnight', backupCount=7, encoding='utf-8')
+file_handler = TimedCompressedRotatingFileHandler(self.log_file_path, when='midnight',
+backupCount=16, encoding='utf-8')
file_handler.setLevel(reverseNames[sickbeard.FILE_LOGGING_PRESET])
file_handler.setFormatter(DispatchingFormatter(
-{'sickbeard': logging.Formatter('%(asctime)s %(levelname)-8s %(message)s', '%Y-%m-%d %H:%M:%S'),
-'subliminal': logging.Formatter('%(asctime)s %(levelname)-8s SUBLIMINAL :: %(message)s',
-'%Y-%m-%d %H:%M:%S'),
-'imdbpy': logging.Formatter('%(asctime)s %(levelname)-8s IMDBPY :: %(message)s', '%Y-%m-%d %H:%M:%S'),
-'tornado.general': logging.Formatter('%(asctime)s %(levelname)-8s TORNADO :: %(message)s', '%Y-%m-%d %H:%M:%S'),
-'tornado.application': logging.Formatter('%(asctime)s %(levelname)-8s TORNADO :: %(message)s', '%Y-%m-%d %H:%M:%S'),
-'feedcache.cache': logging.Formatter('%(asctime)s %(levelname)-8s FEEDCACHE :: %(message)s',
-'%Y-%m-%d %H:%M:%S')
-},
+{'sickbeard': logging.Formatter(
+'%(asctime)s %(levelname)-8s %(message)s', '%Y-%m-%d %H:%M:%S'),
+'subliminal': logging.Formatter(
+'%(asctime)s %(levelname)-8s SUBLIMINAL :: %(message)s', '%Y-%m-%d %H:%M:%S'),
+'imdbpy': logging.Formatter(
+'%(asctime)s %(levelname)-8s IMDBPY :: %(message)s', '%Y-%m-%d %H:%M:%S'),
+'tornado.general': logging.Formatter(
+'%(asctime)s %(levelname)-8s TORNADO :: %(message)s', '%Y-%m-%d %H:%M:%S'),
+'tornado.application': logging.Formatter(
+'%(asctime)s %(levelname)-8s TORNADO :: %(message)s', '%Y-%m-%d %H:%M:%S'),
+'feedcache.cache': logging.Formatter(
+'%(asctime)s %(levelname)-8s FEEDCACHE :: %(message)s', '%Y-%m-%d %H:%M:%S')},
logging.Formatter('%(message)s'), ))
return file_handler
-def log(self, toLog, logLevel=MESSAGE):
+def log(self, to_log, log_level=MESSAGE):
with self.log_lock:
-meThread = threading.currentThread().getName()
-message = meThread + u" :: " + toLog
-out_line = message
+out_line = '%s :: %s' % (threading.currentThread().getName(), to_log)
sb_logger = logging.getLogger('sickbeard')
setattr(sb_logger, 'db', lambda *args: sb_logger.log(DB, *args))
-sub_logger = logging.getLogger('subliminal')
-imdb_logger = logging.getLogger('imdbpy')
-tornado_logger = logging.getLogger('tornado')
-feedcache_logger = logging.getLogger('feedcache')
+# sub_logger = logging.getLogger('subliminal')
+# imdb_logger = logging.getLogger('imdbpy')
+# tornado_logger = logging.getLogger('tornado')
+# feedcache_logger = logging.getLogger('feedcache')
try:
-if logLevel == DEBUG:
+if DEBUG == log_level:
sb_logger.debug(out_line)
-elif logLevel == MESSAGE:
+elif MESSAGE == log_level:
sb_logger.info(out_line)
-elif logLevel == WARNING:
+elif WARNING == log_level:
sb_logger.warning(out_line)
-elif logLevel == ERROR:
+elif ERROR == log_level:
sb_logger.error(out_line)
# add errors to the UI logger
-classes.ErrorViewer.add(classes.UIError(message))
+classes.ErrorViewer.add(classes.UIError(out_line))
-elif logLevel == DB:
+elif DB == log_level:
sb_logger.db(out_line)
else:
-sb_logger.log(logLevel, out_line)
+sb_logger.log(log_level, out_line)
except ValueError:
pass
@@ -259,47 +264,54 @@ class TimedCompressedRotatingFileHandler(TimedRotatingFileHandler):
then we have to get a list of matching filenames, sort them and remove
the one with the oldest suffix.
-This method is a copy of the one in TimedRotatingFileHandler. Since it uses
+This method is modified from the one in TimedRotatingFileHandler.
"""
self.stream.close()
# get the time that this sequence started at and make it a TimeTuple
t = self.rolloverAt - self.interval
-timeTuple = time.localtime(t)
+time_tuple = time.localtime(t)
file_name = self.baseFilename.rpartition('.')[0]
-dfn = '%s_%s.log' % (file_name, time.strftime(self.suffix, timeTuple))
+dfn = '%s_%s.log' % (file_name, time.strftime(self.suffix, time_tuple))
-if os.path.exists(dfn):
-sickbeard.helpers._remove_file_failed(dfn)
+self.delete_logfile(dfn)
try:
-os.rename(self.baseFilename, dfn)
+ek.ek(os.rename, self.baseFilename, dfn)
-except:
+except (StandardError, Exception):
pass
-if self.backupCount > 0:
+if 0 < self.backupCount:
# find the oldest log file and delete it
s = glob.glob(file_name + '_*')
if len(s) > self.backupCount:
s.sort()
-sickbeard.helpers._remove_file_failed(s[0])
+self.delete_logfile(s[0])
# print "%s -> %s" % (self.baseFilename, dfn)
if self.encoding:
self.stream = codecs.open(self.baseFilename, 'w', self.encoding)
else:
self.stream = open(self.baseFilename, 'w')
self.rolloverAt = self.rolloverAt + self.interval
-zip_name = dfn.rpartition('.')[0] + '.zip'
+zip_name = '%s.zip' % dfn.rpartition('.')[0]
-if os.path.exists(zip_name):
-sickbeard.helpers._remove_file_failed(zip_name)
-file = zipfile.ZipFile(zip_name, 'w')
-file.write(dfn, os.path.basename(dfn), zipfile.ZIP_DEFLATED)
-file.close()
-sickbeard.helpers._remove_file_failed(dfn)
+self.delete_logfile(zip_name)
+zip_fh = zipfile.ZipFile(zip_name, 'w')
+zip_fh.write(dfn, os.path.basename(dfn), zipfile.ZIP_DEFLATED)
+zip_fh.close()
+self.delete_logfile(dfn)
+@staticmethod
+def delete_logfile(filepath):
+from sickbeard import encodingKludge
+if encodingKludge.ek(os.path.exists, filepath):
+if sickbeard.TRASH_ROTATE_LOGS:
+encodingKludge.ek(send2trash, filepath)
+else:
+sickbeard.helpers.remove_file_failed(filepath)
sb_log_instance = SBRotatingLogHandler('sickbeard.log')
-def log(toLog, logLevel=MESSAGE):
+def log(to_log, log_level=MESSAGE):
-sb_log_instance.log(toLog, logLevel)
+sb_log_instance.log(to_log, log_level)
def log_error_and_exit(error_msg):
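Both formatter tables above are handed to DispatchingFormatter, which this diff only reflows and does not define; conceptually it holds one Formatter per logger name plus a default. A rough sketch of that shape, offered as an assumption for illustration rather than SickGear's actual class:

    import logging

    # Assumed shape of a per-logger-name dispatching formatter (illustration only).
    class DispatchingFormatter(object):
        def __init__(self, formatters, default_formatter):
            self._formatters = formatters        # e.g. {'sickbeard': ..., 'tornado.general': ...}
            self._default = default_formatter

        def format(self, record):
            # pick the formatter registered for this logger name, else fall back to the default
            return self._formatters.get(record.name, self._default).format(record)

    console = logging.StreamHandler()
    console.setFormatter(DispatchingFormatter(
        {'sickbeard': logging.Formatter('%(asctime)s %(levelname)s::%(message)s', '%H:%M:%S')},
        logging.Formatter('%(message)s')))
    logging.getLogger('sickbeard').addHandler(console)
    logging.getLogger('sickbeard').warning('hello')  # emitted with the 'sickbeard' format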

View file

@@ -293,10 +293,10 @@ class PostProcessor(object):
cur_extension = 'nfo-orig'
# check if file have subtitles language
-if os.path.splitext(cur_extension)[1][1:] in common.subtitleExtensions:
+if ek.ek(os.path.splitext, cur_extension)[1][1:] in common.subtitleExtensions:
-cur_lang = os.path.splitext(cur_extension)[0]
+cur_lang = ek.ek(os.path.splitext, cur_extension)[0]
if cur_lang in sickbeard.SUBTITLES_LANGUAGES:
-cur_extension = cur_lang + os.path.splitext(cur_extension)[1]
+cur_extension = cur_lang + ek.ek(os.path.splitext, cur_extension)[1]
# If new base name then convert name
if new_base_name:

View file

@@ -401,7 +401,7 @@ class ProcessTVShow(object):
return False
if failed:
-self._process_failed(os.path.join(path, dir_name), nzb_name_original, showObj=showObj)
+self._process_failed(ek.ek(os.path.join, path, dir_name), nzb_name_original, showObj=showObj)
return False
if helpers.is_hidden_folder(dir_name):
@@ -623,11 +623,11 @@ class ProcessTVShow(object):
result = False
chunks = {}
matcher = re.compile('\.[0-9]+$')
-for dirpath, void, filenames in os.walk(directory):
+for dirpath, void, filenames in ek.ek(os.walk, directory):
for filename in filenames:
if None is not matcher.search(filename):
maybe_chunk = ek.ek(os.path.join, dirpath, filename)
-base_filepath, ext = os.path.splitext(maybe_chunk)
+base_filepath, ext = ek.ek(os.path.splitext, maybe_chunk)
if base_filepath not in chunks:
chunks[base_filepath] = []
chunks[base_filepath].append(maybe_chunk)
@@ -809,10 +809,10 @@ class ProcessTVShow(object):
break
else:
path, dirs = ek.ek(os.path.split, dir_name)  # Script Post Processing
-if None is not nzb_name and not nzb_name.endswith('.nzb') and os.path.isfile(
-os.path.join(dir_name, nzb_name)):  # For single torrent file without directory
+if None is not nzb_name and not nzb_name.endswith('.nzb') and \
+ek.ek(os.path.isfile, ek.ek(os.path.join, dir_name, nzb_name)):  # For single torrent file without directory
dirs = []
-files = [os.path.join(dir_name, nzb_name)]
+files = [ek.ek(os.path.join, dir_name, nzb_name)]
else:
dirs = [dirs]
files = []

View file

@@ -40,7 +40,7 @@ from hachoir_core.stream import FileInputStream
from sickbeard import helpers, classes, logger, db, tvcache, encodingKludge as ek
from sickbeard.common import Quality, MULTI_EP_RESULT, SEASON_RESULT, USER_AGENT
from sickbeard.exceptions import SickBeardException, AuthException, ex
-from sickbeard.helpers import maybe_plural, _remove_file_failed as remove_file_failed
+from sickbeard.helpers import maybe_plural, remove_file_failed
from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
from sickbeard.show_name_helpers import allPossibleShowNames

View file

@@ -444,13 +444,18 @@ class NewznabProvider(generic.NZBProvider):
Quality.HDBLURAY, Quality.FULLHDBLURAY]
max_hd = Quality.FULLHDBLURAY
for s in searches:
+if need_sd and need_hd and need_uhd:
+break
if not s.show.is_anime and not s.show.is_sports:
-if not need_sd and min(s.wantedQuality) <= max_sd:
-need_sd = True
-if not need_hd and any(i in hd_qualities for i in s.wantedQuality):
-need_hd = True
-if not need_uhd and max(s.wantedQuality) > max_hd:
-need_uhd = True
+if Quality.UNKNOWN in s.wantedQuality:
+need_sd = need_hd = need_uhd = True
+else:
+if not need_sd and min(s.wantedQuality) <= max_sd:
+need_sd = True
+if not need_hd and any(i in hd_qualities for i in s.wantedQuality):
+need_hd = True
+if not need_uhd and max(s.wantedQuality) > max_hd:
+need_uhd = True
per_ep, limit_per_ep = 0, 0
if need_sd and not need_hd:
per_ep, limit_per_ep = 10, 25
@@ -474,12 +479,15 @@ class NewznabProvider(generic.NZBProvider):
if not season_search:
need_sd = need_hd = need_uhd = False
if not ep_obj.show.is_anime and not ep_obj.show.is_sports:
-if min(ep_obj.wantedQuality) <= max_sd:
-need_sd = True
-if any(i in hd_qualities for i in ep_obj.wantedQuality):
-need_hd = True
-if max(ep_obj.wantedQuality) > max_hd:
-need_uhd = True
+if Quality.UNKNOWN in ep_obj.wantedQuality:
+need_sd = need_hd = need_uhd = True
+else:
+if min(ep_obj.wantedQuality) <= max_sd:
+need_sd = True
+if any(i in hd_qualities for i in ep_obj.wantedQuality):
+need_hd = True
+if max(ep_obj.wantedQuality) > max_hd:
+need_uhd = True
return (season_search, need_sd, need_hd, need_uhd,
(hits_per_page * 100 // hits_per_page * 2, hits_per_page * int(ceil(rel_limit * 1.5)))[season_search])
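The reworked test treats an episode that wants 'Unknown' as needing every category, since a release with no recognised quality could turn out to be SD, HD or UHD, and the new leading check stops the loop once all three flags are set (the "speed improvement" changelog entry); the same pattern is applied in RecentSearchQueueItem further down. A condensed, self-contained sketch of the decision, with the Quality bitmasks reduced to plain numbers for illustration:

    # Condensed sketch of the need_sd/need_hd/need_uhd decision (values are stand-ins).
    UNKNOWN = 32768
    MAX_SD, HD_QUALITIES, MAX_HD = 2, {4, 32, 128}, 256

    def needed_categories(wanted_quality_lists):
        need_sd = need_hd = need_uhd = False
        for wanted in wanted_quality_lists:
            if need_sd and need_hd and need_uhd:
                break                                   # nothing left to learn, stop early
            if UNKNOWN in wanted:
                need_sd = need_hd = need_uhd = True     # unknown could be any quality
            else:
                need_sd |= min(wanted) <= MAX_SD
                need_hd |= any(q in HD_QUALITIES for q in wanted)
                need_uhd |= max(wanted) > MAX_HD
        return need_sd, need_hd, need_uhd

    print(needed_categories([[1, 2], [4, UNKNOWN]]))    # -> (True, True, True)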

View file

@@ -387,7 +387,7 @@ def wanted_episodes(show, from_date, make_dict=False, unaired=False):
ep_obj = show.getEpisode(int(result['season']), int(result['episode']))
ep_obj.wantedQuality = [i for i in (wanted_qualities, initial_qualities)[not_downloaded]
-if (common.Quality.UNKNOWN != i and cur_quality < i)]
+if cur_quality < i]
ep_obj.eps_aired_in_season = ep_count.get(helpers.tryInt(result['season']), 0)
ep_obj.eps_aired_in_scene_season = ep_count_scene.get(
helpers.tryInt(result['scene_season']), 0) if result['scene_season'] else ep_obj.eps_aired_in_season
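Dropping the common.Quality.UNKNOWN exclusion here is the "don't exclude unknown from search" entry: an 'Unknown' initial selection now survives into ep_obj.wantedQuality, which is what the provider and queue logic elsewhere in this change inspects. A tiny before/after sketch with made-up values:

    # Before/after effect of removing the UNKNOWN exclusion (made-up quality values).
    UNKNOWN, cur_quality = 32768, 1
    candidates = [2, 4, UNKNOWN]

    old_wanted = [i for i in candidates if UNKNOWN != i and cur_quality < i]   # [2, 4]
    new_wanted = [i for i in candidates if cur_quality < i]                    # [2, 4, 32768]
    print(old_wanted, new_wanted)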

View file

@@ -206,15 +206,20 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
need_anime = True
if not need_sports and curShow.is_sports:
need_sports = True
-if not need_sd or not need_hd:
+if not need_sd or not need_hd or not need_uhd:
for w in wanted_eps:
+if need_sd and need_hd and need_uhd:
+break
if not w.show.is_anime and not w.show.is_sports:
-if not need_sd and max_sd >= min(w.wantedQuality):
-need_sd = True
-if not need_hd and any(i in hd_qualities for i in w.wantedQuality):
-need_hd = True
-if not need_uhd and max_hd < max(w.wantedQuality):
-need_uhd = True
+if Quality.UNKNOWN in w.wantedQuality:
+need_sd = need_hd = need_uhd = True
+else:
+if not need_sd and max_sd >= min(w.wantedQuality):
+need_sd = True
+if not need_hd and any(i in hd_qualities for i in w.wantedQuality):
+need_hd = True
+if not need_uhd and max_hd < max(w.wantedQuality):
+need_uhd = True
self.episodes.extend(wanted_eps)
self.update_providers(need_anime=need_anime, need_sports=need_sports,

View file

@@ -1079,9 +1079,9 @@ class TVShow(object):
logger.log('Attempt to %s cache file %s' % (action, cache_file))
try:
if sickbeard.TRASH_REMOVE_SHOW:
-send2trash(cache_file)
+ek.ek(send2trash, cache_file)
else:
-os.remove(cache_file)
+ek.ek(os.remove, cache_file)
except OSError as e:
logger.log('Unable to %s %s: %s / %s' % (action, cache_file, repr(e), str(e)), logger.WARNING)
@@ -1101,7 +1101,7 @@ class TVShow(object):
logger.log('Unable to change permissions of %s' % self._location, logger.WARNING)
if sickbeard.TRASH_REMOVE_SHOW:
-send2trash(self.location)
+ek.ek(send2trash, self.location)
else:
ek.ek(shutil.rmtree, self.location)
@@ -1137,7 +1137,7 @@ class TVShow(object):
sql_l = []
for ep in sqlResults:
-curLoc = os.path.normpath(ep['location'])
+curLoc = ek.ek(os.path.normpath, ep['location'])
season = int(ep['season'])
episode = int(ep['episode'])
@@ -1149,8 +1149,8 @@ class TVShow(object):
continue
# if the path doesn't exist or if it's not in our show dir
-if not ek.ek(os.path.isfile, curLoc) or not os.path.normpath(curLoc).startswith(
-os.path.normpath(self.location)):
+if not ek.ek(os.path.isfile, curLoc) or not ek.ek(os.path.normpath, curLoc).startswith(
+ek.ek(os.path.normpath, self.location)):
# check if downloaded files still exist, update our data if this has changed
if 1 != sickbeard.SKIP_REMOVED_FILES:
@@ -1528,7 +1528,7 @@ class TVEpisode(object):
if sickbeard.SUBTITLES_DIR:
for video in subtitles:
-subs_new_path = ek.ek(os.path.join, os.path.dirname(video.path), sickbeard.SUBTITLES_DIR)
+subs_new_path = ek.ek(os.path.join, ek.ek(os.path.dirname, video.path), sickbeard.SUBTITLES_DIR)
dir_exists = helpers.makeDir(subs_new_path)
if not dir_exists:
logger.log('Unable to create subtitles folder %s' % subs_new_path, logger.ERROR)
@@ -1536,7 +1536,7 @@ class TVEpisode(object):
helpers.chmodAsParent(subs_new_path)
for subtitle in subtitles.get(video):
-new_file_path = ek.ek(os.path.join, subs_new_path, os.path.basename(subtitle.path))
+new_file_path = ek.ek(os.path.join, subs_new_path, ek.ek(os.path.basename, subtitle.path))
helpers.moveFile(subtitle.path, new_file_path)
helpers.chmodAsParent(new_file_path)
else:
@@ -1664,7 +1664,7 @@ class TVEpisode(object):
# don't overwrite my location
if sql_results[0]['location'] and sql_results[0]['location']:
-self.location = os.path.normpath(sql_results[0]['location'])
+self.location = ek.ek(os.path.normpath, sql_results[0]['location'])
if sql_results[0]['file_size']:
self.file_size = int(sql_results[0]['file_size'])
else:
@@ -2493,7 +2493,7 @@ class TVEpisode(object):
if len(name_groups) == 1:
return ''
else:
-return self._format_pattern(os.sep.join(name_groups[:-1]), multi)
+return self._format_pattern(ek.ek(os.sep.join, name_groups[:-1]), multi)
def formatted_filename(self, pattern=None, multi=None, anime_type=None):
"""
@@ -2605,7 +2605,7 @@ class TVEpisode(object):
"""
if not datetime.date == type(self.airdate) or 1 == self.airdate.year:
logger.log('%s: Did not change modify date of %s because episode date is never aired or invalid'
-% (self.show.indexerid, os.path.basename(self.location)), logger.DEBUG)
+% (self.show.indexerid, ek.ek(os.path.basename, self.location)), logger.DEBUG)
return
hr = m = 0
@@ -2619,7 +2619,7 @@ class TVEpisode(object):
airdatetime = datetime.datetime.combine(self.airdate, airtime)
-filemtime = datetime.datetime.fromtimestamp(os.path.getmtime(self.location))
+filemtime = datetime.datetime.fromtimestamp(ek.ek(os.path.getmtime, self.location))
if filemtime != airdatetime:
import time
@@ -2627,7 +2627,7 @@ class TVEpisode(object):
airdatetime = airdatetime.timetuple()
if helpers.touchFile(self.location, time.mktime(airdatetime)):
logger.log('%s: Changed modify date of %s to show air date %s'
-% (self.show.indexerid, os.path.basename(self.location), time.strftime('%b %d,%Y (%H:%M)', airdatetime)))
+% (self.show.indexerid, ek.ek(os.path.basename, self.location), time.strftime('%b %d,%Y (%H:%M)', airdatetime)))
def __getstate__(self):
d = dict(self.__dict__)

View file

@@ -2147,7 +2147,7 @@ class Home(MainHandler):
# Find the quality class for the episode
quality_class = Quality.qualityStrings[Quality.UNKNOWN]
ep_status, ep_quality = Quality.splitCompositeStatus(ep_obj.status)
-for x in (SD, HD720p, HD1080p):
+for x in (SD, HD720p, HD1080p, UHD2160p):
if ep_quality in Quality.splitQuality(x)[0]:
quality_class = qualityPresetStrings[x]
break
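This loop picks the display class for an episode by testing its concrete quality against the preset groups; adding UHD2160p means a 2160p episode now maps to its own preset label instead of falling through to the 'Unknown' default. A simplified sketch of the lookup (the preset tuples and strings are reduced stand-ins, not the real common.py tables):

    # Simplified sketch of quality -> preset label mapping (stand-in tables).
    SD, HD720p, HD1080p, UHD2160p = (1, 2), (4, 8), (16, 32), (64, 128)
    quality_preset_strings = {SD: 'SD', HD720p: 'HD720p', HD1080p: 'HD1080p', UHD2160p: 'UHD2160p'}

    def quality_class(ep_quality):
        label = 'Unknown'
        for preset in (SD, HD720p, HD1080p, UHD2160p):   # UHD2160p was missing before this fix
            if ep_quality in preset:
                label = quality_preset_strings[preset]
                break
        return label

    print(quality_class(64))   # -> 'UHD2160p' instead of 'Unknown'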

View file

@@ -93,7 +93,7 @@ sickbeard.PROG_DIR = os.path.abspath('..')
sickbeard.DATA_DIR = sickbeard.PROG_DIR
sickbeard.LOG_DIR = os.path.join(TESTDIR, 'Logs')
createTestLogFolder()
-sickbeard.logger.sb_log_instance.initLogging(False)
+sickbeard.logger.sb_log_instance.init_logging(False)
sickbeard.CACHE_DIR = os.path.join(TESTDIR, 'cache')
createTestCacheFolder()