Merge pull request #828 from JackDandy/feature/FixUnknownQualitySearch

Feature/fix unknown quality search

Commit 266a512876: 21 changed files with 203 additions and 141 deletions

@@ -193,6 +193,8 @@
 * Fix button "Checkout branch" when stuck on disabled
 * Add 'Download Log' to 'Logs & Errors' page
 * Change consolidate shutdown with restart, improve systemd support, bring order to on-init globals
+* Change speed improvement in finding needed categories/qualities (sd, hd, uhd)
+* Change add guidance when using the "unknown" quality selection

 [develop changelog]
 * Change send nzb data to NZBGet for Anizb instead of url

@@ -229,6 +231,9 @@
 * Change remove deprecated providers being saved to config
 * Change prevent a missing slash typo and correct develop typo after a network outage
 * Change send download logfile as stream
+* Fix launch browser during startup
+* Change don't exclude "unknown" from search
+* Fix UHD category select in Recent Search


 ### 0.11.16 (2016-10-16 17:30:00 UTC)

@@ -220,7 +220,7 @@ class SickGear(object):
             # Run as a double forked daemon
             if o in ('-d', '--daemon'):
                 self.run_as_daemon = True
-                # When running as daemon disable consoleLogging and don't start browser
+                # When running as daemon disable console_logging and don't start browser
                 self.console_logging = False
                 self.no_launch = True

@@ -1257,8 +1257,8 @@ div.formpaginate .prev, div.formpaginate .next{
     background:#2265A1
 }

-#customQualityWrapper div.component-group-desc p{
-    color:#666
+#customQualityWrapper .tip-text p{
+    color:#888
 }

 /* =======================================================================

@@ -1212,7 +1212,7 @@ div.formpaginate .prev, div.formpaginate .next{
     background:#57442b
 }

-#customQualityWrapper div.component-group-desc p{
+#customQualityWrapper .tip-text p{
     color:#666
 }

@@ -966,11 +966,17 @@ div.formpaginate{
     margin-right:6px
 }

-#edit-show #customQualityWrapper div.component-group-desc p,
-#addShowForm #customQualityWrapper div.component-group-desc p{
+#edit-show #customQualityWrapper .tip-text p,
+#addShowForm #customQualityWrapper .tip-text p,
+#edit-show #customQualityWrapper .tip-text em,
+#addShowForm #customQualityWrapper .tip-text em{
     font-size:14px
 }

+#addShowForm .stepDiv #customQuality.show-if-quality-custom span.component-desc p{
+    font-size:12px
+}
+
 #addShowForm #nameToSearch{
     width:460px;
     margin-top:0

@@ -21,30 +21,40 @@

 <div id="customQualityWrapper">
     <div id="customQuality" class="show-if-quality-custom" style="display:none">
-        <div class="component-group-desc">
+        <div class="component-group-desc tip-text">
+            <p>An <em>Initial</em> quality episode must be found before an <em>Upgrade to</em> selection is considered.</p>
+            <p>Upgrades continue until the highest selected of <em>Upgrade to</em> is matched.</p>
         </div>

         <span class="component-desc">
-            <div style="float:left;padding-right:40px">
+            <div style="float:left;padding-right:28px">
                 <h4 class="jumbo">Initial</h4>
                 #set $anyQualityList = filter(lambda x: x > $Quality.NONE, $Quality.qualityStrings)
                 <select id="anyQualities" name="anyQualities" multiple="multiple" size="$len($anyQualityList)" class="form-control form-control-inline input-sm">
+                #set $has_unknown = False
                 #for $curQuality in sorted($anyQualityList):
+                    #set $has_unknown |= ($Quality.UNKNOWN == $curQuality and $curQuality in $anyQualities)
                     <option value="$curQuality"#echo ('', $html_selected)[$curQuality in $anyQualities]#>$Quality.qualityStrings[$curQuality]</option>
                 #end for
                 </select>
             </div>

-            <div style="float:left">
+            <div style="float:left;padding-right:20px">
                 <h4 class="jumbo">Upgrade to</h4>
                 #set $bestQualityList = filter(lambda x: x > $Quality.SDTV and x < $Quality.UNKNOWN, $Quality.qualityStrings)
                 <select id="bestQualities" name="bestQualities" multiple="multiple" size="$len($bestQualityList)" class="form-control form-control-inline input-sm">
                 #for $curQuality in sorted($bestQualityList):
                     <option value="$curQuality"#echo ('', $html_selected)[$curQuality in $bestQualities]#>$Quality.qualityStrings[$curQuality]</option>
                 #end for
-                </select>
+                </select><br />
+                <span>Ctrl + Click = toggle a quality</span>
             </div>

+            <div style="line-height:normal;padding-top:50px" id="quality-notes" class="tip-text">
+                <p id="unknown"#if not $has_unknown# style="display:none"#end if#>
+                    <em class="highlight-text">Note:</em> Temporarily use 'Unknown' for releases with no recognised quality.
+                    Full-time use risks snatching bad releases and wastes API hits.
+                </p>
+            </div>
         </span>
     </div>

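Note: the template accumulates a flag with Cheetah's `#set $has_unknown |= ...` so the guidance paragraph only renders when 'Unknown' is among the selected initial qualities. A plain-Python illustration of that accumulation; the UNKNOWN value 32768 matches the option value tested in the quality chooser JavaScript below, while the other quality values here are placeholders, not SickGear's real constants:

    UNKNOWN = 1 << 15  # 32768, per the option value in qualityChooser.js

    any_quality_list = [1, 4, UNKNOWN]  # qualities offered by the select
    any_qualities = [1, UNKNOWN]        # qualities the user has selected

    has_unknown = False
    for cur_quality in sorted(any_quality_list):
        # |= keeps the flag True once any iteration matches 'Unknown'
        has_unknown |= (UNKNOWN == cur_quality and cur_quality in any_qualities)

    print(has_unknown)  # True, so the "unknown" tip paragraph is displayed
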
@@ -19,6 +19,17 @@ function setFromPresets (preset) {
         });
     } else
         elCustomQuality.fadeIn('fast', 'linear');
+
+    presentTips();
 }

+function presentTips() {
+    var tip$ = $('#unknown');
+    if (/undefined/i.test($('#anyQualities').find('option[value="32768"]').attr('selected'))) {
+        tip$.fadeOut('fast', 'linear');
+    } else {
+        tip$.fadeIn('fast', 'linear');
+    }
+}
+
 $(document).ready(function() {

@@ -30,4 +41,8 @@ $(document).ready(function() {
     });

     setFromPresets(elQualityPreset.find(selected).val());
+
+    $('#anyQualities').change(function() {
+        presentTips();
+    });
 });

@@ -517,7 +517,7 @@ class Tvdb:
         self.config['url_artworkPrefix'] = u'%(base_url)s/banners/%%s' % self.config

     def log(self, msg, log_level=logger.DEBUG):
-        logger.log('TVDB_API :: %s' % (msg.replace(self.config['apikey'], '<apikey>')), logLevel=log_level)
+        logger.log('TVDB_API :: %s' % (msg.replace(self.config['apikey'], '<apikey>')), log_level=log_level)

     @staticmethod
     def _get_temp_dir():

@@ -1138,7 +1138,7 @@ def initialize(console_logging=True):
         save_config()

     # start up all the threads
-    logger.sb_log_instance.initLogging(consoleLogging=console_logging)
+    logger.sb_log_instance.init_logging(console_logging=console_logging)

     # initialize the main SB database
     my_db = db.DBConnection()

@@ -1800,7 +1800,7 @@ def save_config():
 def launch_browser(start_port=None):
     if not start_port:
         start_port = WEB_PORT
-    browser_url = 'http%s://localhost:%d%s' % (('s' or '')[not ENABLE_HTTPS], start_port, WEB_ROOT)
+    browser_url = 'http%s://localhost:%d%s' % (('s', '')[not ENABLE_HTTPS], start_port, WEB_ROOT)
     try:
         webbrowser.open(browser_url, 2, 1)
     except (StandardError, Exception):

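Note: this one-character change is the "Fix launch browser during startup" entry above. `('s' or '')` short-circuits to the one-character string 's', so with HTTPS disabled the index `not ENABLE_HTTPS` evaluates to 1 and raises IndexError before the browser can open; the tuple form selects between 's' and '' as intended. A standalone illustration:

    # Demonstrates the launch_browser fix. `('s' or '')` is just the string
    # 's', so indexing it with 1 (HTTPS disabled) blows up; the tuple
    # ('s', '') selects between its two items as intended.
    for enable_https in (True, False):
        scheme = 'http%s' % ('s', '')[not enable_https]  # fixed expression
        print('ENABLE_HTTPS=%s -> %s://localhost' % (enable_https, scheme))

    try:
        ('s' or '')[1]  # old expression when `not ENABLE_HTTPS` is True (1)
    except IndexError as error:
        print('old expression raises: %s' % error)
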
@@ -89,7 +89,7 @@ def change_LOG_DIR(log_dir, web_log):
         sickbeard.ACTUAL_LOG_DIR = os.path.normpath(log_dir)
         sickbeard.LOG_DIR = abs_log_dir

-        logger.sb_log_instance.initLogging()
+        logger.sb_log_instance.init_logging()
         logger.log(u'Initialized new log file in %s' % sickbeard.LOG_DIR)
         log_dir_changed = True

@@ -172,10 +172,10 @@ def sanitizeFileName(name):
     return name


-def _remove_file_failed(file):
+def remove_file_failed(filename):
     try:
-        ek.ek(os.remove, file)
-    except:
+        ek.ek(os.remove, filename)
+    except (StandardError, Exception):
         pass

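Note: the rename also drops the leading underscore because the helper is now called from other modules (see the logger and provider changes below). Most edits in this file follow a single pattern: routing direct os.* calls through ek.ek. A minimal sketch of what such a wrapper can look like, assuming the usual Python 2 encoding-kludge behaviour; the real sickbeard.encodingKludge module may differ:

    import os
    import sys


    def ek(func, *args, **kwargs):
        # Sketch only: encode unicode arguments to the filesystem encoding
        # before calling an os.* function, and decode byte-string results
        # back, so calls behave consistently on non-ASCII paths (Python 2).
        encoding = sys.getfilesystemencoding() or 'utf-8'
        fixed = [a.encode(encoding) if isinstance(a, unicode) else a for a in args]
        result = func(*fixed, **kwargs)
        if isinstance(result, str):
            result = result.decode(encoding, 'replace')
        return result


    # usage mirrors the diff: ek(os.remove, filename) not os.remove(filename)
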
@@ -323,7 +323,7 @@ def link(src, dst):

         if ctypes.windll.kernel32.CreateHardLinkW(unicode(dst), unicode(src), 0) == 0: raise ctypes.WinError()
     else:
-        os.link(src, dst)
+        ek.ek(os.link, src, dst)


 def hardlinkFile(srcFile, destFile):

@@ -340,10 +340,11 @@ def symlink(src, dst):
     if os.name == 'nt':
         import ctypes

-        if ctypes.windll.kernel32.CreateSymbolicLinkW(unicode(dst), unicode(src), 1 if os.path.isdir(src) else 0) in [0,
-                                                                                                                      1280]: raise ctypes.WinError()
+        if ctypes.windll.kernel32.CreateSymbolicLinkW(
+                unicode(dst), unicode(src), 1 if ek.ek(os.path.isdir, src) else 0) in [0, 1280]:
+            raise ctypes.WinError()
     else:
-        os.symlink(src, dst)
+        ek.ek(os.symlink, src, dst)


 def moveAndSymlinkFile(srcFile, destFile):

@@ -411,11 +412,11 @@ def rename_ep_file(cur_path, new_path, old_path_length=0):
     old_path_length: The length of media file path (old name) WITHOUT THE EXTENSION
     """

-    new_dest_dir, new_dest_name = os.path.split(new_path)  # @UnusedVariable
+    new_dest_dir, new_dest_name = ek.ek(os.path.split, new_path)  # @UnusedVariable

     if old_path_length == 0 or old_path_length > len(cur_path):
         # approach from the right
-        cur_file_name, cur_file_ext = os.path.splitext(cur_path)  # @UnusedVariable
+        cur_file_name, cur_file_ext = ek.ek(os.path.splitext, cur_path)  # @UnusedVariable
     else:
         # approach from the left
         cur_file_ext = cur_path[old_path_length:]

@@ -423,7 +424,7 @@ def rename_ep_file(cur_path, new_path, old_path_length=0):

     if cur_file_ext[1:] in subtitleExtensions:
         # Extract subtitle language from filename
-        sublang = os.path.splitext(cur_file_name)[1][1:]
+        sublang = ek.ek(os.path.splitext, cur_file_name)[1][1:]

         # Check if the language extracted from filename is a valid language
         try:

@@ -435,7 +436,7 @@ def rename_ep_file(cur_path, new_path, old_path_length=0):
     # put the extension on the incoming file
     new_path += cur_file_ext

-    make_dirs(os.path.dirname(new_path))
+    make_dirs(ek.ek(os.path.dirname, new_path))

     # move the file
     try:

@@ -724,7 +725,7 @@ def backupVersionedFile(old_file, version):
 def restoreVersionedFile(backup_file, version):
     numTries = 0

-    new_file, backup_version = os.path.splitext(backup_file)
+    new_file, backup_version = ek.ek(os.path.splitext, backup_file)
     restore_file = new_file + '.' + 'v' + str(version)

     if not ek.ek(os.path.isfile, new_file):

@@ -1007,7 +1008,7 @@ def touchFile(fname, atime=None):
     if None != atime:
         try:
             with open(fname, 'a'):
-                os.utime(fname, (atime, atime))
+                ek.ek(os.utime, fname, (atime, atime))
                 return True
         except:
             logger.log(u"File air date stamping not available on your OS", logger.DEBUG)

@@ -1027,9 +1028,9 @@ def _getTempDir():
         try:
             uid = getpass.getuser()
         except ImportError:
-            return os.path.join(tempfile.gettempdir(), "SickGear")
+            return ek.ek(os.path.join, tempfile.gettempdir(), "SickGear")

-    return os.path.join(tempfile.gettempdir(), "SickGear-%s" % (uid))
+    return ek.ek(os.path.join, tempfile.gettempdir(), "SickGear-%s" % (uid))


 def proxy_setting(proxy_setting, request_url, force=False):

@@ -1098,7 +1099,7 @@ def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=N

     if not kwargs.get('nocache'):
         cache_dir = sickbeard.CACHE_DIR or _getTempDir()
-        session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(cache_dir, 'sessions')))
+        session = CacheControl(sess=session, cache=caches.FileCache(ek.ek(os.path.join, cache_dir, 'sessions')))
     else:
         del(kwargs['nocache'])

@@ -1221,7 +1222,7 @@ def download_file(url, filename, session=None):
     if None is session:
         session = requests.session()
         cache_dir = sickbeard.CACHE_DIR or _getTempDir()
-        session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(cache_dir, 'sessions')))
+        session = CacheControl(sess=session, cache=caches.FileCache(ek.ek(os.path.join, cache_dir, 'sessions')))

     # request session headers
     session.headers.update({'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'})

@@ -1258,27 +1259,27 @@ def download_file(url, filename, session=None):
                 if chunk:
                     fp.write(chunk)
                     fp.flush()
-                    os.fsync(fp.fileno())
+                    ek.ek(os.fsync, fp.fileno())

         chmodAsParent(filename)
     except requests.exceptions.HTTPError as e:
-        _remove_file_failed(filename)
+        remove_file_failed(filename)
         logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
         return False
     except requests.exceptions.ConnectionError as e:
-        _remove_file_failed(filename)
+        remove_file_failed(filename)
         logger.log(u"Connection error " + str(e.message) + " while loading URL " + url, logger.WARNING)
         return False
     except requests.exceptions.Timeout as e:
-        _remove_file_failed(filename)
+        remove_file_failed(filename)
         logger.log(u"Connection timed out " + str(e.message) + " while loading URL " + url, logger.WARNING)
         return False
     except EnvironmentError as e:
-        _remove_file_failed(filename)
+        remove_file_failed(filename)
         logger.log(u"Unable to save the file: " + ex(e), logger.ERROR)
         return False
     except Exception:
-        _remove_file_failed(filename)
+        remove_file_failed(filename)
         logger.log(u"Unknown exception while loading URL " + url + ": " + traceback.format_exc(), logger.WARNING)
         return False

@@ -50,11 +50,13 @@ reverseNames = {u'ERROR': ERROR,
                 u'DEBUG': DEBUG,
                 u'DB': DB}

+
 # send logging to null
 class NullHandler(logging.Handler):
     def emit(self, record):
         pass

+
 class SBRotatingLogHandler(object):
     def __init__(self, log_file):
         self.log_file = log_file

@@ -87,10 +89,10 @@ class SBRotatingLogHandler(object):
         handler.flush()
         handler.close()

-    def initLogging(self, consoleLogging=False):
+    def init_logging(self, console_logging=False):

-        if consoleLogging:
-            self.console_logging = consoleLogging
+        if console_logging:
+            self.console_logging = console_logging

         old_handler = None

@@ -99,10 +101,10 @@ class SBRotatingLogHandler(object):
             old_handler = self.cur_handler
         else:

-            #Add a new logging level DB
+            # add a new logging level DB
             logging.addLevelName(5, 'DB')

-            # only start consoleLogging on first initialize
+            # only start console_logging on first initialize
             if self.console_logging:
                 # define a Handler which writes INFO messages or higher to the sys.stderr
                 console = logging.StreamHandler()

@@ -113,15 +115,18 @@ class SBRotatingLogHandler(object):

                 # set a format which is simpler for console use
                 console.setFormatter(DispatchingFormatter(
-                    {'sickbeard': logging.Formatter('%(asctime)s %(levelname)s::%(message)s', '%H:%M:%S'),
-                     'subliminal': logging.Formatter('%(asctime)s %(levelname)s::SUBLIMINAL :: %(message)s',
-                                                     '%H:%M:%S'),
-                     'imdbpy': logging.Formatter('%(asctime)s %(levelname)s::IMDBPY :: %(message)s', '%H:%M:%S'),
-                     'tornado.general': logging.Formatter('%(asctime)s %(levelname)s::TORNADO :: %(message)s', '%H:%M:%S'),
-                     'tornado.application': logging.Formatter('%(asctime)s %(levelname)s::TORNADO :: %(message)s', '%H:%M:%S'),
-                     'feedcache.cache': logging.Formatter('%(asctime)s %(levelname)s::FEEDCACHE :: %(message)s',
-                                                          '%H:%M:%S')
-                     },
+                    {'sickbeard': logging.Formatter(
+                        '%(asctime)s %(levelname)s::%(message)s', '%H:%M:%S'),
+                     'subliminal': logging.Formatter(
+                         '%(asctime)s %(levelname)s::SUBLIMINAL :: %(message)s', '%H:%M:%S'),
+                     'imdbpy': logging.Formatter(
+                         '%(asctime)s %(levelname)s::IMDBPY :: %(message)s', '%H:%M:%S'),
+                     'tornado.general': logging.Formatter(
+                         '%(asctime)s %(levelname)s::TORNADO :: %(message)s', '%H:%M:%S'),
+                     'tornado.application': logging.Formatter(
+                         '%(asctime)s %(levelname)s::TORNADO :: %(message)s', '%H:%M:%S'),
+                     'feedcache.cache': logging.Formatter(
+                         '%(asctime)s %(levelname)s::FEEDCACHE :: %(message)s', '%H:%M:%S')},
                     logging.Formatter('%(message)s'), ))

                 # add the handler to the root logger

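Note: both formatter blocks in this file pass a name-keyed dict plus a default to DispatchingFormatter. The class itself is not part of this diff; a minimal sketch consistent with those call sites, assumed rather than copied from the project's real implementation:

    import logging


    class DispatchingFormatter(object):
        # Sketch: pick a formatter by logger name, fall back to a default.
        def __init__(self, formatters, default_formatter):
            self._formatters = formatters            # dict: logger name -> Formatter
            self._default_formatter = default_formatter

        def format(self, record):
            # handlers call format(record); dispatch on the record's logger name
            formatter = self._formatters.get(record.name, self._default_formatter)
            return formatter.format(record)
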
@@ -155,7 +160,6 @@ class SBRotatingLogHandler(object):
         logging.getLogger('imdbpy').setLevel(log_level)
         logging.getLogger('feedcache').setLevel(log_level)

-
         # already logging in new log folder, close the old handler
         if old_handler:
             self.close_log(old_handler)

|
|||
Configure a file handler to log at file_name and return it.
|
||||
"""
|
||||
|
||||
file_handler = TimedCompressedRotatingFileHandler(self.log_file_path, when='midnight', backupCount=7, encoding='utf-8')
|
||||
file_handler = TimedCompressedRotatingFileHandler(self.log_file_path, when='midnight',
|
||||
backupCount=16, encoding='utf-8')
|
||||
file_handler.setLevel(reverseNames[sickbeard.FILE_LOGGING_PRESET])
|
||||
file_handler.setFormatter(DispatchingFormatter(
|
||||
{'sickbeard': logging.Formatter('%(asctime)s %(levelname)-8s %(message)s', '%Y-%m-%d %H:%M:%S'),
|
||||
'subliminal': logging.Formatter('%(asctime)s %(levelname)-8s SUBLIMINAL :: %(message)s',
|
||||
'%Y-%m-%d %H:%M:%S'),
|
||||
'imdbpy': logging.Formatter('%(asctime)s %(levelname)-8s IMDBPY :: %(message)s', '%Y-%m-%d %H:%M:%S'),
|
||||
'tornado.general': logging.Formatter('%(asctime)s %(levelname)-8s TORNADO :: %(message)s', '%Y-%m-%d %H:%M:%S'),
|
||||
'tornado.application': logging.Formatter('%(asctime)s %(levelname)-8s TORNADO :: %(message)s', '%Y-%m-%d %H:%M:%S'),
|
||||
'feedcache.cache': logging.Formatter('%(asctime)s %(levelname)-8s FEEDCACHE :: %(message)s',
|
||||
'%Y-%m-%d %H:%M:%S')
|
||||
},
|
||||
{'sickbeard': logging.Formatter(
|
||||
'%(asctime)s %(levelname)-8s %(message)s', '%Y-%m-%d %H:%M:%S'),
|
||||
'subliminal': logging.Formatter(
|
||||
'%(asctime)s %(levelname)-8s SUBLIMINAL :: %(message)s', '%Y-%m-%d %H:%M:%S'),
|
||||
'imdbpy': logging.Formatter(
|
||||
'%(asctime)s %(levelname)-8s IMDBPY :: %(message)s', '%Y-%m-%d %H:%M:%S'),
|
||||
'tornado.general': logging.Formatter(
|
||||
'%(asctime)s %(levelname)-8s TORNADO :: %(message)s', '%Y-%m-%d %H:%M:%S'),
|
||||
'tornado.application': logging.Formatter(
|
||||
'%(asctime)s %(levelname)-8s TORNADO :: %(message)s', '%Y-%m-%d %H:%M:%S'),
|
||||
'feedcache.cache': logging.Formatter(
|
||||
'%(asctime)s %(levelname)-8s FEEDCACHE :: %(message)s', '%Y-%m-%d %H:%M:%S')},
|
||||
logging.Formatter('%(message)s'), ))
|
||||
|
||||
return file_handler
|
||||
|
||||
def log(self, toLog, logLevel=MESSAGE):
|
||||
def log(self, to_log, log_level=MESSAGE):
|
||||
|
||||
with self.log_lock:
|
||||
|
||||
meThread = threading.currentThread().getName()
|
||||
message = meThread + u" :: " + toLog
|
||||
|
||||
out_line = message
|
||||
out_line = '%s :: %s' % (threading.currentThread().getName(), to_log)
|
||||
|
||||
sb_logger = logging.getLogger('sickbeard')
|
||||
setattr(sb_logger, 'db', lambda *args: sb_logger.log(DB, *args))
|
||||
|
||||
sub_logger = logging.getLogger('subliminal')
|
||||
imdb_logger = logging.getLogger('imdbpy')
|
||||
tornado_logger = logging.getLogger('tornado')
|
||||
feedcache_logger = logging.getLogger('feedcache')
|
||||
# sub_logger = logging.getLogger('subliminal')
|
||||
# imdb_logger = logging.getLogger('imdbpy')
|
||||
# tornado_logger = logging.getLogger('tornado')
|
||||
# feedcache_logger = logging.getLogger('feedcache')
|
||||
|
||||
try:
|
||||
if logLevel == DEBUG:
|
||||
if DEBUG == log_level:
|
||||
sb_logger.debug(out_line)
|
||||
elif logLevel == MESSAGE:
|
||||
elif MESSAGE == log_level:
|
||||
sb_logger.info(out_line)
|
||||
elif logLevel == WARNING:
|
||||
elif WARNING == log_level:
|
||||
sb_logger.warning(out_line)
|
||||
elif logLevel == ERROR:
|
||||
elif ERROR == log_level:
|
||||
sb_logger.error(out_line)
|
||||
# add errors to the UI logger
|
||||
classes.ErrorViewer.add(classes.UIError(message))
|
||||
elif logLevel == DB:
|
||||
classes.ErrorViewer.add(classes.UIError(out_line))
|
||||
elif DB == log_level:
|
||||
sb_logger.db(out_line)
|
||||
else:
|
||||
sb_logger.log(logLevel, out_line)
|
||||
sb_logger.log(log_level, out_line)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
|
@@ -259,47 +264,54 @@ class TimedCompressedRotatingFileHandler(TimedRotatingFileHandler):
         then we have to get a list of matching filenames, sort them and remove
         the one with the oldest suffix.

-        This method is a copy of the one in TimedRotatingFileHandler. Since it uses
+        This method is modified from the one in TimedRotatingFileHandler.
         """
         self.stream.close()
         # get the time that this sequence started at and make it a TimeTuple
         t = self.rolloverAt - self.interval
-        timeTuple = time.localtime(t)
+        time_tuple = time.localtime(t)
         file_name = self.baseFilename.rpartition('.')[0]
-        dfn = '%s_%s.log' % (file_name, time.strftime(self.suffix, timeTuple))
-        if os.path.exists(dfn):
-            sickbeard.helpers._remove_file_failed(dfn)
+        dfn = '%s_%s.log' % (file_name, time.strftime(self.suffix, time_tuple))
+        self.delete_logfile(dfn)
         try:
-            os.rename(self.baseFilename, dfn)
-        except:
+            ek.ek(os.rename, self.baseFilename, dfn)
+        except (StandardError, Exception):
             pass
-        if self.backupCount > 0:
+        if 0 < self.backupCount:
             # find the oldest log file and delete it
             s = glob.glob(file_name + '_*')
             if len(s) > self.backupCount:
                 s.sort()
-                sickbeard.helpers._remove_file_failed(s[0])
+                self.delete_logfile(s[0])
         # print "%s -> %s" % (self.baseFilename, dfn)
         if self.encoding:
             self.stream = codecs.open(self.baseFilename, 'w', self.encoding)
         else:
             self.stream = open(self.baseFilename, 'w')
         self.rolloverAt = self.rolloverAt + self.interval
-        zip_name = dfn.rpartition('.')[0] + '.zip'
-        if os.path.exists(zip_name):
-            sickbeard.helpers._remove_file_failed(zip_name)
-        file = zipfile.ZipFile(zip_name, 'w')
-        file.write(dfn, os.path.basename(dfn), zipfile.ZIP_DEFLATED)
-        file.close()
-        sickbeard.helpers._remove_file_failed(dfn)
+        zip_name = '%s.zip' % dfn.rpartition('.')[0]
+        self.delete_logfile(zip_name)
+        zip_fh = zipfile.ZipFile(zip_name, 'w')
+        zip_fh.write(dfn, os.path.basename(dfn), zipfile.ZIP_DEFLATED)
+        zip_fh.close()
+        self.delete_logfile(dfn)
+
+    @staticmethod
+    def delete_logfile(filepath):
+        from sickbeard import encodingKludge
+        if encodingKludge.ek(os.path.exists, filepath):
+            if sickbeard.TRASH_ROTATE_LOGS:
+                encodingKludge.ek(send2trash, filepath)
+            else:
+                sickbeard.helpers.remove_file_failed(filepath)


 sb_log_instance = SBRotatingLogHandler('sickbeard.log')


-def log(toLog, logLevel=MESSAGE):
-    sb_log_instance.log(toLog, logLevel)
+def log(to_log, log_level=MESSAGE):
+    sb_log_instance.log(to_log, log_level)


 def log_error_and_exit(error_msg):

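Note: the new delete_logfile() honours the TRASH_ROTATE_LOGS setting by recycling rather than unlinking. send2trash here is the third-party package of the same name; a trivial standalone usage sketch, with a hypothetical path:

    import os
    from send2trash import send2trash  # pip install Send2Trash

    log_path = 'sickbeard_2016-10-16.log'  # hypothetical rotated log file
    if os.path.exists(log_path):
        # moves the file to the OS trash/recycle bin instead of deleting it
        send2trash(log_path)
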
@@ -293,10 +293,10 @@ class PostProcessor(object):
                 cur_extension = 'nfo-orig'

             # check if file have subtitles language
-            if os.path.splitext(cur_extension)[1][1:] in common.subtitleExtensions:
-                cur_lang = os.path.splitext(cur_extension)[0]
+            if ek.ek(os.path.splitext, cur_extension)[1][1:] in common.subtitleExtensions:
+                cur_lang = ek.ek(os.path.splitext, cur_extension)[0]
                 if cur_lang in sickbeard.SUBTITLES_LANGUAGES:
-                    cur_extension = cur_lang + os.path.splitext(cur_extension)[1]
+                    cur_extension = cur_lang + ek.ek(os.path.splitext, cur_extension)[1]

             # If new base name then convert name
             if new_base_name:

@@ -401,7 +401,7 @@ class ProcessTVShow(object):
             return False

         if failed:
-            self._process_failed(os.path.join(path, dir_name), nzb_name_original, showObj=showObj)
+            self._process_failed(ek.ek(os.path.join, path, dir_name), nzb_name_original, showObj=showObj)
             return False

         if helpers.is_hidden_folder(dir_name):

@@ -623,11 +623,11 @@ class ProcessTVShow(object):
         result = False
         chunks = {}
         matcher = re.compile('\.[0-9]+$')
-        for dirpath, void, filenames in os.walk(directory):
+        for dirpath, void, filenames in ek.ek(os.walk, directory):
             for filename in filenames:
                 if None is not matcher.search(filename):
                     maybe_chunk = ek.ek(os.path.join, dirpath, filename)
-                    base_filepath, ext = os.path.splitext(maybe_chunk)
+                    base_filepath, ext = ek.ek(os.path.splitext, maybe_chunk)
                     if base_filepath not in chunks:
                         chunks[base_filepath] = []
                     chunks[base_filepath].append(maybe_chunk)

@@ -809,10 +809,10 @@ class ProcessTVShow(object):
                     break
         else:
             path, dirs = ek.ek(os.path.split, dir_name)  # Script Post Processing
-            if None is not nzb_name and not nzb_name.endswith('.nzb') and os.path.isfile(
-                    os.path.join(dir_name, nzb_name)):  # For single torrent file without directory
+            if None is not nzb_name and not nzb_name.endswith('.nzb') and \
+                    ek.ek(os.path.isfile, ek.ek(os.path.join, dir_name, nzb_name)):  # For single torrent file without directory
                 dirs = []
-                files = [os.path.join(dir_name, nzb_name)]
+                files = [ek.ek(os.path.join, dir_name, nzb_name)]
             else:
                 dirs = [dirs]
                 files = []

@@ -40,7 +40,7 @@ from hachoir_core.stream import FileInputStream
 from sickbeard import helpers, classes, logger, db, tvcache, encodingKludge as ek
 from sickbeard.common import Quality, MULTI_EP_RESULT, SEASON_RESULT, USER_AGENT
 from sickbeard.exceptions import SickBeardException, AuthException, ex
-from sickbeard.helpers import maybe_plural, _remove_file_failed as remove_file_failed
+from sickbeard.helpers import maybe_plural, remove_file_failed
 from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
 from sickbeard.show_name_helpers import allPossibleShowNames

@@ -444,7 +444,12 @@ class NewznabProvider(generic.NZBProvider):
                           Quality.HDBLURAY, Quality.FULLHDBLURAY]
         max_hd = Quality.FULLHDBLURAY
         for s in searches:
+            if need_sd and need_hd and need_uhd:
+                break
             if not s.show.is_anime and not s.show.is_sports:
+                if Quality.UNKNOWN in s.wantedQuality:
+                    need_sd = need_hd = need_uhd = True
+                else:
                     if not need_sd and min(s.wantedQuality) <= max_sd:
                         need_sd = True
                     if not need_hd and any(i in hd_qualities for i in s.wantedQuality):

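Note: two ideas drive this hunk: break out early once every category is already needed (the changelog's speed improvement), and treat a wanted 'Unknown' quality as potentially any category. A condensed, illustrative re-statement with simplified names; the UHD branch is truncated in the hunk above, so its handling here is assumed:

    def needed_groups(searches, max_sd, hd_qualities, uhd_qualities, UNKNOWN):
        # Condensed sketch of the category scan above; illustrative only.
        need_sd = need_hd = need_uhd = False
        for s in searches:
            if need_sd and need_hd and need_uhd:
                break  # nothing left to learn: the speed improvement
            if UNKNOWN in s.wanted_quality:
                # an 'unknown' release can turn out to be any quality,
                # so every category must be searched
                need_sd = need_hd = need_uhd = True
            else:
                need_sd |= min(s.wanted_quality) <= max_sd
                need_hd |= any(i in hd_qualities for i in s.wanted_quality)
                need_uhd |= any(i in uhd_qualities for i in s.wanted_quality)
        return need_sd, need_hd, need_uhd
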
@@ -474,6 +479,9 @@ class NewznabProvider(generic.NZBProvider):
         if not season_search:
             need_sd = need_hd = need_uhd = False
             if not ep_obj.show.is_anime and not ep_obj.show.is_sports:
+                if Quality.UNKNOWN in ep_obj.wantedQuality:
+                    need_sd = need_hd = need_uhd = True
+                else:
                     if min(ep_obj.wantedQuality) <= max_sd:
                         need_sd = True
                     if any(i in hd_qualities for i in ep_obj.wantedQuality):

@@ -387,7 +387,7 @@ def wanted_episodes(show, from_date, make_dict=False, unaired=False):

             ep_obj = show.getEpisode(int(result['season']), int(result['episode']))
             ep_obj.wantedQuality = [i for i in (wanted_qualities, initial_qualities)[not_downloaded]
-                                    if (common.Quality.UNKNOWN != i and cur_quality < i)]
+                                    if cur_quality < i]
             ep_obj.eps_aired_in_season = ep_count.get(helpers.tryInt(result['season']), 0)
             ep_obj.eps_aired_in_scene_season = ep_count_scene.get(
                 helpers.tryInt(result['scene_season']), 0) if result['scene_season'] else ep_obj.eps_aired_in_season

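Note: this is the core of "Change don't exclude 'unknown' from search": the comprehension no longer filters out the Unknown quality, so it can appear in an episode's wanted list. A before/after illustration; 32768 for Unknown is taken from the quality chooser above, and the other values are placeholders:

    UNKNOWN = 1 << 15  # 32768, per the option value in qualityChooser.js
    cur_quality = 0    # nothing downloaded yet
    wanted_qualities = [1, 4, UNKNOWN]  # placeholder quality flags

    old = [i for i in wanted_qualities if UNKNOWN != i and cur_quality < i]
    new = [i for i in wanted_qualities if cur_quality < i]
    print(old)  # [1, 4]          'Unknown' was silently dropped
    print(new)  # [1, 4, 32768]   'Unknown' is now searched too
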
@@ -206,9 +206,14 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
                     need_anime = True
                 if not need_sports and curShow.is_sports:
                     need_sports = True
-                if not need_sd or not need_hd:
+                if not need_sd or not need_hd or not need_uhd:
                     for w in wanted_eps:
+                        if need_sd and need_hd and need_uhd:
+                            break
                         if not w.show.is_anime and not w.show.is_sports:
+                            if Quality.UNKNOWN in w.wantedQuality:
+                                need_sd = need_hd = need_uhd = True
+                            else:
                                 if not need_sd and max_sd >= min(w.wantedQuality):
                                     need_sd = True
                                 if not need_hd and any(i in hd_qualities for i in w.wantedQuality):

@@ -1079,9 +1079,9 @@ class TVShow(object):
                 logger.log('Attempt to %s cache file %s' % (action, cache_file))
                 try:
                     if sickbeard.TRASH_REMOVE_SHOW:
-                        send2trash(cache_file)
+                        ek.ek(send2trash, cache_file)
                     else:
-                        os.remove(cache_file)
+                        ek.ek(os.remove, cache_file)

                 except OSError as e:
                     logger.log('Unable to %s %s: %s / %s' % (action, cache_file, repr(e), str(e)), logger.WARNING)

@@ -1101,7 +1101,7 @@ class TVShow(object):
             logger.log('Unable to change permissions of %s' % self._location, logger.WARNING)

         if sickbeard.TRASH_REMOVE_SHOW:
-            send2trash(self.location)
+            ek.ek(send2trash, self.location)
         else:
             ek.ek(shutil.rmtree, self.location)

@@ -1137,7 +1137,7 @@ class TVShow(object):

         sql_l = []
         for ep in sqlResults:
-            curLoc = os.path.normpath(ep['location'])
+            curLoc = ek.ek(os.path.normpath, ep['location'])
             season = int(ep['season'])
             episode = int(ep['episode'])

@@ -1149,8 +1149,8 @@ class TVShow(object):
                 continue

             # if the path doesn't exist or if it's not in our show dir
-            if not ek.ek(os.path.isfile, curLoc) or not os.path.normpath(curLoc).startswith(
-                    os.path.normpath(self.location)):
+            if not ek.ek(os.path.isfile, curLoc) or not ek.ek(os.path.normpath, curLoc).startswith(
+                    ek.ek(os.path.normpath, self.location)):

                 # check if downloaded files still exist, update our data if this has changed
                 if 1 != sickbeard.SKIP_REMOVED_FILES:

@@ -1528,7 +1528,7 @@ class TVEpisode(object):

         if sickbeard.SUBTITLES_DIR:
             for video in subtitles:
-                subs_new_path = ek.ek(os.path.join, os.path.dirname(video.path), sickbeard.SUBTITLES_DIR)
+                subs_new_path = ek.ek(os.path.join, ek.ek(os.path.dirname, video.path), sickbeard.SUBTITLES_DIR)
                 dir_exists = helpers.makeDir(subs_new_path)
                 if not dir_exists:
                     logger.log('Unable to create subtitles folder %s' % subs_new_path, logger.ERROR)

@@ -1536,7 +1536,7 @@ class TVEpisode(object):
                     helpers.chmodAsParent(subs_new_path)

                 for subtitle in subtitles.get(video):
-                    new_file_path = ek.ek(os.path.join, subs_new_path, os.path.basename(subtitle.path))
+                    new_file_path = ek.ek(os.path.join, subs_new_path, ek.ek(os.path.basename, subtitle.path))
                     helpers.moveFile(subtitle.path, new_file_path)
                     helpers.chmodAsParent(new_file_path)
         else:

@@ -1664,7 +1664,7 @@ class TVEpisode(object):

             # don't overwrite my location
             if sql_results[0]['location'] and sql_results[0]['location']:
-                self.location = os.path.normpath(sql_results[0]['location'])
+                self.location = ek.ek(os.path.normpath, sql_results[0]['location'])
             if sql_results[0]['file_size']:
                 self.file_size = int(sql_results[0]['file_size'])
             else:

@@ -2493,7 +2493,7 @@ class TVEpisode(object):
         if len(name_groups) == 1:
             return ''
         else:
-            return self._format_pattern(os.sep.join(name_groups[:-1]), multi)
+            return self._format_pattern(ek.ek(os.sep.join, name_groups[:-1]), multi)

     def formatted_filename(self, pattern=None, multi=None, anime_type=None):
         """

@@ -2605,7 +2605,7 @@ class TVEpisode(object):
         """
         if not datetime.date == type(self.airdate) or 1 == self.airdate.year:
             logger.log('%s: Did not change modify date of %s because episode date is never aired or invalid'
-                       % (self.show.indexerid, os.path.basename(self.location)), logger.DEBUG)
+                       % (self.show.indexerid, ek.ek(os.path.basename, self.location)), logger.DEBUG)
             return

         hr = m = 0

@@ -2619,7 +2619,7 @@ class TVEpisode(object):

         airdatetime = datetime.datetime.combine(self.airdate, airtime)

-        filemtime = datetime.datetime.fromtimestamp(os.path.getmtime(self.location))
+        filemtime = datetime.datetime.fromtimestamp(ek.ek(os.path.getmtime, self.location))

         if filemtime != airdatetime:
             import time

@@ -2627,7 +2627,7 @@ class TVEpisode(object):
             airdatetime = airdatetime.timetuple()
             if helpers.touchFile(self.location, time.mktime(airdatetime)):
                 logger.log('%s: Changed modify date of %s to show air date %s'
-                           % (self.show.indexerid, os.path.basename(self.location), time.strftime('%b %d,%Y (%H:%M)', airdatetime)))
+                           % (self.show.indexerid, ek.ek(os.path.basename, self.location), time.strftime('%b %d,%Y (%H:%M)', airdatetime)))

     def __getstate__(self):
         d = dict(self.__dict__)

@@ -2147,7 +2147,7 @@ class Home(MainHandler):
         # Find the quality class for the episode
         quality_class = Quality.qualityStrings[Quality.UNKNOWN]
         ep_status, ep_quality = Quality.splitCompositeStatus(ep_obj.status)
-        for x in (SD, HD720p, HD1080p):
+        for x in (SD, HD720p, HD1080p, UHD2160p):
             if ep_quality in Quality.splitQuality(x)[0]:
                 quality_class = qualityPresetStrings[x]
                 break

@@ -93,7 +93,7 @@ sickbeard.PROG_DIR = os.path.abspath('..')
 sickbeard.DATA_DIR = sickbeard.PROG_DIR
 sickbeard.LOG_DIR = os.path.join(TESTDIR, 'Logs')
 createTestLogFolder()
-sickbeard.logger.sb_log_instance.initLogging(False)
+sickbeard.logger.sb_log_instance.init_logging(False)

 sickbeard.CACHE_DIR = os.path.join(TESTDIR, 'cache')
 createTestCacheFolder()