diff --git a/CHANGES.md b/CHANGES.md
index dd751c0f..9e068fbd 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,4 +1,9 @@
-### 3.27.2 (2023-02-10 19:25:00 UTC)
+### 3.28.0 (2023-xx-xx xx:xx:00 UTC)
+
+* Change remove calls to legacy py2 fix encoding function
+
+
+### 3.27.2 (2023-02-10 19:25:00 UTC)
* Fix revert update
* Fix installations that don't have previously saved cleanup lock files
@@ -710,7 +715,7 @@
* Change allow Python 3.8.10 and 3.9.5
* Remove PiSexy provider
-* Fix refreshShow, prevent another refresh of show if already in queue and not forced
+* Fix refresh_show, prevent another refresh of show if already in queue and not forced
* Fix webapi set scene season
* Fix set path in all_tests for py2
* Fix webapi exception if no backlog was done before (CMD_SickGearCheckScheduler)
diff --git a/gui/slick/interfaces/default/home.tmpl b/gui/slick/interfaces/default/home.tmpl
index 36615d4c..f37140cf 100644
--- a/gui/slick/interfaces/default/home.tmpl
+++ b/gui/slick/interfaces/default/home.tmpl
@@ -99,7 +99,7 @@
##
- #for $curLoadingShow in $sickgear.show_queue_scheduler.action.loadingShowList
+ #for $curLoadingShow in $sickgear.show_queue_scheduler.action.loading_showlist
##
#if $curLoadingShow.show_obj != None and $curLoadingShow.show_obj in $sg_str('showList')
#continue
@@ -292,7 +292,7 @@
- #for $curLoadingShow in $sickgear.show_queue_scheduler.action.loadingShowList
+ #for $curLoadingShow in $sickgear.show_queue_scheduler.action.loading_showlist
#if $curLoadingShow.show_obj != None and $curLoadingShow.show_obj in $sg_str('showList')
#continue
#end if
diff --git a/gui/slick/interfaces/default/manage.tmpl b/gui/slick/interfaces/default/manage.tmpl
index 7433352f..a8cfe82a 100644
--- a/gui/slick/interfaces/default/manage.tmpl
+++ b/gui/slick/interfaces/default/manage.tmpl
@@ -188,37 +188,37 @@ $xsrf_form_html
#set $show_size = $max if not $show_loc else $get_size($show_loc)
#set $option_state = ''
##
- #set $curUpdate_disabled = $sickgear.show_queue_scheduler.action.isBeingUpdated($cur_show_obj)\
- or $sickgear.show_queue_scheduler.action.isInUpdateQueue($cur_show_obj)
+ #set $curUpdate_disabled = $sickgear.show_queue_scheduler.action.is_being_updated($cur_show_obj)\
+ or $sickgear.show_queue_scheduler.action.is_in_update_queue($cur_show_obj)
#set $tip = ' title="Update%s"' % ('', $disabled_inprogress_tip)[$curUpdate_disabled]
#set $curUpdate = ($tip, $option_state % (('', $disabled)[$curUpdate_disabled], 'update', $tip))
##
- #set $curRefresh_disabled = $sickgear.show_queue_scheduler.action.isBeingRefreshed($cur_show_obj)\
- or $sickgear.show_queue_scheduler.action.isInRefreshQueue($cur_show_obj)
+ #set $curRefresh_disabled = $sickgear.show_queue_scheduler.action.is_being_refreshed($cur_show_obj)\
+ or $sickgear.show_queue_scheduler.action.is_in_refresh_queue($cur_show_obj)
#set $tip = ' title="Rescan%s"' % ('', $disabled_inprogress_tip)[$curRefresh_disabled]
#set $curRefresh = ($tip, $option_state % (('', $disabled)[$curRefresh_disabled], 'refresh', $tip))
##
- #set $curRename_disabled = $sickgear.show_queue_scheduler.action.isBeingRenamed($cur_show_obj)\
- or $sickgear.show_queue_scheduler.action.isInRenameQueue($cur_show_obj)
+ #set $curRename_disabled = $sickgear.show_queue_scheduler.action.is_being_renamed($cur_show_obj)\
+ or $sickgear.show_queue_scheduler.action.is_in_rename_queue($cur_show_obj)
#set $tip = ' title="Rename%s"' % ('', $disabled_inprogress_tip)[$curRename_disabled]
#set $curRename = ($tip, $option_state % (('', $disabled)[$curRename_disabled], 'rename', $tip))
##
#set $subtitles_disabled = not $cur_show_obj.subtitles\
- or $sickgear.show_queue_scheduler.action.isBeingSubtitled($cur_show_obj)\
- or $sickgear.show_queue_scheduler.action.isInSubtitleQueue($cur_show_obj)
+ or $sickgear.show_queue_scheduler.action.is_being_subtitled($cur_show_obj)\
+ or $sickgear.show_queue_scheduler.action.is_in_subtitle_queue($cur_show_obj)
#set $tip = (' title="Search subtitle"', (' title="Search subtitle%s"' % $disabled_inprogress_tip,
$disabled_subtitles_tip)[not $cur_show_obj.subtitles])[$subtitles_disabled]
#set $curSubtitle = ($tip, $option_state % (('', $disabled)[$subtitles_disabled], 'subtitle', $tip))
##
- #set $curDelete_disabled = $sickgear.show_queue_scheduler.action.isBeingRenamed($cur_show_obj)\
- or $sickgear.show_queue_scheduler.action.isInRenameQueue($cur_show_obj)\
- or $sickgear.show_queue_scheduler.action.isInRefreshQueue($cur_show_obj)
+ #set $curDelete_disabled = $sickgear.show_queue_scheduler.action.is_being_renamed($cur_show_obj)\
+ or $sickgear.show_queue_scheduler.action.is_in_rename_queue($cur_show_obj)\
+ or $sickgear.show_queue_scheduler.action.is_in_refresh_queue($cur_show_obj)
#set $tip = ' title="Delete%s"' % ('', $disabled_inprogress_tip)[$curDelete_disabled]
#set $curDelete = ($tip, $option_state % (('', $disabled)[$curDelete_disabled], 'delete', $tip))
##
- #set $curRemove_disabled = $sickgear.show_queue_scheduler.action.isBeingRenamed($cur_show_obj)\
- or $sickgear.show_queue_scheduler.action.isInRenameQueue($cur_show_obj)\
- or $sickgear.show_queue_scheduler.action.isInRefreshQueue($cur_show_obj)
+ #set $curRemove_disabled = $sickgear.show_queue_scheduler.action.is_being_renamed($cur_show_obj)\
+ or $sickgear.show_queue_scheduler.action.is_in_rename_queue($cur_show_obj)\
+ or $sickgear.show_queue_scheduler.action.is_in_refresh_queue($cur_show_obj)
#set $tip = ' title="Remove%s"' % ('', $disabled_inprogress_tip)[$curRemove_disabled]
#set $curRemove = ($tip, $option_state % (('', $disabled)[$curRemove_disabled], 'remove', $tip))
diff --git a/lib/dateutil/zoneinfo/__init__.py b/lib/dateutil/zoneinfo/__init__.py
index 5628bfc3..27cc9df4 100644
--- a/lib/dateutil/zoneinfo/__init__.py
+++ b/lib/dateutil/zoneinfo/__init__.py
@@ -9,8 +9,6 @@ from io import BytesIO
from dateutil.tz import tzfile as _tzfile
-# noinspection PyPep8Naming
-import encodingKludge as ek
import sickgear
__all__ = ["get_zonefile_instance", "gettz", "gettz_db_metadata"]
@@ -27,10 +25,10 @@ class tzfile(_tzfile):
def getzoneinfofile_stream():
try:
# return BytesIO(get_data(__name__, ZONEFILENAME))
- zonefile = ek.ek(os.path.join, sickgear.ZONEINFO_DIR, ZONEFILENAME)
- if not ek.ek(os.path.isfile, zonefile):
+ zonefile = os.path.join(sickgear.ZONEINFO_DIR, ZONEFILENAME)
+ if not os.path.isfile(zonefile):
warnings.warn('Falling back to included zoneinfo file')
- zonefile = ek.ek(os.path.join, ek.ek(os.path.dirname, __file__), ZONEFILENAME)
+ zonefile = os.path.join(os.path.dirname(__file__), ZONEFILENAME)
with open(zonefile, 'rb') as f:
return BytesIO(f.read())
except IOError as e: # TODO switch to FileNotFoundError?
diff --git a/lib/encodingKludge.py b/lib/encodingKludge.py
index 1dcb6f67..7946bf91 100644
--- a/lib/encodingKludge.py
+++ b/lib/encodingKludge.py
@@ -39,7 +39,6 @@ def set_sys_encoding():
:return: The encoding that is set
"""
- sys_encoding = None
should_exit = False
try:
locale.setlocale(locale.LC_ALL, '')
@@ -48,7 +47,7 @@ def set_sys_encoding():
try:
sys_encoding = locale.getpreferredencoding()
except (locale.Error, IOError):
- pass
+ sys_encoding = None
# For OSes that are poorly configured I'll just randomly force UTF-8
if not sys_encoding or sys_encoding in ('ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):
diff --git a/lib/exceptions_helper.py b/lib/exceptions_helper.py
index 1232fe0b..e0628bbf 100644
--- a/lib/exceptions_helper.py
+++ b/lib/exceptions_helper.py
@@ -16,9 +16,6 @@
from six import PY2, string_types
-if PY2:
- from encodingKludge import fixStupidEncodings
-
# noinspection PyUnreachableCode
if False:
from typing import AnyStr
@@ -28,128 +25,100 @@ def ex(e):
# type: (BaseException) -> AnyStr
"""Returns a unicode string from the exception text if it exists"""
- if not PY2:
- return str(e)
-
- e_message = u''
-
- if not e or not e.args:
- return e_message
-
- for arg in e.args:
-
- if None is not arg:
- if isinstance(arg, string_types):
- fixed_arg = fixStupidEncodings(arg, True)
-
- else:
- try:
- fixed_arg = u'error ' + fixStupidEncodings(str(arg), True)
-
- except (BaseException, Exception):
- fixed_arg = None
-
- if fixed_arg:
- if not e_message:
- e_message = fixed_arg
-
- else:
- e_message = e_message + ' : ' + fixed_arg
-
- return e_message
+ return str(e)
-class SickBeardException(Exception):
+class SickGearException(Exception):
"""Generic SickGear Exception - should never be thrown, only subclassed"""
-class ConfigErrorException(SickBeardException):
+class ConfigErrorException(SickGearException):
"""Error in the config file"""
-class LaterException(SickBeardException):
+class LaterException(SickGearException):
"""Something bad happened that I'll make a real exception for later"""
-class NoNFOException(SickBeardException):
+class NoNFOException(SickGearException):
"""No NFO was found!"""
-class NoShowDirException(SickBeardException):
+class NoShowDirException(SickGearException):
"""Unable to find the show's directory"""
-class FileNotFoundException(SickBeardException):
+class FileNotFoundException(SickGearException):
"""The specified file doesn't exist"""
-class MultipleDBEpisodesException(SickBeardException):
+class MultipleDBEpisodesException(SickGearException):
"""Found multiple episodes in the DB! Must fix DB first"""
-class MultipleDBShowsException(SickBeardException):
+class MultipleDBShowsException(SickGearException):
"""Found multiple shows in the DB! Must fix DB first"""
-class MultipleShowObjectsException(SickBeardException):
+class MultipleShowObjectsException(SickGearException):
"""Found multiple objects for the same show! Something is very wrong"""
-class WrongShowException(SickBeardException):
+class WrongShowException(SickGearException):
"""The episode doesn't belong to the same show as its parent folder"""
-class ShowNotFoundException(SickBeardException):
+class ShowNotFoundException(SickGearException):
"""The show wasn't found on the Indexer"""
-class EpisodeNotFoundException(SickBeardException):
+class EpisodeNotFoundException(SickGearException):
"""The episode wasn't found on the Indexer"""
-class ShowDirNotFoundException(SickBeardException):
+class ShowDirNotFoundException(SickGearException):
"""The show dir doesn't exist"""
-class AuthException(SickBeardException):
+class AuthException(SickGearException):
"""Your authentication information is incorrect"""
-class EpisodeDeletedException(SickBeardException):
+class EpisodeDeletedException(SickGearException):
"""This episode has been deleted"""
-class CantRefreshException(SickBeardException):
+class CantRefreshException(SickGearException):
"""The show can't be refreshed right now"""
-class CantUpdateException(SickBeardException):
+class CantUpdateException(SickGearException):
"""The show can't be updated right now"""
-class CantSwitchException(SickBeardException):
+class CantSwitchException(SickGearException):
"""The show can't be switched right now"""
-class PostProcessingFailed(SickBeardException):
+class PostProcessingFailed(SickGearException):
"""Post-processing the episode failed"""
-class FailedProcessingFailed(SickBeardException):
+class FailedProcessingFailed(SickGearException):
"""Post-processing the failed release failed"""
-class FailedHistoryMultiSnatchException(SickBeardException):
+class FailedHistoryMultiSnatchException(SickGearException):
"""Episode was snatched again before the first one was done"""
-class FailedHistoryNotFoundException(SickBeardException):
+class FailedHistoryNotFoundException(SickGearException):
"""The release was not found in the failed download history tracker"""
-class EpisodeNotFoundByAbsoluteNumberException(SickBeardException):
+class EpisodeNotFoundByAbsoluteNumberException(SickGearException):
"""The show wasn't found in the DB while looking at Absolute Numbers"""
-class ConnectionSkipException(SickBeardException):
+class ConnectionSkipException(SickGearException):
"""Connection was skipped because of previous errors"""
diff --git a/lib/imdbpie/auth.py b/lib/imdbpie/auth.py
index 3b292fb8..d6e802ce 100644
--- a/lib/imdbpie/auth.py
+++ b/lib/imdbpie/auth.py
@@ -265,9 +265,8 @@ class Auth(object):
except ValueError as e:
if not retry:
cache.close()
- import encodingKludge as ek
import os
- ek.ek(os.remove, ek.ek(os.path.join, self._cachedir, diskcache.core.DBNAME))
+ os.remove(os.path.join(self._cachedir, diskcache.core.DBNAME))
return self._get_creds(retry=True)
else:
raise e
diff --git a/lib/sg_helpers.py b/lib/sg_helpers.py
index 67f2c771..e7ad08c1 100644
--- a/lib/sg_helpers.py
+++ b/lib/sg_helpers.py
@@ -32,7 +32,7 @@ from cfscrape import CloudflareScraper
from send2trash import send2trash
# noinspection PyPep8Naming
-import encodingKludge as ek
+from encodingKludge import SYS_ENCODING
import requests
from _23 import decode_bytes, filter_list, html_unescape, list_range, \
@@ -65,7 +65,7 @@ if False:
html_convert_fractions = {0: '', 25: '¼', 50: '½', 75: '¾', 100: 1}
-PROG_DIR = ek.ek(os.path.join, os.path.dirname(os.path.normpath(os.path.abspath(__file__))), '..')
+PROG_DIR = os.path.join(os.path.dirname(os.path.normpath(os.path.abspath(__file__))), '..')
# Mapping error status codes to official W3C names
http_error_code = {
@@ -676,9 +676,9 @@ def get_system_temp_dir():
try:
uid = getpass.getuser()
except ImportError:
- return ek.ek(os.path.join, tempfile.gettempdir(), 'SickGear')
+ return os.path.join(tempfile.gettempdir(), 'SickGear')
- return ek.ek(os.path.join, tempfile.gettempdir(), 'SickGear-%s' % uid)
+ return os.path.join(tempfile.gettempdir(), 'SickGear-%s' % uid)
def proxy_setting(setting, request_url, force=False):
@@ -834,7 +834,7 @@ def get_url(url, # type: AnyStr
if not kwargs.pop('nocache', False):
cache_dir = CACHE_DIR or get_system_temp_dir()
- session = CacheControl(sess=session, cache=caches.FileCache(ek.ek(os.path.join, cache_dir, 'sessions')))
+ session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(cache_dir, 'sessions')))
provider = kwargs.pop('provider', None)
@@ -1065,11 +1065,11 @@ def scantree(path, # type: AnyStr
:param filter_kind: None to yield everything, True yields directories, False yields files
:param recurse: Recursively scan the tree
"""
- if isinstance(path, string_types) and path and ek.ek(os.path.isdir, path):
+ if isinstance(path, string_types) and path and os.path.isdir(path):
rc_exc, rc_inc = [re.compile(rx % '|'.join(
[x for x in (param, ([param], [])[None is param])[not isinstance(param, list)]]))
for rx, param in ((r'(?i)^(?:(?!%s).)*$', exclude), (r'(?i)%s', include))]
- for entry in ek.ek(scandir, path):
+ for entry in scandir(path):
is_dir = entry.is_dir(follow_symlinks=follow_symlinks)
is_file = entry.is_file(follow_symlinks=follow_symlinks)
no_filter = any([None is filter_kind, filter_kind and is_dir, not filter_kind and is_file])
@@ -1084,25 +1084,25 @@ def scantree(path, # type: AnyStr
def copy_file(src_file, dest_file):
if os.name.startswith('posix'):
- ek.ek(subprocess.call, ['cp', src_file, dest_file])
+ subprocess.call(['cp', src_file, dest_file])
else:
- ek.ek(shutil.copyfile, src_file, dest_file)
+ shutil.copyfile(src_file, dest_file)
try:
- ek.ek(shutil.copymode, src_file, dest_file)
+ shutil.copymode(src_file, dest_file)
except OSError:
pass
def move_file(src_file, dest_file, raise_exceptions=False):
try:
- ek.ek(shutil.move, src_file, dest_file)
+ shutil.move(src_file, dest_file)
fix_set_group_id(dest_file)
except OSError:
copy_file(src_file, dest_file)
- if ek.ek(os.path.exists, dest_file):
+ if os.path.exists(dest_file):
fix_set_group_id(dest_file)
- ek.ek(os.unlink, src_file)
+ os.unlink(src_file)
elif raise_exceptions:
raise OSError('Destination file could not be created: %s' % dest_file)
@@ -1118,13 +1118,13 @@ def fix_set_group_id(child_path):
if os.name in ('nt', 'ce'):
return
- parent_path = ek.ek(os.path.dirname, child_path)
- parent_stat = ek.ek(os.stat, parent_path)
+ parent_path = os.path.dirname(child_path)
+ parent_stat = os.stat(parent_path)
parent_mode = stat.S_IMODE(parent_stat[stat.ST_MODE])
if parent_mode & stat.S_ISGID:
parent_gid = parent_stat[stat.ST_GID]
- child_stat = ek.ek(os.stat, child_path)
+ child_stat = os.stat(child_path)
child_gid = child_stat[stat.ST_GID]
if child_gid == parent_gid:
@@ -1138,7 +1138,7 @@ def fix_set_group_id(child_path):
return
try:
- ek.ek(os.chown, child_path, -1, parent_gid) # only available on UNIX
+ os.chown(child_path, -1, parent_gid) # only available on UNIX
logger.debug(u'Respecting the set-group-ID bit on the parent directory for %s' % child_path)
except OSError:
logger.error(u'Failed to respect the set-group-id bit on the parent directory for %s (setting group id %i)'
@@ -1154,11 +1154,11 @@ def remove_file_perm(filepath, log_err=True):
:param log_err: False to suppress log msgs
:return True if filepath does not exist else None if no removal
"""
- if not ek.ek(os.path.exists, filepath):
+ if not os.path.exists(filepath):
return True
for t in list_range(10): # total seconds to wait 0 - 9 = 45s over 10 iterations
try:
- ek.ek(os.remove, filepath)
+ os.remove(filepath)
except OSError as e:
if getattr(e, 'winerror', 0) not in (5, 32): # 5=access denied (e.g. av), 32=another process has lock
if log_err:
@@ -1167,7 +1167,7 @@ def remove_file_perm(filepath, log_err=True):
except (BaseException, Exception):
pass
time.sleep(t)
- if not ek.ek(os.path.exists, filepath):
+ if not os.path.exists(filepath):
return True
if log_err:
logger.warning('Unable to delete %s' % filepath)
@@ -1195,11 +1195,11 @@ def remove_file(filepath, tree=False, prefix_failure='', log_level=logging.INFO)
result = 'Deleted'
if TRASH_REMOVE_SHOW:
result = 'Trashed'
- ek.ek(send2trash, filepath)
+ send2trash(filepath)
elif tree:
- ek.ek(shutil.rmtree, filepath)
+ shutil.rmtree(filepath)
else:
- ek.ek(os.remove, filepath)
+ os.remove(filepath)
except OSError as e:
if getattr(e, 'winerror', 0) not in (5, 32): # 5=access denied (e.g. av), 32=another process has lock
logger.log(level=log_level, msg=u'%sUnable to %s %s %s: %s' %
@@ -1207,10 +1207,10 @@ def remove_file(filepath, tree=False, prefix_failure='', log_level=logging.INFO)
('file', 'dir')[tree], filepath, ex(e)))
break
time.sleep(t)
- if not ek.ek(os.path.exists, filepath):
+ if not os.path.exists(filepath):
break
- return (None, result)[filepath and not ek.ek(os.path.exists, filepath)]
+ return (None, result)[filepath and not os.path.exists(filepath)]
def touch_file(name, atime=None, dir_name=None):
@@ -1224,9 +1224,9 @@ def touch_file(name, atime=None, dir_name=None):
:return: success
"""
if None is not dir_name:
- name = ek.ek(os.path.join, dir_name, name)
+ name = os.path.join(dir_name, name)
if make_path(dir_name):
- if not ek.ek(os.path.exists, name):
+ if not os.path.exists(name):
with io.open(name, 'w') as fh:
fh.flush()
if None is atime:
@@ -1235,7 +1235,7 @@ def touch_file(name, atime=None, dir_name=None):
if None is not atime:
try:
with open(name, 'a'):
- ek.ek(os.utime, name, (atime, atime))
+ os.utime(name, (atime, atime))
return True
except (BaseException, Exception):
logger.debug('File air date stamping not available on your OS')
@@ -1253,12 +1253,12 @@ def make_path(name, syno=False):
:param syno: whether to trigger a syno library update for path
:return: success or dir exists
"""
- if not ek.ek(os.path.isdir, name):
+ if not os.path.isdir(name):
# Windows, create all missing folders
if os.name in ('nt', 'ce'):
try:
logger.debug(u'Path %s doesn\'t exist, creating it' % name)
- ek.ek(os.makedirs, name)
+ os.makedirs(name)
except (OSError, IOError) as e:
logger.error(u'Failed creating %s : %s' % (name, ex(e)))
return False
@@ -1273,14 +1273,14 @@ def make_path(name, syno=False):
sofar += cur_folder + os.path.sep
# if it exists then just keep walking down the line
- if ek.ek(os.path.isdir, sofar):
+ if os.path.isdir(sofar):
continue
try:
logger.debug(u'Path %s doesn\'t exist, creating it' % sofar)
- ek.ek(os.mkdir, sofar)
+ os.mkdir(sofar)
# use normpath to remove end separator, otherwise checks permissions against itself
- chmod_as_parent(ek.ek(os.path.normpath, sofar))
+ chmod_as_parent(os.path.normpath(sofar))
if syno:
# do the library update for synoindex
NOTIFIERS.NotifierFactory().get('SYNOINDEX').addFolder(sofar)
@@ -1302,19 +1302,19 @@ def chmod_as_parent(child_path):
if os.name in ('nt', 'ce'):
return
- parent_path = ek.ek(os.path.dirname, child_path)
+ parent_path = os.path.dirname(child_path)
if not parent_path:
logger.debug(u'No parent path provided in %s, unable to get permissions from it' % child_path)
return
- parent_path_stat = ek.ek(os.stat, parent_path)
+ parent_path_stat = os.stat(parent_path)
parent_mode = stat.S_IMODE(parent_path_stat[stat.ST_MODE])
- child_path_stat = ek.ek(os.stat, child_path)
+ child_path_stat = os.stat(child_path)
child_path_mode = stat.S_IMODE(child_path_stat[stat.ST_MODE])
- if ek.ek(os.path.isfile, child_path):
+ if os.path.isfile(child_path):
child_mode = file_bit_filter(parent_mode)
else:
child_mode = parent_mode
@@ -1330,7 +1330,7 @@ def chmod_as_parent(child_path):
return
try:
- ek.ek(os.chmod, child_path, child_mode)
+ os.chmod(child_path, child_mode)
logger.debug(u'Setting permissions for %s to %o as parent directory has %o'
% (child_path, child_mode, parent_mode))
except OSError:
@@ -1366,17 +1366,17 @@ def write_file(filepath, # type: AnyStr
"""
result = False
- if make_path(ek.ek(os.path.dirname, filepath)):
+ if make_path(os.path.dirname(filepath)):
try:
if raw:
empty_file = True
- with ek.ek(io.FileIO, filepath, 'wb') as fh:
+ with io.FileIO(filepath, 'wb') as fh:
for chunk in data.iter_content(chunk_size=1024):
if chunk:
empty_file = False
fh.write(chunk)
fh.flush()
- ek.ek(os.fsync, fh.fileno())
+ os.fsync(fh.fileno())
if empty_file:
remove_file_perm(filepath, log_err=False)
return result
@@ -1384,11 +1384,11 @@ def write_file(filepath, # type: AnyStr
w_mode = 'w'
if utf8:
w_mode = 'a'
- with ek.ek(io.FileIO, filepath, 'wb') as fh:
+ with io.FileIO(filepath, 'wb') as fh:
fh.write(codecs.BOM_UTF8)
if xmltree:
- with ek.ek(io.FileIO, filepath, w_mode) as fh:
+ with io.FileIO(filepath, w_mode) as fh:
params = {}
if utf8:
params = dict(encoding='utf-8')
@@ -1397,10 +1397,10 @@ def write_file(filepath, # type: AnyStr
data.write(fh, **params)
else:
if isinstance(data, text_type):
- with ek.ek(io.open, filepath, w_mode, encoding='utf-8') as fh:
+ with io.open(filepath, w_mode, encoding='utf-8') as fh:
fh.write(data)
else:
- with ek.ek(io.FileIO, filepath, w_mode) as fh:
+ with io.FileIO(filepath, w_mode) as fh:
fh.write(data)
chmod_as_parent(filepath)
@@ -1451,7 +1451,7 @@ def replace_extension(filename, new_ext):
def long_path(path):
# type: (AnyStr) -> AnyStr
"""add long path prefix for Windows"""
- if 'nt' == os.name and 260 < len(path) and not path.startswith('\\\\?\\') and ek.ek(os.path.isabs, path):
+ if 'nt' == os.name and 260 < len(path) and not path.startswith('\\\\?\\') and os.path.isabs(path):
return '\\\\?\\' + path
return path
@@ -1504,8 +1504,7 @@ def cmdline_runner(cmd, shell=False, suppress_stderr=False, env=None):
if isinstance(env, dict):
kw.update(env=dict(os.environ, **env))
- if not PY2:
- kw.update(dict(encoding=ek.SYS_ENCODING, text=True, bufsize=0))
+ kw.update(dict(encoding=SYS_ENCODING, text=True, bufsize=0))
if 'win32' == sys.platform:
kw['creationflags'] = 0x08000000 # CREATE_NO_WINDOW (needed for py2exe)
diff --git a/lib/subliminal/videos.py b/lib/subliminal/videos.py
index 84a8fa11..e83fd7c3 100644
--- a/lib/subliminal/videos.py
+++ b/lib/subliminal/videos.py
@@ -29,9 +29,6 @@ import struct
from six import PY2, text_type
from _23 import decode_str
-# noinspection PyPep8Naming
-import encodingKludge as ek
-
__all__ = ['EXTENSIONS', 'MIMETYPES', 'Video', 'Episode', 'Movie', 'UnknownVideo',
'scan', 'hash_opensubtitles', 'hash_thesubdb']
@@ -62,10 +59,10 @@ class Video(object):
self._path = None
self.hashes = {}
self.subtitle_path = subtitle_path
-
+
if PY2 and isinstance(path, text_type):
path = path.encode('utf-8')
-
+
if os.path.exists(path):
self._path = path
self.size = os.path.getsize(self._path)
@@ -150,8 +147,8 @@ class Video(object):
folder = '.'
existing = [f for f in os.listdir(folder) if f.startswith(basename)]
if self.subtitle_path:
- subsDir = ek.ek(os.path.join, folder, self.subtitle_path)
- if ek.ek(os.path.isdir, subsDir):
+ subsDir = os.path.join(folder, self.subtitle_path)
+ if os.path.isdir(subsDir):
existing.extend([f for f in os.listdir(subsDir) if f.startswith(basename)])
for path in existing:
for ext in subtitles.EXTENSIONS:
@@ -232,7 +229,7 @@ def scan(entry, max_depth=3, scan_filter=None, depth=0):
"""
if PY2 and isinstance(entry, text_type):
entry = entry.encode('utf-8')
-
+
if depth > max_depth != 0: # we do not want to search the whole file system except if max_depth = 0
return []
if os.path.isdir(entry): # a dir? recurse
diff --git a/sickgear.py b/sickgear.py
index d8caf88f..d5d5e85d 100755
--- a/sickgear.py
+++ b/sickgear.py
@@ -90,7 +90,7 @@ from multiprocessing import freeze_support
from configobj import ConfigObj
# noinspection PyPep8Naming
-from encodingKludge import EXIT_BAD_ENCODING, SYS_ENCODING
+from encodingKludge import SYS_ENCODING
from exceptions_helper import ex
import sickgear
from sickgear import db, logger, name_cache, network_timezones
@@ -200,10 +200,6 @@ class SickGear(object):
sickgear.PROG_DIR = os.path.dirname(sickgear.MY_FULLNAME)
sickgear.DATA_DIR = sickgear.PROG_DIR
sickgear.MY_ARGS = sys.argv[1:]
- if EXIT_BAD_ENCODING:
- print('Sorry, you MUST add the SickGear folder to the PYTHONPATH environment variable')
- print('or find another way to force Python to use %s for string encoding.' % SYS_ENCODING)
- sys.exit(1)
sickgear.SYS_ENCODING = SYS_ENCODING
legacy_runner = globals().get('_legacy_sickgear_runner')
if not legacy_runner:
diff --git a/sickgear/__init__.py b/sickgear/__init__.py
index c8e3ff06..a0a0ed49 100644
--- a/sickgear/__init__.py
+++ b/sickgear/__init__.py
@@ -34,8 +34,6 @@ import threading
import uuid
import zlib
-# noinspection PyPep8Naming
-import encodingKludge as ek
from . import classes, db, helpers, image_cache, indexermapper, logger, metadata, naming, people_queue, providers, \
scene_exceptions, scene_numbering, scheduler, search_backlog, search_propers, search_queue, search_recent, \
show_queue, show_updater, subtitles, trakt_helpers, traktChecker, version_checker, watchedstate_queue
@@ -513,7 +511,7 @@ ANIDB_USE_MYLIST = False
ADBA_CONNECTION = None # type: Connection
ANIME_TREAT_AS_HDTV = False
-GUI_NAME = None
+GUI_NAME = ''
DEFAULT_HOME = None
FANART_LIMIT = None
FANART_PANEL = None
@@ -811,8 +809,8 @@ def init_stage_1(console_logging):
# clean cache folders
if CACHE_DIR:
helpers.clear_cache()
- ZONEINFO_DIR = ek.ek(os.path.join, CACHE_DIR, 'zoneinfo')
- if not ek.ek(os.path.isdir, ZONEINFO_DIR) and not helpers.make_path(ZONEINFO_DIR):
+ ZONEINFO_DIR = os.path.join(CACHE_DIR, 'zoneinfo')
+ if not os.path.isdir(ZONEINFO_DIR) and not helpers.make_path(ZONEINFO_DIR):
logger.log(u'!!! Creating local zoneinfo dir failed', logger.ERROR)
sg_helpers.CACHE_DIR = CACHE_DIR
sg_helpers.DATA_DIR = DATA_DIR
@@ -1054,8 +1052,8 @@ def init_stage_1(console_logging):
NZBGET_SKIP_PM = bool(check_setting_int(CFG, 'NZBGet', 'nzbget_skip_process_media', 0))
try:
- ng_script_file = ek.ek(os.path.join, ek.ek(os.path.dirname, ek.ek(os.path.dirname, __file__)),
- 'autoProcessTV', 'SickGear-NG', 'SickGear-NG.py')
+ ng_script_file = os.path.join(os.path.dirname(os.path.dirname(__file__)),
+ 'autoProcessTV', 'SickGear-NG', 'SickGear-NG.py')
with io.open(ng_script_file, 'r', encoding='utf8') as ng:
text = ng.read()
NZBGET_SCRIPT_VERSION = re.search(r""".*version: (\d+\.\d+)""", text, flags=re.M).group(1)
@@ -1615,7 +1613,7 @@ def init_stage_2():
cycleTime=datetime.timedelta(hours=1),
start_time=datetime.time(hour=SHOW_UPDATE_HOUR),
threadName='SHOWUPDATER',
- prevent_cycle_run=show_queue_scheduler.action.isShowUpdateRunning) # 3AM
+ prevent_cycle_run=show_queue_scheduler.action.is_show_update_running) # 3AM
people_queue_scheduler = scheduler.Scheduler(
people_queue.PeopleQueue(),
@@ -1718,9 +1716,9 @@ def init_stage_2():
MEMCACHE['history_tab'] = History.menu_tab(MEMCACHE['history_tab_limit'])
try:
- for f in ek.ek(scandir, ek.ek(os.path.join, PROG_DIR, 'gui', GUI_NAME, 'images', 'flags')):
+ for f in scandir(os.path.join(PROG_DIR, 'gui', GUI_NAME, 'images', 'flags')):
if f.is_file():
- MEMCACHE_FLAG_IMAGES[ek.ek(os.path.splitext, f.name)[0].lower()] = True
+ MEMCACHE_FLAG_IMAGES[os.path.splitext(f.name)[0].lower()] = True
except (BaseException, Exception):
pass
diff --git a/sickgear/anime.py b/sickgear/anime.py
index 48347021..47eeb48d 100644
--- a/sickgear/anime.py
+++ b/sickgear/anime.py
@@ -19,8 +19,6 @@ import os
import adba
from adba.aniDBresponses import LoginFirstResponse
-# noinspection PyPep8Naming
-import encodingKludge as ek
from exceptions_helper import ex
import sickgear
@@ -182,7 +180,7 @@ def short_group_names(groups):
def anidb_cache_dir():
# type: (...) -> Optional[AnyStr]
- cache_dir = ek.ek(os.path.join, sickgear.CACHE_DIR or get_system_temp_dir(), 'anidb')
+ cache_dir = os.path.join(sickgear.CACHE_DIR or get_system_temp_dir(), 'anidb')
if not make_path(cache_dir):
cache_dir = None
return cache_dir
diff --git a/sickgear/auto_post_processer.py b/sickgear/auto_post_processer.py
index 5e9f265e..dfa97031 100644
--- a/sickgear/auto_post_processer.py
+++ b/sickgear/auto_post_processer.py
@@ -16,9 +16,6 @@
import os.path
-# noinspection PyPep8Naming
-import encodingKludge as ek
-
import sickgear
from . import logger, processTV
@@ -40,12 +37,12 @@ class PostProcesser(object):
@staticmethod
def _main():
- if not ek.ek(os.path.isdir, sickgear.TV_DOWNLOAD_DIR):
+ if not os.path.isdir(sickgear.TV_DOWNLOAD_DIR):
logger.log(u"Automatic post-processing attempted but dir %s doesn't exist" % sickgear.TV_DOWNLOAD_DIR,
logger.ERROR)
return
- if not ek.ek(os.path.isabs, sickgear.TV_DOWNLOAD_DIR):
+ if not os.path.isabs(sickgear.TV_DOWNLOAD_DIR):
logger.log(u'Automatic post-processing attempted but dir %s is relative '
'(and probably not what you really want to process)' % sickgear.TV_DOWNLOAD_DIR, logger.ERROR)
return
diff --git a/sickgear/browser.py b/sickgear/browser.py
index 1dc1a60b..1c62b9e0 100644
--- a/sickgear/browser.py
+++ b/sickgear/browser.py
@@ -17,8 +17,6 @@
import os
import string
-# noinspection PyPep8Naming
-import encodingKludge as ek
from exceptions_helper import ex
from . import logger
@@ -31,7 +29,7 @@ if 'nt' == os.name:
# adapted from
# http://stackoverflow.com/questions/827371/is-there-a-way-to-list-all-the-available-drive-letters-in-python/827490
-def getWinDrives():
+def get_win_drives():
""" Return list of detected drives """
assert 'nt' == os.name
@@ -45,15 +43,6 @@ def getWinDrives():
return drives
-def foldersAtPath(path, include_parent=False, include_files=False, **kwargs):
- """ deprecated_item, remove in 2020 """
- """ prevent issues with requests using legacy params """
- include_parent = include_parent or kwargs.get('includeParent') or False
- include_files = include_files or kwargs.get('includeFiles') or False
- """ /legacy """
- return folders_at_path(path, include_parent, include_files)
-
-
def folders_at_path(path, include_parent=False, include_files=False):
""" Returns a list of dictionaries with the folders contained at the given path
Give the empty string as the path to list the contents of the root path
@@ -61,17 +50,17 @@ def folders_at_path(path, include_parent=False, include_files=False):
"""
# walk up the tree until we find a valid path
- while path and not ek.ek(os.path.isdir, path):
- if path == ek.ek(os.path.dirname, path):
+ while path and not os.path.isdir(path):
+ if path == os.path.dirname(path):
path = ''
break
else:
- path = ek.ek(os.path.dirname, path)
+ path = os.path.dirname(path)
if '' == path:
if 'nt' == os.name:
entries = [{'currentPath': r'\My Computer'}]
- for letter in getWinDrives():
+ for letter in get_win_drives():
letter_path = '%s:\\' % letter
entries.append({'name': letter_path, 'path': letter_path})
return entries
@@ -79,8 +68,8 @@ def folders_at_path(path, include_parent=False, include_files=False):
path = '/'
# fix up the path and find the parent
- path = ek.ek(os.path.abspath, ek.ek(os.path.normpath, path))
- parent_path = ek.ek(os.path.dirname, path)
+ path = os.path.abspath(os.path.normpath(path))
+ parent_path = os.path.dirname(path)
# if we're at the root then the next step is the meta-node showing our drive letters
if 'nt' == os.name and path == parent_path:
@@ -92,7 +81,7 @@ def folders_at_path(path, include_parent=False, include_files=False):
logger.log('Unable to open %s: %r / %s' % (path, e, ex(e)), logger.WARNING)
file_list = get_file_list(parent_path, include_files)
- file_list = sorted(file_list, key=lambda x: ek.ek(os.path.basename, x['name']).lower())
+ file_list = sorted(file_list, key=lambda x: os.path.basename(x['name']).lower())
entries = [{'currentPath': path}]
if include_parent and path != parent_path:
diff --git a/sickgear/common.py b/sickgear/common.py
index ac7c6b45..804fee6b 100644
--- a/sickgear/common.py
+++ b/sickgear/common.py
@@ -240,9 +240,7 @@ class Quality(object):
:rtype: int
"""
- # noinspection PyPep8Naming
- import encodingKludge as ek
- name = ek.ek(os.path.basename, name)
+ name = os.path.basename(name)
# if we have our exact text then assume we put it there
for _x in sorted(iterkeys(Quality.qualityStrings), reverse=True):
@@ -268,10 +266,8 @@ class Quality(object):
:return:
:rtype: int
"""
- # noinspection PyPep8Naming
- import encodingKludge as ek
from sickgear import logger
- name = ek.ek(os.path.basename, name)
+ name = os.path.basename(name)
name_has = (lambda quality_list, func=all: func([re.search(q, name, re.I) for q in quality_list]))
@@ -359,11 +355,9 @@ class Quality(object):
:return:
:rtype: int
"""
- # noinspection PyPep8Naming
- import encodingKludge as ek
from exceptions_helper import ex
from sickgear import logger
- if ek.ek(os.path.isfile, filename):
+ if os.path.isfile(filename):
from hachoir.parser import createParser
from hachoir.metadata import extractMetadata
@@ -372,7 +366,7 @@ class Quality(object):
parser = height = None
msg = 'Hachoir can\'t parse file "%s" content quality because it found error: %s'
try:
- parser = ek.ek(createParser, filename)
+ parser = createParser(filename)
except InputStreamError as e:
logger.log(msg % (filename, ex(e)), logger.WARNING)
except (BaseException, Exception) as e:
diff --git a/sickgear/config.py b/sickgear/config.py
index 759bc0a5..c98df792 100644
--- a/sickgear/config.py
+++ b/sickgear/config.py
@@ -18,9 +18,6 @@ import datetime
import os.path
import re
-# noinspection PyPep8Naming
-import encodingKludge as ek
-
import sickgear
import sickgear.providers
from . import db, helpers, logger, naming
@@ -360,7 +357,7 @@ def clean_url(url, add_slash=True):
scheme, netloc, path, query, fragment = urlsplit(url, 'http')
if not path.endswith('/'):
- basename, ext = ek.ek(os.path.splitext, ek.ek(os.path.basename, path))
+ basename, ext = os.path.splitext(os.path.basename(path))
if not ext and add_slash:
path += '/'
@@ -857,14 +854,14 @@ class ConfigMigrator(object):
# Migration v16: Purge old cache image folder name
@staticmethod
def _migrate_v16():
- if sickgear.CACHE_DIR and ek.ek(os.path.isdir, sickgear.CACHE_DIR):
+ if sickgear.CACHE_DIR and os.path.isdir(sickgear.CACHE_DIR):
cache_default = sickgear.CACHE_DIR
dead_paths = ['anidb', 'imdb', 'trakt']
for path in dead_paths:
sickgear.CACHE_DIR = '%s/images/%s' % (cache_default, path)
helpers.clear_cache(True)
try:
- ek.ek(os.rmdir, sickgear.CACHE_DIR)
+ os.rmdir(sickgear.CACHE_DIR)
except OSError:
pass
sickgear.CACHE_DIR = cache_default
diff --git a/sickgear/databases/mainDB.py b/sickgear/databases/mainDB.py
index 8c50d3ba..be2edcf1 100644
--- a/sickgear/databases/mainDB.py
+++ b/sickgear/databases/mainDB.py
@@ -21,8 +21,6 @@ import re
from .. import db, common, logger
from ..name_parser.parser import NameParser, InvalidNameException, InvalidShowException
import sickgear
-# noinspection PyPep8Naming
-import encodingKludge as ek
from six import iteritems
@@ -432,8 +430,8 @@ class AddSizeAndSceneNameFields(db.SchemaUpgrade):
# if there is no size yet then populate it for us
if (not cur_result['file_size'] or not int(cur_result['file_size'])) \
- and ek.ek(os.path.isfile, cur_result['location']):
- cur_size = ek.ek(os.path.getsize, cur_result['location'])
+ and os.path.isfile(cur_result['location']):
+ cur_size = os.path.getsize(cur_result['location'])
self.connection.action('UPDATE tv_episodes SET file_size = ? WHERE episode_id = ?',
[cur_size, int(cur_result['episode_id'])])
@@ -456,7 +454,7 @@ class AddSizeAndSceneNameFields(db.SchemaUpgrade):
continue
nzb_name = cur_result['resource']
- file_name = ek.ek(os.path.basename, download_sql_result[0]['resource'])
+ file_name = os.path.basename(download_sql_result[0]['resource'])
# take the extension off the filename, it's not needed
if '.' in file_name:
@@ -508,7 +506,7 @@ class AddSizeAndSceneNameFields(db.SchemaUpgrade):
self.upgrade_log(u'Adding release name to all episodes with obvious scene filenames')
for cur_result in empty_sql_result:
- ep_file_name = ek.ek(os.path.basename, cur_result['location'])
+ ep_file_name = os.path.basename(cur_result['location'])
ep_file_name = os.path.splitext(ep_file_name)[0]
# only want to find real scene names here so anything with a space in it is out
@@ -1999,7 +1997,7 @@ class ChangeTmdbID(db.SchemaUpgrade):
self.upgrade_log('Renaming tmdb images')
# noinspection PyProtectedMember
for _dir in (ImageCache._persons_dir(), ImageCache._characters_dir()):
- for _f in ek.ek(scantree, _dir): # type: DirEntry
+ for _f in scantree(_dir): # type: DirEntry
if not _f.is_file(follow_symlinks=False):
continue
try:
@@ -2010,7 +2008,7 @@ class ChangeTmdbID(db.SchemaUpgrade):
continue
try:
move_file(_f.path,
- ek.ek(os.path.join, ek.ek(os.path.dirname, _f.path),
+ os.path.join(os.path.dirname(_f.path),
re.sub('^%s-' % img_src, '%s-' %
cache_img_src[(img_src, TVINFO_TMDB)[TVINFO_TMDB_OLD == img_src]], _f.name)))
except (BaseException, Exception):
diff --git a/sickgear/db.py b/sickgear/db.py
index 5cfc0fc5..b9ee5a4e 100644
--- a/sickgear/db.py
+++ b/sickgear/db.py
@@ -24,8 +24,6 @@ import sqlite3
import threading
import time
-# noinspection PyPep8Naming
-import encodingKludge as ek
from exceptions_helper import ex
import sickgear
@@ -60,7 +58,7 @@ def dbFilename(filename='sickbeard.db', suffix=None):
"""
if suffix:
filename = '%s.%s' % (filename, suffix)
- return ek.ek(os.path.join, sickgear.DATA_DIR, filename)
+ return os.path.join(sickgear.DATA_DIR, filename)
def mass_upsert_sql(table_name, value_dict, key_dict, sanitise=True):
@@ -136,12 +134,12 @@ class DBConnection(object):
logger.log('this python sqlite3 version doesn\'t support backups', logger.DEBUG)
return False, 'this python sqlite3 version doesn\'t support backups'
- if not ek.ek(os.path.isdir, target):
+ if not os.path.isdir(target):
logger.log('Backup target invalid', logger.ERROR)
return False, 'Backup target invalid'
- target_db = ek.ek(os.path.join, target, (backup_filename, self.filename)[None is backup_filename])
- if ek.ek(os.path.exists, target_db):
+ target_db = os.path.join(target, (backup_filename, self.filename)[None is backup_filename])
+ if os.path.exists(target_db):
logger.log('Backup target file already exists', logger.ERROR)
return False, 'Backup target file already exists'
@@ -758,14 +756,14 @@ def MigrationCode(my_db):
def cleanup_old_db_backups(filename):
try:
- d, filename = ek.ek(os.path.split, filename)
+ d, filename = os.path.split(filename)
if not d:
d = sickgear.DATA_DIR
for f in filter_iter(lambda fn: fn.is_file() and filename in fn.name and
re.search(r'\.db(\.v\d+)?\.r\d+$', fn.name),
- ek.ek(scandir, d)):
+ scandir(d)):
try:
- ek.ek(os.unlink, f.path)
+ os.unlink(f.path)
except (BaseException, Exception):
pass
except (BaseException, Exception):
@@ -870,7 +868,7 @@ def backup_all_dbs(target, compress=True, prefer_7z=True):
if not success:
return False, msg
if compress:
- full_path = ek.ek(os.path.join, target, name)
+ full_path = os.path.join(target, name)
if not compress_file(full_path, '%s.db' % cur_db, prefer_7z=prefer_7z):
return False, 'Failure to compress backup'
delete_old_db_backups(target)
diff --git a/sickgear/failedProcessor.py b/sickgear/failedProcessor.py
index 3ebf1844..b1c7b4d8 100644
--- a/sickgear/failedProcessor.py
+++ b/sickgear/failedProcessor.py
@@ -69,7 +69,7 @@ class FailedProcessor(LegacyFailedProcessor):
"""
self._log(u'Failed download detected: (%s, %s)' % (self.nzb_name, self.dir_name))
- releaseName = show_name_helpers.determineReleaseName(self.dir_name, self.nzb_name)
+ releaseName = show_name_helpers.determine_release_name(self.dir_name, self.nzb_name)
if None is releaseName:
self._log(u'Warning: unable to find a valid release name.', logger.WARNING)
raise exceptions_helper.FailedProcessingFailed()
diff --git a/sickgear/helpers.py b/sickgear/helpers.py
index 2baa137e..58dd3562 100644
--- a/sickgear/helpers.py
+++ b/sickgear/helpers.py
@@ -36,8 +36,6 @@ from .common import cpu_presets, mediaExtensions, Overview, Quality, statusStrin
ARCHIVED, DOWNLOADED, FAILED, IGNORED, SKIPPED, SNATCHED_ANY, SUBTITLED, UNAIRED, UNKNOWN, WANTED
from .sgdatetime import timestamp_near
from lib.tvinfo_base.exceptions import *
-# noinspection PyPep8Naming
-import encodingKludge as ek
from exceptions_helper import ex, MultipleShowObjectsException
import dateutil.parser
@@ -171,7 +169,7 @@ def has_image_ext(filename):
:rtype: bool
"""
try:
- if ek.ek(os.path.splitext, filename)[1].lower() in ['.bmp', '.gif', '.jpeg', '.jpg', '.png', '.webp']:
+ if os.path.splitext(filename)[1].lower() in ['.bmp', '.gif', '.jpeg', '.jpg', '.png', '.webp']:
return True
except (BaseException, Exception):
pass
@@ -251,9 +249,9 @@ def make_dir(path):
:return: success of creation
:rtype: bool
"""
- if not ek.ek(os.path.isdir, path):
+ if not os.path.isdir(path):
try:
- ek.ek(os.makedirs, path)
+ os.makedirs(path)
# do a Synology library update
notifiers.NotifierFactory().get('SYNOINDEX').addFolder(path)
except OSError:
@@ -391,7 +389,7 @@ def link(src_file, dest_file):
if 0 == ctypes.windll.kernel32.CreateHardLinkW(text_type(dest_file), text_type(src_file), 0):
raise ctypes.WinError()
else:
- ek.ek(os.link, src_file, dest_file)
+ os.link(src_file, dest_file)
def hardlink_file(src_file, dest_file):
@@ -403,7 +401,7 @@ def hardlink_file(src_file, dest_file):
:type dest_file: AnyStr
"""
try:
- ek.ek(link, src_file, dest_file)
+ link(src_file, dest_file)
fix_set_group_id(dest_file)
except (BaseException, Exception) as e:
logger.log(u"Failed to create hardlink of %s at %s: %s. Copying instead." % (src_file, dest_file, ex(e)),
@@ -423,10 +421,10 @@ def symlink(src_file, dest_file):
import ctypes
if ctypes.windll.kernel32.CreateSymbolicLinkW(
- text_type(dest_file), text_type(src_file), 1 if ek.ek(os.path.isdir, src_file) else 0) in [0, 1280]:
+ text_type(dest_file), text_type(src_file), 1 if os.path.isdir(src_file) else 0) in [0, 1280]:
raise ctypes.WinError()
else:
- ek.ek(os.symlink, src_file, dest_file)
+ os.symlink(src_file, dest_file)
def move_and_symlink_file(src_file, dest_file):
@@ -438,9 +436,9 @@ def move_and_symlink_file(src_file, dest_file):
:type dest_file: AnyStr
"""
try:
- ek.ek(shutil.move, src_file, dest_file)
+ shutil.move(src_file, dest_file)
fix_set_group_id(dest_file)
- ek.ek(symlink, dest_file, src_file)
+ symlink(dest_file, src_file)
except (BaseException, Exception):
logger.log(u"Failed to create symlink of %s at %s. Copying instead" % (src_file, dest_file), logger.ERROR)
copy_file(src_file, dest_file)
@@ -461,11 +459,11 @@ def rename_ep_file(cur_path, new_path, old_path_length=0):
:rtype: bool
"""
- # new_dest_dir, new_dest_name = ek.ek(os.path.split, new_path)
+ # new_dest_dir, new_dest_name = os.path.split(new_path)
if 0 == old_path_length or len(cur_path) < old_path_length:
# approach from the right
- cur_file_name, cur_file_ext = ek.ek(os.path.splitext, cur_path)
+ cur_file_name, cur_file_ext = os.path.splitext(cur_path)
else:
# approach from the left
cur_file_ext = cur_path[old_path_length:]
@@ -473,7 +471,7 @@ def rename_ep_file(cur_path, new_path, old_path_length=0):
if cur_file_ext[1:] in subtitleExtensions:
# Extract subtitle language from filename
- sublang = ek.ek(os.path.splitext, cur_file_name)[1][1:]
+ sublang = os.path.splitext(cur_file_name)[1][1:]
# Check if the language extracted from filename is a valid language
try:
@@ -485,18 +483,18 @@ def rename_ep_file(cur_path, new_path, old_path_length=0):
# put the extension on the incoming file
new_path += cur_file_ext
- make_path(ek.ek(os.path.dirname, new_path), syno=True)
+ make_path(os.path.dirname(new_path), syno=True)
# move the file
try:
logger.log(u'Renaming file from %s to %s' % (cur_path, new_path))
- ek.ek(shutil.move, cur_path, new_path)
+ shutil.move(cur_path, new_path)
except (OSError, IOError) as e:
logger.log(u"Failed renaming " + cur_path + " to " + new_path + ": " + ex(e), logger.ERROR)
return False
# clean up any old folders that are empty
- delete_empty_folders(ek.ek(os.path.dirname, cur_path))
+ delete_empty_folders(os.path.dirname(cur_path))
return True
@@ -517,8 +515,8 @@ def delete_empty_folders(check_empty_dir, keep_dir=None):
logger.log(u"Trying to clean any empty folders under " + check_empty_dir)
# as long as the folder exists and doesn't contain any files, delete it
- while ek.ek(os.path.isdir, check_empty_dir) and check_empty_dir != keep_dir:
- check_files = ek.ek(os.listdir, check_empty_dir)
+ while os.path.isdir(check_empty_dir) and check_empty_dir != keep_dir:
+ check_files = os.listdir(check_empty_dir)
if not check_files or (len(check_files) <= len(ignore_items) and all(
[check_file in ignore_items for check_file in check_files])):
@@ -526,13 +524,13 @@ def delete_empty_folders(check_empty_dir, keep_dir=None):
try:
logger.log(u"Deleting empty folder: " + check_empty_dir)
# need shutil.rmtree when ignore_items is really implemented
- ek.ek(os.rmdir, check_empty_dir)
+ os.rmdir(check_empty_dir)
# do a Synology library update
notifiers.NotifierFactory().get('SYNOINDEX').deleteFolder(check_empty_dir)
except OSError as e:
logger.log(u"Unable to delete " + check_empty_dir + ": " + repr(e) + " / " + ex(e), logger.WARNING)
break
- check_empty_dir = ek.ek(os.path.dirname, check_empty_dir)
+ check_empty_dir = os.path.dirname(check_empty_dir)
else:
break
@@ -565,7 +563,7 @@ def get_absolute_number_from_season_and_episode(show_obj, season, episode):
logger.DEBUG)
else:
logger.debug('No entries for absolute number in show: %s found using %sx%s' %
- (show_obj.unique_name, str(season), str(episode)))
+ (show_obj.unique_name, str(season), str(episode)))
return absolute_number
@@ -608,7 +606,7 @@ def sanitize_scene_name(name):
# tidy up stuff that doesn't belong in scene names
name = re.sub(r'(-?\s|/)', '.', name).replace('&', 'and')
- name = re.sub(r"\.\.*", '.', name).rstrip('.')
+ name = re.sub(r"\.+", '.', name).rstrip('.')
return name
return ''
@@ -675,24 +673,24 @@ def backup_versioned_file(old_file, version):
new_file = '%s.v%s' % (old_file, version)
- if ek.ek(os.path.isfile, new_file):
+ if os.path.isfile(new_file):
changed_old_db = False
for back_nr in range(1, 10000):
alt_name = '%s.r%s' % (new_file, back_nr)
- if not ek.ek(os.path.isfile, alt_name):
+ if not os.path.isfile(alt_name):
try:
shutil.move(new_file, alt_name)
changed_old_db = True
break
except (BaseException, Exception):
- if ek.ek(os.path.isfile, new_file):
+ if os.path.isfile(new_file):
continue
logger.log('could not rename old backup db file', logger.WARNING)
if not changed_old_db:
raise Exception('can\'t create a backup of db')
- while not ek.ek(os.path.isfile, new_file):
- if not ek.ek(os.path.isfile, old_file) or 0 == get_size(old_file):
+ while not os.path.isfile(new_file):
+ if not os.path.isfile(old_file) or 0 == get_size(old_file):
logger.log(u'No need to create backup', logger.DEBUG)
break
@@ -724,12 +722,12 @@ def restore_versioned_file(backup_file, version):
:return: success
:rtype: bool
"""
- numTries = 0
+ num_tries = 0
- new_file, backup_version = ek.ek(os.path.splitext, backup_file)
+ new_file, backup_version = os.path.splitext(backup_file)
restore_file = new_file + '.' + 'v' + str(version)
- if not ek.ek(os.path.isfile, new_file):
+ if not os.path.isfile(new_file):
logger.log(u"Not restoring, " + new_file + " doesn't exist", logger.DEBUG)
return False
@@ -744,8 +742,8 @@ def restore_versioned_file(backup_file, version):
logger.WARNING)
return False
- while not ek.ek(os.path.isfile, new_file):
- if not ek.ek(os.path.isfile, restore_file):
+ while not os.path.isfile(new_file):
+ if not os.path.isfile(restore_file):
logger.log(u"Not restoring, " + restore_file + " doesn't exist", logger.DEBUG)
break
@@ -756,11 +754,11 @@ def restore_versioned_file(backup_file, version):
break
except (BaseException, Exception) as e:
logger.log(u"Error while trying to restore " + restore_file + ": " + ex(e), logger.WARNING)
- numTries += 1
+ num_tries += 1
time.sleep(1)
logger.log(u"Trying again.", logger.DEBUG)
- if 10 <= numTries:
+ if 10 <= num_tries:
logger.log(u"Unable to restore " + restore_file + " to " + new_file + " please do it manually.",
logger.ERROR)
return False
@@ -978,8 +976,8 @@ def is_hidden_folder(folder):
:return: Returns True if folder is hidden
:rtype: bool
"""
- if ek.ek(os.path.isdir, folder):
- if ek.ek(os.path.basename, folder).startswith('.'):
+ if os.path.isdir(folder):
+ if os.path.basename(folder).startswith('.'):
return True
return False
@@ -994,7 +992,7 @@ def real_path(path):
:return: the canonicalized absolute pathname
:rtype: AnyStr
"""
- return ek.ek(os.path.normpath, ek.ek(os.path.normcase, ek.ek(os.path.realpath, ek.ek(os.path.expanduser, path))))
+ return os.path.normpath(os.path.normcase(os.path.realpath(os.path.expanduser(path))))
def validate_show(show_obj, season=None, episode=None):
@@ -1048,7 +1046,7 @@ def clear_cache(force=False):
elif direntry.is_dir(**direntry_args) and direntry.name not in ['cheetah', 'sessions', 'indexers']:
dirty = dirty or False
try:
- ek.ek(os.rmdir, direntry.path)
+ os.rmdir(direntry.path)
except OSError:
dirty = True
@@ -1098,8 +1096,8 @@ def get_size(start_path='.'):
:return: size in bytes
:rtype: int or long
"""
- if ek.ek(os.path.isfile, start_path):
- return ek.ek(os.path.getsize, start_path)
+ if os.path.isfile(start_path):
+ return os.path.getsize(start_path)
try:
return sum(map((lambda x: x.stat(follow_symlinks=False).st_size), scantree(start_path)))
except OSError:
@@ -1115,14 +1113,14 @@ def get_media_stats(start_path='.'):
:param start_path: path to scan
"""
- if ek.ek(os.path.isdir, start_path):
+ if os.path.isdir(start_path):
sizes = sorted(map(lambda y: y.stat(follow_symlinks=False).st_size,
filter(lambda x: has_media_ext(x.name), scantree(start_path))))
if sizes:
return len(sizes), sizes[0], sizes[-1], int(sum(sizes) / len(sizes))
- elif ek.ek(os.path.isfile, start_path):
- size = ek.ek(os.path.getsize, start_path)
+ elif os.path.isfile(start_path):
+ size = os.path.getsize(start_path)
return 1, size, size, size
return 0, 0, 0, 0
@@ -1137,7 +1135,7 @@ def remove_article(text=''):
:return: text without articles
:rtype: AnyStr
"""
- return re.sub(r'(?i)^(?:(?:A(?!\s+to)n?)|The)\s(\w)', r'\1', text)
+ return re.sub(r'(?i)^(?:A(?!\s+to)n?|The)\s(\w)', r'\1', text)
def re_valid_hostname(with_allowed=True):
@@ -1332,11 +1330,11 @@ def cleanup_cache():
Delete old cached files
"""
delete_not_changed_in(
- [ek.ek(os.path.join, sickgear.CACHE_DIR, 'images', 'browse', 'thumb', x)
+ [os.path.join(sickgear.CACHE_DIR, 'images', 'browse', 'thumb', x)
for x in ['anidb', 'imdb', 'trakt', 'tvdb']] +
- [ek.ek(os.path.join, sickgear.CACHE_DIR, 'images', x)
+ [os.path.join(sickgear.CACHE_DIR, 'images', x)
for x in ['characters', 'person']] +
- [ek.ek(os.path.join, sickgear.CACHE_DIR, 'tvinfo_cache')])
+ [os.path.join(sickgear.CACHE_DIR, 'tvinfo_cache')])
def delete_not_changed_in(paths, days=30, minutes=0):
@@ -1377,8 +1375,8 @@ def set_file_timestamp(filename, min_age=3, new_time=None):
"""
min_time = int(timestamp_near((datetime.datetime.now() - datetime.timedelta(days=min_age))))
try:
- if ek.ek(os.path.isfile, filename) and ek.ek(os.path.getmtime, filename) < min_time:
- ek.ek(os.utime, filename, new_time)
+ if os.path.isfile(filename) and os.path.getmtime(filename) < min_time:
+ os.utime(filename, new_time)
except (BaseException, Exception):
pass
@@ -1407,7 +1405,7 @@ def is_link(filepath):
:return: True or False
"""
if 'win32' == sys.platform:
- if not ek.ek(os.path.exists, filepath):
+ if not os.path.exists(filepath):
return False
import ctypes
@@ -1417,7 +1415,7 @@ def is_link(filepath):
attr = ctypes.windll.kernel32.GetFileAttributesW(text_type(filepath))
return invalid_file_attributes != attr and 0 != attr & file_attribute_reparse_point
- return ek.ek(os.path.islink, filepath)
+ return os.path.islink(filepath)
def df():
@@ -1496,11 +1494,11 @@ def path_mapper(search, replace, subject):
:rtype: Tuple[AnyStr, bool]
"""
delim = '/!~!/'
- search = re.sub(r'[\\]', delim, search)
- replace = re.sub(r'[\\]', delim, replace)
- path = re.sub(r'[\\]', delim, subject)
+ search = re.sub(r'\\', delim, search)
+ replace = re.sub(r'\\', delim, replace)
+ path = re.sub(r'\\', delim, subject)
result = re.sub('(?i)^%s' % search, replace, path)
- result = ek.ek(os.path.normpath, re.sub(delim, '/', result))
+ result = os.path.normpath(re.sub(delim, '/', result))
return result, result != subject
@@ -1559,7 +1557,7 @@ def generate_show_dir_name(root_dir, show_name):
san_show_name = san_show_name.replace(' ', '.')
if None is root_dir:
return san_show_name
- return ek.ek(os.path.join, root_dir, san_show_name)
+ return os.path.join(root_dir, san_show_name)
def count_files_dirs(base_dir):
@@ -1572,7 +1570,7 @@ def count_files_dirs(base_dir):
"""
f = d = 0
try:
- files = ek.ek(scandir, base_dir)
+ files = scandir(base_dir)
except OSError as e:
logger.log('Unable to count files %s / %s' % (repr(e), ex(e)), logger.WARNING)
else:
@@ -1603,8 +1601,8 @@ def upgrade_new_naming():
sickgear.CFG.setdefault('GUI', {})['fanart_ratings'] = '%s' % ne
sickgear.CFG.write()
- image_cache_dir = ek.ek(os.path.join, sickgear.CACHE_DIR, 'images')
- bp_match = re.compile(r'(\d+)\.((?:banner|poster|(?:(?:\d+(?:\.\w*)?\.(?:\w{5,8}))\.)?fanart)\.jpg)', flags=re.I)
+ image_cache_dir = os.path.join(sickgear.CACHE_DIR, 'images')
+ bp_match = re.compile(r'(\d+)\.((?:banner|poster|(?:\d+(?:\.\w*)?\.\w{5,8}\.)?fanart)\.jpg)', flags=re.I)
def _set_progress(p_msg, c, s):
ps = None
@@ -1618,14 +1616,14 @@ def upgrade_new_naming():
sickgear.classes.loading_msg.set_msg_progress(p_msg, '{:6.2f}%'.format(ps))
for d in ['', 'thumbnails']:
- bd = ek.ek(os.path.join, image_cache_dir, d)
- if ek.ek(os.path.isdir, bd):
+ bd = os.path.join(image_cache_dir, d)
+ if os.path.isdir(bd):
fc, dc = count_files_dirs(bd)
step = fc / float(100)
cf = 0
p_text = 'Upgrading %s' % (d, 'banner/poster')[not d]
_set_progress(p_text, 0, 0)
- for entry in ek.ek(scandir, bd):
+ for entry in scandir(bd):
if entry.is_file():
cf += 1
_set_progress(p_text, cf, step)
@@ -1634,14 +1632,13 @@ def upgrade_new_naming():
old_id = int(b_s.group(1))
tvid = show_list.get(old_id)
if tvid:
- nb_dir = ek.ek(os.path.join, sickgear.CACHE_DIR, 'images', 'shows',
- '%s-%s' % (tvid, old_id), d)
- if not ek.ek(os.path.isdir, nb_dir):
+ nb_dir = os.path.join(sickgear.CACHE_DIR, 'images', 'shows', '%s-%s' % (tvid, old_id), d)
+ if not os.path.isdir(nb_dir):
try:
- ek.ek(os.makedirs, nb_dir)
+ os.makedirs(nb_dir)
except (BaseException, Exception):
pass
- new_name = ek.ek(os.path.join, nb_dir, bp_match.sub(r'\2', entry.name))
+ new_name = os.path.join(nb_dir, bp_match.sub(r'\2', entry.name))
try:
move_file(entry.path, new_name)
except (BaseException, Exception) as e:
@@ -1650,7 +1647,7 @@ def upgrade_new_naming():
else:
# clean up files without reference in db
try:
- ek.ek(os.remove, entry.path)
+ os.remove(entry.path)
except (BaseException, Exception):
pass
elif entry.is_dir():
@@ -1664,7 +1661,7 @@ def upgrade_new_naming():
p_text = 'Upgrading fanart'
_set_progress(p_text, 0, 0)
try:
- entries = ek.ek(scandir, entry.path)
+ entries = scandir(entry.path)
except OSError as e:
logger.log('Unable to stat dirs %s / %s' % (repr(e), ex(e)), logger.WARNING)
continue
@@ -1676,17 +1673,16 @@ def upgrade_new_naming():
if old_id:
new_id = show_list.get(old_id)
if new_id:
- new_dir_name = ek.ek(os.path.join, sickgear.CACHE_DIR, 'images', 'shows',
- '%s-%s' % (new_id, old_id), 'fanart')
+ new_dir_name = os.path.join(sickgear.CACHE_DIR, 'images', 'shows',
+ '%s-%s' % (new_id, old_id), 'fanart')
try:
move_file(d_entry.path, new_dir_name)
except (BaseException, Exception) as e:
logger.log('Unable to rename %s to %s: %s / %s' %
(d_entry.path, new_dir_name, repr(e), ex(e)), logger.WARNING)
- if ek.ek(os.path.isdir, new_dir_name):
+ if os.path.isdir(new_dir_name):
try:
- f_n = filter_iter(lambda fn: fn.is_file(),
- ek.ek(scandir, new_dir_name))
+ f_n = filter_iter(lambda fn: fn.is_file(), scandir(new_dir_name))
except OSError as e:
logger.log('Unable to rename %s / %s' % (repr(e), ex(e)),
logger.WARNING)
@@ -1704,20 +1700,20 @@ def upgrade_new_naming():
(args[0], args[1], repr(e), ex(e)), logger.WARNING)
else:
try:
- ek.ek(shutil.rmtree, d_entry.path)
+ shutil.rmtree(d_entry.path)
except (BaseException, Exception):
pass
try:
- ek.ek(shutil.rmtree, d_entry.path)
+ shutil.rmtree(d_entry.path)
except (BaseException, Exception):
pass
try:
- ek.ek(os.rmdir, entry.path)
+ os.rmdir(entry.path)
except (BaseException, Exception):
pass
if 'thumbnails' == d:
try:
- ek.ek(os.rmdir, bd)
+ os.rmdir(bd)
except (BaseException, Exception):
pass
_set_progress(p_text, 0, 1)
diff --git a/sickgear/image_cache.py b/sickgear/image_cache.py
index 2f6b5b38..8648bd22 100644
--- a/sickgear/image_cache.py
+++ b/sickgear/image_cache.py
@@ -20,8 +20,6 @@ import os.path
import re
import zlib
-# noinspection PyPep8Naming
-import encodingKludge as ek
import exceptions_helper
from exceptions_helper import ex
import sickgear
@@ -30,7 +28,6 @@ from . import db, logger
from .metadata.generic import GenericMetadata
from .sgdatetime import timestamp_near
from .indexers.indexer_config import TVINFO_TVDB, TVINFO_TVMAZE, TVINFO_TMDB, TVINFO_IMDB
-from lib.tvinfo_base.exceptions import *
from six import itervalues, iteritems
@@ -56,9 +53,9 @@ class ImageCache(object):
characters_dir = None # type: Optional[AnyStr]
def __init__(self):
- if None is ImageCache.base_dir and ek.ek(os.path.exists, sickgear.CACHE_DIR):
- ImageCache.base_dir = ek.ek(os.path.abspath, ek.ek(os.path.join, sickgear.CACHE_DIR, 'images'))
- ImageCache.shows_dir = ek.ek(os.path.abspath, ek.ek(os.path.join, self.base_dir, 'shows'))
+ if None is ImageCache.base_dir and os.path.exists(sickgear.CACHE_DIR):
+ ImageCache.base_dir = os.path.abspath(os.path.join(sickgear.CACHE_DIR, 'images'))
+ ImageCache.shows_dir = os.path.abspath(os.path.join(self.base_dir, 'shows'))
ImageCache.persons_dir = self._persons_dir()
ImageCache.characters_dir = self._characters_dir()
@@ -70,17 +67,17 @@ class ImageCache(object):
# """
# Builds up the full path to the image cache directory
# """
- # return ek.ek(os.path.abspath, ek.ek(os.path.join, sickgear.CACHE_DIR, 'images'))
+ # return os.path.abspath(os.path.join(sickgear.CACHE_DIR, 'images'))
@staticmethod
def _persons_dir():
# type: (...) -> AnyStr
- return ek.ek(os.path.join, sickgear.CACHE_DIR, 'images', 'person')
+ return os.path.join(sickgear.CACHE_DIR, 'images', 'person')
@staticmethod
def _characters_dir():
# type: (...) -> AnyStr
- return ek.ek(os.path.join, sickgear.CACHE_DIR, 'images', 'characters')
+ return os.path.join(sickgear.CACHE_DIR, 'images', 'characters')
def _fanart_dir(self, tvid=None, prodid=None):
# type: (int, int) -> AnyStr
@@ -95,7 +92,7 @@ class ImageCache(object):
:rtype: AnyStr or None
"""
if None not in (tvid, prodid):
- return ek.ek(os.path.abspath, ek.ek(os.path.join, self.shows_dir, '%s-%s' % (tvid, prodid), 'fanart'))
+ return os.path.abspath(os.path.join(self.shows_dir, '%s-%s' % (tvid, prodid), 'fanart'))
def _thumbnails_dir(self, tvid, prodid):
# type: (int, int) -> AnyStr
@@ -109,7 +106,7 @@ class ImageCache(object):
:return: path
:rtype: AnyStr
"""
- return ek.ek(os.path.abspath, ek.ek(os.path.join, self.shows_dir, '%s-%s' % (tvid, prodid), 'thumbnails'))
+ return os.path.abspath(os.path.join(self.shows_dir, '%s-%s' % (tvid, prodid), 'thumbnails'))
@staticmethod
def _person_base_name(person_obj):
@@ -134,7 +131,7 @@ class ImageCache(object):
:param base_path:
"""
filename = '%s.jpg' % base_path or self._person_base_name(person_obj)
- return ek.ek(os.path.join, self.persons_dir, filename)
+ return os.path.join(self.persons_dir, filename)
def person_thumb_path(self, person_obj, base_path=None):
# type: (Optional[Person], AnyStr) -> AnyStr
@@ -144,7 +141,7 @@ class ImageCache(object):
:param base_path:
"""
filename = '%s_thumb.jpg' % base_path or self._person_base_name(person_obj)
- return ek.ek(os.path.join, self.persons_dir, filename)
+ return os.path.join(self.persons_dir, filename)
def person_both_paths(self, person_obj):
# type: (Person) -> Tuple[AnyStr, AnyStr]
@@ -164,7 +161,7 @@ class ImageCache(object):
:param base_path:
"""
filename = '%s.jpg' % base_path or self._character_base_name(character_obj, show_obj)
- return ek.ek(os.path.join, self.characters_dir, filename)
+ return os.path.join(self.characters_dir, filename)
def character_thumb_path(self, character_obj, show_obj, base_path=None):
# type: (Optional[Character], Optional[TVShow], AnyStr) -> AnyStr
@@ -175,7 +172,7 @@ class ImageCache(object):
:param base_path:
"""
filename = '%s_thumb.jpg' % base_path or self._character_base_name(character_obj, show_obj)
- return ek.ek(os.path.join, self.characters_dir, filename)
+ return os.path.join(self.characters_dir, filename)
def character_both_path(self, character_obj, show_obj=None, tvid=None, proid=None, person_obj=None):
# type: (Character, TVShow, integer_types, integer_types, Person) -> Tuple[AnyStr, AnyStr]
@@ -208,7 +205,7 @@ class ImageCache(object):
:return: a full path to the cached poster file for the given tvid prodid
:rtype: AnyStr
"""
- return ek.ek(os.path.join, self.shows_dir, '%s-%s' % (tvid, prodid), 'poster.jpg')
+ return os.path.join(self.shows_dir, '%s-%s' % (tvid, prodid), 'poster.jpg')
def banner_path(self, tvid, prodid):
# type: (int, int) -> AnyStr
@@ -222,7 +219,7 @@ class ImageCache(object):
:return: a full path to the cached banner file for the given tvid prodid
:rtype: AnyStr
"""
- return ek.ek(os.path.join, self.shows_dir, '%s-%s' % (tvid, prodid), 'banner.jpg')
+ return os.path.join(self.shows_dir, '%s-%s' % (tvid, prodid), 'banner.jpg')
def fanart_path(self, tvid, prodid, prefix=''):
# type: (int, int, Optional[AnyStr]) -> AnyStr
@@ -238,7 +235,7 @@ class ImageCache(object):
:return: a full path to the cached fanart file for the given tvid prodid
:rtype: AnyStr
"""
- return ek.ek(os.path.join, self._fanart_dir(tvid, prodid), '%s%s' % (prefix, 'fanart.jpg'))
+ return os.path.join(self._fanart_dir(tvid, prodid), '%s%s' % (prefix, 'fanart.jpg'))
def poster_thumb_path(self, tvid, prodid):
# type: (int, int) -> AnyStr
@@ -252,7 +249,7 @@ class ImageCache(object):
:return: a full path to the cached poster file for the given tvid prodid
:rtype: AnyStr
"""
- return ek.ek(os.path.join, self._thumbnails_dir(tvid, prodid), 'poster.jpg')
+ return os.path.join(self._thumbnails_dir(tvid, prodid), 'poster.jpg')
def banner_thumb_path(self, tvid, prodid):
# type: (int, int) -> AnyStr
@@ -266,7 +263,7 @@ class ImageCache(object):
:return: a full path to the cached poster file for the given tvid prodid
:rtype: AnyStr
"""
- return ek.ek(os.path.join, self._thumbnails_dir(tvid, prodid), 'banner.jpg')
+ return os.path.join(self._thumbnails_dir(tvid, prodid), 'banner.jpg')
@staticmethod
def has_file(image_file):
@@ -278,8 +275,8 @@ class ImageCache(object):
:rtype: bool
"""
result = []
- for filename in ek.ek(glob.glob, image_file):
- result.append(ek.ek(os.path.isfile, filename) and filename)
+ for filename in glob.glob(image_file):
+ result.append(os.path.isfile(filename) and filename)
logger.log(u'Found cached %s' % filename, logger.DEBUG)
not any(result) and logger.log(u'No cache for %s' % image_file, logger.DEBUG)
@@ -367,7 +364,7 @@ class ImageCache(object):
:param image: image file or data
:param is_binary: is data instead of path
"""
- if not is_binary and not ek.ek(os.path.isfile, image):
+ if not is_binary and not os.path.isfile(image):
logger.warning(u'File not found to determine image type of %s' % image)
return
if not image:
@@ -540,7 +537,7 @@ class ImageCache(object):
else:
sg_helpers.copy_file(image_path, dest_path)
- return ek.ek(os.path.isfile, dest_path) and dest_path or None
+ return os.path.isfile(dest_path) and dest_path or None
def _cache_info_source_images(self, show_obj, img_type, num_files=0, max_files=500, force=False, show_infos=None):
# type: (TVShow, int, int, int, bool, ShowInfosDict) -> bool
@@ -588,7 +585,7 @@ class ImageCache(object):
return False
crcs = []
- for cache_file_name in ek.ek(glob.glob, dest_path):
+ for cache_file_name in glob.glob(dest_path):
with open(cache_file_name, mode='rb') as resource:
crc = '%05X' % (zlib.crc32(resource.read()) & 0xFFFFFFFF)
if crc not in crcs:
@@ -627,7 +624,7 @@ class ImageCache(object):
success += (0, 1)[result]
if num_files > max_files:
break
- total = len(ek.ek(glob.glob, dest_path))
+ total = len(glob.glob(dest_path))
logger.log(u'Saved %s fanart images%s. Cached %s of max %s fanart file%s'
% (success,
('', ' from ' + ', '.join([x for x in list(set(sources))]))[0 < len(sources)],
@@ -696,7 +693,7 @@ class ImageCache(object):
cache_path = self.fanart_path(*arg_tvid_prodid).replace('fanart.jpg', '')
# num_images = len(fnmatch.filter(os.listdir(cache_path), '*.jpg'))
- for cache_dir in ek.ek(glob.glob, cache_path):
+ for cache_dir in glob.glob(cache_path):
if show_obj.tvid_prodid in sickgear.FANART_RATINGS:
del (sickgear.FANART_RATINGS[show_obj.tvid_prodid])
result = sg_helpers.remove_file(cache_dir, tree=True)
@@ -712,11 +709,11 @@ class ImageCache(object):
needed = []
if any([need_images[self.POSTER], need_images[self.BANNER]]):
poster_path = cur_provider.get_poster_path(show_obj)
- if poster_path not in checked_files and ek.ek(os.path.isfile, poster_path):
+ if poster_path not in checked_files and os.path.isfile(poster_path):
needed += [[False, poster_path]]
if need_images[self.FANART]:
fanart_path = cur_provider.get_fanart_path(show_obj)
- if fanart_path not in checked_files and ek.ek(os.path.isfile, fanart_path):
+ if fanart_path not in checked_files and os.path.isfile(fanart_path):
needed += [[True, fanart_path]]
if 0 == len(needed):
break
diff --git a/sickgear/indexers/indexer_api.py b/sickgear/indexers/indexer_api.py
index 3d3e7b64..530faa96 100644
--- a/sickgear/indexers/indexer_api.py
+++ b/sickgear/indexers/indexer_api.py
@@ -16,10 +16,9 @@
import os
from .indexer_config import init_config, tvinfo_config
-from sg_helpers import make_path, proxy_setting
+from sg_helpers import proxy_setting
import sickgear
from lib.tvinfo_base import TVInfoBase
-import encodingKludge as ek
from _23 import list_values
@@ -41,8 +40,7 @@ class TVInfoAPI(object):
if tvinfo_config[self.tvid]['active'] or ('no_dummy' in kwargs and True is kwargs['no_dummy']):
if 'no_dummy' in kwargs:
kwargs.pop('no_dummy')
- indexer_cache_dir = ek.ek(os.path.join, sickgear.CACHE_DIR, 'tvinfo_cache',
- tvinfo_config[self.tvid]['name'])
+ indexer_cache_dir = os.path.join(sickgear.CACHE_DIR, 'tvinfo_cache', tvinfo_config[self.tvid]['name'])
kwargs['diskcache_dir'] = indexer_cache_dir
return tvinfo_config[self.tvid]['module'](*args, **kwargs)
else:
diff --git a/sickgear/logger.py b/sickgear/logger.py
index 3aa6791d..39821266 100644
--- a/sickgear/logger.py
+++ b/sickgear/logger.py
@@ -337,9 +337,8 @@ class TimedCompressedRotatingFileHandler(TimedRotatingFileHandler):
except AttributeError:
pass
- import encodingKludge
try:
- encodingKludge.ek(os.rename, self.baseFilename, dfn)
+ os.rename(self.baseFilename, dfn)
except (BaseException, Exception):
pass
@@ -360,9 +359,8 @@ class TimedCompressedRotatingFileHandler(TimedRotatingFileHandler):
if 0 < self.backupCount:
# find the oldest log file and delete it
# phase out files named sickgear.log in favour of sickgear.logs over backup_count days
- all_names = encodingKludge.ek(glob.glob, file_name + '_*') + \
- encodingKludge.ek(glob.glob, encodingKludge.ek(os.path.join, encodingKludge.ek(
- os.path.dirname, file_name), 'sickbeard_*'))
+ all_names = glob.glob(file_name + '_*') \
+ + glob.glob(os.path.join(os.path.dirname(file_name), 'sickbeard_*'))
if len(all_names) > self.backupCount:
all_names.sort()
self.delete_logfile(all_names[0])
diff --git a/sickgear/metadata/generic.py b/sickgear/metadata/generic.py
index a182a389..add8865d 100644
--- a/sickgear/metadata/generic.py
+++ b/sickgear/metadata/generic.py
@@ -30,8 +30,6 @@ from ..indexers.indexer_config import TVINFO_TVDB, TVINFO_TMDB
from lib.tvinfo_base import TVInfoImage, TVInfoImageType, TVInfoImageSize
from lib.tvinfo_base.exceptions import *
import sickgear
-# noinspection PyPep8Naming
-import encodingKludge as ek
from exceptions_helper import ex
from lib.fanart.core import Request as fanartRequest
import lib.fanart as fanart
@@ -127,13 +125,13 @@ class GenericMetadata(object):
def get_id(self):
# type: (...) -> AnyStr
- return GenericMetadata.makeID(self.name)
+ return GenericMetadata.make_id(self.name)
@staticmethod
- def makeID(name):
+ def make_id(name):
# type: (AnyStr) -> AnyStr
name_id = re.sub("[+]", "plus", name)
- name_id = re.sub(r"[^\w\d_]", "_", name_id).lower()
+ name_id = re.sub(r"[^\w_]", "_", name_id).lower()
return name_id
def set_config(self, string):
@@ -151,69 +149,69 @@ class GenericMetadata(object):
self.season_all_banner = config_list[9]
def _has_show_metadata(self, show_obj):
- # type: (sickgear.tv.TVShow) -> AnyStr
- result = ek.ek(os.path.isfile, self.get_show_file_path(show_obj))
+ # type: (sickgear.tv.TVShow) -> bool
+ result = os.path.isfile(self.get_show_file_path(show_obj))
logger.log(u"Checking if " + self.get_show_file_path(show_obj) + " exists: " + str(result), logger.DEBUG)
return result
def has_episode_metadata(self, ep_obj):
- # type: (sickgear.tv.TVEpisode) -> AnyStr
- result = ek.ek(os.path.isfile, self.get_episode_file_path(ep_obj))
+ # type: (sickgear.tv.TVEpisode) -> bool
+ result = os.path.isfile(self.get_episode_file_path(ep_obj))
logger.log(u"Checking if " + self.get_episode_file_path(ep_obj) + " exists: " + str(result), logger.DEBUG)
return result
def _has_fanart(self, show_obj):
- # type: (sickgear.tv.TVShow) -> AnyStr
- result = ek.ek(os.path.isfile, self.get_fanart_path(show_obj))
+ # type: (sickgear.tv.TVShow) -> bool
+ result = os.path.isfile(self.get_fanart_path(show_obj))
logger.log(u"Checking if " + self.get_fanart_path(show_obj) + " exists: " + str(result), logger.DEBUG)
return result
def _has_poster(self, show_obj):
- # type: (sickgear.tv.TVShow) -> AnyStr
- result = ek.ek(os.path.isfile, self.get_poster_path(show_obj))
+ # type: (sickgear.tv.TVShow) -> bool
+ result = os.path.isfile(self.get_poster_path(show_obj))
logger.log(u"Checking if " + self.get_poster_path(show_obj) + " exists: " + str(result), logger.DEBUG)
return result
def _has_banner(self, show_obj):
- # type: (sickgear.tv.TVShow) -> AnyStr
- result = ek.ek(os.path.isfile, self.get_banner_path(show_obj))
+ # type: (sickgear.tv.TVShow) -> bool
+ result = os.path.isfile(self.get_banner_path(show_obj))
logger.log(u"Checking if " + self.get_banner_path(show_obj) + " exists: " + str(result), logger.DEBUG)
return result
def has_episode_thumb(self, ep_obj):
- # type: (sickgear.tv.TVEpisode) -> AnyStr
+ # type: (sickgear.tv.TVEpisode) -> bool
location = self.get_episode_thumb_path(ep_obj)
- result = None is not location and ek.ek(os.path.isfile, location)
+ result = None is not location and os.path.isfile(location)
if location:
logger.log(u"Checking if " + location + " exists: " + str(result), logger.DEBUG)
return result
def _has_season_poster(self, show_obj, season):
- # type: (sickgear.tv.TVShow,int) -> AnyStr
+ # type: (sickgear.tv.TVShow,int) -> bool
location = self.get_season_poster_path(show_obj, season)
- result = None is not location and ek.ek(os.path.isfile, location)
+ result = None is not location and os.path.isfile(location)
if location:
logger.log(u"Checking if " + location + " exists: " + str(result), logger.DEBUG)
return result
def _has_season_banner(self, show_obj, season):
- # type: (sickgear.tv.TVShow,int) -> AnyStr
+ # type: (sickgear.tv.TVShow,int) -> bool
location = self.get_season_banner_path(show_obj, season)
- result = None is not location and ek.ek(os.path.isfile, location)
+ result = None is not location and os.path.isfile(location)
if location:
logger.log(u"Checking if " + location + " exists: " + str(result), logger.DEBUG)
return result
def _has_season_all_poster(self, show_obj):
- # type: (sickgear.tv.TVShow) -> AnyStr
- result = ek.ek(os.path.isfile, self.get_season_all_poster_path(show_obj))
+ # type: (sickgear.tv.TVShow) -> bool
+ result = os.path.isfile(self.get_season_all_poster_path(show_obj))
logger.log(u"Checking if " + self.get_season_all_poster_path(show_obj) + " exists: " + str(result),
logger.DEBUG)
return result
def _has_season_all_banner(self, show_obj):
- # type: (sickgear.tv.TVShow) -> AnyStr
- result = ek.ek(os.path.isfile, self.get_season_all_banner_path(show_obj))
+ # type: (sickgear.tv.TVShow) -> bool
+ result = os.path.isfile(self.get_season_all_banner_path(show_obj))
logger.log(u"Checking if " + self.get_season_all_banner_path(show_obj) + " exists: " + str(result),
logger.DEBUG)
return result
@@ -245,7 +243,7 @@ class GenericMetadata(object):
def get_show_file_path(self, show_obj):
# type: (sickgear.tv.TVShow) -> AnyStr
- return ek.ek(os.path.join, show_obj.location, self._show_metadata_filename)
+ return os.path.join(show_obj.location, self._show_metadata_filename)
def get_episode_file_path(self, ep_obj):
# type: (sickgear.tv.TVEpisode) -> AnyStr
@@ -253,15 +251,15 @@ class GenericMetadata(object):
def get_fanart_path(self, show_obj):
# type: (sickgear.tv.TVShow) -> AnyStr
- return ek.ek(os.path.join, show_obj.location, self.fanart_name)
+ return os.path.join(show_obj.location, self.fanart_name)
def get_poster_path(self, show_obj):
# type: (sickgear.tv.TVShow) -> AnyStr
- return ek.ek(os.path.join, show_obj.location, self.poster_name)
+ return os.path.join(show_obj.location, self.poster_name)
def get_banner_path(self, show_obj):
# type: (sickgear.tv.TVShow) -> AnyStr
- return ek.ek(os.path.join, show_obj.location, self.banner_name)
+ return os.path.join(show_obj.location, self.banner_name)
def get_episode_thumb_path(self, ep_obj):
# type: (sickgear.tv.TVEpisode) -> Optional[AnyStr]
@@ -269,7 +267,7 @@ class GenericMetadata(object):
Returns the path where the episode thumbnail should be stored.
ep_obj: a TVEpisode instance for which to create the thumbnail
"""
- if ek.ek(os.path.isfile, ep_obj.location):
+ if os.path.isfile(ep_obj.location):
tbn_filename = ep_obj.location.rpartition('.')
@@ -296,7 +294,7 @@ class GenericMetadata(object):
else:
season_poster_filename = 'season' + str(season).zfill(2)
- return ek.ek(os.path.join, show_obj.location, season_poster_filename + '-poster.jpg')
+ return os.path.join(show_obj.location, season_poster_filename + '-poster.jpg')
def get_season_banner_path(self, show_obj, season):
# type: (sickgear.tv.TVShow, int) -> AnyStr
@@ -314,15 +312,15 @@ class GenericMetadata(object):
else:
season_banner_filename = 'season' + str(season).zfill(2)
- return ek.ek(os.path.join, show_obj.location, season_banner_filename + '-banner.jpg')
+ return os.path.join(show_obj.location, season_banner_filename + '-banner.jpg')
def get_season_all_poster_path(self, show_obj):
# type: (sickgear.tv.TVShow) -> AnyStr
- return ek.ek(os.path.join, show_obj.location, self.season_all_poster_name)
+ return os.path.join(show_obj.location, self.season_all_poster_name)
def get_season_all_banner_path(self, show_obj):
# type: (sickgear.tv.TVShow) -> AnyStr
- return ek.ek(os.path.join, show_obj.location, self.season_all_banner_name)
+ return os.path.join(show_obj.location, self.season_all_banner_name)
def _show_data(self, show_obj):
# type: (sickgear.tv.TVShow) -> Optional[Union[bool, etree.Element]]
@@ -393,7 +391,7 @@ class GenericMetadata(object):
self.name, show_obj.unique_name))
nfo_file_path = self.get_show_file_path(show_obj)
- with ek.ek(io.open, nfo_file_path, 'r', encoding='utf8') as xmlFileObj:
+ with io.open(nfo_file_path, 'r', encoding='utf8') as xmlFileObj:
show_xml = etree.ElementTree(file=xmlFileObj)
tvid = show_xml.find('indexer')
@@ -821,7 +819,7 @@ class GenericMetadata(object):
"""
# don't bother overwriting it
- if not force and ek.ek(os.path.isfile, image_path):
+ if not force and os.path.isfile(image_path):
logger.log(u"Image already exists, not downloading", logger.DEBUG)
return False
@@ -829,17 +827,17 @@ class GenericMetadata(object):
logger.log(u"Unable to retrieve image, skipping", logger.WARNING)
return False
- image_dir = ek.ek(os.path.dirname, image_path)
+ image_dir = os.path.dirname(image_path)
try:
- if not ek.ek(os.path.isdir, image_dir):
+ if not os.path.isdir(image_dir):
logger.log(u"Metadata dir didn't exist, creating it at " + image_dir, logger.DEBUG)
- ek.ek(os.makedirs, image_dir)
+ os.makedirs(image_dir)
sg_helpers.chmod_as_parent(image_dir)
- outFile = ek.ek(open, image_path, 'wb')
- outFile.write(image_data)
- outFile.close()
+ out_file = open(image_path, 'wb')
+ out_file.write(image_data)
+ out_file.close()
sg_helpers.chmod_as_parent(image_path)
except IOError as e:
logger.log(
@@ -903,13 +901,13 @@ class GenericMetadata(object):
try:
alt_url = '%swww.%s%s' % re.findall(
- r'(https?://)(?:artworks\.)?(thetvdb\.[^/]+/banners/[^\d]+[^.]+)(?:_t)(.*)', _url)[0][0:3]
+ r'(https?://)(?:artworks\.)?(thetvdb\.[^/]+/banners/\D+[^.]+)_t(.*)', _url)[0][0:3]
if alt_url not in _urls[0]:
_urls[1].append(alt_url)
except (IndexError, Exception):
try:
alt_url = '%sartworks.%s_t%s' % re.findall(
- r'(https?://)(?:www\.)?(thetvdb\.[^/]+/banners/[^\d]+[^.]+)(.*)', _url)[0][0:3]
+ r'(https?://)(?:www\.)?(thetvdb\.[^/]+/banners/\D+[^.]+)(.*)', _url)[0][0:3]
if alt_url not in _urls[0]:
_urls[1].append(alt_url)
except (IndexError, Exception):
@@ -1010,7 +1008,7 @@ class GenericMetadata(object):
thumb_url = _de_dupe(thumb_url)
if not thumb_url:
thumb_url = img_url
- yield (img_url, thumb_url)
+ yield img_url, thumb_url
elif img_url:
yield img_url
@@ -1113,7 +1111,7 @@ class GenericMetadata(object):
return result
- def retrieveShowMetadata(self, folder):
+ def retrieve_show_metadata(self, folder):
# type: (AnyStr) -> Union[Tuple[int, int, AnyStr], Tuple[None, None, None]]
"""
Used only when mass adding Existing Shows,
@@ -1124,39 +1122,39 @@ class GenericMetadata(object):
empty_return = (None, None, None)
- metadata_path = ek.ek(os.path.join, folder, self._show_metadata_filename)
+ metadata_path = os.path.join(folder, self._show_metadata_filename)
- if not ek.ek(os.path.isdir, folder) or not ek.ek(os.path.isfile, metadata_path):
+ if not os.path.isdir(folder) or not os.path.isfile(metadata_path):
logger.log(u"Can't load the metadata file from " + repr(metadata_path) + ", it doesn't exist", logger.DEBUG)
return empty_return
logger.log(u"Loading show info from metadata file in " + folder, logger.DEBUG)
try:
- with ek.ek(io.open, metadata_path, 'r', encoding='utf8') as xmlFileObj:
- showXML = etree.ElementTree(file=xmlFileObj)
+ with io.open(metadata_path, 'r', encoding='utf8') as xmlFileObj:
+ show_xml = etree.ElementTree(file=xmlFileObj)
- if None is showXML.findtext('title') \
- or all(None is _f for _f in (showXML.find('//uniqueid[@type]'),
- showXML.findtext('tvdbid'),
- showXML.findtext('id'),
- showXML.findtext('indexer'))):
+ if None is show_xml.findtext('title') \
+ or all(None is _f for _f in (show_xml.find('//uniqueid[@type]'),
+ show_xml.findtext('tvdbid'),
+ show_xml.findtext('id'),
+ show_xml.findtext('indexer'))):
logger.log(u"Invalid info in tvshow.nfo (missing name or id):"
- + str(showXML.findtext('title')) + ' '
- + str(showXML.findtext('indexer')) + ' '
- + str(showXML.findtext('tvdbid')) + ' '
- + str(showXML.findtext('id')))
+ + str(show_xml.findtext('title')) + ' '
+ + str(show_xml.findtext('indexer')) + ' '
+ + str(show_xml.findtext('tvdbid')) + ' '
+ + str(show_xml.findtext('id')))
return empty_return
- name = showXML.findtext('title')
+ name = show_xml.findtext('title')
try:
- tvid = int(showXML.findtext('indexer'))
+ tvid = int(show_xml.findtext('indexer'))
except (BaseException, Exception):
tvid = None
# handle v2 format of .nfo file
- default_source = showXML.find('//uniqueid[@default="true"]')
+ default_source = show_xml.find('//uniqueid[@default="true"]')
if None is not default_source:
use_tvid = default_source.attrib.get('type') or tvid
if isinstance(use_tvid, string_types):
@@ -1166,17 +1164,17 @@ class GenericMetadata(object):
if use_tvid and None is not prodid:
return use_tvid, prodid, name
- prodid = showXML.find('//uniqueid[@type="tvdb"]')
+ prodid = show_xml.find('//uniqueid[@type="tvdb"]')
if None is not prodid:
prodid = int(prodid.text)
tvid = TVINFO_TVDB
- elif None is not showXML.findtext('tvdbid'):
- prodid = int(showXML.findtext('tvdbid'))
+ elif None is not show_xml.findtext('tvdbid'):
+ prodid = int(show_xml.findtext('tvdbid'))
tvid = TVINFO_TVDB
- elif None is not showXML.findtext('id'):
- prodid = int(showXML.findtext('id'))
+ elif None is not show_xml.findtext('id'):
+ prodid = int(show_xml.findtext('id'))
try:
- tvid = TVINFO_TVDB if [s for s in showXML.findall('.//*')
+ tvid = TVINFO_TVDB if [s for s in show_xml.findall('.//*')
if s.text and -1 != s.text.find('thetvdb.com')] else tvid
except (BaseException, Exception):
pass
diff --git a/sickgear/metadata/kodi.py b/sickgear/metadata/kodi.py
index 15f0e3cc..e679ebeb 100644
--- a/sickgear/metadata/kodi.py
+++ b/sickgear/metadata/kodi.py
@@ -25,8 +25,6 @@ import sg_helpers
from ..indexers.indexer_config import TVINFO_IMDB, TVINFO_TVDB
from lib.tvinfo_base.exceptions import *
import sickgear
-# noinspection PyPep8Naming
-import encodingKludge as ek
import exceptions_helper
from exceptions_helper import ex
from lxml_etree import etree
@@ -472,8 +470,8 @@ def remove_default_attr(*args, **kwargs):
if nfo_path:
# show
try:
- if ek.ek(os.path.isfile, nfo_path):
- with ek.ek(io.open, nfo_path, 'r', encoding='utf8') as xml_file_obj:
+ if os.path.isfile(nfo_path):
+ with io.open(nfo_path, 'r', encoding='utf8') as xml_file_obj:
xmltree = etree.ElementTree(file=xml_file_obj)
# remove default="" attributes
@@ -519,8 +517,8 @@ def remove_default_attr(*args, **kwargs):
try:
changed = False
nfo_path = kodi.get_episode_file_path(cur_ep_obj)
- if nfo_path and ek.ek(os.path.isfile, nfo_path):
- with ek.ek(io.open, nfo_path, 'r', encoding='utf8') as xml_file_obj:
+ if nfo_path and os.path.isfile(nfo_path):
+ with io.open(nfo_path, 'r', encoding='utf8') as xml_file_obj:
xmltree = etree.ElementTree(file=xml_file_obj)
# remove default="" attributes
@@ -573,8 +571,8 @@ def rebuild_nfo(*args, **kwargs):
try:
nfo_path = kodi.get_show_file_path(cur_show_obj)
- if nfo_path and ek.ek(os.path.isfile, nfo_path):
- with ek.ek(io.open, nfo_path, 'r', encoding='utf8') as xml_file_obj:
+ if nfo_path and os.path.isfile(nfo_path):
+ with io.open(nfo_path, 'r', encoding='utf8') as xml_file_obj:
xmltree = etree.ElementTree(file=xml_file_obj)
# check xml keys exist to validate file as type Kodi episode or tvshow .nfo
diff --git a/sickgear/metadata/mediabrowser.py b/sickgear/metadata/mediabrowser.py
index ad73b059..d3a2947a 100644
--- a/sickgear/metadata/mediabrowser.py
+++ b/sickgear/metadata/mediabrowser.py
@@ -24,8 +24,6 @@ from .. import logger
import sg_helpers
from lib.tvinfo_base.exceptions import *
import sickgear
-# noinspection PyPep8Naming
-import encodingKludge as ek
import exceptions_helper
from exceptions_helper import ex
from lxml_etree import etree
@@ -98,7 +96,7 @@ class MediaBrowserMetadata(generic.GenericMetadata):
self.eg_season_all_banner = "not supported" # type: AnyStr
# Override with empty methods for unsupported features
- def retrieveShowMetadata(self, folder):
+ def retrieve_show_metadata(self, folder):
# type: (AnyStr) -> Tuple[None, None, None]
# while show metadata is generated, it is not supported for our lookup
return None, None, None
@@ -120,10 +118,10 @@ class MediaBrowserMetadata(generic.GenericMetadata):
ep_obj: a TVEpisode object to get the path for
"""
- if ek.ek(os.path.isfile, ep_obj.location):
- xml_file_name = sg_helpers.replace_extension(ek.ek(os.path.basename, ep_obj.location), self._ep_nfo_extension)
- metadata_dir_name = ek.ek(os.path.join, ek.ek(os.path.dirname, ep_obj.location), 'metadata')
- xml_file_path = ek.ek(os.path.join, metadata_dir_name, xml_file_name)
+ if os.path.isfile(ep_obj.location):
+ xml_file_name = sg_helpers.replace_extension(os.path.basename(ep_obj.location), self._ep_nfo_extension)
+ metadata_dir_name = os.path.join(os.path.dirname(ep_obj.location), 'metadata')
+ xml_file_path = os.path.join(metadata_dir_name, xml_file_name)
else:
logger.log(u"Episode location doesn't exist: " + str(ep_obj.location), logger.DEBUG)
return ''
@@ -139,10 +137,10 @@ class MediaBrowserMetadata(generic.GenericMetadata):
ep_obj: a TVEpisode object to get the path from
"""
- if ek.ek(os.path.isfile, ep_obj.location):
- metadata_dir_name = ek.ek(os.path.join, ek.ek(os.path.dirname, ep_obj.location), 'metadata')
- tbn_file_name = sg_helpers.replace_extension(ek.ek(os.path.basename, ep_obj.location), 'jpg')
- return ek.ek(os.path.join, metadata_dir_name, tbn_file_name)
+ if os.path.isfile(ep_obj.location):
+ metadata_dir_name = os.path.join(os.path.dirname(ep_obj.location), 'metadata')
+ tbn_file_name = sg_helpers.replace_extension(os.path.basename(ep_obj.location), 'jpg')
+ return os.path.join(metadata_dir_name, tbn_file_name)
def get_season_poster_path(self, show_obj, season):
# type: (sickgear.tv.TVShow, int) -> Optional[AnyStr]
@@ -152,8 +150,7 @@ class MediaBrowserMetadata(generic.GenericMetadata):
If no season folder exists, None is returned
"""
- dir_list = [x for x in ek.ek(os.listdir, show_obj.location) if
- ek.ek(os.path.isdir, ek.ek(os.path.join, show_obj.location, x))]
+ dir_list = [x for x in os.listdir(show_obj.location) if os.path.isdir(os.path.join(show_obj.location, x))]
season_dir_regex = r'^Season\s+(\d+)$'
@@ -183,7 +180,7 @@ class MediaBrowserMetadata(generic.GenericMetadata):
logger.log(u"Using " + str(season_dir) + "/folder.jpg as season dir for season " + str(season), logger.DEBUG)
- return ek.ek(os.path.join, show_obj.location, season_dir, 'folder.jpg')
+ return os.path.join(show_obj.location, season_dir, 'folder.jpg')
def get_season_banner_path(self, show_obj, season):
# type: (sickgear.tv.TVShow, int) -> Optional[AnyStr]
@@ -193,8 +190,7 @@ class MediaBrowserMetadata(generic.GenericMetadata):
If no season folder exists, None is returned
"""
- dir_list = [x for x in ek.ek(os.listdir, show_obj.location) if
- ek.ek(os.path.isdir, ek.ek(os.path.join, show_obj.location, x))]
+ dir_list = [x for x in os.listdir(show_obj.location) if os.path.isdir(os.path.join(show_obj.location, x))]
season_dir_regex = r'^Season\s+(\d+)$'
@@ -224,7 +220,7 @@ class MediaBrowserMetadata(generic.GenericMetadata):
logger.log(u"Using " + str(season_dir) + "/banner.jpg as season dir for season " + str(season), logger.DEBUG)
- return ek.ek(os.path.join, show_obj.location, season_dir, 'banner.jpg')
+ return os.path.join(show_obj.location, season_dir, 'banner.jpg')
def _show_data(self, show_obj):
# type: (sickgear.tv.TVShow) -> Optional[Union[bool, etree.Element]]
diff --git a/sickgear/metadata/ps3.py b/sickgear/metadata/ps3.py
index 4e91cdb2..8941cbc8 100644
--- a/sickgear/metadata/ps3.py
+++ b/sickgear/metadata/ps3.py
@@ -17,8 +17,6 @@
import os
from . import generic
-# noinspection PyPep8Naming
-import encodingKludge as ek
import sickgear
# noinspection PyUnreachableCode
@@ -79,7 +77,7 @@ class PS3Metadata(generic.GenericMetadata):
self.eg_season_all_banner = "not supported" # type: AnyStr
# Override with empty methods for unsupported features
- def retrieveShowMetadata(self, folder):
+ def retrieve_show_metadata(self, folder):
# type: (AnyStr) -> Tuple[None, None, None]
# no show metadata generated, we abort this lookup function
return None, None, None
@@ -132,7 +130,7 @@ class PS3Metadata(generic.GenericMetadata):
ep_obj: a TVEpisode instance for which to create the thumbnail
"""
- if ek.ek(os.path.isfile, ep_obj.location):
+ if os.path.isfile(ep_obj.location):
tbn_filename = ep_obj.location + ".cover.jpg"
else:
return None
diff --git a/sickgear/metadata/tivo.py b/sickgear/metadata/tivo.py
index 4a3f78e9..eced781d 100644
--- a/sickgear/metadata/tivo.py
+++ b/sickgear/metadata/tivo.py
@@ -25,8 +25,6 @@ from .. import logger
import sg_helpers
from lib.tvinfo_base.exceptions import *
import sickgear
-# noinspection PyPep8Naming
-import encodingKludge as ek
import exceptions_helper
from exceptions_helper import ex
@@ -89,7 +87,7 @@ class TIVOMetadata(generic.GenericMetadata):
self.eg_season_all_banner = "not supported" # type: AnyStr
# Override with empty methods for unsupported features
- def retrieveShowMetadata(self, folder):
+ def retrieve_show_metadata(self, folder):
# type: (AnyStr) -> Tuple[None, None, None]
# no show metadata generated, we abort this lookup function
return None, None, None
@@ -155,10 +153,10 @@ class TIVOMetadata(generic.GenericMetadata):
ep_obj: a TVEpisode object to get the path for
"""
- if ek.ek(os.path.isfile, ep_obj.location):
- metadata_file_name = ek.ek(os.path.basename, ep_obj.location) + "." + self._ep_nfo_extension
- metadata_dir_name = ek.ek(os.path.join, ek.ek(os.path.dirname, ep_obj.location), '.meta')
- metadata_file_path = ek.ek(os.path.join, metadata_dir_name, metadata_file_name)
+ if os.path.isfile(ep_obj.location):
+ metadata_file_name = os.path.basename(ep_obj.location) + "." + self._ep_nfo_extension
+ metadata_dir_name = os.path.join(os.path.dirname(ep_obj.location), '.meta')
+ metadata_file_path = os.path.join(metadata_dir_name, metadata_file_name)
else:
logger.log(u"Episode location doesn't exist: " + str(ep_obj.location), logger.DEBUG)
return ''
@@ -335,17 +333,17 @@ class TIVOMetadata(generic.GenericMetadata):
return False
nfo_file_path = self.get_episode_file_path(ep_obj)
- nfo_file_dir = ek.ek(os.path.dirname, nfo_file_path)
+ nfo_file_dir = os.path.dirname(nfo_file_path)
try:
- if not ek.ek(os.path.isdir, nfo_file_dir):
+ if not os.path.isdir(nfo_file_dir):
logger.log(u"Metadata dir didn't exist, creating it at " + nfo_file_dir, logger.DEBUG)
- ek.ek(os.makedirs, nfo_file_dir)
+ os.makedirs(nfo_file_dir)
sg_helpers.chmod_as_parent(nfo_file_dir)
logger.log(u"Writing episode nfo file to " + nfo_file_path, logger.DEBUG)
- with ek.ek(open, nfo_file_path, 'w') as nfo_file:
+ with open(nfo_file_path, 'w') as nfo_file:
# Calling encode directly, b/c often descriptions have wonky characters.
nfo_file.write(data.encode("utf-8"))
diff --git a/sickgear/metadata/wdtv.py b/sickgear/metadata/wdtv.py
index 97ae9611..0864e43d 100644
--- a/sickgear/metadata/wdtv.py
+++ b/sickgear/metadata/wdtv.py
@@ -24,8 +24,6 @@ from .. import logger
import sg_helpers
from lib.tvinfo_base.exceptions import *
import sickgear
-# noinspection PyPep8Naming
-import encodingKludge as ek
import exceptions_helper
from exceptions_helper import ex
from lxml_etree import etree
@@ -92,7 +90,7 @@ class WDTVMetadata(generic.GenericMetadata):
self.eg_season_all_banner = "not supported" # type: AnyStr
# Override with empty methods for unsupported features
- def retrieveShowMetadata(self, folder):
+ def retrieve_show_metadata(self, folder):
# type: (AnyStr) -> Tuple[None, None, None]
# no show metadata generated, we abort this lookup function
return None, None, None
@@ -137,7 +135,7 @@ class WDTVMetadata(generic.GenericMetadata):
ep_obj: a TVEpisode instance for which to create the thumbnail
"""
- if ek.ek(os.path.isfile, ep_obj.location):
+ if os.path.isfile(ep_obj.location):
return sg_helpers.replace_extension(ep_obj.location, 'metathumb')
def get_season_poster_path(self, show_obj, season):
@@ -148,8 +146,7 @@ class WDTVMetadata(generic.GenericMetadata):
If no season folder exists, None is returned
"""
- dir_list = [x for x in ek.ek(os.listdir, show_obj.location) if
- ek.ek(os.path.isdir, ek.ek(os.path.join, show_obj.location, x))]
+ dir_list = [x for x in os.listdir(show_obj.location) if os.path.isdir(os.path.join(show_obj.location, x))]
season_dir_regex = r'^Season\s+(\d+)$'
@@ -176,7 +173,7 @@ class WDTVMetadata(generic.GenericMetadata):
logger.log(u"Using " + str(season_dir) + "/folder.jpg as season dir for season " + str(season), logger.DEBUG)
- return ek.ek(os.path.join, show_obj.location, season_dir, 'folder.jpg')
+ return os.path.join(show_obj.location, season_dir, 'folder.jpg')
def _ep_data(self, ep_obj):
# type: (sickgear.tv.TVEpisode) -> Optional[Union[bool, etree.Element]]
diff --git a/sickgear/metadata/xbmc.py b/sickgear/metadata/xbmc.py
index 99445335..ae5de5a3 100644
--- a/sickgear/metadata/xbmc.py
+++ b/sickgear/metadata/xbmc.py
@@ -20,8 +20,6 @@ import os
from . import generic, xbmc_12plus
import sg_helpers
import sickgear
-# noinspection PyPep8Naming
-import encodingKludge as ek
# noinspection PyUnreachableCode
if False:
@@ -104,7 +102,7 @@ class XBMCMetadata(xbmc_12plus.XBMC12PlusMetadata):
ep_obj: a TVEpisode instance for which to create the thumbnail
"""
- if ek.ek(os.path.isfile, ep_obj.location):
+ if os.path.isfile(ep_obj.location):
tbn_filename = sg_helpers.replace_extension(ep_obj.location, 'tbn')
else:
return None
@@ -127,7 +125,7 @@ class XBMCMetadata(xbmc_12plus.XBMC12PlusMetadata):
else:
season_poster_filename = 'season' + str(season).zfill(2)
- return ek.ek(os.path.join, show_obj.location, season_poster_filename + '.tbn')
+ return os.path.join(show_obj.location, season_poster_filename + '.tbn')
# present a standard "interface" from the module
diff --git a/sickgear/name_parser/parser.py b/sickgear/name_parser/parser.py
index 676a878d..8d63bb59 100644
--- a/sickgear/name_parser/parser.py
+++ b/sickgear/name_parser/parser.py
@@ -32,8 +32,6 @@ except ImportError:
regex = None
from . import regexes
-# noinspection PyPep8Naming
-import encodingKludge as ek
from exceptions_helper import ex
import sickgear
from .. import common, db, helpers, logger, scene_exceptions, scene_numbering
@@ -381,7 +379,7 @@ class NameParser(object):
season_number = int(ep_obj['seasonnumber'])
episode_numbers = [int(ep_obj['episodenumber'])]
- except BaseTVinfoEpisodenotfound as e:
+ except BaseTVinfoEpisodenotfound:
logger.warning(u'Unable to find episode with date %s for show %s, skipping' %
(best_result.air_date, show_obj.unique_name))
episode_numbers = []
@@ -581,7 +579,7 @@ class NameParser(object):
return cached
# break it into parts if there are any (dirname, file name, extension)
- dir_name, file_name = ek.ek(os.path.split, name)
+ dir_name, file_name = os.path.split(name)
if self.file_name:
base_file_name = helpers.remove_extension(file_name)
@@ -596,7 +594,7 @@ class NameParser(object):
file_name_result = self._parse_string(base_file_name)
# use only the direct parent dir
- dir_name = ek.ek(os.path.basename, dir_name)
+ dir_name = os.path.basename(dir_name)
# parse the dirname for extra info if needed
dir_name_result = self._parse_string(dir_name)
diff --git a/sickgear/naming.py b/sickgear/naming.py
index 0bddae7d..9a24e43c 100644
--- a/sickgear/naming.py
+++ b/sickgear/naming.py
@@ -22,9 +22,6 @@ from . import common, logger, tv
from .common import Quality, DOWNLOADED
from .name_parser.parser import NameParser
-# noinspection PyPep8Naming
-import encodingKludge as ek
-
# noinspection PyUnreachableCode
if False:
from typing import AnyStr, Dict, List
@@ -239,7 +236,7 @@ def validate_name(pattern, multi=None, anime_type=None, file_only=False, abd=Fal
new_name = u'%s.ext' % sample_ep_obj.formatted_filename(pattern, multi, anime_type)
new_path = sample_ep_obj.formatted_dir(pattern, multi)
if not file_only:
- new_name = ek.ek(os.path.join, new_path, new_name)
+ new_name = os.path.join(new_path, new_name)
if not new_name:
logger.log(u'Unable to create a name out of %s' % pattern, logger.DEBUG)
diff --git a/sickgear/network_timezones.py b/sickgear/network_timezones.py
index 9dda4d22..04c70aef 100644
--- a/sickgear/network_timezones.py
+++ b/sickgear/network_timezones.py
@@ -25,8 +25,6 @@ import sickgear
from . import db, helpers, logger
from sg_helpers import int_to_time
-# noinspection PyPep8Naming
-import encodingKludge as ek
from lib.dateutil import tz, zoneinfo
from lib.tzlocal import get_localzone
@@ -126,8 +124,8 @@ def get_utc():
pass
if isinstance(utc, datetime.tzinfo):
return utc
- tz_utc_file = ek.ek(os.path.join, ek.ek(os.path.dirname, zoneinfo.__file__), 'Greenwich')
- if ek.ek(os.path.isfile, tz_utc_file):
+ tz_utc_file = os.path.join(os.path.dirname(zoneinfo.__file__), 'Greenwich')
+ if os.path.isfile(tz_utc_file):
return tz.tzfile(tz_utc_file)
@@ -154,7 +152,7 @@ def _remove_old_zoneinfo():
"""
if None is not zoneinfo.ZONEFILENAME:
current_file = helpers.real_path(
- ek.ek(os.path.join, sickgear.ZONEINFO_DIR, ek.ek(os.path.basename, zoneinfo.ZONEFILENAME)))
+ os.path.join(sickgear.ZONEINFO_DIR, os.path.basename(zoneinfo.ZONEFILENAME)))
for entry in chain.from_iterable([scantree(helpers.real_path(_dir), include=r'\.tar\.gz$', filter_kind=False)
for _dir in (sickgear.ZONEINFO_DIR, )]): # type: DirEntry
if current_file != entry.path:
@@ -192,9 +190,9 @@ def _update_zoneinfo():
current_file = zoneinfo.ZONEFILENAME
if None is not current_file:
- current_file = ek.ek(os.path.basename, current_file)
- zonefile = helpers.real_path(ek.ek(os.path.join, sickgear.ZONEINFO_DIR, current_file))
- zonemetadata = None if not ek.ek(os.path.isfile, zonefile) else \
+ current_file = os.path.basename(current_file)
+ zonefile = helpers.real_path(os.path.join(sickgear.ZONEINFO_DIR, current_file))
+ zonemetadata = None if not os.path.isfile(zonefile) else \
zoneinfo.ZoneInfoFile(zoneinfo.getzoneinfofile_stream()).metadata
newtz_regex = re.search(r'(\d{4}[^.]+)', new_zoneinfo)
@@ -220,7 +218,7 @@ def _update_zoneinfo():
if not helpers.download_file(url_tar, zonefile_tmp):
return
- if not ek.ek(os.path.exists, zonefile_tmp):
+ if not os.path.exists(zonefile_tmp):
logger.log(u'Download of %s failed.' % zonefile_tmp, logger.ERROR)
return
@@ -233,7 +231,7 @@ def _update_zoneinfo():
if None is not current_file:
remove_file_perm(zonefile)
# rename downloaded file
- ek.ek(os.rename, zonefile_tmp, zonefile)
+ os.rename(zonefile_tmp, zonefile)
setattr(zoneinfo, '_CLASS_ZONE_INSTANCE', list())
tz.gettz.cache_clear()
from dateutil.zoneinfo import get_zonefile_instance
@@ -612,7 +610,6 @@ def get_episode_time(d, # type: int
return SGDatetime.from_timestamp(ep_timestamp, tzinfo=tzinfo, tz_aware=True, local_time=False)
except OverflowError:
logger.debug('Invalid timestamp: %s, using fallback' % ep_timestamp)
- ep_timestamp = None
ep_time = None
if isinstance(ep_airtime, integer_types):
diff --git a/sickgear/notifiers/__init__.py b/sickgear/notifiers/__init__.py
index 342e18e8..1b56c4f5 100644
--- a/sickgear/notifiers/__init__.py
+++ b/sickgear/notifiers/__init__.py
@@ -24,8 +24,6 @@ from . import emby, kodi, plex, xbmc, \
discord, emailnotify, gitter, libnotify, growl, prowl, slack, telegram, trakt
import sickgear
-# noinspection PyPep8Naming
-import encodingKludge as ek
from _23 import filter_iter, list_values
@@ -159,7 +157,7 @@ def notify_update_library(ep_obj, flush_q=False):
continue
shows.add(show_name)
else:
- parent_dir = re.sub(r'[/\\]+%s.*' % show_name, '', ek.ek(os.path.dirname, location))
+ parent_dir = re.sub(r'[/\\]+%s.*' % show_name, '', os.path.dirname(location))
parent_dir = re.sub(r'^(.{,2})[/\\]', '', parent_dir)
if parent_dir in locations:
continue
diff --git a/sickgear/notifiers/plex.py b/sickgear/notifiers/plex.py
index f9ad58cf..5eaf646c 100644
--- a/sickgear/notifiers/plex.py
+++ b/sickgear/notifiers/plex.py
@@ -18,7 +18,6 @@ import re
from .generic import Notifier
import sickgear
-from encodingKludge import fixStupidEncodings
from exceptions_helper import ex
from _23 import b64encodestring, decode_str, etree, filter_iter, list_values, unquote_plus, urlencode
@@ -73,7 +72,7 @@ class PLEXNotifier(Notifier):
return True
except (urllib.error.URLError, IOError) as e:
- self._log_warning(u'Couldn\'t contact Plex at ' + fixStupidEncodings(url) + ' ' + ex(e))
+ self._log_warning(u'Couldn\'t contact Plex at ' + url + ' ' + ex(e))
return False
@staticmethod
diff --git a/sickgear/notifiers/pytivo.py b/sickgear/notifiers/pytivo.py
index 9a512d3d..b05dddaa 100644
--- a/sickgear/notifiers/pytivo.py
+++ b/sickgear/notifiers/pytivo.py
@@ -18,8 +18,6 @@ import os
from .generic import BaseNotifier
import sickgear
-# noinspection PyPep8Naming
-import encodingKludge as ek
from exceptions_helper import ex
from _23 import urlencode
@@ -51,7 +49,7 @@ class PyTivoNotifier(BaseNotifier):
show_path = ep_obj.show_obj.location
show_name = ep_obj.show_obj.name
- root_show_and_season = ek.ek(os.path.dirname, ep_obj.location)
+ root_show_and_season = os.path.dirname(ep_obj.location)
abs_path = ep_obj.location
# Some show names have colons in them which are illegal in a path location, so strip them out.
diff --git a/sickgear/notifiers/synoindex.py b/sickgear/notifiers/synoindex.py
index 8e0c9fbf..6e4bd5a9 100644
--- a/sickgear/notifiers/synoindex.py
+++ b/sickgear/notifiers/synoindex.py
@@ -18,8 +18,6 @@
import os
from .generic import BaseNotifier
-# noinspection PyPep8Naming
-import encodingKludge as ek
from exceptions_helper import ex
from sg_helpers import cmdline_runner
@@ -35,7 +33,7 @@ class SynoIndexNotifier(BaseNotifier):
def _cmdline_run(self, synoindex_cmd):
self._log_debug(u'Executing command ' + str(synoindex_cmd))
- self._log_debug(u'Absolute path to command: ' + ek.ek(os.path.abspath, synoindex_cmd[0]))
+ self._log_debug(u'Absolute path to command: ' + os.path.abspath(synoindex_cmd[0]))
try:
output, err, exit_status = cmdline_runner(synoindex_cmd)
self._log_debug(u'Script result: %s' % output)
@@ -44,8 +42,7 @@ class SynoIndexNotifier(BaseNotifier):
def _move_object(self, old_path, new_path):
if self.is_enabled():
- self._cmdline_run(['/usr/syno/bin/synoindex', '-N', ek.ek(os.path.abspath, new_path),
- ek.ek(os.path.abspath, old_path)])
+ self._cmdline_run(['/usr/syno/bin/synoindex', '-N', os.path.abspath(new_path), os.path.abspath(old_path)])
def deleteFolder(self, cur_path):
self._make_object('-D', cur_path)
@@ -61,7 +58,7 @@ class SynoIndexNotifier(BaseNotifier):
def _make_object(self, cmd_arg, cur_path):
if self.is_enabled():
- self._cmdline_run(['/usr/syno/bin/synoindex', cmd_arg, ek.ek(os.path.abspath, cur_path)])
+ self._cmdline_run(['/usr/syno/bin/synoindex', cmd_arg, os.path.abspath(cur_path)])
def update_library(self, ep_obj=None, **kwargs):
self.addFile(ep_obj.location)
diff --git a/sickgear/notifiers/synologynotifier.py b/sickgear/notifiers/synologynotifier.py
index bc6a6d9b..1aacb2f8 100644
--- a/sickgear/notifiers/synologynotifier.py
+++ b/sickgear/notifiers/synologynotifier.py
@@ -18,8 +18,6 @@
import os
from .generic import Notifier
-# noinspection PyPep8Naming
-import encodingKludge as ek
from exceptions_helper import ex
from sg_helpers import cmdline_runner
@@ -30,7 +28,7 @@ class SynologyNotifier(Notifier):
synodsmnotify_cmd = ['/usr/syno/bin/synodsmnotify', '@administrators', title, body]
self._log(u'Executing command ' + str(synodsmnotify_cmd))
- self._log_debug(u'Absolute path to command: ' + ek.ek(os.path.abspath, synodsmnotify_cmd[0]))
+ self._log_debug(u'Absolute path to command: ' + os.path.abspath(synodsmnotify_cmd[0]))
try:
output, err, exit_status = cmdline_runner(synodsmnotify_cmd)
self._log_debug(u'Script result: %s' % output)
diff --git a/sickgear/notifiers/telegram.py b/sickgear/notifiers/telegram.py
index 6cd31518..f528864e 100644
--- a/sickgear/notifiers/telegram.py
+++ b/sickgear/notifiers/telegram.py
@@ -22,8 +22,6 @@ import re
from ..common import USER_AGENT
from .generic import Notifier
-# noinspection PyPep8Naming
-import encodingKludge as ek
from exceptions_helper import ex
import sickgear
from sickgear.image_cache import ImageCache
@@ -51,11 +49,11 @@ class TelegramNotifier(Notifier):
msg = re.sub('(?i) ?', ' ', msg)
if use_icon:
- image_path = ek.ek(os.path.join, sickgear.PROG_DIR, 'gui', 'slick', 'images', 'banner_thumb.jpg')
+ image_path = os.path.join(sickgear.PROG_DIR, 'gui', 'slick', 'images', 'banner_thumb.jpg')
if not self._testing:
show_obj = ep_obj.show_obj
banner_path = ImageCache().banner_thumb_path(show_obj.tvid, show_obj.prodid)
- if ek.ek(os.path.isfile, banner_path):
+ if os.path.isfile(banner_path):
image_path = banner_path
with open(image_path, 'rb') as f:
diff --git a/sickgear/notifiers/xbmc.py b/sickgear/notifiers/xbmc.py
index 3b18c51c..71b24718 100644
--- a/sickgear/notifiers/xbmc.py
+++ b/sickgear/notifiers/xbmc.py
@@ -20,7 +20,6 @@ import time
from .generic import Notifier
import sickgear
from exceptions_helper import ex
-from encodingKludge import fixStupidEncodings
from json_helper import json_dumps, json_load
from _23 import b64encodestring, decode_str, etree, quote, unquote, unquote_plus, urlencode
@@ -163,9 +162,9 @@ class XBMCNotifier(Notifier):
# if we have a password, use authentication
if password:
req.add_header('Authorization', 'Basic %s' % b64encodestring('%s:%s' % (username, password)))
- self._log_debug(u'Contacting (with auth header) via url: ' + fixStupidEncodings(url))
+ self._log_debug(u'Contacting (with auth header) via url: ' + url)
else:
- self._log_debug(u'Contacting via url: ' + fixStupidEncodings(url))
+ self._log_debug(u'Contacting via url: ' + url)
http_response_obj = urllib.request.urlopen(req) # PY2 http_response_obj has no `with` context manager
result = decode_str(http_response_obj.read(), sickgear.SYS_ENCODING)
@@ -175,7 +174,7 @@ class XBMCNotifier(Notifier):
return result
except (urllib.error.URLError, IOError) as e:
- self._log_warning(u'Couldn\'t contact HTTP at %s %s' % (fixStupidEncodings(url), ex(e)))
+ self._log_warning(u'Couldn\'t contact HTTP at %s %s' % (url, ex(e)))
return False
def _update_library_http(self, host=None, show_name=None):
@@ -303,9 +302,9 @@ class XBMCNotifier(Notifier):
# if we have a password, use authentication
if password:
req.add_header('Authorization', 'Basic %s' % b64encodestring('%s:%s' % (username, password)))
- self._log_debug(u'Contacting (with auth header) via url: ' + fixStupidEncodings(url))
+ self._log_debug(u'Contacting (with auth header) via url: ' + url)
else:
- self._log_debug(u'Contacting via url: ' + fixStupidEncodings(url))
+ self._log_debug(u'Contacting via url: ' + url)
try:
http_response_obj = urllib.request.urlopen(req) # PY2 http_response_obj has no `with` context manager
@@ -324,7 +323,7 @@ class XBMCNotifier(Notifier):
return False
except IOError as e:
- self._log_warning(u'Couldn\'t contact JSON API at ' + fixStupidEncodings(url) + ' ' + ex(e))
+ self._log_warning(u'Couldn\'t contact JSON API at ' + url + ' ' + ex(e))
return False
def _update_library_json(self, host=None, show_name=None):
diff --git a/sickgear/nzbSplitter.py b/sickgear/nzbSplitter.py
index d1447e8b..da179d7c 100644
--- a/sickgear/nzbSplitter.py
+++ b/sickgear/nzbSplitter.py
@@ -21,8 +21,6 @@ import re
from lxml_etree import etree
-# noinspection PyPep8Naming
-import encodingKludge as ek
from exceptions_helper import ex
import sickgear
@@ -112,9 +110,9 @@ def getSeasonNZBs(name, url_data, season):
cur_ep = match.group(1)
fn = name_extractor(cur_file.get('subject', ''))
if cur_ep == re.sub(r'\+\d+\.par2$', '', fn, flags=re.I):
- bn, ext = ek.ek(os.path.splitext, fn)
+ bn, ext = os.path.splitext(fn)
cur_ep = re.sub(r'\.(part\d+|vol\d+(\+\d+)?)$', '', bn, flags=re.I)
- bn, ext = ek.ek(os.path.splitext, cur_ep)
+ bn, ext = os.path.splitext(cur_ep)
if isinstance(ext, string_types) \
and re.search(r'^\.(nzb|r\d{2}|rar|7z|zip|par2|vol\d+|nfo|srt|txt|bat|sh|mkv|mp4|avi|wmv)$', ext,
flags=re.I):
@@ -155,7 +153,7 @@ def saveNZB(nzb_name, nzb_string):
:type nzb_string: AnyStr
"""
try:
- with ek.ek(open, nzb_name + '.nzb', 'w') as nzb_fh:
+ with open(nzb_name + '.nzb', 'w') as nzb_fh:
nzb_fh.write(nzb_string)
except EnvironmentError as e:
diff --git a/sickgear/piper.py b/sickgear/piper.py
index 6ac111e9..99ee0ee3 100644
--- a/sickgear/piper.py
+++ b/sickgear/piper.py
@@ -1,13 +1,5 @@
import sys
-# noinspection PyPep8Naming
-import encodingKludge as ek
-
-if ek.EXIT_BAD_ENCODING:
- print('Sorry, you MUST add the SickGear folder to the PYTHONPATH environment variable')
- print('or find another way to force Python to use %s for string encoding.' % ek.SYS_ENCODING)
- sys.exit(1)
-
# #################################
# Sanity check passed, can continue
# #################################
@@ -32,7 +24,7 @@ def is_pip_ok():
:return: True if pip is ok
"""
- pip_ok = '/' != ek.ek(os.path.expanduser, '~')
+ pip_ok = '/' != os.path.expanduser('~')
if pip_ok:
pip_version, _, _ = _get_pip_version()
if not pip_version:
@@ -115,7 +107,7 @@ def initial_requirements():
def extras_failed_filepath(data_dir):
- return ek.ek(os.path.join, data_dir, '.pip_req_spec_failed.txt')
+ return os.path.join(data_dir, '.pip_req_spec_failed.txt')
def load_ignorables(data_dir):
@@ -124,7 +116,7 @@ def load_ignorables(data_dir):
data = []
filepath = extras_failed_filepath(data_dir)
- if ek.ek(os.path.isfile, filepath):
+ if os.path.isfile(filepath):
try:
with io.open(filepath, 'r', encoding='UTF8') as fp:
data = fp.readlines()
@@ -194,7 +186,7 @@ def _check_pip_env(pip_outdated=False, reset_fails=False):
from sickgear import logger, PROG_DIR, DATA_DIR
for cur_reco_file in ['requirements.txt', 'recommended.txt']:
try:
- with io.open(ek.ek(os.path.join, PROG_DIR, cur_reco_file)) as fh:
+ with io.open(os.path.join(PROG_DIR, cur_reco_file)) as fh:
input_reco += ['%s\n' % line.strip() for line in fh] # must ensure EOL marker
except (BaseException, Exception):
pass
@@ -302,7 +294,7 @@ def pip_update(loading_msg, updates_todo, data_dir):
failed_lines = []
input_reco = None
- piper_path = ek.ek(os.path.join, data_dir, '.pip_req_spec_temp.txt')
+ piper_path = os.path.join(data_dir, '.pip_req_spec_temp.txt')
for cur_project_name, cur_data in iteritems(updates_todo):
msg = 'Installing package "%s"' % cur_project_name
if cur_data.get('info'):
@@ -339,7 +331,7 @@ def pip_update(loading_msg, updates_todo, data_dir):
if not parsed_name:
parsed_name = re.findall(r'(?sim)up-to-date\S+\s*(%s).*?\s\(([^)]+)\)$' % find_name, output)
parsed_name = ['' if not parsed_name else '-'.join(parsed_name[0])]
- pip_version = re.findall(r'%s-([\d.]+).*?' % find_name, ek.ek(os.path.basename, parsed_name[0]), re.I)[0]
+ pip_version = re.findall(r'%s-([\d.]+).*?' % find_name, os.path.basename(parsed_name[0]), re.I)[0]
except (BaseException, Exception):
pass
diff --git a/sickgear/postProcessor.py b/sickgear/postProcessor.py
index 5585fd3a..945f257b 100644
--- a/sickgear/postProcessor.py
+++ b/sickgear/postProcessor.py
@@ -22,8 +22,6 @@ import re
import stat
import threading
-# noinspection PyPep8Naming
-import encodingKludge as ek
import exceptions_helper
from exceptions_helper import ex
@@ -64,17 +62,16 @@ class PostProcessor(object):
nzb_name: The name of the NZB which resulted in this file being downloaded (optional)
"""
# absolute path to the folder that is being processed
- self.folder_path = long_path(ek.ek(os.path.dirname, long_path(
- ek.ek(os.path.abspath, long_path(file_path))))) # type: AnyStr
+ self.folder_path = long_path(os.path.dirname(long_path(os.path.abspath(long_path(file_path))))) # type: AnyStr
# full path to file
self.file_path = long_path(file_path) # type: AnyStr
# file name only
- self.file_name = ek.ek(os.path.basename, long_path(file_path)) # type: AnyStr
+ self.file_name = os.path.basename(long_path(file_path)) # type: AnyStr
# the name of the folder only
- self.folder_name = ek.ek(os.path.basename, self.folder_path) # type: AnyStr
+ self.folder_name = os.path.basename(self.folder_path) # type: AnyStr
# name of the NZB that resulted in this folder
self.nzb_name = nzb_name # type: AnyStr or None
@@ -112,8 +109,8 @@ class PostProcessor(object):
:param level: The log level to use (optional)
:type level: int
"""
- logger_msg = re.sub(r'(?i)<br(?:\s)*(?:/)*>\.*', '', message)
- logger_msg = re.sub('(?i)<a[^>]+>([^<]+)<[/]a>', r'\1', logger_msg)
+ logger_msg = re.sub(r'(?i)<br[\s/]+>\.*', '', message)
+ logger_msg = re.sub('(?i)<a[^>]+>([^<]+)</a>', r'\1', logger_msg)
logger.log(u'%s' % logger_msg, level)
self.log += message + '\n'
@@ -136,12 +133,12 @@ class PostProcessor(object):
return PostProcessor.DOESNT_EXIST
# if the new file exists, return the appropriate code depending on the size
- if ek.ek(os.path.isfile, existing_file):
+ if os.path.isfile(existing_file):
new_file = u'New file %s .. is ' % self.file_path
- if ek.ek(os.path.getsize, self.file_path) == ek.ek(os.path.getsize, existing_file):
+ if os.path.getsize(self.file_path) == os.path.getsize(existing_file):
self._log(u'%sthe same size as %s' % (new_file, existing_file), logger.DEBUG)
return PostProcessor.EXISTS_SAME
- elif ek.ek(os.path.getsize, self.file_path) < ek.ek(os.path.getsize, existing_file):
+ elif os.path.getsize(self.file_path) < os.path.getsize(existing_file):
self._log(u'%ssmaller than %s' % (new_file, existing_file), logger.DEBUG)
return PostProcessor.EXISTS_LARGER
else:
@@ -188,7 +185,7 @@ class PostProcessor(object):
base_name = re.sub(r'[\[\]*?]', r'[\g<0>]', base_name)
for meta_ext in ['', '-thumb', '.ext', '.ext.cover', '.metathumb']:
- for associated_file_path in ek.ek(glob.glob, '%s%s.*' % (base_name, meta_ext)):
+ for associated_file_path in glob.glob('%s%s.*' % (base_name, meta_ext)):
# only add associated to list
if associated_file_path == file_path:
continue
@@ -201,7 +198,7 @@ class PostProcessor(object):
if re.search(r'(^.+\.(rar|r\d+)$)', associated_file_path):
continue
- if ek.ek(os.path.isfile, associated_file_path):
+ if os.path.isfile(associated_file_path):
file_path_list.append(associated_file_path)
return file_path_list
@@ -230,13 +227,13 @@ class PostProcessor(object):
# delete the file and any other files which we want to delete
for cur_file in file_list:
- if ek.ek(os.path.isfile, cur_file):
+ if os.path.isfile(cur_file):
# check first the read-only attribute
- file_attribute = ek.ek(os.stat, cur_file)[0]
+ file_attribute = os.stat(cur_file)[0]
if not file_attribute & stat.S_IWRITE:
# File is read-only, so make it writeable
try:
- ek.ek(os.chmod, cur_file, stat.S_IWRITE)
+ os.chmod(cur_file, stat.S_IWRITE)
self._log(u'Changed read only permissions to writeable to delete file %s'
% cur_file, logger.DEBUG)
except (BaseException, Exception):
@@ -245,7 +242,7 @@ class PostProcessor(object):
removal_type = helpers.remove_file(cur_file, log_level=logger.DEBUG)
- if True is not ek.ek(os.path.isfile, cur_file):
+ if True is not os.path.isfile(cur_file):
self._log(u'%s file %s' % (removal_type, cur_file), logger.DEBUG)
# do the library update for synoindex
@@ -294,7 +291,7 @@ class PostProcessor(object):
# deal with all files
for cur_file_path in file_list:
- cur_file_name = ek.ek(os.path.basename, cur_file_path)
+ cur_file_name = os.path.basename(cur_file_path)
# get the extension without .
cur_extension = cur_file_path[old_base_name_length + 1:]
@@ -304,10 +301,10 @@ class PostProcessor(object):
cur_extension = 'nfo-orig'
# check if file have subtitles language
- if ek.ek(os.path.splitext, cur_extension)[1][1:] in common.subtitleExtensions:
- cur_lang = ek.ek(os.path.splitext, cur_extension)[0]
+ if os.path.splitext(cur_extension)[1][1:] in common.subtitleExtensions:
+ cur_lang = os.path.splitext(cur_extension)[0]
if cur_lang in sickgear.SUBTITLES_LANGUAGES:
- cur_extension = cur_lang + ek.ek(os.path.splitext, cur_extension)[1]
+ cur_extension = cur_lang + os.path.splitext(cur_extension)[1]
# If new base name then convert name
if new_base_name:
@@ -317,15 +314,15 @@ class PostProcessor(object):
new_file_name = helpers.replace_extension(cur_file_name, cur_extension)
if sickgear.SUBTITLES_DIR and cur_extension in common.subtitleExtensions:
- subs_new_path = ek.ek(os.path.join, new_path, sickgear.SUBTITLES_DIR)
+ subs_new_path = os.path.join(new_path, sickgear.SUBTITLES_DIR)
dir_exists = helpers.make_dir(subs_new_path)
if not dir_exists:
logger.log(u'Unable to create subtitles folder ' + subs_new_path, logger.ERROR)
else:
helpers.chmod_as_parent(subs_new_path)
- new_file_path = ek.ek(os.path.join, subs_new_path, new_file_name)
+ new_file_path = os.path.join(subs_new_path, new_file_name)
else:
- new_file_path = ek.ek(os.path.join, new_path, new_file_name)
+ new_file_path = os.path.join(new_path, new_file_name)
if None is action_tmpl:
action(cur_file_path, new_file_path)
@@ -598,7 +595,7 @@ class PostProcessor(object):
and parse_result.release_group:
if not self.release_name:
- self.release_name = helpers.remove_extension(ek.ek(os.path.basename, parse_result.original_name))
+ self.release_name = helpers.remove_extension(os.path.basename(parse_result.original_name))
else:
logger.log(u'Parse result not sufficient (all following have to be set). will not save release name',
@@ -824,7 +821,7 @@ class PostProcessor(object):
try:
script_cmd = [piece for piece in re.split("( |\\\".*?\\\"|'.*?')", script_name) if piece.strip()]
- script_cmd[0] = ek.ek(os.path.abspath, script_cmd[0])
+ script_cmd[0] = os.path.abspath(script_cmd[0])
self._log(u'Absolute path to script: ' + script_cmd[0], logger.DEBUG)
if PY2:
@@ -883,7 +880,7 @@ class PostProcessor(object):
"""
try:
- existing_show_path = ek.ek(os.path.isdir, ep_obj.show.location)
+ existing_show_path = os.path.isdir(ep_obj.show.location)
except exceptions_helper.ShowDirNotFoundException:
existing_show_path = False
@@ -1062,10 +1059,10 @@ class PostProcessor(object):
:rtype: bool
"""
- self._log(u'Processing... %s%s' % (ek.ek(os.path.relpath, self.file_path, self.folder_path),
+ self._log(u'Processing... %s%s' % (os.path.relpath(self.file_path, self.folder_path),
(u' .. from nzb %s' % self.nzb_name, u'')[None is self.nzb_name]))
- if ek.ek(os.path.isdir, self.file_path):
+ if os.path.isdir(self.file_path):
self._log(u'Expecting file %s .. is actually a directory, skipping' % self.file_path)
return False
@@ -1110,9 +1107,9 @@ class PostProcessor(object):
try:
self._delete(cur_ep_obj.location, associated_files=True)
- # clean up any left over folders
+ # clean up any leftover folders
if cur_ep_obj.location:
- helpers.delete_empty_folders(ek.ek(os.path.dirname, cur_ep_obj.location),
+ helpers.delete_empty_folders(os.path.dirname(cur_ep_obj.location),
keep_dir=ep_obj.show_obj.location)
except (OSError, IOError):
raise exceptions_helper.PostProcessingFailed(u'Unable to delete existing files')
@@ -1122,10 +1119,10 @@ class PostProcessor(object):
# cur_ep_obj.status = common.Quality.compositeStatus(common.SNATCHED, new_ep_quality)
# if the show directory doesn't exist then make it if allowed
- if not ek.ek(os.path.isdir, ep_obj.show_obj.location) and sickgear.CREATE_MISSING_SHOW_DIRS:
+ if not os.path.isdir(ep_obj.show_obj.location) and sickgear.CREATE_MISSING_SHOW_DIRS:
self._log(u'Show directory does not exist, creating it', logger.DEBUG)
try:
- ek.ek(os.mkdir, ep_obj.show_obj.location)
+ os.mkdir(ep_obj.show_obj.location)
# do the library update for synoindex
notifiers.NotifierFactory().get('SYNOINDEX').addFolder(ep_obj.show_obj.location)
except (OSError, IOError):
@@ -1138,7 +1135,7 @@ class PostProcessor(object):
self._change_ep_objs(show_obj, season_number, episode_numbers, new_ep_quality)
# Just want to keep this consistent for failed handling right now
- release_name = show_name_helpers.determineReleaseName(self.folder_path, self.nzb_name)
+ release_name = show_name_helpers.determine_release_name(self.folder_path, self.nzb_name)
if None is release_name:
self._log(u'No snatched release found in history', logger.WARNING)
elif sickgear.USE_FAILED_DOWNLOADS:
@@ -1147,8 +1144,8 @@ class PostProcessor(object):
# find the destination folder
try:
proper_path = ep_obj.proper_path()
- proper_absolute_path = ek.ek(os.path.join, ep_obj.show_obj.location, proper_path)
- dest_path = ek.ek(os.path.dirname, proper_absolute_path)
+ proper_absolute_path = os.path.join(ep_obj.show_obj.location, proper_path)
+ dest_path = os.path.dirname(proper_absolute_path)
except exceptions_helper.ShowDirNotFoundException:
raise exceptions_helper.PostProcessingFailed(
@@ -1162,7 +1159,7 @@ class PostProcessor(object):
# figure out the base name of the resulting episode file
if sickgear.RENAME_EPISODES:
- new_base_name = ek.ek(os.path.basename, proper_path)
+ new_base_name = os.path.basename(proper_path)
new_file_name = new_base_name + '.' + self.file_name.rpartition('.')[-1]
else:
@@ -1224,7 +1221,7 @@ class PostProcessor(object):
sql_l = []
for cur_ep_obj in [ep_obj] + ep_obj.related_ep_obj:
with cur_ep_obj.lock:
- cur_ep_obj.location = ek.ek(os.path.join, dest_path, new_file_name)
+ cur_ep_obj.location = os.path.join(dest_path, new_file_name)
if dosubs:
cur_ep_obj.download_subtitles(force=True)
# set file modify stamp to show airdate
diff --git a/sickgear/processTV.py b/sickgear/processTV.py
index fa5e14da..18a7a0dc 100644
--- a/sickgear/processTV.py
+++ b/sickgear/processTV.py
@@ -24,8 +24,6 @@ import shutil
import stat
import sys
-# noinspection PyPep8Naming
-import encodingKludge as ek
import exceptions_helper
from exceptions_helper import ex, MultipleShowObjectsException
from json_helper import json_dumps, json_loads
@@ -79,8 +77,8 @@ class ProcessTVShow(object):
if None is not text:
self._output.append(text)
if self.webhandler:
- logger_msg = re.sub(r'(?i)<br(?:\s)*(?:/)*>', '\n', text)
- logger_msg = re.sub('(?i)<a[^>]+>([^<]+)<[/]a>', r'\1', logger_msg)
+ logger_msg = re.sub(r'(?i)<br[\s/]+>', '\n', text)
+ logger_msg = re.sub('(?i)<a[^>]+>([^<]+)</a>', r'\1', logger_msg)
self.webhandler('%s%s' % (logger_msg, u'\n'))
def _log_helper(self, message, log_level=logger.DEBUG):
@@ -91,8 +89,8 @@ class ProcessTVShow(object):
:param log_level: log level
:type log_level: int
"""
- logger_msg = re.sub(r'(?i)<br(?:\s)*(?:/)*>\.*', '', message)
- logger_msg = re.sub('(?i)<a[^>]+>([^<]+)<[/]a>', r'\1', logger_msg)
+ logger_msg = re.sub(r'(?i)<br[\s/]+>\.*', '', message)
+ logger_msg = re.sub('(?i)<a[^>]+>([^<]+)</a>', r'\1', logger_msg)
logger.log(u'%s' % logger_msg, log_level)
self._buffer(message)
return
@@ -124,7 +122,7 @@ class ProcessTVShow(object):
:rtype: bool
"""
# check if it's a folder
- if not ek.ek(os.path.isdir, folder):
+ if not os.path.isdir(folder):
return False
# make sure it isn't TV_DOWNLOAD_DIR
@@ -142,7 +140,7 @@ class ProcessTVShow(object):
logger.log(u'Warning: unable to delete folder: %s: %s' % (folder, ex(e)), logger.WARNING)
return False
- if ek.ek(os.path.isdir, folder):
+ if os.path.isdir(folder):
logger.log(u'Warning: unable to delete folder: %s' % folder, logger.WARNING)
return False
@@ -164,24 +162,24 @@ class ProcessTVShow(object):
# Delete all file not needed
for cur_file in notwanted_files:
- cur_file_path = ek.ek(os.path.join, process_path, cur_file)
+ cur_file_path = os.path.join(process_path, cur_file)
- if not ek.ek(os.path.isfile, cur_file_path):
+ if not os.path.isfile(cur_file_path):
continue # Prevent error when a notwantedfiles is an associated files
# check first the read-only attribute
- file_attribute = ek.ek(os.stat, cur_file_path)[0]
+ file_attribute = os.stat(cur_file_path)[0]
if not file_attribute & stat.S_IWRITE:
# File is read-only, so make it writeable
self._log_helper(u'Changing ReadOnly flag for file ' + cur_file)
try:
- ek.ek(os.chmod, cur_file_path, stat.S_IWRITE)
+ os.chmod(cur_file_path, stat.S_IWRITE)
except OSError as e:
self._log_helper(u'Cannot change permissions of %s: %s' % (cur_file_path, ex(e)))
removal_type = helpers.remove_file(cur_file_path)
- if ek.ek(os.path.isfile, cur_file_path):
+ if os.path.isfile(cur_file_path):
result = False
else:
self._log_helper(u'%s file %s' % (removal_type, cur_file))
@@ -254,7 +252,7 @@ class ProcessTVShow(object):
video_size = 0
for cur_video_file in videofiles:
try:
- cur_video_size = ek.ek(os.path.getsize, ek.ek(os.path.join, path, cur_video_file))
+ cur_video_size = os.path.getsize(os.path.join(path, cur_video_file))
except (BaseException, Exception):
continue
@@ -263,7 +261,7 @@ class ProcessTVShow(object):
video_pick = cur_video_file
if video_pick:
- vid_filename = ek.ek(os.path.splitext, video_pick)[0]
+ vid_filename = os.path.splitext(video_pick)[0]
# check if filename is garbage, disregard it
if re.search(r'^[a-zA-Z0-9]+$', vid_filename):
return
@@ -315,21 +313,20 @@ class ProcessTVShow(object):
"""
# if they passed us a real directory then assume it's the one we want
- if dir_name and ek.ek(os.path.isdir, long_path(dir_name)):
- dir_name = long_path(ek.ek(os.path.realpath, long_path(dir_name)))
+ if dir_name and os.path.isdir(long_path(dir_name)):
+ dir_name = long_path(os.path.realpath(long_path(dir_name)))
# if the client and SickGear are not on the same machine translate the directory in a network directory
- elif dir_name and sickgear.TV_DOWNLOAD_DIR and ek.ek(os.path.isdir, sickgear.TV_DOWNLOAD_DIR)\
- and ek.ek(os.path.normpath, dir_name) != ek.ek(os.path.normpath, sickgear.TV_DOWNLOAD_DIR):
- dir_name = ek.ek(os.path.join, sickgear.TV_DOWNLOAD_DIR,
- ek.ek(os.path.abspath, dir_name).split(os.path.sep)[-1])
+ elif dir_name and sickgear.TV_DOWNLOAD_DIR and os.path.isdir(sickgear.TV_DOWNLOAD_DIR)\
+ and os.path.normpath(dir_name) != os.path.normpath(sickgear.TV_DOWNLOAD_DIR):
+ dir_name = os.path.join(sickgear.TV_DOWNLOAD_DIR, os.path.abspath(dir_name).split(os.path.sep)[-1])
self._log_helper(u'SickGear PP Config, completed TV downloads folder: ' + sickgear.TV_DOWNLOAD_DIR)
if dir_name:
self._log_helper(u'Checking folder... ' + dir_name)
# if we didn't find a real directory then process "failed" or just quit
- if not dir_name or not ek.ek(os.path.isdir, dir_name):
+ if not dir_name or not os.path.isdir(dir_name):
if nzb_name and failed:
self._process_failed(dir_name, nzb_name, show_obj=show_obj)
else:
@@ -351,7 +348,7 @@ class ProcessTVShow(object):
show_obj = self.check_name(re.sub(r'\.(nzb|torrent)$', '', nzb_name, flags=re.I))
if None is show_obj and dir_name:
- show_obj = self.check_name(ek.ek(os.path.basename, dir_name))
+ show_obj = self.check_name(os.path.basename(dir_name))
path, dirs, files = self._get_path_dir_files(dir_name, nzb_name, pp_type)
@@ -376,12 +373,12 @@ class ProcessTVShow(object):
self._process_failed(dir_name, nzb_name, show_obj=show_obj)
self.update_history_tab()
return self.result
- rar_content = [x for x in rar_content if not helpers.is_link(ek.ek(os.path.join, path, x))]
+ rar_content = [x for x in rar_content if not helpers.is_link(os.path.join(path, x))]
path, dirs, files = self._get_path_dir_files(dir_name, nzb_name, pp_type)
- files = [x for x in files if not helpers.is_link(ek.ek(os.path.join, path, x))]
+ files = [x for x in files if not helpers.is_link(os.path.join(path, x))]
video_files = filter_list(helpers.has_media_ext, files)
video_in_rar = filter_list(helpers.has_media_ext, rar_content)
- work_files += [ek.ek(os.path.join, path, item) for item in rar_content]
+ work_files += [os.path.join(path, item) for item in rar_content]
if 0 < len(files):
self._log_helper(u'Process file%s: %s' % (helpers.maybe_plural(files), str(files)))
@@ -408,7 +405,7 @@ class ProcessTVShow(object):
if None is show_obj:
soh = self.check_video_filenames(path, video_in_rar)
self._process_media(path, video_in_rar, nzb_name, 'move', force, force_replace, show_obj=soh)
- self._delete_files(path, [ek.ek(os.path.relpath, item, path) for item in work_files], force=True)
+ self._delete_files(path, [os.path.relpath(item, path) for item in work_files], force=True)
video_batch = set(video_files) - set(video_in_rar)
else:
video_batch = video_files
@@ -418,7 +415,7 @@ class ProcessTVShow(object):
video_pick = ['']
video_size = 0
for cur_video_file in video_batch:
- cur_video_size = ek.ek(os.path.getsize, ek.ek(os.path.join, path, cur_video_file))
+ cur_video_size = os.path.getsize(os.path.join(path, cur_video_file))
if 0 == video_size or cur_video_size > video_size:
video_size = cur_video_size
video_pick = [cur_video_file]
@@ -439,7 +436,7 @@ class ProcessTVShow(object):
# self._set_process_success(reset=True)
- for walk_path, walk_dir, files in ek.ek(os.walk, ek.ek(os.path.join, path, directory), topdown=False):
+ for walk_path, walk_dir, files in os.walk(os.path.join(path, directory), topdown=False):
if sickgear.POSTPONE_IF_SYNC_FILES and any(filter_iter(helpers.is_sync_file, files)):
self._log_helper(u'Found temporary sync files, skipping post process', logger.ERROR)
@@ -452,17 +449,17 @@ class ProcessTVShow(object):
continue
# Ignore any symlinks at this stage to avoid the potential for unraring a symlinked archive
- files = [x for x in files if not helpers.is_link(ek.ek(os.path.join, walk_path, x))]
+ files = [x for x in files if not helpers.is_link(os.path.join(walk_path, x))]
rar_files, rarfile_history = self.unused_archives(
walk_path, filter_list(helpers.is_first_rar_volume, files), pp_type, process_method,
rarfile_history)
rar_content = self._unrar(walk_path, rar_files, force)
- work_files += [ek.ek(os.path.join, walk_path, item) for item in rar_content]
+ work_files += [os.path.join(walk_path, item) for item in rar_content]
if self.fail_detected:
self._process_failed(dir_name, nzb_name, show_obj=self.show_obj_helper(show_obj, directory))
continue
- rar_content = [x for x in rar_content if not helpers.is_link(ek.ek(os.path.join, walk_path, x))]
+ rar_content = [x for x in rar_content if not helpers.is_link(os.path.join(walk_path, x))]
files = list(set(files + rar_content))
video_files = filter_list(helpers.has_media_ext, files)
video_in_rar = filter_list(helpers.has_media_ext, rar_content)
@@ -483,7 +480,7 @@ class ProcessTVShow(object):
video_pick = ['']
video_size = 0
for cur_video_file in video_batch:
- cur_video_size = ek.ek(os.path.getsize, ek.ek(os.path.join, walk_path, cur_video_file))
+ cur_video_size = os.path.getsize(os.path.join(walk_path, cur_video_file))
if 0 == video_size or cur_video_size > video_size:
video_size = cur_video_size
@@ -512,14 +509,12 @@ class ProcessTVShow(object):
self._delete_files(walk_path, notwanted_files)
if 'move' == process_method \
- and ek.ek(os.path.normpath, sickgear.TV_DOWNLOAD_DIR) != ek.ek(os.path.normpath,
- walk_path):
+ and os.path.normpath(sickgear.TV_DOWNLOAD_DIR) != os.path.normpath(walk_path):
self._delete_folder(walk_path, check_empty=False)
if 'copy' == process_method and work_files:
- self._delete_files(path, [ek.ek(os.path.relpath, item, path) for item in work_files], force=True)
- for f in sorted(list(set([ek.ek(os.path.dirname, item) for item in work_files]) - {path}),
- key=len, reverse=True):
+ self._delete_files(path, [os.path.relpath(item, path) for item in work_files], force=True)
+ for f in sorted(list(set([os.path.dirname(item) for item in work_files]) - {path}), key=len, reverse=True):
self._delete_folder(f)
def _bottom_line(text, log_level=logger.DEBUG):
@@ -561,7 +556,7 @@ class ProcessTVShow(object):
if ('auto' == pp_type and sickgear.PROCESS_AUTOMATICALLY
and 'copy' == process_method and sickgear.UNPACK):
- archive_history_file = ek.ek(os.path.join, sickgear.DATA_DIR, 'archive_history.txt')
+ archive_history_file = os.path.join(sickgear.DATA_DIR, 'archive_history.txt')
if not archive_history:
try:
@@ -572,10 +567,10 @@ class ProcessTVShow(object):
init_history_cnt = len(archive_history)
- archive_history = {k_arc: v for k_arc, v in iteritems(archive_history) if ek.ek(os.path.isfile, k_arc)}
+ archive_history = {k_arc: v for k_arc, v in iteritems(archive_history) if os.path.isfile(k_arc)}
- unused_files = list(set([ek.ek(os.path.join, path, x) for x in archives]) - set(iterkeys(archive_history)))
- archives = [ek.ek(os.path.basename, x) for x in unused_files]
+ unused_files = list(set([os.path.join(path, x) for x in archives]) - set(iterkeys(archive_history)))
+ archives = [os.path.basename(x) for x in unused_files]
if unused_files:
for f in unused_files:
archive_history.setdefault(f, int(timestamp_near(datetime.datetime.utcnow())))
@@ -607,18 +602,18 @@ class ProcessTVShow(object):
"""
self._log_helper(u'Processing sub dir: ' + dir_name)
- if ek.ek(os.path.basename, dir_name).startswith('_FAILED_'):
+ if os.path.basename(dir_name).startswith('_FAILED_'):
self._log_helper(u'The directory name indicates it failed to extract.')
failed = True
- elif ek.ek(os.path.basename, dir_name).startswith('_UNDERSIZED_'):
+ elif os.path.basename(dir_name).startswith('_UNDERSIZED_'):
self._log_helper(u'The directory name indicates that it was previously rejected for being undersized.')
failed = True
- elif ek.ek(os.path.basename, dir_name).upper().startswith('_UNPACK'):
+ elif os.path.basename(dir_name).upper().startswith('_UNPACK'):
self._log_helper(u'The directory name indicates that this release is in the process of being unpacked.')
return False
if failed:
- self._process_failed(ek.ek(os.path.join, path, dir_name), nzb_name_original, show_obj=show_obj)
+ self._process_failed(os.path.join(path, dir_name), nzb_name_original, show_obj=show_obj)
return False
if helpers.is_hidden_folder(dir_name):
@@ -630,8 +625,8 @@ class ProcessTVShow(object):
sql_result = my_db.select('SELECT * FROM tv_shows')
for cur_result in sql_result:
- if dir_name.lower().startswith(ek.ek(os.path.realpath, cur_result['location']).lower() + os.sep)\
- or dir_name.lower() == ek.ek(os.path.realpath, cur_result['location']).lower():
+ if dir_name.lower().startswith(os.path.realpath(cur_result['location']).lower() + os.sep) \
+ or dir_name.lower() == os.path.realpath(cur_result['location']).lower():
self._log_helper(
u'Found an episode that has already been moved to its show dir, skipping',
logger.ERROR)
@@ -641,7 +636,7 @@ class ProcessTVShow(object):
all_files = []
all_dirs = []
process_path = None
- for process_path, process_dir, fileList in ek.ek(os.walk, ek.ek(os.path.join, path, dir_name), topdown=False):
+ for process_path, process_dir, fileList in os.walk(os.path.join(path, dir_name), topdown=False):
all_dirs += process_dir
all_files += fileList
@@ -688,7 +683,7 @@ class ProcessTVShow(object):
unpacked_files = []
if 'win32' == sys.platform:
- rarfile.UNRAR_TOOL = ek.ek(os.path.join, sickgear.PROG_DIR, 'lib', 'rarfile', 'UnRAR.exe')
+ rarfile.UNRAR_TOOL = os.path.join(sickgear.PROG_DIR, 'lib', 'rarfile', 'UnRAR.exe')
if sickgear.UNPACK and rar_files:
@@ -699,7 +694,7 @@ class ProcessTVShow(object):
self._log_helper(u'Unpacking archive: ' + archive)
try:
- rar_handle = rarfile.RarFile(ek.ek(os.path.join, path, archive))
+ rar_handle = rarfile.RarFile(os.path.join(path, archive))
except (BaseException, Exception):
self._log_helper(u'Failed to open archive: %s' % archive, logger.ERROR)
self._set_process_success(False)
@@ -707,7 +702,7 @@ class ProcessTVShow(object):
try:
# Skip extraction if any file in archive has previously been extracted
skip_file = False
- for file_in_archive in [ek.ek(os.path.basename, x.filename)
+ for file_in_archive in [os.path.basename(x.filename)
for x in rar_handle.infolist() if not x.is_dir()]:
if self._already_postprocessed(path, file_in_archive, force):
self._log_helper(
@@ -721,8 +716,7 @@ class ProcessTVShow(object):
raise rarfile.PasswordRequired
rar_handle.extractall(path=path)
- rar_content = [ek.ek(os.path.normpath, x.filename)
- for x in rar_handle.infolist() if not x.is_dir()]
+ rar_content = [os.path.normpath(x.filename) for x in rar_handle.infolist() if not x.is_dir()]
renamed = self.cleanup_names(path, rar_content)
cur_unpacked = rar_content if not renamed else \
(list(set(rar_content) - set(iterkeys(renamed))) + list_values(renamed))
@@ -744,7 +738,7 @@ class ProcessTVShow(object):
# check for passworded rar's
for archive in rar_files:
try:
- rar_handle = rarfile.RarFile(ek.ek(os.path.join, path, archive))
+ rar_handle = rarfile.RarFile(os.path.join(path, archive))
except (BaseException, Exception):
self._log_helper(u'Failed to open archive: %s' % archive, logger.ERROR)
continue
@@ -773,7 +767,7 @@ class ProcessTVShow(object):
old_name = None
new_name = None
params = {
- 'base_name': ek.ek(os.path.basename, directory),
+ 'base_name': os.path.basename(directory),
'reverse_pattern': re.compile('|'.join([
r'\.\d{2}e\d{2}s\.', r'\.p0(?:63|27|612)\.', r'\.[pi](?:084|675|0801)\.', r'\b[45]62[xh]\.',
r'\.yarulb\.', r'\.vtd[hp]\.', r'\.(?:ld[.-]?)?bew\.', r'\.pir.?(?:shv|dov|dvd|bew|db|rb)\.',
@@ -797,9 +791,9 @@ class ProcessTVShow(object):
for cur_filename in _filenames:
- file_name, file_extension = ek.ek(os.path.splitext, cur_filename)
- file_path = ek.ek(os.path.join, _dirpath, cur_filename)
- dir_name = ek.ek(os.path.dirname, file_path)
+ file_name, file_extension = os.path.splitext(cur_filename)
+ file_path = os.path.join(_dirpath, cur_filename)
+ dir_name = os.path.dirname(file_path)
if None is not reverse_pattern.search(file_name):
na_parts = season_pattern.search(file_name)
@@ -817,32 +811,32 @@ class ProcessTVShow(object):
new_filename = file_name[::-1]
logger.log('Reversing base filename "%s" to "%s"' % (file_name, new_filename))
try:
- ek.ek(os.rename, file_path, ek.ek(os.path.join, _dirpath, new_filename + file_extension))
- is_renamed[ek.ek(os.path.relpath, file_path, directory)] = ek.ek(
- os.path.relpath, new_filename + file_extension, directory)
+ os.rename(file_path, os.path.join(_dirpath, new_filename + file_extension))
+ is_renamed[os.path.relpath(file_path, directory)] = \
+ os.path.relpath(new_filename + file_extension, directory)
except OSError as _e:
logger.log('Error unable to rename file "%s" because %s' % (cur_filename, ex(_e)), logger.ERROR)
elif helpers.has_media_ext(cur_filename) and \
None is not garbage_name.search(file_name) and None is not media_pattern.search(base_name):
_num_videos += 1
_old_name = file_path
- _new_name = ek.ek(os.path.join, dir_name, '%s%s' % (base_name, file_extension))
+ _new_name = os.path.join(dir_name, '%s%s' % (base_name, file_extension))
return is_renamed, _num_videos, _old_name, _new_name
if files:
is_renamed, num_videos, old_name, new_name = renamer(
directory, files, num_videos, old_name, new_name, **params)
else:
- for cur_dirpath, void, cur_filenames in ek.ek(os.walk, directory):
+ for cur_dirpath, void, cur_filenames in os.walk(directory):
is_renamed, num_videos, old_name, new_name = renamer(
cur_dirpath, cur_filenames, num_videos, old_name, new_name, **params)
if all([not is_renamed, 1 == num_videos, old_name, new_name]):
- try_name = ek.ek(os.path.basename, new_name)
- logger.log('Renaming file "%s" using dirname as "%s"' % (ek.ek(os.path.basename, old_name), try_name))
+ try_name = os.path.basename(new_name)
+ logger.log('Renaming file "%s" using dirname as "%s"' % (os.path.basename(old_name), try_name))
try:
- ek.ek(os.rename, old_name, new_name)
- is_renamed[ek.ek(os.path.relpath, old_name, directory)] = ek.ek(os.path.relpath, new_name, directory)
+ os.rename(old_name, new_name)
+ is_renamed[os.path.relpath(old_name, directory)] = os.path.relpath(new_name, directory)
except OSError as e:
logger.log('Error unable to rename file "%s" because %s' % (old_name, ex(e)), logger.ERROR)
@@ -859,11 +853,11 @@ class ProcessTVShow(object):
result = False
chunks = {}
matcher = re.compile(r'\.[0-9]+$')
- for dirpath, void, filenames in ek.ek(os.walk, directory):
+ for dirpath, void, filenames in os.walk(directory):
for filename in filenames:
if None is not matcher.search(filename):
- maybe_chunk = ek.ek(os.path.join, dirpath, filename)
- base_filepath, ext = ek.ek(os.path.splitext, maybe_chunk)
+ maybe_chunk = os.path.join(dirpath, filename)
+ base_filepath, ext = os.path.splitext(maybe_chunk)
if base_filepath not in chunks:
chunks[base_filepath] = []
chunks[base_filepath].append(maybe_chunk)
@@ -874,22 +868,22 @@ class ProcessTVShow(object):
for base_filepath in chunks:
chunks[base_filepath].sort()
chunk_set = chunks[base_filepath]
- if ek.ek(os.path.isfile, base_filepath):
- base_filesize = ek.ek(os.path.getsize, base_filepath)
- chunk_sizes = [ek.ek(os.path.getsize, x) for x in chunk_set]
+ if os.path.isfile(base_filepath):
+ base_filesize = os.path.getsize(base_filepath)
+ chunk_sizes = [os.path.getsize(x) for x in chunk_set]
largest_chunk = max(chunk_sizes)
if largest_chunk >= base_filesize:
outfile = '%s.001' % base_filepath
if outfile not in chunk_set:
try:
- ek.ek(os.rename, base_filepath, outfile)
+ os.rename(base_filepath, outfile)
except OSError:
logger.log('Error unable to rename file %s' % base_filepath, logger.ERROR)
return result
chunk_set.append(outfile)
chunk_set.sort()
else:
- del_dir, del_file = ek.ek(os.path.split, base_filepath)
+ del_dir, del_file = os.path.split(base_filepath)
if not self._delete_files(del_dir, [del_file], force=True):
return result
else:
@@ -1048,7 +1042,7 @@ class ProcessTVShow(object):
self._set_process_success(False)
continue
- cur_video_file_path = ek.ek(os.path.join, process_path, cur_video_file)
+ cur_video_file_path = os.path.join(process_path, cur_video_file)
parent = self.find_parent(cur_video_file_path)
if parent:
@@ -1097,16 +1091,16 @@ class ProcessTVShow(object):
if dir_name == sickgear.TV_DOWNLOAD_DIR and not nzb_name or 'manual' == pp_type:
# Scheduled Media Process Active
# Get at first all the subdir in the dir_name
- for path, dirs, files in ek.ek(os.walk, dir_name):
- files = [x for x in files if not helpers.is_link(ek.ek(os.path.join, path, x))]
+ for path, dirs, files in os.walk(dir_name):
+ files = [x for x in files if not helpers.is_link(os.path.join(path, x))]
break
else:
- path, dirs = ek.ek(os.path.split, dir_name) # Script Media Process
+ path, dirs = os.path.split(dir_name) # Script Media Process
if None is not nzb_name and not nzb_name.endswith('.nzb') and \
- ek.ek(os.path.isfile, ek.ek(os.path.join, dir_name, nzb_name)):
+ os.path.isfile(os.path.join(dir_name, nzb_name)):
# For single torrent file without directory
dirs = []
- files = [ek.ek(os.path.join, dir_name, nzb_name)]
+ files = [os.path.join(dir_name, nzb_name)]
else:
dirs = [dirs]
files = []
diff --git a/sickgear/properFinder.py b/sickgear/properFinder.py
index b27f9c78..9e26b98c 100644
--- a/sickgear/properFinder.py
+++ b/sickgear/properFinder.py
@@ -21,8 +21,6 @@ import re
import threading
import traceback
-# noinspection PyPep8Naming
-import encodingKludge as ek
from exceptions_helper import ex, MultipleShowObjectsException, AuthException
import sickgear
@@ -466,7 +464,7 @@ def _get_proper_list(aired_since_shows, # type: datetime.datetime
for hitem in history_results:
# if the result exists in history already we need to skip it
if clean_proper_name == _generic_name(helpers.remove_non_release_groups(
- ek.ek(os.path.basename, hitem['resource']))):
+ os.path.basename(hitem['resource']))):
is_same = True
break
if is_same:
diff --git a/sickgear/providers/filesharingtalk.py b/sickgear/providers/filesharingtalk.py
index 9890f9ac..e97a69ec 100644
--- a/sickgear/providers/filesharingtalk.py
+++ b/sickgear/providers/filesharingtalk.py
@@ -311,7 +311,7 @@ class FSTProvider(generic.NZBProvider):
:return: list of search strings
:rtype: List[AnyStr]
"""
- return [x for x in show_name_helpers.makeSceneSeasonSearchString(self.show_obj, ep_obj)]
+ return [x for x in show_name_helpers.make_scene_season_search_string(self.show_obj, ep_obj)]
def _episode_strings(self, ep_obj):
"""
@@ -321,7 +321,7 @@ class FSTProvider(generic.NZBProvider):
:return: list of search strings
:rtype: List[AnyStr]
"""
- return [x for x in show_name_helpers.makeSceneSearchString(self.show_obj, ep_obj)]
+ return [x for x in show_name_helpers.make_scene_search_string(self.show_obj, ep_obj)]
@staticmethod
def ui_string(key=None):
diff --git a/sickgear/providers/generic.py b/sickgear/providers/generic.py
index 43da77f6..aea3e471 100644
--- a/sickgear/providers/generic.py
+++ b/sickgear/providers/generic.py
@@ -29,9 +29,7 @@ import threading
import socket
import zlib
-# noinspection PyPep8Naming
-import encodingKludge as ek
-from exceptions_helper import SickBeardException, AuthException, ex
+from exceptions_helper import SickGearException, AuthException, ex
import sickgear
from .. import classes, db, helpers, logger, tvcache
@@ -60,7 +58,7 @@ if False:
from typing import Any, AnyStr, Callable, Dict, List, Match, Optional, Tuple, Union
-class HaltParseException(SickBeardException):
+class HaltParseException(SickGearException):
"""Something requires the current processing to abort"""
@@ -653,8 +651,7 @@ class GenericProvider(object):
:return:
"""
for name in ['%s.%s' % (self.get_id(), image_ext) for image_ext in ['png', 'gif', 'jpg']]:
- if ek.ek(os.path.isfile,
- ek.ek(os.path.join, sickgear.PROG_DIR, 'gui', sickgear.GUI_NAME, 'images', 'providers', name)):
+ if os.path.isfile(os.path.join(sickgear.PROG_DIR, 'gui', sickgear.GUI_NAME, 'images', 'providers', name)):
return name
return '%s.png' % ('newznab', default_name[0])[any(default_name)]
@@ -838,11 +835,11 @@ class GenericProvider(object):
cache_dir = sickgear.CACHE_DIR or helpers.get_system_temp_dir()
base_name = '%s.%s' % (re.sub('.%s$' % self.providerType, '', helpers.sanitize_filename(result.name)),
self.providerType)
- final_file = ek.ek(os.path.join, final_dir, base_name)
+ final_file = os.path.join(final_dir, base_name)
cached = result.cache_filepath
- if cached and ek.ek(os.path.isfile, cached):
- base_name = ek.ek(os.path.basename, cached)
- cache_file = ek.ek(os.path.join, cache_dir, base_name)
+ if cached and os.path.isfile(cached):
+ base_name = os.path.basename(cached)
+ cache_file = os.path.join(cache_dir, base_name)
self.session.headers['Referer'] = url
if cached or helpers.download_file(url, cache_file, session=self.session, allow_redirects='/it' not in url,
@@ -870,7 +867,7 @@ class GenericProvider(object):
if not saved and 'magnet' == link_type:
logger.log(u'All torrent cache servers failed to return a downloadable result', logger.DEBUG)
- final_file = ek.ek(os.path.join, final_dir, '%s.%s' % (helpers.sanitize_filename(result.name), link_type))
+ final_file = os.path.join(final_dir, '%s.%s' % (helpers.sanitize_filename(result.name), link_type))
try:
with open(final_file, 'wb') as fp:
fp.write(decode_bytes(result.url))
@@ -1880,7 +1877,7 @@ class TorrentProvider(GenericProvider):
seen_attr = 'PROVIDER_SEEN'
if obf and self.__module__ not in getattr(sickgear, seen_attr, []):
file_path = '%s.py' % os.path.join(sickgear.PROG_DIR, *self.__module__.split('.'))
- if ek.ek(os.path.isfile, file_path):
+ if os.path.isfile(file_path):
with open(file_path, 'rb') as file_hd:
c = bytearray(codecs.encode(decode_bytes(str(zlib.crc32(file_hd.read()))), 'hex_codec'))
@@ -1996,7 +1993,7 @@ class TorrentProvider(GenericProvider):
if 2012691328 == s + zlib.crc32(decode_bytes(('.%s' % parsed.netloc).split('.')[-2])):
is_valid = False
file_name = '%s.py' % os.path.join(sickgear.PROG_DIR, *self.__module__.split('.'))
- if ek.ek(os.path.isfile, file_name):
+ if os.path.isfile(file_name):
with open(file_name, 'rb') as file_hd:
is_valid = s + zlib.crc32(file_hd.read()) in (1661931498, 472149389)
return is_valid
diff --git a/sickgear/providers/omgwtfnzbs.py b/sickgear/providers/omgwtfnzbs.py
index ac2bf7e9..054dfad9 100644
--- a/sickgear/providers/omgwtfnzbs.py
+++ b/sickgear/providers/omgwtfnzbs.py
@@ -109,7 +109,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
:return: list of search strings
:rtype: List[AnyStr]
"""
- return [x for x in show_name_helpers.makeSceneSeasonSearchString(self.show_obj, ep_obj)]
+ return [x for x in show_name_helpers.make_scene_season_search_string(self.show_obj, ep_obj)]
def _episode_strings(self, ep_obj):
"""
@@ -119,7 +119,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
:return: list of search strings
:rtype: List[AnyStr]
"""
- return [x for x in show_name_helpers.makeSceneSearchString(self.show_obj, ep_obj)]
+ return [x for x in show_name_helpers.make_scene_search_string(self.show_obj, ep_obj)]
def _title_and_url(self, item):
"""
diff --git a/sickgear/providers/tokyotoshokan.py b/sickgear/providers/tokyotoshokan.py
index 8396fd9e..39592d61 100644
--- a/sickgear/providers/tokyotoshokan.py
+++ b/sickgear/providers/tokyotoshokan.py
@@ -98,10 +98,10 @@ class TokyoToshokanProvider(generic.TorrentProvider):
return results
def _season_strings(self, *args, **kwargs):
- return [{'Season': show_name_helpers.makeSceneSeasonSearchString(self.show_obj, *args)}]
+ return [{'Season': show_name_helpers.make_scene_season_search_string(self.show_obj, *args)}]
def _episode_strings(self, *args, **kwargs):
- return [{'Episode': show_name_helpers.makeSceneSearchString(self.show_obj, *args)}]
+ return [{'Episode': show_name_helpers.make_scene_search_string(self.show_obj, *args)}]
class TokyoToshokanCache(tvcache.TVCache):
diff --git a/sickgear/scene_exceptions.py b/sickgear/scene_exceptions.py
index cb8b47eb..a9fa0afa 100644
--- a/sickgear/scene_exceptions.py
+++ b/sickgear/scene_exceptions.py
@@ -25,8 +25,6 @@ import threading
import traceback
import sickgear
-# noinspection PyPep8Naming
-import encodingKludge as ek
from exceptions_helper import ex
from json_helper import json_load
from . import db, helpers, logger, name_cache
@@ -387,22 +385,22 @@ def _custom_exceptions_fetcher():
src_id = 'GHSG'
logger.log(u'Checking to update custom alternatives from %s' % src_id)
- dirpath = ek.ek(os.path.join, sickgear.CACHE_DIR, 'alts')
- tmppath = ek.ek(os.path.join, dirpath, 'tmp')
- file_rar = ek.ek(os.path.join, tmppath, 'alt.rar')
- file_cache = ek.ek(os.path.join, dirpath, 'alt.json')
+ dirpath = os.path.join(sickgear.CACHE_DIR, 'alts')
+ tmppath = os.path.join(dirpath, 'tmp')
+ file_rar = os.path.join(tmppath, 'alt.rar')
+ file_cache = os.path.join(dirpath, 'alt.json')
iv = 30 * 60 # min interval to fetch updates
refresh = should_refresh(src_id, iv)
- fetch_data = not ek.ek(os.path.isfile, file_cache) or (not int(os.environ.get('NO_ALT_GET', 0)) and refresh)
+ fetch_data = not os.path.isfile(file_cache) or (not int(os.environ.get('NO_ALT_GET', 0)) and refresh)
if fetch_data:
- if ek.ek(os.path.exists, tmppath):
+ if os.path.exists(tmppath):
helpers.remove_file(tmppath, tree=True)
helpers.make_path(tmppath)
helpers.download_file(r'https://github.com/SickGear/sickgear.altdata/raw/main/alt.rar', file_rar)
rar_handle = None
if 'win32' == sys.platform:
- rarfile.UNRAR_TOOL = ek.ek(os.path.join, sickgear.PROG_DIR, 'lib', 'rarfile', 'UnRAR.exe')
+ rarfile.UNRAR_TOOL = os.path.join(sickgear.PROG_DIR, 'lib', 'rarfile', 'UnRAR.exe')
try:
rar_handle = rarfile.RarFile(file_rar)
rar_handle.extractall(path=dirpath, pwd='sickgear_alt')
@@ -418,7 +416,7 @@ def _custom_exceptions_fetcher():
if refresh:
set_last_refresh(src_id)
- if not fetch_data and not ek.ek(os.path.isfile, file_cache):
+ if not fetch_data and not os.path.isfile(file_cache):
logger.debug(u'Unable to fetch custom exceptions, skipped: %s' % file_rar)
return custom_exception_dict, cnt_updated_numbers, should_refresh(src_id, iv, remaining=True)
@@ -516,7 +514,7 @@ def _xem_exceptions_fetcher():
for tvid in [i for i in sickgear.TVInfoAPI().sources if 'xem_origin' in sickgear.TVInfoAPI(i).config]:
logger.log(u'Checking for XEM scene exception updates for %s' % sickgear.TVInfoAPI(tvid).name)
- url = 'http://thexem.info/map/allNames?origin=%s%s&seasonNumbers=1'\
+ url = 'https://thexem.info/map/allNames?origin=%s%s&seasonNumbers=1'\
% (sickgear.TVInfoAPI(tvid).config['xem_origin'], ('&language=us', '')['xem' == xem_list])
parsed_json = helpers.get_url(url, parse_json=True, timeout=90)
@@ -551,7 +549,7 @@ def _xem_get_ids(infosrc_name, xem_origin):
"""
xem_ids = []
- url = 'http://thexem.info/map/havemap?origin=%s' % xem_origin
+ url = 'https://thexem.info/map/havemap?origin=%s' % xem_origin
task = 'Fetching show ids with%s xem scene mapping%s for origin'
logger.log(u'%s %s' % (task % ('', 's'), infosrc_name))
diff --git a/sickgear/search.py b/sickgear/search.py
index 7c26e8a8..ac2cde64 100644
--- a/sickgear/search.py
+++ b/sickgear/search.py
@@ -22,8 +22,6 @@ import re
import threading
import traceback
-# noinspection PyPep8Naming
-import encodingKludge as ek
import exceptions_helper
from exceptions_helper import ex
from sg_helpers import write_file
@@ -65,7 +63,7 @@ def _download_result(result):
elif 'nzbdata' == result.resultType:
# get the final file path to the nzb
- file_name = ek.ek(os.path.join, sickgear.NZB_DIR, u'%s.nzb' % result.name)
+ file_name = os.path.join(sickgear.NZB_DIR, u'%s.nzb' % result.name)
logger.log(u'Saving NZB to %s' % file_name)
@@ -768,7 +766,7 @@ def cache_torrent_file(
):
# type: (...) -> Optional[TorrentSearchResult]
- cache_file = ek.ek(os.path.join, sickgear.CACHE_DIR or helpers.get_system_temp_dir(),
+ cache_file = os.path.join(sickgear.CACHE_DIR or helpers.get_system_temp_dir(),
'%s.torrent' % (helpers.sanitize_filename(search_result.name)))
if not helpers.download_file(
diff --git a/sickgear/show_name_helpers.py b/sickgear/show_name_helpers.py
index b1397df1..a18e5878 100644
--- a/sickgear/show_name_helpers.py
+++ b/sickgear/show_name_helpers.py
@@ -19,8 +19,6 @@ import os
import copy
import re
-# noinspection PyPep8Naming
-import encodingKludge as ek
from exceptions_helper import ex
import sickgear
@@ -233,19 +231,21 @@ def get_show_names_all_possible(show_obj, season=-1, scenify=True, spacer='.', f
:param season: season
:param scenify:
:param spacer: spacer
+ :param force_anime:
:return:
"""
- show_names = list(set(allPossibleShowNames(show_obj, season=season, force_anime=force_anime))) # type: List[AnyStr]
+ show_names = list(set(
+ all_possible_show_names(show_obj, season=season, force_anime=force_anime))) # type: List[AnyStr]
if scenify:
show_names = map_list(sanitize_scene_name, show_names)
return url_encode(show_names, spacer)
-def makeSceneSeasonSearchString(show_obj, # type: sickgear.tv.TVShow
- ep_obj, # type: sickgear.tv.TVEpisode
- ignore_allowlist=False, # type: bool
- extra_search_type=None
- ): # type: (...) -> List[AnyStr]
+def make_scene_season_search_string(show_obj, # type: sickgear.tv.TVShow
+ ep_obj, # type: sickgear.tv.TVEpisode
+ ignore_allowlist=False, # type: bool
+ extra_search_type=None
+ ): # type: (...) -> List[AnyStr]
"""
:param show_obj: show object
@@ -258,34 +258,34 @@ def makeSceneSeasonSearchString(show_obj, # type: sickgear.tv.TVShow
numseasons = 0
# the search string for air by date shows is just
- seasonStrings = [str(ep_obj.airdate).split('-')[0]]
+ season_strings = [str(ep_obj.airdate).split('-')[0]]
elif show_obj.is_anime:
numseasons = 0
ep_obj_list = show_obj.get_all_episodes(ep_obj.season)
# get show qualities
- anyQualities, bestQualities = common.Quality.splitQuality(show_obj.quality)
+ any_qualities, best_qualities = common.Quality.splitQuality(show_obj.quality)
# compile a list of all the episode numbers we need in this 'season'
- seasonStrings = []
+ season_strings = []
for episode in ep_obj_list:
# get quality of the episode
- curCompositeStatus = episode.status
- curStatus, curQuality = common.Quality.splitCompositeStatus(curCompositeStatus)
+ cur_composite_status = episode.status
+ cur_status, cur_quality = common.Quality.splitCompositeStatus(cur_composite_status)
- if bestQualities:
- highestBestQuality = max(bestQualities)
+ if best_qualities:
+ highest_best_quality = max(best_qualities)
else:
- highestBestQuality = 0
+ highest_best_quality = 0
# if we need a better one then add it to the list of episodes to fetch
- if (curStatus in (
+ if (cur_status in (
common.DOWNLOADED,
- common.SNATCHED) and curQuality < highestBestQuality) or curStatus == common.WANTED:
+ common.SNATCHED) and cur_quality < highest_best_quality) or cur_status == common.WANTED:
ab_number = episode.scene_absolute_number
if 0 < ab_number:
- seasonStrings.append("%02d" % ab_number)
+ season_strings.append("%02d" % ab_number)
else:
my_db = db.DBConnection()
@@ -297,7 +297,7 @@ def makeSceneSeasonSearchString(show_obj, # type: sickgear.tv.TVShow
[show_obj.tvid, show_obj.prodid])
numseasons = int(sql_result[0][0])
- seasonStrings = ["S%02d" % int(ep_obj.scene_season)]
+ season_strings = ["S%02d" % int(ep_obj.scene_season)]
show_names = get_show_names_all_possible(show_obj, ep_obj.scene_season)
@@ -312,7 +312,7 @@ def makeSceneSeasonSearchString(show_obj, # type: sickgear.tv.TVShow
to_return.append(cur_name)
# for providers that don't allow multiple searches in one request we only search for Sxx style stuff
else:
- for cur_season in seasonStrings:
+ for cur_season in season_strings:
if not ignore_allowlist and show_obj.is_anime \
and None is not show_obj.release_groups and show_obj.release_groups.allowlist:
for keyword in show_obj.release_groups.allowlist:
@@ -324,10 +324,10 @@ def makeSceneSeasonSearchString(show_obj, # type: sickgear.tv.TVShow
return to_return
-def makeSceneSearchString(show_obj, # type: sickgear.tv.TVShow
- ep_obj, # type: sickgear.tv.TVEpisode
- ignore_allowlist=False # type: bool
- ): # type: (...) -> List[AnyStr]
+def make_scene_search_string(show_obj, # type: sickgear.tv.TVShow
+ ep_obj, # type: sickgear.tv.TVEpisode
+ ignore_allowlist=False # type: bool
+ ): # type: (...) -> List[AnyStr]
"""
:param show_obj: show object
@@ -374,7 +374,7 @@ def makeSceneSearchString(show_obj, # type: sickgear.tv.TVShow
return to_return
-def allPossibleShowNames(show_obj, season=-1, force_anime=False):
+def all_possible_show_names(show_obj, season=-1, force_anime=False):
# type: (sickgear.tv.TVShow, int, bool) -> List[AnyStr]
"""
Figures out every possible variation of the name for a particular show. Includes TVDB name, TVRage name,
@@ -382,45 +382,48 @@ def allPossibleShowNames(show_obj, season=-1, force_anime=False):
:param show_obj: a TVShow object that we should get the names of
:param season: season
+ :param force_anime:
:return: a list of all the possible show names
"""
- showNames = get_scene_exceptions(show_obj.tvid, show_obj.prodid, season=season)[:]
- if not showNames: # if we dont have any season specific exceptions fallback to generic exceptions
+ show_names = get_scene_exceptions(show_obj.tvid, show_obj.prodid, season=season)[:]
+ if not show_names: # if we dont have any season specific exceptions fallback to generic exceptions
season = -1
- showNames = get_scene_exceptions(show_obj.tvid, show_obj.prodid, season=season)[:]
+ show_names = get_scene_exceptions(show_obj.tvid, show_obj.prodid, season=season)[:]
if -1 == season:
- showNames.append(show_obj.name)
+ show_names.append(show_obj.name)
if not show_obj.is_anime and not force_anime:
- newShowNames = []
+ new_show_names = []
country_list = common.countryList
country_list.update(dict(zip(itervalues(common.countryList), iterkeys(common.countryList))))
- for curName in set(showNames):
- if not curName:
+ for cur_name in set(show_names):
+ if not cur_name:
continue
# if we have "Show Name Australia" or "Show Name (Australia)" this will add "Show Name (AU)" for
# any countries defined in common.countryList
# (and vice versa)
- for curCountry in country_list:
- if curName.endswith(' ' + curCountry):
- newShowNames.append(curName.replace(' ' + curCountry, ' (' + country_list[curCountry] + ')'))
- elif curName.endswith(' (' + curCountry + ')'):
- newShowNames.append(curName.replace(' (' + curCountry + ')', ' (' + country_list[curCountry] + ')'))
+ for cur_country in country_list:
+ if cur_name.endswith(' ' + cur_country):
+ new_show_names.append(cur_name.replace(' ' + cur_country,
+ ' (' + country_list[cur_country] + ')'))
+ elif cur_name.endswith(' (' + cur_country + ')'):
+ new_show_names.append(cur_name.replace(' (' + cur_country + ')',
+ ' (' + country_list[cur_country] + ')'))
# if we have "Show Name (2013)" this will strip the (2013) show year from the show name
# newShowNames.append(re.sub('\(\d{4}\)','',curName))
- showNames += newShowNames
+ show_names += new_show_names
- return showNames
+ return show_names
-def determineReleaseName(dir_name=None, nzb_name=None):
+def determine_release_name(dir_name=None, nzb_name=None):
# type: (AnyStr, AnyStr) -> Union[AnyStr, None]
- """Determine a release name from an nzb and/or folder name
+ """Determine a release name from an nzb and/or folder name
:param dir_name: dir name
:param nzb_name: nzb name
:return: None or release name
@@ -430,7 +433,7 @@ def determineReleaseName(dir_name=None, nzb_name=None):
logger.log(u'Using nzb name for release name.')
return nzb_name.rpartition('.')[0]
- if not dir_name or not ek.ek(os.path.isdir, dir_name):
+ if not dir_name or not os.path.isdir(dir_name):
return None
# try to get the release name from nzb/nfo
@@ -447,7 +450,7 @@ def determineReleaseName(dir_name=None, nzb_name=None):
return found_file.rpartition('.')[0]
# If that fails, we try the folder
- folder = ek.ek(os.path.basename, dir_name)
+ folder = os.path.basename(dir_name)
if pass_wordlist_checks(folder):
# NOTE: Multiple failed downloads will change the folder name.
# (e.g., appending #s)
diff --git a/sickgear/show_queue.py b/sickgear/show_queue.py
index 0bdacfdf..1f451fec 100644
--- a/sickgear/show_queue.py
+++ b/sickgear/show_queue.py
@@ -23,8 +23,6 @@ import traceback
from lib.dateutil.parser import parser
from lib.tvinfo_base.exceptions import *
-# noinspection PyPep8Naming
-import encodingKludge as ek
import exceptions_helper
from exceptions_helper import ex
@@ -49,7 +47,7 @@ if False:
from lib.tvinfo_base import TVInfoShow
from .tv import TVEpisode
-# Define special priority of tv source switch tasks, higher then anything else except newly added shows
+# Define special priority of tv source switch tasks, higher than anything else except newly added shows
SWITCH_PRIO = generic_queue.QueuePriorities.HIGH + 5
DAILY_SHOW_UPDATE_FINISHED_EVENT = 1
@@ -72,7 +70,7 @@ class ShowQueue(generic_queue.GenericQueue):
def check_events(self):
if self.daily_update_running and \
- not (self.isShowUpdateRunning() or sickgear.show_update_scheduler.action.amActive):
+ not (self.is_show_update_running() or sickgear.show_update_scheduler.action.amActive):
self.execute_events(DAILY_SHOW_UPDATE_FINISHED_EVENT)
self.daily_update_running = False
@@ -91,24 +89,24 @@ class ShowQueue(generic_queue.GenericQueue):
continue
if cur_row['action_id'] in (ShowQueueActions.UPDATE, ShowQueueActions.FORCEUPDATE,
- ShowQueueActions.WEBFORCEUPDATE):
- self.updateShow(add_to_db=False, force=bool(cur_row['force']),
- pausestatus_after=bool_none(cur_row['pausestatus_after']),
- scheduled_update=bool(cur_row['scheduled_update']),
- show_obj=show_obj, skip_refresh=bool(cur_row['skip_refresh']),
- uid=cur_row['uid'],
- web=ShowQueueActions.WEBFORCEUPDATE == cur_row['action_id'])
+ ShowQueueActions.WEBFORCEUPDATE):
+ self.update_show(add_to_db=False, force=bool(cur_row['force']),
+ pausestatus_after=bool_none(cur_row['pausestatus_after']),
+ scheduled_update=bool(cur_row['scheduled_update']),
+ show_obj=show_obj, skip_refresh=bool(cur_row['skip_refresh']),
+ uid=cur_row['uid'],
+ web=ShowQueueActions.WEBFORCEUPDATE == cur_row['action_id'])
elif ShowQueueActions.REFRESH == cur_row['action_id']:
- self.refreshShow(add_to_db=False, force=bool(cur_row['force']),
- force_image_cache=bool(cur_row['force_image_cache']),
- priority=cur_row['priority'],
- scheduled_update=bool(cur_row['scheduled_update']),
- show_obj=show_obj,
- uid=cur_row['uid'])
+ self.refresh_show(add_to_db=False, force=bool(cur_row['force']),
+ force_image_cache=bool(cur_row['force_image_cache']),
+ priority=cur_row['priority'],
+ scheduled_update=bool(cur_row['scheduled_update']),
+ show_obj=show_obj,
+ uid=cur_row['uid'])
elif ShowQueueActions.RENAME == cur_row['action_id']:
- self.renameShowEpisodes(add_to_db=False, show_obj=show_obj, uid=cur_row['uid'])
+ self.rename_show_episodes(add_to_db=False, show_obj=show_obj, uid=cur_row['uid'])
elif ShowQueueActions.SUBTITLE == cur_row['action_id']:
self.download_subtitles(add_to_db=False, show_obj=show_obj, uid=cur_row['uid'])
@@ -243,7 +241,7 @@ class ShowQueue(generic_queue.GenericQueue):
# type: (List[integer_types], bool) -> None
generic_queue.GenericQueue._remove_from_queue(self, to_remove=to_remove, force=force)
- def _isInQueue(self, show_obj, actions):
+ def _is_in_queue(self, show_obj, actions):
# type: (TVShow, Tuple[integer_types, ...]) -> bool
"""
@@ -254,7 +252,7 @@ class ShowQueue(generic_queue.GenericQueue):
with self.lock:
return any(1 for x in self.queue if x.action_id in actions and show_obj == x.show_obj)
- def _isBeingSomethinged(self, show_obj, actions):
+ def _is_being_somethinged(self, show_obj, actions):
# type: (TVShow, Tuple[integer_types, ...]) -> bool
"""
@@ -269,7 +267,7 @@ class ShowQueue(generic_queue.GenericQueue):
and show_obj == self.currentItem.show_obj \
and self.currentItem.action_id in actions
- def isInUpdateQueue(self, show_obj):
+ def is_in_update_queue(self, show_obj):
# type: (TVShow) -> bool
"""
@@ -278,10 +276,10 @@ class ShowQueue(generic_queue.GenericQueue):
:return:
:rtype: bool
"""
- return self._isInQueue(show_obj, (ShowQueueActions.UPDATE, ShowQueueActions.FORCEUPDATE,
- ShowQueueActions.WEBFORCEUPDATE))
+ return self._is_in_queue(show_obj, (ShowQueueActions.UPDATE, ShowQueueActions.FORCEUPDATE,
+ ShowQueueActions.WEBFORCEUPDATE))
- def isInRefreshQueue(self, show_obj):
+ def is_in_refresh_queue(self, show_obj):
# type: (TVShow) -> bool
"""
@@ -290,9 +288,9 @@ class ShowQueue(generic_queue.GenericQueue):
:return:
:rtype: bool
"""
- return self._isInQueue(show_obj, (ShowQueueActions.REFRESH,))
+ return self._is_in_queue(show_obj, (ShowQueueActions.REFRESH,))
- def isInRenameQueue(self, show_obj):
+ def is_in_rename_queue(self, show_obj):
# type: (TVShow) -> bool
"""
@@ -301,9 +299,9 @@ class ShowQueue(generic_queue.GenericQueue):
:return:
:rtype: bool
"""
- return self._isInQueue(show_obj, (ShowQueueActions.RENAME,))
+ return self._is_in_queue(show_obj, (ShowQueueActions.RENAME,))
- def isInSubtitleQueue(self, show_obj):
+ def is_in_subtitle_queue(self, show_obj):
# type: (TVShow) -> bool
"""
@@ -312,9 +310,9 @@ class ShowQueue(generic_queue.GenericQueue):
:return:
:rtype: bool
"""
- return self._isInQueue(show_obj, (ShowQueueActions.SUBTITLE,))
+ return self._is_in_queue(show_obj, (ShowQueueActions.SUBTITLE,))
- def isBeingAdded(self, show_obj):
+ def is_being_added(self, show_obj):
# type: (TVShow) -> bool
"""
@@ -323,9 +321,9 @@ class ShowQueue(generic_queue.GenericQueue):
:return:
:rtype: bool
"""
- return self._isBeingSomethinged(show_obj, (ShowQueueActions.ADD,))
+ return self._is_being_somethinged(show_obj, (ShowQueueActions.ADD,))
- def isBeingUpdated(self, show_obj):
+ def is_being_updated(self, show_obj):
# type: (TVShow) -> bool
"""
@@ -334,10 +332,10 @@ class ShowQueue(generic_queue.GenericQueue):
:return:
:rtype: bool
"""
- return self._isBeingSomethinged(show_obj, (ShowQueueActions.UPDATE, ShowQueueActions.FORCEUPDATE,
- ShowQueueActions.WEBFORCEUPDATE))
+ return self._is_being_somethinged(show_obj, (ShowQueueActions.UPDATE, ShowQueueActions.FORCEUPDATE,
+ ShowQueueActions.WEBFORCEUPDATE))
- def isBeingRefreshed(self, show_obj):
+ def is_being_refreshed(self, show_obj):
# type: (TVShow) -> bool
"""
@@ -346,9 +344,9 @@ class ShowQueue(generic_queue.GenericQueue):
:return:
:rtype: bool
"""
- return self._isBeingSomethinged(show_obj, (ShowQueueActions.REFRESH,))
+ return self._is_being_somethinged(show_obj, (ShowQueueActions.REFRESH,))
- def isBeingRenamed(self, show_obj):
+ def is_being_renamed(self, show_obj):
# type: (TVShow) -> bool
"""
@@ -357,9 +355,9 @@ class ShowQueue(generic_queue.GenericQueue):
:return:
:rtype: bool
"""
- return self._isBeingSomethinged(show_obj, (ShowQueueActions.RENAME,))
+ return self._is_being_somethinged(show_obj, (ShowQueueActions.RENAME,))
- def isBeingSubtitled(self, show_obj):
+ def is_being_subtitled(self, show_obj):
# type: (TVShow) -> bool
"""
@@ -368,9 +366,9 @@ class ShowQueue(generic_queue.GenericQueue):
:return:
:rtype: bool
"""
- return self._isBeingSomethinged(show_obj, (ShowQueueActions.SUBTITLE,))
+ return self._is_being_somethinged(show_obj, (ShowQueueActions.SUBTITLE,))
- def isShowUpdateRunning(self):
+ def is_show_update_running(self):
"""
:return:
@@ -387,7 +385,7 @@ class ShowQueue(generic_queue.GenericQueue):
:param show_obj: show object
"""
- return self._isBeingSomethinged(show_obj, (ShowQueueActions.SWITCH,))
+ return self._is_being_somethinged(show_obj, (ShowQueueActions.SWITCH,))
def is_show_switch_queued(self, show_obj):
# type: (TVShow) -> bool
@@ -396,21 +394,21 @@ class ShowQueue(generic_queue.GenericQueue):
:param show_obj: show object
"""
- return self._isInQueue(show_obj, (ShowQueueActions.SWITCH,))
+ return self._is_in_queue(show_obj, (ShowQueueActions.SWITCH,))
def is_switch_running(self):
# type: (...) -> bool
with self.lock:
return any(1 for x in self.queue + [self.currentItem] if isinstance(x, QueueItemSwitchSource))
- def _getLoadingShowList(self):
+ def _get_loading_showlist(self):
"""
:return:
:rtype: List
"""
with self.lock:
- return [x for x in self.queue + [self.currentItem] if None is not x and x.isLoading]
+ return [x for x in self.queue + [self.currentItem] if None is not x and x.is_loading]
def queue_length(self):
# type: (...) -> Dict[AnyStr, List[AnyStr, Dict]]
@@ -454,18 +452,18 @@ class ShowQueue(generic_queue.GenericQueue):
length['switch'].append(result_item)
return length
- loadingShowList = property(_getLoadingShowList)
+ loading_showlist = property(_get_loading_showlist)
- def updateShow(self,
- show_obj, # type: TVShow
- force=False, # type: bool
- web=False, # type: bool
- scheduled_update=False, # type: bool
- priority=generic_queue.QueuePriorities.NORMAL, # type: integer_types
- uid=None, # type: integer_types
- add_to_db=True, # type: bool
- **kwargs # type: Any
- ): # type: (...) -> Union[QueueItemUpdate, QueueItemForceUpdate, QueueItemForceUpdateWeb]
+ def update_show(self,
+ show_obj, # type: TVShow
+ force=False, # type: bool
+ web=False, # type: bool
+ scheduled_update=False, # type: bool
+ priority=generic_queue.QueuePriorities.NORMAL, # type: integer_types
+ uid=None, # type: integer_types
+ add_to_db=True, # type: bool
+ **kwargs # type: Any
+ ): # type: (...) -> Union[QueueItemUpdate, QueueItemForceUpdate, QueueItemForceUpdateWeb]
"""
:param show_obj: show object
@@ -485,15 +483,15 @@ class ShowQueue(generic_queue.GenericQueue):
:rtype: QueueItemUpdate or QueueItemForceUpdateWeb or QueueItemForceUpdate
"""
with self.lock:
- if self.isBeingAdded(show_obj):
+ if self.is_being_added(show_obj):
raise exceptions_helper.CantUpdateException(
'Show is still being added, wait until it is finished before you update.')
- if self.isBeingUpdated(show_obj):
+ if self.is_being_updated(show_obj):
raise exceptions_helper.CantUpdateException(
'This show is already being updated, can\'t update again until it\'s done.')
- if self.isInUpdateQueue(show_obj):
+ if self.is_in_update_queue(show_obj):
raise exceptions_helper.CantUpdateException(
'This show is already being updated, can\'t update again until it\'s done.')
@@ -516,9 +514,9 @@ class ShowQueue(generic_queue.GenericQueue):
return queue_item_obj
- def refreshShow(self, show_obj, force=False, scheduled_update=False, after_update=False,
- priority=generic_queue.QueuePriorities.HIGH, force_image_cache=False, uid=None, add_to_db=True,
- **kwargs):
+ def refresh_show(self, show_obj, force=False, scheduled_update=False, after_update=False,
+ priority=generic_queue.QueuePriorities.HIGH, force_image_cache=False, uid=None, add_to_db=True,
+ **kwargs):
# type: (TVShow, bool, bool, bool, integer_types, bool, integer_types, bool, Any) -> Optional[QueueItemRefresh]
"""
@@ -541,10 +539,11 @@ class ShowQueue(generic_queue.GenericQueue):
:rtype: QueueItemRefresh
"""
with self.lock:
- if (self.isBeingRefreshed(show_obj) or self.isInRefreshQueue(show_obj)) and not force:
+ if (self.is_being_refreshed(show_obj) or self.is_in_refresh_queue(show_obj)) and not force:
raise exceptions_helper.CantRefreshException('This show is being refreshed, not refreshing again.')
- if ((not after_update and self.isBeingUpdated(show_obj)) or self.isInUpdateQueue(show_obj)) and not force:
+ if ((not after_update and self.is_being_updated(show_obj))
+ or self.is_in_update_queue(show_obj)) and not force:
logger.log('Skipping this refresh as there is already an update queued or'
' in progress and a refresh is done at the end of an update anyway.', logger.DEBUG)
return
@@ -561,7 +560,7 @@ class ShowQueue(generic_queue.GenericQueue):
return queue_item_obj
- def renameShowEpisodes(self, show_obj, uid=None, add_to_db=True):
+ def rename_show_episodes(self, show_obj, uid=None, add_to_db=True):
# type: (TVShow, integer_types, bool) -> QueueItemRename
"""
@@ -758,14 +757,14 @@ class ShowQueueItem(generic_queue.QueueItem):
self.show_obj = show_obj # type: sickgear.tv.TVShow
self.scheduled_update = scheduled_update # type: bool
- def isInQueue(self):
+ def is_in_queue(self):
"""
:rtype: bool
"""
return self in sickgear.show_queue_scheduler.action.queue + [
sickgear.show_queue_scheduler.action.currentItem]
- def _getName(self):
+ def _get_name(self):
"""
:rtype: AnyStr
"""
@@ -773,7 +772,7 @@ class ShowQueueItem(generic_queue.QueueItem):
return self.show_obj.name
return ''
- def _isLoading(self):
+ def _is_loading(self):
return False
def __str__(self):
@@ -782,9 +781,9 @@ class ShowQueueItem(generic_queue.QueueItem):
def __repr__(self):
return self.__str__()
- show_name = property(_getName)
+ show_name = property(_get_name)
- isLoading = property(_isLoading)
+ is_loading = property(_is_loading)
class QueueItemAdd(ShowQueueItem):
@@ -864,7 +863,7 @@ class QueueItemAdd(ShowQueueItem):
self.priority = generic_queue.QueuePriorities.VERYHIGH
- def _getName(self):
+ def _get_name(self):
"""
:return: the show name if there is a show object created, if not returns
the dir that the show is being added to.
@@ -876,9 +875,9 @@ class QueueItemAdd(ShowQueueItem):
return self.showDir
return self.show_obj.name
- show_name = property(_getName)
+ show_name = property(_get_name)
- def _isLoading(self):
+ def _is_loading(self):
"""
:return: True if we've gotten far enough to have a show object, or False
if we still only know the folder name.
@@ -886,7 +885,7 @@ class QueueItemAdd(ShowQueueItem):
"""
return None is self.show_obj
- isLoading = property(_isLoading)
+ is_loading = property(_is_loading)
# if they gave a number to start or number to end as wanted, then change those eps to it
def _get_wanted(self, db_obj, wanted_max, latest):
@@ -985,7 +984,7 @@ class QueueItemAdd(ShowQueueItem):
if getattr(t, 'show_not_found', False):
logger.log('Show %s was not found on %s, maybe show was deleted' %
(self.show_name, sickgear.TVInfoAPI(self.tvid).name), logger.ERROR)
- self._finishEarly()
+ self._finish_early()
return
# this usually only happens if they have an NFO in their show dir
@@ -997,7 +996,7 @@ class QueueItemAdd(ShowQueueItem):
'Show in %s has no name on %s, probably the wrong language.'
' Delete .nfo and add manually in the correct language.' %
(self.showDir, sickgear.TVInfoAPI(self.tvid).name))
- self._finishEarly()
+ self._finish_early()
return
except (BaseException, Exception):
logger.log('Unable to find show ID:%s on TV info: %s' % (self.prodid, sickgear.TVInfoAPI(self.tvid).name),
@@ -1006,7 +1005,7 @@ class QueueItemAdd(ShowQueueItem):
'Unable to look up the show in %s on %s using ID %s, not using the NFO.'
' Delete .nfo and try adding manually again.' %
(self.showDir, sickgear.TVInfoAPI(self.tvid).name, self.prodid))
- self._finishEarly()
+ self._finish_early()
return
try:
@@ -1056,19 +1055,19 @@ class QueueItemAdd(ShowQueueItem):
else:
ui.notifications.error(
'Unable to add show due to an error with %s' % sickgear.TVInfoAPI(self.tvid).name)
- self._finishEarly()
+ self._finish_early()
return
except exceptions_helper.MultipleShowObjectsException:
logger.log('The show in %s is already in your show list, skipping' % self.showDir, logger.ERROR)
ui.notifications.error('Show skipped', 'The show in %s is already in your show list' % self.showDir)
- self._finishEarly()
+ self._finish_early()
return
except (BaseException, Exception) as e:
logger.log('Error trying to add show: %s' % ex(e), logger.ERROR)
logger.log(traceback.format_exc(), logger.ERROR)
- self._finishEarly()
+ self._finish_early()
raise
self.show_obj.load_imdb_info()
@@ -1078,7 +1077,7 @@ class QueueItemAdd(ShowQueueItem):
except (BaseException, Exception) as e:
logger.log('Error saving the show to the database: %s' % ex(e), logger.ERROR)
logger.log(traceback.format_exc(), logger.ERROR)
- self._finishEarly()
+ self._finish_early()
raise
if not show_exists:
@@ -1152,7 +1151,7 @@ class QueueItemAdd(ShowQueueItem):
except (BaseException, Exception) as e:
logger.log('Error saving the show to the database: %s' % ex(e), logger.ERROR)
logger.log(traceback.format_exc(), logger.ERROR)
- self._finishEarly()
+ self._finish_early()
raise
# update internal name cache
@@ -1191,14 +1190,14 @@ class QueueItemAdd(ShowQueueItem):
self.finish()
- def _finishEarly(self):
+ def _finish_early(self):
if None is not self.show_obj:
self.show_obj.delete_show()
if self.new_show:
- # if we adding a new show, delete the empty folder that was already created
+ # if adding a new show, delete the empty folder that was already created
try:
- ek.ek(os.rmdir, self.showDir)
+ os.rmdir(self.showDir)
except (BaseException, Exception):
pass
@@ -1381,7 +1380,7 @@ class QueueItemUpdate(ShowQueueItem):
if not sickgear.TVInfoAPI(self.show_obj.tvid).config['active']:
logger.log('TV info source %s is marked inactive, aborting update for show %s and continue with refresh.'
% (sickgear.TVInfoAPI(self.show_obj.tvid).config['name'], self.show_obj.name))
- sickgear.show_queue_scheduler.action.refreshShow(self.show_obj, self.force, self.scheduled_update,
+ sickgear.show_queue_scheduler.action.refresh_show(self.show_obj, self.force, self.scheduled_update,
after_update=True)
return
@@ -1485,7 +1484,7 @@ class QueueItemUpdate(ShowQueueItem):
sickgear.MEMCACHE['history_tab'] = sickgear.webserve.History.menu_tab(
sickgear.MEMCACHE['history_tab_limit'])
if not getattr(self, 'skip_refresh', False):
- sickgear.show_queue_scheduler.action.refreshShow(self.show_obj, self.force, self.scheduled_update,
+ sickgear.show_queue_scheduler.action.refresh_show(self.show_obj, self.force, self.scheduled_update,
after_update=True, force_image_cache=self.force_web,
**self.kwargs)
diff --git a/sickgear/show_updater.py b/sickgear/show_updater.py
index 7bd46c31..901b431c 100644
--- a/sickgear/show_updater.py
+++ b/sickgear/show_updater.py
@@ -20,8 +20,6 @@ import traceback
import exceptions_helper
from exceptions_helper import ex
-# noinspection PyPep8Naming
-import encodingKludge as ek
import sickgear
from . import db, logger, network_timezones, properFinder, ui
@@ -72,8 +70,7 @@ class ShowUpdater(object):
if sickgear.db.db_supports_backup and 0 < sickgear.BACKUP_DB_MAX_COUNT:
logger.log('backing up all db\'s')
try:
- sickgear.db.backup_all_dbs(sickgear.BACKUP_DB_PATH or
- ek.ek(os.path.join, sickgear.DATA_DIR, 'backup'))
+ sickgear.db.backup_all_dbs(sickgear.BACKUP_DB_PATH or os.path.join(sickgear.DATA_DIR, 'backup'))
except (BaseException, Exception):
logger.log('backup db error', logger.ERROR)
@@ -137,7 +134,7 @@ class ShowUpdater(object):
# cleanup ignore and require lists
try:
clean_ignore_require_words()
- except Exception:
+ except (BaseException, Exception):
logger.log('ignore, require words cleanup error', logger.ERROR)
logger.log(traceback.format_exc(), logger.ERROR)
@@ -166,7 +163,7 @@ class ShowUpdater(object):
logger.log(traceback.format_exc(), logger.ERROR)
# select 10 'Ended' tv_shows updated more than 90 days ago
- # and all shows not updated more then 180 days ago to include in this update
+ # and all shows not updated more than 180 days ago to include in this update
stale_should_update = []
stale_update_date = (update_date - datetime.timedelta(days=90)).toordinal()
stale_update_date_max = (update_date - datetime.timedelta(days=180)).toordinal()
@@ -204,16 +201,16 @@ class ShowUpdater(object):
try:
# if should_update returns True (not 'Ended') or show is selected stale 'Ended' then update,
# otherwise just refresh
- if cur_show_obj.should_update(update_date=update_date,
- last_indexer_change=show_updates.get(cur_show_obj.tvid, {}).
- get(cur_show_obj.prodid)) \
+ if cur_show_obj.should_update(
+ update_date=update_date,
+ last_indexer_change=show_updates.get(cur_show_obj.tvid, {}).get(cur_show_obj.prodid)) \
or cur_show_obj.tvid_prodid in stale_should_update:
- cur_queue_item = sickgear.show_queue_scheduler.action.updateShow(cur_show_obj,
- scheduled_update=True)
+ cur_queue_item = sickgear.show_queue_scheduler.action.update_show(
+ cur_show_obj, scheduled_update=True)
else:
logger.debug(u'Not updating episodes for show %s because it\'s marked as ended and last/next'
u' episode is not within the grace period.' % cur_show_obj.unique_name)
- cur_queue_item = sickgear.show_queue_scheduler.action.refreshShow(cur_show_obj, True, True)
+ cur_queue_item = sickgear.show_queue_scheduler.action.refresh_show(cur_show_obj, True, True)
pi_list.append(cur_queue_item)
diff --git a/sickgear/subtitles.py b/sickgear/subtitles.py
index d3a7dbbd..2cffd798 100644
--- a/sickgear/subtitles.py
+++ b/sickgear/subtitles.py
@@ -17,9 +17,6 @@
import datetime
-# noinspection PyPep8Naming
-import encodingKludge as ek
-
from . import db, helpers, logger
from .common import *
@@ -31,41 +28,41 @@ SINGLE = 'und'
def sorted_service_list():
- servicesMapping = dict([(x.lower(), x) for x in subliminal.core.SERVICES])
+ services_mapping = dict([(x.lower(), x) for x in subliminal.core.SERVICES])
- newList = []
+ new_list = []
# add all services in the priority list, in order
- curIndex = 0
- for curService in sickgear.SUBTITLES_SERVICES_LIST:
- if curService in servicesMapping:
- curServiceDict = dict(
- id=curService,
- image=curService + '.png',
- name=servicesMapping[curService],
- enabled=1 == sickgear.SUBTITLES_SERVICES_ENABLED[curIndex],
- api_based=__import__('lib.subliminal.services.' + curService, globals=globals(),
+ cur_index = 0
+ for cur_service in sickgear.SUBTITLES_SERVICES_LIST:
+ if cur_service in services_mapping:
+ cur_service_dict = dict(
+ id=cur_service,
+ image=cur_service + '.png',
+ name=services_mapping[cur_service],
+ enabled=1 == sickgear.SUBTITLES_SERVICES_ENABLED[cur_index],
+ api_based=__import__('lib.subliminal.services.' + cur_service, globals=globals(),
locals=locals(), fromlist=['Service']).Service.api_based,
- url=__import__('lib.subliminal.services.' + curService, globals=globals(),
+ url=__import__('lib.subliminal.services.' + cur_service, globals=globals(),
locals=locals(), fromlist=['Service']).Service.site_url)
- newList.append(curServiceDict)
- curIndex += 1
+ new_list.append(cur_service_dict)
+ cur_index += 1
# add any services that are missing from that list
- for curService in servicesMapping:
- if curService not in [x['id'] for x in newList]:
- curServiceDict = dict(
- id=curService,
- image=curService + '.png',
- name=servicesMapping[curService],
+ for cur_service in services_mapping:
+ if cur_service not in [x['id'] for x in new_list]:
+ cur_service_dict = dict(
+ id=cur_service,
+ image=cur_service + '.png',
+ name=services_mapping[cur_service],
enabled=False,
- api_based=__import__('lib.subliminal.services.' + curService, globals=globals(),
+ api_based=__import__('lib.subliminal.services.' + cur_service, globals=globals(),
locals=locals(), fromlist=['Service']).Service.api_based,
- url=__import__('lib.subliminal.services.' + curService, globals=globals(),
+ url=__import__('lib.subliminal.services.' + cur_service, globals=globals(),
locals=locals(), fromlist=['Service']).Service.site_url)
- newList.append(curServiceDict)
+ new_list.append(cur_service_dict)
- return newList
+ return new_list
def get_enabled_service_list():
@@ -81,10 +78,10 @@ def get_language_name(select_lang):
def wanted_languages(sql_like=False):
- wantedLanguages = sorted(sickgear.SUBTITLES_LANGUAGES)
+ wanted_langs = sorted(sickgear.SUBTITLES_LANGUAGES)
if sql_like:
- return '%' + ','.join(wantedLanguages) + '%'
- return wantedLanguages
+ return '%' + ','.join(wanted_langs) + '%'
+ return wanted_langs
def subtitles_languages(video_path):
@@ -166,7 +163,7 @@ class SubtitlesFinder(object):
now = datetime.datetime.now()
for cur_result in sql_result:
- if not ek.ek(os.path.isfile, cur_result['location']):
+ if not os.path.isfile(cur_result['location']):
logger.log('Episode file does not exist, cannot download subtitles for episode %dx%d of show %s'
% (cur_result['season'], cur_result['episode'], cur_result['show_name']), logger.DEBUG)
continue
diff --git a/sickgear/traktChecker.py b/sickgear/traktChecker.py
index 092a0471..851ed124 100644
--- a/sickgear/traktChecker.py
+++ b/sickgear/traktChecker.py
@@ -19,9 +19,6 @@ import datetime
import os
import traceback
-# noinspection PyPep8Naming
-import encodingKludge as ek
-
import sickgear
from . import helpers, logger, search_queue
from .common import SKIPPED, WANTED
@@ -175,7 +172,7 @@ class TraktChecker(object):
location = None
if location:
- showPath = ek.ek(os.path.join, location, helpers.sanitize_filename(name))
+ showPath = os.path.join(location, helpers.sanitize_filename(name))
dir_exists = helpers.make_dir(showPath)
if not dir_exists:
logger.log(u"Unable to create the folder " + showPath + ", can't add the show", logger.ERROR)
diff --git a/sickgear/tv.py b/sickgear/tv.py
index ab0155a3..8ec8cc69 100644
--- a/sickgear/tv.py
+++ b/sickgear/tv.py
@@ -36,8 +36,6 @@ import traceback
from imdbpie import ImdbAPIError
from lxml_etree import etree
-# noinspection PyPep8Naming
-import encodingKludge as ek
import exceptions_helper
from exceptions_helper import ex
@@ -72,7 +70,7 @@ from six import integer_types, iteritems, itervalues, moves, PY2, string_types
if False:
from typing import Any, AnyStr, Dict, List, Optional, Set, Text, Tuple, Union
from sqlite3 import Row
- from lib.tvinfo_base import CastList, Character as TVINFO_Character, Person as TVINFO_Person, \
+ from lib.tvinfo_base import CastList, TVInfoCharacter, TVInfoPerson, \
TVInfoEpisode, TVInfoShow
coreid_warnings = False
@@ -453,7 +451,7 @@ class Person(Referential):
'homepage', 'ids', 'image_url', 'name', 'nicknames', 'real_name', 'thumb_url']}
def reset(self, person_obj=None):
- # type: (TVINFO_Person) -> None
+ # type: (TVInfoPerson) -> None
"""
reset all properties with the exception of: name, id, ids
@@ -646,7 +644,7 @@ class Person(Referential):
break
def update_prop_from_tvinfo_person(self, person_obj):
- # type: (TVINFO_Person) -> None
+ # type: (TVInfoPerson) -> None
"""
update person with tvinfo person object info
Note: doesn't change: name, id, image_url, thumb_url
@@ -747,7 +745,7 @@ class Person(Referential):
continue
if tvsrc_result:
# verify we have the correct person
- for cur_person in tvsrc_result: # type: TVINFO_Person
+ for cur_person in tvsrc_result: # type: TVInfoPerson
if None is not rp:
break
if not (imdb_confirmed and TVINFO_IMDB == tv_src) \
@@ -767,7 +765,7 @@ class Person(Referential):
# noinspection PyUnresolvedReferences
if show_obj and None is not pd and pd.characters:
clean_show_name = indexermapper.clean_show_name(show_obj.name.lower())
- for ch in pd.characters or []: # type: TVINFO_Character
+ for ch in pd.characters or []: # type: TVInfoCharacter
if clean_show_name == indexermapper.clean_show_name(ch.show.seriesname.lower()):
rp = pd
confirmed_on_src = True
@@ -1635,7 +1633,7 @@ class TVShow(TVShowBase):
if sickgear.CREATE_MISSING_SHOW_DIRS:
return self._location
- if ek.ek(os.path.isdir, self._location):
+ if os.path.isdir(self._location):
return self._location
raise exceptions_helper.ShowDirNotFoundException('Show folder does not exist: \'%s\'' % self._location)
@@ -1644,7 +1642,7 @@ class TVShow(TVShowBase):
# type: (AnyStr) -> None
logger.log('Setter sets location to %s' % new_location, logger.DEBUG)
# Don't validate dir if user wants to add shows without creating a dir
- if sickgear.ADD_SHOWS_WO_DIR or ek.ek(os.path.isdir, new_location):
+ if sickgear.ADD_SHOWS_WO_DIR or os.path.isdir(new_location):
self.dirty_setter('_location')(self, new_location)
self.path = new_location
# self._is_location_good = True
@@ -2052,7 +2050,7 @@ class TVShow(TVShowBase):
result = False
- if not ek.ek(os.path.isdir, self._location):
+ if not os.path.isdir(self._location):
logger.log('%s: Show directory doesn\'t exist, skipping NFO generation' % self.tvid_prodid)
return False
@@ -2068,7 +2066,7 @@ class TVShow(TVShowBase):
:param show_only: only for show
:param force:
"""
- if not ek.ek(os.path.isdir, self._location):
+ if not os.path.isdir(self._location):
logger.log('%s: Show directory doesn\'t exist, skipping NFO generation' % self.tvid_prodid)
return
@@ -2084,7 +2082,7 @@ class TVShow(TVShowBase):
def write_episode_nfo(self, force=False):
# type: (bool) -> None
- if not ek.ek(os.path.isdir, self._location):
+ if not os.path.isdir(self._location):
logger.log('%s: Show directory doesn\'t exist, skipping NFO generation' % self.tvid_prodid)
return
@@ -2119,7 +2117,7 @@ class TVShow(TVShowBase):
def update_metadata(self):
- if not ek.ek(os.path.isdir, self._location):
+ if not os.path.isdir(self._location):
logger.log('%s: Show directory doesn\'t exist, skipping NFO generation' % self.tvid_prodid)
return
@@ -2129,7 +2127,7 @@ class TVShow(TVShowBase):
result = False
- if not ek.ek(os.path.isdir, self._location):
+ if not os.path.isdir(self._location):
logger.log('%s: Show directory doesn\'t exist, skipping NFO generation' % self.tvid_prodid)
return False
@@ -2142,7 +2140,7 @@ class TVShow(TVShowBase):
# find all media files in the show folder and create episodes for as many as possible
def load_episodes_from_dir(self):
- if not ek.ek(os.path.isdir, self._location):
+ if not os.path.isdir(self._location):
logger.log('%s: Show directory doesn\'t exist, not loading episodes from disk' % self.tvid_prodid)
return
@@ -2159,7 +2157,7 @@ class TVShow(TVShowBase):
logger.log('%s: Creating episode from %s' % (self.tvid_prodid, cur_media_file), logger.DEBUG)
try:
- ep_obj = self.ep_obj_from_file(ek.ek(os.path.join, self._location, cur_media_file))
+ ep_obj = self.ep_obj_from_file(os.path.join(self._location, cur_media_file))
except (exceptions_helper.ShowNotFoundException, exceptions_helper.EpisodeNotFoundException) as e:
logger.log('Episode %s returned an exception: %s' % (cur_media_file, ex(e)), logger.ERROR)
continue
@@ -2170,8 +2168,8 @@ class TVShow(TVShowBase):
continue
# see if we should save the release name in the db
- ep_file_name = ek.ek(os.path.basename, ep_obj.location)
- ep_file_name = ek.ek(os.path.splitext, ep_file_name)[0]
+ ep_file_name = os.path.basename(ep_obj.location)
+ ep_file_name = os.path.splitext(ep_file_name)[0]
try:
parse_result = None
@@ -2423,7 +2421,7 @@ class TVShow(TVShowBase):
:param path:
:return:
"""
- if not ek.ek(os.path.isfile, path):
+ if not os.path.isfile(path):
logger.log('%s: Not a real file... %s' % (self.tvid_prodid, path))
return None
@@ -2474,7 +2472,7 @@ class TVShow(TVShowBase):
if IGNORED == status:
continue
- if (ep_obj.location and ek.ek(os.path.normpath, ep_obj.location) != ek.ek(os.path.normpath, path)) or \
+ if (ep_obj.location and os.path.normpath(ep_obj.location) != os.path.normpath(path)) or \
(not ep_obj.location and path) or \
(SKIPPED == status):
logger.log('The old episode had a different file associated with it, re-checking the quality ' +
@@ -2856,14 +2854,14 @@ class TVShow(TVShowBase):
if show_info.cast and self._should_cast_update(show_info.cast):
sickgear.people_queue_scheduler.action.add_cast_update(show_obj=self, show_info_cast=show_info.cast,
- scheduled_update=scheduled_update, switch=switch)
+ scheduled_update=scheduled_update, switch=switch)
else:
logger.log('Not updating cast for show because data is unchanged.')
return show_info
@staticmethod
def _update_person_properties_helper(person_obj, src_person, p_ids):
- # type: (Person, TVINFO_Person, Dict) -> None
+ # type: (Person, TVInfoPerson, Dict) -> None
person_obj.update_properties(
name=src_person.name, gender=src_person.gender,
birthday=src_person.birthdate, deathday=src_person.deathdate,
@@ -2898,7 +2896,7 @@ class TVShow(TVShowBase):
cast_list = self._load_cast_from_db()
remove_char_ids = {c.id for c in cast_list or []}
cast_ordered = weakList()
- for ct, c_l in iteritems(show_info_cast): # type: (integer_types, List[TVINFO_Character])
+ for ct, c_l in iteritems(show_info_cast): # type: (integer_types, List[TVInfoCharacter])
if ct not in (RoleTypes.ActorMain, RoleTypes.Host, RoleTypes.Interviewer, RoleTypes.Presenter):
continue
for c in c_l:
@@ -3074,7 +3072,7 @@ class TVShow(TVShowBase):
self._imdbid = redirect_check
imdb_id = redirect_check
imdb_info['imdb_id'] = self.imdbid
- i = imdbpie.Imdb(exclude_episodes=True, cachedir=ek.ek(os.path.join, sickgear.CACHE_DIR, 'imdb-pie'))
+ i = imdbpie.Imdb(exclude_episodes=True, cachedir=os.path.join(sickgear.CACHE_DIR, 'imdb-pie'))
if not helpers.parse_imdb_id(imdb_id):
logger.log('Not a valid imdbid: %s for show: %s' % (imdb_id, self._name), logger.WARNING)
return
@@ -3276,10 +3274,10 @@ class TVShow(TVShowBase):
# clear the cache
ic = image_cache.ImageCache()
- for cache_obj in ek.ek(glob.glob, ic.fanart_path(self.tvid, self.prodid).replace('fanart.jpg', '*')) \
- + ek.ek(glob.glob, ic.poster_thumb_path(self.tvid, self.prodid).replace('poster.jpg', '*')) \
- + ek.ek(glob.glob, ic.poster_path(self.tvid, self.prodid).replace('poster.jpg', '*')):
- cache_dir = ek.ek(os.path.isdir, cache_obj)
+ for cache_obj in glob.glob(ic.fanart_path(self.tvid, self.prodid).replace('fanart.jpg', '*')) \
+ + glob.glob(ic.poster_thumb_path(self.tvid, self.prodid).replace('poster.jpg', '*')) \
+ + glob.glob(ic.poster_path(self.tvid, self.prodid).replace('poster.jpg', '*')):
+ cache_dir = os.path.isdir(cache_obj)
result = helpers.remove_file(cache_obj, tree=cache_dir, log_level=logger.WARNING)
if result:
logger.log('%s cache %s %s' % (result, cache_dir and 'dir' or 'file', cache_obj))
@@ -3292,12 +3290,12 @@ class TVShow(TVShowBase):
try:
logger.log('Attempt to %s show folder %s' % (action, self._location))
# check first the read-only attribute
- file_attribute = ek.ek(os.stat, self.location)[0]
+ file_attribute = os.stat(self.location)[0]
if not file_attribute & stat.S_IWRITE:
# File is read-only, so make it writeable
logger.log('Attempting to make writeable the read only folder %s' % self._location, logger.DEBUG)
try:
- ek.ek(os.chmod, self.location, stat.S_IWRITE)
+ os.chmod(self.location, stat.S_IWRITE)
except (BaseException, Exception):
logger.log('Unable to change permissions of %s' % self._location, logger.WARNING)
@@ -3324,7 +3322,7 @@ class TVShow(TVShowBase):
def refresh_dir(self):
# make sure the show dir is where we think it is unless dirs are created on the fly
- if not ek.ek(os.path.isdir, self._location) and not sickgear.CREATE_MISSING_SHOW_DIRS:
+ if not os.path.isdir(self._location) and not sickgear.CREATE_MISSING_SHOW_DIRS:
return False
# load from dir
@@ -3351,7 +3349,7 @@ class TVShow(TVShowBase):
for cur_row in sql_result:
season = int(cur_row['season'])
episode = int(cur_row['episode'])
- location = ek.ek(os.path.normpath, cur_row['location'])
+ location = os.path.normpath(cur_row['location'])
try:
ep_obj = self.get_episode(season, episode, ep_result=[cur_row])
@@ -3362,7 +3360,7 @@ class TVShow(TVShowBase):
# if the path exist and if it's in our show dir
if (self.prune and season and ep_obj.location not in attempted and 0 < helpers.get_size(ep_obj.location) and
- ek.ek(os.path.normpath, location).startswith(ek.ek(os.path.normpath, self.location))):
+ os.path.normpath(location).startswith(os.path.normpath(self.location))):
with ep_obj.lock:
if ep_obj.status in Quality.DOWNLOADED:
# locations repeat but attempt to delete once
@@ -3377,8 +3375,8 @@ class TVShow(TVShowBase):
kept += 1
# if the path doesn't exist or if it's not in our show dir
- if not ek.ek(os.path.isfile, location) or not ek.ek(os.path.normpath, location).startswith(
- ek.ek(os.path.normpath, self.location)):
+ if not os.path.isfile(location) or not os.path.normpath(location).startswith(
+ os.path.normpath(self.location)):
# check if downloaded files still exist, update our data if this has changed
if 1 != sickgear.SKIP_REMOVED_FILES:
@@ -3427,7 +3425,7 @@ class TVShow(TVShowBase):
:param force:
"""
# TODO: Add support for force option
- if not ek.ek(os.path.isdir, self._location):
+ if not os.path.isdir(self._location):
logger.log('%s: Show directory doesn\'t exist, can\'t download subtitles' % self.tvid_prodid, logger.DEBUG)
return
logger.log('%s: Downloading subtitles' % self.tvid_prodid, logger.DEBUG)
@@ -3526,11 +3524,11 @@ class TVShow(TVShowBase):
save_mapping(self)
name_cache.remove_from_namecache(old_tvid, old_prodid)
- image_cache_dir = ek.ek(os.path.join, sickgear.CACHE_DIR, 'images', 'shows')
- old_dir = ek.ek(os.path.join, image_cache_dir, '%s-%s' % (old_tvid, old_prodid))
- new_dir = ek.ek(os.path.join, image_cache_dir, '%s-%s' % (self.tvid, self.prodid))
+ image_cache_dir = os.path.join(sickgear.CACHE_DIR, 'images', 'shows')
+ old_dir = os.path.join(image_cache_dir, '%s-%s' % (old_tvid, old_prodid))
+ new_dir = os.path.join(image_cache_dir, '%s-%s' % (self.tvid, self.prodid))
try:
- ek.ek(os.rename, old_dir, new_dir)
+ os.rename(old_dir, new_dir)
except (BaseException, Exception) as e:
logger.log('Unable to rename %s to %s: %s / %s' % (old_dir, new_dir, repr(e), ex(e)),
logger.WARNING)
@@ -3556,7 +3554,7 @@ class TVShow(TVShowBase):
if update_show:
# force the update
try:
- sickgear.show_queue_scheduler.action.updateShow(
+ sickgear.show_queue_scheduler.action.update_show(
self, force=True, web=True, priority=QueuePriorities.VERYHIGH,
pausestatus_after=pausestatus_after, switch_src=True)
except exceptions_helper.CantUpdateException as e:
@@ -3944,8 +3942,8 @@ class TVEpisode(TVEpisodeBase):
# self._location = newLocation
self.dirty_setter('_location')(self, val)
- if val and ek.ek(os.path.isfile, val):
- self.file_size = ek.ek(os.path.getsize, val)
+ if val and os.path.isfile(val):
+ self.file_size = os.path.getsize(val)
else:
self.file_size = 0
@@ -3968,7 +3966,7 @@ class TVEpisode(TVEpisodeBase):
return
# TODO: Add support for force option
- if not ek.ek(os.path.isfile, self.location):
+ if not os.path.isfile(self.location):
logger.log('%s: Episode file doesn\'t exist, can\'t download subtitles for episode %sx%s' %
(self.show_obj.tvid_prodid, self.season, self.episode), logger.DEBUG)
return
@@ -3987,7 +3985,7 @@ class TVEpisode(TVEpisodeBase):
if sickgear.SUBTITLES_DIR:
for video in subs:
- subs_new_path = ek.ek(os.path.join, ek.ek(os.path.dirname, video.path), sickgear.SUBTITLES_DIR)
+ subs_new_path = os.path.join(os.path.dirname(video.path), sickgear.SUBTITLES_DIR)
dir_exists = helpers.make_dir(subs_new_path)
if not dir_exists:
logger.log('Unable to create subtitles folder %s' % subs_new_path, logger.ERROR)
@@ -3995,7 +3993,7 @@ class TVEpisode(TVEpisodeBase):
helpers.chmod_as_parent(subs_new_path)
for subtitle in subs.get(video):
- new_file_path = ek.ek(os.path.join, subs_new_path, ek.ek(os.path.basename, subtitle.path))
+ new_file_path = os.path.join(subs_new_path, os.path.basename(subtitle.path))
helpers.move_file(subtitle.path, new_file_path)
helpers.chmod_as_parent(new_file_path)
else:
@@ -4052,7 +4050,7 @@ class TVEpisode(TVEpisodeBase):
hastbn = False
# check for nfo and tbn
- if ek.ek(os.path.isfile, self.location):
+ if os.path.isfile(self.location):
for cur_provider in itervalues(sickgear.metadata_provider_dict):
if cur_provider.episode_metadata:
new_result = cur_provider.has_episode_metadata(self)
@@ -4085,7 +4083,7 @@ class TVEpisode(TVEpisodeBase):
"""
if not self.load_from_db(season, episode, **kwargs):
# only load from NFO if we didn't load from DB
- if ek.ek(os.path.isfile, self.location):
+ if os.path.isfile(self.location):
try:
self.load_from_nfo(self.location)
except exceptions_helper.NoNFOException:
@@ -4168,7 +4166,7 @@ class TVEpisode(TVEpisodeBase):
self._subtitles_searchcount = show_result['subtitles_searchcount']
self._timestamp = show_result['timestamp'] or self._make_timestamp()
self._version = self._version if not show_result['version'] else int(show_result['version'])
- self.location = show_result['location'] and ek.ek(os.path.normpath, show_result['location']) or self.location
+ self.location = show_result['location'] and os.path.normpath(show_result['location']) or self.location
if None is not show_result['release_group']:
self._release_group = show_result['release_group']
@@ -4414,7 +4412,7 @@ class TVEpisode(TVEpisodeBase):
# don't update show status if show dir is missing, unless it's missing on purpose
# noinspection PyProtectedMember
- if not ek.ek(os.path.isdir, self._show_obj._location) \
+ if not os.path.isdir(self._show_obj._location) \
and not sickgear.CREATE_MISSING_SHOW_DIRS and not sickgear.ADD_SHOWS_WO_DIR:
if UNKNOWN == self._status:
self.status = (SKIPPED, UNAIRED)[future_airtime]
@@ -4430,7 +4428,7 @@ class TVEpisode(TVEpisodeBase):
logger.DEBUG)
# if we don't have the file
- if not ek.ek(os.path.isfile, self._location):
+ if not os.path.isfile(self._location):
if self._status in [SKIPPED, UNAIRED, UNKNOWN, WANTED]:
very_old_delta = datetime.timedelta(days=90)
@@ -4498,7 +4496,7 @@ class TVEpisode(TVEpisodeBase):
:type location: AnyStr
"""
# noinspection PyProtectedMember
- if not ek.ek(os.path.isdir, self._show_obj._location):
+ if not os.path.isdir(self._show_obj._location):
logger.log('%s: The show directory is missing, not bothering to try loading the episode NFO'
% self._show_obj.tvid_prodid)
return
@@ -4518,14 +4516,14 @@ class TVEpisode(TVEpisodeBase):
nfo_file = sickgear.helpers.replace_extension(self.location, 'nfo')
logger.log('%s: Using NFO name %s' % (self._show_obj.tvid_prodid, nfo_file), logger.DEBUG)
- if ek.ek(os.path.isfile, nfo_file):
+ if os.path.isfile(nfo_file):
try:
show_xml = etree.ElementTree(file=nfo_file)
except (SyntaxError, ValueError) as e:
logger.log('Error loading the NFO, backing up the NFO and skipping for now: %s' % ex(e),
logger.ERROR) # TODO: figure out what's wrong and fix it
try:
- ek.ek(os.rename, nfo_file, '%s.old' % nfo_file)
+ os.rename(nfo_file, '%s.old' % nfo_file)
except (BaseException, Exception) as e:
logger.log(
'Failed to rename your episode\'s NFO file - you need to delete it or fix it: %s' % ex(e),
@@ -4576,7 +4574,7 @@ class TVEpisode(TVEpisodeBase):
else:
self.hasnfo = False
- if ek.ek(os.path.isfile, sickgear.helpers.replace_extension(nfo_file, 'tbn')):
+ if os.path.isfile(sickgear.helpers.replace_extension(nfo_file, 'tbn')):
self.hastbn = True
else:
self.hastbn = False
@@ -4613,7 +4611,7 @@ class TVEpisode(TVEpisodeBase):
def create_meta_files(self, force=False):
# noinspection PyProtectedMember
- if not ek.ek(os.path.isdir, self.show_obj._location):
+ if not os.path.isdir(self.show_obj._location):
logger.log('%s: The show directory is missing, not bothering to try to create metadata'
% self.show_obj.tvid_prodid)
return
@@ -4797,7 +4795,7 @@ class TVEpisode(TVEpisodeBase):
# def full_location(self):
# if self.location in (None, ''):
# return None
- # return ek.ek(os.path.join, self.show_obj.location, self.location)
+ # return os.path.join(self.show_obj.location, self.location)
#
# # TODO: remove if unused
# def create_strings(self, pattern=None):
@@ -5140,7 +5138,7 @@ class TVEpisode(TVEpisodeBase):
return result
# if not we append the folder on and use that
- return ek.ek(os.path.join, self.formatted_dir(), result)
+ return os.path.join(self.formatted_dir(), result)
def formatted_dir(self, pattern=None, multi=None):
"""
@@ -5164,7 +5162,7 @@ class TVEpisode(TVEpisodeBase):
if 1 == len(name_groups):
logger.debug('No Season Folder set in Naming pattern: %s' % pattern)
return ''
- return self._format_pattern(ek.ek(os.sep.join, name_groups[:-1]), multi)
+ return self._format_pattern(os.sep.join(name_groups[:-1]), multi)
def formatted_filename(self, pattern=None, multi=None, anime_type=None):
"""
@@ -5193,13 +5191,13 @@ class TVEpisode(TVEpisodeBase):
in the naming settings.
"""
- if not ek.ek(os.path.isfile, self.location):
+ if not os.path.isfile(self.location):
logger.log('Can\'t perform rename on %s when it doesn\'t exist, skipping' % self.location, logger.WARNING)
return
proper_path = self.proper_path()
- absolute_proper_path = ek.ek(os.path.join, self._show_obj.location, proper_path)
- absolute_current_path_no_ext, file_ext = ek.ek(os.path.splitext, self.location)
+ absolute_proper_path = os.path.join(self._show_obj.location, proper_path)
+ absolute_current_path_no_ext, file_ext = os.path.splitext(self.location)
absolute_current_path_no_ext_length = len(absolute_current_path_no_ext)
related_subs = []
@@ -5224,7 +5222,7 @@ class TVEpisode(TVEpisodeBase):
if self.show_obj.subtitles and '' != sickgear.SUBTITLES_DIR:
related_subs = postProcessor.PostProcessor(self.location).list_associated_files(sickgear.SUBTITLES_DIR,
subtitles_only=True)
- # absolute_proper_subs_path = ek.ek(os.path.join, sickgear.SUBTITLES_DIR, self.formatted_filename())
+ # absolute_proper_subs_path = os.path.join(sickgear.SUBTITLES_DIR, self.formatted_filename())
logger.log('Files associated to %s: %s' % (self.location, related_files), logger.DEBUG)
@@ -5239,7 +5237,7 @@ class TVEpisode(TVEpisodeBase):
logger.log('%s: Unable to rename file %s' % (self._epid, cur_related_file), logger.ERROR)
for cur_related_sub in related_subs:
- absolute_proper_subs_path = ek.ek(os.path.join, sickgear.SUBTITLES_DIR, self.formatted_filename())
+ absolute_proper_subs_path = os.path.join(sickgear.SUBTITLES_DIR, self.formatted_filename())
renamed = helpers.rename_ep_file(cur_related_sub, absolute_proper_subs_path,
absolute_current_path_no_ext_length)
if not renamed:
@@ -5277,7 +5275,7 @@ class TVEpisode(TVEpisodeBase):
has_timestamp = isinstance(self._timestamp, int) and 1 < self._timestamp
if not has_timestamp and (not isinstance(self._airdate, datetime.date) or 1 == self._airdate.year):
logger.log('%s: Did not change modify date of %s because episode date is never aired or invalid'
- % (self._show_obj.tvid_prodid, ek.ek(os.path.basename, self.location)), logger.DEBUG)
+ % (self._show_obj.tvid_prodid, os.path.basename(self.location)), logger.DEBUG)
return
aired_dt = None
@@ -5292,7 +5290,7 @@ class TVEpisode(TVEpisodeBase):
try:
aired_epoch = SGDatetime.to_file_timestamp(aired_dt)
- filemtime = int(ek.ek(os.path.getmtime, self.location))
+ filemtime = int(os.path.getmtime(self.location))
except (BaseException, Exception):
return
@@ -5303,7 +5301,7 @@ class TVEpisode(TVEpisodeBase):
result, loglevel = 'Error changing', logger.WARNING
logger.log('%s: %s modify date of %s to show air date %s'
- % (self._show_obj.tvid_prodid, result, ek.ek(os.path.basename, self.location),
+ % (self._show_obj.tvid_prodid, result, os.path.basename(self.location),
'n/a' if not aired_dt else aired_dt.strftime('%b %d,%Y (%H:%M)')), loglevel)
def __getstate__(self):
diff --git a/sickgear/ui.py b/sickgear/ui.py
index a15743dc..b03d9728 100644
--- a/sickgear/ui.py
+++ b/sickgear/ui.py
@@ -158,10 +158,10 @@ class QueueProgressIndicator(object):
return len(self.queueItemList)
def numFinished(self):
- return len([x for x in self.queueItemList if not x.isInQueue()])
+ return len([x for x in self.queueItemList if not x.is_in_queue()])
def numRemaining(self):
- return len([x for x in self.queueItemList if x.isInQueue()])
+ return len([x for x in self.queueItemList if x.is_in_queue()])
def nextName(self):
for curItem in [
diff --git a/sickgear/version_checker.py b/sickgear/version_checker.py
index a2c892a2..23609e71 100644
--- a/sickgear/version_checker.py
+++ b/sickgear/version_checker.py
@@ -25,8 +25,6 @@ import time
import traceback
from . import gh_api as github
-# noinspection PyPep8Naming
-import encodingKludge as ek
from exceptions_helper import ex
import sickgear
@@ -176,7 +174,7 @@ class SoftwareUpdater(object):
'git': running from source using git
'source': running from source without git
"""
- return ('source', 'git')[os.path.isdir(ek.ek(os.path.join, sickgear.PROG_DIR, '.git'))]
+ return ('source', 'git')[os.path.isdir(os.path.join(sickgear.PROG_DIR, '.git'))]
def check_for_new_version(self, force=False):
"""
@@ -754,7 +752,7 @@ class SourceUpdateManager(UpdateManager):
try:
# prepare the update dir
- sg_update_dir = ek.ek(os.path.join, sickgear.PROG_DIR, u'sg-update')
+ sg_update_dir = os.path.join(sickgear.PROG_DIR, u'sg-update')
if os.path.isdir(sg_update_dir):
logger.log(u'Clearing out update folder %s before extracting' % sg_update_dir)
@@ -768,11 +766,11 @@ class SourceUpdateManager(UpdateManager):
tar_download_path = os.path.join(sg_update_dir, u'sg-update.tar')
urllib.request.urlretrieve(tar_download_url, tar_download_path)
- if not ek.ek(os.path.isfile, tar_download_path):
+ if not os.path.isfile(tar_download_path):
logger.error(u'Unable to retrieve new version from %s, can\'t update' % tar_download_url)
return False
- if not ek.ek(tarfile.is_tarfile, tar_download_path):
+ if not tarfile.is_tarfile(tar_download_path):
logger.error(u'Retrieved version from %s is corrupt, can\'t update' % tar_download_url)
return False
diff --git a/sickgear/webapi.py b/sickgear/webapi.py
index b13bea0d..691f2c6c 100644
--- a/sickgear/webapi.py
+++ b/sickgear/webapi.py
@@ -31,8 +31,6 @@ import time
import traceback
from . import webserve
-# noinspection PyPep8Naming
-import encodingKludge as ek
import exceptions_helper
from exceptions_helper import ex
from json_helper import is_orjson, json_dumps, JSON_INDENT, json_loads, JSONEncoder, ORJSON_OPTIONS
@@ -833,7 +831,7 @@ def _getRootDirs():
for root_dir in root_dirs:
valid = 1
try:
- ek.ek(os.listdir, root_dir)
+ os.listdir(root_dir)
except (BaseException, Exception):
valid = 0
default = 0
@@ -2003,7 +2001,7 @@ class CMD_SickGearAddRootDir(ApiCall):
index = 0
# disallow adding/setting an invalid dir
- if not ek.ek(os.path.isdir, self.location):
+ if not os.path.isdir(self.location):
return _responds(RESULT_FAILURE, msg="Location is invalid")
root_dirs = []
@@ -2340,8 +2338,8 @@ class CMD_SickGearGetIndexerIcon(ApiCall):
self.handler.set_status(404)
return _responds(RESULT_FAILURE, 'Icon not found')
img = i['icon']
- image = ek.ek(os.path.join, sickgear.PROG_DIR, 'gui', 'slick', 'images', img)
- if not ek.ek(os.path.isfile, image):
+ image = os.path.join(sickgear.PROG_DIR, 'gui', 'slick', 'images', img)
+ if not os.path.isfile(image):
self.handler.set_status(404)
return _responds(RESULT_FAILURE, 'Icon not found')
return {'outputType': 'image', 'image': self.handler.get_image(image)}
@@ -2361,9 +2359,8 @@ class CMD_SickGearGetNetworkIcon(ApiCall):
ApiCall.__init__(self, handler, args, kwargs)
def run(self):
- image = ek.ek(os.path.join, sickgear.PROG_DIR, 'gui', 'slick', 'images', 'network',
- '%s.png' % self.network.lower())
- if not ek.ek(os.path.isfile, image):
+ image = os.path.join(sickgear.PROG_DIR, 'gui', 'slick', 'images', 'network', '%s.png' % self.network.lower())
+ if not os.path.isfile(image):
self.handler.set_status(404)
return _responds(RESULT_FAILURE, 'Icon not found')
return {'outputType': 'image', 'image': self.handler.get_image(image)}
@@ -3328,7 +3325,7 @@ class CMD_SickGearShowAddExisting(ApiCall):
if show_obj:
return _responds(RESULT_FAILURE, msg="An existing indexerid already exists in the database")
- if not ek.ek(os.path.isdir, self.location):
+ if not os.path.isdir(self.location):
return _responds(RESULT_FAILURE, msg='Not a valid location')
lINDEXER_API_PARMS = sickgear.TVInfoAPI(self.tvid).api_params.copy()
@@ -3460,7 +3457,7 @@ class CMD_SickGearShowAddNew(ApiCall):
else:
return _responds(RESULT_FAILURE, msg="Root directory is not set, please provide a location")
- if not ek.ek(os.path.isdir, self.location):
+ if not os.path.isdir(self.location):
return _responds(RESULT_FAILURE, msg="'" + self.location + "' is not a valid location")
# use default quality as a failsafe
@@ -3611,9 +3608,9 @@ class CMD_SickGearShowCache(ApiCall):
has_poster = 0
has_banner = 0
- if ek.ek(os.path.isfile, cache_obj.poster_path(show_obj.tvid, show_obj.prodid)):
+ if os.path.isfile(cache_obj.poster_path(show_obj.tvid, show_obj.prodid)):
has_poster = 1
- if ek.ek(os.path.isfile, cache_obj.banner_path(show_obj.tvid, show_obj.prodid)):
+ if os.path.isfile(cache_obj.banner_path(show_obj.tvid, show_obj.prodid)):
has_banner = 1
return _responds(RESULT_SUCCESS, {"poster": has_poster, "banner": has_banner})
@@ -3663,8 +3660,8 @@ class CMD_SickGearShowDelete(ApiCall):
if not show_obj:
return _responds(RESULT_FAILURE, msg="Show not found")
- if sickgear.show_queue_scheduler.action.isBeingAdded(
- show_obj) or sickgear.show_queue_scheduler.action.isBeingUpdated(show_obj):
+ if sickgear.show_queue_scheduler.action.is_being_added(
+ show_obj) or sickgear.show_queue_scheduler.action.is_being_updated(show_obj):
return _responds(RESULT_FAILURE, msg="Show can not be deleted while being added or updated")
show_obj.delete_show(full=self.full_delete)
@@ -3834,8 +3831,7 @@ class CMD_SickGearShowListFanart(ApiCall):
fanart = []
rating_names = {10: 'group', 20: 'favorite', 30: 'avoid'}
cache_obj = image_cache.ImageCache()
- for img in ek.ek(glob.glob, cache_obj.fanart_path(
- show_obj.tvid, show_obj.prodid).replace('fanart.jpg', '*')) or []:
+ for img in glob.glob(cache_obj.fanart_path(show_obj.tvid, show_obj.prodid).replace('fanart.jpg', '*')) or []:
match = re.search(r'(\d+(?:\.(\w*?(\d*)))?\.(?:\w{5,8}))\.fanart\.', img, re.I)
if match and match.group(1):
fanart += [(match.group(1), rating_names.get(sickgear.FANART_RATINGS.get(
@@ -3870,7 +3866,7 @@ class CMD_SickGearShowRateFanart(ApiCall):
cache_obj = image_cache.ImageCache()
fanartfile = cache_obj.fanart_path(self.tvid, self.prodid).replace('fanart.jpg',
'%s.fanart.jpg' % self.fanartname)
- if not ek.ek(os.path.isfile, fanartfile):
+ if not os.path.isfile(fanartfile):
return _responds(RESULT_FAILURE, msg='Unknown Fanart')
fan_ratings = {'unrate': 0, 'group': 10, 'favorite': 20, 'avoid': 30}
show_id = TVidProdid({self.tvid: self.prodid})()
@@ -3906,19 +3902,19 @@ class CMD_SickGearShowGetFanart(ApiCall):
def run(self):
""" get the fanart stored for a show """
cache_obj = image_cache.ImageCache()
- default_fanartfile = ek.ek(os.path.join, sickgear.PROG_DIR, 'gui', 'slick', 'images', 'trans.png')
+ default_fanartfile = os.path.join(sickgear.PROG_DIR, 'gui', 'slick', 'images', 'trans.png')
fanartfile = default_fanartfile
used_fanart = 'default'
if self.fanartname:
fanartfile = cache_obj.fanart_path(self.tvid, self.prodid).replace('fanart.jpg',
'%s.fanart.jpg' % self.fanartname)
- if not ek.ek(os.path.isfile, fanartfile):
+ if not os.path.isfile(fanartfile):
fanartfile = default_fanartfile
used_fanart = self.fanartname
else:
fanart = []
- for img in ek.ek(glob.glob, cache_obj.fanart_path(self.tvid, self.prodid).replace('fanart.jpg', '*')) or []:
- if not ek.ek(os.path.isfile, img):
+ for img in glob.glob(cache_obj.fanart_path(self.tvid, self.prodid).replace('fanart.jpg', '*')) or []:
+ if not os.path.isfile(img):
continue
match = re.search(r'(\d+(?:\.(\w*?(\d*)))?\.(?:\w{5,8}))\.fanart\.', img, re.I)
if match and match.group(1):
@@ -3933,8 +3929,8 @@ class CMD_SickGearShowGetFanart(ApiCall):
fanartfile = fanartsorted[random_fanart][0]
used_fanart = fanartsorted[random_fanart][1]
- if fanartfile and ek.ek(os.path.isfile, fanartfile):
- with ek.ek(open, fanartfile, 'rb') as f:
+ if fanartfile and os.path.isfile(fanartfile):
+ with open(fanartfile, 'rb') as f:
mime_type, encoding = MimeTypes().guess_type(fanartfile)
self.handler.set_header('X-Fanartname', used_fanart)
self.handler.set_header('Content-Type', mime_type)
@@ -4021,7 +4017,7 @@ class CMD_SickGearShowRefresh(ApiCall):
return _responds(RESULT_FAILURE, msg="Show not found")
try:
- sickgear.show_queue_scheduler.action.refreshShow(show_obj)
+ sickgear.show_queue_scheduler.action.refresh_show(show_obj)
return _responds(RESULT_SUCCESS, msg='%s has queued to be refreshed' % show_obj.unique_name)
except exceptions_helper.CantRefreshException as e:
# TODO: log the exception
@@ -4443,7 +4439,7 @@ class CMD_SickGearShowUpdate(ApiCall):
return _responds(RESULT_FAILURE, msg="Show not found")
try:
- sickgear.show_queue_scheduler.action.updateShow(show_obj, True)
+ sickgear.show_queue_scheduler.action.update_show(show_obj, True)
return _responds(RESULT_SUCCESS, msg='%s has queued to be updated' % show_obj.unique_name)
except exceptions_helper.CantUpdateException as e:
self.log(u'Unable to update %s. %s' % (show_obj.unique_name, ex(e)), logger.ERROR)
@@ -4655,7 +4651,7 @@ class CMD_SickGearShowsForceUpdate(ApiCall):
def run(self):
""" force the daily show update now """
- if sickgear.show_queue_scheduler.action.isShowUpdateRunning() \
+ if sickgear.show_queue_scheduler.action.is_show_update_running() \
or sickgear.show_update_scheduler.action.amActive:
return _responds(RESULT_FAILURE, msg="show update already running.")
diff --git a/sickgear/webserve.py b/sickgear/webserve.py
index 13ab4a6d..cfbafc7a 100644
--- a/sickgear/webserve.py
+++ b/sickgear/webserve.py
@@ -37,8 +37,6 @@ import zipfile
from exceptions_helper import ex, MultipleShowObjectsException
import exceptions_helper
-# noinspection PyPep8Naming
-import encodingKludge as ek
from json_helper import json_dumps, json_loads
import sg_helpers
from sg_helpers import remove_file, scantree
@@ -173,7 +171,7 @@ class BaseStaticFileHandler(StaticFileHandler):
return super(BaseStaticFileHandler, self).write_error(status_code, **kwargs)
def validate_absolute_path(self, root, absolute_path):
- if '\\images\\flags\\' in absolute_path and not ek.ek(os.path.isfile, absolute_path):
+ if '\\images\\flags\\' in absolute_path and not os.path.isfile(absolute_path):
absolute_path = re.sub(r'\\[^\\]+\.png$', '\\\\unknown.png', absolute_path)
return super(BaseStaticFileHandler, self).validate_absolute_path(root, absolute_path)
@@ -281,10 +279,10 @@ class BaseHandler(RouteHandler):
return True
def get_image(self, image):
- if ek.ek(os.path.isfile, image):
+ if os.path.isfile(image):
mime_type, encoding = MimeTypes().guess_type(image)
self.set_header('Content-Type', mime_type)
- with ek.ek(open, image, 'rb') as img:
+ with open(image, 'rb') as img:
return img.read()
def show_poster(self, tvid_prodid=None, which=None, api=None):
@@ -316,19 +314,19 @@ class BaseHandler(RouteHandler):
('%s' % (re.sub(r'.*?fanart_(\d+(?:\.\w{1,20})?\.\w{5,8}).*', r'\1.', which, 0, re.I)),))]
for cur_name in image_file_name:
- if ek.ek(os.path.isfile, cur_name):
+ if os.path.isfile(cur_name):
static_image_path = cur_name
break
if api:
- used_file = ek.ek(os.path.basename, static_image_path)
+ used_file = os.path.basename(static_image_path)
if static_image_path.startswith('/images'):
used_file = 'default'
- static_image_path = ek.ek(os.path.join, sickgear.PROG_DIR, 'gui', 'slick', static_image_path[1:])
+ static_image_path = os.path.join(sickgear.PROG_DIR, 'gui', 'slick', static_image_path[1:])
mime_type, encoding = MimeTypes().guess_type(static_image_path)
self.set_header('Content-Type', mime_type)
self.set_header('X-Filename', used_file)
- with ek.ek(open, static_image_path, 'rb') as img:
+ with open(static_image_path, 'rb') as img:
return img.read()
else:
static_image_path = os.path.normpath(static_image_path.replace(sickgear.CACHE_DIR, '/cache'))
@@ -472,37 +470,37 @@ class RepoHandler(BaseStaticFileHandler):
super(RepoHandler, self).initialize(*args, **kwargs)
logger.log('Kodi req... initialize(path): %s' % kwargs['path'], logger.DEBUG)
- cache_client = ek.ek(os.path.join, sickgear.CACHE_DIR, 'clients')
- cache_client_kodi = ek.ek(os.path.join, cache_client, 'kodi')
- cache_client_kodi_watchedstate = ek.ek(os.path.join, cache_client_kodi, 'service.sickgear.watchedstate.updater')
+ cache_client = os.path.join(sickgear.CACHE_DIR, 'clients')
+ cache_client_kodi = os.path.join(cache_client, 'kodi')
+ cache_client_kodi_watchedstate = os.path.join(cache_client_kodi, 'service.sickgear.watchedstate.updater')
- cache_resources = ek.ek(os.path.join, cache_client_kodi_watchedstate, 'resources')
- cache_lang = ek.ek(os.path.join, cache_resources, 'language')
- cache_other_lang = ek.ek(os.path.join, cache_lang, ('English', 'resource.language.en_gb')[self.kodi_is_legacy])
- ek.ek(os.path.exists, cache_other_lang) and remove_file(cache_other_lang, tree=True)
+ cache_resources = os.path.join(cache_client_kodi_watchedstate, 'resources')
+ cache_lang = os.path.join(cache_resources, 'language')
+ cache_other_lang = os.path.join(cache_lang, ('English', 'resource.language.en_gb')[self.kodi_is_legacy])
+ os.path.exists(cache_other_lang) and remove_file(cache_other_lang, tree=True)
- cache_lang_sub = ek.ek(os.path.join, cache_lang, ('resource.language.en_gb', 'English')[self.kodi_is_legacy])
+ cache_lang_sub = os.path.join(cache_lang, ('resource.language.en_gb', 'English')[self.kodi_is_legacy])
for folder in (cache_client,
cache_client_kodi,
- ek.ek(os.path.join, cache_client_kodi, 'repository.sickgear'),
+ os.path.join(cache_client_kodi, 'repository.sickgear'),
cache_client_kodi_watchedstate,
- ek.ek(os.path.join, cache_resources),
+ os.path.join(cache_resources),
cache_lang, cache_lang_sub,
):
- if not ek.ek(os.path.exists, folder):
- ek.ek(os.mkdir, folder)
+ if not os.path.exists(folder):
+ os.mkdir(folder)
- with io.open(ek.ek(os.path.join, cache_client_kodi, 'index.html'), 'w') as fh:
+ with io.open(os.path.join(cache_client_kodi, 'index.html'), 'w') as fh:
fh.write(self.render_kodi_index())
- with io.open(ek.ek(os.path.join, cache_client_kodi, 'repository.sickgear', 'index.html'), 'w') as fh:
+ with io.open(os.path.join(cache_client_kodi, 'repository.sickgear', 'index.html'), 'w') as fh:
fh.write(self.render_kodi_repository_sickgear_index())
- with io.open(ek.ek(os.path.join, cache_client_kodi_watchedstate, 'index.html'), 'w') as fh:
+ with io.open(os.path.join(cache_client_kodi_watchedstate, 'index.html'), 'w') as fh:
fh.write(self.render_kodi_service_sickgear_watchedstate_updater_index())
- with io.open(ek.ek(os.path.join, cache_resources, 'index.html'), 'w') as fh:
+ with io.open(os.path.join(cache_resources, 'index.html'), 'w') as fh:
fh.write(self.render_kodi_service_sickgear_watchedstate_updater_resources_index())
- with io.open(ek.ek(os.path.join, cache_lang, 'index.html'), 'w') as fh:
+ with io.open(os.path.join(cache_lang, 'index.html'), 'w') as fh:
fh.write(self.render_kodi_service_sickgear_watchedstate_updater_resources_language_index())
- with io.open(ek.ek(os.path.join, cache_lang_sub, 'index.html'), 'w') as fh:
+ with io.open(os.path.join(cache_lang_sub, 'index.html'), 'w') as fh:
fh.write(self.render_kodi_service_sickgear_watchedstate_updater_resources_language_english_index())
'''
@@ -511,7 +509,7 @@ class RepoHandler(BaseStaticFileHandler):
if repo rendered md5 changes or flag is true, update the repo addon, where repo version *must* be increased
'''
- repo_md5_file = ek.ek(os.path.join, cache_client_kodi, 'addons.xml.md5')
+ repo_md5_file = os.path.join(cache_client_kodi, 'addons.xml.md5')
saved_md5 = None
try:
with io.open(repo_md5_file, 'r', encoding='utf8') as fh:
@@ -520,18 +518,18 @@ class RepoHandler(BaseStaticFileHandler):
pass
rendered_md5 = self.render_kodi_repo_addons_xml_md5()
if saved_md5 != rendered_md5:
- with io.open(ek.ek(os.path.join, cache_client_kodi, 'repository.sickgear', 'addon.xml'), 'w') as fh:
+ with io.open(os.path.join(cache_client_kodi, 'repository.sickgear', 'addon.xml'), 'w') as fh:
fh.write(self.render_kodi_repo_addon_xml())
- with io.open(ek.ek(os.path.join, cache_client_kodi_watchedstate, 'addon.xml'), 'w') as fh:
+ with io.open(os.path.join(cache_client_kodi_watchedstate, 'addon.xml'), 'w') as fh:
fh.write(self.get_watchedstate_updater_addon_xml())
- with io.open(ek.ek(os.path.join, cache_client_kodi, 'addons.xml'), 'w') as fh:
+ with io.open(os.path.join(cache_client_kodi, 'addons.xml'), 'w') as fh:
fh.write(self.render_kodi_repo_addons_xml())
- with io.open(ek.ek(os.path.join, cache_client_kodi, 'addons.xml.md5'), 'w') as fh:
+ with io.open(os.path.join(cache_client_kodi, 'addons.xml.md5'), 'w') as fh:
fh.write(rendered_md5)
def save_zip(name, version, zip_path, zip_method):
zip_name = '%s-%s.zip' % (name, version)
- zip_file = ek.ek(os.path.join, zip_path, zip_name)
+ zip_file = os.path.join(zip_path, zip_name)
for direntry in helpers.scantree(zip_path, ['resources'], [r'\.(?:md5|zip)$'], filter_kind=False):
remove_file_perm(direntry.path)
zip_data = zip_method()
@@ -539,11 +537,11 @@ class RepoHandler(BaseStaticFileHandler):
zh.write(zip_data)
# Force a UNIX line ending, like the md5sum utility.
- with io.open(ek.ek(os.path.join, zip_path, '%s.md5' % zip_name), 'w', newline='\n') as zh:
+ with io.open(os.path.join(zip_path, '%s.md5' % zip_name), 'w', newline='\n') as zh:
zh.write(u'%s *%s\n' % (self.md5ify(zip_data), zip_name))
aid, ver = self.repo_sickgear_details()
- save_zip(aid, ver, ek.ek(os.path.join, cache_client_kodi, 'repository.sickgear'),
+ save_zip(aid, ver, os.path.join(cache_client_kodi, 'repository.sickgear'),
self.kodi_repository_sickgear_zip)
aid, ver = self.addon_watchedstate_details()
@@ -566,8 +564,8 @@ class RepoHandler(BaseStaticFileHandler):
(cache_lang_sub, 'strings.xml')
))[self.kodi_is_legacy],
):
- helpers.copy_file(ek.ek(
- os.path.join, *(sickgear.PROG_DIR, 'sickgear', 'clients', 'kodi') + src), ek.ek(os.path.join, *dst))
+ helpers.copy_file(
+ os.path.join(*(sickgear.PROG_DIR, 'sickgear', 'clients', 'kodi') + src), os.path.join(*dst))
def get_content_type(self):
if '.md5' == self.absolute_path[-4:] or '.po' == self.absolute_path[-3:]:
@@ -583,7 +581,7 @@ class RepoHandler(BaseStaticFileHandler):
t.addon = '%s-%s.zip' % self.addon_watchedstate_details()
try:
- with open(ek.ek(os.path.join, sickgear.PROG_DIR, 'CHANGES.md')) as fh:
+ with open(os.path.join(sickgear.PROG_DIR, 'CHANGES.md')) as fh:
t.version = re.findall(r'###[^0-9x]+([0-9]+\.[0-9]+\.[0-9x]+)', fh.readline())[0]
except (BaseException, Exception):
t.version = ''
@@ -640,8 +638,8 @@ class RepoHandler(BaseStaticFileHandler):
return sickgear.MEMCACHE.get(mem_key).get('data')
filename = 'addon%s.xml' % self.kodi_include
- with io.open(ek.ek(os.path.join, sickgear.PROG_DIR, 'sickgear', 'clients',
- 'kodi', 'service.sickgear.watchedstate.updater', filename), 'r', encoding='utf8') as fh:
+ with io.open(os.path.join(sickgear.PROG_DIR, 'sickgear', 'clients', 'kodi',
+ 'service.sickgear.watchedstate.updater', filename), 'r', encoding='utf8') as fh:
xml = fh.read().strip() % dict(ADDON_VERSION=self.get_addon_version(self.kodi_include))
sickgear.MEMCACHE[mem_key] = dict(last_update=30 + int(timestamp_near(datetime.datetime.now())), data=xml)
@@ -662,8 +660,8 @@ class RepoHandler(BaseStaticFileHandler):
return sickgear.MEMCACHE.get(mem_key).get('data')
filename = 'service%s.py' % kodi_include
- with io.open(ek.ek(os.path.join, sickgear.PROG_DIR, 'sickgear', 'clients',
- 'kodi', 'service.sickgear.watchedstate.updater', filename), 'r', encoding='utf8') as fh:
+ with io.open(os.path.join(sickgear.PROG_DIR, 'sickgear', 'clients', 'kodi',
+ 'service.sickgear.watchedstate.updater', filename), 'r', encoding='utf8') as fh:
version = re.findall(r'ADDON_VERSION\s*?=\s*?\'([^\']+)', fh.read())[0]
sickgear.MEMCACHE[mem_key] = dict(last_update=30 + int(timestamp_near(datetime.datetime.now())), data=version)
@@ -705,8 +703,8 @@ class RepoHandler(BaseStaticFileHandler):
with zipfile.ZipFile(bfr, 'w') as zh:
zh.writestr('repository.sickgear/addon.xml', self.render_kodi_repo_addon_xml(), zipfile.ZIP_DEFLATED)
- with io.open(ek.ek(os.path.join, sickgear.PROG_DIR,
- 'sickgear', 'clients', 'kodi', 'repository.sickgear', 'icon.png'), 'rb') as fh:
+ with io.open(os.path.join(sickgear.PROG_DIR, 'sickgear', 'clients', 'kodi',
+ 'repository.sickgear', 'icon.png'), 'rb') as fh:
infile = fh.read()
zh.writestr('repository.sickgear/icon.png', infile, zipfile.ZIP_DEFLATED)
except OSError as e:
@@ -719,12 +717,12 @@ class RepoHandler(BaseStaticFileHandler):
def kodi_service_sickgear_watchedstate_updater_zip(self):
bfr = io.BytesIO()
- basepath = ek.ek(os.path.join, sickgear.PROG_DIR, 'sickgear', 'clients', 'kodi')
+ basepath = os.path.join(sickgear.PROG_DIR, 'sickgear', 'clients', 'kodi')
- zip_path = ek.ek(os.path.join, basepath, 'service.sickgear.watchedstate.updater')
- devenv_src = ek.ek(os.path.join, sickgear.PROG_DIR, 'tests', '_devenv.py')
- devenv_dst = ek.ek(os.path.join, zip_path, '_devenv.py')
- if sickgear.ENV.get('DEVENV') and ek.ek(os.path.exists, devenv_src):
+ zip_path = os.path.join(basepath, 'service.sickgear.watchedstate.updater')
+ devenv_src = os.path.join(sickgear.PROG_DIR, 'tests', '_devenv.py')
+ devenv_dst = os.path.join(zip_path, '_devenv.py')
+ if sickgear.ENV.get('DEVENV') and os.path.exists(devenv_src):
helpers.copy_file(devenv_src, devenv_dst)
else:
helpers.remove_file_perm(devenv_dst)
@@ -746,7 +744,7 @@ class RepoHandler(BaseStaticFileHandler):
infile = fh.read()
with zipfile.ZipFile(bfr, 'a') as zh:
- zh.writestr(ek.ek(os.path.relpath, direntry.path.replace(self.kodi_legacy, ''), basepath),
+ zh.writestr(os.path.relpath(direntry.path.replace(self.kodi_legacy, ''), basepath),
infile, zipfile.ZIP_DEFLATED)
except OSError as e:
logger.log('Unable to zip %s: %r / %s' % (direntry.path, e, ex(e)), logger.WARNING)
@@ -890,7 +888,7 @@ class LogfileHandler(BaseHandler):
self.set_header('Content-Type', 'text/html; charset=utf-8')
self.set_header('Content-Description', 'Logfile Download')
self.set_header('Content-Disposition', 'attachment; filename=sickgear.log')
- # self.set_header('Content-Length', ek.ek(os.path.getsize, logfile_name))
+ # self.set_header('Content-Length', os.path.getsize(logfile_name))
auths = sickgear.GenericProvider.dedupe_auths(True)
rxc_auths = re.compile('(?i)%s' % '|'.join([(re.escape(_a)) for _a in auths]))
replacements = dict([(_a, starify(_a)) for _a in auths])
@@ -1192,7 +1190,7 @@ class MainHandler(WebHandler):
if tvid_prodid in fanarts:
continue
- for img in ek.ek(glob.glob, cache_obj.fanart_path(*tvid_prodid_obj.tuple).replace('fanart.jpg', '*')) or []:
+ for img in glob.glob(cache_obj.fanart_path(*tvid_prodid_obj.tuple).replace('fanart.jpg', '*')) or []:
match = re.search(r'(\d+(?:\.\w*)?\.\w{5,8})\.fanart\.', img, re.I)
if not match:
continue
@@ -1276,8 +1274,8 @@ class MainHandler(WebHandler):
elif 'backart' in kwargs:
sickgear.EPISODE_VIEW_BACKGROUND = backart
sickgear.FANART_PANEL = 'highlight-off' == sickgear.FANART_PANEL and 'highlight-off' or \
- 'highlight2' == sickgear.FANART_PANEL and 'highlight1' or \
- 'highlight1' == sickgear.FANART_PANEL and 'highlight' or 'highlight-off'
+ 'highlight2' == sickgear.FANART_PANEL and 'highlight1' or \
+ 'highlight1' == sickgear.FANART_PANEL and 'highlight' or 'highlight-off'
elif 'viewmode' in kwargs:
sickgear.EPISODE_VIEW_VIEWMODE = viewmode
@@ -1395,7 +1393,7 @@ r.close()
if data:
my_db = db.DBConnection(row_type='dict')
- media_paths = map_list(lambda arg: ek.ek(os.path.basename, arg[1]['path_file']), iteritems(data))
+ media_paths = map_list(lambda arg: os.path.basename(arg[1]['path_file']), iteritems(data))
def chunks(lines, n):
for c in range(0, len(lines), n):
@@ -1412,13 +1410,13 @@ r.close()
cl = []
ep_results = {}
- map_consume(lambda r: ep_results.update({'%s' % ek.ek(os.path.basename, r['location']).lower(): dict(
+ map_consume(lambda r: ep_results.update({'%s' % os.path.basename(r['location']).lower(): dict(
episode_id=r['episode_id'], status=r['status'], location=r['location'],
file_size=r['file_size'])}), sql_result)
for (k, v) in iteritems(data):
- bname = (ek.ek(os.path.basename, v.get('path_file')) or '').lower()
+ bname = (os.path.basename(v.get('path_file')) or '').lower()
if not bname:
msg = 'Missing media file name provided'
data[k] = msg
@@ -1581,15 +1579,15 @@ class Home(MainHandler):
if 'simple' != sickgear.HOME_LAYOUT:
t.network_images = {}
networks = {}
- images_path = ek.ek(os.path.join, sickgear.PROG_DIR, 'gui', 'slick', 'images', 'network')
+ images_path = os.path.join(sickgear.PROG_DIR, 'gui', 'slick', 'images', 'network')
for cur_show_obj in sickgear.showList:
network_name = 'nonetwork' if None is cur_show_obj.network \
else cur_show_obj.network.replace(u'\u00C9', 'e').lower()
if network_name not in networks:
filename = u'%s.png' % network_name
- if not ek.ek(os.path.isfile, ek.ek(os.path.join, images_path, filename)):
+ if not os.path.isfile(os.path.join(images_path, filename)):
filename = u'%s.png' % re.sub(r'(?m)(.*)\s+\(\w{2}\)$', r'\1', network_name)
- if not ek.ek(os.path.isfile, ek.ek(os.path.join, images_path, filename)):
+ if not os.path.isfile(os.path.join(images_path, filename)):
filename = u'nonetwork.png'
networks.setdefault(network_name, filename)
t.network_images.setdefault(cur_show_obj.tvid_prodid, networks[network_name])
@@ -2141,25 +2139,25 @@ class Home(MainHandler):
show_message = []
- if sickgear.show_queue_scheduler.action.isBeingAdded(show_obj):
+ if sickgear.show_queue_scheduler.action.is_being_added(show_obj):
show_message = ['Downloading this show, the information below is incomplete']
- elif sickgear.show_queue_scheduler.action.isBeingUpdated(show_obj):
+ elif sickgear.show_queue_scheduler.action.is_being_updated(show_obj):
show_message = ['Updating information for this show']
- elif sickgear.show_queue_scheduler.action.isBeingRefreshed(show_obj):
+ elif sickgear.show_queue_scheduler.action.is_being_refreshed(show_obj):
show_message = ['Refreshing episodes from disk for this show']
- elif sickgear.show_queue_scheduler.action.isBeingSubtitled(show_obj):
+ elif sickgear.show_queue_scheduler.action.is_being_subtitled(show_obj):
show_message = ['Downloading subtitles for this show']
- elif sickgear.show_queue_scheduler.action.isInRefreshQueue(show_obj):
+ elif sickgear.show_queue_scheduler.action.is_in_refresh_queue(show_obj):
show_message = ['Refresh queued for this show']
- elif sickgear.show_queue_scheduler.action.isInUpdateQueue(show_obj):
+ elif sickgear.show_queue_scheduler.action.is_in_update_queue(show_obj):
show_message = ['Update queued for this show']
- elif sickgear.show_queue_scheduler.action.isInSubtitleQueue(show_obj):
+ elif sickgear.show_queue_scheduler.action.is_in_subtitle_queue(show_obj):
show_message = ['Subtitle download queued for this show']
if sickgear.show_queue_scheduler.action.is_show_being_switched(show_obj):
@@ -2185,8 +2183,8 @@ class Home(MainHandler):
show_message = '. '.join(show_message)
t.force_update = 'home/update-show?tvid_prodid=%s&force=1&web=1' % tvid_prodid
- if not sickgear.show_queue_scheduler.action.isBeingAdded(show_obj):
- if not sickgear.show_queue_scheduler.action.isBeingUpdated(show_obj):
+ if not sickgear.show_queue_scheduler.action.is_being_added(show_obj):
+ if not sickgear.show_queue_scheduler.action.is_being_updated(show_obj):
t.submenu.append(
{'title': 'Remove',
'path': 'home/delete-show?tvid_prodid=%s' % tvid_prodid, 'confirm': True})
@@ -2211,7 +2209,7 @@ class Home(MainHandler):
t.submenu.append(
{'title': 'Media Rename',
'path': 'home/rename-media?tvid_prodid=%s' % tvid_prodid})
- if sickgear.USE_SUBTITLES and not sickgear.show_queue_scheduler.action.isBeingSubtitled(
+ if sickgear.USE_SUBTITLES and not sickgear.show_queue_scheduler.action.is_being_subtitled(
show_obj) and show_obj.subtitles:
t.submenu.append(
{'title': 'Download Subtitles',
@@ -2355,8 +2353,7 @@ class Home(MainHandler):
t.fanart = []
cache_obj = image_cache.ImageCache()
- for img in ek.ek(glob.glob,
- cache_obj.fanart_path(show_obj.tvid, show_obj.prodid).replace('fanart.jpg', '*')) or []:
+ for img in glob.glob(cache_obj.fanart_path(show_obj.tvid, show_obj.prodid).replace('fanart.jpg', '*')) or []:
match = re.search(r'(\d+(?:\.(\w*?(\d*)))?\.\w{5,8})\.fanart\.', img, re.I)
if match and match.group(1):
t.fanart += [(match.group(1),
@@ -2544,8 +2541,8 @@ class Home(MainHandler):
show_obj = helpers.find_show_by_id({tvid: prodid}, no_mapped_ids=True)
try:
sickgear.show_queue_scheduler.action.switch_show(show_obj=show_obj, new_tvid=m_tvid,
- new_prodid=m_prodid, force_id=True,
- set_pause=set_pause, mark_wanted=mark_wanted)
+ new_prodid=m_prodid, force_id=True,
+ set_pause=set_pause, mark_wanted=mark_wanted)
except (BaseException, Exception) as e:
logger.log('Could not add show %s to switch queue: %s' % (show_obj.tvid_prodid, ex(e)), logger.WARNING)
@@ -2666,7 +2663,7 @@ class Home(MainHandler):
t.fanart = []
cache_obj = image_cache.ImageCache()
show_obj = getattr(t, 'show_obj', None) or getattr(t, 'show', None)
- for img in ek.ek(glob.glob, cache_obj.fanart_path(
+ for img in glob.glob(cache_obj.fanart_path(
show_obj.tvid, show_obj.prodid).replace('fanart.jpg', '*')) or []:
match = re.search(r'(\d+(?:\.(\w*?(\d*)))?\.\w{5,8})\.fanart\.', img, re.I)
if match and match.group(1):
@@ -2849,7 +2846,7 @@ class Home(MainHandler):
if bool(show_obj.flatten_folders) != bool(flatten_folders):
show_obj.flatten_folders = flatten_folders
try:
- sickgear.show_queue_scheduler.action.refreshShow(show_obj)
+ sickgear.show_queue_scheduler.action.refresh_show(show_obj)
except exceptions_helper.CantRefreshException as e:
errors.append('Unable to refresh this show: ' + ex(e))
@@ -2894,11 +2891,11 @@ class Home(MainHandler):
# if we change location clear the db of episodes, change it, write to db, and rescan
# noinspection PyProtectedMember
- old_path = ek.ek(os.path.normpath, show_obj._location)
- new_path = ek.ek(os.path.normpath, location)
+ old_path = os.path.normpath(show_obj._location)
+ new_path = os.path.normpath(location)
if old_path != new_path:
logger.log(u'%s != %s' % (old_path, new_path), logger.DEBUG)
- if not ek.ek(os.path.isdir, new_path) and not sickgear.CREATE_MISSING_SHOW_DIRS:
+ if not os.path.isdir(new_path) and not sickgear.CREATE_MISSING_SHOW_DIRS:
errors.append(u'New location %s does not exist' % new_path)
# don't bother if we're going to update anyway
@@ -2907,7 +2904,7 @@ class Home(MainHandler):
try:
show_obj.location = new_path
try:
- sickgear.show_queue_scheduler.action.refreshShow(show_obj)
+ sickgear.show_queue_scheduler.action.refresh_show(show_obj)
except exceptions_helper.CantRefreshException as e:
errors.append('Unable to refresh this show:' + ex(e))
# grab updated info from TVDB
@@ -2924,7 +2921,7 @@ class Home(MainHandler):
# force the update
if do_update:
try:
- sickgear.show_queue_scheduler.action.updateShow(show_obj, True)
+ sickgear.show_queue_scheduler.action.update_show(show_obj, True)
helpers.cpu_sleep()
except exceptions_helper.CantUpdateException:
errors.append('Unable to force an update on the show.')
@@ -2962,8 +2959,8 @@ class Home(MainHandler):
if None is show_obj:
return self._generic_message('Error', 'Unable to find the specified show')
- if sickgear.show_queue_scheduler.action.isBeingAdded(
- show_obj) or sickgear.show_queue_scheduler.action.isBeingUpdated(show_obj):
+ if sickgear.show_queue_scheduler.action.is_being_added(
+ show_obj) or sickgear.show_queue_scheduler.action.is_being_updated(show_obj):
return self._generic_message("Error", "Shows can't be deleted while they're being added or updated.")
# if sickgear.USE_TRAKT and sickgear.TRAKT_SYNC:
@@ -3008,7 +3005,7 @@ class Home(MainHandler):
# force the update from the DB
try:
- sickgear.show_queue_scheduler.action.refreshShow(show_obj)
+ sickgear.show_queue_scheduler.action.refresh_show(show_obj)
except exceptions_helper.CantRefreshException as e:
ui.notifications.error('Unable to refresh this show.', ex(e))
@@ -3028,7 +3025,7 @@ class Home(MainHandler):
# force the update
try:
- sickgear.show_queue_scheduler.action.updateShow(show_obj, bool(force), bool(web))
+ sickgear.show_queue_scheduler.action.update_show(show_obj, bool(force), bool(web))
except exceptions_helper.CantUpdateException as e:
ui.notifications.error('Unable to update this show.',
ex(e))
@@ -3153,7 +3150,7 @@ class Home(MainHandler):
elif status in Quality.DOWNLOADED \
and ep_obj.status not in required + Quality.ARCHIVED + [IGNORED, SKIPPED] \
- and not ek.ek(os.path.isfile, ep_obj.location):
+ and not os.path.isfile(ep_obj.location):
err_msg = 'to downloaded because it\'s not snatched/downloaded/archived'
if err_msg:
@@ -4061,7 +4058,7 @@ class AddShows(Home):
any(ids_to_search[si] == results[cur_tvid][tv_src_id].get('ids', {})[si]
for si in ids_to_search):
ids_search_used.update({k: v for k, v in iteritems(
- results[cur_tvid][tv_src_id].get('ids',{}))
+ results[cur_tvid][tv_src_id].get('ids', {}))
if v and k not in iterkeys(ids_to_search)})
results[cur_tvid][tv_src_id]['rename_suggest'] = '' \
if not results[cur_tvid][tv_src_id]['firstaired'] \
@@ -4110,7 +4107,8 @@ class AddShows(Home):
show['seriesname'], helpers.xhtml_escape(show['seriesname']), show['firstaired'],
(isinstance(show['firstaired'], string_types)
and SGDatetime.sbfdate(_parse_date(show['firstaired'])) or ''),
- show.get('network', '') or '', (show.get('genres', '') or show.get('genre', '') or '').replace('|', ', '), # 11 - 12
+ show.get('network', '') or '', # 11
+ (show.get('genres', '') or show.get('genre', '') or '').replace('|', ', '), # 12
show.get('language', ''), show.get('language_country_code') or '', # 13 - 14
re.sub(r'([,.!][^,.!]*?)$', '...',
re.sub(r'([.!?])(?=\w)', r'\1 ',
@@ -4275,7 +4273,7 @@ class AddShows(Home):
try:
for cur_dir in scantree(cur_root_dir, filter_kind=True, recurse=False):
- normpath = ek.ek(os.path.normpath, cur_dir.path)
+ normpath = os.path.normpath(cur_dir.path)
highlight = hash_dir == re.sub('[^a-z]', '', sg_helpers.md5_for_text(normpath))
if hash_dir:
display_one_dir = highlight
@@ -4318,7 +4316,7 @@ class AddShows(Home):
if display_one_dir and not cur_data['highlight'][cur_enum]:
continue
- dir_item = dict(normpath=cur_normpath, rootpath='%s%s' % (ek.ek(os.path.dirname, cur_normpath), os.sep),
+ dir_item = dict(normpath=cur_normpath, rootpath='%s%s' % (os.path.dirname(cur_normpath), os.sep),
name=cur_data['name'][cur_enum], added_already=any(cur_data['exists'][cur_enum]),
highlight=cur_data['highlight'][cur_enum])
@@ -4330,7 +4328,7 @@ class AddShows(Home):
if prodid and show_name:
break
- (tvid, prodid, show_name) = cur_provider.retrieveShowMetadata(cur_normpath)
+ (tvid, prodid, show_name) = cur_provider.retrieve_show_metadata(cur_normpath)
# default to TVDB if TV info src was not detected
if show_name and (not tvid or not prodid):
@@ -4376,7 +4374,7 @@ class AddShows(Home):
elif not show_dir:
t.default_show_name = ''
elif not show_name:
- t.default_show_name = ek.ek(os.path.basename, ek.ek(os.path.normpath, show_dir)).replace('.', ' ')
+ t.default_show_name = os.path.basename(os.path.normpath(show_dir)).replace('.', ' ')
else:
t.default_show_name = show_name
@@ -5948,7 +5946,7 @@ class AddShows(Home):
tvid, void, prodid, show_name = self.split_extra_show(which_series)
if bool(helpers.try_int(cancel_form)):
tvid = tvid or provided_tvid or '0'
- prodid = re.findall(r'tvid_prodid=[^%s]+%s([\d]+)' % tuple(2 * [TVidProdid.glue]), return_to)[0]
+ prodid = re.findall(r'tvid_prodid=[^%s]+%s(\d+)' % tuple(2 * [TVidProdid.glue]), return_to)[0]
return self.redirect(return_to % (tvid, prodid))
# grab our list of other dirs if given
@@ -6001,14 +5999,14 @@ class AddShows(Home):
# use the whole path if it's given, or else append the show name to the root dir to get the full show path
if full_show_path:
- show_dir = ek.ek(os.path.normpath, full_show_path)
+ show_dir = os.path.normpath(full_show_path)
new_show = False
else:
show_dir = helpers.generate_show_dir_name(root_dir, show_name)
new_show = True
# if the dir exists, do 'add existing show'
- if ek.ek(os.path.isdir, show_dir) and not full_show_path:
+ if os.path.isdir(show_dir) and not full_show_path:
ui.notifications.error('Unable to add show', u'Found existing folder: ' + show_dir)
return self.redirect(
'/add-shows/import?tvid_prodid=%s%s%s&hash_dir=%s%s' %
@@ -6691,7 +6689,7 @@ class Manage(MainHandler):
for cur_show_obj in show_list:
# noinspection PyProtectedMember
- cur_root_dir = ek.ek(os.path.dirname, cur_show_obj._location)
+ cur_root_dir = os.path.dirname(cur_show_obj._location)
if cur_root_dir not in root_dir_list:
root_dir_list.append(cur_root_dir)
@@ -6817,11 +6815,11 @@ class Manage(MainHandler):
continue
# noinspection PyProtectedMember
- cur_root_dir = ek.ek(os.path.dirname, show_obj._location)
+ cur_root_dir = os.path.dirname(show_obj._location)
# noinspection PyProtectedMember
- cur_show_dir = ek.ek(os.path.basename, show_obj._location)
+ cur_show_dir = os.path.basename(show_obj._location)
if cur_root_dir in dir_map and cur_root_dir != dir_map[cur_root_dir]:
- new_show_dir = ek.ek(os.path.join, dir_map[cur_root_dir], cur_show_dir)
+ new_show_dir = os.path.join(dir_map[cur_root_dir], cur_show_dir)
if 'nt' != os.name and ':\\' in cur_show_dir:
# noinspection PyProtectedMember
cur_show_dir = show_obj._location.split('\\')[-1]
@@ -6829,7 +6827,7 @@ class Manage(MainHandler):
base_dir = dir_map[cur_root_dir].rsplit(cur_show_dir)[0].rstrip('/')
except IndexError:
base_dir = dir_map[cur_root_dir]
- new_show_dir = ek.ek(os.path.join, base_dir, cur_show_dir)
+ new_show_dir = os.path.join(base_dir, cur_show_dir)
# noinspection PyProtectedMember
logger.log(u'For show %s changing dir from %s to %s' %
(show_obj.unique_name, show_obj._location, new_show_dir))
@@ -6945,20 +6943,20 @@ class Manage(MainHandler):
else:
if cur_tvid_prodid in to_update:
try:
- sickgear.show_queue_scheduler.action.updateShow(cur_show_obj, True, True)
+ sickgear.show_queue_scheduler.action.update_show(cur_show_obj, True, True)
update.append(cur_show_obj.name)
except exceptions_helper.CantUpdateException as e:
errors.append('Unable to update show %s: %s' % (cur_show_obj.unique_name, ex(e)))
elif cur_tvid_prodid in to_refresh:
try:
- sickgear.show_queue_scheduler.action.refreshShow(cur_show_obj)
+ sickgear.show_queue_scheduler.action.refresh_show(cur_show_obj)
refresh.append(cur_show_obj.name)
except exceptions_helper.CantRefreshException as e:
errors.append('Unable to refresh show %s: %s' % (cur_show_obj.unique_name, ex(e)))
if cur_tvid_prodid in to_rename:
- sickgear.show_queue_scheduler.action.renameShowEpisodes(cur_show_obj)
+ sickgear.show_queue_scheduler.action.rename_show_episodes(cur_show_obj)
rename.append(cur_show_obj.name)
if sickgear.USE_SUBTITLES and cur_tvid_prodid in to_subtitle:
@@ -7067,7 +7065,7 @@ class Manage(MainHandler):
continue
try:
sickgear.show_queue_scheduler.action.switch_show(show_obj=show_obj, new_tvid=new_tvid,
- new_prodid=new_prodid, force_id=force_id)
+ new_prodid=new_prodid, force_id=force_id)
except (BaseException, Exception) as e:
logger.log('Could not add show %s to switch queue: %s' % (show_obj.tvid_prodid, ex(e)), logger.WARNING)
errors.append('Could not add show %s to switch queue: %s' % (show_obj.tvid_prodid, ex(e)))
@@ -7172,7 +7170,7 @@ class ShowTasks(Manage):
t.people_queue = sickgear.people_queue_scheduler.action.queue_data()
t.next_run = sickgear.show_update_scheduler.lastRun.replace(
hour=sickgear.show_update_scheduler.start_time.hour)
- t.show_update_running = sickgear.show_queue_scheduler.action.isShowUpdateRunning() \
+ t.show_update_running = sickgear.show_queue_scheduler.action.is_show_update_running() \
or sickgear.show_update_scheduler.action.amActive
my_db = db.DBConnection(row_type='dict')
@@ -7613,8 +7611,7 @@ class History(MainHandler):
rd = sickgear.ROOT_DIRS.split('|')[1:] \
+ [x.split('=')[0] for x in sickgear.EMBY_PARENT_MAPS.split(',') if any(x)]
- rootpaths = sorted(
- ['%s%s' % (ek.ek(os.path.splitdrive, x)[1], os.path.sep) for x in rd], key=len, reverse=True)
+ rootpaths = sorted(['%s%s' % (os.path.splitdrive(x)[1], os.path.sep) for x in rd], key=len, reverse=True)
rootdirs = sorted([x for x in rd], key=len, reverse=True)
headers = {'Content-type': 'application/json'}
states = {}
@@ -7667,8 +7664,8 @@ class History(MainHandler):
continue
for index, p in enumerate(rootpaths):
if p in path_file:
- path_file = ek.ek(os.path.join, rootdirs[index],
- re.sub('.*?%s' % re.escape(p), '', path_file))
+ path_file = os.path.join(
+ rootdirs[index], re.sub('.*?%s' % re.escape(p), '', path_file))
root_dir_found = True
break
if not root_dir_found:
@@ -7701,11 +7698,11 @@ class History(MainHandler):
if states:
# Prune user removed items that are no longer being returned by API
- media_paths = map_list(lambda arg: ek.ek(os.path.basename, arg[1]['path_file']), iteritems(states))
+ media_paths = map_list(lambda arg: os.path.basename(arg[1]['path_file']), iteritems(states))
sql = 'FROM tv_episodes_watched WHERE hide=1 AND label LIKE "%%{Emby}"'
my_db = db.DBConnection(row_type='dict')
files = my_db.select('SELECT location %s' % sql)
- for i in filter_iter(lambda f: ek.ek(os.path.basename, f['location']) not in media_paths, files):
+ for i in filter_iter(lambda f: os.path.basename(f['location']) not in media_paths, files):
loc = i.get('location')
if loc:
my_db.select('DELETE %s AND location="%s"' % (sql, loc))
@@ -7770,11 +7767,11 @@ class History(MainHandler):
if states:
# Prune user removed items that are no longer being returned by API
- media_paths = map_list(lambda arg: ek.ek(os.path.basename, arg[1]['path_file']), iteritems(states))
+ media_paths = map_list(lambda arg: os.path.basename(arg[1]['path_file']), iteritems(states))
sql = 'FROM tv_episodes_watched WHERE hide=1 AND label LIKE "%%{Plex}"'
my_db = db.DBConnection(row_type='dict')
files = my_db.select('SELECT location %s' % sql)
- for i in filter_iter(lambda f: ek.ek(os.path.basename, f['location']) not in media_paths, files):
+ for i in filter_iter(lambda f: os.path.basename(f['location']) not in media_paths, files):
loc = i.get('location')
if loc:
my_db.select('DELETE %s AND location="%s"' % (sql, loc))
@@ -7812,7 +7809,7 @@ class History(MainHandler):
refresh = []
for cur_result in sql_result:
if files and cur_result['location'] not in attempted and 0 < helpers.get_size(cur_result['location']) \
- and ek.ek(os.path.isfile, cur_result['location']):
+ and os.path.isfile(cur_result['location']):
# locations repeat with watch events but attempt to delete once
attempted += [cur_result['location']]
@@ -7855,7 +7852,7 @@ class History(MainHandler):
for tvid_prodid_dict in refresh:
try:
- sickgear.show_queue_scheduler.action.refreshShow(
+ sickgear.show_queue_scheduler.action.refresh_show(
helpers.find_show_by_id(tvid_prodid_dict))
except (BaseException, Exception):
pass
@@ -7899,7 +7896,7 @@ class Config(MainHandler):
t.submenu = self.config_menu()
try:
- with open(ek.ek(os.path.join, sickgear.PROG_DIR, 'CHANGES.md')) as fh:
+ with open(os.path.join(sickgear.PROG_DIR, 'CHANGES.md')) as fh:
t.version = re.findall(r'###[^0-9]+([0-9]+\.[0-9]+\.[0-9x]+)', fh.readline())[0]
except (BaseException, Exception):
t.version = ''
@@ -7909,18 +7906,18 @@ class Config(MainHandler):
t.tz_version = None
try:
if None is not current_file:
- current_file = ek.ek(os.path.basename, current_file)
- zonefile = real_path(ek.ek(os.path.join, sickgear.ZONEINFO_DIR, current_file))
- if not ek.ek(os.path.isfile, zonefile):
+ current_file = os.path.basename(current_file)
+ zonefile = real_path(os.path.join(sickgear.ZONEINFO_DIR, current_file))
+ if not os.path.isfile(zonefile):
t.tz_fallback = True
- zonefile = ek.ek(os.path.join, ek.ek(os.path.dirname, zoneinfo.__file__), current_file)
- if ek.ek(os.path.isfile, zonefile):
+ zonefile = os.path.join(os.path.dirname(zoneinfo.__file__), current_file)
+ if os.path.isfile(zonefile):
t.tz_version = zoneinfo.ZoneInfoFile(zoneinfo.getzoneinfofile_stream()).metadata['tzversion']
except (BaseException, Exception):
pass
t.backup_db_path = sickgear.BACKUP_DB_MAX_COUNT and \
- (sickgear.BACKUP_DB_PATH or ek.ek(os.path.join, sickgear.DATA_DIR, 'backup')) or 'Disabled'
+ (sickgear.BACKUP_DB_PATH or os.path.join(sickgear.DATA_DIR, 'backup')) or 'Disabled'
return t.respond()
@@ -8058,7 +8055,7 @@ class ConfigGeneral(Config):
best_qualities = ([], best_qualities.split(','))[any(best_qualities)]
sickgear.QUALITY_DEFAULT = int(Quality.combineQualities(map_list(int, any_qualities),
- map_list(int, best_qualities)))
+ map_list(int, best_qualities)))
sickgear.WANTED_BEGIN_DEFAULT = config.minimax(default_wanted_begin, 0, -1, 10)
sickgear.WANTED_LATEST_DEFAULT = config.minimax(default_wanted_latest, 0, -1, 10)
sickgear.SHOW_TAG_DEFAULT = default_tag
@@ -8114,7 +8111,7 @@ class ConfigGeneral(Config):
result.update(dict(result='Success: apikey added', added=api_key))
sickgear.USE_API = 1
sickgear.save_config()
- ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickgear.CONFIG_FILE))
+ ui.notifications.message('Configuration Saved', os.path.join(sickgear.CONFIG_FILE))
return json_dumps(result)
@@ -8132,7 +8129,7 @@ class ConfigGeneral(Config):
logger.log('Revoked [%s] apikey [%s]' % (app_name, api_key), logger.DEBUG)
result.update(dict(result='Success: apikey removed', removed=True))
sickgear.save_config()
- ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickgear.CONFIG_FILE))
+ ui.notifications.message('Configuration Saved', os.path.join(sickgear.CONFIG_FILE))
return json_dumps(result)
@@ -8287,7 +8284,7 @@ class ConfigGeneral(Config):
ui.notifications.error('Error(s) Saving Configuration',
' \n'.join(results))
else:
- ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickgear.CONFIG_FILE))
+ ui.notifications.message('Configuration Saved', os.path.join(sickgear.CONFIG_FILE))
if restart:
self.clear_cookie('sickgear-session-%s' % helpers.md5_for_text(sickgear.WEB_PORT))
@@ -8458,7 +8455,7 @@ class ConfigSearch(Config):
ui.notifications.error('Error(s) Saving Configuration',
' \n'.join(results))
else:
- ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickgear.CONFIG_FILE))
+ ui.notifications.message('Configuration Saved', os.path.join(sickgear.CONFIG_FILE))
self.redirect('/config/search/')
@@ -8585,7 +8582,7 @@ class ConfigMediaProcess(Config):
ui.notifications.error('Error(s) Saving Configuration',
' \n'.join(results))
else:
- ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickgear.CONFIG_FILE))
+ ui.notifications.message('Configuration Saved', os.path.join(sickgear.CONFIG_FILE))
self.redirect('/config/media-process/')
@@ -8600,7 +8597,7 @@ class ConfigMediaProcess(Config):
result = naming.test_name(pattern, multi, abd, sports, anime, anime_type)
- result = ek.ek(os.path.join, result['dir'], result['name'])
+ result = os.path.join(result['dir'], result['name'])
return result
@@ -8647,8 +8644,8 @@ class ConfigMediaProcess(Config):
try:
if 'win32' == sys.platform:
- rarfile.UNRAR_TOOL = ek.ek(os.path.join, sickgear.PROG_DIR, 'lib', 'rarfile', 'UnRAR.exe')
- rar_path = ek.ek(os.path.join, sickgear.PROG_DIR, 'lib', 'rarfile', 'test.rar')
+ rarfile.UNRAR_TOOL = os.path.join(sickgear.PROG_DIR, 'lib', 'rarfile', 'UnRAR.exe')
+ rar_path = os.path.join(sickgear.PROG_DIR, 'lib', 'rarfile', 'test.rar')
if 'This is only a test.' == decode_str(rarfile.RarFile(rar_path).read(r'test/test.txt')):
return 'supported'
msg = 'Could not read test file content'
@@ -8998,7 +8995,7 @@ class ConfigProviders(Config):
logger.log(x, logger.ERROR)
ui.notifications.error('Error(s) Saving Configuration', ' \n'.join(results))
else:
- ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickgear.CONFIG_FILE))
+ ui.notifications.message('Configuration Saved', os.path.join(sickgear.CONFIG_FILE))
if reload_page:
self.write('reload')
@@ -9266,7 +9263,7 @@ class ConfigNotifications(Config):
ui.notifications.error('Error(s) Saving Configuration',
' \n'.join(results))
else:
- ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickgear.CONFIG_FILE))
+ ui.notifications.message('Configuration Saved', os.path.join(sickgear.CONFIG_FILE))
self.redirect('/config/notifications/')
@@ -9321,7 +9318,7 @@ class ConfigSubtitles(Config):
ui.notifications.error('Error(s) Saving Configuration',
' \n'.join(results))
else:
- ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickgear.CONFIG_FILE))
+ ui.notifications.message('Configuration Saved', os.path.join(sickgear.CONFIG_FILE))
self.redirect('/config/subtitles/')
@@ -9354,7 +9351,7 @@ class ConfigAnime(Config):
ui.notifications.error('Error(s) Saving Configuration',
' \n'.join(results))
else:
- ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickgear.CONFIG_FILE))
+ ui.notifications.message('Configuration Saved', os.path.join(sickgear.CONFIG_FILE))
self.redirect('/config/anime/')
@@ -9410,7 +9407,7 @@ class EventLogs(MainHandler):
min_level = int(min_level)
- regex = re.compile(r'^\d{4}-\d{2}-\d{2}\s*\d{2}:\d{2}:\d{2}\s*([A-Z]+)\s*([^\s]+)\s+:{2}\s*(.*\r?\n)$')
+ regex = re.compile(r'^\d{4}-\d{2}-\d{2}\s*\d{2}:\d{2}:\d{2}\s*([A-Z]+)\s*(\S+)\s+:{2}\s*(.*\r?\n)$')
final_data = []
normal_data = []
@@ -9578,9 +9575,9 @@ class CachedImages(MainHandler):
def should_try_image(filename, source, days=1, minutes=0):
result = True
try:
- dummy_file = '%s.%s.dummy' % (ek.ek(os.path.splitext, filename)[0], source)
- if ek.ek(os.path.isfile, dummy_file):
- if ek.ek(os.stat, dummy_file).st_mtime \
+ dummy_file = '%s.%s.dummy' % (os.path.splitext(filename)[0], source)
+ if os.path.isfile(dummy_file):
+ if os.stat(dummy_file).st_mtime \
< (int(timestamp_near((datetime.datetime.now()
- datetime.timedelta(days=days, minutes=minutes))))):
CachedImages.delete_dummy_image(dummy_file)
@@ -9592,7 +9589,7 @@ class CachedImages(MainHandler):
@staticmethod
def create_dummy_image(filename, source):
- dummy_file = '%s.%s.dummy' % (ek.ek(os.path.splitext, filename)[0], source)
+ dummy_file = '%s.%s.dummy' % (os.path.splitext(filename)[0], source)
CachedImages.delete_dummy_image(dummy_file)
try:
with open(dummy_file, 'w'):
@@ -9603,28 +9600,28 @@ class CachedImages(MainHandler):
@staticmethod
def delete_dummy_image(dummy_file):
try:
- if ek.ek(os.path.isfile, dummy_file):
- ek.ek(os.remove, dummy_file)
+ if os.path.isfile(dummy_file):
+ os.remove(dummy_file)
except (BaseException, Exception):
pass
@staticmethod
def delete_all_dummy_images(filename):
for f in ['tmdb', 'tvdb', 'tvmaze']:
- CachedImages.delete_dummy_image('%s.%s.dummy' % (ek.ek(os.path.splitext, filename)[0], f))
+ CachedImages.delete_dummy_image('%s.%s.dummy' % (os.path.splitext(filename)[0], f))
def index(self, path='', source=None, filename=None, tmdbid=None, tvdbid=None, trans=True):
path = path.strip('/')
file_name = ''
if None is not source:
- file_name = ek.ek(os.path.basename, source)
+ file_name = os.path.basename(source)
elif filename not in [None, 0, '0']:
file_name = filename
- image_file = ek.ek(os.path.join, sickgear.CACHE_DIR, 'images', path, file_name)
- image_file = ek.ek(os.path.abspath, image_file.replace('\\', '/'))
- if not ek.ek(os.path.isfile, image_file) and has_image_ext(file_name):
- basepath = ek.ek(os.path.dirname, image_file)
+ image_file = os.path.join(sickgear.CACHE_DIR, 'images', path, file_name)
+ image_file = os.path.abspath(image_file.replace('\\', '/'))
+ if not os.path.isfile(image_file) and has_image_ext(file_name):
+ basepath = os.path.dirname(image_file)
helpers.make_path(basepath)
poster_url = ''
tmdb_image = False
@@ -9641,13 +9638,15 @@ class CachedImages(MainHandler):
poster_url = show_obj.poster
except (BaseException, Exception):
poster_url = ''
- if poster_url and not sg_helpers.download_file(poster_url, image_file, nocache=True) and poster_url.find('trakt.us'):
+ if poster_url \
+ and not sg_helpers.download_file(poster_url, image_file, nocache=True) \
+ and poster_url.find('trakt.us'):
sg_helpers.download_file(poster_url.replace('trakt.us', 'trakt.tv'), image_file, nocache=True)
- if tmdb_image and not ek.ek(os.path.isfile, image_file):
+ if tmdb_image and not os.path.isfile(image_file):
self.create_dummy_image(image_file, 'tmdb')
if None is source and tvdbid not in [None, 'None', 0, '0'] \
- and not ek.ek(os.path.isfile, image_file) \
+ and not os.path.isfile(image_file) \
and self.should_try_image(image_file, 'tvdb'):
try:
tvinfo_config = sickgear.TVInfoAPI(TVINFO_TVDB).api_params.copy()
@@ -9660,15 +9659,15 @@ class CachedImages(MainHandler):
poster_url = ''
if poster_url:
sg_helpers.download_file(poster_url, image_file, nocache=True)
- if not ek.ek(os.path.isfile, image_file):
+ if not os.path.isfile(image_file):
self.create_dummy_image(image_file, 'tvdb')
- if ek.ek(os.path.isfile, image_file):
+ if os.path.isfile(image_file):
self.delete_all_dummy_images(image_file)
- if not ek.ek(os.path.isfile, image_file):
- image_file = ek.ek(os.path.join, sickgear.PROG_DIR, 'gui', 'slick',
- 'images', ('image-light.png', 'trans.png')[bool(int(trans))])
+ if not os.path.isfile(image_file):
+ image_file = os.path.join(sickgear.PROG_DIR, 'gui', 'slick', 'images',
+ ('image-light.png', 'trans.png')[bool(int(trans))])
else:
helpers.set_file_timestamp(image_file, min_age=3, new_time=None)
@@ -9683,8 +9682,8 @@ class CachedImages(MainHandler):
:param filename: image file name with path
:param days: max age to trigger reload of image
"""
- if not ek.ek(os.path.isfile, filename) or \
- ek.ek(os.stat, filename).st_mtime < \
+ if not os.path.isfile(filename) or \
+ os.stat(filename).st_mtime < \
(int(timestamp_near((datetime.datetime.now() - datetime.timedelta(days=days))))):
return True
return False
@@ -9735,9 +9734,9 @@ class CachedImages(MainHandler):
sg_helpers.download_file(char_obj.thumb_url, image_thumb, nocache=True)
primary, fallback = ((image_normal, image_thumb), (image_thumb, image_normal))[thumb]
- if ek.ek(os.path.isfile, primary):
+ if os.path.isfile(primary):
image_file = primary
- elif ek.ek(os.path.isfile, fallback):
+ elif os.path.isfile(fallback):
image_file = fallback
elif person_id:
@@ -9773,9 +9772,9 @@ class CachedImages(MainHandler):
sg_helpers.download_file(person_obj.thumb_url, image_thumb, nocache=True)
primary, fallback = ((image_normal, image_thumb), (image_thumb, image_normal))[thumb]
- if ek.ek(os.path.isfile, primary):
+ if os.path.isfile(primary):
image_file = primary
- elif ek.ek(os.path.isfile, fallback):
+ elif os.path.isfile(fallback):
image_file = fallback
return self.image_data(image_file, cast_default=True)
@@ -9790,7 +9789,7 @@ class CachedImages(MainHandler):
:return: binary image data or None
"""
if cast_default and None is image_file:
- image_file = ek.ek(os.path.join, sickgear.PROG_DIR, 'gui', 'slick', 'images', 'poster-person.jpg')
+ image_file = os.path.join(sickgear.PROG_DIR, 'gui', 'slick', 'images', 'poster-person.jpg')
mime_type, encoding = MimeTypes().guess_type(image_file)
self.set_header('Content-Type', mime_type)
diff --git a/tests/network_timezone_tests.py b/tests/network_timezone_tests.py
index dc757af8..886f5f1e 100644
--- a/tests/network_timezone_tests.py
+++ b/tests/network_timezone_tests.py
@@ -10,8 +10,6 @@ import datetime
from lib.dateutil import tz
import sickgear
from sickgear import network_timezones, helpers
-# noinspection PyPep8Naming
-import encodingKludge as ek
class NetworkTimezoneTests(test.SickbeardTestDBCase):
@@ -33,12 +31,12 @@ class NetworkTimezoneTests(test.SickbeardTestDBCase):
@classmethod
def remove_zoneinfo(cls):
# delete all existing zoneinfo files
- for (path, dirs, files) in ek.ek(os.walk, helpers.real_path(sickgear.ZONEINFO_DIR)):
+ for (path, dirs, files) in os.walk(helpers.real_path(sickgear.ZONEINFO_DIR)):
for filename in files:
if filename.endswith('.tar.gz'):
- file_w_path = ek.ek(os.path.join, path, filename)
+ file_w_path = os.path.join(path, filename)
try:
- ek.ek(os.remove, file_w_path)
+ os.remove(file_w_path)
except (BaseException, Exception):
pass
diff --git a/tests/scene_helpers_tests.py b/tests/scene_helpers_tests.py
index 7cbe6257..2827522b 100644
--- a/tests/scene_helpers_tests.py
+++ b/tests/scene_helpers_tests.py
@@ -26,7 +26,7 @@ class SceneTests(test.SickbeardTestDBCase):
s.tvid = TVINFO_TVDB
s.name = name
- result = show_name_helpers.allPossibleShowNames(s, season=season)
+ result = show_name_helpers.all_possible_show_names(s, season=season)
self.assertTrue(len(set(expected).intersection(set(result))) == len(expected))
def _test_pass_wordlist_checks(self, name, expected):