diff --git a/CHANGES.md b/CHANGES.md
index 3a1e2240..ffb6f19b 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -3,6 +3,7 @@
* Update package resource API 63.2.0 (3ae44cd) to 67.3.2 (b9bf2ec)
* Change remove calls to legacy py2 fix encoding function
* Change requirements for pure py3
+* Change codebase cleanups
### 3.27.8 (2023-02-20 23:30:00 UTC)
@@ -1080,7 +1081,7 @@
* Add API response field `global exclude require` to sg.listrequirewords endpoint
* Change improve Popen resource usage under py2
* Add overall failure monitoring to History/Connect fails (renamed from "Provider fails")
-* Change log exception during updateCache in newznab
+* Change log exception during update_cache in newznab
* Change make Py3.9 preparations
* Change anime "Available groups" to display "No groups listed..." when API is fine with no results instead of blank
* Change improve clarity of anime group lists by using terms Allow list and Block list
diff --git a/_cleaner.py b/_cleaner.py
index bfc31345..8c6eac00 100644
--- a/_cleaner.py
+++ b/_cleaner.py
@@ -37,6 +37,9 @@ if old_magic != magic_number:
# skip cleaned005 as used during dev by testers
cleanups = [
+ ['.cleaned009.tmp', r'lib\scandir', [
+ r'lib\scandir\__pycache__', r'lib\scandir',
+ ]],
['.cleaned008.tmp', r'lib\tornado_py3', [
r'lib\bs4_py2\builder\__pycache__', r'lib\bs4_py2\builder', r'lib\bs4_py2',
r'lib\bs4_py3\builder\__pycache__', r'lib\bs4_py3\builder', r'lib\bs4_py3',
diff --git a/gui/slick/interfaces/default/cache.tmpl b/gui/slick/interfaces/default/cache.tmpl
index 7b115f67..be3ba876 100644
--- a/gui/slick/interfaces/default/cache.tmpl
+++ b/gui/slick/interfaces/default/cache.tmpl
@@ -65,7 +65,7 @@
#for $hItem in $cacheResults:
- #set $provider = $providers.getProviderClass($hItem['provider'])
+ #set $provider = $providers.get_by_id($hItem['provider'])
#set $tip = '%s @ %s' % ($hItem['provider'], $SGDatetime.sbfdatetime($SGDatetime.fromtimestamp($hItem['time'])))
#set $ver = $hItem['version']
#set $ver = ($ver, '')[-1 == $ver]
diff --git a/gui/slick/interfaces/default/cast_person.tmpl b/gui/slick/interfaces/default/cast_person.tmpl
index 3ce066a3..3d9b9568 100644
--- a/gui/slick/interfaces/default/cast_person.tmpl
+++ b/gui/slick/interfaces/default/cast_person.tmpl
@@ -182,7 +182,11 @@ def param(visible=True, rid=None, cache_person=None, cache_char=None, person=Non
#end if
#set $section_links = False
+#set $all_sources = $TVInfoAPI().all_sources
#for $cur_src, $cur_sid in sorted(iteritems($person.ids))
+ #if $cur_src not in $all_sources:
+ #continue
+ #end if
#if $TVInfoAPI($cur_src).config.get('people_url')
#if not $section_links
#set $section_links = True
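
The added guard skips person IDs whose source is no longer registered with `TVInfoAPI`, so stale sources cannot break the links section. The same filter in plain Python, with illustrative data standing in for the real source and person records:

```python
all_sources = {1: 'tvdb', 3: 'tvmaze'}       # assumed: registered source id -> name
person_ids = {1: 73141, 3: 210, 99: 12345}   # 99 stands in for a retired source

usable = {src: sid for src, sid in sorted(person_ids.items())
          if src in all_sources}
print(usable)  # {1: 73141, 3: 210}
```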
diff --git a/gui/slick/interfaces/default/config.tmpl b/gui/slick/interfaces/default/config.tmpl
index 26b462e9..23d9b4ce 100644
--- a/gui/slick/interfaces/default/config.tmpl
+++ b/gui/slick/interfaces/default/config.tmpl
@@ -29,7 +29,7 @@
$sg_str('CONFIG_FILE')
- $db.dbFilename()
+ $db.db_filename()
#if $db.db_supports_backup
$backup_db_path
#end if
diff --git a/gui/slick/interfaces/default/config_general.tmpl b/gui/slick/interfaces/default/config_general.tmpl
index 9f604721..e776220f 100644
--- a/gui/slick/interfaces/default/config_general.tmpl
+++ b/gui/slick/interfaces/default/config_general.tmpl
@@ -13,7 +13,6 @@
#from sickgear.sgdatetime import *
<% def sg_var(varname, default=False): return getattr(sickgear, varname, default) %>#slurp#
<% def sg_str(varname, default=''): return getattr(sickgear, varname, default) %>#slurp#
-#from _23 import list_keys
##
#set global $title = 'Config - General'
#set global $header = 'General Settings'
@@ -846,7 +845,7 @@
File logging level:
-#set $levels = $list_keys(file_logging_presets)
+#set $levels = $list(file_logging_presets)
#set void = $levels.sort(key=lambda x: $file_logging_presets[$x])
#set $level_count = len($levels)
#for $level in $levels
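
`_23.list_keys` existed because py2's `dict.keys()` returned a list while py3's returns a view; in py3-only code, `list(d)` materializes the keys directly and the template's follow-up sort is unchanged. A small demonstration with illustrative preset values:

```python
file_logging_presets = {'regular': 20, 'debug': 10, 'db': 5}  # illustrative levels

levels = list(file_logging_presets)  # the keys, same result the list_keys shim gave
levels.sort(key=lambda x: file_logging_presets[x])
print(levels)  # ['db', 'debug', 'regular']
```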
diff --git a/gui/slick/interfaces/default/config_providers.tmpl b/gui/slick/interfaces/default/config_providers.tmpl
index 3c8342dc..ae0c1363 100644
--- a/gui/slick/interfaces/default/config_providers.tmpl
+++ b/gui/slick/interfaces/default/config_providers.tmpl
@@ -36,12 +36,12 @@
-
- If the ``status`` argument is specified, that value is used as the
- HTTP status code; otherwise either 301 (permanent) or 302
- (temporary) is chosen based on the ``permanent`` argument.
- The default is 302 (temporary).
- """
- if not url.startswith(sickgear.WEB_ROOT):
- url = sickgear.WEB_ROOT + url
-
- # noinspection PyUnresolvedReferences
- if self._headers_written:
- raise Exception('Cannot redirect after headers have been written')
- if status is None:
- status = 301 if permanent else 302
- else:
- assert isinstance(status, int)
- assert 300 <= status <= 399
- self.set_status(status)
- self.set_header('Location', urljoin(utf8(self.request.uri),
- utf8(url)))
-
- # todo: move to RouteHandler after removing _legacy module
- def write_error(self, status_code, **kwargs):
- body = ''
- try:
- if self.request.body:
- body = '\nRequest body: %s' % decode_str(self.request.body)
- except (BaseException, Exception):
- pass
- logger.log('Sent %s error response to a `%s` request for `%s` with headers:\n%s%s' %
- (status_code, self.request.method, self.request.path, self.request.headers, body), logger.WARNING)
- # suppress traceback by removing 'exc_info' kwarg
- if 'exc_info' in kwargs:
- logger.log('Gracefully handled exception text:\n%s' % traceback.format_exception(*kwargs["exc_info"]),
- logger.DEBUG)
- del kwargs['exc_info']
- return super(LegacyBase, self).write_error(status_code, **kwargs)
-
- def data_received(self, *args):
- pass
-
-
-class LegacyBaseHandler(LegacyBase):
-
- def redirect_args(self, new_url, exclude=(None,), **kwargs):
- args = '&'.join(['%s=%s' % (k, v) for (k, v) in
- filter_iter(lambda arg: arg[1] not in exclude, iteritems(kwargs))])
- self.redirect('%s%s' % (new_url, ('', '?' + args)[bool(args)]), permanent=True)
-
- """ deprecated from BaseHandler ------------------------------------------------------------------------------------
- """
- def getImage(self, *args, **kwargs):
- return self.get_image(*args, **kwargs)
-
- def get_image(self, *args, **kwargs):
- # abstract method
- pass
-
- def showPoster(self, show=None, **kwargs):
- # test: /showPoster/?show=73141&which=poster_thumb
- return self.show_poster(TVidProdid(show)(), **kwargs)
-
- def show_poster(self, *args, **kwargs):
- # abstract method
- pass
-
- """ deprecated from MainHandler ------------------------------------------------------------------------------------
- """
- def episodeView(self, **kwargs):
- self.redirect_args('/daily-schedule', exclude=(None, False), **kwargs)
-
- def setHomeLayout(self, *args, **kwargs):
- return self.set_layout_view_shows(*args, **kwargs)
-
- def set_layout_view_shows(self, *args, **kwargs):
- # abstract method
- pass
-
- def setPosterSortBy(self, *args):
- return self.set_poster_sortby(*args)
-
- @staticmethod
- def set_poster_sortby(*args):
- # abstract method
- pass
-
- def setPosterSortDir(self, *args):
- return self.set_poster_sortdir(*args)
-
- @staticmethod
- def set_poster_sortdir(*args):
- # abstract method
- pass
-
- def setEpisodeViewLayout(self, *args):
- return self.set_layout_daily_schedule(*args)
-
- def set_layout_daily_schedule(self, *args):
- # abstract method
- pass
-
- def toggleEpisodeViewDisplayPaused(self):
- return self.toggle_display_paused_daily_schedule()
-
- # completely deprecated for the three way state set_ function
- # def toggle_display_paused_daily_schedule(self):
- # # abstract method
- # pass
-
- def toggle_display_paused_daily_schedule(self):
-
- return self.set_display_paused_daily_schedule(not sickgear.EPISODE_VIEW_DISPLAY_PAUSED)
-
- def set_display_paused_daily_schedule(self, *args, **kwargs):
- # abstract method
- pass
-
- def setEpisodeViewCards(self, *args, **kwargs):
- return self.set_cards_daily_schedule(*args, **kwargs)
-
- def set_cards_daily_schedule(self, *args, **kwargs):
- # abstract method
- pass
-
- def setEpisodeViewSort(self, *args, **kwargs):
- return self.set_sort_daily_schedule(*args, **kwargs)
-
- def set_sort_daily_schedule(self, *args, **kwargs):
- # abstract method
- pass
-
- def getFooterTime(self, *args, **kwargs):
- return self.get_footer_time(*args, **kwargs)
-
- @staticmethod
- def get_footer_time(*args, **kwargs):
- # abstract method
- pass
-
- def toggleDisplayShowSpecials(self, **kwargs):
- return self.toggle_specials_view_show(TVidProdid(kwargs.get('show'))())
-
- def toggle_specials_view_show(self, *args):
- # abstract method
- pass
-
- def setHistoryLayout(self, *args):
- return self.set_layout_history(*args)
-
- def set_layout_history(self, *args):
- # abstract method
- pass
-
- """ deprecated from Home -------------------------------------------------------------------------------------------
- """
- def showlistView(self):
- self.redirect('/view-shows', permanent=True)
-
- def viewchanges(self):
- self.redirect('/home/view-changes', permanent=True)
-
- def displayShow(self, **kwargs):
- self.migrate_redir('view-show', **kwargs)
-
- def editShow(self, **kwargs):
- kwargs['any_qualities'] = kwargs.pop('anyQualities', None)
- kwargs['best_qualities'] = kwargs.pop('bestQualities', None)
- kwargs['exceptions_list'] = kwargs.pop('exceptions_list', None)
- kwargs['direct_call'] = kwargs.pop('directCall', False)
- kwargs['tvinfo_lang'] = kwargs.pop('indexerLang', None)
- kwargs['subs'] = kwargs.pop('subtitles', None)
- self.migrate_redir('edit-show', **kwargs)
-
- def testRename(self, **kwargs):
- self.migrate_redir('rename-media', **kwargs)
-
- def migrate_redir(self, new_url, **kwargs):
- kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
- self.redirect_args('/home/%s' % new_url, exclude=(None, False), **kwargs)
-
- def setStatus(self, **kwargs):
- kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
- return self.set_show_status(**kwargs)
-
- def set_show_status(self, **kwargs):
- # abstract method
- pass
-
- def branchCheckout(self, *args):
- return self.branch_checkout(*args)
-
- def branch_checkout(self, *args):
- # abstract method
- pass
-
- def pullRequestCheckout(self, *args):
- return self.pull_request_checkout(*args)
-
- def pull_request_checkout(self, *args):
- # abstract method
- pass
-
- def display_season(self, **kwargs):
- kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
- return self.season_render(**kwargs)
-
- def season_render(self, **kwargs):
- # abstract method
- pass
-
- def plotDetails(self, show, *args):
- return self.plot_details(TVidProdid(show)(), *args)
-
- @staticmethod
- def plot_details(*args):
- # abstract method
- pass
-
- def sceneExceptions(self, show):
- return self.scene_exceptions(TVidProdid(show)())
-
- @staticmethod
- def scene_exceptions(*args):
- # abstract method
- pass
-
- def saveMapping(self, show, **kwargs):
- kwargs['m_tvid'] = kwargs.pop('mindexer', 0)
- kwargs['m_prodid'] = kwargs.pop('mindexerid', 0)
- return self.save_mapping(TVidProdid(show)(), **kwargs)
-
- def save_mapping(self, *args, **kwargs):
- # abstract method
- pass
-
- def forceMapping(self, show, **kwargs):
- return self.force_mapping(TVidProdid(show)(), **kwargs)
-
- @staticmethod
- def force_mapping(*args, **kwargs):
- # abstract method
- pass
-
- def deleteShow(self, **kwargs):
- kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
- return self.delete_show(**kwargs)
-
- def delete_show(self, *args, **kwargs):
- # abstract method
- pass
-
- def refreshShow(self, **kwargs):
- kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
- return self.refresh_show(**kwargs)
-
- def refresh_show(self, *args, **kwargs):
- # abstract method
- pass
-
- def updateShow(self, **kwargs):
- kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
- return self.update_show(**kwargs)
-
- def update_show(self, *args, **kwargs):
- # abstract method
- pass
-
- def subtitleShow(self, **kwargs):
- kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
- return self.subtitle_show(**kwargs)
-
- def subtitle_show(self, *args, **kwargs):
- # abstract method
- pass
-
- def doRename(self, **kwargs):
- kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
- return self.do_rename(**kwargs)
-
- def do_rename(self, *args, **kwargs):
- # abstract method
- pass
-
- def episode_search(self, **kwargs):
- kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
- return self.search_episode(**kwargs)
-
- def search_episode(self, *args, **kwargs):
- # abstract method
- pass
-
- def searchEpisodeSubtitles(self, **kwargs):
- kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
- return self.search_episode_subtitles(**kwargs)
-
- def search_episode_subtitles(self, *args, **kwargs):
- # abstract method
- pass
-
- def setSceneNumbering(self, **kwargs):
- return self.set_scene_numbering(
- tvid_prodid={kwargs.pop('indexer', ''): kwargs.pop('show', '')},
- for_season=kwargs.get('forSeason'), for_episode=kwargs.get('forEpisode'),
- scene_season=kwargs.get('sceneSeason'), scene_episode=kwargs.get('sceneEpisode'),
- scene_absolute=kwargs.get('sceneAbsolute'))
-
- @staticmethod
- def set_scene_numbering(*args, **kwargs):
- # abstract method
- pass
-
- def update_emby(self, **kwargs):
- kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
- return self.update_mb(**kwargs)
-
- def update_mb(self, *args, **kwargs):
- # abstract method
- pass
-
- def search_q_progress(self, **kwargs):
- kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
- return self.search_q_status(**kwargs)
-
- def search_q_status(self, *args, **kwargs):
- # abstract method
- pass
-
- """ deprecated from NewHomeAddShows i.e. HomeAddShows --------------------------------------------------------------
- """
- def addExistingShows(self, **kwargs):
- kwargs['prompt_for_settings'] = kwargs.pop('promptForSettings', None)
- self.redirect_args('/add-shows/add-existing-shows', **kwargs)
-
- def addAniDBShow(self, **kwargs):
- self.migrate_redir_add_shows('info-anidb', TVINFO_TVDB, **kwargs)
-
- def addIMDbShow(self, **kwargs):
- self.migrate_redir_add_shows('info-imdb', TVINFO_IMDB, **kwargs)
-
- def addTraktShow(self, **kwargs):
- self.migrate_redir_add_shows('info-trakt', TVINFO_TVDB, **kwargs)
-
- def migrate_redir_add_shows(self, new_url, tvinfo, **kwargs):
- prodid = kwargs.pop('indexer_id', None)
- if prodid:
- kwargs['ids'] = prodid
- if TVINFO_TVDB == tvinfo and prodid:
- kwargs['ids'] = TVidProdid({tvinfo: prodid})()
- kwargs['show_name'] = kwargs.pop('showName', None)
- self.redirect_args('/add-shows/%s' % new_url, **kwargs)
-
- def getIndexerLanguages(self):
- return self.get_infosrc_languages()
-
- @staticmethod
- def get_infosrc_languages():
- # abstract method
- pass
-
- def searchIndexersForShowName(self, *args, **kwargs):
- return self.search_tvinfo_for_showname(*args, **kwargs)
-
- def search_tvinfo_for_showname(self, *args, **kwargs):
- # abstract method
- pass
-
- def massAddTable(self, **kwargs):
- return self.mass_add_table(
- root_dir=kwargs.pop('rootDir', None), **kwargs)
-
- def mass_add_table(self, *args, **kwargs):
- # abstract method
- pass
-
- def addNewShow(self, **kwargs):
- return self.add_new_show(
- provided_tvid=kwargs.pop('providedIndexer', None),
- which_series=kwargs.pop('whichSeries', None),
- tvinfo_lang=kwargs.pop('indexerLang', 'en'),
- root_dir=kwargs.pop('rootDir', None),
- default_status=kwargs.pop('defaultStatus', None),
- any_qualities=kwargs.pop('anyQualities', None),
- best_qualities=kwargs.pop('bestQualities', None),
- subs=kwargs.pop('subtitles', None),
- full_show_path=kwargs.pop('fullShowPath', None),
- skip_show=kwargs.pop('skipShow', None),
- **kwargs)
-
- def add_new_show(self, *args, **kwargs):
- # abstract method
- pass
-
- """ deprecated from ConfigGeneral ----------------------------------------------------------------------------------
- """
- def generateKey(self):
- return self.generate_key()
-
- @staticmethod
- def generate_key():
- # abstract method
- pass
-
- def saveRootDirs(self, **kwargs):
- return self.save_root_dirs(root_dir_string=kwargs.get('rootDirString'))
-
- @staticmethod
- def save_root_dirs(**kwargs):
- # abstract method
- pass
-
- def saveResultPrefs(self, **kwargs):
- return self.save_result_prefs(**kwargs)
-
- @staticmethod
- def save_result_prefs(**kwargs):
- # abstract method
- pass
-
- def saveAddShowDefaults(self, *args, **kwargs):
- return self.save_add_show_defaults(*args, **kwargs)
-
- @staticmethod
- def save_add_show_defaults(*args, **kwargs):
- # abstract method
- pass
-
- def saveGeneral(self, **kwargs):
- return self.save_general(**kwargs)
-
- def save_general(self, **kwargs):
- # abstract method
- pass
-
- """ deprecated from ConfigSearch -----------------------------------------------------------------------------------
- """
- def saveSearch(self, **kwargs):
- return self.save_search(**kwargs)
-
- def save_search(self, **kwargs):
- # abstract method
- pass
-
- """ deprecated from ConfigProviders --------------------------------------------------------------------------------
- """
- def canAddNewznabProvider(self, *args):
- return self.can_add_newznab_provider(*args)
-
- @staticmethod
- def can_add_newznab_provider(*args):
- # abstract method
- pass
-
- def getNewznabCategories(self, *args):
- return self.get_newznab_categories(*args)
-
- @staticmethod
- def get_newznab_categories(*args):
- # abstract method
- pass
-
- def canAddTorrentRssProvider(self, *args):
- return self.can_add_torrent_rss_provider(*args)
-
- @staticmethod
- def can_add_torrent_rss_provider(*args):
- # abstract method
- pass
-
- def checkProvidersPing(self):
- return self.check_providers_ping()
-
- @staticmethod
- def check_providers_ping():
- # abstract method
- pass
-
- def saveProviders(self, *args, **kwargs):
- return self.save_providers(*args, **kwargs)
-
- def save_providers(self, *args, **kwargs):
- # abstract method
- pass
-
- """ deprecated from ConfigPostProcessing ---------------------------------------------------------------------------
- """
- def savePostProcessing(self, **kwargs):
- return self.save_post_processing(**kwargs)
-
- def save_post_processing(self, **kwargs):
- # abstract method
- pass
-
- def testNaming(self, *args, **kwargs):
- return self.test_naming(*args, **kwargs)
-
- @staticmethod
- def test_naming(*args, **kwargs):
- # abstract method
- pass
-
- def isNamingValid(self, *args, **kwargs):
- return self.is_naming_valid(*args, **kwargs)
-
- @staticmethod
- def is_naming_valid(*args, **kwargs):
- # abstract method
- pass
-
- def isRarSupported(self):
- return self.is_rar_supported()
-
- @staticmethod
- def is_rar_supported():
- # abstract method
- pass
-
- """ deprecated from ConfigSubtitles --------------------------------------------------------------------------------
- """
- def saveSubtitles(self, **kwargs):
- return self.save_subtitles(**kwargs)
-
- def save_subtitles(self, **kwargs):
- # abstract method
- pass
-
- """ deprecated from ConfigAnime ------------------------------------------------------------------------------------
- """
- def saveAnime(self, **kwargs):
- return self.save_anime(**kwargs)
-
- def save_anime(self, **kwargs):
- # abstract method
- pass
-
- """ deprecated from Manage -----------------------------------------------------------------------------------------
- """
- def episode_statuses(self, **kwargs):
- self.redirect_args('/manage/episode-overview', **kwargs)
-
- def subtitleMissed(self, **kwargs):
- kwargs['which_subs'] = kwargs.pop('whichSubs', None)
- self.redirect_args('/manage/subtitle_missed', **kwargs)
-
- def show_episode_statuses(self, **kwargs):
- return self.get_status_episodes(TVidProdid(kwargs.get('indexer_id'))(), kwargs.get('which_status'))
-
- @staticmethod
- def get_status_episodes(*args):
- # abstract method
- pass
-
- def showSubtitleMissed(self, **kwargs):
- return self.show_subtitle_missed(TVidProdid(kwargs.get('indexer_id'))(), kwargs.get('whichSubs'))
-
- @staticmethod
- def show_subtitle_missed(*args):
- # abstract method
- pass
-
- def downloadSubtitleMissed(self, **kwargs):
- return self.download_subtitle_missed(**kwargs)
-
- def download_subtitle_missed(self, **kwargs):
- # abstract method
- pass
-
- def backlogShow(self, **kwargs):
- return self.backlog_show(TVidProdid(kwargs.get('indexer_id'))())
-
- def backlog_show(self, *args):
- # abstract method
- pass
-
- def backlogOverview(self):
- self.redirect('/manage/backlog_overview', permanent=True)
-
- def massEdit(self, **kwargs):
- return self.mass_edit(to_edit=kwargs.get('toEdit'))
-
- def mass_edit(self, **kwargs):
- # abstract method
- pass
-
- def massEditSubmit(self, **kwargs):
- kwargs['to_edit'] = kwargs.pop('toEdit', None)
- kwargs['subs'] = kwargs.pop('subtitles', None)
- kwargs['any_qualities'] = kwargs.pop('anyQualities', None)
- kwargs['best_qualities'] = kwargs.pop('bestQualities', None)
- return self.mass_edit_submit(**kwargs)
-
- def mass_edit_submit(self, **kwargs):
- # abstract method
- pass
-
- def bulkChange(self, **kwargs):
- return self.bulk_change(
- to_update=kwargs.get('toUpdate'), to_refresh=kwargs.get('toRefresh'),
- to_rename=kwargs.get('toRename'), to_delete=kwargs.get('toDelete'), to_remove=kwargs.get('toRemove'),
- to_metadata=kwargs.get('toMetadata'), to_subtitle=kwargs.get('toSubtitle'))
-
- def bulk_change(self, **kwargs):
- # abstract method
- pass
-
- def failedDownloads(self, **kwargs):
- kwargs['to_remove'] = kwargs.pop('toRemove', None)
- return self.failed_downloads(**kwargs)
-
- def failed_downloads(self, **kwargs):
- # abstract method
- pass
-
- """ deprecated from ManageSearches ---------------------------------------------------------------------------------
- """
- def retryProvider(self, **kwargs):
- return self.retry_provider(**kwargs)
-
- @staticmethod
- def retry_provider(**kwargs):
- # abstract method
- pass
-
- def forceVersionCheck(self):
- return self.check_update()
-
- def check_update(self):
- # abstract method
- pass
-
- def forceBacklog(self):
- return self.force_backlog()
-
- def force_backlog(self):
- # abstract method
- pass
-
- def forceSearch(self):
- return self.force_search()
-
- def force_search(self):
- # abstract method
- pass
-
- def forceFindPropers(self):
- return self.force_find_propers()
-
- def force_find_propers(self):
- # abstract method
- pass
-
- def pauseBacklog(self, **kwargs):
- return self.pause_backlog(**kwargs)
-
- def pause_backlog(self, **kwargs):
- # abstract method
- pass
-
- """ deprecated from ShowProcesses ----------------------------------------------------------------------------------
- """
- def forceShowUpdate(self):
- return self.force_show_update()
-
- def force_show_update(self):
- # abstract method
- pass
-
- """ deprecated from History ----------------------------------------------------------------------------------------
- """
- def clearHistory(self):
- return self.clear_history()
-
- def clear_history(self):
- # abstract method
- pass
-
- def trimHistory(self):
- return self.trim_history()
-
- def trim_history(self):
- # abstract method
- pass
-
- """ deprecated from ErrorLogs --------------------------------------------------------------------------------------
- """
- def clearerrors(self):
- self.redirect('/errors/clear-log')
-
- def viewlog(self, **kwargs):
- self.redirect_args('/events/view-log/', **kwargs)
-
- def downloadlog(self):
- return self.download_log()
-
- def download_log(self):
- # abstract method
- pass
-
- """ ------------------------------------------------------------------------------------------------------------ """
- """ ------------------------------------------------------------------------------------------------------------ """
- """ end of base deprecated function stubs """
- """ ------------------------------------------------------------------------------------------------------------ """
- """ ------------------------------------------------------------------------------------------------------------ """
-
-
-class LegacyRouteHandler(RequestHandler):
-
- def data_received(self, *args):
- pass
-
- def __init__(self, *arg, **kwargs):
- super(LegacyRouteHandler, self).__init__(*arg, **kwargs)
- self.lock = threading.Lock()
-
- def set_default_headers(self):
- self.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0')
- self.set_header('X-Robots-Tag', 'noindex, nofollow, noarchive, nocache, noodp, noydir, noimageindex, nosnippet')
- if sickgear.SEND_SECURITY_HEADERS:
- self.set_header('X-Frame-Options', 'SAMEORIGIN')
-
- # noinspection PyUnusedLocal
- @gen.coroutine
- def get(self, *args, **kwargs):
- getattr(self, 'index')()
-
- def redirect(self, url, permanent=False, status=None):
- if not url.startswith(sickgear.WEB_ROOT):
- url = sickgear.WEB_ROOT + url
-
- super(LegacyRouteHandler, self).redirect(url, permanent, status)
-
-
-class LegacyManageManageSearches(LegacyRouteHandler):
-
- """ deprecated from ManageSearches ---------------------------------------------------------------------------------
- """
- def index(self):
- self.redirect('/manage/search-tasks/', permanent=True)
-
-
-class LegacyManageShowProcesses(LegacyRouteHandler):
-
- """ deprecated from ManageShowProcesses ----------------------------------------------------------------------------
- """
- def index(self):
- self.redirect('/manage/show-tasks/', permanent=True)
-
-
-class LegacyConfigPostProcessing(LegacyRouteHandler):
-
- """ deprecated from ConfigPostProcessing ---------------------------------------------------------------------------
- """
- def index(self):
- self.redirect('/config/media-process/', permanent=True)
-
-
-class LegacyHomeAddShows(LegacyRouteHandler):
-
- """ deprecated from NewHomeAddShows i.e. HomeAddShows --------------------------------------------------------------
- """
- def index(self):
- self.redirect('/add-shows/', permanent=True)
-
-
-class LegacyErrorLogs(LegacyRouteHandler):
-
- """ deprecated from ErrorLogs --------------------------------------------------------------------------------------
- """
- def index(self):
- self.redirect('/events/', permanent=True)
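
Every handler deleted above follows one of two deprecation patterns: a camelCase endpoint that forwards (often translating kwargs) to its snake_case successor, or a route class that issues a permanent redirect to the new URL. A condensed sketch of both, with illustrative class and route names:

```python
class LegacyShim(object):
    """Condensed illustration of the removed deprecation stubs."""

    def redirect(self, url, permanent=False):
        raise NotImplementedError  # supplied by the real request handler

    # style 1: forward the old camelCase name to the snake_case successor
    def forceBacklog(self):
        return self.force_backlog()

    def force_backlog(self):
        # abstract method, implemented by the modern handler
        pass

    # style 2: permanent (301) redirect from the old route to the new one
    def backlogOverview(self):
        self.redirect('/manage/backlog_overview', permanent=True)
```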
diff --git a/sickgear/browser.py b/sickgear/browser.py
index 1c62b9e0..aa0e8589 100644
--- a/sickgear/browser.py
+++ b/sickgear/browser.py
@@ -46,7 +46,7 @@ def get_win_drives():
def folders_at_path(path, include_parent=False, include_files=False):
""" Returns a list of dictionaries with the folders contained at the given path
Give the empty string as the path to list the contents of the root path
- under Unix this means "/", on Windows this will be a list of drive letters)
+ under Unix this means "/", (on Windows this will be a list of drive letters)
"""
# walk up the tree until we find a valid path
diff --git a/sickgear/classes.py b/sickgear/classes.py
index 3cdecae0..a10360d0 100644
--- a/sickgear/classes.py
+++ b/sickgear/classes.py
@@ -25,7 +25,7 @@ import sickgear
from ._legacy_classes import LegacySearchResult, LegacyProper
from .common import Quality
-from six import integer_types, iteritems, PY2, string_types
+from six import integer_types, iteritems, string_types
# noinspection PyUnreachableCode
if False:
@@ -155,7 +155,7 @@ class SearchResult(LegacySearchResult):
class NZBSearchResult(SearchResult):
"""
- Regular NZB result with an URL to the NZB
+ Regular NZB result with a URL to the NZB
"""
resultType = 'nzb'
@@ -169,7 +169,7 @@ class NZBDataSearchResult(SearchResult):
class TorrentSearchResult(SearchResult):
"""
- Torrent result with an URL to the torrent
+ Torrent result with a URL to the torrent
"""
resultType = 'torrent'
@@ -359,41 +359,11 @@ class OrderedDefaultdict(OrderedDict):
args = (self.default_factory,) if self.default_factory else ()
return self.__class__, args, None, None, iteritems(self)
- if PY2:
- # backport from python 3
- def move_to_end(self, key, last=True):
- """Move an existing element to the end (or beginning if last==False).
+ def first_key(self):
+ return next(iter(self))
- Raises KeyError if the element does not exist.
- When last=True, acts like a fast version of self[key]=self.pop(key).
-
- """
- link_prev, link_next, key = link = getattr(self, '_OrderedDict__map')[key]
- link_prev[1] = link_next
- link_next[0] = link_prev
- root = getattr(self, '_OrderedDict__root')
- if last:
- last = root[0]
- link[0] = last
- link[1] = root
- last[1] = root[0] = link
- else:
- first = root[1]
- link[0] = root
- link[1] = first
- root[1] = first[0] = link
-
- def first_key(self):
- return getattr(self, '_OrderedDict__root')[1][2]
-
- def last_key(self):
- return getattr(self, '_OrderedDict__root')[0][2]
- else:
- def first_key(self):
- return next(iter(self))
-
- def last_key(self):
- return next(reversed(self))
+ def last_key(self):
+ return next(reversed(self))
class ImageUrlList(list):
@@ -455,61 +425,14 @@ class EnvVar(object):
pass
def __getitem__(self, key):
- return os.environ(key)
+ return os.environ[key]
@staticmethod
def get(key, default=None):
return os.environ.get(key, default)
-if not PY2:
- sickgear.ENV = EnvVar()
-
-elif 'nt' == os.name:
- from ctypes import windll, create_unicode_buffer
-
- # noinspection PyCompatibility
- class WinEnvVar(EnvVar):
-
- @staticmethod
- def get_environment_variable(name):
- # noinspection PyUnresolvedReferences
- name = unicode(name) # ensures string argument is unicode
- n = windll.kernel32.GetEnvironmentVariableW(name, None, 0)
- env_value = None
- if n:
- buf = create_unicode_buffer(u'\0' * n)
- windll.kernel32.GetEnvironmentVariableW(name, buf, n)
- env_value = buf.value
- return env_value
-
- def __getitem__(self, key):
- return self.get_environment_variable(key)
-
- def get(self, key, default=None):
- r = self.get_environment_variable(key)
- return r if None is not r else default
-
- sickgear.ENV = WinEnvVar()
-else:
- # noinspection PyCompatibility
- class LinuxEnvVar(EnvVar):
- # noinspection PyMissingConstructor
- def __init__(self, environ):
- self.environ = environ
-
- def __getitem__(self, key):
- v = self.environ.get(key)
- try:
- return v if not isinstance(v, str) else v.decode(sickgear.SYS_ENCODING)
- except (UnicodeDecodeError, UnicodeEncodeError):
- return v
-
- def get(self, key, default=None):
- v = self[key]
- return v if None is not v else default
-
- sickgear.ENV = LinuxEnvVar(os.environ)
+sickgear.ENV = EnvVar()
# backport from python 3
@@ -533,7 +456,7 @@ class SimpleNamespace(object):
# list that supports weak reference
-class weakList(list):
+class WeakList(list):
__slots__ = ('__weakref__',)
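
Two fixes land in classes.py: `EnvVar.__getitem__` mistakenly called `os.environ` instead of subscripting it, and with the py2 branches (including `WinEnvVar`/`LinuxEnvVar`) gone, `first_key`/`last_key` reduce to `next(iter(...))`/`next(reversed(...))` because py3 ordered mappings iterate in insertion order in both directions. A quick check of both behaviours:

```python
import os
from collections import OrderedDict

# EnvVar fix: os.environ is a mapping, so it must be subscripted;
# calling it, os.environ('SG_DEMO'), raises TypeError (a mapping is not callable)
os.environ.setdefault('SG_DEMO', '1')
print(os.environ['SG_DEMO'])

# first_key/last_key now lean on ordered mappings being iterable and reversible
d = OrderedDict([('a', 1), ('b', 2), ('c', 3)])
print(next(iter(d)), next(reversed(d)))  # a c
```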
diff --git a/sickgear/clients/download_station.py b/sickgear/clients/download_station.py
index 3147ab58..03f95f15 100644
--- a/sickgear/clients/download_station.py
+++ b/sickgear/clients/download_station.py
@@ -26,7 +26,7 @@ from .. import logger
from ..sgdatetime import timestamp_near
import sickgear
-from _23 import filter_iter, filter_list, map_list, unquote_plus
+from _23 import unquote_plus
from six import string_types
# noinspection PyUnreachableCode
@@ -96,21 +96,21 @@ class DownloadStationAPI(GenericClient):
id=t['id'], title=t['title'], total_size=t.get('size') or 0,
added_ts=d.get('create_time'), last_completed_ts=d.get('completed_time'),
last_started_ts=d.get('started_time'), seed_elapsed_secs=d.get('seedelapsed'),
- wanted_size=sum(map_list(lambda tf: wanted(tf) and tf.get('size') or 0, f)) or None,
- wanted_down=sum(map_list(lambda tf: wanted(tf) and downloaded(tf) or 0, f)) or None,
+ wanted_size=sum(list(map(lambda tf: wanted(tf) and tf.get('size') or 0, f))) or None,
+ wanted_down=sum(list(map(lambda tf: wanted(tf) and downloaded(tf) or 0, f))) or None,
tally_down=downloaded(tx),
tally_up=tx.get('size_uploaded'),
- state='done' if re.search('finish', t['status']) else ('seed', 'down')[any(filter_list(
- lambda tf: wanted(tf) and (downloaded(tf, -1) < tf.get('size', 0)), f))]
+ state='done' if re.search('finish', t['status']) else ('seed', 'down')[any(list(filter(
+ lambda tf: wanted(tf) and (downloaded(tf, -1) < tf.get('size', 0)), f)))]
))
# only available during "download" and "seeding"
file_list = (lambda t: t.get('additional', {}).get('file', {}))
valid_stat = (lambda ti: not ti.get('error') and isinstance(ti.get('status'), string_types)
- and sum(map_list(lambda tf: wanted(tf) and downloaded(tf) or 0, file_list(ti))))
- result = map_list(lambda t: base_state(
+ and sum(list(map(lambda tf: wanted(tf) and downloaded(tf) or 0, file_list(ti)))))
+ result = list(map(lambda t: base_state(
t, t.get('additional', {}).get('detail', {}), t.get('additional', {}).get('transfer', {}), file_list(t)),
- filter_list(lambda t: t['status'] in ('downloading', 'seeding', 'finished') and valid_stat(t),
- tasks))
+ list(filter(lambda t: t['status'] in ('downloading', 'seeding', 'finished') and valid_stat(t),
+ tasks))))
return result
@@ -133,13 +133,13 @@ class DownloadStationAPI(GenericClient):
t_params=dict(additional='detail,file,transfer'))['data']['tasks']
else:
# noinspection PyUnresolvedReferences
- tasks = (filter_list(lambda d: d.get('id') == rid, self._testdata), self._testdata)[not rid]
+ tasks = (list(filter(lambda d: d.get('id') == rid, self._testdata)), self._testdata)[not rid]
result += tasks and (isinstance(tasks, list) and tasks or (isinstance(tasks, dict) and [tasks])) \
or ([], [{'error': True, 'id': rid}])[err]
except (BaseException, Exception):
if getinfo:
result += [dict(error=True, id=rid)]
- for t in filter_iter(lambda d: isinstance(d.get('title'), string_types) and d.get('title'), result):
+ for t in filter(lambda d: isinstance(d.get('title'), string_types) and d.get('title'), result):
t['title'] = unquote_plus(t.get('title'))
return result
@@ -164,8 +164,8 @@ class DownloadStationAPI(GenericClient):
# type: (Union[AnyStr, list]) -> Union[bool, list]
"""
Pause item(s)
- :param ids: Id(s) to pause
- :return: True/Falsy if success/failure else Id(s) that failed to be paused
+ :param ids: ID(s) to pause
+ :return: True/Falsy if success/failure else ID(s) that failed to be paused
"""
return self._action(
'pause', ids,
@@ -177,8 +177,8 @@ class DownloadStationAPI(GenericClient):
# type: (Union[AnyStr, list]) -> Union[bool, list]
"""
Resume task(s) in client
- :param ids: Id(s) to act on
- :return: True if success, Id(s) that could not be resumed, else Falsy if failure
+ :param ids: ID(s) to act on
+ :return: True if success, ID(s) that could not be resumed, else Falsy if failure
"""
return self._perform_task(
'resume', ids,
@@ -190,8 +190,8 @@ class DownloadStationAPI(GenericClient):
# type: (Union[AnyStr, list]) -> Union[bool, list]
"""
Delete task(s) from client
- :param ids: Id(s) to act on
- :return: True if success, Id(s) that could not be deleted, else Falsy if failure
+ :param ids: ID(s) to act on
+ :return: True if success, ID(s) that could not be deleted, else Falsy if failure
"""
return self._perform_task(
'delete', ids,
@@ -205,13 +205,13 @@ class DownloadStationAPI(GenericClient):
"""
Set up and send a method to client
:param method: Either `resume` or `delete`
- :param ids: Id(s) to perform method on
+ :param ids: ID(s) to perform method on
:param filter_func: Callback function to filter tasks as failed or erroneous
:param pause_first: True if task should be paused prior to invoking method
- :return: True if success, Id(s) that could not be acted upon, else Falsy if failure
+ :return: True if success, ID(s) that could not be acted upon, else Falsy if failure
"""
if isinstance(ids, (string_types, list)):
- rids = ids if isinstance(ids, list) else map_list(lambda x: x.strip(), ids.split(','))
+ rids = ids if isinstance(ids, list) else list(map(lambda x: x.strip(), ids.split(',')))
result = pause_first and self._pause_torrent(rids) # get items not paused
result = (isinstance(result, list) and result or [])
@@ -225,7 +225,7 @@ class DownloadStationAPI(GenericClient):
if isinstance(ids, (string_types, list)):
item = dict(fail=[], ignore=[])
- for task in filter_iter(filter_func, self._tinf(ids, err=True)):
+ for task in filter(filter_func, self._tinf(ids, err=True)):
item[('fail', 'ignore')[self._ignore_state(task)]] += [task.get('id')]
# retry items not acted on
@@ -237,7 +237,7 @@ class DownloadStationAPI(GenericClient):
logger.log('%s: retry %s %s item(s) in %ss' % (self.name, act, len(item['fail']), i), logger.DEBUG)
time.sleep(i)
item['fail'] = []
- for task in filter_iter(filter_func, self._tinf(retry_ids, err=True)):
+ for task in filter(filter_func, self._tinf(retry_ids, err=True)):
item[('fail', 'ignore')[self._ignore_state(task)]] += [task.get('id')]
if not item['fail']:
@@ -256,7 +256,7 @@ class DownloadStationAPI(GenericClient):
"""
Add magnet to client (overridden class function)
:param search_result: A populated search result object
- :return: Id of task in client, True if added but no ID, else Falsy if nothing added
+ :return: ID of task in client, True if added but no ID, else Falsy if nothing added
"""
if 3 <= self._task_version:
return self._add_torrent(uri={'uri': search_result.url})
@@ -269,7 +269,7 @@ class DownloadStationAPI(GenericClient):
"""
Add file to client (overridden class function)
:param search_result: A populated search result object
- :return: Id of task in client, True if added but no ID, else Falsy if nothing added
+ :return: ID of task in client, True if added but no ID, else Falsy if nothing added
"""
return self._add_torrent(
files={'file': ('%s.torrent' % re.sub(r'(\.torrent)+$', '', search_result.name), search_result.content)})
@@ -280,7 +280,7 @@ class DownloadStationAPI(GenericClient):
Create client task
:param uri: URI param for client API
:param files: file param for client API
- :return: Id of task in client, True if created but no id found, else Falsy if nothing created
+ :return: ID of task in client, True if created but no ID found, else Falsy if nothing created
"""
if self._testmode:
# noinspection PyUnresolvedReferences
@@ -303,7 +303,7 @@ class DownloadStationAPI(GenericClient):
# noinspection PyUnresolvedReferences
if response and response.get('success'):
for s in (1, 3, 5, 10, 15, 30, 60):
- tasks = filter_list(lambda t: task_stamp <= t['additional']['detail']['create_time'], self._tinf())
+ tasks = list(filter(lambda t: task_stamp <= t['additional']['detail']['create_time'], self._tinf()))
try:
return str(self._client_has(tasks, uri, files)[0].get('id'))
except IndexError:
@@ -324,8 +324,8 @@ class DownloadStationAPI(GenericClient):
if uri or files:
u = isinstance(uri, dict) and (uri.get('uri', '') or '').lower() or None
f = isinstance(files, dict) and (files.get('file', [''])[0]).lower() or None
- result = filter_list(lambda t: u and t['additional']['detail']['uri'].lower() == u
- or f and t['additional']['detail']['uri'].lower() in f, tasks)
+ result = list(filter(lambda t: u and t['additional']['detail']['uri'].lower() == u
+ or f and t['additional']['detail']['uri'].lower() in f, tasks))
return result
def _client_request(self, method, t_id=None, t_params=None, files=None):
@@ -360,7 +360,7 @@ class DownloadStationAPI(GenericClient):
return self._error_task(response)
if None is not t_id and None is t_params and 'create' != method:
- return filter_list(lambda r: r.get('error'), response.get('data', {})) or True
+ return list(filter(lambda r: r.get('error'), response.get('data', {}))) or True
return response
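
The `_23.map_list`/`filter_list`/`filter_iter` shims papered over py2 returning lists where py3 returns lazy iterators. With py2 dropped, the mechanical replacement is `list(map(...))`/`list(filter(...))` where list semantics (truthiness, `len`, reuse) are needed, and bare `filter` where a single pass suffices, as the edits above show. A sketch of the equivalences with illustrative data:

```python
tasks = [{'id': 'a', 'size': 3}, {'id': 'b', 'size': 0}]

sizes = list(map(lambda t: t['size'], tasks))       # was map_list(...)
nonzero = list(filter(lambda t: t['size'], tasks))  # was filter_list(...)
lazy = filter(lambda t: t['size'], tasks)           # was filter_iter(...), already lazy

print(sizes, [t['id'] for t in nonzero], [t['id'] for t in lazy])
# [3, 0] ['a'] ['a']
```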
diff --git a/sickgear/clients/generic.py b/sickgear/clients/generic.py
index a9200e5b..143903e9 100644
--- a/sickgear/clients/generic.py
+++ b/sickgear/clients/generic.py
@@ -129,7 +129,7 @@ class GenericClient(object):
def _add_torrent_file(self, result):
"""
This should be overridden to return True/False from the client
- when a torrent is added via result.content (only .torrent file)
+ when a torrent is added via `result.content` (only .torrent file)
"""
return False
@@ -179,9 +179,9 @@ class GenericClient(object):
"""
This should be overridden to resume task(s) in client
- :param ids: Id(s) to act on
+ :param ids: ID(s) to act on
:type ids: list or string
- :return: True if success, Id(s) that could not be resumed, else Falsy if failure
+ :return: True if success, ID(s) that could not be resumed, else Falsy if failure
:rtype: bool or list
"""
return False
@@ -189,9 +189,9 @@ class GenericClient(object):
def _delete_torrent(self, ids):
"""
This should be overridden to delete task(s) from client
- :param ids: Id(s) to act on
+ :param ids: ID(s) to act on
:type ids: list or string
- :return: True if success, Id(s) that could not be deleted, else Falsy if failure
+ :return: True if success, ID(s) that could not be deleted, else Falsy if failure
:rtype: bool or list
"""
return False
@@ -200,7 +200,7 @@ class GenericClient(object):
def _get_torrent_hash(result):
if result.url.startswith('magnet'):
- result.hash = re.findall(r'urn:btih:([\w]{32,40})', result.url)[0]
+ result.hash = re.findall(r'urn:btih:(\w{32,40})', result.url)[0]
if 32 == len(result.hash):
result.hash = make_btih(result.hash).lower()
else:
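
The tightened regex (the brackets around `\w` were redundant) pulls the BitTorrent info-hash out of a magnet URI, which appears as either 40 hex characters or 32 base32 characters; the 32-char form is then normalised through SickGear's `make_btih`. A self-contained sketch of the same extraction, using the stdlib for the base32 conversion instead:

```python
import base64
import re

def magnet_hash(url):
    # 40 hex chars or 32 base32 chars follow urn:btih:
    btih = re.findall(r'urn:btih:(\w{32,40})', url)[0]
    if 32 == len(btih):  # base32 form: decode to 20 bytes, render as 40-char hex
        btih = base64.b32decode(btih.upper()).hex()
    return btih.lower()

print(magnet_hash('magnet:?xt=urn:btih:' + 'A' * 32))  # 40 zeros
```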
diff --git a/sickgear/clients/qbittorrent.py b/sickgear/clients/qbittorrent.py
index f0aa5ebd..b9711e89 100644
--- a/sickgear/clients/qbittorrent.py
+++ b/sickgear/clients/qbittorrent.py
@@ -26,7 +26,7 @@ import sickgear
from requests.exceptions import HTTPError
-from _23 import filter_iter, filter_list, map_list, unquote_plus
+from _23 import unquote_plus
from six import string_types
# noinspection PyUnreachableCode
@@ -58,9 +58,9 @@ class QbittorrentAPI(GenericClient):
id=t['hash'], title=t['name'], total_size=gp.get('total_size') or 0,
added_ts=gp.get('addition_date'), last_completed_ts=gp.get('completion_date'),
last_started_ts=None, seed_elapsed_secs=gp.get('seeding_time'),
- wanted_size=sum(map_list(lambda tf: wanted(tf) and tf.get('size') or 0, f)) or None,
- wanted_down=sum(map_list(lambda tf: wanted(tf) and downloaded(tf) or 0, f)) or None,
- tally_down=sum(map_list(lambda tf: downloaded(tf) or 0, f)) or None,
+ wanted_size=sum(list(map(lambda tf: wanted(tf) and tf.get('size') or 0, f))) or None,
+ wanted_down=sum(list(map(lambda tf: wanted(tf) and downloaded(tf) or 0, f))) or None,
+ tally_down=sum(list(map(lambda tf: downloaded(tf) or 0, f))) or None,
tally_up=gp.get('total_uploaded'),
state='done' if 'pausedUP' == t.get('state') else ('down', 'seed')['up' in t.get('state').lower()]
))
@@ -68,10 +68,10 @@ class QbittorrentAPI(GenericClient):
('torrents/files', 'query/propertiesFiles/%s' % ti['hash'])[not self.api_ns],
params=({'hash': ti['hash']}, {})[not self.api_ns], json=True) or {})
valid_stat = (lambda ti: not self._ignore_state(ti)
- and sum(map_list(lambda tf: wanted(tf) and downloaded(tf) or 0, file_list(ti))))
- result = map_list(lambda t: base_state(t, self._tinf(t['hash'])[0], file_list(t)),
- filter_list(lambda t: re.search('(?i)queue|stall|(up|down)load|pausedUP', t['state']) and
- valid_stat(t), self._tinf(ids, False)))
+ and sum(list(map(lambda tf: wanted(tf) and downloaded(tf) or 0, file_list(ti)))))
+ result = list(map(lambda t: base_state(t, self._tinf(t['hash'])[0], file_list(t)),
+ list(filter(lambda t: re.search('(?i)queue|stall|(up|down)load|pausedUP', t['state']) and
+ valid_stat(t), self._tinf(ids, False)))))
return result
@@ -109,8 +109,7 @@ class QbittorrentAPI(GenericClient):
except (BaseException, Exception):
if getinfo:
result += [dict(error=True, id=rid)]
- for t in filter_iter(lambda d: isinstance(d.get('name'), string_types) and d.get('name'),
- (result, [])[getinfo]):
+ for t in filter(lambda d: isinstance(d.get('name'), string_types) and d.get('name'), (result, [])[getinfo]):
t['name'] = unquote_plus(t.get('name'))
return result
@@ -148,7 +147,7 @@ class QbittorrentAPI(GenericClient):
"""
Set maximal priority in queue to torrent task
:param ids: ID(s) to promote
- :return: True/Falsy if success/failure else Id(s) that failed to be changed
+ :return: True/Falsy if success/failure else ID(s) that failed to be changed
"""
def _maxpri_filter(t):
mark_fail = True
@@ -180,7 +179,7 @@ class QbittorrentAPI(GenericClient):
"""
Set label/category to torrent task
:param ids: ID(s) to change
- :return: True/Falsy if success/failure else Id(s) that failed to be changed
+ :return: True/Falsy if success/failure else ID(s) that failed to be changed
"""
def _label_filter(t):
mark_fail = True
@@ -206,8 +205,8 @@ class QbittorrentAPI(GenericClient):
# type: (Union[AnyStr, list]) -> Union[bool, list]
"""
Pause item(s)
- :param ids: Id(s) to pause
- :return: True/Falsy if success/failure else Id(s) that failed to be paused
+ :param ids: ID(s) to pause
+ :return: True/Falsy if success/failure else ID(s) that failed to be paused
"""
def _pause_filter(t):
mark_fail = True
@@ -253,8 +252,8 @@ class QbittorrentAPI(GenericClient):
# type: (Union[AnyStr, list]) -> Union[bool, list]
"""
Resume task(s) in client
- :param ids: Id(s) to act on
- :return: True if success, Id(s) that could not be resumed, else Falsy if failure
+ :param ids: ID(s) to act on
+ :return: True if success, ID(s) that could not be resumed, else Falsy if failure
"""
return self._perform_task(
'resume', ids,
@@ -268,8 +267,8 @@ class QbittorrentAPI(GenericClient):
# type: (Union[AnyStr, list]) -> Union[bool, list]
"""
Delete task(s) from client
- :param ids: Id(s) to act on
- :return: True if success, Id(s) that could not be deleted, else Falsy if failure
+ :param ids: ID(s) to act on
+ :return: True if success, ID(s) that could not be deleted, else Falsy if failure
"""
return self._perform_task(
'delete', ids,
@@ -284,13 +283,13 @@ class QbittorrentAPI(GenericClient):
"""
Set up and send a method to client
:param method: Either `resume` or `delete`
- :param ids: Id(s) to perform method on
+ :param ids: ID(s) to perform method on
:param filter_func: Callback function passed to _action that will filter tasks as failed or erroneous
:param pause_first: True if task should be paused prior to invoking method
- :return: True if success, Id(s) that could not be acted upon, else Falsy if failure
+ :return: True if success, ID(s) that could not be acted upon, else Falsy if failure
"""
if isinstance(ids, (string_types, list)):
- rids = ids if isinstance(ids, list) else map_list(lambda x: x.strip(), ids.split(','))
+ rids = ids if isinstance(ids, list) else list(map(lambda x: x.strip(), ids.split(',')))
result = pause_first and self._pause_torrent(rids) # get items not paused
result = (isinstance(result, list) and result or [])
@@ -304,7 +303,7 @@ class QbittorrentAPI(GenericClient):
if isinstance(ids, (string_types, list)):
item = dict(fail=[], ignore=[])
- for task in filter_iter(filter_func, self._tinf(ids, use_props=False, err=True)):
+ for task in filter(filter_func, self._tinf(ids, use_props=False, err=True)):
item[('fail', 'ignore')[self._ignore_state(task)]] += [task.get('hash')]
# retry items that are not acted on
@@ -316,7 +315,7 @@ class QbittorrentAPI(GenericClient):
logger.log('%s: retry %s %s item(s) in %ss' % (self.name, act, len(item['fail']), i), logger.DEBUG)
time.sleep(i)
item['fail'] = []
- for task in filter_iter(filter_func, self._tinf(retry_ids, use_props=False, err=True)):
+ for task in filter(filter_func, self._tinf(retry_ids, use_props=False, err=True)):
item[('fail', 'ignore')[self._ignore_state(task)]] += [task.get('hash')]
if not item['fail']:
@@ -378,7 +377,7 @@ class QbittorrentAPI(GenericClient):
if True is response:
for s in (1, 3, 5, 10, 15, 30, 60):
- if filter_list(lambda t: task_stamp <= t['addition_date'], self._tinf(data.hash)):
+ if list(filter(lambda t: task_stamp <= t['addition_date'], self._tinf(data.hash))):
return data.hash
time.sleep(s)
return True
@@ -396,7 +395,7 @@ class QbittorrentAPI(GenericClient):
"""
Send a request to client
:param cmd: Api task to invoke
- :param kwargs: keyword arguments to pass thru to helpers getURL function
+ :param kwargs: keyword arguments to pass through to helpers getURL function
:return: JSON decoded response dict, True if success and no response body, text error or None if failure
"""
authless = bool(re.search('(?i)login|version', cmd))
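
After submitting a torrent, `_add_torrent` confirms the client accepted it by re-listing tasks and matching on the submission timestamp, sleeping progressively longer between attempts. The general confirm-with-backoff shape, with `fetch_tasks`/`predicate` as illustrative stand-ins:

```python
import time

def confirm_added(fetch_tasks, predicate, delays=(1, 3, 5, 10, 15, 30, 60)):
    """Poll fetch_tasks with growing delays until predicate matches a task."""
    for delay in delays:
        matches = list(filter(predicate, fetch_tasks()))  # same idiom as above
        if matches:
            return matches[0]
        time.sleep(delay)
    return None  # added, but not confirmed within ~2 minutes of polling
```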
diff --git a/sickgear/clients/rtorrent.py b/sickgear/clients/rtorrent.py
index b728c11f..332428cd 100644
--- a/sickgear/clients/rtorrent.py
+++ b/sickgear/clients/rtorrent.py
@@ -90,7 +90,7 @@ class RtorrentAPI(GenericClient):
# try:
# if ratio > 0:
#
- # # Explicitly set all group options to ensure it is setup correctly
+ # # Explicitly set all group options to ensure it is set up correctly
# group.set_upload('1M')
# group.set_min(ratio)
# group.set_max(ratio)
diff --git a/sickgear/clients/transmission.py b/sickgear/clients/transmission.py
index 5c8074bb..8fb5810a 100644
--- a/sickgear/clients/transmission.py
+++ b/sickgear/clients/transmission.py
@@ -84,7 +84,7 @@ class TransmissionAPI(GenericClient):
def _add_torrent(self, t_object):
- # populate blankable and download_dir
+ # populate blanked and download_dir
if not self._get_auth():
logger.log('%s: Authentication failed' % self.name, logger.ERROR)
return False
diff --git a/sickgear/clients/utorrent.py b/sickgear/clients/utorrent.py
index 749a3efc..24d4a8e0 100644
--- a/sickgear/clients/utorrent.py
+++ b/sickgear/clients/utorrent.py
@@ -24,17 +24,17 @@ from _23 import urlencode
from six import iteritems
-class uTorrentAPI(GenericClient):
+class UtorrentAPI(GenericClient):
def __init__(self, host=None, username=None, password=None):
- super(uTorrentAPI, self).__init__('uTorrent', host, username, password)
+ super(UtorrentAPI, self).__init__('uTorrent', host, username, password)
self.url = self.host + 'gui/'
def _request(self, method='get', params=None, files=None, **kwargs):
params = {} if None is params else params
- return super(uTorrentAPI, self)._request(
+ return super(UtorrentAPI, self)._request(
method=method,
params='token={0:s}&{1:s}'.format(self.auth, '&'.join(
['%s' % urlencode(dict([[key, str(value)]]))
@@ -128,4 +128,4 @@ class uTorrentAPI(GenericClient):
return self._request(params=params)
-api = uTorrentAPI()
+api = UtorrentAPI()
diff --git a/sickgear/common.py b/sickgear/common.py
index 804fee6b..efdcc4e8 100644
--- a/sickgear/common.py
+++ b/sickgear/common.py
@@ -25,7 +25,6 @@ import uuid
import sickgear
-from _23 import map_list
from six import integer_types, iterkeys, string_types
# noinspection PyUnresolvedReferences
@@ -180,7 +179,7 @@ class Quality(object):
return Quality.qualityStrings[quality].replace('SD DVD', 'SD DVD/BR/BD')
@staticmethod
- def _getStatusStrings(status):
+ def _get_status_strings(status):
"""
:param status: status
@@ -188,14 +187,14 @@ class Quality(object):
:return:
:rtype: AnyStr
"""
- toReturn = {}
+ to_return = {}
for _x in Quality.qualityStrings:
- toReturn[Quality.compositeStatus(status, _x)] = '%s (%s)' % (
+ to_return[Quality.composite_status(status, _x)] = '%s (%s)' % (
Quality.statusPrefixes[status], Quality.qualityStrings[_x])
- return toReturn
+ return to_return
@staticmethod
- def combineQualities(any_qualities, best_qualities):
+ def combine_qualities(any_qualities, best_qualities):
# type: (List[int], List[int]) -> int
"""
@@ -211,7 +210,7 @@ class Quality(object):
return any_quality | (best_quality << 16)
@staticmethod
- def splitQuality(quality):
+ def split_quality(quality):
# type: (int) -> Tuple[List[int], List[int]]
"""
@@ -228,10 +227,10 @@ class Quality(object):
return sorted(any_qualities), sorted(best_qualities)
@staticmethod
- def nameQuality(name, anime=False):
+ def name_quality(name, anime=False):
"""
Return the quality from an episode file renamed by SickGear
- If no quality is achieved it will try sceneQuality regex
+ If no quality is achieved it will try scene_quality regex
:param name: name
:type name: AnyStr
:param anime: is anime
@@ -248,7 +247,7 @@ class Quality(object):
continue
if Quality.NONE == _x: # Last chance
- return Quality.sceneQuality(name, anime)
+ return Quality.scene_quality(name, anime)
regex = r'\W' + Quality.qualityStrings[_x].replace(' ', r'\W') + r'\W'
regex_match = re.search(regex, name, re.I)
@@ -256,7 +255,7 @@ class Quality(object):
return _x
@staticmethod
- def sceneQuality(name, anime=False):
+ def scene_quality(name, anime=False):
"""
Return the quality from the scene episode file
:param name: name
@@ -347,7 +346,7 @@ class Quality(object):
return Quality.UNKNOWN
@staticmethod
- def fileQuality(filename):
+ def file_quality(filename):
"""
:param filename: filename
@@ -406,7 +405,7 @@ class Quality(object):
return Quality.UNKNOWN
@staticmethod
- def assumeQuality(name):
+ def assume_quality(name):
"""
:param name: name
@@ -421,7 +420,7 @@ class Quality(object):
return Quality.UNKNOWN
@staticmethod
- def compositeStatus(status, quality):
+ def composite_status(status, quality):
"""
:param status: status
@@ -434,7 +433,7 @@ class Quality(object):
return status + 100 * quality
@staticmethod
- def qualityDownloaded(status):
+ def quality_downloaded(status):
# type: (int) -> int
"""
@@ -446,7 +445,7 @@ class Quality(object):
return (status - DOWNLOADED) // 100
@staticmethod
- def splitCompositeStatus(status):
+ def split_composite_status(status):
# type: (int) -> Tuple[int, int]
"""Returns a tuple containing (status, quality)
:param status: status
@@ -461,7 +460,7 @@ class Quality(object):
return status, Quality.NONE
@staticmethod
- def statusFromName(name, assume=True, anime=False):
+ def status_from_name(name, assume=True, anime=False):
"""
:param name: name
@@ -473,13 +472,13 @@ class Quality(object):
:return:
:rtype: int or long
"""
- quality = Quality.nameQuality(name, anime)
+ quality = Quality.name_quality(name, anime)
if assume and Quality.UNKNOWN == quality:
- quality = Quality.assumeQuality(name)
- return Quality.compositeStatus(DOWNLOADED, quality)
+ quality = Quality.assume_quality(name)
+ return Quality.composite_status(DOWNLOADED, quality)
@staticmethod
- def statusFromNameOrFile(file_path, assume=True, anime=False):
+ def status_from_name_or_file(file_path, assume=True, anime=False):
"""
:param file_path: file path
@@ -491,12 +490,12 @@ class Quality(object):
:return:
:rtype: int or long
"""
- quality = Quality.nameQuality(file_path, anime)
+ quality = Quality.name_quality(file_path, anime)
if Quality.UNKNOWN == quality:
- quality = Quality.fileQuality(file_path)
+ quality = Quality.file_quality(file_path)
if assume and Quality.UNKNOWN == quality:
- quality = Quality.assumeQuality(file_path)
- return Quality.compositeStatus(DOWNLOADED, quality)
+ quality = Quality.assume_quality(file_path)
+ return Quality.composite_status(DOWNLOADED, quality)
SNATCHED = None
SNATCHED_PROPER = None
@@ -516,7 +515,7 @@ class WantedQualities(dict):
super(WantedQualities, self).__init__(**kwargs)
def _generate_wantedlist(self, qualities):
- initial_qualities, upgrade_qualities = Quality.splitQuality(qualities)
+ initial_qualities, upgrade_qualities = Quality.split_quality(qualities)
max_initial_quality = max(initial_qualities or [Quality.NONE])
min_upgrade_quality = min(upgrade_qualities or [1 << 16])
self[qualities] = {0: {self.bothlists: False, self.wantedlist: initial_qualities, self.upgradelist: False}}
@@ -563,23 +562,23 @@ for (attr_name, qual_val) in [
('SNATCHED', SNATCHED), ('SNATCHED_PROPER', SNATCHED_PROPER), ('SNATCHED_BEST', SNATCHED_BEST),
('DOWNLOADED', DOWNLOADED), ('ARCHIVED', ARCHIVED), ('FAILED', FAILED),
]:
- setattr(Quality, attr_name, map_list(lambda qk: Quality.compositeStatus(qual_val, qk),
- iterkeys(Quality.qualityStrings)))
+ setattr(Quality, attr_name, list(map(lambda qk: Quality.composite_status(qual_val, qk),
+ iterkeys(Quality.qualityStrings))))
Quality.SNATCHED_ANY = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST
-SD = Quality.combineQualities([Quality.SDTV, Quality.SDDVD], [])
-HD = Quality.combineQualities(
+SD = Quality.combine_qualities([Quality.SDTV, Quality.SDDVD], [])
+HD = Quality.combine_qualities(
[Quality.HDTV, Quality.FULLHDTV, Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.HDBLURAY, Quality.FULLHDBLURAY],
[]) # HD720p + HD1080p
-HD720p = Quality.combineQualities([Quality.HDTV, Quality.HDWEBDL, Quality.HDBLURAY], [])
-HD1080p = Quality.combineQualities([Quality.FULLHDTV, Quality.FULLHDWEBDL, Quality.FULLHDBLURAY], [])
-UHD2160p = Quality.combineQualities([Quality.UHD4KWEB], [])
-ANY = Quality.combineQualities(
+HD720p = Quality.combine_qualities([Quality.HDTV, Quality.HDWEBDL, Quality.HDBLURAY], [])
+HD1080p = Quality.combine_qualities([Quality.FULLHDTV, Quality.FULLHDWEBDL, Quality.FULLHDBLURAY], [])
+UHD2160p = Quality.combine_qualities([Quality.UHD4KWEB], [])
+ANY = Quality.combine_qualities(
[Quality.SDTV, Quality.SDDVD, Quality.HDTV, Quality.FULLHDTV, Quality.HDWEBDL, Quality.FULLHDWEBDL,
Quality.HDBLURAY, Quality.FULLHDBLURAY, Quality.UNKNOWN], []) # SD + HD
# legacy template, can't remove due to reference in mainDB upgrade?
-BEST = Quality.combineQualities([Quality.SDTV, Quality.HDTV, Quality.HDWEBDL], [Quality.HDTV])
+BEST = Quality.combine_qualities([Quality.SDTV, Quality.HDTV, Quality.HDWEBDL], [Quality.HDTV])
qualityPresets = (SD, HD, HD720p, HD1080p, UHD2160p, ANY)
@@ -608,7 +607,7 @@ class StatusStrings(object):
def __getitem__(self, name):
if name in Quality.SNATCHED_ANY + Quality.DOWNLOADED + Quality.ARCHIVED:
- status, quality = Quality.splitCompositeStatus(name)
+ status, quality = Quality.split_composite_status(name)
if quality == Quality.NONE:
return self.statusStrings[status]
return '%s (%s)' % (self.statusStrings[status], Quality.qualityStrings[quality])
@@ -704,7 +703,7 @@ class NeededQualities(object):
"""
from sickgear.tv import TVShow
if isinstance(show_obj, TVShow):
- init, upgrade = Quality.splitQuality(show_obj.quality)
+ init, upgrade = Quality.split_quality(show_obj.quality)
all_qual = set(init + upgrade)
need_sd = need_hd = need_uhd = need_webdl = False
for wanted_qualities in all_qual:
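
The common.py renames above all orbit SickGear's packed status/quality scheme. For orientation, here is a minimal sketch of the encoding the surrounding code implies: each quality is a single-bit flag, `combine_qualities` packs an initial list into the low 16 bits and an upgrade list into the high 16 (matching the `1 << 16` sentinel in `_generate_wantedlist` and the `>> 2`/`>> 3` shifts in the mainDB migration below), and `composite_status` folds a status and a quality into one integer. The concrete constants and the `status + 100 * quality` packing are assumptions drawn from this diff, not lines copied from sickgear/common.py.

```python
# Illustrative sketch only; constants and packing are assumptions, not the
# verbatim sickgear/common.py implementation.

DOWNLOADED = 4                            # hypothetical status code
SDTV, SDDVD, HDTV = 1, 1 << 1, 1 << 2     # qualities as single-bit flags
ALL_QUALITIES = (SDTV, SDDVD, HDTV)

def composite_status(status, quality):
    # Pack status and quality into one integer (statuses stay below 100).
    return status + 100 * quality

def split_composite_status(composite):
    # Inverse of composite_status for a single-quality composite.
    return composite % 100, composite // 100

def combine_qualities(initial, upgrade):
    # Initial qualities occupy the low 16 bits, upgrade targets the high 16.
    mask = 0
    for q in initial:
        mask |= q
    for q in upgrade:
        mask |= q << 16
    return mask

def split_quality(mask):
    initial = [q for q in ALL_QUALITIES if q & mask]
    upgrade = [q for q in ALL_QUALITIES if (q << 16) & mask]
    return initial, upgrade

status = composite_status(DOWNLOADED, HDTV)
assert split_composite_status(status) == (DOWNLOADED, HDTV)
assert split_quality(combine_qualities([SDTV, SDDVD], [HDTV])) == ([SDTV, SDDVD], [HDTV])
```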
diff --git a/sickgear/config.py b/sickgear/config.py
index c98df792..9fb7aa98 100644
--- a/sickgear/config.py
+++ b/sickgear/config.py
@@ -23,7 +23,7 @@ import sickgear.providers
from . import db, helpers, logger, naming
from lib.api_trakt import TraktAPI
-from _23 import filter_list, urlsplit, urlunsplit
+from _23 import urlsplit, urlunsplit
from six import string_types
@@ -152,7 +152,7 @@ def schedule_mediaprocess(iv):
if sickgear.MEDIAPROCESS_INTERVAL < sickgear.MIN_MEDIAPROCESS_INTERVAL:
sickgear.MEDIAPROCESS_INTERVAL = sickgear.MIN_MEDIAPROCESS_INTERVAL
- sickgear.media_process_scheduler.cycleTime = datetime.timedelta(minutes=sickgear.MEDIAPROCESS_INTERVAL)
+ sickgear.media_process_scheduler.cycle_time = datetime.timedelta(minutes=sickgear.MEDIAPROCESS_INTERVAL)
sickgear.media_process_scheduler.set_paused_state()
@@ -162,14 +162,14 @@ def schedule_recentsearch(iv):
if sickgear.RECENTSEARCH_INTERVAL < sickgear.MIN_RECENTSEARCH_INTERVAL:
sickgear.RECENTSEARCH_INTERVAL = sickgear.MIN_RECENTSEARCH_INTERVAL
- sickgear.recent_search_scheduler.cycleTime = datetime.timedelta(minutes=sickgear.RECENTSEARCH_INTERVAL)
+ sickgear.recent_search_scheduler.cycle_time = datetime.timedelta(minutes=sickgear.RECENTSEARCH_INTERVAL)
def schedule_backlog(iv):
sickgear.BACKLOG_PERIOD = minimax(iv, sickgear.DEFAULT_BACKLOG_PERIOD,
- sickgear.MIN_BACKLOG_PERIOD, sickgear.MAX_BACKLOG_PERIOD)
+ sickgear.MIN_BACKLOG_PERIOD, sickgear.MAX_BACKLOG_PERIOD)
- sickgear.backlog_search_scheduler.action.cycleTime = sickgear.BACKLOG_PERIOD
+ sickgear.backlog_search_scheduler.action.cycle_time = sickgear.BACKLOG_PERIOD
def schedule_update_software(iv):
@@ -178,7 +178,7 @@ def schedule_update_software(iv):
if sickgear.UPDATE_INTERVAL < sickgear.MIN_UPDATE_INTERVAL:
sickgear.UPDATE_INTERVAL = sickgear.MIN_UPDATE_INTERVAL
- sickgear.update_software_scheduler.cycleTime = datetime.timedelta(hours=sickgear.UPDATE_INTERVAL)
+ sickgear.update_software_scheduler.cycle_time = datetime.timedelta(hours=sickgear.UPDATE_INTERVAL)
def schedule_update_software_notify(update_notify):
@@ -195,10 +195,10 @@ def schedule_update_software_notify(update_notify):
def schedule_update_packages(iv):
sickgear.UPDATE_PACKAGES_INTERVAL = minimax(iv, sickgear.DEFAULT_UPDATE_PACKAGES_INTERVAL,
- sickgear.MIN_UPDATE_PACKAGES_INTERVAL,
- sickgear.MAX_UPDATE_PACKAGES_INTERVAL)
+ sickgear.MIN_UPDATE_PACKAGES_INTERVAL,
+ sickgear.MAX_UPDATE_PACKAGES_INTERVAL)
- sickgear.update_packages_scheduler.cycleTime = datetime.timedelta(hours=sickgear.UPDATE_PACKAGES_INTERVAL)
+ sickgear.update_packages_scheduler.cycle_time = datetime.timedelta(hours=sickgear.UPDATE_PACKAGES_INTERVAL)
def schedule_update_packages_notify(update_packages_notify):
@@ -228,15 +228,6 @@ def schedule_trakt(use_trakt):
return
sickgear.USE_TRAKT = use_trakt
- # if sickgear.USE_TRAKT:
- # sickgear.trakt_checker_scheduler.start()
- # else:
- # sickgear.trakt_checker_scheduler.stop()
- # logger.log(u'Waiting for the TRAKTCHECKER thread to exit')
- # try:
- # sickgear.trakt_checker_scheduler.join(10)
- # except:
- # pass
def schedule_subtitles(use_subtitles):
@@ -250,7 +241,7 @@ def schedule_emby_watched(emby_watched_interval):
0, sickgear.MAX_WATCHEDSTATE_INTERVAL)
if emby_watched_iv and emby_watched_iv != sickgear.EMBY_WATCHEDSTATE_INTERVAL:
sickgear.EMBY_WATCHEDSTATE_INTERVAL = emby_watched_iv
- sickgear.emby_watched_state_scheduler.cycleTime = datetime.timedelta(minutes=emby_watched_iv)
+ sickgear.emby_watched_state_scheduler.cycle_time = datetime.timedelta(minutes=emby_watched_iv)
sickgear.EMBY_WATCHEDSTATE_SCHEDULED = bool(emby_watched_iv)
sickgear.emby_watched_state_scheduler.set_paused_state()
@@ -261,7 +252,7 @@ def schedule_plex_watched(plex_watched_interval):
0, sickgear.MAX_WATCHEDSTATE_INTERVAL)
if plex_watched_iv and plex_watched_iv != sickgear.PLEX_WATCHEDSTATE_INTERVAL:
sickgear.PLEX_WATCHEDSTATE_INTERVAL = plex_watched_iv
- sickgear.plex_watched_state_scheduler.cycleTime = datetime.timedelta(minutes=plex_watched_iv)
+ sickgear.plex_watched_state_scheduler.cycle_time = datetime.timedelta(minutes=plex_watched_iv)
sickgear.PLEX_WATCHEDSTATE_SCHEDULED = bool(plex_watched_iv)
sickgear.plex_watched_state_scheduler.set_paused_state()
@@ -345,7 +336,7 @@ def clean_hosts(hosts, default_port=None, allow_base=False):
def clean_url(url, add_slash=True):
- """ Returns an cleaned url starting with a scheme and folder with trailing '/' or an empty string """
+ """ Returns a cleaned url starting with a scheme and folder with trailing '/' or an empty string """
if url and url.strip():
@@ -437,7 +428,7 @@ def check_setting_float(config, cfg_name, item_name, def_val):
def check_setting_str(config, cfg_name, item_name, def_val, log=True):
"""
- For passwords you must include the word `password` in the item_name and
+ For passwords, you must include the word `password` in the item_name and
add `helpers.encrypt(ITEM_NAME, ENCRYPTION_VERSION)` in save_config()
"""
@@ -662,7 +653,7 @@ class ConfigMigrator(object):
Reads in the old naming settings from your config and generates a new config template from them.
"""
# get the old settings from the file and store them in the new variable names
- for prov in [curProvider for curProvider in sickgear.providers.sortedProviderList()
+ for prov in [curProvider for curProvider in sickgear.providers.sorted_sources()
if 'omgwtfnzbs' == curProvider.name]:
prov.username = check_setting_str(self.config_obj, 'omgwtfnzbs', 'omgwtfnzbs_uid', '')
prov.api_key = check_setting_str(self.config_obj, 'omgwtfnzbs', 'omgwtfnzbs_key', '')
@@ -773,13 +764,13 @@ class ConfigMigrator(object):
# Migration v6: Rename daily search to recent search
def _migrate_v6(self):
sickgear.RECENTSEARCH_INTERVAL = check_setting_int(self.config_obj, 'General', 'dailysearch_frequency',
- sickgear.DEFAULT_RECENTSEARCH_INTERVAL)
+ sickgear.DEFAULT_RECENTSEARCH_INTERVAL)
sickgear.RECENTSEARCH_STARTUP = bool(check_setting_int(self.config_obj, 'General', 'dailysearch_startup', 1))
if sickgear.RECENTSEARCH_INTERVAL < sickgear.MIN_RECENTSEARCH_INTERVAL:
sickgear.RECENTSEARCH_INTERVAL = sickgear.MIN_RECENTSEARCH_INTERVAL
- for curProvider in sickgear.providers.sortedProviderList():
+ for curProvider in sickgear.providers.sorted_sources():
if hasattr(curProvider, 'enable_recentsearch'):
curProvider.enable_recentsearch = bool(check_setting_int(
self.config_obj, curProvider.get_id().upper(), curProvider.get_id() + '_enable_dailysearch', 1))
@@ -831,7 +822,7 @@ class ConfigMigrator(object):
# Migration v15: Transmithe.net variables
def _migrate_v15(self):
try:
- neb = filter_list(lambda p: 'Nebulance' in p.name, sickgear.providers.sortedProviderList())[0]
+ neb = list(filter(lambda p: 'Nebulance' in p.name, sickgear.providers.sorted_sources()))[0]
except (BaseException, Exception):
return
# get the old settings from the file and store them in the new variable names
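
The dropped `_23.filter_list` import (and the `map_list` call replaced in common.py above) belongs to a family of py2 shims that returned lists from filter/map. With py2 support gone, the plain builtins plus an explicit `list()` are equivalent; a small before/after sketch, with the shim body assumed from its call sites:

```python
# Assumed shape of the retired _23 shim; with py2 dropped, a plain
# list(filter(...)) expresses the same thing.
def filter_list(func, iterable):
    return list(filter(func, iterable))

names = ['Nebulance', 'omgwtfnzbs']

old_style = filter_list(lambda p: 'Nebulance' in p, names)[0]    # removed form
new_style = list(filter(lambda p: 'Nebulance' in p, names))[0]   # added form

assert old_style == new_style == 'Nebulance'
```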
diff --git a/sickgear/databases/cache_db.py b/sickgear/databases/cache_db.py
index 87e7ea98..2332af24 100644
--- a/sickgear/databases/cache_db.py
+++ b/sickgear/databases/cache_db.py
@@ -96,16 +96,16 @@ class InitialSchema(db.SchemaUpgrade):
])
def test(self):
- return self.hasTable('lastUpdate')
+ return self.has_table('lastUpdate')
def execute(self):
self.do_query(self.queries[next(iter(self.queries))])
- self.setDBVersion(MIN_DB_VERSION, check_db_version=False)
+ self.set_db_version(MIN_DB_VERSION, check_db_version=False)
class ConsolidateProviders(InitialSchema):
def test(self):
- return 1 < self.checkDBVersion()
+ return 1 < self.call_check_db_version()
def execute(self):
keep_tables = {'lastUpdate', 'lastSearch', 'db_version',
@@ -113,13 +113,13 @@ class ConsolidateProviders(InitialSchema):
# old provider_cache is dropped before re-creation
# noinspection SqlResolve
self.do_query(['DROP TABLE [provider_cache]'] + self.queries['consolidate_providers'] +
- ['DROP TABLE [%s]' % t for t in (set(self.listTables()) - keep_tables)])
+ ['DROP TABLE [%s]' % t for t in (set(self.list_tables()) - keep_tables)])
self.finish(True)
class AddBacklogParts(ConsolidateProviders):
def test(self):
- return 2 < self.checkDBVersion()
+ return 2 < self.call_check_db_version()
def execute(self):
# noinspection SqlResolve
@@ -130,7 +130,7 @@ class AddBacklogParts(ConsolidateProviders):
class AddProviderFailureHandling(AddBacklogParts):
def test(self):
- return 3 < self.checkDBVersion()
+ return 3 < self.call_check_db_version()
def execute(self):
self.do_query(self.queries['add_provider_fails'])
@@ -139,17 +139,17 @@ class AddProviderFailureHandling(AddBacklogParts):
class AddIndexerToTables(AddProviderFailureHandling):
def test(self):
- return 4 < self.checkDBVersion()
+ return 4 < self.call_check_db_version()
def execute(self):
self.do_query(self.queries['add_indexer_to_tables'])
- self.addColumn('provider_cache', 'indexer', 'NUMERIC')
+ self.add_column('provider_cache', 'indexer', 'NUMERIC')
self.finish()
class AddGenericFailureHandling(AddBacklogParts):
def test(self):
- return 5 < self.checkDBVersion()
+ return 5 < self.call_check_db_version()
def execute(self):
self.do_query(self.queries['connection_fails'])
@@ -158,7 +158,7 @@ class AddGenericFailureHandling(AddBacklogParts):
class AddSaveQueues(AddGenericFailureHandling):
def test(self):
- return 6 < self.checkDBVersion()
+ return 6 < self.call_check_db_version()
def execute(self):
self.do_query(self.queries['save_queues'])
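
One idiom worth flagging in cache_db's `InitialSchema.execute` is `self.do_query(self.queries[next(iter(self.queries))])`: `queries` is a mapping of names to lists of SQL statements, and `next(iter(...))` selects the first key in insertion order. A small self-contained illustration (the keys and statements below are placeholders, not the real mapping):

```python
# Placeholder data; the real names live in cache_db's queries mapping.
queries = {
    'base': ['CREATE TABLE lastUpdate (provider TEXT, time NUMERIC)'],
    'consolidate_providers': ['CREATE TABLE provider_cache (provider TEXT)'],
}

# Dicts keep insertion order (guaranteed since py3.7), so this picks 'base'.
first_batch = queries[next(iter(queries))]
assert first_batch == ['CREATE TABLE lastUpdate (provider TEXT, time NUMERIC)']
```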
diff --git a/sickgear/databases/failed_db.py b/sickgear/databases/failed_db.py
index 03f66c0a..60d760a8 100644
--- a/sickgear/databases/failed_db.py
+++ b/sickgear/databases/failed_db.py
@@ -28,7 +28,7 @@ TEST_BASE_VERSION = None # the base production db version, only needed for TEST
# Add new migrations at the bottom of the list; subclass the previous migration.
class InitialSchema(db.SchemaUpgrade):
def test(self):
- return self.hasTable('failed')
+ return self.has_table('failed')
def execute(self):
queries = [
@@ -45,18 +45,18 @@ class InitialSchema(db.SchemaUpgrade):
class SizeAndProvider(InitialSchema):
def test(self):
- return self.hasColumn('failed', 'size') and self.hasColumn('failed', 'provider')
+ return self.has_column('failed', 'size') and self.has_column('failed', 'provider')
def execute(self):
- self.addColumn('failed', 'size')
- self.addColumn('failed', 'provider', 'TEXT', '')
+ self.add_column('failed', 'size')
+ self.add_column('failed', 'provider', 'TEXT', '')
class History(SizeAndProvider):
"""Snatch history that can't be modified by the user"""
def test(self):
- return self.hasTable('history')
+ return self.has_table('history')
def execute(self):
self.connection.action('CREATE TABLE history (date NUMERIC, ' +
@@ -67,21 +67,21 @@ class HistoryStatus(History):
"""Store episode status before snatch to revert to if necessary"""
def test(self):
- return self.hasColumn('history', 'old_status')
+ return self.has_column('history', 'old_status')
def execute(self):
- self.addColumn('history', 'old_status', 'NUMERIC', Quality.NONE)
- self.addColumn('history', 'showid', 'NUMERIC', '-1')
- self.addColumn('history', 'season', 'NUMERIC', '-1')
- self.addColumn('history', 'episode', 'NUMERIC', '-1')
+ self.add_column('history', 'old_status', 'NUMERIC', Quality.NONE)
+ self.add_column('history', 'showid', 'NUMERIC', '-1')
+ self.add_column('history', 'season', 'NUMERIC', '-1')
+ self.add_column('history', 'episode', 'NUMERIC', '-1')
class AddIndexerToTables(HistoryStatus):
def test(self):
- return self.hasColumn('history', 'indexer')
+ return self.has_column('history', 'indexer')
def execute(self):
- self.addColumn('history', 'indexer', 'NUMERIC')
+ self.add_column('history', 'indexer', 'NUMERIC')
main_db = db.DBConnection('sickbeard.db')
show_ids = {s['prod_id']: s['tv_id'] for s in
@@ -91,15 +91,15 @@ class AddIndexerToTables(HistoryStatus):
cl.append(['UPDATE history SET indexer = ? WHERE showid = ?', [i, s_id]])
self.connection.mass_action(cl)
- if self.connection.hasTable('backup_history'):
+ if self.connection.has_table('backup_history'):
self.connection.action(
'REPLACE INTO history '
'(date, size, `release`, provider, old_status, showid, season, episode, indexer)'
' SELECT'
' date, size, `release`, provider, old_status, showid, season, episode, indexer'
' FROM backup_history')
- self.connection.removeTable('backup_history')
+ self.connection.remove_table('backup_history')
self.connection.action('VACUUM')
- self.setDBVersion(2, check_db_version=False)
+ self.set_db_version(2, check_db_version=False)
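
Both cache_db.py and failed_db.py follow the convention restated in the comment above: add new migrations at the bottom of the file and subclass the previous one, so each step's `test()` can gate its `execute()`. A condensed, runnable sketch of that chain, using a fake connection in place of the real sickgear.db API:

```python
# Self-contained sketch of the migration pattern; FakeConnection and the
# driver loop are illustrative stand-ins, not the real sickgear.db classes.

class FakeConnection(object):
    def __init__(self):
        self.schema = {}  # table name -> set of column names

class SchemaUpgrade(object):
    def __init__(self, connection):
        self.connection = connection

    def has_table(self, table):
        return table in self.connection.schema

    def has_column(self, table, column):
        return column in self.connection.schema.get(table, set())

    def add_column(self, table, column):
        self.connection.schema.setdefault(table, set()).add(column)

class InitialSchema(SchemaUpgrade):
    def test(self):
        return self.has_table('failed')

    def execute(self):
        self.connection.schema['failed'] = {'release'}

class SizeAndProvider(InitialSchema):  # subclass the previous migration
    def test(self):
        return self.has_column('failed', 'size')

    def execute(self):
        self.add_column('failed', 'size')
        self.add_column('failed', 'provider')

conn = FakeConnection()
for step in (InitialSchema, SizeAndProvider):  # run in declaration order
    upgrade = step(conn)
    if not upgrade.test():  # skip steps that have already been applied
        upgrade.execute()

assert conn.schema == {'failed': {'release', 'size', 'provider'}}
```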
diff --git a/sickgear/databases/mainDB.py b/sickgear/databases/mainDB.py
index be2edcf1..c51e3108 100644
--- a/sickgear/databases/mainDB.py
+++ b/sickgear/databases/mainDB.py
@@ -103,7 +103,7 @@ class MainSanityCheck(db.DBSanityCheck):
# This func would break with multi tv info sources and without tvid, so added check min db version to mitigate
# Also, tv_show table had a unique index added at some time to prevent further dupes,
# therefore, this func is kept to cleanse legacy data given that it's redundant for new row insertions
- if self.connection.checkDBVersion() < 20004:
+ if self.connection.check_db_version() < 20004:
sql_result = self.connection.select(
'SELECT show_id, %(col)s, COUNT(%(col)s) AS count FROM tv_shows GROUP BY %(col)s HAVING count > 1'
@@ -136,7 +136,7 @@ class MainSanityCheck(db.DBSanityCheck):
# This func would break with multi tv info sources and without tvid, so added check min db version to mitigate
# Also, tv_show table had a unique index added at some time to prevent further dupes,
# therefore, this func is kept to cleanse legacy data given that it's redundant for new row insertions
- if self.connection.checkDBVersion() < 20007:
+ if self.connection.check_db_version() < 20007:
sql_result = self.connection.select(
'SELECT indexer AS tv_id, showid AS prod_id, season, episode, COUNT(showid) as count'
@@ -215,18 +215,18 @@ class MainSanityCheck(db.DBSanityCheck):
logger.log('Updating TV Episode table with index idx_sta_epi_sta_air')
self.connection.action('CREATE INDEX idx_sta_epi_sta_air ON tv_episodes (season, episode, status, airdate)')
- if not self.connection.hasIndex('tv_episodes', 'idx_tv_ep_ids'):
+ if not self.connection.has_index('tv_episodes', 'idx_tv_ep_ids'):
logger.log('Updating TV Episode table with index idx_tv_ep_ids')
self.connection.action('CREATE INDEX idx_tv_ep_ids ON tv_episodes (indexer, showid)')
- if not self.connection.hasIndex('tv_episodes', 'idx_tv_episodes_unique'):
+ if not self.connection.has_index('tv_episodes', 'idx_tv_episodes_unique'):
self.connection.action('CREATE UNIQUE INDEX idx_tv_episodes_unique ON '
'tv_episodes(indexer,showid,season,episode)')
- allowtbl, blocktbl = (('allow', 'block'), ('white', 'black'))[not self.connection.hasTable('blocklist')]
+ allowtbl, blocktbl = (('allow', 'block'), ('white', 'black'))[not self.connection.has_table('blocklist')]
for t in [('%slist' % allowtbl, 'show_id'), ('%slist' % blocktbl, 'show_id'),
('history', 'showid'), ('scene_exceptions', 'indexer_id')]:
- if not self.connection.hasIndex('%s' % t[0], 'idx_id_indexer_%s' % t[0]):
+ if not self.connection.has_index('%s' % t[0], 'idx_id_indexer_%s' % t[0]):
# noinspection SqlResolve
self.connection.action('CREATE INDEX idx_id_indexer_%s ON %s (indexer, %s)' % (t[0], t[0], t[1]))
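
The `allowtbl, blocktbl` assignment above picks table-name stems by indexing a pair of tuples with a boolean, relying on `False == 0` and `True == 1`. In brief (the `has_table` result is assumed for illustration):

```python
has_blocklist = True  # assumed result of self.connection.has_table('blocklist')

# not True -> 0 -> new-style names; not False -> 1 -> legacy white/black names
allowtbl, blocktbl = (('allow', 'block'), ('white', 'black'))[not has_blocklist]

assert ('%slist' % allowtbl, '%slist' % blocktbl) == ('allowlist', 'blocklist')
```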
@@ -309,9 +309,9 @@ class InitialSchema(db.SchemaUpgrade):
# Add new migrations at the bottom of the list; subclass the previous migration.
# 0 -> 20009
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
- if not self.hasTable('tv_shows') and not self.hasTable('db_version'):
+ if not self.has_table('tv_shows') and not self.has_table('db_version'):
queries = [
# anime allow and block list
'CREATE TABLE allowlist (show_id INTEGER, range TEXT, keyword TEXT, indexer NUMERIC)',
@@ -383,7 +383,7 @@ class InitialSchema(db.SchemaUpgrade):
self.connection.action(query)
else:
- cur_db_version = self.checkDBVersion()
+ cur_db_version = self.call_check_db_version()
if cur_db_version < MIN_DB_VERSION:
logger.log_error_and_exit(
@@ -403,7 +403,7 @@ class InitialSchema(db.SchemaUpgrade):
' your database may be unusable due to their modifications.'
)
- return self.checkDBVersion()
+ return self.call_check_db_version()
# 9 -> 10
@@ -413,13 +413,13 @@ class AddSizeAndSceneNameFields(db.SchemaUpgrade):
This func is only for 9->10 where older db columns exist,
those columns have since changed
"""
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
- if not self.hasColumn('tv_episodes', 'file_size'):
- self.addColumn('tv_episodes', 'file_size')
+ if not self.has_column('tv_episodes', 'file_size'):
+ self.add_column('tv_episodes', 'file_size')
- if not self.hasColumn('tv_episodes', 'release_name'):
- self.addColumn('tv_episodes', 'release_name', 'TEXT', '')
+ if not self.has_column('tv_episodes', 'release_name'):
+ self.add_column('tv_episodes', 'release_name', 'TEXT', '')
sql_result = self.connection.select('SELECT episode_id, location, file_size FROM tv_episodes')
@@ -474,7 +474,7 @@ class AddSizeAndSceneNameFields(db.SchemaUpgrade):
continue
# get the status/quality of the existing ep and make sure it's what we expect
- ep_status, ep_quality = common.Quality.splitCompositeStatus(int(sql_result[0]['status']))
+ ep_status, ep_quality = common.Quality.split_composite_status(int(sql_result[0]['status']))
if ep_status != common.DOWNLOADED:
continue
@@ -528,14 +528,14 @@ class AddSizeAndSceneNameFields(db.SchemaUpgrade):
self.connection.action('UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?',
[ep_file_name, cur_result['episode_id']])
- self.incDBVersion()
- return self.checkDBVersion()
+ self.inc_db_version()
+ return self.call_check_db_version()
# 10 -> 11
class RenameSeasonFolders(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
# rename the column
self.connection.action('ALTER TABLE tv_shows RENAME TO tmp_tv_shows')
@@ -558,8 +558,8 @@ class RenameSeasonFolders(db.SchemaUpgrade):
# noinspection SqlResolve
self.connection.action('DROP TABLE tmp_tv_shows')
- self.incDBVersion()
- return self.checkDBVersion()
+ self.inc_db_version()
+ return self.call_check_db_version()
# 11 -> 12
@@ -581,8 +581,8 @@ class Add1080pAndRawHDQualities(db.SchemaUpgrade):
"""
def _update_status(self, old_status):
- (status, quality) = common.Quality.splitCompositeStatus(old_status)
- return common.Quality.compositeStatus(status, self._update_quality(quality))
+ (status, quality) = common.Quality.split_composite_status(old_status)
+ return common.Quality.composite_status(status, self._update_quality(quality))
@staticmethod
def _update_quality(old_quality):
@@ -628,24 +628,24 @@ class Add1080pAndRawHDQualities(db.SchemaUpgrade):
return result
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
 # update the default quality so we don't grab the wrong qualities after migration
sickgear.QUALITY_DEFAULT = self._update_composite_qualities(sickgear.QUALITY_DEFAULT)
sickgear.save_config()
 # upgrade previous HD to HD720p -- shift previous qualities to new place values
- old_hd = common.Quality.combineQualities(
+ old_hd = common.Quality.combine_qualities(
[common.Quality.HDTV, common.Quality.HDWEBDL >> 2, common.Quality.HDBLURAY >> 3], [])
- new_hd = common.Quality.combineQualities([common.Quality.HDTV, common.Quality.HDWEBDL,
- common.Quality.HDBLURAY], [])
+ new_hd = common.Quality.combine_qualities([common.Quality.HDTV, common.Quality.HDWEBDL,
+ common.Quality.HDBLURAY], [])
# update ANY -- shift existing qualities and add new 1080p qualities,
# note that rawHD was not added to the ANY template
- old_any = common.Quality.combineQualities(
+ old_any = common.Quality.combine_qualities(
[common.Quality.SDTV, common.Quality.SDDVD, common.Quality.HDTV, common.Quality.HDWEBDL >> 2,
common.Quality.HDBLURAY >> 3, common.Quality.UNKNOWN], [])
- new_any = common.Quality.combineQualities(
+ new_any = common.Quality.combine_qualities(
[common.Quality.SDTV, common.Quality.SDDVD, common.Quality.HDTV, common.Quality.FULLHDTV,
common.Quality.HDWEBDL, common.Quality.FULLHDWEBDL, common.Quality.HDBLURAY, common.Quality.FULLHDBLURAY,
common.Quality.UNKNOWN], [])
@@ -697,12 +697,12 @@ class Add1080pAndRawHDQualities(db.SchemaUpgrade):
[self._update_quality(cur_entry['quality']), cur_entry['showid'], cur_entry['date']]])
self.connection.mass_action(cl)
- self.incDBVersion()
+ self.inc_db_version()
# cleanup and reduce db if any previous data was removed
self.upgrade_log(u'Performing a vacuum on the database.', logger.DEBUG)
self.connection.action('VACUUM')
- return self.checkDBVersion()
+ return self.call_check_db_version()
# 12 -> 13
@@ -710,20 +710,20 @@ class AddShowidTvdbidIndex(db.SchemaUpgrade):
# Adding index on tvdb_id (tv_shows) and showid (tv_episodes) to speed up searches/queries
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Checking for duplicate shows before adding unique index.')
MainSanityCheck(self.connection).fix_duplicate_shows('tvdb_id')
self.upgrade_log(u'Adding index on tvdb_id (tv_shows) and showid (tv_episodes) to speed up searches/queries.')
- if not self.hasTable('idx_showid'):
+ if not self.has_table('idx_showid'):
self.connection.action('CREATE INDEX idx_showid ON tv_episodes (showid);')
- if not self.hasTable('idx_tvdb_id'):
+ if not self.has_table('idx_tvdb_id'):
# noinspection SqlResolve
self.connection.action('CREATE UNIQUE INDEX idx_tvdb_id ON tv_shows (tvdb_id);')
- self.incDBVersion()
- return self.checkDBVersion()
+ self.inc_db_version()
+ return self.call_check_db_version()
# 13 -> 14
@@ -731,23 +731,23 @@ class AddLastUpdateTVDB(db.SchemaUpgrade):
# Adding column last_update_tvdb to tv_shows for controlling nightly updates
def execute(self):
- if not self.hasColumn('tv_shows', 'last_update_tvdb'):
+ if not self.has_column('tv_shows', 'last_update_tvdb'):
self.upgrade_log(u'Adding column last_update_tvdb to tv_shows')
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
- self.addColumn('tv_shows', 'last_update_tvdb', default=1)
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
+ self.add_column('tv_shows', 'last_update_tvdb', default=1)
- self.incDBVersion()
- return self.checkDBVersion()
+ self.inc_db_version()
+ return self.call_check_db_version()
# 14 -> 15
class AddDBIncreaseTo15(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
- self.upgrade_log(u'Bumping database version to v%s' % self.checkDBVersion())
- self.incDBVersion()
- return self.checkDBVersion()
+ self.upgrade_log(u'Bumping database version to v%s' % self.call_check_db_version())
+ self.inc_db_version()
+ return self.call_check_db_version()
# 15 -> 16
@@ -755,121 +755,121 @@ class AddIMDbInfo(db.SchemaUpgrade):
def execute(self):
db_backed_up = False
- if not self.hasTable('imdb_info'):
+ if not self.has_table('imdb_info'):
self.upgrade_log(u'Creating IMDb table imdb_info')
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
db_backed_up = True
self.connection.action(
'CREATE TABLE imdb_info (tvdb_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC,'
' akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT,'
' rating TEXT, votes INTEGER, last_update NUMERIC)')
- if not self.hasColumn('tv_shows', 'imdb_id'):
+ if not self.has_column('tv_shows', 'imdb_id'):
self.upgrade_log(u'Adding IMDb column imdb_id to tv_shows')
if not db_backed_up:
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
- self.addColumn('tv_shows', 'imdb_id')
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
+ self.add_column('tv_shows', 'imdb_id')
- self.incDBVersion()
- return self.checkDBVersion()
+ self.inc_db_version()
+ return self.call_check_db_version()
# 16 -> 17
class AddProperNamingSupport(db.SchemaUpgrade):
def execute(self):
- if not self.hasColumn('tv_shows', 'imdb_id')\
- and self.hasColumn('tv_shows', 'rls_require_words')\
- and self.hasColumn('tv_shows', 'rls_ignore_words'):
- return self.setDBVersion(5816)
+ if not self.has_column('tv_shows', 'imdb_id')\
+ and self.has_column('tv_shows', 'rls_require_words')\
+ and self.has_column('tv_shows', 'rls_ignore_words'):
+ return self.set_db_version(5816)
- if not self.hasColumn('tv_episodes', 'is_proper'):
+ if not self.has_column('tv_episodes', 'is_proper'):
self.upgrade_log(u'Adding column is_proper to tv_episodes')
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
- self.addColumn('tv_episodes', 'is_proper')
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
+ self.add_column('tv_episodes', 'is_proper')
- self.incDBVersion()
- return self.checkDBVersion()
+ self.inc_db_version()
+ return self.call_check_db_version()
# 17 -> 18
class AddEmailSubscriptionTable(db.SchemaUpgrade):
def execute(self):
- if not self.hasColumn('tv_episodes', 'is_proper')\
- and self.hasColumn('tv_shows', 'rls_require_words')\
- and self.hasColumn('tv_shows', 'rls_ignore_words')\
- and self.hasColumn('tv_shows', 'skip_notices'):
- return self.setDBVersion(5817)
+ if not self.has_column('tv_episodes', 'is_proper')\
+ and self.has_column('tv_shows', 'rls_require_words')\
+ and self.has_column('tv_shows', 'rls_ignore_words')\
+ and self.has_column('tv_shows', 'skip_notices'):
+ return self.set_db_version(5817)
- if not self.hasColumn('tv_shows', 'notify_list'):
+ if not self.has_column('tv_shows', 'notify_list'):
self.upgrade_log(u'Adding column notify_list to tv_shows')
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
- self.addColumn('tv_shows', 'notify_list', 'TEXT', None)
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
+ self.add_column('tv_shows', 'notify_list', 'TEXT', None)
- self.incDBVersion()
- return self.checkDBVersion()
+ self.inc_db_version()
+ return self.call_check_db_version()
# 18 -> 19
class AddProperSearch(db.SchemaUpgrade):
def execute(self):
- if not self.hasColumn('tv_episodes', 'is_proper'):
- return self.setDBVersion(12)
+ if not self.has_column('tv_episodes', 'is_proper'):
+ return self.set_db_version(12)
- if not self.hasColumn('tv_shows', 'notify_list')\
- and self.hasColumn('tv_shows', 'rls_require_words')\
- and self.hasColumn('tv_shows', 'rls_ignore_words')\
- and self.hasColumn('tv_shows', 'skip_notices')\
- and self.hasColumn('history', 'source'):
- return self.setDBVersion(5818)
+ if not self.has_column('tv_shows', 'notify_list')\
+ and self.has_column('tv_shows', 'rls_require_words')\
+ and self.has_column('tv_shows', 'rls_ignore_words')\
+ and self.has_column('tv_shows', 'skip_notices')\
+ and self.has_column('history', 'source'):
+ return self.set_db_version(5818)
- if not self.hasColumn('info', 'last_proper_search'):
+ if not self.has_column('info', 'last_proper_search'):
self.upgrade_log(u'Adding column last_proper_search to info')
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
- self.addColumn('info', 'last_proper_search', default=1)
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
+ self.add_column('info', 'last_proper_search', default=1)
- self.incDBVersion()
- return self.checkDBVersion()
+ self.inc_db_version()
+ return self.call_check_db_version()
# 19 -> 20
class AddDvdOrderOption(db.SchemaUpgrade):
def execute(self):
- if not self.hasColumn('tv_shows', 'dvdorder'):
+ if not self.has_column('tv_shows', 'dvdorder'):
self.upgrade_log(u'Adding column dvdorder to tv_shows')
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
- self.addColumn('tv_shows', 'dvdorder', 'NUMERIC', '0')
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
+ self.add_column('tv_shows', 'dvdorder', 'NUMERIC', '0')
- self.incDBVersion()
- return self.checkDBVersion()
+ self.inc_db_version()
+ return self.call_check_db_version()
# 20 -> 21
class AddSubtitlesSupport(db.SchemaUpgrade):
def execute(self):
- if not self.hasColumn('tv_shows', 'subtitles'):
+ if not self.has_column('tv_shows', 'subtitles'):
self.upgrade_log(u'Adding subtitles to tv_shows and tv_episodes')
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
- self.addColumn('tv_shows', 'subtitles')
- self.addColumn('tv_episodes', 'subtitles', 'TEXT', '')
- self.addColumn('tv_episodes', 'subtitles_searchcount')
- self.addColumn('tv_episodes', 'subtitles_lastsearch', 'TIMESTAMP', str(datetime.datetime.min))
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
+ self.add_column('tv_shows', 'subtitles')
+ self.add_column('tv_episodes', 'subtitles', 'TEXT', '')
+ self.add_column('tv_episodes', 'subtitles_searchcount')
+ self.add_column('tv_episodes', 'subtitles_lastsearch', 'TIMESTAMP', str(datetime.datetime.min))
- self.incDBVersion()
- return self.checkDBVersion()
+ self.inc_db_version()
+ return self.call_check_db_version()
# 21 -> 22
class ConvertTVShowsToIndexerScheme(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Converting TV Shows table to Indexer Scheme...')
- if self.hasTable('tmp_tv_shows'):
+ if self.has_table('tmp_tv_shows'):
self.upgrade_log(u'Removing temp tv show tables left behind from previous updates...')
# noinspection SqlResolve
self.connection.action('DROP TABLE tmp_tv_shows')
@@ -899,18 +899,18 @@ class ConvertTVShowsToIndexerScheme(db.SchemaUpgrade):
# noinspection SqlConstantCondition
self.connection.action('UPDATE tv_shows SET indexer = 1 WHERE 1=1')
- self.incDBVersion()
- return self.checkDBVersion()
+ self.inc_db_version()
+ return self.call_check_db_version()
# 22 -> 23
class ConvertTVEpisodesToIndexerScheme(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Converting TV Episodes table to Indexer Scheme...')
- if self.hasTable('tmp_tv_episodes'):
+ if self.has_table('tmp_tv_episodes'):
self.upgrade_log(u'Removing temp tv episode tables left behind from previous updates...')
# noinspection SqlResolve
self.connection.action('DROP TABLE tmp_tv_episodes')
@@ -940,18 +940,18 @@ class ConvertTVEpisodesToIndexerScheme(db.SchemaUpgrade):
# noinspection SqlConstantCondition
self.connection.action('UPDATE tv_episodes SET indexer = 1 WHERE 1=1')
- self.incDBVersion()
- return self.checkDBVersion()
+ self.inc_db_version()
+ return self.call_check_db_version()
# 23 -> 24
class ConvertIMDBInfoToIndexerScheme(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Converting IMDb Info table to Indexer Scheme...')
- if self.hasTable('tmp_imdb_info'):
+ if self.has_table('tmp_imdb_info'):
self.upgrade_log(u'Removing temp imdb info tables left behind from previous updates...')
# noinspection SqlResolve
self.connection.action('DROP TABLE tmp_imdb_info')
@@ -969,18 +969,18 @@ class ConvertIMDBInfoToIndexerScheme(db.SchemaUpgrade):
# noinspection SqlResolve
self.connection.action('DROP TABLE tmp_imdb_info')
- self.incDBVersion()
- return self.checkDBVersion()
+ self.inc_db_version()
+ return self.call_check_db_version()
# 24 -> 25
class ConvertInfoToIndexerScheme(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Converting Info table to Indexer Scheme...')
- if self.hasTable('tmp_info'):
+ if self.has_table('tmp_info'):
self.upgrade_log(u'Removing temp info tables left behind from previous updates...')
# noinspection SqlResolve
self.connection.action('DROP TABLE tmp_info')
@@ -995,29 +995,29 @@ class ConvertInfoToIndexerScheme(db.SchemaUpgrade):
# noinspection SqlResolve
self.connection.action('DROP TABLE tmp_info')
- self.incDBVersion()
- return self.checkDBVersion()
+ self.inc_db_version()
+ return self.call_check_db_version()
# 25 -> 26
class AddArchiveFirstMatchOption(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
- if not self.hasColumn('tv_shows', 'archive_firstmatch'):
+ if not self.has_column('tv_shows', 'archive_firstmatch'):
self.upgrade_log(u'Adding column archive_firstmatch to tv_shows')
- self.addColumn('tv_shows', 'archive_firstmatch', 'NUMERIC', '0')
+ self.add_column('tv_shows', 'archive_firstmatch', 'NUMERIC', '0')
- self.incDBVersion()
- return self.checkDBVersion()
+ self.inc_db_version()
+ return self.call_check_db_version()
# 26 -> 27
class AddSceneNumbering(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
- if self.hasTable('scene_numbering'):
+ if self.has_table('scene_numbering'):
self.connection.action('DROP TABLE scene_numbering')
self.upgrade_log(u'Upgrading table scene_numbering ...')
@@ -1026,14 +1026,14 @@ class AddSceneNumbering(db.SchemaUpgrade):
' scene_season INTEGER, scene_episode INTEGER,'
' PRIMARY KEY (indexer_id,season,episode))')
- self.incDBVersion()
- return self.checkDBVersion()
+ self.inc_db_version()
+ return self.call_check_db_version()
# 27 -> 28
class ConvertIndexerToInteger(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
cl = []
self.upgrade_log(u'Converting Indexer to Integer ...')
@@ -1046,50 +1046,50 @@ class ConvertIndexerToInteger(db.SchemaUpgrade):
self.connection.mass_action(cl)
- self.incDBVersion()
- return self.checkDBVersion()
+ self.inc_db_version()
+ return self.call_check_db_version()
# 28 -> 29
class AddRequireAndIgnoreWords(db.SchemaUpgrade):
# Adding column rls_require_words and rls_ignore_words to tv_shows
def execute(self):
- if self.hasColumn('tv_shows', 'rls_require_words') and self.hasColumn('tv_shows', 'rls_ignore_words'):
- self.incDBVersion()
- return self.checkDBVersion()
+ if self.has_column('tv_shows', 'rls_require_words') and self.has_column('tv_shows', 'rls_ignore_words'):
+ self.inc_db_version()
+ return self.call_check_db_version()
db_backed_up = False
- if not self.hasColumn('tv_shows', 'rls_require_words'):
+ if not self.has_column('tv_shows', 'rls_require_words'):
self.upgrade_log(u'Adding column rls_require_words to tv_shows')
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
db_backed_up = True
- self.addColumn('tv_shows', 'rls_require_words', 'TEXT', '')
+ self.add_column('tv_shows', 'rls_require_words', 'TEXT', '')
- if not self.hasColumn('tv_shows', 'rls_ignore_words'):
+ if not self.has_column('tv_shows', 'rls_ignore_words'):
self.upgrade_log(u'Adding column rls_ignore_words to tv_shows')
if not db_backed_up:
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
- self.addColumn('tv_shows', 'rls_ignore_words', 'TEXT', '')
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
+ self.add_column('tv_shows', 'rls_ignore_words', 'TEXT', '')
- self.incDBVersion()
- return self.checkDBVersion()
+ self.inc_db_version()
+ return self.call_check_db_version()
# 29 -> 30
class AddSportsOption(db.SchemaUpgrade):
def execute(self):
db_backed_up = False
- if not self.hasColumn('tv_shows', 'sports'):
+ if not self.has_column('tv_shows', 'sports'):
self.upgrade_log(u'Adding column sports to tv_shows')
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
db_backed_up = True
- self.addColumn('tv_shows', 'sports', 'NUMERIC', '0')
+ self.add_column('tv_shows', 'sports', 'NUMERIC', '0')
- if self.hasColumn('tv_shows', 'air_by_date') and self.hasColumn('tv_shows', 'sports'):
+ if self.has_column('tv_shows', 'air_by_date') and self.has_column('tv_shows', 'sports'):
# update sports column
self.upgrade_log(u'[4/4] Updating tv_shows to reflect the correct sports value...')
if not db_backed_up:
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
cl = []
history_quality = self.connection.select(
'SELECT * FROM tv_shows WHERE LOWER(classification) = "sports" AND air_by_date = 1 AND sports = 0')
@@ -1099,117 +1099,117 @@ class AddSportsOption(db.SchemaUpgrade):
cl.append(['UPDATE tv_shows SET air_by_date = 0 WHERE show_id = ?', [cur_entry['show_id']]])
self.connection.mass_action(cl)
- self.incDBVersion()
- return self.checkDBVersion()
+ self.inc_db_version()
+ return self.call_check_db_version()
# 30 -> 31
class AddSceneNumberingToTvEpisodes(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Adding columns scene_season and scene_episode to tvepisodes')
- self.addColumn('tv_episodes', 'scene_season', 'NUMERIC', 'NULL')
- self.addColumn('tv_episodes', 'scene_episode', 'NUMERIC', 'NULL')
+ self.add_column('tv_episodes', 'scene_season', 'NUMERIC', 'NULL')
+ self.add_column('tv_episodes', 'scene_episode', 'NUMERIC', 'NULL')
- self.incDBVersion()
- return self.checkDBVersion()
+ self.inc_db_version()
+ return self.call_check_db_version()
# 31 -> 32
class AddAnimeTVShow(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Adding column anime to tv_episodes')
- self.addColumn('tv_shows', 'anime', 'NUMERIC', '0')
+ self.add_column('tv_shows', 'anime', 'NUMERIC', '0')
- self.incDBVersion()
- return self.checkDBVersion()
+ self.inc_db_version()
+ return self.call_check_db_version()
# 32 -> 33
class AddAbsoluteNumbering(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Adding column absolute_number to tv_episodes')
- self.addColumn('tv_episodes', 'absolute_number', 'NUMERIC', '0')
+ self.add_column('tv_episodes', 'absolute_number', 'NUMERIC', '0')
- self.incDBVersion()
- return self.checkDBVersion()
+ self.inc_db_version()
+ return self.call_check_db_version()
# 33 -> 34
class AddSceneAbsoluteNumbering(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Adding columns absolute_number and scene_absolute_number to scene_numbering')
- self.addColumn('scene_numbering', 'absolute_number', 'NUMERIC', '0')
- self.addColumn('scene_numbering', 'scene_absolute_number', 'NUMERIC', '0')
+ self.add_column('scene_numbering', 'absolute_number', 'NUMERIC', '0')
+ self.add_column('scene_numbering', 'scene_absolute_number', 'NUMERIC', '0')
- self.incDBVersion()
- return self.checkDBVersion()
+ self.inc_db_version()
+ return self.call_check_db_version()
# 34 -> 35
class AddAnimeAllowlistBlocklist(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
cl = [['CREATE TABLE allowlist (show_id INTEGER, range TEXT, keyword TEXT, indexer NUMERIC)'],
['CREATE TABLE blocklist (show_id INTEGER, range TEXT, keyword TEXT, indexer NUMERIC)']]
self.upgrade_log(u'Creating tables for anime allow and block lists')
self.connection.mass_action(cl)
- self.incDBVersion()
- return self.checkDBVersion()
+ self.inc_db_version()
+ return self.call_check_db_version()
# 35 -> 36
class AddSceneAbsoluteNumbering2(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Adding column scene_absolute_number to tv_episodes')
- self.addColumn('tv_episodes', 'scene_absolute_number', 'NUMERIC', '0')
+ self.add_column('tv_episodes', 'scene_absolute_number', 'NUMERIC', '0')
- self.incDBVersion()
- return self.checkDBVersion()
+ self.inc_db_version()
+ return self.call_check_db_version()
# 36 -> 37
class AddXemRefresh(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Creating table xem_refresh')
self.connection.action(
'CREATE TABLE xem_refresh (indexer TEXT, indexer_id INTEGER PRIMARY KEY, last_refreshed INTEGER)')
- self.incDBVersion()
- return self.checkDBVersion()
+ self.inc_db_version()
+ return self.call_check_db_version()
# 37 -> 38
class AddSceneToTvShows(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Adding column scene to tv_shows')
- self.addColumn('tv_shows', 'scene', 'NUMERIC', '0')
+ self.add_column('tv_shows', 'scene', 'NUMERIC', '0')
- self.incDBVersion()
- return self.checkDBVersion()
+ self.inc_db_version()
+ return self.call_check_db_version()
# 38 -> 39
class AddIndexerMapping(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
- if self.hasTable('indexer_mapping'):
+ if self.has_table('indexer_mapping'):
self.connection.action('DROP TABLE indexer_mapping')
self.upgrade_log(u'Adding table indexer_mapping')
@@ -1217,44 +1217,44 @@ class AddIndexerMapping(db.SchemaUpgrade):
'CREATE TABLE indexer_mapping (indexer_id INTEGER, indexer NUMERIC, mindexer_id INTEGER, mindexer NUMERIC,'
' PRIMARY KEY (indexer_id, indexer))')
- self.incDBVersion()
- return self.checkDBVersion()
+ self.inc_db_version()
+ return self.call_check_db_version()
# 39 -> 40
class AddVersionToTvEpisodes(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Adding columns release_group and version to tv_episodes')
- self.addColumn('tv_episodes', 'release_group', 'TEXT', '')
- self.addColumn('tv_episodes', 'version', 'NUMERIC', '-1')
+ self.add_column('tv_episodes', 'release_group', 'TEXT', '')
+ self.add_column('tv_episodes', 'version', 'NUMERIC', '-1')
self.upgrade_log(u'Adding column version to history')
- self.addColumn('history', 'version', 'NUMERIC', '-1')
+ self.add_column('history', 'version', 'NUMERIC', '-1')
- self.incDBVersion()
- return self.checkDBVersion()
+ self.inc_db_version()
+ return self.call_check_db_version()
# 40 -> 10000
class BumpDatabaseVersion(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Bumping database version')
- return self.setDBVersion(10000)
+ return self.set_db_version(10000)
# 41,42 -> 10001
class Migrate41(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Bumping database version')
- return self.setDBVersion(10001)
+ return self.set_db_version(10001)
# 43,44 -> 10001
@@ -1264,25 +1264,25 @@ class Migrate43(db.SchemaUpgrade):
db_backed_up = False
db_chg = None
table = 'tmdb_info'
- if self.hasTable(table):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ if self.has_table(table):
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
db_backed_up = True
self.upgrade_log(u'Dropping redundant table tmdb_info')
# noinspection SqlResolve
self.connection.action('DROP TABLE [%s]' % table)
db_chg = True
- if self.hasColumn('tv_shows', 'tmdb_id'):
+ if self.has_column('tv_shows', 'tmdb_id'):
if not db_backed_up:
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
db_backed_up = True
self.upgrade_log(u'Dropping redundant tmdb_info refs')
- self.dropColumn('tv_shows', 'tmdb_id')
+ self.drop_columns('tv_shows', 'tmdb_id')
db_chg = True
- if not self.hasTable('db_version'):
+ if not self.has_table('db_version'):
if not db_backed_up:
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.connection.action('PRAGMA user_version = 0')
self.connection.action('CREATE TABLE db_version (db_version INTEGER);')
self.connection.action('INSERT INTO db_version (db_version) VALUES (0);')
@@ -1290,124 +1290,124 @@ class Migrate43(db.SchemaUpgrade):
if not db_chg:
self.upgrade_log(u'Bumping database version')
- return self.setDBVersion(10001)
+ return self.set_db_version(10001)
# 4301 -> 10002
class Migrate4301(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Bumping database version')
- return self.setDBVersion(10002)
+ return self.set_db_version(10002)
# 4302,4400 -> 10003
class Migrate4302(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Bumping database version')
- return self.setDBVersion(10003)
+ return self.set_db_version(10003)
# 5816 - 5818 -> 15
class MigrateUpstream(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
- self.upgrade_log(u'Migrate SickBeard db v%s into v15' % str(self.checkDBVersion()).replace('58', ''))
+ self.upgrade_log(u'Migrate SickBeard db v%s into v15' % str(self.call_check_db_version()).replace('58', ''))
- return self.setDBVersion(15)
+ return self.set_db_version(15)
# 10000 -> 20000
class SickGearDatabaseVersion(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Bumping database version to new SickGear standards')
- return self.setDBVersion(20000)
+ return self.set_db_version(20000)
# 10001 -> 10000
class RemoveDefaultEpStatusFromTvShows(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Dropping redundant column default_ep_status from tv_shows')
- self.dropColumn('tv_shows', 'default_ep_status')
+ self.drop_columns('tv_shows', 'default_ep_status')
- return self.setDBVersion(10000)
+ return self.set_db_version(10000)
# 10002 -> 10001
class RemoveMinorDBVersion(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Dropping redundant column db_minor_version from db_version')
- self.dropColumn('db_version', 'db_minor_version')
+ self.drop_columns('db_version', 'db_minor_version')
- return self.setDBVersion(10001)
+ return self.set_db_version(10001)
# 10003 -> 10002
class RemoveMetadataSub(db.SchemaUpgrade):
def execute(self):
- if self.hasColumn('tv_shows', 'sub_use_sr_metadata'):
+ if self.has_column('tv_shows', 'sub_use_sr_metadata'):
self.upgrade_log(u'Dropping redundant column metadata sub')
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
- self.dropColumn('tv_shows', 'sub_use_sr_metadata')
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
+ self.drop_columns('tv_shows', 'sub_use_sr_metadata')
- return self.setDBVersion(10002)
+ return self.set_db_version(10002)
# 20000 -> 20001
class DBIncreaseTo20001(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Bumping database version to force a backup before new database code')
self.connection.action('VACUUM')
self.upgrade_log(u'Performed a vacuum on the database', logger.DEBUG)
- return self.setDBVersion(20001)
+ return self.set_db_version(20001)
# 20001 -> 20002
class AddTvShowOverview(db.SchemaUpgrade):
def execute(self):
- if not self.hasColumn('tv_shows', 'overview'):
+ if not self.has_column('tv_shows', 'overview'):
self.upgrade_log(u'Adding column overview to tv_shows')
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
- self.addColumn('tv_shows', 'overview', 'TEXT', '')
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
+ self.add_column('tv_shows', 'overview', 'TEXT', '')
- return self.setDBVersion(20002)
+ return self.set_db_version(20002)
# 20002 -> 20003
class AddTvShowTags(db.SchemaUpgrade):
def execute(self):
- if not self.hasColumn('tv_shows', 'tag'):
+ if not self.has_column('tv_shows', 'tag'):
self.upgrade_log(u'Adding tag to tv_shows')
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
- self.addColumn('tv_shows', 'tag', 'TEXT', 'Show List')
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
+ self.add_column('tv_shows', 'tag', 'TEXT', 'Show List')
- return self.setDBVersion(20003)
+ return self.set_db_version(20003)
# 20003 -> 20004
class ChangeMapIndexer(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
- if self.hasTable('indexer_mapping'):
+ if self.has_table('indexer_mapping'):
self.connection.action('DROP TABLE indexer_mapping')
self.upgrade_log(u'Changing table indexer_mapping')
@@ -1418,22 +1418,22 @@ class ChangeMapIndexer(db.SchemaUpgrade):
self.connection.action('CREATE INDEX IF NOT EXISTS idx_mapping ON indexer_mapping (indexer_id, indexer)')
- if not self.hasColumn('info', 'last_run_backlog'):
+ if not self.has_column('info', 'last_run_backlog'):
self.upgrade_log('Adding last_run_backlog to info')
- self.addColumn('info', 'last_run_backlog', 'NUMERIC', 1)
+ self.add_column('info', 'last_run_backlog', 'NUMERIC', 1)
self.upgrade_log(u'Moving table scene_exceptions from cache.db to sickbeard.db')
- if self.hasTable('scene_exceptions_refresh'):
+ if self.has_table('scene_exceptions_refresh'):
self.connection.action('DROP TABLE scene_exceptions_refresh')
self.connection.action('CREATE TABLE scene_exceptions_refresh (list TEXT PRIMARY KEY, last_refreshed INTEGER)')
- if self.hasTable('scene_exceptions'):
+ if self.has_table('scene_exceptions'):
self.connection.action('DROP TABLE scene_exceptions')
self.connection.action('CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY,'
' indexer_id INTEGER KEY, show_name TEXT, season NUMERIC, custom NUMERIC)')
try:
cachedb = db.DBConnection(filename='cache.db')
- if cachedb.hasTable('scene_exceptions'):
+ if cachedb.has_table('scene_exceptions'):
sql_result = cachedb.action('SELECT * FROM scene_exceptions')
cs = []
for cur_result in sql_result:
@@ -1452,7 +1452,7 @@ class ChangeMapIndexer(db.SchemaUpgrade):
'scene_exceptions', 'scene_exceptions_refresh', 'info', 'indexer_mapping',
'db_version', 'history', 'imdb_info', 'lastUpdate', 'scene_numbering', 'tv_episodes', 'tv_shows',
'xem_refresh'}
- current_tables = set(self.listTables())
+ current_tables = set(self.list_tables())
remove_tables = list(current_tables - keep_tables)
for table in remove_tables:
# noinspection SqlResolve
@@ -1460,34 +1460,34 @@ class ChangeMapIndexer(db.SchemaUpgrade):
self.connection.action('VACUUM')
- return self.setDBVersion(20004)
+ return self.set_db_version(20004)
# 20004 -> 20005
class AddShowNotFoundCounter(db.SchemaUpgrade):
def execute(self):
- if not self.hasTable('tv_shows_not_found'):
+ if not self.has_table('tv_shows_not_found'):
self.upgrade_log(u'Adding table tv_shows_not_found')
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.connection.action(
'CREATE TABLE tv_shows_not_found (indexer NUMERIC NOT NULL, indexer_id NUMERIC NOT NULL,'
' fail_count NUMERIC NOT NULL DEFAULT 0, last_check NUMERIC NOT NULL, last_success NUMERIC,'
' PRIMARY KEY (indexer_id, indexer))')
- return self.setDBVersion(20005)
+ return self.set_db_version(20005)
# 20005 -> 20006
class AddFlagTable(db.SchemaUpgrade):
def execute(self):
- if not self.hasTable('flags'):
+ if not self.has_table('flags'):
self.upgrade_log(u'Adding table flags')
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.connection.action('CREATE TABLE flags (flag PRIMARY KEY NOT NULL )')
- return self.setDBVersion(20006)
+ return self.set_db_version(20006)
# 20006 -> 20007
@@ -1496,61 +1496,61 @@ class DBIncreaseTo20007(db.SchemaUpgrade):
self.upgrade_log(u'Bumping database version')
- return self.setDBVersion(20007)
+ return self.set_db_version(20007)
# 20007 -> 20008
class AddWebdlTypesTable(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.connection.action('CREATE TABLE webdl_types (dname TEXT NOT NULL , regex TEXT NOT NULL )')
- return self.setDBVersion(20008)
+ return self.set_db_version(20008)
# 20008 -> 20009
class AddWatched(db.SchemaUpgrade):
def execute(self):
# remove old table from version 20007
- if self.hasTable('tv_episodes_watched') and not self.hasColumn('tv_episodes_watched', 'clientep_id'):
+ if self.has_table('tv_episodes_watched') and not self.has_column('tv_episodes_watched', 'clientep_id'):
self.connection.action('DROP TABLE tv_episodes_watched')
self.connection.action('VACUUM')
- if not self.hasTable('tv_episodes_watched'):
+ if not self.has_table('tv_episodes_watched'):
self.upgrade_log(u'Adding table tv_episodes_watched')
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.connection.action(
'CREATE TABLE tv_episodes_watched (tvep_id NUMERIC NOT NULL, clientep_id TEXT, label TEXT,'
' played NUMERIC DEFAULT 0 NOT NULL, date_watched NUMERIC NOT NULL, date_added NUMERIC,'
' status NUMERIC, location TEXT, file_size NUMERIC, hide INT default 0 not null)'
)
- return self.setDBVersion(20009)
+ return self.set_db_version(20009)
# 20009 -> 20010
class AddPrune(db.SchemaUpgrade):
def execute(self):
- if not self.hasColumn('tv_shows', 'prune'):
+ if not self.has_column('tv_shows', 'prune'):
self.upgrade_log('Adding prune to tv_shows')
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
- self.addColumn('tv_shows', 'prune', 'INT', 0)
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
+ self.add_column('tv_shows', 'prune', 'INT', 0)
- return self.setDBVersion(20010)
+ return self.set_db_version(20010)
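
Every step above follows the same guard / backup / alter / version-bump shape. A minimal sketch of that shape (class name, column and version number invented, not from the codebase):

    class AddExampleColumn(db.SchemaUpgrade):
        def execute(self):
            if not self.has_column('tv_shows', 'example'):
                self.upgrade_log('Adding example to tv_shows')
                db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
                self.add_column('tv_shows', 'example', 'INT', 0)
            return self.set_db_version(99999)  # illustrative version number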
# 20010 -> 20011
class AddIndexerToTables(db.SchemaUpgrade):
def execute(self):
sickgear.helpers.upgrade_new_naming()
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
show_ids = {s['prod_id']: s['tv_id'] for s in
self.connection.select('SELECT indexer AS tv_id, indexer_id AS prod_id FROM tv_shows')}
- allowtbl, blocktbl = (('allow', 'block'), ('white', 'black'))[not self.connection.hasTable('blocklist')]
+ allowtbl, blocktbl = (('allow', 'block'), ('white', 'black'))[not self.connection.has_table('blocklist')]
allowtbl, blocktbl = '%slist' % allowtbl, '%slist' % blocktbl
columns = {allowtbl: 'show_id, range, keyword, indexer',
blocktbl: 'show_id, range, keyword, indexer',
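
The allowtbl/blocktbl assignment above uses the tuple-indexed-by-bool idiom this codebase favours: False selects element 0, True selects element 1. Standalone:

    # False -> index 0, True -> index 1
    renamed = True   # i.e. the 'blocklist' table already exists
    allowtbl, blocktbl = (('allow', 'block'), ('white', 'black'))[not renamed]
    assert (allowtbl, blocktbl) == ('allow', 'block')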
@@ -1560,9 +1560,9 @@ class AddIndexerToTables(db.SchemaUpgrade):
# add missing indexer column
for t in [(allowtbl, 'show_id'), (blocktbl, 'show_id'),
('history', 'showid'), ('scene_exceptions', 'indexer_id')]:
- if not self.hasColumn(t[0], 'indexer'):
+ if not self.has_column(t[0], 'indexer'):
self.upgrade_log(u'Adding TV info support to %s table' % t[0])
- self.addColumn(t[0], 'indexer')
+ self.add_column(t[0], 'indexer')
cl = []
for s_id, i in iteritems(show_ids):
# noinspection SqlResolve
@@ -1578,11 +1578,11 @@ class AddIndexerToTables(db.SchemaUpgrade):
if 0 < self.connection.connection.total_changes:
self.upgrade_log('Removed orphaned data from %s' % t[0])
- if self.connection.hasTable('backup_%s' % t[0]):
+ if self.connection.has_table('backup_%s' % t[0]):
self.upgrade_log('Adding backup data to %s' % t[0])
self.connection.action('REPLACE INTO %s SELECT %s FROM %s' % ('%s (%s)' % (t[0], columns[t[0]]),
columns[t[0]], 'backup_%s' % t[0]))
- self.connection.removeTable('backup_%s' % t[0])
+ self.connection.remove_table('backup_%s' % t[0])
# recreate tables that have wrong primary key = indexer_id without indexer
self.upgrade_log('Adding TV info support to scene_numbering')
@@ -1626,7 +1626,7 @@ class AddIndexerToTables(db.SchemaUpgrade):
self.connection.mass_action(cl)
self.connection.action('CREATE INDEX idx_id_indexer_imdb_info ON imdb_info (indexer,indexer_id)')
- if self.connection.hasTable('backup_imdb_info'):
+ if self.connection.has_table('backup_imdb_info'):
self.upgrade_log('Adding backup data to imdb_info')
# noinspection SqlResolve
self.connection.action('REPLACE INTO imdb_info (indexer, indexer_id, imdb_id, title, year, akas, '
@@ -1634,29 +1634,29 @@ class AddIndexerToTables(db.SchemaUpgrade):
'last_update) SELECT indexer, indexer_id, imdb_id, title, year, akas, runtimes, '
'genres, countries, country_codes, certificates, rating, votes, last_update '
'FROM backup_imdb_info')
- self.connection.removeTable('backup_imdb_info')
+ self.connection.remove_table('backup_imdb_info')
        # remove an index of a no longer existing column
self.upgrade_log('Changing/Re-Creating Indexes')
- if self.connection.hasIndex('tv_shows', 'idx_tvdb_id'):
- self.connection.removeIndex('tv_shows', 'idx_tvdb_id')
+ if self.connection.has_index('tv_shows', 'idx_tvdb_id'):
+ self.connection.remove_index('tv_shows', 'idx_tvdb_id')
- if self.connection.hasIndex('tv_shows', 'idx_indexer_id'):
- self.connection.removeIndex('tv_shows', 'idx_indexer_id')
+ if self.connection.has_index('tv_shows', 'idx_indexer_id'):
+ self.connection.remove_index('tv_shows', 'idx_indexer_id')
self.connection.action('CREATE UNIQUE INDEX idx_indexer_id ON tv_shows (indexer,indexer_id)')
- if self.connection.hasIndex('tv_episodes', 'idx_showid'):
- self.connection.removeIndex('tv_episodes', 'idx_showid')
+ if self.connection.has_index('tv_episodes', 'idx_showid'):
+ self.connection.remove_index('tv_episodes', 'idx_showid')
- if self.connection.hasIndex('tv_episodes', 'idx_tv_episodes_showid_airdate'):
- self.connection.removeIndex('tv_episodes', 'idx_tv_episodes_showid_airdate')
+ if self.connection.has_index('tv_episodes', 'idx_tv_episodes_showid_airdate'):
+ self.connection.remove_index('tv_episodes', 'idx_tv_episodes_showid_airdate')
self.connection.action('CREATE INDEX idx_tv_episodes_showid_airdate ON tv_episodes(indexer,showid,airdate)')
- if not self.connection.hasIndex('tv_episodes', 'idx_tv_episodes_unique'):
+ if not self.connection.has_index('tv_episodes', 'idx_tv_episodes_unique'):
self.connection.action('CREATE UNIQUE INDEX idx_tv_episodes_unique ON '
'tv_episodes(indexer,showid,season,episode)')
- if self.connection.hasTable('backup_tv_episodes'):
+ if self.connection.has_table('backup_tv_episodes'):
self.upgrade_log('Adding backup data to tv_episodes')
# noinspection SqlResolve
self.connection.action('REPLACE INTO tv_episodes (episode_id, showid, indexerid, indexer, name, season, '
@@ -1668,9 +1668,9 @@ class AddIndexerToTables(db.SchemaUpgrade):
'file_size, release_name, subtitles, subtitles_searchcount, subtitles_lastsearch, '
'is_proper, scene_season, scene_episode, absolute_number, scene_absolute_number, '
'release_group, version FROM backup_tv_episodes')
- self.connection.removeTable('backup_tv_episodes')
+ self.connection.remove_table('backup_tv_episodes')
- if self.connection.hasTable('backup_tv_shows'):
+ if self.connection.has_table('backup_tv_shows'):
self.upgrade_log('Adding backup data to tv_shows')
# noinspection SqlResolve
self.connection.action('REPLACE INTO tv_shows (show_id, indexer_id, indexer, show_name, location, '
@@ -1684,25 +1684,25 @@ class AddIndexerToTables(db.SchemaUpgrade):
'notify_list, imdb_id, last_update_indexer, dvdorder, archive_firstmatch, '
'rls_require_words, rls_ignore_words, sports, anime, scene, overview, tag, prune '
'FROM backup_tv_shows')
- self.connection.removeTable('backup_tv_shows')
+ self.connection.remove_table('backup_tv_shows')
self.connection.action('VACUUM')
- return self.setDBVersion(20011)
+ return self.set_db_version(20011)
# 20011 -> 20012
class AddShowExludeGlobals(db.SchemaUpgrade):
def execute(self):
- if not self.hasColumn('tv_shows', 'rls_global_exclude_ignore'):
+ if not self.has_column('tv_shows', 'rls_global_exclude_ignore'):
self.upgrade_log('Adding rls_global_exclude_ignore, rls_global_exclude_require to tv_shows')
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
- self.addColumn('tv_shows', 'rls_global_exclude_ignore', data_type='TEXT', default='')
- self.addColumn('tv_shows', 'rls_global_exclude_require', data_type='TEXT', default='')
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
+ self.add_column('tv_shows', 'rls_global_exclude_ignore', data_type='TEXT', default='')
+ self.add_column('tv_shows', 'rls_global_exclude_require', data_type='TEXT', default='')
- if self.hasTable('tv_shows_exclude_backup'):
+ if self.has_table('tv_shows_exclude_backup'):
self.upgrade_log('Adding rls_global_exclude_ignore, rls_global_exclude_require from backup to tv_shows')
# noinspection SqlResolve
self.connection.mass_action([['UPDATE tv_shows SET rls_global_exclude_ignore = '
@@ -1717,15 +1717,15 @@ class AddShowExludeGlobals(db.SchemaUpgrade):
['DROP TABLE tv_shows_exclude_backup']
])
- return self.setDBVersion(20012)
+ return self.set_db_version(20012)
# 20012 -> 20013
class RenameAllowBlockListTables(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
- if not self.connection.hasTable('blocklist'):
+ if not self.connection.has_table('blocklist'):
self.upgrade_log('Renaming allow/block list tables')
for old, new in (('black', 'block'), ('white', 'allow')):
@@ -1738,19 +1738,19 @@ class RenameAllowBlockListTables(db.SchemaUpgrade):
['DROP TABLE tmp_%slist' % new]
])
- return self.setDBVersion(20013)
+ return self.set_db_version(20013)
# 20013 -> 20014
class AddHistoryHideColumn(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
- if not self.hasColumn('history', 'hide'):
+ if not self.has_column('history', 'hide'):
self.upgrade_log('Adding hide column to history')
- self.addColumn('history', 'hide', default=0, set_default=True)
+ self.add_column('history', 'hide', default=0, set_default=True)
- if self.hasTable('history_hide_backup'):
+ if self.has_table('history_hide_backup'):
self.upgrade_log('Restoring hide status in history from backup')
# noinspection SqlResolve
self.connection.mass_action([
@@ -1765,30 +1765,30 @@ class AddHistoryHideColumn(db.SchemaUpgrade):
['DROP TABLE history_hide_backup']
])
- return self.setDBVersion(20014)
+ return self.set_db_version(20014)
# 20014 -> 20015
class ChangeShowData(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection, 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log('Adding new data columns to tv_shows')
- self.addColumns('tv_shows', [('timezone', 'TEXT', ''), ('airtime', 'NUMERIC'),
- ('network_country', 'TEXT', ''), ('network_country_code', 'TEXT', ''),
- ('network_id', 'NUMERIC'), ('network_is_stream', 'INTEGER'),
- ('src_update_timestamp', 'INTEGER')])
+ self.add_columns('tv_shows', [('timezone', 'TEXT', ''), ('airtime', 'NUMERIC'),
+ ('network_country', 'TEXT', ''), ('network_country_code', 'TEXT', ''),
+ ('network_id', 'NUMERIC'), ('network_is_stream', 'INTEGER'),
+ ('src_update_timestamp', 'INTEGER')])
self.upgrade_log('Adding new data columns to tv_episodes')
- self.addColumns('tv_episodes', [('timezone', 'TEXT', ''), ('airtime', 'NUMERIC'),
- ('runtime', 'NUMERIC', 0), ('timestamp', 'NUMERIC'),
- ('network', 'TEXT', ''), ('network_country', 'TEXT', ''),
- ('network_country_code', 'TEXT', ''), ('network_id', 'NUMERIC'),
- ('network_is_stream', 'INTEGER')])
+ self.add_columns('tv_episodes', [('timezone', 'TEXT', ''), ('airtime', 'NUMERIC'),
+ ('runtime', 'NUMERIC', 0), ('timestamp', 'NUMERIC'),
+ ('network', 'TEXT', ''), ('network_country', 'TEXT', ''),
+ ('network_country_code', 'TEXT', ''), ('network_id', 'NUMERIC'),
+ ('network_is_stream', 'INTEGER')])
- if not self.hasColumn('imdb_info', 'is_mini_series'):
+ if not self.has_column('imdb_info', 'is_mini_series'):
self.upgrade_log('Adding new data columns to imdb_info')
- self.addColumns('imdb_info', [('is_mini_series', 'INTEGER', 0), ('episode_count', 'NUMERIC')])
+ self.add_columns('imdb_info', [('is_mini_series', 'INTEGER', 0), ('episode_count', 'NUMERIC')])
self.upgrade_log('Adding Character and Persons tables')
@@ -1984,7 +1984,7 @@ class ChangeShowData(db.SchemaUpgrade):
self.connection.mass_action(cl)
self.connection.action('VACUUM')
- return self.setDBVersion(20015)
+ return self.set_db_version(20015)
# 20015 -> 20016
@@ -2014,8 +2014,8 @@ class ChangeTmdbID(db.SchemaUpgrade):
except (BaseException, Exception):
pass
- db.backup_database(self.connection, 'sickbeard.db', self.checkDBVersion())
- has_tmdb_backups = all(self.hasTable(_r) for _r in
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
+ has_tmdb_backups = all(self.has_table(_r) for _r in
('backup_tmdb_tv_shows', 'backup_tmdb_tv_episodes', 'backup_tmdb_indexer_mapping'))
if has_tmdb_backups:
self.upgrade_log('Checking for dupe shows in backup tables')
@@ -2091,8 +2091,8 @@ class ChangeTmdbID(db.SchemaUpgrade):
['REPLACE INTO indexer_mapping (indexer_id, indexer, mindexer_id, mindexer, date, status)'
' SELECT indexer_id, indexer, mindexer_id, mindexer, date, status FROM backup_tmdb_indexer_mapping'],
])[has_tmdb_backups])
- [self.connection.removeTable(_t) for _t in ('backup_tmdb_tv_shows', 'backup_tmdb_tv_episodes',
+ [self.connection.remove_table(_t) for _t in ('backup_tmdb_tv_shows', 'backup_tmdb_tv_episodes',
'backup_tmdb_indexer_mapping')]
- return self.setDBVersion(20016)
+ return self.set_db_version(20016)
diff --git a/sickgear/db.py b/sickgear/db.py
index b9ee5a4e..2e70ba16 100644
--- a/sickgear/db.py
+++ b/sickgear/db.py
@@ -32,11 +32,12 @@ from .sgdatetime import timestamp_near
from sg_helpers import make_path, compress_file, remove_file_perm, scantree
-from _23 import filter_iter, filter_list, list_values, scandir
+from _23 import scandir
from six import iterkeys, iteritems, itervalues
# noinspection PyUnreachableCode
if False:
+ # noinspection PyUnresolvedReferences
from typing import Any, AnyStr, Dict, List, Optional, Tuple, Union
@@ -47,7 +48,7 @@ db_support_upsert = (3, 25, 0) <= sqlite3.sqlite_version_info # type: bool
db_supports_backup = hasattr(sqlite3.Connection, 'backup') and (3, 6, 11) <= sqlite3.sqlite_version_info # type: bool
-def dbFilename(filename='sickbeard.db', suffix=None):
+def db_filename(filename='sickbeard.db', suffix=None):
# type: (AnyStr, Optional[AnyStr]) -> AnyStr
"""
@param filename: The sqlite database filename to use. If not specified,
@@ -70,7 +71,7 @@ def mass_upsert_sql(table_name, value_dict, key_dict, sanitise=True):
:param value_dict: dict of values to be set {'table_fieldname': value}
    :param key_dict: dict of constraints for update {'table_fieldname': value}
    :param sanitise: True to remove k, v pairs in key_dict from value_dict as they must not exist in both.
- This option has a performance hit so it's best to remove key_dict keys from value_dict and set this False instead.
+ This option has a performance hit, so it's best to remove key_dict keys from value_dict and set this False instead.
:type sanitise: Boolean
    :return: list of 2 sql commands
"""
@@ -80,12 +81,12 @@ def mass_upsert_sql(table_name, value_dict, key_dict, sanitise=True):
    # sanity: remove k, v pairs in key_dict from value_dict
if sanitise:
- value_dict = dict(filter_iter(lambda k: k[0] not in key_dict, iteritems(value_dict)))
+ value_dict = dict(filter(lambda k: k[0] not in key_dict, iteritems(value_dict)))
# noinspection SqlResolve
cl.append(['UPDATE [%s] SET %s WHERE %s' %
(table_name, ', '.join(gen_params(value_dict)), ' AND '.join(gen_params(key_dict))),
- list_values(value_dict) + list_values(key_dict)])
+ list(value_dict.values()) + list(key_dict.values())])
# noinspection SqlResolve
cl.append(['INSERT INTO [' + table_name + '] (' +
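
For orientation: given value_dict={'status': 1} and key_dict={'indexer': 1, 'indexer_id': 2}, and assuming gen_params() yields 'column = ?' fragments (its definition sits outside this hunk), the UPDATE half of the returned pair comes out roughly as (table name illustrative):

    ['UPDATE [history] SET status = ? WHERE indexer = ? AND indexer_id = ?',
     [1, 1, 2]]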
@@ -104,9 +105,9 @@ class DBConnection(object):
from . import helpers
self.new_db = False
- db_src = dbFilename(filename)
+ db_src = db_filename(filename)
if not os.path.isfile(db_src):
- db_alt = dbFilename('sickrage.db')
+ db_alt = db_filename('sickrage.db')
if os.path.isfile(db_alt):
helpers.copy_file(db_alt, db_src)
@@ -143,6 +144,7 @@ class DBConnection(object):
logger.log('Backup target file already exists', logger.ERROR)
return False, 'Backup target file already exists'
+ # noinspection PyUnusedLocal
def progress(status, remaining, total):
logger.log('Copied %s of %s pages...' % (total - remaining, total), logger.DEBUG)
@@ -167,11 +169,11 @@ class DBConnection(object):
return True, 'Backup successful'
- def checkDBVersion(self):
+ def check_db_version(self):
# type: (...) -> int
try:
- if self.hasTable('db_version'):
+ if self.has_table('db_version'):
result = self.select('SELECT db_version FROM db_version')
else:
version = self.select('PRAGMA user_version')[0]['user_version']
@@ -185,7 +187,7 @@ class DBConnection(object):
if result:
version = int(result[0]['db_version'])
- if 10000 > version and self.hasColumn('db_version', 'db_minor_version'):
+ if 10000 > version and self.has_column('db_version', 'db_minor_version'):
# noinspection SqlResolve
minor = self.select('SELECT db_minor_version FROM db_version')
return version * 100 + int(minor[0]['db_minor_version'])
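
Since a pre-10000 major version is packed with the minor as major * 100 + minor, the combined value is unambiguous while the major stays below 10000; for example:

    version, minor = 44, 3
    assert version * 100 + minor == 4403   # unpacks as (4403 // 100, 4403 % 100)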
@@ -304,16 +306,16 @@ class DBConnection(object):
query = 'UPDATE [%s] SET %s WHERE %s' % (
table_name, ', '.join(gen_params(value_dict)), ' AND '.join(gen_params(key_dict)))
- self.action(query, list_values(value_dict) + list_values(key_dict))
+ self.action(query, list(value_dict.values()) + list(key_dict.values()))
if self.connection.total_changes == changes_before:
# noinspection SqlResolve
query = 'INSERT INTO [' + table_name + ']' \
+ ' (%s)' % ', '.join(itertools.chain(iterkeys(value_dict), iterkeys(key_dict))) \
+ ' VALUES (%s)' % ', '.join(['?'] * (len(value_dict) + len(key_dict)))
- self.action(query, list_values(value_dict) + list_values(key_dict))
+ self.action(query, list(value_dict.values()) + list(key_dict.values()))
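
The surrounding logic is the classic update-then-insert upsert fallback: try the UPDATE, and only if sqlite3's total_changes counter did not move, issue the INSERT. A minimal standalone sketch of the same pattern (table and column names invented):

    import sqlite3

    con = sqlite3.connect(':memory:')
    con.execute('CREATE TABLE kv (k TEXT PRIMARY KEY, v TEXT)')

    def upsert(k, v):
        before = con.total_changes
        con.execute('UPDATE kv SET v = ? WHERE k = ?', (v, k))
        if con.total_changes == before:   # UPDATE matched no row
            con.execute('INSERT INTO kv (k, v) VALUES (?, ?)', (k, v))

    upsert('a', '1')   # inserts
    upsert('a', '2')   # updates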
- def tableInfo(self, table_name):
+ def table_info(self, table_name):
# type: (AnyStr) -> Dict[AnyStr, Dict[AnyStr, AnyStr]]
# FIXME ? binding is not supported here, but I cannot find a way to escape a string manually
@@ -331,38 +333,32 @@ class DBConnection(object):
d[col[0]] = row[idx]
return d
- def hasTable(self, table_name):
+ def has_table(self, table_name):
# type: (AnyStr) -> bool
return 0 < len(self.select('SELECT 1 FROM sqlite_master WHERE name = ?;', (table_name,)))
- def hasColumn(self, table_name, column):
+ def has_column(self, table_name, column):
# type: (AnyStr, AnyStr) -> bool
- return column in self.tableInfo(table_name)
+ return column in self.table_info(table_name)
- def hasIndex(self, table_name, index):
+ def has_index(self, table_name, index):
# type: (AnyStr, AnyStr) -> bool
- sqlResults = self.select('PRAGMA index_list([%s])' % table_name)
- for result in sqlResults:
+ sql_results = self.select('PRAGMA index_list([%s])' % table_name)
+ for result in sql_results:
if result['name'] == index:
return True
return False
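
has_table/has_column/has_index wrap two stock SQLite introspection mechanisms, shown standalone here:

    import sqlite3

    con = sqlite3.connect(':memory:')
    con.execute('CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, prune INT)')

    # table existence via the sqlite_master catalogue
    has_table = bool(con.execute(
        'SELECT 1 FROM sqlite_master WHERE name = ?', ('tv_shows',)).fetchall())

    # column and index names via PRAGMA statements (row[1] is the name field)
    cols = [row[1] for row in con.execute('PRAGMA table_info([tv_shows])')]
    idxs = [row[1] for row in con.execute('PRAGMA index_list([tv_shows])')]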
- def removeIndex(self, table, name):
+ def remove_index(self, table, name):
# type: (AnyStr, AnyStr) -> None
- if self.hasIndex(table, name):
+ if self.has_index(table, name):
self.action('DROP INDEX' + ' [%s]' % name)
- def removeTable(self, name):
+ def remove_table(self, name):
# type: (AnyStr) -> None
- if self.hasTable(name):
+ if self.has_table(name):
self.action('DROP TABLE' + ' [%s]' % name)
- # noinspection SqlResolve
- def addColumn(self, table, column, data_type='NUMERIC', default=0):
- # type: (AnyStr, AnyStr, AnyStr, Any) -> None
- self.action('ALTER TABLE [%s] ADD %s %s' % (table, column, data_type))
- self.action('UPDATE [%s] SET %s = ?' % (table, column), (default,))
-
def has_flag(self, flag_name):
# type: (AnyStr) -> bool
sql_result = self.select('SELECT flag FROM flags WHERE flag = ?', [flag_name])
@@ -415,7 +411,7 @@ class DBConnection(object):
logger.load_log('Upgrading %s' % self.filename, to_log, log_level)
-def sanityCheckDatabase(connection, sanity_check):
+def sanity_check_db(connection, sanity_check):
sanity_check(connection).check()
@@ -427,36 +423,36 @@ class DBSanityCheck(object):
pass
-def upgradeDatabase(connection, schema):
+def upgrade_database(connection, schema):
logger.log(u'Checking database structure...', logger.MESSAGE)
connection.is_upgrading = False
- connection.new_db = 0 == connection.checkDBVersion()
- _processUpgrade(connection, schema)
+ connection.new_db = 0 == connection.check_db_version()
+ _process_upgrade(connection, schema)
if connection.is_upgrading:
connection.upgrade_log('Finished')
-def prettyName(class_name):
+def _pretty_name(class_name):
# type: (AnyStr) -> AnyStr
return ' '.join([x.group() for x in re.finditer('([A-Z])([a-z0-9]+)', class_name)])
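
The renamed helper splits a CamelCase class name on its capital letters; for instance:

    # re.finditer('([A-Z])([a-z0-9]+)', ...) yields 'Add', 'Show', 'Not', 'Found', 'Counter'
    _pretty_name('AddShowNotFoundCounter')  # -> 'Add Show Not Found Counter'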
-def restoreDatabase(filename, version):
+def _restore_database(filename, version):
logger.log(u'Restoring database before trying upgrade again')
- if not sickgear.helpers.restore_versioned_file(dbFilename(filename=filename, suffix='v%s' % version), version):
+ if not sickgear.helpers.restore_versioned_file(db_filename(filename=filename, suffix='v%s' % version), version):
logger.log_error_and_exit(u'Database restore failed, abort upgrading database')
return False
return True
-def _processUpgrade(connection, upgrade_class):
+def _process_upgrade(connection, upgrade_class):
instance = upgrade_class(connection)
- logger.log('Checking %s database upgrade' % prettyName(upgrade_class.__name__), logger.DEBUG)
+ logger.log('Checking %s database upgrade' % _pretty_name(upgrade_class.__name__), logger.DEBUG)
if not instance.test():
connection.is_upgrading = True
- connection.upgrade_log(getattr(upgrade_class, 'pretty_name', None) or prettyName(upgrade_class.__name__))
- logger.log('Database upgrade required: %s' % prettyName(upgrade_class.__name__), logger.MESSAGE)
- db_version = connection.checkDBVersion()
+ connection.upgrade_log(getattr(upgrade_class, 'pretty_name', None) or _pretty_name(upgrade_class.__name__))
+ logger.log('Database upgrade required: %s' % _pretty_name(upgrade_class.__name__), logger.MESSAGE)
+ db_version = connection.check_db_version()
try:
# only do backup if it's not a new db
0 < db_version and backup_database(connection, connection.filename, db_version)
@@ -468,7 +464,7 @@ def _processUpgrade(connection, upgrade_class):
# close db before attempting restore
connection.close()
- if restoreDatabase(connection.filename, db_version):
+ if _restore_database(connection.filename, db_version):
logger.log_error_and_exit('Successfully restored database version: %s' % db_version)
else:
logger.log_error_and_exit('Failed to restore database version: %s' % db_version)
@@ -480,7 +476,7 @@ def _processUpgrade(connection, upgrade_class):
logger.log('%s upgrade not required' % upgrade_class.__name__, logger.DEBUG)
for upgradeSubClass in upgrade_class.__subclasses__():
- _processUpgrade(connection, upgradeSubClass)
+ _process_upgrade(connection, upgradeSubClass)
# Base migration class. All future DB changes should be subclassed from this class
@@ -488,11 +484,11 @@ class SchemaUpgrade(object):
def __init__(self, connection, **kwargs):
self.connection = connection
- def hasTable(self, table_name):
+ def has_table(self, table_name):
return 0 < len(self.connection.select('SELECT 1 FROM sqlite_master WHERE name = ?;', (table_name,)))
- def hasColumn(self, table_name, column):
- return column in self.connection.tableInfo(table_name)
+ def has_column(self, table_name, column):
+ return column in self.connection.table_info(table_name)
def list_tables(self):
# type: (...) -> List[AnyStr]
@@ -511,13 +507,13 @@ class SchemaUpgrade(object):
['index'])]
# noinspection SqlResolve
- def addColumn(self, table, column, data_type='NUMERIC', default=0, set_default=False):
+ def add_column(self, table, column, data_type='NUMERIC', default=0, set_default=False):
self.connection.action('ALTER TABLE [%s] ADD %s %s%s' %
(table, column, data_type, ('', ' DEFAULT "%s"' % default)[set_default]))
self.connection.action('UPDATE [%s] SET %s = ?' % (table, column), (default,))
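
With set_default=True the bool again subscripts a pair, appending a DEFAULT clause to the DDL. For the AddHistoryHideColumn step above, add_column('history', 'hide', default=0, set_default=True) therefore issues, in order:

    sql_alter = 'ALTER TABLE [history] ADD hide NUMERIC DEFAULT "0"'
    sql_backfill = 'UPDATE [history] SET hide = ?'   # run with (0,) to backfill existing rows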
# noinspection SqlResolve
- def addColumns(self, table, column_list=None):
+ def add_columns(self, table, column_list=None):
# type: (AnyStr, List) -> None
if isinstance(column_list, list):
sql = []
@@ -535,25 +531,21 @@ class SchemaUpgrade(object):
if sql:
self.connection.mass_action(sql)
- def dropColumn(self, table, columns):
- # type: (AnyStr, AnyStr) -> None
- self.drop_columns(table, columns)
-
def drop_columns(self, table, column):
# type: (AnyStr, Union[AnyStr, List[AnyStr]]) -> None
# get old table columns and store the ones we want to keep
result = self.connection.select('pragma table_info([%s])' % table)
columns_list = ([column], column)[isinstance(column, list)]
- keptColumns = filter_list(lambda col: col['name'] not in columns_list, result)
+ kept_columns = list(filter(lambda col: col['name'] not in columns_list, result))
- keptColumnsNames = []
+ kept_columns_names = []
final = []
pk = []
# copy the old table schema, column by column
- for column in keptColumns:
+ for column in kept_columns:
- keptColumnsNames.append(column['name'])
+ kept_columns_names.append(column['name'])
cl = [column['name'], column['type']]
@@ -574,7 +566,7 @@ class SchemaUpgrade(object):
# join all the table column creation fields
final = ', '.join(final)
- keptColumnsNames = ', '.join(keptColumnsNames)
+ kept_columns_names = ', '.join(kept_columns_names)
# generate sql for the new table creation
if 0 == len(pk):
@@ -586,12 +578,12 @@ class SchemaUpgrade(object):
# create new temporary table and copy the old table data across, barring the removed column
self.connection.action(sql)
# noinspection SqlResolve
- self.connection.action('INSERT INTO [%s_new] SELECT %s FROM [%s]' % (table, keptColumnsNames, table))
+ self.connection.action('INSERT INTO [%s_new] SELECT %s FROM [%s]' % (table, kept_columns_names, table))
# copy the old indexes from the old table
result = self.connection.select("SELECT sql FROM sqlite_master WHERE tbl_name=? AND type='index'", [table])
- # remove the old table and rename the new table to take it's place
+ # remove the old table and rename the new table to take its place
# noinspection SqlResolve
self.connection.action('DROP TABLE [%s]' % table)
# noinspection SqlResolve
@@ -605,22 +597,19 @@ class SchemaUpgrade(object):
# vacuum the db as we will have a lot of space to reclaim after dropping tables
self.connection.action('VACUUM')
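
drop_columns works around the missing ALTER TABLE ... DROP COLUMN of older SQLite (only added upstream in 3.35) by rebuilding the table. The same dance, minimally (names invented):

    import sqlite3

    con = sqlite3.connect(':memory:')
    con.execute('CREATE TABLE t (a INTEGER PRIMARY KEY, b TEXT, c TEXT)')
    con.execute("INSERT INTO t VALUES (1, 'keep', 'drop me')")

    con.execute('CREATE TABLE t_new (a INTEGER PRIMARY KEY, b TEXT)')  # kept columns only
    con.execute('INSERT INTO t_new SELECT a, b FROM t')                # copy data across
    con.execute('DROP TABLE t')
    con.execute('ALTER TABLE t_new RENAME TO t')                       # take its place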
- def checkDBVersion(self):
- return self.connection.checkDBVersion()
+ def call_check_db_version(self):
+ return self.connection.check_db_version()
- def incDBVersion(self):
- new_version = self.checkDBVersion() + 1
+ def inc_db_version(self):
+ new_version = self.call_check_db_version() + 1
# noinspection SqlConstantCondition
self.connection.action('UPDATE db_version SET db_version = ? WHERE 1=1', [new_version])
return new_version
- def setDBVersion(self, new_version, check_db_version=True):
+ def set_db_version(self, new_version, check_db_version=True):
# noinspection SqlConstantCondition
self.connection.action('UPDATE db_version SET db_version = ? WHERE 1=1', [new_version])
- return check_db_version and self.checkDBVersion()
-
- def listTables(self):
- return self.list_tables()
+ return check_db_version and self.call_check_db_version()
def do_query(self, queries):
if not isinstance(queries, list):
@@ -630,23 +619,23 @@ class SchemaUpgrade(object):
for query in queries:
tbl_name = re.findall(r'(?i)DROP.*?TABLE.*?\[?([^\s\]]+)', query)
- if tbl_name and not self.hasTable(tbl_name[0]):
+ if tbl_name and not self.has_table(tbl_name[0]):
continue
tbl_name = re.findall(r'(?i)CREATE.*?TABLE.*?\s([^\s(]+)\s*\(', query)
- if tbl_name and self.hasTable(tbl_name[0]):
+ if tbl_name and self.has_table(tbl_name[0]):
continue
self.connection.action(query)
def finish(self, tbl_dropped=False):
if tbl_dropped:
self.connection.action('VACUUM')
- self.incDBVersion()
+ self.inc_db_version()
def upgrade_log(self, *args, **kwargs):
self.connection.upgrade_log(*args, **kwargs)
-def MigrationCode(my_db):
+def migration_code(my_db):
schema = {
0: sickgear.mainDB.InitialSchema,
9: sickgear.mainDB.AddSizeAndSceneNameFields,
@@ -719,7 +708,7 @@ def MigrationCode(my_db):
# 20002: sickgear.mainDB.AddCoolSickGearFeature3,
}
- db_version = my_db.checkDBVersion()
+ db_version = my_db.check_db_version()
my_db.new_db = 0 == db_version
logger.log(u'Detected database version: v%s' % db_version, logger.DEBUG)
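
The keys of this mapping are the versions an upgrade can start from, and each execute() above returns the new version via set_db_version(); a hypothetical driver loop (the real one sits outside this hunk) would be:

    while db_version in schema:
        db_version = schema[db_version](my_db).execute()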
@@ -746,7 +735,7 @@ def MigrationCode(my_db):
my_db.close()
logger.log(u'Failed to update database with error: %s attempting recovery...' % ex(e), logger.ERROR)
- if restoreDatabase(my_db.filename, db_version):
+ if _restore_database(my_db.filename, db_version):
# initialize the main SB database
logger.log_error_and_exit(u'Successfully restored database version: %s' % db_version)
else:
@@ -759,9 +748,9 @@ def cleanup_old_db_backups(filename):
d, filename = os.path.split(filename)
if not d:
d = sickgear.DATA_DIR
- for f in filter_iter(lambda fn: fn.is_file() and filename in fn.name and
- re.search(r'\.db(\.v\d+)?\.r\d+$', fn.name),
- scandir(d)):
+ for f in filter(lambda fn: fn.is_file() and filename in fn.name and
+ re.search(r'\.db(\.v\d+)?\.r\d+$', fn.name),
+ scandir(d)):
try:
os.unlink(f.path)
except (BaseException, Exception):
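
This is the recurring pattern of the changeset: the _23 shims filter_iter/filter_list give way to the builtin filter, which is already lazy on py3, with list(filter(...)) reserved for places that need a real list. Standalone:

    import os
    import re

    # lazy: directory entries are tested one by one during iteration
    backups = filter(lambda fn: fn.is_file()
                     and re.search(r'\.db(\.v\d+)?\.r\d+$', fn.name),
                     os.scandir('.'))
    for f in backups:
        print(f.path)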
@@ -777,7 +766,7 @@ def backup_database(db_connection, filename, version):
return
logger.log(u'Backing up database before upgrade')
- if not sickgear.helpers.backup_versioned_file(dbFilename(filename), version):
+ if not sickgear.helpers.backup_versioned_file(db_filename(filename), version):
logger.log_error_and_exit(u'Database backup failed, abort upgrading database')
else:
logger.log(u'Proceeding with upgrade')
@@ -841,7 +830,7 @@ def backup_all_dbs(target, compress=True, prefer_7z=True):
    optionally compress with zip or 7z (python 3 only, external lib py7zr required)
7z falls back to zip if py7zr is not available
- :param target: target folder to backup to
+    :param target: target folder for the db backup
:param compress: compress db backups
:param prefer_7z: prefer 7z compression if available
:return: success, message
diff --git a/sickgear/event_queue.py b/sickgear/event_queue.py
index d9a42daa..2975c380 100644
--- a/sickgear/event_queue.py
+++ b/sickgear/event_queue.py
@@ -33,7 +33,7 @@ class Events(threading.Thread):
# get event type
etype = self.queue.get(True, 1)
- # perform callback if we got a event type
+ # perform callback if we got an event type
self.callback(etype)
# event completed
diff --git a/sickgear/failedProcessor.py b/sickgear/failedProcessor.py
index b1c7b4d8..e1e6a40b 100644
--- a/sickgear/failedProcessor.py
+++ b/sickgear/failedProcessor.py
@@ -69,19 +69,19 @@ class FailedProcessor(LegacyFailedProcessor):
"""
self._log(u'Failed download detected: (%s, %s)' % (self.nzb_name, self.dir_name))
- releaseName = show_name_helpers.determine_release_name(self.dir_name, self.nzb_name)
- if None is releaseName:
+ release_name = show_name_helpers.determine_release_name(self.dir_name, self.nzb_name)
+ if None is release_name:
self._log(u'Warning: unable to find a valid release name.', logger.WARNING)
raise exceptions_helper.FailedProcessingFailed()
try:
parser = NameParser(False, show_obj=self.show_obj, convert=True)
- parsed = parser.parse(releaseName)
+ parsed = parser.parse(release_name)
except InvalidNameException:
- self._log(u'Error: release name is invalid: ' + releaseName, logger.DEBUG)
+ self._log(u'Error: release name is invalid: ' + release_name, logger.DEBUG)
raise exceptions_helper.FailedProcessingFailed()
except InvalidShowException:
- self._log(u'Error: unable to parse release name %s into a valid show' % releaseName, logger.DEBUG)
+ self._log(u'Error: unable to parse release name %s into a valid show' % release_name, logger.DEBUG)
raise exceptions_helper.FailedProcessingFailed()
logger.log(u"name_parser info: ", logger.DEBUG)
diff --git a/sickgear/failed_history.py b/sickgear/failed_history.py
index 5af91a6d..8edc87fe 100644
--- a/sickgear/failed_history.py
+++ b/sickgear/failed_history.py
@@ -25,7 +25,6 @@ from .history import dateFormat
from exceptions_helper import EpisodeNotFoundException, ex
from _23 import unquote
-from six import PY2, text_type
# noinspection PyUnresolvedReferences
# noinspection PyUnreachableCode
@@ -83,10 +82,6 @@ def prepare_failed_name(release):
fixed = re.sub(r'[.\-+ ]', '_', fixed)
- # noinspection PyUnresolvedReferences
- if PY2 and not isinstance(fixed, unicode):
- fixed = text_type(fixed, 'utf-8', 'replace')
-
return fixed
@@ -165,8 +160,8 @@ def set_episode_failed(ep_obj):
"""
try:
with ep_obj.lock:
- quality = Quality.splitCompositeStatus(ep_obj.status)[1]
- ep_obj.status = Quality.compositeStatus(FAILED, quality)
+ quality = Quality.split_composite_status(ep_obj.status)[1]
+ ep_obj.status = Quality.composite_status(FAILED, quality)
ep_obj.save_to_db()
except EpisodeNotFoundException as e:
@@ -236,7 +231,7 @@ def revert_episode(ep_obj):
if ep_obj.episode in history_eps:
status_revert = history_eps[ep_obj.episode]['old_status']
- status, quality = Quality.splitCompositeStatus(status_revert)
+ status, quality = Quality.split_composite_status(status_revert)
logger.log('Found in failed.db history with status: %s quality: %s' % (
statusStrings[status], Quality.qualityStrings[quality]))
else:
diff --git a/sickgear/generic_queue.py b/sickgear/generic_queue.py
index b29d0ebc..d57ebcbe 100644
--- a/sickgear/generic_queue.py
+++ b/sickgear/generic_queue.py
@@ -175,7 +175,7 @@ class GenericQueue(object):
"""
clear queue excluding internal defined types
- :param action_types: only clear all of given action type
+ :param action_types: only clear supplied action types
"""
if not isinstance(action_types, list):
action_types = [action_types]
diff --git a/sickgear/gh_api.py b/sickgear/gh_api.py
index bca5e589..7c78c4ed 100644
--- a/sickgear/gh_api.py
+++ b/sickgear/gh_api.py
@@ -23,7 +23,7 @@ if False:
class GitHub(object):
"""
- Simple api wrapper for the Github API v3. Currently only supports the small thing that SB
+ Simple api wrapper for the GitHub API v3. Currently only supports the small thing that SB
    needs it for: a list of commits.
"""
@@ -34,7 +34,7 @@ class GitHub(object):
self.branch = branch
@staticmethod
- def _access_API(path, params=None):
+ def _access_api(path, params=None):
"""
Access the API at the path given and with the optional params given.
@@ -49,55 +49,57 @@ class GitHub(object):
if params and type(params) is dict:
url += '?' + '&'.join([str(x) + '=' + str(params[x]) for x in params])
- parsedJSON = helpers.get_url(url, parse_json=True)
- if not parsedJSON:
+ parsed_json = helpers.get_url(url, parse_json=True)
+ if not parsed_json:
return []
- return parsedJSON
+ return parsed_json
def commits(self):
"""
Get a list of the 100 most recent commits from the specified user/repo/branch, starting from HEAD.
- user: The github username of the person whose repo you're querying
+ user: The GitHub username of the person whose repo you're querying
repo: The repo name to query
branch: Optional, the branch name to show commits from
- Returns a deserialized json object containing the commit info. See http://developer.github.com/v3/repos/commits/
+ Returns a deserialized json object containing the commit info.
+ See https://developer.github.com/v3/repos/commits/
"""
- access_API = self._access_API(['repos', self.github_repo_user, self.github_repo, 'commits'],
+ access_api = self._access_api(['repos', self.github_repo_user, self.github_repo, 'commits'],
params={'per_page': 100, 'sha': self.branch})
- return access_API
+ return access_api
def compare(self, base, head, per_page=1):
"""
Uses the API to get a list of compares between base and head.
- user: The github username of the person whose repo you're querying
+ user: The GitHub username of the person whose repo you're querying
repo: The repo name to query
base: Start compare from branch
head: Current commit sha or branch name to compare
per_page: number of items per page
- Returns a deserialized json object containing the compare info. See http://developer.github.com/v3/repos/commits
+ Returns a deserialized json object containing the compare info.
+ See https://developer.github.com/v3/repos/commits
"""
- access_API = self._access_API(
+ access_api = self._access_api(
['repos', self.github_repo_user, self.github_repo, 'compare', base + '...' + head],
params={'per_page': per_page})
- return access_API
+ return access_api
def branches(self):
- access_API = self._access_API(
+ access_api = self._access_api(
['repos', self.github_repo_user, self.github_repo, 'branches'],
params={'per_page': 100})
- return access_API
+ return access_api
def pull_requests(self):
- access_API = self._access_API(
+ access_api = self._access_api(
['repos', self.github_repo_user, self.github_repo, 'pulls'],
params={'per_page': 100}) # type: Optional[Dict]
pulls = []
- for x in access_API:
+ for x in access_api:
try:
pull = PullRequest(x['head']['ref'], x['number'])
pulls.append((repr(pull), pull.fetch_name()))
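
A hypothetical caller for the renamed wrapper (user/repo/branch values are invented, the constructor signature is assumed from the attributes used above, and network access goes through helpers.get_url):

    # assumed signature: GitHub(github_repo_user, github_repo, branch)
    gh = GitHub('SickGear', 'SickGear', 'main')
    for commit in gh.commits()[:3]:          # deserialized GitHub API JSON
        print(commit['sha'][:7], commit['commit']['message'].splitlines()[0])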
diff --git a/sickgear/helpers.py b/sickgear/helpers.py
index 58dd3562..3fc8c499 100644
--- a/sickgear/helpers.py
+++ b/sickgear/helpers.py
@@ -43,8 +43,9 @@ import requests
import requests.exceptions
import subliminal
from lxml_etree import etree, is_lxml
+from base64 import decodebytes as b64decodebytes, encodebytes as b64encodebytes
-from _23 import b64decodebytes, b64encodebytes, decode_bytes, decode_str, filter_iter, scandir
+from _23 import decode_bytes, decode_str, scandir
from six import iteritems, string_types, text_type
# noinspection PyUnresolvedReferences
from six.moves import zip
@@ -62,7 +63,7 @@ if False:
from typing import Any, AnyStr, Dict, Generator, NoReturn, Iterable, Iterator, List, Optional, Set, Tuple, Union
from .tv import TVShow
# the following workaround hack resolves a pyc resolution bug
- from .name_cache import retrieveNameFromCache
+ from .name_cache import retrieve_name_from_cache
from six import integer_types
RE_XML_ENCODING = re.compile(r'^(<\?xml[^>]+)\s+(encoding\s*=\s*[\"\'][^\"\']*[\"\'])(\s*\?>|)', re.U)
@@ -953,7 +954,7 @@ def get_show(name, try_scene_exceptions=False):
show_obj = None
try:
- tvid, prodid = sickgear.name_cache.retrieveNameFromCache(name)
+ tvid, prodid = sickgear.name_cache.retrieve_name_from_cache(name)
if tvid and prodid:
show_obj = find_show_by_id({tvid: prodid})
@@ -1283,7 +1284,7 @@ def check_port(host, port, timeout=1.0):
def clear_unused_providers():
- providers = [x.cache.providerID for x in sickgear.providers.sortedProviderList() if x.is_active()]
+ providers = [x.cache.providerID for x in sickgear.providers.sorted_sources() if x.is_active()]
if providers:
my_db = db.DBConnection('cache.db')
@@ -1317,7 +1318,7 @@ def has_anime():
:rtype: bool
"""
# noinspection PyTypeChecker
- return False if not sickgear.showList else any(filter_iter(lambda show: show.is_anime, sickgear.showList))
+ return False if not sickgear.showList else any(filter(lambda show: show.is_anime, sickgear.showList))
def cpu_sleep():
@@ -1390,7 +1391,7 @@ def should_delete_episode(status):
:return: should be deleted
:rtype: bool
"""
- s = Quality.splitCompositeStatus(status)[0]
+ s = Quality.split_composite_status(status)[0]
if s not in SNATCHED_ANY + [DOWNLOADED, ARCHIVED, IGNORED]:
return True
logger.log('not safe to delete episode from db because of status: %s' % statusStrings[s], logger.DEBUG)
@@ -1514,7 +1515,7 @@ def get_overview(ep_status, show_quality, upgrade_once, split_snatch=False):
:type split_snatch: bool
:return: constant from classes Overview
"""
- status, quality = Quality.splitCompositeStatus(ep_status)
+ status, quality = Quality.split_composite_status(ep_status)
if ARCHIVED == status:
return Overview.GOOD
if WANTED == status:
@@ -1530,7 +1531,7 @@ def get_overview(ep_status, show_quality, upgrade_once, split_snatch=False):
if not split_snatch and status in SNATCHED_ANY:
return Overview.SNATCHED
- void, best_qualities = Quality.splitQuality(show_quality)
+ void, best_qualities = Quality.split_quality(show_quality)
# if re-downloads aren't wanted then mark it "good" if there is anything
if not len(best_qualities):
return Overview.GOOD
@@ -1682,7 +1683,7 @@ def upgrade_new_naming():
(d_entry.path, new_dir_name, repr(e), ex(e)), logger.WARNING)
if os.path.isdir(new_dir_name):
try:
- f_n = filter_iter(lambda fn: fn.is_file(), scandir(new_dir_name))
+ f_n = filter(lambda fn: fn.is_file(), scandir(new_dir_name))
except OSError as e:
logger.log('Unable to rename %s / %s' % (repr(e), ex(e)),
logger.WARNING)
diff --git a/sickgear/history.py b/sickgear/history.py
index 844088c5..209b3e9a 100644
--- a/sickgear/history.py
+++ b/sickgear/history.py
@@ -22,8 +22,6 @@ from .common import FAILED, SNATCHED, SNATCHED_PROPER, SUBTITLED, Quality
from .name_parser.parser import NameParser
import sickgear
-from six import PY2, text_type
-
# noinspection PyUnreachableCode
if False:
from typing import Any, AnyStr
@@ -47,9 +45,6 @@ def _log_history_item(action, tvid, prodid, season, episode, quality, resource,
"""
log_date = datetime.datetime.now().strftime(dateFormat)
- if PY2 and not isinstance(resource, text_type):
- resource = text_type(resource, 'utf-8', 'replace')
-
my_db = db.DBConnection()
my_db.action(
'INSERT INTO history'
@@ -77,7 +72,7 @@ def log_snatch(search_result):
else:
provider = 'unknown'
- action = Quality.compositeStatus((SNATCHED, SNATCHED_PROPER)[is_proper], search_result.quality)
+ action = Quality.composite_status((SNATCHED, SNATCHED_PROPER)[is_proper], search_result.quality)
resource = search_result.name
@@ -125,8 +120,8 @@ def log_subtitle(tvid, prodid, season, episode, status, subtitle_result):
"""
resource = subtitle_result.path
provider = subtitle_result.service
- status, quality = Quality.splitCompositeStatus(status)
- action = Quality.compositeStatus(SUBTITLED, quality)
+ status, quality = Quality.split_composite_status(status)
+ action = Quality.composite_status(SUBTITLED, quality)
_log_history_item(action, tvid, prodid, season, episode, quality, resource, provider)
@@ -140,8 +135,8 @@ def log_failed(ep_obj, release, provider=None):
:param release: release
:param provider: provider name
"""
- status, quality = Quality.splitCompositeStatus(ep_obj.status)
- action = Quality.compositeStatus(FAILED, quality)
+ status, quality = Quality.split_composite_status(ep_obj.status)
+ action = Quality.composite_status(FAILED, quality)
_log_history_item(action, ep_obj.show_obj.tvid, ep_obj.show_obj.prodid,
ep_obj.season, ep_obj.episode, quality, release, provider)
@@ -215,7 +210,7 @@ def history_snatched_proper_fix():
continue
if 0 < Quality.get_proper_level(pr.extra_info_no_name(), pr.version, pr.is_anime):
cl.append(['UPDATE history SET action = ? WHERE rowid = ?',
- [Quality.compositeStatus(SNATCHED_PROPER, int(r['quality'])),
+ [Quality.composite_status(SNATCHED_PROPER, int(r['quality'])),
r['rowid']]])
if cl:
my_db.mass_action(cl)
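
history.py and failed_history.py both move to the snake_case Quality helpers, which pack an episode status and a quality into a single int. Assuming the classic SickBeard encoding of composite = status + 100 * quality (the constants below are illustrative, not the real enum values):

    SNATCHED, HDTV = 2, 8                                 # illustrative values only
    composite = SNATCHED + 100 * HDTV                     # composite_status -> 802
    status, quality = composite % 100, composite // 100   # split_composite_status
    assert (status, quality) == (SNATCHED, HDTV)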
diff --git a/sickgear/image_cache.py b/sickgear/image_cache.py
index 8648bd22..f0372358 100644
--- a/sickgear/image_cache.py
+++ b/sickgear/image_cache.py
@@ -271,7 +271,7 @@ class ImageCache(object):
"""
:param image_file: image file
:type image_file: AnyStr
- :return: true if a image_file exists
+ :return: true if an image_file exists
:rtype: bool
"""
result = []
@@ -652,7 +652,7 @@ class ImageCache(object):
if thumb_img_data:
thumb_result = metadata_generator.write_image(thumb_img_data, dest_thumb_path, force=True)
if not thumb_result:
- thumb_result = metadata_generator.write_image(img_data, dest_thumb_path, force=True)
+ metadata_generator.write_image(img_data, dest_thumb_path, force=True)
break
if result:
diff --git a/sickgear/indexermapper.py b/sickgear/indexermapper.py
index b5eafcf8..38d0f022 100644
--- a/sickgear/indexermapper.py
+++ b/sickgear/indexermapper.py
@@ -26,8 +26,7 @@ import sickgear
from lib.dateutil.parser import parse
-from _23 import unidecode
-from six import iteritems, moves, string_types, PY2
+from six import iteritems, moves, string_types
# noinspection PyUnreachableCode
if False:
@@ -133,7 +132,7 @@ def confirm_show(premiere_date, shows_premiere, expected_name, show_name):
# type: (Optional[datetime.date], Optional[Union[AnyStr, datetime.date]], AnyStr, AnyStr) -> bool
"""
confirm show possible confirmations:
- 1. premiere dates are less then 2 days apart
+ 1. premiere dates are less than 2 days apart
2. show name is the same and premiere year is 1 year or less apart
:param premiere_date: expected show premiere date
@@ -178,9 +177,7 @@ def clean_show_name(showname):
:return:
:rtype: AnyStr
"""
- if not PY2:
- return re.sub(r'[(\s]*(?:19|20)\d\d[)\s]*$', '', showname)
- return re.sub(r'[(\s]*(?:19|20)\d\d[)\s]*$', '', unidecode(showname))
+ return re.sub(r'[(\s]*(?:19|20)\d\d[)\s]*$', '', showname)
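
The simplified helper now just strips a trailing 4-digit year and any surrounding parens/whitespace; the py2 unidecode branch is gone:

    import re

    def clean(name):
        # strip a trailing 4-digit year plus surrounding parens/whitespace
        return re.sub(r'[(\s]*(?:19|20)\d\d[)\s]*$', '', name)

    clean('The Show (2020)')   # -> 'The Show'
    clean('The Show')          # -> 'The Show'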
def get_show_name_date(show_obj):
@@ -255,7 +252,7 @@ def map_indexers_to_show(show_obj, update=False, force=False, recheck=False, im_
all_ids_srcs = [src_tv_id] + [s for s in (TVINFO_TRAKT, TVINFO_TMDB, TVINFO_TVMAZE, TVINFO_TVDB, TVINFO_IMDB)
if s != src_tv_id]
searched, confirmed = {}, False
- for r in moves.range(len(all_ids_srcs)):
+ for _ in moves.range(len(all_ids_srcs)):
search_done = False
for i in all_ids_srcs:
if new_ids.verified.get(i):
diff --git a/sickgear/indexers/indexer_api.py b/sickgear/indexers/indexer_api.py
index 530faa96..c5ee5f65 100644
--- a/sickgear/indexers/indexer_api.py
+++ b/sickgear/indexers/indexer_api.py
@@ -20,8 +20,6 @@ from sg_helpers import proxy_setting
import sickgear
from lib.tvinfo_base import TVInfoBase
-from _23 import list_values
-
# noinspection PyUnreachableCode
if False:
from typing import AnyStr, Dict
@@ -83,13 +81,13 @@ class TVInfoAPI(object):
@property
def sources(self):
# type: () -> Dict[int, AnyStr]
- return dict([(int(x['id']), x['name']) for x in list_values(tvinfo_config) if not x['mapped_only'] and
+ return dict([(int(x['id']), x['name']) for x in list(tvinfo_config.values()) if not x['mapped_only'] and
True is not x.get('fallback') and True is not x.get('people_only')])
@property
def search_sources(self):
# type: () -> Dict[int, AnyStr]
- return dict([(int(x['id']), x['name']) for x in list_values(tvinfo_config) if not x['mapped_only'] and
+ return dict([(int(x['id']), x['name']) for x in list(tvinfo_config.values()) if not x['mapped_only'] and
x.get('active') and not x.get('defunct') and True is not x.get('fallback')
and True is not x.get('people_only')])
@@ -99,7 +97,7 @@ class TVInfoAPI(object):
"""
:return: return all indexers including mapped only indexers excluding fallback indexers
"""
- return dict([(int(x['id']), x['name']) for x in list_values(tvinfo_config) if True is not x.get('fallback')
+ return dict([(int(x['id']), x['name']) for x in list(tvinfo_config.values()) if True is not x.get('fallback')
and True is not x.get('people_only')])
@property
@@ -108,9 +106,9 @@ class TVInfoAPI(object):
"""
:return: return all fallback indexers
"""
- return dict([(int(x['id']), x['name']) for x in list_values(tvinfo_config) if True is x.get('fallback')])
+ return dict([(int(x['id']), x['name']) for x in list(tvinfo_config.values()) if True is x.get('fallback')])
@property
def xem_supported_sources(self):
# type: () -> Dict[int, AnyStr]
- return dict([(int(x['id']), x['name']) for x in list_values(tvinfo_config) if x.get('xem_origin')])
+ return dict([(int(x['id']), x['name']) for x in list(tvinfo_config.values()) if x.get('xem_origin')])
diff --git a/sickgear/logger.py b/sickgear/logger.py
index 39821266..2e479737 100644
--- a/sickgear/logger.py
+++ b/sickgear/logger.py
@@ -263,8 +263,8 @@ class SBRotatingLogHandler(object):
buf = fh.read(min(remaining_size, buf_size))
remaining_size -= buf_size
lines = buf.split('\n')
- # the first line of the buffer is probably not a complete line so
- # we'll save it and append it to the last line of the next buffer
+ # the first line of the buffer is probably not a complete line,
+ # so save it and append it to the last line of the next buffer
# we read
if None is not segment:
# if the previous chunk starts right from the beginning of line
diff --git a/sickgear/metadata/__init__.py b/sickgear/metadata/__init__.py
index 8e1a4315..1dbf73ad 100644
--- a/sickgear/metadata/__init__.py
+++ b/sickgear/metadata/__init__.py
@@ -19,14 +19,13 @@ __all__ = ['generic', 'helpers', 'kodi', 'mede8er', 'mediabrowser', 'ps3', 'tivo
import sys
from . import kodi, mede8er, mediabrowser, ps3, tivo, wdtv, xbmc, xbmc_12plus
-from _23 import filter_list
def available_generators():
- return filter_list(lambda x: x not in ('generic', 'helpers'), __all__)
+ return list(filter(lambda x: x not in ('generic', 'helpers'), __all__))
-def _getMetadataModule(name):
+def _get_metadata_module(name):
name = name.lower()
prefix = "sickgear.metadata."
if name in __all__ and prefix + name in sys.modules:
@@ -34,8 +33,8 @@ def _getMetadataModule(name):
return None
-def _getMetadataClass(name):
- module = _getMetadataModule(name)
+def _get_metadata_class(name):
+ module = _get_metadata_module(name)
if not module:
return None
@@ -46,10 +45,10 @@ def _getMetadataClass(name):
def get_metadata_generator_dict():
result = {}
for cur_generator_id in available_generators():
- cur_generator = _getMetadataClass(cur_generator_id)
+ cur_generator = _get_metadata_class(cur_generator_id)
if not cur_generator:
continue
result[cur_generator.name] = cur_generator
return result
-
+
diff --git a/sickgear/metadata/generic.py b/sickgear/metadata/generic.py
index 810a01f0..62b122dc 100644
--- a/sickgear/metadata/generic.py
+++ b/sickgear/metadata/generic.py
@@ -35,7 +35,6 @@ from lib.fanart.core import Request as fanartRequest
import lib.fanart as fanart
from lxml_etree import etree
-from _23 import filter_iter, list_keys
from six import iteritems, itervalues, string_types
# noinspection PyUnreachableCode
@@ -614,7 +613,7 @@ class GenericMetadata(object):
logger.log(u"No thumb is available for this episode, not creating a thumb", logger.DEBUG)
return False
- thumb_data = metadata_helpers.getShowImage(thumb_url, show_name=ep_obj.show_obj.name)
+ thumb_data = metadata_helpers.get_show_image(thumb_url, show_name=ep_obj.show_obj.name)
result = self._write_image(thumb_data, file_path)
@@ -712,7 +711,7 @@ class GenericMetadata(object):
if 0 == len(cur_season_art):
continue
- # Just grab whatever's there for now
+ # Just grab whatever is there for now
art_id, season_url = cur_season_art.popitem()
season_poster_file_path = self.get_season_poster_path(show_obj, cur_season)
@@ -722,7 +721,7 @@ class GenericMetadata(object):
logger.DEBUG)
continue
- season_data = metadata_helpers.getShowImage(season_url, show_name=show_obj.name)
+ season_data = metadata_helpers.get_show_image(season_url, show_name=show_obj.name)
if not season_data:
logger.log(u'No season poster data available, skipping this season', logger.DEBUG)
@@ -757,7 +756,7 @@ class GenericMetadata(object):
if 0 == len(cur_season_art):
continue
- # Just grab whatever's there for now
+ # Just grab whatever is there for now
art_id, season_url = cur_season_art.popitem()
season_banner_file_path = self.get_season_banner_path(show_obj, cur_season)
@@ -767,7 +766,7 @@ class GenericMetadata(object):
logger.DEBUG)
continue
- season_data = metadata_helpers.getShowImage(season_url, show_name=show_obj.name)
+ season_data = metadata_helpers.get_show_image(season_url, show_name=show_obj.name)
if not season_data:
logger.log(u'No season banner data available, skipping this season', logger.DEBUG)
@@ -855,7 +854,7 @@ class GenericMetadata(object):
def _get_show_info(tv_id):
try:
show_lang = show_obj.lang
- # There's gotta be a better way of doing this but we don't wanna
+ # There's gotta be a better way of doing this, but we don't want to
# change the language value elsewhere
tvinfo_config = sickgear.TVInfoAPI(tv_id).api_params.copy()
tvinfo_config['fanart'] = True
@@ -874,7 +873,7 @@ class GenericMetadata(object):
tv_id).name + ", not downloading images: " + ex(e), logger.WARNING)
# todo: when tmdb is added as tv source remove the hardcoded TVINFO_TMDB
- for tv_src in list(OrderedDict.fromkeys([show_obj.tvid] + list_keys(sickgear.TVInfoAPI().search_sources) +
+ for tv_src in list(OrderedDict.fromkeys([show_obj.tvid] + list(sickgear.TVInfoAPI().search_sources) +
[TVINFO_TMDB])):
if tv_src != show_obj.tvid and not show_obj.ids.get(tv_src, {}).get('id'):
continue
@@ -1059,7 +1058,7 @@ class GenericMetadata(object):
if image_type in ('poster', 'banner'):
if isinstance(image_url, tuple):
image_url = image_url[0]
- img_data = metadata_helpers.getShowImage(image_url, which, show_obj.name)
+ img_data = metadata_helpers.get_show_image(image_url, which, show_obj.name)
if img_cache_type and img_cache_type != image_cache.which_type(img_data, is_binary=True):
img_data = None
continue
@@ -1083,7 +1082,7 @@ class GenericMetadata(object):
result = {}
try:
- # There's gotta be a better way of doing this but we don't wanna
+ # There's gotta be a better way of doing this, but we don't want to
# change the language value elsewhere
tvinfo_config = sickgear.TVInfoAPI(show_obj.tvid).api_params.copy()
tvinfo_config[image_type] = True
@@ -1220,9 +1219,9 @@ class GenericMetadata(object):
resp = request.response()
itemlist = []
dedupe = []
- for art in filter_iter(lambda i: 10 < len(i.get('url', '')) and (lang == i.get('lang', '')[0:2]),
- # remove "[0:2]" ... to strictly use only data where "en" is at source
- resp[types[image_type]]): # type: dict
+ for art in filter(lambda i: 10 < len(i.get('url', '')) and (lang == i.get('lang', '')[0:2]),
+ # remove "[0:2]" ... to strictly use only data where "en" is at source
+ resp[types[image_type]]): # type: dict
try:
url = (art['url'], art['url'].replace('/fanart/', '/preview/'))[thumb]
if url not in dedupe:
diff --git a/sickgear/metadata/helpers.py b/sickgear/metadata/helpers.py
index fe046379..f0f5254a 100644
--- a/sickgear/metadata/helpers.py
+++ b/sickgear/metadata/helpers.py
@@ -22,7 +22,7 @@ if False:
from typing import AnyStr, Optional
-def getShowImage(url, img_num=None, show_name=None, supress_log=False):
+def get_show_image(url, img_num=None, show_name=None, supress_log=False):
# type: (AnyStr, Optional[int], Optional[AnyStr], bool) -> Optional[bytes]
"""
diff --git a/sickgear/metadata/kodi.py b/sickgear/metadata/kodi.py
index e679ebeb..9723d940 100644
--- a/sickgear/metadata/kodi.py
+++ b/sickgear/metadata/kodi.py
@@ -29,7 +29,7 @@ import exceptions_helper
from exceptions_helper import ex
from lxml_etree import etree
-from _23 import decode_str, map_iter
+from _23 import decode_str
from six import string_types
# noinspection PyUnreachableCode
@@ -107,7 +107,7 @@ class KODIMetadata(generic.GenericMetadata):
show_obj: a TVShow instance to create the NFO for
"""
- show_ID = show_obj.prodid
+ show_id = show_obj.prodid
show_lang = show_obj.lang
tvinfo_config = sickgear.TVInfoAPI(show_obj.tvid).api_params.copy()
@@ -125,9 +125,9 @@ class KODIMetadata(generic.GenericMetadata):
tv_node = etree.Element('tvshow')
try:
- show_info = t[int(show_ID)]
+ show_info = t[int(show_id)]
except BaseTVinfoShownotfound as e:
- logger.log('Unable to find show with id %s on %s, skipping it' % (show_ID, sickgear.TVInfoAPI(
+ logger.log('Unable to find show with id %s on %s, skipping it' % (show_id, sickgear.TVInfoAPI(
show_obj.tvid).name), logger.ERROR)
raise e
except BaseTVinfoError as e:
@@ -141,7 +141,7 @@ class KODIMetadata(generic.GenericMetadata):
# check for title and id
if None is getattr(show_info, 'seriesname', None) or None is getattr(show_info, 'id', None):
- logger.log('Incomplete info for show with id %s on %s, skipping it' % (show_ID, sickgear.TVInfoAPI(
+ logger.log('Incomplete info for show with id %s on %s, skipping it' % (show_id, sickgear.TVInfoAPI(
show_obj.tvid).name), logger.ERROR)
return False
@@ -157,7 +157,7 @@ class KODIMetadata(generic.GenericMetadata):
has_id = False
tvdb_id = None
- for tvid, slug in map_iter(
+ for tvid, slug in map(
lambda _tvid: (_tvid, sickgear.TVInfoAPI(_tvid).config.get('kodi_slug')),
list(sickgear.TVInfoAPI().all_sources)):
mid = slug and show_obj.ids[tvid].get('id')
@@ -171,7 +171,7 @@ class KODIMetadata(generic.GenericMetadata):
uniqueid = etree.SubElement(tv_node, 'uniqueid', **kwargs)
uniqueid.text = '%s%s' % (('', 'tt')[TVINFO_IMDB == tvid], mid)
if not has_id:
- logger.log('Incomplete info for show with id %s on %s, skipping it' % (show_ID, sickgear.TVInfoAPI(
+ logger.log('Incomplete info for show with id %s on %s, skipping it' % (show_id, sickgear.TVInfoAPI(
show_obj.tvid).name), logger.ERROR)
return False
diff --git a/sickgear/name_cache.py b/sickgear/name_cache.py
index 1ecac246..c7225a27 100644
--- a/sickgear/name_cache.py
+++ b/sickgear/name_cache.py
@@ -32,7 +32,7 @@ sceneNameCache = {}
nameCacheLock = threading.Lock()
-def addNameToCache(name, tvid=0, prodid=0, season=-1):
+def add_name_to_cache(name, tvid=0, prodid=0, season=-1):
"""Adds the show & tvdb id to the namecache
:param name: the show name to cache
@@ -41,7 +41,7 @@ def addNameToCache(name, tvid=0, prodid=0, season=-1):
:type tvid: int
:param prodid: the production id that this show should be cached with (can be None/0 for unknown)
:type prodid: int or long
- :param season: the season the the name exception belongs to. -1 for generic exception
+ :param season: the season the name exception belongs to. -1 for generic exception
:type season: int
"""
global nameCache
@@ -53,7 +53,7 @@ def addNameToCache(name, tvid=0, prodid=0, season=-1):
nameCache[name] = [int(tvid), int(prodid), season]
-def retrieveNameFromCache(name):
+def retrieve_name_from_cache(name):
# type: (AnyStr) -> Union[Tuple[int, int], Tuple[None, None]]
"""Looks up the given name in the name cache
@@ -71,7 +71,7 @@ def retrieveNameFromCache(name):
return None, None
-def buildNameCache(show_obj=None, update_only_scene=False):
+def build_name_cache(show_obj=None, update_only_scene=False):
# type: (Optional[Union[TVShow, TVShowBase]], bool) -> None
"""Adds all new name exceptions to the namecache memory and flushes any removed name exceptions
@@ -104,7 +104,7 @@ def buildNameCache(show_obj=None, update_only_scene=False):
for cur_so in sickgear.showList if cur_so])
sceneNameCache = {}
- cacheDB = db.DBConnection()
+ cache_db = db.DBConnection()
cache_results = []
if update_only_scene:
@@ -117,7 +117,7 @@ def buildNameCache(show_obj=None, update_only_scene=False):
tmp_scene_name_cache = sceneNameCache.copy()
for t, s in iteritems(show_ids):
- cache_results += cacheDB.select(
+ cache_results += cache_db.select(
'SELECT show_name, indexer AS tv_id, indexer_id AS prod_id, season'
' FROM scene_exceptions'
' WHERE indexer = %s AND indexer_id IN (%s)' % (t, ','.join(['%s' % i for i in s])))
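(For reference: the renamed name_cache helpers boil down to a lock-guarded dict mapping a show name to its ids. A minimal standalone sketch of that idea, with hypothetical internals, not the module's actual code:)

    import threading

    _lock = threading.Lock()
    _names = {}  # show name -> [tvid, prodid, season]

    def add_name_to_cache(name, tvid=0, prodid=0, season=-1):
        # cache a show name against its TV info source id and production id
        with _lock:
            _names[name.lower()] = [int(tvid), int(prodid), season]

    def retrieve_name_from_cache(name):
        # (tvid, prodid) for a known name, else (None, None)
        with _lock:
            hit = _names.get(name.lower())
            return (hit[0], hit[1]) if hit else (None, None)

    add_name_to_cache('Show Name', tvid=1, prodid=123)
    assert (1, 123) == retrieve_name_from_cache('show name')
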
diff --git a/sickgear/name_parser/parser.py b/sickgear/name_parser/parser.py
index 8d63bb59..c1769f1b 100644
--- a/sickgear/name_parser/parser.py
+++ b/sickgear/name_parser/parser.py
@@ -39,8 +39,8 @@ from lib.tvinfo_base.exceptions import *
from ..classes import OrderedDefaultdict
from .._legacy_classes import LegacyParseResult
-from _23 import decode_str, list_keys, list_range
-from six import iteritems, iterkeys, itervalues, PY2, string_types, text_type
+from _23 import decode_str, list_range
+from six import iteritems, iterkeys, itervalues, string_types, text_type
# noinspection PyUnreachableCode
if False:
@@ -166,7 +166,7 @@ class NameParser(object):
result.which_regex = [cur_regex_name]
result.score = 0 - cur_regex_num
- named_groups = list_keys(match.groupdict())
+ named_groups = list(match.groupdict())
if 'series_name' in named_groups:
result.series_name = match.group('series_name')
@@ -260,7 +260,7 @@ class NameParser(object):
if 'extra_info' in named_groups:
tmp_extra_info = match.group('extra_info')
- # Show.S04.Special or Show.S05.Part.2.Extras is almost certainly not every episode in the season
+ # Show.S04.Special or Show.S05.Part.2.Extras are almost certainly not every episode in the season
if tmp_extra_info and 'season_only' == cur_regex_name and re.search(
r'([. _-]|^)(special|extra)s?\w*([. _-]|$)', tmp_extra_info, re.I):
continue
@@ -292,7 +292,7 @@ class NameParser(object):
matches.append(result)
if len(matches):
- # pick best match with highest score based on placement
+ # pick best match with the highest score based on placement
best_result = max(sorted(matches, reverse=True, key=lambda x: x.which_regex), key=lambda x: x.score)
show_obj = None
@@ -326,7 +326,7 @@ class NameParser(object):
# get quality
new_name = helpers.remove_non_release_groups(name, show_obj.is_anime)
- best_result.quality = common.Quality.nameQuality(new_name, show_obj.is_anime)
+ best_result.quality = common.Quality.name_quality(new_name, show_obj.is_anime)
new_episode_numbers = []
new_season_numbers = []
@@ -451,7 +451,7 @@ class NameParser(object):
'SickGear does not support this. '
'Sorry.' % (str(new_season_numbers)))
- # I guess it's possible that we'd have duplicate episodes too, so lets
+ # I guess it's possible that we'd have duplicate episodes too, so let's
# eliminate them
new_episode_numbers = list(set(new_episode_numbers))
new_episode_numbers.sort()
@@ -500,23 +500,20 @@ class NameParser(object):
if not second:
return getattr(first, attr)
- a = getattr(first, attr, [])
- b = getattr(second, attr)
+ first_val = getattr(first, attr, [])
+ second_val = getattr(second, attr)
- # if a is good use it
- if None is not a or (isinstance(a, list) and len(a)):
- return a
+ # if first_val is good use it
+ if None is not first_val or (isinstance(first_val, list) and len(first_val)):
+ return first_val
-    # if not use b (if b isn't set it'll just be default)
+    # if not, use second_val (if second_val isn't set it'll just be default)
- return b
+ return second_val
@staticmethod
- def _unicodify(obj, encoding='utf-8'):
- if PY2 and isinstance(obj, string_types):
- if not isinstance(obj, text_type):
- obj = text_type(obj, encoding, 'replace')
- if not PY2 and isinstance(obj, text_type):
+ def _unicodify(obj, encoding='utf8'):
+ if isinstance(obj, text_type):
try:
- return obj.encode('latin1').decode('utf8')
+ return obj.encode('latin1').decode(encoding)
except (BaseException, Exception):
pass
return obj
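(The encode('latin1').decode(...) round-trip in _unicodify above is the standard mojibake repair: UTF-8 bytes that were wrongly decoded as latin-1 can be re-encoded as latin-1 to recover the original bytes. A quick illustration:)

    raw = 'Café'.encode('utf8')            # b'Caf\xc3\xa9'
    mojibake = raw.decode('latin1')        # 'CafÃ©' - wrong codec applied
    assert 'Café' == mojibake.encode('latin1').decode('utf8')
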
@@ -751,9 +748,7 @@ class ParseResult(LegacyParseResult):
self.release_group, self.air_date, tuple(self.ab_episode_numbers)))
def __str__(self):
- if not PY2:
- return self.__unicode__()
- return self.__unicode__().encode('utf-8', errors='ignore')
+ return self.__unicode__()
def __unicode__(self):
if None is not self.series_name:
diff --git a/sickgear/name_parser/regexes.py b/sickgear/name_parser/regexes.py
index 9a6b30db..85df55a5 100644
--- a/sickgear/name_parser/regexes.py
+++ b/sickgear/name_parser/regexes.py
@@ -14,7 +14,7 @@
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see .
-# all regexes are case insensitive
+# all regexes are case-insensitive
normal_regexes = [
('garbage_name',
diff --git a/sickgear/naming.py b/sickgear/naming.py
index 9a24e43c..6d34d227 100644
--- a/sickgear/naming.py
+++ b/sickgear/naming.py
@@ -109,7 +109,7 @@ class TVEpisodeSample(tv.TVEpisode):
self.scene_absolute_number = absolute_number # type: int
self._airdate = datetime.date(2010, 3, 9) # type: datetime.date
self.show_obj = TVShowSample() # type: TVShowSample
- self._status = Quality.compositeStatus(common.DOWNLOADED, common.Quality.SDTV) # type: int
+ self._status = Quality.composite_status(common.DOWNLOADED, common.Quality.SDTV) # type: int
self._release_name = 'Show.Name.S02E03.HDTV.XviD-RLSGROUP' # type: AnyStr
self._is_proper = True # type: bool
self._version = 2 # type: int
@@ -196,7 +196,7 @@ def check_valid_abd_naming(pattern=None):
def check_valid_sports_naming(pattern=None):
"""
- Checks if the name is can be parsed back to its original form for an sports format.
+    Checks if the name can be parsed back to its original form for a sports format.
Returns true if the naming is valid, false if not.
:param pattern: String Naming Pattern
@@ -294,7 +294,7 @@ def generate_sample_ep(multi=None, abd=False, sports=False, anime=False, anime_t
# make a fake episode object
sample_ep_obj = TVEpisodeSample(2, 3, 3, 'Ep Name')
- sample_ep_obj._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
+ sample_ep_obj._status = Quality.composite_status(DOWNLOADED, Quality.HDTV)
sample_ep_obj._airdate = datetime.date(2011, 3, 9)
if abd:
@@ -313,14 +313,14 @@ def generate_sample_ep(multi=None, abd=False, sports=False, anime=False, anime_t
if None is not multi:
sample_ep_obj._name = 'Ep Name (1)'
second_ep = TVEpisodeSample(2, 4, 4, 'Ep Name (2)')
- second_ep._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
+ second_ep._status = Quality.composite_status(DOWNLOADED, Quality.HDTV)
normal_naming = not anime or 3 == anime_type
release_name = sample_ep_obj._release_name = second_ep._release_name = \
('Show.Name.003-004.HDTV.XviD-RLSGROUP', 'Show.Name.S02E03E04E05.HDTV.XviD-RLSGROUP')[normal_naming]
sample_ep_obj.related_ep_obj.append(second_ep)
if normal_naming:
third_ep = TVEpisodeSample(2, 5, 5, 'Ep Name (3)')
- third_ep._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
+ third_ep._status = Quality.composite_status(DOWNLOADED, Quality.HDTV)
third_ep._release_name = release_name
sample_ep_obj.related_ep_obj.append(third_ep)
else:
diff --git a/sickgear/network_timezones.py b/sickgear/network_timezones.py
index 04c70aef..c6c548ce 100644
--- a/sickgear/network_timezones.py
+++ b/sickgear/network_timezones.py
@@ -29,15 +29,14 @@ from lib.dateutil import tz, zoneinfo
from lib.tzlocal import get_localzone
from sg_helpers import remove_file_perm, scantree
-from six import integer_types, iteritems, string_types, PY2
-from _23 import list_keys
+from six import integer_types, iteritems, string_types
# noinspection PyUnreachableCode
if False:
from _23 import DirEntry
from typing import AnyStr, Optional, Tuple, Union
-# regex to parse time (12/24 hour format)
+# regex to parse time (12/24-hour format)
time_regex = re.compile(r'(\d{1,2})(([:.](\d{2}))? ?([PA][. ]? ?M)|[:.](\d{2}))\b', flags=re.I)
am_regex = re.compile(r'(A[. ]? ?M)', flags=re.I)
pm_regex = re.compile(r'(P[. ]? ?M)', flags=re.I)
@@ -175,7 +174,7 @@ def _update_zoneinfo():
url_data = helpers.get_url(url)
if None is url_data:
update_last_retry()
- # when None is urlData, trouble connecting to github
+        # when None is url_data, trouble connecting to GitHub
logger.log(u'Fetching zoneinfo.txt failed, this can happen from time to time. Unable to get URL: %s' % url,
logger.WARNING)
return
@@ -264,13 +263,13 @@ def update_network_dict():
network_tz_data = {}
- # network timezones are stored on github pages
+ # network timezones are stored on GitHub pages
url = 'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/network_timezones.txt'
url_data = helpers.get_url(url)
if url_data in (None, ''):
update_last_retry()
- # When None is urlData, trouble connecting to github
+        # When None is url_data, trouble connecting to GitHub
logger.debug(u'Updating network timezones failed, this can happen from time to time. URL: %s' % url)
load_network_dict(load=False)
return
@@ -414,7 +413,7 @@ def parse_time(time_of_day):
hour = helpers.try_int(time_parsed.group(1))
mins = helpers.try_int(time_parsed.group(4))
ampm = time_parsed.group(5)
- # convert am/pm to 24 hour clock
+ # convert am/pm to 24-hour clock
if None is not ampm:
if None is not pm_regex.search(ampm) and 12 != hour:
hour += 12
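(The am/pm branch follows the usual 12-hour clock rules: pm adds 12 except at 12 pm, and 12 am maps to 0. A standalone sketch of that arithmetic:)

    def to_24_hour(hour, is_pm):
        # 12am -> 0, 1-11am unchanged, 12pm -> 12, 1-11pm -> 13-23
        if is_pm:
            return hour if 12 == hour else hour + 12
        return 0 if 12 == hour else hour

    assert (0, 12) == (to_24_hour(12, False), to_24_hour(12, True))
    assert (19, 7) == (to_24_hour(7, True), to_24_hour(7, False))
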
@@ -506,13 +505,13 @@ def _load_network_conversions():
conversions_in = []
- # network conversions are stored on github pages
+ # network conversions are stored on GitHub pages
url = 'https://raw.githubusercontent.com/prinz23/sg_network_conversions/master/conversions.txt'
url_data = helpers.get_url(url)
if url_data in (None, ''):
update_last_retry()
- # when no url_data, trouble connecting to github
+ # when no url_data, trouble connecting to GitHub
logger.debug(u'Updating network conversions failed, this can happen from time to time. URL: %s' % url)
return
@@ -547,7 +546,7 @@ def _load_network_conversions():
# remove deleted records
if 0 < len(conversions_db):
- network_name = list_keys(conversions_db)
+ network_name = list(conversions_db)
cl.append(['DELETE FROM network_conversions WHERE tvdb_network'
' IN (%s)' % ','.join(['?'] * len(network_name)), network_name])
@@ -632,8 +631,6 @@ def get_episode_time(d, # type: int
if d and None is not ep_time and None is not tzinfo:
ep_date = datetime.date.fromordinal(helpers.try_int(d))
- if PY2:
- return datetime.datetime.combine(ep_date, ep_time).replace(tzinfo=tzinfo)
return datetime.datetime.combine(ep_date, ep_time, tzinfo)
return parse_date_time(d, t, tzinfo)
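(The removed PY2 branch existed because datetime.datetime.combine() only accepts a tzinfo argument on py3.6+; with py2 gone, the replace() step is redundant. For example, plain dateutil standing in for the bundled lib.dateutil:)

    import datetime
    from dateutil import tz

    zone = tz.gettz('America/New_York')
    when = datetime.datetime.combine(
        datetime.date(2023, 2, 20), datetime.time(21, 0), zone)  # py3.6+
    assert zone is when.tzinfo
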
diff --git a/sickgear/notifiers/__init__.py b/sickgear/notifiers/__init__.py
index 1b56c4f5..b35ae421 100644
--- a/sickgear/notifiers/__init__.py
+++ b/sickgear/notifiers/__init__.py
@@ -25,8 +25,6 @@ from . import emby, kodi, plex, xbmc, \
import sickgear
-from _23 import filter_iter, list_values
-
class NotifierFactory(object):
@@ -68,32 +66,27 @@ class NotifierFactory(object):
:return: ID String
:rtype: String
"""
- for n in filter_iter(lambda v: v.is_enabled(),
- list_values(self.notifiers)):
+ for n in filter(lambda v: v.is_enabled(), list(self.notifiers.values())):
yield n.id()
@property
def enabled_onsnatch(self):
- for n in filter_iter(lambda v: v.is_enabled() and v.is_enabled_onsnatch(),
- list_values(self.notifiers)):
+ for n in filter(lambda v: v.is_enabled() and v.is_enabled_onsnatch(), list(self.notifiers.values())):
yield n.id()
@property
def enabled_ondownload(self):
- for n in filter_iter(lambda v: v.is_enabled() and v.is_enabled_ondownload(),
- list_values(self.notifiers)):
+ for n in filter(lambda v: v.is_enabled() and v.is_enabled_ondownload(), list(self.notifiers.values())):
yield n.id()
@property
def enabled_onsubtitledownload(self):
- for n in filter_iter(lambda v: v.is_enabled() and v.is_enabled_onsubtitledownload(),
- list_values(self.notifiers)):
+ for n in filter(lambda v: v.is_enabled() and v.is_enabled_onsubtitledownload(), list(self.notifiers.values())):
yield n.id()
@property
def enabled_library(self):
- for n in filter_iter(lambda v: v.is_enabled() and v.is_enabled_library(),
- list_values(self.notifiers)):
+ for n in filter(lambda v: v.is_enabled() and v.is_enabled_library(), list(self.notifiers.values())):
yield n.id()
def get(self, nid):
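(These hunks show the substitution applied throughout this changeset: the _23 py2/py3 shims collapse to plain py3 builtins. Roughly:)

    d = {'a': 1, 'b': 2}
    # list_keys(d)        ->  list(d)
    # list_values(d)      ->  list(d.values())
    # filter_iter(f, xs)  ->  filter(f, xs)        (lazy)
    # filter_list(f, xs)  ->  list(filter(f, xs))
    # map_iter(f, xs)     ->  map(f, xs)           (lazy)
    # map_list(f, xs)     ->  list(map(f, xs))
    assert ['a', 'b'] == list(d) and [1, 2] == list(d.values())
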
diff --git a/sickgear/notifiers/emby.py b/sickgear/notifiers/emby.py
index 249c6639..81065c00 100644
--- a/sickgear/notifiers/emby.py
+++ b/sickgear/notifiers/emby.py
@@ -21,7 +21,7 @@ from .generic import Notifier
from json_helper import json_loads
import sickgear
-from _23 import decode_bytes, decode_str, map_list
+from _23 import decode_bytes, decode_str
class EmbyNotifier(Notifier):
@@ -50,7 +50,7 @@ class EmbyNotifier(Notifier):
timeout=20, hooks=dict(response=self._cb_response), json=True)
return self.response and self.response.get('ok') and 200 == self.response.get('status_code') and \
- version <= map_list(lambda x: int(x), (response and response.get('Version') or '0.0.0.0').split('.'))
+ version <= list(map(lambda x: int(x), (response and response.get('Version') or '0.0.0.0').split('.')))
def update_library(self, show_obj=None, **kwargs):
""" Update library function
diff --git a/sickgear/notifiers/plex.py b/sickgear/notifiers/plex.py
index 5eaf646c..b84c7d89 100644
--- a/sickgear/notifiers/plex.py
+++ b/sickgear/notifiers/plex.py
@@ -20,8 +20,8 @@ from .generic import Notifier
import sickgear
from exceptions_helper import ex
-from _23 import b64encodestring, decode_str, etree, filter_iter, list_values, unquote_plus, urlencode
-from six import iteritems, text_type, PY2
+from _23 import b64encodestring, decode_str, etree, unquote_plus, urlencode
+from six import iteritems
# noinspection PyUnresolvedReferences
from six.moves import urllib
@@ -49,8 +49,7 @@ class PLEXNotifier(Notifier):
return False
for key in command:
- if not PY2 or type(command[key]) == text_type:
- command[key] = command[key].encode('utf-8')
+ command[key] = command[key].encode('utf-8')
enc_command = urlencode(command)
self._log_debug(u'Encoded API command: ' + enc_command)
@@ -203,7 +202,7 @@ class PLEXNotifier(Notifier):
hosts_failed.append(cur_host)
continue
- for section in filter_iter(lambda x: 'show' == x.attrib['type'], sections):
+ for section in filter(lambda x: 'show' == x.attrib['type'], sections):
if str(section.attrib['key']) in hosts_all:
continue
keyed_host = [(str(section.attrib['key']), cur_host)]
@@ -247,18 +246,14 @@ class PLEXNotifier(Notifier):
return ''
hosts = [
- host.replace('http://', '') for host in filter_iter(lambda x: x.startswith('http:'),
- list_values(hosts_all))]
+ host.replace('http://', '') for host in filter(lambda x: x.startswith('http:'), list(hosts_all.values()))]
secured = [
- host.replace('https://', '') for host in filter_iter(lambda x: x.startswith('https:'),
- list_values(hosts_all))]
+ host.replace('https://', '') for host in filter(lambda x: x.startswith('https:'), list(hosts_all.values()))]
failed = ', '.join([
- host.replace('http://', '') for host in filter_iter(lambda x: x.startswith('http:'),
- hosts_failed)])
- failed_secured = ', '.join(filter_iter(
+ host.replace('http://', '') for host in filter(lambda x: x.startswith('http:'), hosts_failed)])
+ failed_secured = ', '.join(filter(
lambda x: x not in hosts,
- [host.replace('https://', '') for host in filter_iter(lambda x: x.startswith('https:'),
- hosts_failed)]))
+ [host.replace('https://', '') for host in filter(lambda x: x.startswith('https:'), hosts_failed)]))
return ' ' + ' '.join([result for result in [
('', 'Fail: username/password when fetching credentials from plex.tv')[False is token_arg],
diff --git a/sickgear/notifiers/trakt.py b/sickgear/notifiers/trakt.py
index dcd2a28a..cb24c4ff 100644
--- a/sickgear/notifiers/trakt.py
+++ b/sickgear/notifiers/trakt.py
@@ -22,7 +22,6 @@ import sickgear
from lib.api_trakt import TraktAPI, exceptions
from exceptions_helper import ConnectionSkipException
-from _23 import list_keys
from six import iteritems
# noinspection PyUnreachableCode
@@ -38,7 +37,7 @@ class TraktNotifier(BaseNotifier):
def is_enabled_library(cls):
if sickgear.TRAKT_ACCOUNTS:
for tid, locations in iteritems(sickgear.TRAKT_UPDATE_COLLECTION):
- if tid in list_keys(sickgear.TRAKT_ACCOUNTS):
+ if tid in list(sickgear.TRAKT_ACCOUNTS):
return True
return False
@@ -89,7 +88,7 @@ class TraktNotifier(BaseNotifier):
data['shows'][0]['seasons'][0]['episodes'].append({'number': cur_ep_obj.episode})
for tid, locations in iteritems(sickgear.TRAKT_UPDATE_COLLECTION):
- if tid not in list_keys(sickgear.TRAKT_ACCOUNTS):
+ if tid not in list(sickgear.TRAKT_ACCOUNTS):
continue
for loc in locations:
if not ep_obj.location.startswith('%s%s' % (loc.rstrip(os.path.sep), os.path.sep)):
diff --git a/sickgear/notifiers/xbmc.py b/sickgear/notifiers/xbmc.py
index 71b24718..67b0412e 100644
--- a/sickgear/notifiers/xbmc.py
+++ b/sickgear/notifiers/xbmc.py
@@ -23,7 +23,6 @@ from exceptions_helper import ex
from json_helper import json_dumps, json_load
from _23 import b64encodestring, decode_str, etree, quote, unquote, unquote_plus, urlencode
-from six import PY2, text_type
# noinspection PyUnresolvedReferences
from six.moves import urllib
@@ -150,8 +149,7 @@ class XBMCNotifier(Notifier):
password = self._choose(password, sickgear.XBMC_PASSWORD)
for key in command:
- if not PY2 or type(command[key]) == text_type:
- command[key] = command[key].encode('utf-8')
+ command[key] = command[key].encode('utf-8')
enc_command = urlencode(command)
self._log_debug(u'Encoded API command: ' + enc_command)
diff --git a/sickgear/nzbSplitter.py b/sickgear/nzbSplitter.py
index da179d7c..7ac6cfe8 100644
--- a/sickgear/nzbSplitter.py
+++ b/sickgear/nzbSplitter.py
@@ -40,7 +40,7 @@ SUBJECT_FN_MATCHER = re.compile(r'"([^"]*)"')
RE_NORMAL_NAME = re.compile(r'\.\w{1,5}$')
-def platform_encode(p):
+def _platform_encode(p):
""" Return Unicode name, if not already Unicode, decode with UTF-8 or latin1 """
try:
return decode_str(p)
@@ -48,17 +48,17 @@ def platform_encode(p):
return decode_str(p, sickgear.SYS_ENCODING, errors='replace').replace('?', '!')
-def name_extractor(subject):
+def _name_extractor(subject):
""" Try to extract a file name from a subject line, return `subject` if in doubt """
result = subject
for name in re.findall(SUBJECT_FN_MATCHER, subject):
name = name.strip(' "')
if name and RE_NORMAL_NAME.search(name):
result = name
- return platform_encode(result)
+ return _platform_encode(result)
-def getSeasonNZBs(name, url_data, season):
+def _get_season_nzbs(name, url_data, season):
"""
:param name: name
@@ -71,31 +71,31 @@ def getSeasonNZBs(name, url_data, season):
:rtype: Tuple[Dict, AnyStr]
"""
try:
- showXML = etree.ElementTree(etree.XML(url_data))
+ show_xml = etree.ElementTree(etree.XML(url_data))
except SyntaxError:
logger.log(u'Unable to parse the XML of %s, not splitting it' % name, logger.ERROR)
return {}, ''
filename = name.replace('.nzb', '')
- nzbElement = showXML.getroot()
+ nzb_element = show_xml.getroot()
regex = r'([\w\._\ ]+)[\._ ]S%02d[\._ ]([\w\._\-\ ]+)' % season
- sceneNameMatch = re.search(regex, filename, re.I)
- if sceneNameMatch:
- showName, qualitySection = sceneNameMatch.groups()
+ scene_name_match = re.search(regex, filename, re.I)
+ if scene_name_match:
+ show_name, quality_section = scene_name_match.groups()
else:
logger.log('%s - Not a valid season pack scene name. If it\'s a valid one, log a bug.' % name, logger.ERROR)
return {}, ''
- regex = r'(%s[\._]S%02d(?:[E0-9]+)\.[\w\._]+)' % (re.escape(showName), season)
+ regex = r'(%s[\._]S%02d(?:[E0-9]+)\.[\w\._]+)' % (re.escape(show_name), season)
regex = regex.replace(' ', '.')
ep_files = {}
xmlns = None
- for cur_file in list(nzbElement):
+ for cur_file in list(nzb_element):
if not isinstance(cur_file.tag, string_types):
continue
xmlns_match = re.match(r'[{](https?://[A-Za-z0-9_./]+/nzb)[}]file', cur_file.tag)
@@ -108,7 +108,7 @@ def getSeasonNZBs(name, url_data, season):
# print cur_file.get("subject"), "doesn't match", regex
continue
cur_ep = match.group(1)
- fn = name_extractor(cur_file.get('subject', ''))
+ fn = _name_extractor(cur_file.get('subject', ''))
if cur_ep == re.sub(r'\+\d+\.par2$', '', fn, flags=re.I):
bn, ext = os.path.splitext(fn)
cur_ep = re.sub(r'\.(part\d+|vol\d+(\+\d+)?)$', '', bn, flags=re.I)
@@ -126,7 +126,7 @@ def getSeasonNZBs(name, url_data, season):
return ep_files, xmlns
-def createNZBString(file_elements, xmlns):
+def _create_nzb_string(file_elements, xmlns):
"""
:param file_elements: first element
@@ -134,17 +134,17 @@ def createNZBString(file_elements, xmlns):
:return:
:rtype: AnyStr
"""
- rootElement = etree.Element("nzb")
+ root_element = etree.Element("nzb")
if xmlns:
- rootElement.set("xmlns", xmlns)
+ root_element.set("xmlns", xmlns)
for curFile in file_elements:
- rootElement.append(stripNS(curFile, xmlns))
+ root_element.append(_strip_ns(curFile, xmlns))
- return etree.tostring(rootElement, encoding='utf-8')
+ return etree.tostring(root_element, encoding='utf-8')
-def saveNZB(nzb_name, nzb_string):
+def _save_nzb(nzb_name, nzb_string):
"""
:param nzb_name: nzb name
@@ -160,15 +160,15 @@ def saveNZB(nzb_name, nzb_string):
logger.log(u'Unable to save NZB: ' + ex(e), logger.ERROR)
-def stripNS(element, ns):
+def _strip_ns(element, ns):
element.tag = element.tag.replace("{" + ns + "}", "")
for curChild in list(element):
- stripNS(curChild, ns)
+ _strip_ns(curChild, ns)
return element
-def splitResult(result):
+def split_result(result):
"""
:param result: search result
@@ -195,7 +195,7 @@ def splitResult(result):
# bust it up
season = parse_result.season_number if None is not parse_result.season_number else 1
- separate_nzbs, xmlns = getSeasonNZBs(result.name, resp, season)
+ separate_nzbs, xmlns = _get_season_nzbs(result.name, resp, season)
result_list = []
@@ -246,7 +246,7 @@ def splitResult(result):
nzb_result.provider = result.provider
nzb_result.quality = result.quality
nzb_result.show_obj = result.show_obj
- nzb_result.extraInfo = [createNZBString(separate_nzbs[new_nzb], xmlns)]
+ nzb_result.extraInfo = [_create_nzb_string(separate_nzbs[new_nzb], xmlns)]
result_list.append(nzb_result)
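(The renamed _strip_ns helper removes an XML namespace prefix from every tag so the split NZB files can be matched by bare tag name. A self-contained sketch of the same approach; the namespace value here is just the common NZB one:)

    import xml.etree.ElementTree as etree

    def strip_ns(element, ns):
        # drop the '{namespace}' prefix from this element and all descendants
        element.tag = element.tag.replace('{' + ns + '}', '')
        for child in list(element):
            strip_ns(child, ns)
        return element

    ns = 'http://www.newzbin.com/DTD/2003/nzb'
    root = etree.fromstring('<nzb xmlns="%s"><file subject="x"/></nzb>' % ns)
    assert '{%s}file' % ns == root[0].tag
    assert 'file' == strip_ns(root, ns)[0].tag
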
diff --git a/sickgear/people_queue.py b/sickgear/people_queue.py
index 77a3716e..0e99721c 100644
--- a/sickgear/people_queue.py
+++ b/sickgear/people_queue.py
@@ -154,7 +154,7 @@ class PeopleQueueActions(object):
class PeopleQueueItem(generic_queue.QueueItem):
def __init__(self, action_id, show_obj, uid=None, force=False, **kwargs):
- # type: (integer_types, TVShow, AnyStr, bool, Dict) -> PeopleQueueItem
+ # type: (integer_types, TVShow, AnyStr, bool, Dict) -> None
"""
:param action_id:
@@ -172,7 +172,7 @@ class PeopleQueueItem(generic_queue.QueueItem):
class CastQueueItem(PeopleQueueItem):
def __init__(self, show_obj, show_info_cast=None, uid=None, force=False, scheduled_update=False, switch=False,
**kwargs):
- # type: (TVShow, CastList, AnyStr, bool, bool, bool, Dict) -> CastQueueItem
+ # type: (TVShow, CastList, AnyStr, bool, bool, bool, Dict) -> None
"""
:param show_obj: show obj
diff --git a/sickgear/piper.py b/sickgear/piper.py
index 65217b8d..d4c8a3a9 100644
--- a/sickgear/piper.py
+++ b/sickgear/piper.py
@@ -10,8 +10,7 @@ import re
from json_helper import json_loads
from sg_helpers import cmdline_runner, is_virtualenv
-from _23 import filter_list, ordered_dict
-from six import iteritems, PY2
+from six import iteritems
# noinspection PyUnreachableCode
if False:
@@ -51,10 +50,6 @@ def run_pip(pip_cmd, suppress_stderr=False):
pip_cmd += ['--progress-bar', 'off']
new_pip_arg = ['--no-python-version-warning']
- if PY2:
- pip_version, _, _ = _get_pip_version()
- if pip_version and 20 > int(pip_version.split('.')[0]):
- new_pip_arg = []
return cmdline_runner(
[sys.executable, '-m', 'pip'] + new_pip_arg + ['--disable-pip-version-check'] + pip_cmd,
@@ -72,7 +67,7 @@ def initial_requirements():
from Cheetah import VersionTuple
is_cheetah2 = (3, 0, 0) > VersionTuple[0:3]
- is_cheetah3py3 = not PY2 and (3, 3, 0) > VersionTuple[0:3]
+ is_cheetah3py3 = (3, 3, 0) > VersionTuple[0:3]
if not (is_cheetah2 or is_cheetah3py3):
return
@@ -158,13 +153,10 @@ def check_pip_env():
_, _, installed, failed_names = _check_pip_env()
- py2_last = 'final py2 release'
boost = 'performance boost'
extra_info = dict({'Cheetah3': 'filled requirement', 'CT3': 'filled requirement',
'lxml': boost, 'python-Levenshtein': boost})
- extra_info.update((dict(cryptography=py2_last, pip=py2_last, regex=py2_last,
- scandir=boost, setuptools=py2_last),
- dict(regex=boost))[not PY2])
+ extra_info.update(dict(regex=boost))
return installed, extra_info, failed_names
@@ -256,9 +248,9 @@ def _check_pip_env(pip_outdated=False, reset_fails=False):
names_outdated = dict({cur_item.get('name'): {k: cur_item.get(k) for k in ('version', 'latest_version',
'latest_filetype')}
for cur_item in json_loads(output)})
- to_update = set(filter_list(
+ to_update = set(list(filter(
lambda name: name in specifiers and names_outdated[name]['latest_version'] in specifiers[name],
- set(names_reco).intersection(set(names_outdated))))
+ set(names_reco).intersection(set(names_outdated)))))
# check whether to ignore direct reference specification updates if not dev mode
if not int(os.environ.get('CHK_URL_SPECIFIERS', 0)):
@@ -272,7 +264,7 @@ def _check_pip_env(pip_outdated=False, reset_fails=False):
except (BaseException, Exception):
pass
- updates_todo = ordered_dict()
+ updates_todo = dict()
todo = to_install.union(to_update, requirement_update)
for cur_name in [cur_n for cur_n in names_reco if cur_n in todo]:
updates_todo[cur_name] = dict({
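(The ordered_dict() -> dict() swap in this file is safe because py3.7+ guarantees that dict preserves insertion order:)

    updates_todo = dict()
    for name in ('pip', 'regex', 'lxml'):
        updates_todo[name] = {'version': '0'}
    # guaranteed since py3.7: iteration follows insertion order
    assert ['pip', 'regex', 'lxml'] == list(updates_todo)
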
diff --git a/sickgear/postProcessor.py b/sickgear/postProcessor.py
index 945f257b..48501e63 100644
--- a/sickgear/postProcessor.py
+++ b/sickgear/postProcessor.py
@@ -33,7 +33,7 @@ from .indexers.indexer_config import TVINFO_TVDB
from .name_parser.parser import InvalidNameException, InvalidShowException, NameParser
from _23 import decode_str
-from six import iteritems, PY2, string_types
+from six import iteritems, string_types
from sg_helpers import long_path, cmdline_runner
# noinspection PyUnreachableCode
@@ -762,7 +762,7 @@ class PostProcessor(object):
# if there is a quality available in the status then we don't need to bother guessing from the filename
if ep_obj.status in common.Quality.SNATCHED_ANY:
- old_status, ep_quality = common.Quality.splitCompositeStatus(ep_obj.status)
+ old_status, ep_quality = common.Quality.split_composite_status(ep_obj.status)
if common.Quality.UNKNOWN != ep_quality:
self._log(
u'Using "%s" quality from the old status' % common.Quality.qualityStrings[ep_quality],
@@ -779,7 +779,7 @@ class PostProcessor(object):
if not cur_name:
continue
- ep_quality = common.Quality.nameQuality(cur_name, ep_obj.show_obj.is_anime)
+ ep_quality = common.Quality.name_quality(cur_name, ep_obj.show_obj.is_anime)
quality_log = u' "%s" quality parsed from the %s %s'\
% (common.Quality.qualityStrings[ep_quality], thing, cur_name)
@@ -790,14 +790,14 @@ class PostProcessor(object):
else:
self._log(u'Found' + quality_log, logger.DEBUG)
- ep_quality = common.Quality.fileQuality(self.file_path)
+ ep_quality = common.Quality.file_quality(self.file_path)
if common.Quality.UNKNOWN != ep_quality:
self._log(u'Using "%s" quality parsed from the metadata file content of %s'
% (common.Quality.qualityStrings[ep_quality], self.file_name), logger.DEBUG)
return ep_quality
# Try guessing quality from the file name
- ep_quality = common.Quality.assumeQuality(self.file_name)
+ ep_quality = common.Quality.assume_quality(self.file_name)
self._log(u'Using guessed "%s" quality from the file name %s'
% (common.Quality.qualityStrings[ep_quality], self.file_name), logger.DEBUG)
@@ -824,12 +824,7 @@ class PostProcessor(object):
script_cmd[0] = os.path.abspath(script_cmd[0])
self._log(u'Absolute path to script: ' + script_cmd[0], logger.DEBUG)
- if PY2:
- script_cmd += [ep_obj.location.encode(sickgear.SYS_ENCODING),
- self.file_path.encode(sickgear.SYS_ENCODING)
- ]
- else:
- script_cmd += [ep_obj.location, self.file_path]
+ script_cmd += [ep_obj.location, self.file_path]
script_cmd += ([], [str(ep_obj.show_obj.tvid)])[new_call] + [
str(ep_obj.show_obj.prodid),
@@ -894,7 +889,7 @@ class PostProcessor(object):
self._log(u'SickGear snatched this episode, marking it safe to replace', logger.DEBUG)
return True
- old_ep_status, old_ep_quality = common.Quality.splitCompositeStatus(ep_obj.status)
+ old_ep_status, old_ep_quality = common.Quality.split_composite_status(ep_obj.status)
# if old episode is not downloaded/archived then it's safe
if common.DOWNLOADED != old_ep_status and common.ARCHIVED != old_ep_status:
@@ -1007,10 +1002,10 @@ class PostProcessor(object):
cur_ep_obj.release_name = self.release_name or ''
- any_qualities, best_qualities = common.Quality.splitQuality(cur_ep_obj.show_obj.quality)
- cur_status, cur_quality = common.Quality.splitCompositeStatus(cur_ep_obj.status)
+ any_qualities, best_qualities = common.Quality.split_quality(cur_ep_obj.show_obj.quality)
+ cur_status, cur_quality = common.Quality.split_composite_status(cur_ep_obj.status)
- cur_ep_obj.status = common.Quality.compositeStatus(
+ cur_ep_obj.status = common.Quality.composite_status(
**({'status': common.DOWNLOADED, 'quality': quality},
{'status': common.ARCHIVED, 'quality': quality})
[cur_ep_obj.status in common.Quality.SNATCHED_BEST or
@@ -1116,7 +1111,7 @@ class PostProcessor(object):
# set the status of the episodes
# for cur_ep_obj in [ep_obj] + ep_obj.related_ep_obj:
- # cur_ep_obj.status = common.Quality.compositeStatus(common.SNATCHED, new_ep_quality)
+ # cur_ep_obj.status = common.Quality.composite_status(common.SNATCHED, new_ep_quality)
# if the show directory doesn't exist then make it if allowed
if not os.path.isdir(ep_obj.show_obj.location) and sickgear.CREATE_MISSING_SHOW_DIRS:
@@ -1174,9 +1169,8 @@ class PostProcessor(object):
keepalive = keepalive_stop = None
if self.webhandler:
def keep_alive(webh, stop_event):
- if not PY2:
- import asyncio
- asyncio.set_event_loop(asyncio.new_event_loop())
+ import asyncio
+ asyncio.set_event_loop(asyncio.new_event_loop())
while not stop_event.is_set():
stop_event.wait(60)
webh('.')
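(The keep_alive change works because asyncio event loops are per-thread: a worker thread must create and register its own loop before any asyncio use. A minimal illustration:)

    import asyncio
    import threading

    def worker():
        # each non-main thread needs its own event loop
        asyncio.set_event_loop(asyncio.new_event_loop())
        loop = asyncio.get_event_loop()
        loop.call_soon(loop.stop)
        loop.run_forever()

    t = threading.Thread(target=worker)
    t.start()
    t.join()
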
diff --git a/sickgear/processTV.py b/sickgear/processTV.py
index 18a7a0dc..78fff9fd 100644
--- a/sickgear/processTV.py
+++ b/sickgear/processTV.py
@@ -35,8 +35,7 @@ from .history import reset_status
from .name_parser.parser import InvalidNameException, InvalidShowException, NameParser
from .sgdatetime import timestamp_near
-from _23 import filter_list, filter_iter, list_values, map_iter
-from six import iteritems, iterkeys, string_types, PY2, text_type
+from six import iteritems, iterkeys, string_types, text_type
from sg_helpers import long_path, scantree
import lib.rarfile.rarfile as rarfile
@@ -281,7 +280,7 @@ class ProcessTVShow(object):
build_path = (lambda old_path: '%s%s' % (helpers.real_path(old_path).rstrip(os.path.sep), os.path.sep))
process_path = build_path(path)
- for parent in map_iter(lambda p: build_path(p), sickgear.ROOT_DIRS.split('|')[1:]):
+ for parent in map(lambda p: build_path(p), sickgear.ROOT_DIRS.split('|')[1:]):
if process_path.startswith(parent):
return parent.rstrip(os.path.sep)
@@ -352,7 +351,7 @@ class ProcessTVShow(object):
path, dirs, files = self._get_path_dir_files(dir_name, nzb_name, pp_type)
- if sickgear.POSTPONE_IF_SYNC_FILES and any(filter_iter(helpers.is_sync_file, files)):
+ if sickgear.POSTPONE_IF_SYNC_FILES and any(filter(helpers.is_sync_file, files)):
self._log_helper(u'Found temporary sync files, skipping post process', logger.ERROR)
return self.result
@@ -367,7 +366,7 @@ class ProcessTVShow(object):
work_files += [joined]
rar_files, rarfile_history = self.unused_archives(
- path, filter_list(helpers.is_first_rar_volume, files), pp_type, process_method)
+ path, list(filter(helpers.is_first_rar_volume, files)), pp_type, process_method)
rar_content = self._unrar(path, rar_files, force)
if self.fail_detected:
self._process_failed(dir_name, nzb_name, show_obj=show_obj)
@@ -376,8 +375,8 @@ class ProcessTVShow(object):
rar_content = [x for x in rar_content if not helpers.is_link(os.path.join(path, x))]
path, dirs, files = self._get_path_dir_files(dir_name, nzb_name, pp_type)
files = [x for x in files if not helpers.is_link(os.path.join(path, x))]
- video_files = filter_list(helpers.has_media_ext, files)
- video_in_rar = filter_list(helpers.has_media_ext, rar_content)
+ video_files = list(filter(helpers.has_media_ext, files))
+ video_in_rar = list(filter(helpers.has_media_ext, rar_content))
work_files += [os.path.join(path, item) for item in rar_content]
if 0 < len(files):
@@ -438,7 +437,7 @@ class ProcessTVShow(object):
for walk_path, walk_dir, files in os.walk(os.path.join(path, directory), topdown=False):
- if sickgear.POSTPONE_IF_SYNC_FILES and any(filter_iter(helpers.is_sync_file, files)):
+ if sickgear.POSTPONE_IF_SYNC_FILES and any(filter(helpers.is_sync_file, files)):
self._log_helper(u'Found temporary sync files, skipping post process', logger.ERROR)
return self.result
@@ -452,7 +451,7 @@ class ProcessTVShow(object):
files = [x for x in files if not helpers.is_link(os.path.join(walk_path, x))]
rar_files, rarfile_history = self.unused_archives(
- walk_path, filter_list(helpers.is_first_rar_volume, files), pp_type, process_method,
+ walk_path, list(filter(helpers.is_first_rar_volume, files)), pp_type, process_method,
rarfile_history)
rar_content = self._unrar(walk_path, rar_files, force)
work_files += [os.path.join(walk_path, item) for item in rar_content]
@@ -461,8 +460,8 @@ class ProcessTVShow(object):
continue
rar_content = [x for x in rar_content if not helpers.is_link(os.path.join(walk_path, x))]
files = list(set(files + rar_content))
- video_files = filter_list(helpers.has_media_ext, files)
- video_in_rar = filter_list(helpers.has_media_ext, rar_content)
+ video_files = list(filter(helpers.has_media_ext, files))
+ video_in_rar = list(filter(helpers.has_media_ext, rar_content))
notwanted_files = [x for x in files if x not in video_files]
# Don't Link media when the media is extracted from a rar in the same path
@@ -640,7 +639,7 @@ class ProcessTVShow(object):
all_dirs += process_dir
all_files += fileList
- video_files = filter_list(helpers.has_media_ext, all_files)
+ video_files = list(filter(helpers.has_media_ext, all_files))
all_dirs.append(dir_name)
# check if the directory has at least one tv video file
@@ -660,7 +659,7 @@ class ProcessTVShow(object):
if sickgear.UNPACK and process_path and all_files:
# Search for packed release
- packed_files = filter_list(helpers.is_first_rar_volume, all_files)
+ packed_files = list(filter(helpers.is_first_rar_volume, all_files))
for packed in packed_files:
try:
@@ -719,9 +718,8 @@ class ProcessTVShow(object):
rar_content = [os.path.normpath(x.filename) for x in rar_handle.infolist() if not x.is_dir()]
renamed = self.cleanup_names(path, rar_content)
cur_unpacked = rar_content if not renamed else \
- (list(set(rar_content) - set(iterkeys(renamed))) + list_values(renamed))
- self._log_helper(u'Unpacked content: [u\'%s\']' % '\', u\''.join(map_iter(text_type,
- cur_unpacked)))
+ (list(set(rar_content) - set(iterkeys(renamed))) + list(renamed.values()))
+ self._log_helper(u'Unpacked content: [u\'%s\']' % '\', u\''.join(map(text_type, cur_unpacked)))
unpacked_files += cur_unpacked
except (rarfile.PasswordRequired, rarfile.RarWrongPassword):
self._log_helper(u'Failed to unpack archive PasswordRequired: %s' % archive, logger.ERROR)
@@ -928,10 +926,6 @@ class ProcessTVShow(object):
if force or not self.any_vid_processed:
return False
- # Needed for accessing DB with a unicode dir_name
- if PY2 and not isinstance(dir_name, text_type):
- dir_name = text_type(dir_name, 'utf_8')
-
parse_result = None
try:
parse_result = NameParser(convert=True).parse(videofile, cache_result=False)
@@ -974,8 +968,6 @@ class ProcessTVShow(object):
else:
# This is needed for videos whose name differs from dir_name
- if PY2 and not isinstance(videofile, text_type):
- videofile = text_type(videofile, 'utf_8')
sql_result = my_db.select(
'SELECT * FROM tv_episodes WHERE release_name = ?', [videofile.rpartition('.')[0]])
diff --git a/sickgear/properFinder.py b/sickgear/properFinder.py
index 9e26b98c..9d66fd5d 100644
--- a/sickgear/properFinder.py
+++ b/sickgear/properFinder.py
@@ -32,7 +32,7 @@ from .history import dateFormat
from .name_parser.parser import InvalidNameException, InvalidShowException, NameParser
from .sgdatetime import timestamp_near
-from _23 import filter_iter, filter_list, list_values, map_consume, map_list
+from _23 import map_consume
from six import string_types
# noinspection PyUnreachableCode
@@ -73,7 +73,7 @@ def search_propers(provider_proper_obj=None):
proper_sch = sickgear.proper_finder_scheduler
if None is proper_sch.start_time:
- run_in = proper_sch.lastRun + proper_sch.cycleTime - datetime.datetime.now()
+ run_in = proper_sch.last_run + proper_sch.cycle_time - datetime.datetime.now()
run_at = ', next check '
if datetime.timedelta() > run_in:
run_at += 'imminent'
@@ -131,7 +131,7 @@ def get_old_proper_level(show_obj, tvid, prodid, season, episode_numbers, old_st
[tvid, prodid, season, episode])
if not result or not isinstance(result[0]['resource'], string_types) or not result[0]['resource']:
continue
- nq = Quality.sceneQuality(result[0]['resource'], show_obj.is_anime)
+ nq = Quality.scene_quality(result[0]['resource'], show_obj.is_anime)
if nq != new_quality:
continue
try:
@@ -214,7 +214,7 @@ def load_webdl_types():
def _search_provider(cur_provider, provider_propers, aired_since_shows, recent_shows, recent_anime):
# type: (GenericProvider, List, datetime.datetime, List[Tuple[int, int]], List[Tuple[int, int]]) -> None
try:
- # we need to extent the referenced list from parameter to update the original var
+ # we need to extend the referenced list from parameter to update the original var
provider_propers.extend(cur_provider.find_propers(search_date=aired_since_shows, shows=recent_shows,
anime=recent_anime))
except AuthException as e:
@@ -251,9 +251,9 @@ def _get_proper_list(aired_since_shows, # type: datetime.datetime
# filter provider list for:
# 1. from recent search: recent search enabled providers
# 2. native proper search: active search enabled providers
- provider_list = filter_list(
+ provider_list = list(filter(
lambda p: p.is_active() and (p.enable_recentsearch, p.enable_backlog)[None is proper_dict],
- sickgear.providers.sortedProviderList())
+ sickgear.providers.sorted_sources()))
search_threads = []
if None is proper_dict:
@@ -362,8 +362,8 @@ def _get_proper_list(aired_since_shows, # type: datetime.datetime
# only keep the Proper if we already retrieved the same quality ep (don't get better/worse ones)
# check if we want this release: same quality as current, current has correct status
# restrict other release group releases to Proper's
- old_status, old_quality = Quality.splitCompositeStatus(int(sql_result[0]['status']))
- cur_proper.quality = Quality.nameQuality(cur_proper.name, parse_result.is_anime)
+ old_status, old_quality = Quality.split_composite_status(int(sql_result[0]['status']))
+ cur_proper.quality = Quality.name_quality(cur_proper.name, parse_result.is_anime)
cur_proper.is_repack, cur_proper.properlevel = Quality.get_proper_level(
parse_result.extra_info_no_name(), parse_result.version, parse_result.is_anime, check_is_repack=True)
cur_proper.proper_level = cur_proper.properlevel # local non global value
@@ -487,7 +487,7 @@ def _get_proper_list(aired_since_shows, # type: datetime.datetime
cur_provider.log_result('Propers', len(propers), '%s' % cur_provider.name)
- return list_values(propers)
+ return list(propers.values())
def _download_propers(proper_list):
@@ -507,24 +507,24 @@ def _download_propers(proper_list):
# get verified list; sort the list of unique Propers for highest proper_level, newest first
for cur_proper in sorted(
- filter_iter(lambda p: p not in consumed_proper,
- # allows Proper to fail or be rejected and another to be tried (with a different name)
- filter_iter(lambda p: _epid(p) not in downloaded_epid, proper_list)),
+ filter(lambda p: p not in consumed_proper,
+ # allows Proper to fail or be rejected and another to be tried (with a different name)
+ filter(lambda p: _epid(p) not in downloaded_epid, proper_list)),
key=operator.attrgetter('properlevel', 'date'), reverse=True): # type: Proper
epid = _epid(cur_proper)
# if the show is in our list and there hasn't been a Proper already added for that particular episode
# then add it to our list of Propers
- if epid not in map_list(_epid, verified_propers):
+ if epid not in list(map(_epid, verified_propers)):
logger.log('Proper may be useful [%s]' % cur_proper.name)
verified_propers.add(cur_proper)
else:
# use Proper with the highest level
remove_propers = set()
map_consume(lambda vp: remove_propers.add(vp),
- filter_iter(lambda p: (epid == _epid(p) and cur_proper.proper_level > p.proper_level),
- verified_propers))
+ filter(lambda p: (epid == _epid(p) and cur_proper.proper_level > p.proper_level),
+ verified_propers))
if remove_propers:
verified_propers -= remove_propers
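(The loop above keeps at most one Proper per episode id, preferring the highest proper_level. A compact sketch of that reduction, with a namedtuple standing in for the real Proper class:)

    from collections import namedtuple

    Proper = namedtuple('Proper', 'epid name proper_level')

    def best_propers(candidates):
        # one winner per episode id, highest proper_level wins
        best = {}
        for cur in candidates:
            held = best.get(cur.epid)
            if None is held or cur.proper_level > held.proper_level:
                best[cur.epid] = cur
        return list(best.values())

    assert ['a.REPACK2'] == [p.name for p in best_propers(
        [Proper(1, 'a.PROPER', 1), Proper(1, 'a.REPACK2', 2)])]
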
@@ -631,7 +631,7 @@ def get_needed_qualites(needed=None):
continue
ep_obj = show_obj.get_episode(season=cur_result['season'], episode=cur_result['episode'])
if ep_obj:
- ep_status, ep_quality = Quality.splitCompositeStatus(ep_obj.status)
+ ep_status, ep_quality = Quality.split_composite_status(ep_obj.status)
if ep_status in SNATCHED_ANY + [DOWNLOADED, ARCHIVED]:
needed.check_needed_qualities([ep_quality])
@@ -699,7 +699,7 @@ def _set_last_proper_search(when):
def next_proper_timeleft():
- return sickgear.proper_finder_scheduler.timeLeft()
+ return sickgear.proper_finder_scheduler.time_left()
def get_last_proper_search():
diff --git a/sickgear/providers/__init__.py b/sickgear/providers/__init__.py
index 4ba6218d..1695162f 100644
--- a/sickgear/providers/__init__.py
+++ b/sickgear/providers/__init__.py
@@ -22,7 +22,6 @@ from .newznab import NewznabConstants
from .. import logger
import sickgear
-from _23 import filter_list, filter_iter
from six import iteritems, itervalues
# noinspection PyUnreachableCode
@@ -30,6 +29,7 @@ if False:
from typing import AnyStr, List, Union
from .generic import GenericProvider, NZBProvider, TorrentProvider
+# noinspection PyUnresolvedReferences
__all__ = [
# usenet
'filesharingtalk',
@@ -50,47 +50,47 @@ for module in __all__:
try:
m = importlib.import_module('.' + module, 'sickgear.providers')
globals().update({n: getattr(m, n) for n in m.__all__} if hasattr(m, '__all__')
- else dict(filter_iter(lambda t: '_' != t[0][0], iteritems(m.__dict__))))
+ else dict(filter(lambda t: '_' != t[0][0], iteritems(m.__dict__))))
except ImportError as e:
if 'custom' != module[0:6]:
raise e
-def sortedProviderList():
+def sorted_sources():
# type: (...) -> List[Union[GenericProvider, NZBProvider, TorrentProvider]]
"""
return sorted provider list
:return: sorted list of providers
"""
- initialList = sickgear.providerList + sickgear.newznabProviderList + sickgear.torrentRssProviderList
- providerDict = dict(zip([x.get_id() for x in initialList], initialList))
+ initial_list = sickgear.provider_list + sickgear.newznab_providers + sickgear.torrent_rss_providers
+ provider_dict = dict(zip([x.get_id() for x in initial_list], initial_list))
- newList = []
+ new_list = []
# add all modules in the priority list, in order
for curModule in sickgear.PROVIDER_ORDER:
- if curModule in providerDict:
- newList.append(providerDict[curModule])
+ if curModule in provider_dict:
+ new_list.append(provider_dict[curModule])
if not sickgear.PROVIDER_ORDER:
- nzb = filter_list(lambda p: p.providerType == generic.GenericProvider.NZB, itervalues(providerDict))
- tor = filter_list(lambda p: p.providerType != generic.GenericProvider.NZB, itervalues(providerDict))
- newList = sorted(filter_iter(lambda p: not p.anime_only, nzb), key=lambda v: v.get_id()) + \
- sorted(filter_iter(lambda p: not p.anime_only, tor), key=lambda v: v.get_id()) + \
- sorted(filter_iter(lambda p: p.anime_only, nzb), key=lambda v: v.get_id()) + \
- sorted(filter_iter(lambda p: p.anime_only, tor), key=lambda v: v.get_id())
+ nzb = list(filter(lambda p: p.providerType == generic.GenericProvider.NZB, itervalues(provider_dict)))
+ tor = list(filter(lambda p: p.providerType != generic.GenericProvider.NZB, itervalues(provider_dict)))
+ new_list = sorted(filter(lambda p: not p.anime_only, nzb), key=lambda v: v.get_id()) + \
+ sorted(filter(lambda p: not p.anime_only, tor), key=lambda v: v.get_id()) + \
+ sorted(filter(lambda p: p.anime_only, nzb), key=lambda v: v.get_id()) + \
+ sorted(filter(lambda p: p.anime_only, tor), key=lambda v: v.get_id())
# add any modules that are missing from that list
- for curModule in providerDict:
- if providerDict[curModule] not in newList:
- newList.append(providerDict[curModule])
+ for curModule in provider_dict:
+ if provider_dict[curModule] not in new_list:
+ new_list.append(provider_dict[curModule])
- return newList
+ return new_list
-def makeProviderList():
- return [x.provider for x in [getProviderModule(y) for y in __all__] if x]
+def provider_modules():
+ return [x.provider for x in [_get_module_by_name(y) for y in __all__] if x]
def generic_provider_name(n):
@@ -103,7 +103,7 @@ def generic_provider_url(u):
return u.strip().strip('/').lower().replace('https', 'http')
-def make_unique_list(p_list, d_list=None):
+def _make_unique_list(p_list, d_list=None):
# type: (List, List) -> List
"""
remove provider duplicates
@@ -119,7 +119,7 @@ def make_unique_list(p_list, d_list=None):
default_names = [d.name for d in d_list or []]
- p_list = filter_iter(lambda _x: _x.get_id() not in ['sick_beard_index'], p_list)
+ p_list = filter(lambda _x: _x.get_id() not in ['sick_beard_index'], p_list)
for cur_p in p_list:
g_name = generic_provider_name(cur_p.name)
g_url = generic_provider_url(cur_p.url)
@@ -136,32 +136,32 @@ def make_unique_list(p_list, d_list=None):
return new_p_list
-def getNewznabProviderList(data):
+def newznab_source_list(data):
# type: (AnyStr) -> List
- defaultList = [makeNewznabProvider(x) for x in getDefaultNewznabProviders().split('!!!')]
- providerList = make_unique_list(filter_list(lambda _x: _x, [makeNewznabProvider(x) for x in data.split('!!!')]),
- defaultList)
+ default_list = [_create_newznab_source(x) for x in _default_newznab_sources().split('!!!')]
+ provider_list = _make_unique_list(list(filter(
+ lambda _x: _x, [_create_newznab_source(x) for x in data.split('!!!')])), default_list)
- providerDict = dict(zip([x.name for x in providerList], providerList))
+ provider_dict = dict(zip([x.name for x in provider_list], provider_list))
- for curDefault in defaultList:
+ for curDefault in default_list:
if not curDefault:
continue
- if curDefault.name not in providerDict:
+ if curDefault.name not in provider_dict:
curDefault.default = True
- providerList.append(curDefault)
+ provider_list.append(curDefault)
else:
- providerDict[curDefault.name].default = True
+ provider_dict[curDefault.name].default = True
for k in ('name', 'url', 'needs_auth', 'search_mode', 'search_fallback',
'enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog',
'server_type'):
- setattr(providerDict[curDefault.name], k, getattr(curDefault, k))
+ setattr(provider_dict[curDefault.name], k, getattr(curDefault, k))
- return filter_list(lambda _x: _x, providerList)
+ return list(filter(lambda _x: _x, provider_list))
-def makeNewznabProvider(config_string):
+def _create_newznab_source(config_string):
if not config_string:
return None
@@ -182,19 +182,19 @@ def makeNewznabProvider(config_string):
newznab_module = sys.modules['sickgear.providers.newznab']
- newProvider = newznab_module.NewznabProvider(name, url, **params)
- newProvider.enabled = '1' == enabled
+ new_provider = newznab_module.NewznabProvider(name, url, **params)
+ new_provider.enabled = '1' == enabled
- return newProvider
+ return new_provider
-def getTorrentRssProviderList(data):
- providerList = filter_list(lambda _x: _x, [makeTorrentRssProvider(x) for x in data.split('!!!')])
+def torrent_rss_source_list(data):
+ provider_list = list(filter(lambda _x: _x, [_create_torrent_rss_source(x) for x in data.split('!!!')]))
- return filter_list(lambda _x: _x, providerList)
+ return list(filter(lambda _x: _x, provider_list))
-def makeTorrentRssProvider(config_string):
+def _create_torrent_rss_source(config_string):
if not config_string:
return None
@@ -218,25 +218,27 @@ def makeTorrentRssProvider(config_string):
return None
try:
- torrentRss = sys.modules['sickgear.providers.rsstorrent']
+ torrent_rss = sys.modules['sickgear.providers.rsstorrent']
except (BaseException, Exception):
return
- newProvider = torrentRss.TorrentRssProvider(name, url, cookies, search_mode, search_fallback, enable_recentsearch,
- enable_backlog)
- newProvider.enabled = '1' == enabled
+ new_provider = torrent_rss.TorrentRssProvider(name, url, cookies, search_mode, search_fallback, enable_recentsearch,
+ enable_backlog)
+ new_provider.enabled = '1' == enabled
- return newProvider
+ return new_provider
-def getDefaultNewznabProviders():
- return '!!!'.join(['NZBgeek|https://api.nzbgeek.info/||5030,5040|0|eponly|0|0|0',
- 'DrunkenSlug|https://api.drunkenslug.com/||5030,5040|0|eponly|0|0|0',
- 'NinjaCentral|https://ninjacentral.co.za/||5030,5040|0|eponly|0|0|0',
- ])
+def _default_newznab_sources():
+ return '!!!'.join([
+ '|'.join(_src) for _src in
+ (['NZBgeek', 'https://api.nzbgeek.info/', '', '5030,5040', '0', 'eponly', '0', '0', '0'],
+ ['DrunkenSlug', 'https://api.drunkenslug.com/', '', '5030,5040', '0', 'eponly', '0', '0', '0'],
+ ['NinjaCentral', 'https://ninjacentral.co.za/', '', '5030,5040', '0', 'eponly', '0', '0', '0'],
+ )])
-def getProviderModule(name):
+def _get_module_by_name(name):
prefix, cprov, name = 'sickgear.providers.', 'motsuc'[::-1], name.lower()
if name in __all__ and prefix + name in sys.modules:
return sys.modules[prefix + name]
@@ -245,11 +247,11 @@ def getProviderModule(name):
raise Exception('Can\'t find %s%s in providers' % (prefix, name))
-def getProviderClass(provider_id):
- providerMatch = [x for x in
- sickgear.providerList + sickgear.newznabProviderList + sickgear.torrentRssProviderList if
- provider_id == x.get_id()]
+def get_by_id(provider_id):
+ provider_match = [x for x in
+ sickgear.provider_list + sickgear.newznab_providers + sickgear.torrent_rss_providers if
+ provider_id == x.get_id()]
- if 1 != len(providerMatch):
+ if 1 != len(provider_match):
return None
- return providerMatch[0]
+ return provider_match[0]
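(The newznab source lists round-trip through a flat config string: '!!!' separates sources and '|' separates fields, which is why _default_newznab_sources now builds each entry as a field list. A hedged sketch of the encode/decode pair, with the field layout taken from the defaults above:)

    def encode_sources(sources):
        # each source is a list of string fields: '|' joins fields,
        # '!!!' joins sources
        return '!!!'.join('|'.join(cur) for cur in sources)

    def decode_sources(data):
        return [cur.split('|') for cur in data.split('!!!') if cur]

    srcs = [['NZBgeek', 'https://api.nzbgeek.info/', '', '5030,5040',
             '0', 'eponly', '0', '0', '0']]
    assert srcs == decode_sources(encode_sources(srcs))
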
diff --git a/sickgear/providers/alpharatio.py b/sickgear/providers/alpharatio.py
index 4b4ed911..eb4e9a2e 100644
--- a/sickgear/providers/alpharatio.py
+++ b/sickgear/providers/alpharatio.py
@@ -25,7 +25,6 @@ from .. import logger
from ..helpers import try_int
from bs4_parser import BS4Parser
-from _23 import unidecode
from six import iteritems
@@ -63,7 +62,6 @@ class AlphaRatioProvider(generic.TorrentProvider):
rc = dict([(k, re.compile('(?i)' + v)) for (k, v) in iteritems({'info': 'view', 'get': 'download'})])
for mode in search_params:
for search_string in search_params[mode]:
- search_string = unidecode(search_string)
search_url = self.urls['search'] % (search_string, ('&freetorrent=1', '')[not self.freeleech])
html = self.get_url(search_url)
diff --git a/sickgear/providers/bithdtv.py b/sickgear/providers/bithdtv.py
index 4e7b4be9..86e37964 100644
--- a/sickgear/providers/bithdtv.py
+++ b/sickgear/providers/bithdtv.py
@@ -23,7 +23,6 @@ from .. import logger
from ..helpers import try_int
from bs4_parser import BS4Parser
-from _23 import unidecode
from six import iteritems
@@ -67,7 +66,6 @@ class BitHDTVProvider(generic.TorrentProvider):
for mode in search_params:
for search_string in search_params[mode]:
- search_string = unidecode(search_string)
search_url = self.urls['search'] % (search_string, self._categories_string(mode, '%s', ','))
html = self.get_url(search_url, timeout=90)
diff --git a/sickgear/providers/blutopia.py b/sickgear/providers/blutopia.py
index 0ef6bdb2..c8458a22 100644
--- a/sickgear/providers/blutopia.py
+++ b/sickgear/providers/blutopia.py
@@ -25,7 +25,6 @@ from .. import logger
from ..helpers import try_int
from bs4_parser import BS4Parser
-from _23 import filter_iter, unidecode
from six import iteritems
@@ -107,7 +106,6 @@ class BlutopiaProvider(generic.TorrentProvider):
return results
for search_string in search_params[mode]:
- search_string = unidecode(search_string)
search_url = self.urls['search'] % (
self._token, search_string.replace('.', ' '), self._categories_string(template=''), '', '', '')
@@ -136,7 +134,7 @@ class BlutopiaProvider(generic.TorrentProvider):
marked = ','.join([x.attrs.get('data-original-title', '').lower() for x in tr.find_all(
'i', attrs={'class': ['text-gold', 'fa-diamond', 'fa-certificate']})])
# noinspection PyTypeChecker
- munged = ''.join(filter_iter(marked.__contains__, ['free', 'double', 'feat']))
+ munged = ''.join(filter(marked.__contains__, ['free', 'double', 'feat']))
# noinspection PyUnboundLocalVariable
if ((non_marked and rc['filter'].search(munged)) or
(not non_marked and not rc['filter'].search(munged))):
diff --git a/sickgear/providers/btn.py b/sickgear/providers/btn.py
index f5373228..be0fb5da 100644
--- a/sickgear/providers/btn.py
+++ b/sickgear/providers/btn.py
@@ -32,7 +32,6 @@ from bs4_parser import BS4Parser
from exceptions_helper import AuthException
from json_helper import json_dumps
-from _23 import unidecode
from six import iteritems
@@ -201,7 +200,6 @@ class BTNProvider(generic.TorrentProvider):
del (self.session.headers['Referer'])
self.auth_html = True
- search_string = unidecode(search_string)
search_url = self.urls['search'] % (search_string, self._categories_string(mode, 'filter_cat[%s]=1'))
html = self.get_url(search_url, use_tmr_limit=False)
@@ -369,7 +367,7 @@ class BTNCache(tvcache.TVCache):
def _cache_data(self, **kwargs):
- return self.provider.cache_data(age=self._getLastUpdate().timetuple(), min_time=self.update_iv)
+ return self.provider.cache_data(age=self._get_last_update().timetuple(), min_time=self.update_iv)
provider = BTNProvider()
diff --git a/sickgear/providers/eztv.py b/sickgear/providers/eztv.py
index 86bad378..5a723b1b 100644
--- a/sickgear/providers/eztv.py
+++ b/sickgear/providers/eztv.py
@@ -23,7 +23,7 @@ from .. import logger
from ..helpers import try_int
from bs4_parser import BS4Parser
-from _23 import b64decodestring, unidecode
+from _23 import b64decodestring
from six import iteritems
@@ -62,7 +62,6 @@ class EztvProvider(generic.TorrentProvider):
for mode in search_params:
for search_string in search_params[mode]:
- search_string = unidecode(search_string)
search_url = self.urls['browse'] % search_string if 'Cache' == mode else \
self.urls['search'] % search_string.replace('.', ' ')
diff --git a/sickgear/providers/fano.py b/sickgear/providers/fano.py
index 67eb8395..ebb34fc8 100644
--- a/sickgear/providers/fano.py
+++ b/sickgear/providers/fano.py
@@ -25,7 +25,6 @@ from .. import logger
from ..helpers import try_int
from bs4_parser import BS4Parser
-from _23 import unidecode
from six import iteritems
FLTAG = r'\s+ ]+%s[^<]+
diff --git a/sickgear/providers/generic.py b/sickgear/providers/generic.py
@@ -672,7 +672,7 @@ class GenericProvider(object):
rxc_delim = re.compile(r'[&;]')
rxc_skip_key = re.compile(r'clearance')
- for cur_p in sickgear.providers.sortedProviderList():
+ for cur_p in sickgear.providers.sorted_sources():
pid = cur_p.get_id()
auths = set([])
for cur_kt in ['password', 'passkey', 'api_key', 'key', 'digest', 'cookies', 'hash']:
@@ -755,7 +755,7 @@ class GenericProvider(object):
def is_enabled(self):
# type: (...) -> bool
"""
- This should be overridden and should return the config setting eg. sickgear.MYPROVIDER
+ This should be overridden and should return the config setting e.g. sickgear.MYPROVIDER
"""
return self.enabled
@@ -804,7 +804,7 @@ class GenericProvider(object):
try:
btih = None
try:
- btih = re.findall(r'urn:btih:([\w]{32,40})', result.url)[0]
+ btih = re.findall(r'urn:btih:(\w{32,40})', result.url)[0]
if 32 == len(btih):
btih = make_btih(btih)
except (BaseException, Exception):
@@ -927,7 +927,7 @@ class GenericProvider(object):
def search_rss(self, ep_obj_list):
# type: (List[TVEpisode]) -> Dict[TVEpisode, SearchResult]
- return self.cache.findNeededEpisodes(ep_obj_list)
+ return self.cache.find_needed_episodes(ep_obj_list)
def get_quality(self, item, anime=False):
# type: (etree.Element, bool) -> int
@@ -939,7 +939,7 @@ class GenericProvider(object):
:return: a Quality value obtained from the node's data
"""
(title, url) = self._title_and_url(item)
- quality = Quality.sceneQuality(title, anime)
+ quality = Quality.scene_quality(title, anime)
return quality
def _search_provider(self, search_params, search_mode='eponly', epcount=0, age=0, **kwargs):
@@ -978,11 +978,6 @@ class GenericProvider(object):
def _link(self, url, url_tmpl=None, url_quote=None):
url = '%s' % url # ensure string type
if url and not re.match('(?i)magnet:', url):
- if PY2:
- try:
- url = url.encode('utf-8')
- except (BaseException, Exception):
- pass
             url = url.strip().replace('&amp;', '&')
if not url:
url = ''
@@ -1013,12 +1008,12 @@ class GenericProvider(object):
all_cells = all_cells if any(all_cells) else header_row.find_all('td')
headers = [re.sub(
- r'[\s]+', '',
+ r'\s+', '',
((any([cell.get_text()]) and any([rc[x].search(cell.get_text()) for x in iterkeys(rc)]) and cell.get_text())
or (cell.attrs.get('id') and any([rc[x].search(cell['id']) for x in iterkeys(rc)]) and cell['id'])
or (cell.attrs.get('title') and any([rc[x].search(cell['title']) for x in iterkeys(rc)]) and cell['title'])
- or next(iter(set(filter_iter(lambda rz: any([rz]), [
- next(iter(set(filter_iter(lambda ry: any([ry]), [
+ or next(iter(set(filter(lambda rz: any([rz]), [
+ next(iter(set(filter(lambda ry: any([ry]), [
cell.find(tag, **p) for p in [{attr: rc[x]} for x in iterkeys(rc)]]))), {}).get(attr)
for (tag, attr) in [
('img', 'title'), ('img', 'src'), ('i', 'title'), ('i', 'class'),
@@ -1035,7 +1030,7 @@ class GenericProvider(object):
for k, r in iteritems(rc):
if k not in results:
- for name in filter_iter(lambda v: any([v]) and r.search(v), all_headers[::-1]):
+ for name in filter(lambda v: any([v]) and r.search(v), all_headers[::-1]):
results[k] = all_headers.index(name) - len(all_headers)
break
@@ -1108,7 +1103,7 @@ class GenericProvider(object):
search_list = []
for cur_ep_obj in ep_obj_list:
# search cache for episode result
- cache_result = self.cache.searchCache(cur_ep_obj, manual_search) # type: List[SearchResult]
+ cache_result = self.cache.search_cache(cur_ep_obj, manual_search) # type: List[SearchResult]
if cache_result:
if cur_ep_obj.episode not in results:
results[cur_ep_obj.episode] = cache_result
@@ -1353,7 +1348,7 @@ class GenericProvider(object):
:param kwargs:
:return:
"""
- results = self.cache.listPropers(search_date)
+ results = self.cache.list_propers(search_date)
return [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time']), self.show_obj) for x in
results]
@@ -1384,11 +1379,11 @@ class GenericProvider(object):
:param count: count of successfully processed items
:param url: source url of item(s)
"""
- stats = map_list(lambda arg: ('_reject_%s' % arg[0], arg[1]),
- filter_iter(lambda _arg: all([getattr(self, '_reject_%s' % _arg[0], None)]),
-                                     (('seed', '%s …

for mode in search_params:
for search_string in search_params[mode]:
- search_string = unidecode(search_string)
search_url = self.urls['search'] % search_string
# fetches 15 results by default, and up to 100 if allowed in user profile
diff --git a/sickgear/providers/nebulance.py b/sickgear/providers/nebulance.py
index 99feacd0..f8005eca 100644
--- a/sickgear/providers/nebulance.py
+++ b/sickgear/providers/nebulance.py
@@ -25,7 +25,7 @@ from ..helpers import try_int
from bs4_parser import BS4Parser
from json_helper import json_dumps
-from _23 import filter_list, unidecode, unquote_plus
+from _23 import unquote_plus
from six import iteritems
@@ -83,7 +83,6 @@ class NebulanceProvider(generic.TorrentProvider):
rc = dict([(k, re.compile('(?i)' + v)) for (k, v) in iteritems({'nodots': r'[\.\s]+'})])
for mode in search_params:
for search_string in search_params[mode]:
- search_string = unidecode(search_string)
search_url = self.urls['browse'] % (self.user_authkey, self.user_passkey)
if 'Cache' != mode:
@@ -164,7 +163,7 @@ class NebulanceProvider(generic.TorrentProvider):
('(?i)%s(Proper)%s' % (bl, br), r'`\1`'), (r'%s\s*%s' % (bl, br), '`')]:
title = re.sub(r[0], r[1], title)
- grp = filter_list(lambda rn: '.release' in rn.lower(), item['tags'])
+ grp = list(filter(lambda rn: '.release' in rn.lower(), item['tags']))
title = '%s%s-%s' % (('', t[0])[1 < len(t)], title,
(any(grp) and grp[0] or 'nogrp').upper().replace('.RELEASE', ''))
@@ -186,7 +185,7 @@ class NebulanceProvider(generic.TorrentProvider):
for mode in search_params:
for search_string in search_params[mode]:
- search_string = unquote_plus(unidecode(search_string))
+ search_string = unquote_plus(search_string)
params = {'release': search_string}
if 'Cache' == mode:
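A small illustration of the release-group handling kept in the nebulance hunk above: take the first `*.release` tag if any, else fall back to `nogrp` (the sample `item` is hypothetical):

```python
item = {'tags': ['sd', 'Grp.Release']}
grp = list(filter(lambda rn: '.release' in rn.lower(), item['tags']))
suffix = (any(grp) and grp[0] or 'nogrp').upper().replace('.RELEASE', '')
assert 'GRP' == suffix
```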
diff --git a/sickgear/providers/newznab.py b/sickgear/providers/newznab.py
index 69fbfdd4..fc701941 100644
--- a/sickgear/providers/newznab.py
+++ b/sickgear/providers/newznab.py
@@ -347,7 +347,7 @@ class NewznabProvider(generic.NZBProvider):
caps[NewznabConstants.SEARCH_SEASON] = 'season'
if NewznabConstants.SEARCH_EPISODE not in caps or not caps.get(NewznabConstants.SEARCH_EPISODE):
caps[NewznabConstants.SEARCH_TEXT] = 'ep'
- if (TVINFO_TVRAGE not in caps or not caps.get(TVINFO_TVRAGE)):
+ if TVINFO_TVRAGE not in caps or not caps.get(TVINFO_TVRAGE):
caps[TVINFO_TVRAGE] = 'rid'
if NewznabConstants.SEARCH_TEXT not in caps or not caps.get(NewznabConstants.SEARCH_TEXT):
caps[NewznabConstants.SEARCH_TEXT] = 'q'
@@ -645,7 +645,7 @@ class NewznabProvider(generic.NZBProvider):
if not getattr(s, 'wanted_quality', None):
# this should not happen, the creation is missing for the search in this case
logger.log('wanted_quality property was missing for search, creating it', logger.WARNING)
- ep_status, ep_quality = Quality.splitCompositeStatus(ep_obj.status)
+ ep_status, ep_quality = Quality.split_composite_status(ep_obj.status)
s.wanted_quality = get_wanted_qualities(ep_obj, ep_status, ep_quality, unaired=True)
needed.check_needed_qualities(s.wanted_quality)
@@ -682,14 +682,14 @@ class NewznabProvider(generic.NZBProvider):
needed.check_needed_types(ep_obj.show_obj)
if not ep_obj.show_obj.is_anime and not ep_obj.show_obj.is_sports:
if not getattr(ep_obj, 'wanted_quality', None):
- ep_status, ep_quality = Quality.splitCompositeStatus(ep_obj.status)
+ ep_status, ep_quality = Quality.split_composite_status(ep_obj.status)
ep_obj.wanted_quality = get_wanted_qualities(ep_obj, ep_status, ep_quality, unaired=True)
needed.check_needed_qualities(ep_obj.wanted_quality)
else:
if not ep_obj.show_obj.is_anime and not ep_obj.show_obj.is_sports:
for cur_ep_obj in ep_obj_list:
if not getattr(cur_ep_obj, 'wanted_quality', None):
- ep_status, ep_quality = Quality.splitCompositeStatus(cur_ep_obj.status)
+ ep_status, ep_quality = Quality.split_composite_status(cur_ep_obj.status)
cur_ep_obj.wanted_quality = get_wanted_qualities(cur_ep_obj, ep_status, ep_quality,
unaired=True)
needed.check_needed_qualities(cur_ep_obj.wanted_quality)
@@ -733,7 +733,7 @@ class NewznabProvider(generic.NZBProvider):
continue
# search cache for episode result
- cache_result = self.cache.searchCache(ep_obj, manual_search)
+ cache_result = self.cache.search_cache(ep_obj, manual_search)
if cache_result:
if ep_obj.episode not in results:
results[ep_obj.episode] = cache_result
@@ -1070,7 +1070,7 @@ class NewznabProvider(generic.NZBProvider):
:param kwargs:
:return:
"""
- cache_results = self.cache.listPropers(search_date)
+ cache_results = self.cache.list_propers(search_date)
results = [classes.Proper(x['name'], x['url'],
datetime.datetime.fromtimestamp(x['time']), self.show_obj) for x in cache_results]
@@ -1183,10 +1183,10 @@ class NewznabCache(tvcache.TVCache):
root = elem
return root, ns
- def updateCache(self,
- needed=NeededQualities(need_all=True), # type: NeededQualities
- **kwargs
- ):
+ def update_cache(self,
+ needed=NeededQualities(need_all=True), # type: NeededQualities
+ **kwargs
+ ):
"""
         :param needed: needed qualities class
@@ -1195,7 +1195,7 @@ class NewznabCache(tvcache.TVCache):
if 4489 != sickgear.RECENTSEARCH_INTERVAL or self.should_update():
n_spaces = {}
try:
- check = self._checkAuth()
+ check = self.check_auth()
if isinstance(check, bool) and not check:
items = None
else:
@@ -1205,12 +1205,12 @@ class NewznabCache(tvcache.TVCache):
items = None
if items:
- self._clearCache()
+ self.clear_cache()
# parse data
cl = []
for item in items:
- ci = self._parseItem(n_spaces, item)
+ ci = self.parse_item(n_spaces, item)
if None is not ci:
cl.append(ci)
@@ -1219,7 +1219,7 @@ class NewznabCache(tvcache.TVCache):
my_db.mass_action(cl)
# set updated as time the attempt to fetch data is
- self.setLastUpdate()
+ self.set_last_update()
@staticmethod
def parse_ids(item, ns):
@@ -1240,7 +1240,7 @@ class NewznabCache(tvcache.TVCache):
return ids
     # overwrite method that parses the rageid from the newznab feed
- def _parseItem(self,
+ def parse_item(self,
ns, # type: Dict
item # type: etree.Element
): # type: (...) -> Union[List[AnyStr, List[Any]], None]
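For orientation, a skeleton of the renamed refresh flow in `NewznabCache.update_cache` above; the collaborator bodies are stubs and assumptions, only the call order and the renames come from the diff:

```python
class CacheSketch(object):
    def check_auth(self): return True             # was _checkAuth
    def clear_cache(self): pass                   # was _clearCache
    def parse_item(self, ns, item): return None   # was _parseItem
    def set_last_update(self): pass               # was setLastUpdate

    def update_cache(self, items=()):             # was updateCache
        if not self.check_auth():
            items = None
        if items:
            self.clear_cache()
            # non-None parse results would be handed to my_db.mass_action(cl)
            cl = [ci for ci in (self.parse_item({}, cur) for cur in items) if None is not ci]
        self.set_last_update()                    # record the fetch attempt time

CacheSketch().update_cache()
```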
diff --git a/sickgear/providers/nyaa.py b/sickgear/providers/nyaa.py
index 8b2bd5a9..65156509 100644
--- a/sickgear/providers/nyaa.py
+++ b/sickgear/providers/nyaa.py
@@ -22,7 +22,6 @@ from .. import logger
from ..helpers import try_int
from bs4_parser import BS4Parser
-from _23 import unidecode
from six import iteritems
@@ -51,7 +50,6 @@ class NyaaProvider(generic.TorrentProvider):
rc = dict([(k, re.compile('(?i)' + v)) for (k, v) in iteritems({'info': 'view', 'get': '(?:torrent|magnet:)'})])
for mode in search_params:
for search_string in search_params[mode]:
- search_string = unidecode(search_string)
search_url = self.urls['search'] % ((0, 2)[self.confirmed], search_string.replace('.', ' '))
html = self.get_url(search_url)
diff --git a/sickgear/providers/pretome.py b/sickgear/providers/pretome.py
index 87acb764..23d067dd 100644
--- a/sickgear/providers/pretome.py
+++ b/sickgear/providers/pretome.py
@@ -23,7 +23,6 @@ from .. import logger
from ..helpers import try_int
from bs4_parser import BS4Parser
-from _23 import unidecode
from six import iteritems
@@ -58,7 +57,6 @@ class PreToMeProvider(generic.TorrentProvider):
rc = dict([(k, re.compile('(?i)' + v)) for (k, v) in iteritems({'info': 'details', 'get': 'download'})])
for mode in search_params:
for search_string in search_params[mode]:
- search_string = unidecode(search_string)
search_url = self.urls['search'] % search_string
html = self.get_url(search_url)
diff --git a/sickgear/providers/privatehd.py b/sickgear/providers/privatehd.py
index 5f8fbdf0..7ba28252 100644
--- a/sickgear/providers/privatehd.py
+++ b/sickgear/providers/privatehd.py
@@ -25,7 +25,6 @@ from .. import logger
from ..helpers import try_int
from bs4_parser import BS4Parser
-from _23 import filter_iter, unidecode
from six import iteritems
@@ -93,7 +92,6 @@ class PrivateHDProvider(generic.TorrentProvider):
return results
for search_string in search_params[mode]:
- search_string = unidecode(search_string)
search_url = self.urls['search'] % (
'+'.join(search_string.split()), self._categories_string(mode, ''))
@@ -120,7 +118,7 @@ class PrivateHDProvider(generic.TorrentProvider):
if any(self.filter):
marked = ','.join([x.attrs.get('title', '').lower() for x in tr.find_all(
'i', attrs={'class': ['fa-star', 'fa-diamond', 'fa-star-half-o']})])
- munged = ''.join(filter_iter(marked.__contains__, ['free', 'half', 'double']))
+ munged = ''.join(filter(marked.__contains__, ['free', 'half', 'double']))
# noinspection PyUnboundLocalVariable
if ((non_marked and rc['filter'].search(munged)) or
(not non_marked and not rc['filter'].search(munged))):
diff --git a/sickgear/providers/ptf.py b/sickgear/providers/ptf.py
index 3870b82f..da1c94f2 100644
--- a/sickgear/providers/ptf.py
+++ b/sickgear/providers/ptf.py
@@ -26,7 +26,6 @@ from .. import logger
from ..helpers import anon_url, try_int
from bs4_parser import BS4Parser
-from _23 import unidecode
from six import iteritems
@@ -82,7 +81,6 @@ class PTFProvider(generic.TorrentProvider):
for mode in search_params:
rc['cats'] = re.compile('(?i)cat=(?:%s)' % self._categories_string(mode, template='', delimiter='|'))
for search_string in search_params[mode]:
- search_string = unidecode(search_string)
search_url = self.urls['search'] % ('+'.join(search_string.split()), self._categories_string(mode))
html = self.get_url(search_url)
diff --git a/sickgear/providers/revtt.py b/sickgear/providers/revtt.py
index 0ee68d6e..50527f39 100644
--- a/sickgear/providers/revtt.py
+++ b/sickgear/providers/revtt.py
@@ -23,7 +23,6 @@ from .. import logger
from ..helpers import try_int
from bs4_parser import BS4Parser
-from _23 import unidecode
from six import iteritems
@@ -61,7 +60,6 @@ class RevTTProvider(generic.TorrentProvider):
for mode in search_params:
rc['cats'] = re.compile('(?i)cat=(?:%s)' % self._categories_string(mode, template='', delimiter='|'))
for search_string in search_params[mode]:
- search_string = unidecode(search_string)
html = self.get_url(self.urls['search'] % ('+'.join(search_string.split()),
self._categories_string(mode)))
diff --git a/sickgear/providers/scenehd.py b/sickgear/providers/scenehd.py
index 4b982fe0..74da4457 100644
--- a/sickgear/providers/scenehd.py
+++ b/sickgear/providers/scenehd.py
@@ -23,7 +23,6 @@ from .. import logger
from ..helpers import try_int
from bs4_parser import BS4Parser
-from _23 import unidecode
from six import iteritems
@@ -62,7 +61,6 @@ class SceneHDProvider(generic.TorrentProvider):
'nuked': 'nuke', 'filter': 'free'})])
for mode in search_params:
for search_string in search_params[mode]:
- search_string = unidecode(search_string)
search_url = self.urls['search'] % (search_string, self._categories_string(mode, '%s', ','))
html = self.get_url(search_url, timeout=90)
diff --git a/sickgear/providers/scenetime.py b/sickgear/providers/scenetime.py
index 96d14262..f4f783fb 100644
--- a/sickgear/providers/scenetime.py
+++ b/sickgear/providers/scenetime.py
@@ -23,7 +23,6 @@ from .. import logger
from ..helpers import anon_url, try_int
from bs4_parser import BS4Parser
-from _23 import unidecode
from six import iteritems
@@ -70,7 +69,6 @@ class SceneTimeProvider(generic.TorrentProvider):
urls = []
for search_string in search_params[mode]:
urls += [[]]
- search_string = unidecode(search_string)
search_url = self.urls['search'] % (self._categories_string(),
'+'.join(search_string.replace('.', ' ').split()),
('', '&freeleech=on')[self.freeleech])
diff --git a/sickgear/providers/shazbat.py b/sickgear/providers/shazbat.py
index b0187e49..3121924d 100644
--- a/sickgear/providers/shazbat.py
+++ b/sickgear/providers/shazbat.py
@@ -26,7 +26,7 @@ from .. import logger
from ..helpers import try_int
from bs4_parser import BS4Parser
-from _23 import unidecode, unquote_plus
+from _23 import unquote_plus
from six import iteritems, text_type
@@ -75,7 +75,6 @@ class ShazbatProvider(generic.TorrentProvider):
if self.should_skip():
return results
else:
- search_string = unidecode(search_string)
search_string = search_string.replace(show_detail, '').strip()
search_url = self.urls['search'] % search_string
html = self.get_url(search_url)
diff --git a/sickgear/providers/showrss.py b/sickgear/providers/showrss.py
index b630b2fb..e9356e14 100644
--- a/sickgear/providers/showrss.py
+++ b/sickgear/providers/showrss.py
@@ -25,7 +25,7 @@ from .. import logger
from ..helpers import sanitize_scene_name
from bs4_parser import BS4Parser
-from _23 import decode_str, filter_list, html_unescape, list_keys, list_values, unidecode
+from _23 import decode_str, html_unescape
from six import iteritems, iterkeys
@@ -51,11 +51,11 @@ class ShowRSSProvider(generic.TorrentProvider):
def logged_in(self, y):
if all([None is y or 'logout' in y,
- bool(filter_list(lambda c: 'remember_web_' in c, iterkeys(self.session.cookies)))]):
+ bool(list(filter(lambda c: 'remember_web_' in c, iterkeys(self.session.cookies))))]):
if None is not y:
                 self.shows = dict(re.findall(r'<option value="(\d+)">(.*?)</option>', y))
for k, v in iteritems(self.shows):
- self.shows[k] = sanitize_scene_name(html_unescape(unidecode(decode_str(v))))
+ self.shows[k] = sanitize_scene_name(html_unescape(decode_str(v)))
return True
return False
@@ -74,13 +74,12 @@ class ShowRSSProvider(generic.TorrentProvider):
if 'Cache' == mode:
search_url = self.urls['browse']
else:
- search_string = unidecode(search_string)
- show_name = filter_list(lambda x: x.lower() == re.sub(r'\s.*', '', search_string.lower()),
- list_values(self.shows))
+ show_name = list(filter(lambda x: x.lower() == re.sub(r'\s.*', '', search_string.lower()),
+ list(self.shows.values())))
if not show_name:
continue
- search_url = self.urls['search'] % list_keys(self.shows)[
- list_values(self.shows).index(show_name[0])]
+ search_url = self.urls['search'] % list(self.shows)[
+ list(self.shows.values()).index(show_name[0])]
if search_url in urls:
continue
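The `list_keys`/`list_values` removals above rely on py3 dict views. A short sketch of the equivalences (the shim names are from the diff; their assumed behaviour is noted in comments):

```python
shows = {'1': 'Some Show', '2': 'Other Show'}
assert ['1', '2'] == list(shows)                  # was list_keys(shows)
assert 'Some Show' == list(shows.values())[0]     # was list_values(shows)
# keys and values iterate in the same insertion order on py3,
# so the index() pairing used by showrss above stays valid:
assert '2' == list(shows)[list(shows.values()).index('Other Show')]
```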
diff --git a/sickgear/providers/snowfl.py b/sickgear/providers/snowfl.py
index e78f1f78..25f46c3a 100644
--- a/sickgear/providers/snowfl.py
+++ b/sickgear/providers/snowfl.py
@@ -25,7 +25,7 @@ from .. import logger
from ..helpers import try_int
from json_helper import json_loads
-from _23 import b64encodestring, filter_iter, map_list, quote, unidecode
+from _23 import b64encodestring, quote
from six import iteritems
# noinspection PyUnreachableCode
@@ -74,7 +74,7 @@ class SnowflProvider(generic.TorrentProvider):
params = dict(token=token[0], ent=token[1])
if 'Cache' != mode:
- params.update({'ss': quote_fx(unidecode(search_string))})
+ params.update({'ss': quote_fx(search_string)})
data_json = None
vals = [i for i in range(3, 8)]
@@ -92,13 +92,13 @@ class SnowflProvider(generic.TorrentProvider):
if self.should_skip():
return results
- for item in filter_iter(lambda di: re.match('(?i).*?(tv|television)',
- di.get('type', '') or di.get('category', ''))
- and (not self.confirmed or di.get('trusted') or di.get('verified')),
- data_json or {}):
- seeders, leechers, size = map_list(lambda arg: try_int(
+ for item in filter(lambda di: re.match('(?i).*?(tv|television)',
+ di.get('type', '') or di.get('category', ''))
+ and (not self.confirmed or di.get('trusted') or di.get('verified')),
+ data_json or {}):
+ seeders, leechers, size = list(map(lambda arg: try_int(
*([item.get(arg[0]) if None is not item.get(arg[0]) else item.get(arg[1])]) * 2),
- (('seeder', 'seed'), ('leecher', 'leech'), ('size', 'size')))
+ (('seeder', 'seed'), ('leecher', 'leech'), ('size', 'size'))))
if self._reject_item(seeders, leechers):
continue
title = item.get('name') or item.get('title')
@@ -163,8 +163,8 @@ class SnowflProvider(generic.TorrentProvider):
else:
from sickgear import providers
if 'torlock' in url.lower():
- prov = next(filter_iter(lambda p: 'torlock' == p.name.lower(), (filter_iter(
- lambda sp: sp.providerType == self.providerType, providers.sortedProviderList()))))
+ prov = next(filter(lambda p: 'torlock' == p.name.lower(), (filter(
+ lambda sp: sp.providerType == self.providerType, providers.sorted_sources()))))
state = prov.enabled
prov.enabled = True
_ = prov.url
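The snowfl hunk above locates the torlock source with nested filters. A minimal sketch of the `next(filter(...))` idiom (stand-in classes; note that `next` raises StopIteration when nothing matches):

```python
class Source(object):
    def __init__(self, name, provider_type):
        self.name, self.providerType = name, provider_type

sources = [Source('TorLock', 'torrent'), Source('Nyaa', 'torrent')]
prov = next(filter(lambda p: 'torlock' == p.name.lower(),
                   filter(lambda sp: 'torrent' == sp.providerType, sources)))
assert 'TorLock' == prov.name
```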
diff --git a/sickgear/providers/speedapp.py b/sickgear/providers/speedapp.py
index e730e193..478e20b3 100644
--- a/sickgear/providers/speedapp.py
+++ b/sickgear/providers/speedapp.py
@@ -21,7 +21,6 @@ from . import generic
from ..helpers import try_int
from six import string_types
-from _23 import filter_list, map_list, unidecode
class SpeedAppProvider(generic.TorrentProvider):
@@ -55,14 +54,15 @@ class SpeedAppProvider(generic.TorrentProvider):
self.perms_needed = self.perms
if isinstance(resp, dict) and isinstance(resp.get('scopes'), list):
self._authd = True
- self.perms_needed = filter_list(lambda x: True is not x, [p in resp.get('scopes') or p for p in self.perms])
+ self.perms_needed = list(filter(lambda x: True is not x,
+ [p in resp.get('scopes') or p for p in self.perms]))
if not self.perms_needed:
self.categories = None
resp = self.get_url(self.urls['cats'], skip_auth=True, parse_json=True, headers=self.auth_header())
if isinstance(resp, list):
- categories = [category['id'] for category in filter_list(
+ categories = [category['id'] for category in list(filter(
lambda c: isinstance(c.get('id'), int) and isinstance(c.get('name'), string_types)
- and c.get('name').upper() in ('TV PACKS', 'TV HD', 'TV SD'), resp)]
+ and c.get('name').upper() in ('TV PACKS', 'TV HD', 'TV SD'), resp))]
self.categories = {'Cache': categories, 'Episode': categories, 'Season': categories}
return not any(self.perms_needed)
@@ -81,7 +81,7 @@ class SpeedAppProvider(generic.TorrentProvider):
for mode in search_params:
for search_string in search_params[mode]:
search_url = self.urls['search'] % (
- unidecode(search_string), self._categories_string(mode, template='categories[]=%s'))
+ search_string, self._categories_string(mode, template='categories[]=%s'))
data_json = self.get_url(search_url, skip_auth=True, parse_json=True, headers=self.auth_header())
if self.should_skip():
@@ -111,10 +111,10 @@ class SpeedAppProvider(generic.TorrentProvider):
('%s_api_key_tip' % self.get_id()) == key and \
((not_authd or self.perms_needed)
and ('create token at %s site '
- 'with perms %s' % (self.url_base, self.name, map_list(
+ 'with perms %s' % (self.url_base, self.name, list(map(
lambda p: 't.read' in p and 'Read torrents'
or 't.down' in p and 'Download torrents'
- or 'ch.read' in p and 'Read snatches', self.perms_needed)))
+ or 'ch.read' in p and 'Read snatches', self.perms_needed))))
.replace('[', '').replace(']', '')
or 'token is valid and required permissions are enabled') \
or ''
diff --git a/sickgear/providers/speedcd.py b/sickgear/providers/speedcd.py
index 8f21401f..9964362a 100644
--- a/sickgear/providers/speedcd.py
+++ b/sickgear/providers/speedcd.py
@@ -25,7 +25,7 @@ from ..helpers import try_int
from bs4_parser import BS4Parser
from requests.cookies import cookiejar_from_dict
-from _23 import filter_list, quote, unquote
+from _23 import quote, unquote
from six import string_types, iteritems
@@ -63,12 +63,12 @@ class SpeedCDProvider(generic.TorrentProvider):
self.session.cookies.clear()
json = self.get_url(self.urls['login_1'], skip_auth=True,
post_data={'username': self.username}, parse_json=True)
- resp = filter_list(lambda l: isinstance(l, list), json.get('Fs', []))
+ resp = list(filter(lambda l: isinstance(l, list), json.get('Fs', [])))
def get_html(_resp):
for cur_item in _resp:
if isinstance(cur_item, list):
- _html = filter_list(lambda s: isinstance(s, string_types) and 'password' in s, cur_item)
+ _html = list(filter(lambda s: isinstance(s, string_types) and 'password' in s, cur_item))
if not _html:
_html = get_html(cur_item)
if _html:
@@ -128,13 +128,13 @@ class SpeedCDProvider(generic.TorrentProvider):
cnt = len(items[mode])
try:
- html = filter_list(lambda l: isinstance(l, list), data_json.get('Fs', []))
+ html = list(filter(lambda l: isinstance(l, list), data_json.get('Fs', [])))
while html:
if html and all(isinstance(x, string_types) for x in html):
str_lengths = [len(x) for x in html]
html = html[str_lengths.index(max(str_lengths))]
break
- html = filter_list(lambda l: isinstance(l, list), html)
+ html = list(filter(lambda l: isinstance(l, list), html))
if html and 0 < len(html):
html = html[0]
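A traceable illustration of the nested-list drilling in the speedcd hunk above: keep only list children at each level, and when a level of strings is reached, take the longest as the html payload (the sample structure is hypothetical; the real code tests `six.string_types`):

```python
data_json = {'Fs': [1, ['x', ['short', 'the longest html string here']]]}

html = list(filter(lambda l: isinstance(l, list), data_json.get('Fs', [])))
while html:
    if html and all(isinstance(x, str) for x in html):
        str_lengths = [len(x) for x in html]
        html = html[str_lengths.index(max(str_lengths))]  # longest string wins
        break
    html = list(filter(lambda l: isinstance(l, list), html))
    if html and 0 < len(html):
        html = html[0]

assert 'the longest html string here' == html
```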
diff --git a/sickgear/providers/thepiratebay.py b/sickgear/providers/thepiratebay.py
index 51cbd129..bf57db9f 100644
--- a/sickgear/providers/thepiratebay.py
+++ b/sickgear/providers/thepiratebay.py
@@ -25,7 +25,7 @@ from .. import logger
from ..helpers import try_int
from bs4_parser import BS4Parser
-from _23 import b64decodestring, unidecode
+from _23 import b64decodestring
from six import iteritems
@@ -90,7 +90,6 @@ class ThePirateBayProvider(generic.TorrentProvider):
for mode in search_params:
for search_string in search_params[mode]:
- search_string = unidecode(search_string)
if 'Cache' != mode:
search_url = self.urls['api'] % search_string
diff --git a/sickgear/providers/tokyotoshokan.py b/sickgear/providers/tokyotoshokan.py
index 39592d61..338f38f9 100644
--- a/sickgear/providers/tokyotoshokan.py
+++ b/sickgear/providers/tokyotoshokan.py
@@ -22,7 +22,7 @@ from .. import show_name_helpers, tvcache
from ..helpers import try_int
from bs4_parser import BS4Parser
-from _23 import filter_list, map_list, urlencode
+from _23 import urlencode
from six import iteritems
@@ -78,10 +78,10 @@ class TokyoToshokanProvider(generic.TorrentProvider):
info = top.find('td', class_='desc-top')
title = info and re.sub(r'[ .]{2,}', '.', info.get_text().strip())
- links = info and map_list(lambda l: l.get('href', ''), info.find_all('a')) or None
+ links = info and list(map(lambda l: l.get('href', ''), info.find_all('a'))) or None
download_url = self._link(
- (filter_list(lambda l: 'magnet:' in l, links)
- or filter_list(lambda l: not re.search(r'(magnet:|\.se).+', l), links))[0])
+ (list(filter(lambda l: 'magnet:' in l, links))
+ or list(filter(lambda l: not re.search(r'(magnet:|\.se).+', l), links)))[0])
except (AttributeError, TypeError, ValueError, IndexError):
continue
diff --git a/sickgear/providers/torlock.py b/sickgear/providers/torlock.py
index 52fa16b8..79374449 100644
--- a/sickgear/providers/torlock.py
+++ b/sickgear/providers/torlock.py
@@ -23,7 +23,7 @@ from .. import logger
from ..helpers import try_int
from bs4_parser import BS4Parser
-from _23 import b64decodestring, quote_plus, unidecode
+from _23 import b64decodestring, quote_plus
from six import iteritems
@@ -66,8 +66,6 @@ class TorLockProvider(generic.TorrentProvider):
for mode in search_params:
for search_string in search_params[mode]:
- search_string = unidecode(search_string)
-
search_url = self.urls['browse'] if 'Cache' == mode \
else self.urls['search'] % (quote_plus(search_string).replace('+', '-'))
diff --git a/sickgear/providers/torrenting.py b/sickgear/providers/torrenting.py
index cf17d82e..0870d459 100644
--- a/sickgear/providers/torrenting.py
+++ b/sickgear/providers/torrenting.py
@@ -23,7 +23,6 @@ from .. import logger
from ..helpers import try_int
from bs4_parser import BS4Parser
-from _23 import unidecode
from six import iteritems
@@ -67,7 +66,6 @@ class TorrentingProvider(generic.TorrentProvider):
'get': 'download'})])
for mode in search_params:
for search_string in search_params[mode]:
- search_string = unidecode(search_string)
search_url = self.urls['search'] % (self._categories_string(), search_string)
html = self.get_url(search_url)
diff --git a/sickgear/providers/torrentleech.py b/sickgear/providers/torrentleech.py
index f65a3efb..148353f9 100644
--- a/sickgear/providers/torrentleech.py
+++ b/sickgear/providers/torrentleech.py
@@ -21,8 +21,7 @@ import re
from . import generic
from ..helpers import anon_url, try_int
-from _23 import unidecode
-from six import iteritems, PY2
+from six import iteritems
class TorrentLeechProvider(generic.TorrentProvider):
@@ -66,7 +65,7 @@ class TorrentLeechProvider(generic.TorrentProvider):
for page in range((3, 5)['Cache' == mode])[1:]:
urls[-1] += [self.urls[('search', 'browse')['Cache' == mode]] % {
'cats': self._categories_string(mode, '', ','),
- 'query': unidecode(search_string) or search_string,
+ 'query': search_string,
'x': '%spage/%s' % (('facets/tags:FREELEECH/', '')[not self.freeleech], page)
}]
results += self._search_urls(mode, last_recent_search, urls)
@@ -125,8 +124,7 @@ class TorrentLeechProvider(generic.TorrentProvider):
download_url = None
if dl and dl_id:
# noinspection PyUnresolvedReferences
- download_url = self._link('download/%s/%s' % (dl_id, dl),
- url_quote=PY2 and isinstance(dl, unicode) or None)
+ download_url = self._link('download/%s/%s' % (dl_id, dl))
except (BaseException, Exception):
continue
diff --git a/sickgear/providers/tvchaosuk.py b/sickgear/providers/tvchaosuk.py
index 244759cb..8897cf92 100644
--- a/sickgear/providers/tvchaosuk.py
+++ b/sickgear/providers/tvchaosuk.py
@@ -27,7 +27,7 @@ from ..helpers import try_int
from bs4_parser import BS4Parser
from dateutil.parser import parse
-from _23 import unidecode, unquote_plus
+from _23 import unquote_plus
from six import iteritems
@@ -80,7 +80,7 @@ class TVChaosUKProvider(generic.TorrentProvider):
             'info': r'/torrents?/(?P<info>(?P<tid>\d{2,})[^"]*)', 'get': 'download'})])
for mode in search_params:
for search_string in search_params[mode]:
- search_string = unidecode(unquote_plus(search_string))
+ search_string = unquote_plus(search_string)
vals = [i for i in range(5, 16)]
random.SystemRandom().shuffle(vals)
diff --git a/sickgear/providers/xspeeds.py b/sickgear/providers/xspeeds.py
index 4b11a356..e500b438 100644
--- a/sickgear/providers/xspeeds.py
+++ b/sickgear/providers/xspeeds.py
@@ -25,7 +25,6 @@ from .. import logger
from ..helpers import has_anime, try_int
from bs4_parser import BS4Parser
-from _23 import unidecode
from six import iteritems
@@ -70,7 +69,6 @@ class XspeedsProvider(generic.TorrentProvider):
for search_string in search_params[mode]:
search_string = search_string.replace(u'£', '%')
search_string = re.sub(r'[\s.]+', '%', search_string)
- search_string = unidecode(search_string)
kwargs = dict(post_data={'keywords': search_string, 'do': 'quick_sort', 'page': '0',
'category': '0', 'search_type': 't_name', 'sort': 'added',
diff --git a/sickgear/sab.py b/sickgear/sab.py
index f6d5496a..8efa531b 100644
--- a/sickgear/sab.py
+++ b/sickgear/sab.py
@@ -118,7 +118,7 @@ def access_method(host):
def test_authentication(host=None, username=None, password=None, apikey=None):
"""
- Sends a simple API request to SAB to determine if the given connection information is connect
+ Sends a simple API request to SAB to determine if the given connection information is correct
Returns: A tuple containing the success boolean and a message
:param host: The host where SAB is running (incl port)
diff --git a/sickgear/scene_exceptions.py b/sickgear/scene_exceptions.py
index a9fa0afa..b7ee204a 100644
--- a/sickgear/scene_exceptions.py
+++ b/sickgear/scene_exceptions.py
@@ -35,8 +35,8 @@ from .sgdatetime import timestamp_near
import lib.rarfile.rarfile as rarfile
-from _23 import filter_iter, list_range, map_iter
-from six import iteritems, PY2, text_type
+from _23 import list_range
+from six import iteritems, text_type
# noinspection PyUnreachableCode
if False:
@@ -303,7 +303,7 @@ def retrieve_exceptions():
list(cur_tvid_prodid))]
# if this exception isn't already in the DB then add it
- for cur_exception_dict in filter_iter(lambda e: e not in existing_exceptions, exception_dict[cur_tvid_prodid]):
+ for cur_exception_dict in filter(lambda e: e not in existing_exceptions, exception_dict[cur_tvid_prodid]):
try:
cur_exception, cur_season = next(iteritems(cur_exception_dict))
except (BaseException, Exception):
@@ -311,9 +311,6 @@ def retrieve_exceptions():
logger.log(traceback.format_exc(), logger.ERROR)
continue
- if PY2 and not isinstance(cur_exception, text_type):
- cur_exception = text_type(cur_exception, 'utf-8', 'replace')
-
cl.append(['INSERT INTO scene_exceptions'
' (indexer, indexer_id, show_name, season) VALUES (?,?,?,?)',
list(cur_tvid_prodid) + [cur_exception, cur_season]])
@@ -321,7 +318,7 @@ def retrieve_exceptions():
if cl:
my_db.mass_action(cl)
- name_cache.buildNameCache(update_only_scene=True)
+ name_cache.build_name_cache(update_only_scene=True)
# since this could invalidate the results of the cache we clear it out after updating
if changed_exceptions:
@@ -368,14 +365,11 @@ def update_scene_exceptions(tvid, prodid, scene_exceptions):
exceptionsCache[(tvid, prodid)][cur_season].append(cur_exception)
- if PY2 and not isinstance(cur_exception, text_type):
- cur_exception = text_type(cur_exception, 'utf-8', 'replace')
-
my_db.action('INSERT INTO scene_exceptions'
' (indexer, indexer_id, show_name, season) VALUES (?,?,?,?)',
[tvid, prodid, cur_exception, cur_season])
- sickgear.name_cache.buildNameCache(update_only_scene=True)
+ sickgear.name_cache.build_name_cache(update_only_scene=True)
def _custom_exceptions_fetcher():
@@ -489,7 +483,7 @@ def _anidb_exceptions_fetcher():
if should_refresh('anidb'):
logger.log(u'Checking for AniDB scene exception updates')
- for cur_show_obj in filter_iter(lambda _s: _s.is_anime and TVINFO_TVDB == _s.tvid, sickgear.showList):
+ for cur_show_obj in filter(lambda _s: _s.is_anime and TVINFO_TVDB == _s.tvid, sickgear.showList):
try:
anime = create_anidb_obj(name=cur_show_obj.name, tvdbid=cur_show_obj.prodid, autoCorrectName=True)
except (BaseException, Exception):
@@ -559,8 +553,8 @@ def _xem_get_ids(infosrc_name, xem_origin):
% (task.lower() % ('', 's'), infosrc_name, url), logger.ERROR)
else:
if 'success' == parsed_json.get('result', '') and 'data' in parsed_json:
- xem_ids = list(set(filter_iter(lambda prodid: 0 < prodid,
- map_iter(lambda pid: helpers.try_int(pid), parsed_json['data']))))
+ xem_ids = list(set(filter(lambda prodid: 0 < prodid,
+ map(lambda pid: helpers.try_int(pid), parsed_json['data']))))
if 0 == len(xem_ids):
logger.log(u'Failed %s %s, no data items parsed from URL: %s'
% (task.lower() % ('', 's'), infosrc_name, url), logger.WARNING)
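A small worked example of the xem id parsing above: coerce each value to int, keep positive ids, deduplicate (the `try_int` stub mirrors the assumed behaviour of `helpers.try_int`):

```python
def try_int(v, default=0):  # assumed: int() with a fallback default
    try:
        return int(v)
    except (TypeError, ValueError):
        return default

data = ['73739', '73739', 'junk', '-1', '81189']
xem_ids = list(set(filter(lambda prodid: 0 < prodid,
                          map(lambda pid: try_int(pid), data))))
assert [73739, 81189] == sorted(xem_ids)
```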
diff --git a/sickgear/scene_numbering.py b/sickgear/scene_numbering.py
index 8bfa2cb7..cccb4abc 100644
--- a/sickgear/scene_numbering.py
+++ b/sickgear/scene_numbering.py
@@ -32,8 +32,6 @@ from .helpers import try_int
from .scene_exceptions import xem_ids_list
from .sgdatetime import timestamp_near
-from _23 import filter_iter, map_list
-
# noinspection PyUnreachableCode
if False:
from typing import Dict, List, Optional, Tuple, Union
@@ -47,8 +45,8 @@ def get_scene_numbering(tvid, prodid, season, episode, fallback_to_xem=True, sho
returns the TVDB numbering.
(so the return values will always be set)
- kwargs['scene_result']: type: Optional[List[Row]] passed thru
- kwargs['show_result']: type: Optional[List[Row]] passed thru
+ kwargs['scene_result']: type: Optional[List[Row]] passed through
+ kwargs['show_result']: type: Optional[List[Row]] passed through
:param tvid: tvid
:type tvid: int
@@ -136,8 +134,8 @@ def get_scene_absolute_numbering(tvid, prodid, absolute_number, season, episode,
returns the TVDB numbering.
(so the return values will always be set)
- kwargs['scene_result']: type: Optional[List[Row]] passed thru
- kwargs['show_result']: type: Optional[List[Row]] passed thru
+ kwargs['scene_result']: type: Optional[List[Row]] passed through
+ kwargs['show_result']: type: Optional[List[Row]] passed through
:param tvid: tvid
:type tvid: int
@@ -718,8 +716,8 @@ def _get_absolute_numbering_for_show(tbl, tvid, prodid):
""" % (tbl, ('indexer_id', 'showid')['tv_episodes' == tbl]), [int(tvid), int(prodid)])
for cur_row in sql_result:
- season, episode, abs_num = map_list(lambda x: try_int(cur_row[x], None),
- ('season', 'episode', 'absolute_number'))
+ season, episode, abs_num = list(map(lambda x: try_int(cur_row[x], None),
+ ('season', 'episode', 'absolute_number')))
if None is season and None is episode and None is not abs_num:
season, episode, _ = _get_sea(tvid, prodid, absolute_number=abs_num)
@@ -815,7 +813,7 @@ def xem_refresh(tvid, prodid, force=False):
return
if 'success' in parsed_json['result']:
- cl = map_list(lambda entry: [
+ cl = list(map(lambda entry: [
"""
UPDATE tv_episodes
SET scene_season = ?, scene_episode = ?, scene_absolute_number = ?
@@ -824,7 +822,7 @@ def xem_refresh(tvid, prodid, force=False):
for v in ('season', 'episode', 'absolute')]
+ [tvid, prodid]
+ [entry.get(xem_origin).get(v) for v in ('season', 'episode')]
- ], filter_iter(lambda x: 'scene' in x, parsed_json['data']))
+ ], filter(lambda x: 'scene' in x, parsed_json['data'])))
if 0 < len(cl):
my_db = db.DBConnection()
diff --git a/sickgear/scheduler.py b/sickgear/scheduler.py
index b310ef81..990df34c 100644
--- a/sickgear/scheduler.py
+++ b/sickgear/scheduler.py
@@ -26,17 +26,17 @@ from exceptions_helper import ex
class Scheduler(threading.Thread):
- def __init__(self, action, cycleTime=datetime.timedelta(minutes=10), run_delay=datetime.timedelta(minutes=0),
- start_time=None, threadName="ScheduledThread", silent=True, prevent_cycle_run=None, paused=False):
+ def __init__(self, action, cycle_time=datetime.timedelta(minutes=10), run_delay=datetime.timedelta(minutes=0),
+ start_time=None, thread_name="ScheduledThread", silent=True, prevent_cycle_run=None, paused=False):
super(Scheduler, self).__init__()
- self.lastRun = datetime.datetime.now() + run_delay - cycleTime
+ self.last_run = datetime.datetime.now() + run_delay - cycle_time
self.action = action
- self.cycleTime = cycleTime
+ self.cycle_time = cycle_time
self.start_time = start_time
self.prevent_cycle_run = prevent_cycle_run
- self.name = threadName
+ self.name = thread_name
self.silent = silent
self._stopper = threading.Event()
self._unpause = threading.Event()
@@ -65,10 +65,10 @@ class Scheduler(threading.Thread):
else:
self.unpause()
- def timeLeft(self):
- return self.cycleTime - (datetime.datetime.now() - self.lastRun)
+ def time_left(self):
+ return self.cycle_time - (datetime.datetime.now() - self.last_run)
- def forceRun(self):
+ def force_run(self):
if not self.action.amActive:
self.force = True
return True
@@ -93,15 +93,15 @@ class Scheduler(threading.Thread):
should_run = False
# check if interval has passed
- if current_time - self.lastRun >= self.cycleTime:
+ if current_time - self.last_run >= self.cycle_time:
# check if wanting to start around certain time taking interval into account
if self.start_time:
hour_diff = current_time.time().hour - self.start_time.hour
- if not hour_diff < 0 and hour_diff < self.cycleTime.seconds // 3600:
+ if not hour_diff < 0 and hour_diff < self.cycle_time.seconds // 3600:
should_run = True
else:
- # set lastRun to only check start_time after another cycleTime
- self.lastRun = current_time
+ # set last_run to only check start_time after another cycle_time
+ self.last_run = current_time
else:
should_run = True
@@ -110,13 +110,13 @@ class Scheduler(threading.Thread):
if should_run and ((self.prevent_cycle_run is not None and self.prevent_cycle_run()) or
getattr(self.action, 'prevent_run', False)):
- logger.log(u'%s skipping this cycleTime' % self.name, logger.WARNING)
- # set lastRun to only check start_time after another cycleTime
- self.lastRun = current_time
+ logger.log(u'%s skipping this cycle_time' % self.name, logger.WARNING)
+ # set last_run to only check start_time after another cycle_time
+ self.last_run = current_time
should_run = False
if should_run:
- self.lastRun = current_time
+ self.last_run = current_time
try:
if not self.silent:
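Callers of `Scheduler` elsewhere in the tree must follow these renames. A hypothetical construction against the new signature (the `NoopAction` stand-in is not from the diff):

```python
import datetime
from sickgear.scheduler import Scheduler

class NoopAction(object):
    amActive = False
    def run(self): pass

job = Scheduler(NoopAction(),
                cycle_time=datetime.timedelta(minutes=10),  # was cycleTime=
                run_delay=datetime.timedelta(minutes=1),
                thread_name='NOOP',                         # was threadName=
                silent=True)
print(job.time_left())  # was timeLeft()
job.force_run()         # was forceRun()
```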
diff --git a/sickgear/search.py b/sickgear/search.py
index 898f1f36..d7c87fc1 100644
--- a/sickgear/search.py
+++ b/sickgear/search.py
@@ -34,7 +34,6 @@ from .common import DOWNLOADED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER, MULTI_
from .providers.generic import GenericProvider
from .tv import TVEpisode, TVShow
-from _23 import filter_list, filter_iter, list_values
from six import iteritems, itervalues, string_types
# noinspection PyUnreachableCode
@@ -166,9 +165,9 @@ def snatch_episode(result, end_status=SNATCHED):
for cur_ep_obj in result.ep_obj_list:
with cur_ep_obj.lock:
if is_first_best_match(cur_ep_obj.status, result):
- cur_ep_obj.status = Quality.compositeStatus(SNATCHED_BEST, result.quality)
+ cur_ep_obj.status = Quality.composite_status(SNATCHED_BEST, result.quality)
else:
- cur_ep_obj.status = Quality.compositeStatus(end_status, result.quality)
+ cur_ep_obj.status = Quality.composite_status(end_status, result.quality)
item = cur_ep_obj.get_sql()
if None is not item:
@@ -355,7 +354,7 @@ def is_final_result(result):
Checks if the given result is good enough quality that we can stop searching for other ones.
:param result: search result to check
- :return: If the result is the highest quality in both the any/best quality lists then this function
+ :return: If the result is the highest quality in both any and best quality lists then this function
returns True, if not then it's False
"""
@@ -363,7 +362,7 @@ def is_final_result(result):
show_obj = result.ep_obj_list[0].show_obj
- any_qualities, best_qualities = Quality.splitQuality(show_obj.quality)
+ any_qualities, best_qualities = Quality.split_quality(show_obj.quality)
# if there is a download that's higher than this then we definitely need to keep looking
if best_qualities and max(best_qualities) > result.quality:
@@ -379,11 +378,11 @@ def is_final_result(result):
elif best_qualities and max(best_qualities) == result.quality:
- # if this is the best download but we have a higher initial download then keep looking
+ # if this is the best download, but we have a higher initial download then keep looking
if any_qualities and max(any_qualities) > result.quality:
return False
- # if this is the best download and we don't have a higher initial download then we're done
+ # if this is the best download, and we don't have a higher initial download then we're done
return True
     # if we got here then it's either not on the lists, they're empty, or it's lower than the highest required
@@ -393,7 +392,7 @@ def is_final_result(result):
def is_first_best_match(ep_status, result):
# type: (int, sickgear.classes.SearchResult) -> bool
"""
- Checks if the given result is a best quality match and if we want to archive the episode on first match.
+ Checks if the given result is the best quality match and if we want to archive the episode on first match.
:param ep_status: current episode object status
:param result: search result to check
@@ -404,11 +403,11 @@ def is_first_best_match(ep_status, result):
result.name, logger.DEBUG)
show_obj = result.ep_obj_list[0].show_obj
- cur_status, cur_quality = Quality.splitCompositeStatus(ep_status)
+ cur_status, cur_quality = Quality.split_composite_status(ep_status)
- any_qualities, best_qualities = Quality.splitQuality(show_obj.quality)
+ any_qualities, best_qualities = Quality.split_quality(show_obj.quality)
- # if there is a download that's a match to one of our best qualities and
+ # if there is a download that's a match to one of our best qualities, and
# we want to archive the episode then we are done
if best_qualities and show_obj.upgrade_once and \
(result.quality in best_qualities and
@@ -434,7 +433,7 @@ def set_wanted_aired(ep_obj, # type: TVEpisode
:param ep_count_scene: count of episodes in scene seasons
:param manual: manual search
"""
- ep_status, ep_quality = common.Quality.splitCompositeStatus(ep_obj.status)
+ ep_status, ep_quality = common.Quality.split_composite_status(ep_obj.status)
ep_obj.wanted_quality = get_wanted_qualities(ep_obj, ep_status, ep_quality, unaired=unaired, manual=manual)
ep_obj.eps_aired_in_season = ep_count.get(ep_obj.season, 0)
ep_obj.eps_aired_in_scene_season = ep_count_scene.get(
@@ -459,7 +458,7 @@ def get_wanted_qualities(ep_obj, # type: TVEpisode
"""
if isinstance(ep_obj, TVEpisode):
return sickgear.WANTEDLIST_CACHE.get_wantedlist(ep_obj.show_obj.quality, ep_obj.show_obj.upgrade_once,
- cur_quality, cur_status, unaired, manual)
+ cur_quality, cur_status, unaired, manual)
return []
@@ -544,7 +543,7 @@ def wanted_episodes(show_obj, # type: TVShow
for result in sql_result:
ep_obj = show_obj.get_episode(int(result['season']), int(result['episode']), ep_result=ep_sql_result)
- cur_status, cur_quality = common.Quality.splitCompositeStatus(ep_obj.status)
+ cur_status, cur_quality = common.Quality.split_composite_status(ep_obj.status)
ep_obj.wanted_quality = get_wanted_qualities(ep_obj, cur_status, cur_quality, unaired=unaired)
if not ep_obj.wanted_quality:
continue
@@ -590,7 +589,7 @@ def search_for_needed_episodes(ep_obj_list):
orig_thread_name = threading.current_thread().name
- providers = filter_list(lambda x: x.is_active() and x.enable_recentsearch, sickgear.providers.sortedProviderList())
+ providers = list(filter(lambda x: x.is_active() and x.enable_recentsearch, sickgear.providers.sorted_sources()))
for cur_provider in providers:
threading.current_thread().name = '%s :: [%s]' % (orig_thread_name, cur_provider.name)
@@ -616,7 +615,7 @@ def search_for_needed_episodes(ep_obj_list):
logger.log(u'All found results for %s were rejected.' % cur_ep_obj.pretty_name(), logger.DEBUG)
continue
- # if it's already in the list (from another provider) and the newly found quality is no better then skip it
+ # if it's already in the list (from another provider) and the newly found quality is no better, then skip it
if cur_ep_obj in found_results and best_result.quality <= found_results[cur_ep_obj].quality:
continue
@@ -633,7 +632,7 @@ def search_for_needed_episodes(ep_obj_list):
found_results[cur_ep_obj] = best_result
try:
- cur_provider.save_list()
+ cur_provider.fails.save_list()
except (BaseException, Exception):
pass
@@ -646,7 +645,7 @@ def search_for_needed_episodes(ep_obj_list):
logger.log('Failed recent search of %s enabled provider%s. More info in debug log.' % (
len(providers), helpers.maybe_plural(providers)), logger.ERROR)
- return list_values(found_results)
+ return list(found_results.values())
def can_reject(release_name):
@@ -719,7 +718,7 @@ def _search_provider_thread(provider, provider_results, show_obj, ep_obj_list, m
logger.log(u'Performing season pack search for %s' % show_obj.unique_name)
try:
- provider.cache._clearCache()
+ provider.cache.clear_cache()
search_result_list = provider.find_search_results(show_obj, ep_obj_list, search_mode, manual_search,
try_other_searches=try_other_searches)
if any(search_result_list):
@@ -738,10 +737,10 @@ def _search_provider_thread(provider, provider_results, show_obj, ep_obj_list, m
# make a list of all the results for this provider
for cur_search_result in search_result_list:
# skip non-tv crap
- search_result_list[cur_search_result] = filter_list(
+ search_result_list[cur_search_result] = list(filter(
lambda ep_item: ep_item.show_obj == show_obj and show_name_helpers.pass_wordlist_checks(
ep_item.name, parse=False, indexer_lookup=False, show_obj=ep_item.show_obj),
- search_result_list[cur_search_result])
+ search_result_list[cur_search_result]))
if cur_search_result in provider_results:
provider_results[cur_search_result] += search_result_list[cur_search_result]
@@ -767,7 +766,7 @@ def cache_torrent_file(
# type: (...) -> Optional[TorrentSearchResult]
cache_file = os.path.join(sickgear.CACHE_DIR or helpers.get_system_temp_dir(),
- '%s.torrent' % (helpers.sanitize_filename(search_result.name)))
+ '%s.torrent' % (helpers.sanitize_filename(search_result.name)))
if not helpers.download_file(
search_result.url, cache_file, session=search_result.provider.session, failure_monitor=False):
@@ -841,7 +840,7 @@ def search_providers(
orig_thread_name = threading.current_thread().name
- provider_list = [x for x in sickgear.providers.sortedProviderList() if x.is_active() and
+ provider_list = [x for x in sickgear.providers.sorted_sources() if x.is_active() and
getattr(x, 'enable_backlog', None) and
(not torrent_only or GenericProvider.TORRENT == x.providerType) and
(not scheduled or getattr(x, 'enable_scheduled_backlog', None))]
@@ -879,7 +878,7 @@ def search_providers(
if provider_id not in found_results or not len(found_results[provider_id]):
continue
- any_qualities, best_qualities = Quality.splitQuality(show_obj.quality)
+ any_qualities, best_qualities = Quality.split_quality(show_obj.quality)
# pick the best season NZB
best_season_result = None
@@ -919,8 +918,8 @@ def search_providers(
else:
any_wanted = True
- # if we need every ep in the season and there's nothing better then just download this and
- # be done with it (unless single episodes are preferred)
+ # if we need every ep in the season and there's nothing better,
+ # then download this and be done with it (unless single episodes are preferred)
if all_wanted and highest_quality_overall == best_season_result.quality:
logger.log(u'Every episode in this season is needed, downloading the whole %s %s' %
(best_season_result.provider.providerType, best_season_result.name))
@@ -939,9 +938,9 @@ def search_providers(
logger.log(u'Breaking apart the NZB and adding the individual ones to our results', logger.DEBUG)
# if not, break it apart and add them as the lowest priority results
- individual_results = nzbSplitter.splitResult(best_season_result)
+ individual_results = nzbSplitter.split_result(best_season_result)
- for cur_result in filter_iter(
+ for cur_result in filter(
lambda r: r.show_obj == show_obj and show_name_helpers.pass_wordlist_checks(
r.name, parse=False, indexer_lookup=False, show_obj=r.show_obj), individual_results):
ep_num = None
@@ -986,7 +985,7 @@ def search_providers(
logger.log(u'Checking usefulness of multi episode result [%s]' % multi_result.name, logger.DEBUG)
if sickgear.USE_FAILED_DOWNLOADS and failed_history.has_failed(multi_result.name, multi_result.size,
- multi_result.provider.name):
+ multi_result.provider.name):
logger.log(u'Rejecting previously failed multi episode result [%s]' % multi_result.name)
continue
@@ -1058,7 +1057,7 @@ def search_providers(
found_results[provider_id][cur_search_result][0].ep_obj_list[0]) or \
found_results[provider_id][cur_search_result][0].ep_obj_list[0].status
if old_status:
- status, quality = Quality.splitCompositeStatus(old_status)
+ status, quality = Quality.split_composite_status(old_status)
use_quality_list = (status not in (
common.WANTED, common.FAILED, common.UNAIRED, common.SKIPPED, common.IGNORED, common.UNKNOWN))
@@ -1094,7 +1093,7 @@ def search_providers(
best_result.after_get_data_func(best_result)
best_result.after_get_data_func = None # consume only once
- # add result if its not a duplicate
+ # add result if it's not a duplicate
found = False
for i, result in enumerate(final_results):
for best_result_ep in best_result.ep_obj_list:
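The `composite_status`/`split_composite_status` renames above operate on one integer that packs status and quality together. A minimal sketch, assuming the classic SickBeard encoding `status + 100 * quality`; the real splitter walks the known quality constants, so the integer-division inverse and the constants here are simplifications:

```python
SNATCHED = 2    # stand-in status constant
HDTV = 1 << 3   # stand-in quality constant (qualities are powers of two)

def composite_status(status, quality):
    return status + 100 * quality

def split_composite_status(composite):
    # simplified inverse; valid while status < 100
    return composite % 100, composite // 100

assert (SNATCHED, HDTV) == split_composite_status(composite_status(SNATCHED, HDTV))
```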
diff --git a/sickgear/search_backlog.py b/sickgear/search_backlog.py
index 904d54a9..fa603986 100644
--- a/sickgear/search_backlog.py
+++ b/sickgear/search_backlog.py
@@ -28,7 +28,6 @@ from .search import wanted_episodes
from .sgdatetime import SGDatetime, timestamp_near
from .tv import TVidProdid, TVEpisode, TVShow
-from _23 import filter_list, map_iter, map_list
from six import iteritems, itervalues, moves
# noinspection PyUnreachableCode
@@ -48,29 +47,29 @@ class BacklogSearchScheduler(scheduler.Scheduler):
self.force = True
def next_run(self):
- if 1 >= self.action._lastBacklog:
+ if 1 >= self.action.last_backlog:
return datetime.date.today()
- elif (self.action._lastBacklog + self.action.cycleTime) < datetime.date.today().toordinal():
+ elif (self.action.last_backlog + self.action.cycle_time) < datetime.date.today().toordinal():
return datetime.date.today()
- return datetime.date.fromordinal(self.action._lastBacklog + self.action.cycleTime)
+ return datetime.date.fromordinal(self.action.last_backlog + self.action.cycle_time)
def next_backlog_timeleft(self):
now = datetime.datetime.now()
- torrent_enabled = 0 < len([x for x in sickgear.providers.sortedProviderList() if x.is_active() and
+ torrent_enabled = 0 < len([x for x in sickgear.providers.sorted_sources() if x.is_active() and
getattr(x, 'enable_backlog', None) and GenericProvider.TORRENT == x.providerType])
- if now > self.action.nextBacklog or self.action.nextCyleTime != self.cycleTime:
- nextruntime = now + self.timeLeft()
+ if now > self.action.nextBacklog or self.action.nextCyleTime != self.cycle_time:
+ nextruntime = now + self.time_left()
if not torrent_enabled:
nextpossibleruntime = (datetime.datetime.fromtimestamp(self.action.last_runtime) +
datetime.timedelta(hours=23))
for _ in moves.xrange(5):
if nextruntime > nextpossibleruntime:
self.action.nextBacklog = nextruntime
- self.action.nextCyleTime = self.cycleTime
+ self.action.nextCyleTime = self.cycle_time
break
- nextruntime += self.cycleTime
+ nextruntime += self.cycle_time
else:
- self.action.nextCyleTime = self.cycleTime
+ self.action.nextCyleTime = self.cycle_time
self.action.nextBacklog = nextruntime
return self.action.nextBacklog - now if self.action.nextBacklog > now else datetime.timedelta(seconds=0)
@@ -78,8 +77,8 @@ class BacklogSearchScheduler(scheduler.Scheduler):
class BacklogSearcher(object):
def __init__(self):
- self._lastBacklog = self._get_last_backlog()
- self.cycleTime = sickgear.BACKLOG_PERIOD
+ self.last_backlog = self._get_last_backlog()
+ self.cycle_time = sickgear.BACKLOG_PERIOD
self.lock = threading.Lock()
self.amActive = False # type: bool
self.amPaused = False # type: bool
@@ -176,7 +175,7 @@ class BacklogSearcher(object):
:param scheduled: scheduled backlog search (can be from webif or scheduler)
:return: any provider is active for given backlog
"""
- return 0 < len([x for x in sickgear.providers.sortedProviderList() if x.is_active() and
+ return 0 < len([x for x in sickgear.providers.sorted_sources() if x.is_active() and
getattr(x, 'enable_backlog', None) and
(not torrent_only or GenericProvider.TORRENT == x.providerType) and
(not scheduled or getattr(x, 'enable_scheduled_backlog', None))])
@@ -212,10 +211,10 @@ class BacklogSearcher(object):
any_torrent_enabled = continued_backlog = False
if not force and standard_backlog and (datetime.datetime.now() - datetime.datetime.fromtimestamp(
self._get_last_runtime())) < datetime.timedelta(hours=23):
- any_torrent_enabled = any(map_iter(
+ any_torrent_enabled = any(map(
lambda x: x.is_active() and getattr(x, 'enable_backlog', None)
and GenericProvider.TORRENT == x.providerType,
- sickgear.providers.sortedProviderList()))
+ sickgear.providers.sorted_sources()))
if not any_torrent_enabled:
logger.log('Last scheduled backlog run was within the last day, skipping this run.', logger.DEBUG)
return
@@ -291,8 +290,8 @@ class BacklogSearcher(object):
if not runparts and parts:
runparts = parts[0]
- wanted_list = filter_list(
- lambda wi: wi and next(itervalues(wi))[0].show_obj.tvid_prodid in runparts, wanted_list)
+ wanted_list = list(filter(
+ lambda wi: wi and next(itervalues(wi))[0].show_obj.tvid_prodid in runparts, wanted_list))
limited_wanted_list = []
if standard_backlog and not any_torrent_enabled and runparts:
@@ -314,8 +313,8 @@ class BacklogSearcher(object):
for i, l in enumerate(parts):
if 0 == i:
continue
- cl += map_list(lambda m: ['INSERT INTO backlogparts (part, indexer, indexerid) VALUES (?,?,?)',
- [i + 1] + TVidProdid(m).list], l)
+ cl += list(map(lambda m: ['INSERT INTO backlogparts (part, indexer, indexerid) VALUES (?,?,?)',
+ [i + 1] + TVidProdid(m).list], l))
if 0 < len(cl):
my_db.mass_action(cl)
@@ -384,8 +383,8 @@ class BacklogSearcher(object):
if last_backlog > datetime.date.today().toordinal():
last_backlog = 1
- self._lastBacklog = last_backlog
- return self._lastBacklog
+ self.last_backlog = last_backlog
+ return self.last_backlog
@staticmethod
def _set_last_backlog(when):
diff --git a/sickgear/search_queue.py b/sickgear/search_queue.py
index 36f804dd..88d430d2 100644
--- a/sickgear/search_queue.py
+++ b/sickgear/search_queue.py
@@ -22,20 +22,16 @@ import re
import threading
import traceback
-import exceptions_helper
# noinspection PyPep8Naming
from exceptions_helper import ex
import sickgear
-from lib.dateutil import tz
from . import common, db, failed_history, generic_queue, helpers, \
history, logger, network_timezones, properFinder, search, ui
from .classes import Proper, SimpleNamespace
from .search import wanted_episodes, get_aired_in_season, set_wanted_aired
from .tv import TVEpisode
-from _23 import filter_list
-
# noinspection PyUnreachableCode
if False:
from typing import Any, AnyStr, Dict, List, Optional, Union
@@ -520,8 +516,8 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
orig_thread_name = threading.current_thread().name
threads = []
- providers = filter_list(lambda x: x.is_active() and x.enable_recentsearch,
- sickgear.providers.sortedProviderList())
+ providers = list(filter(lambda x: x.is_active() and x.enable_recentsearch,
+ sickgear.providers.sorted_sources()))
for cur_provider in providers:
if not cur_provider.cache.should_update():
continue
@@ -530,7 +526,7 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
logger.log('Updating provider caches with recent upload data')
# spawn a thread for each provider to save time waiting for slow response providers
- threads.append(threading.Thread(target=cur_provider.cache.updateCache,
+ threads.append(threading.Thread(target=cur_provider.cache.update_cache,
kwargs={'needed': needed},
name='%s :: [%s]' % (orig_thread_name, cur_provider.name)))
# start the thread we just created
@@ -647,7 +643,7 @@ class ManualSearchQueueItem(BaseSearchQueueItem):
ep_count, ep_count_scene = get_aired_in_season(self.show_obj)
set_wanted_aired(self.segment, True, ep_count, ep_count_scene, manual=True)
if not getattr(self.segment, 'wanted_quality', None):
- ep_status, ep_quality = common.Quality.splitCompositeStatus(self.segment.status)
+ ep_status, ep_quality = common.Quality.split_composite_status(self.segment.status)
self.segment.wanted_quality = search.get_wanted_qualities(self.segment, ep_status, ep_quality,
unaired=True, manual=True)
if not self.segment.wanted_quality:
diff --git a/sickgear/sgdatetime.py b/sickgear/sgdatetime.py
index 86bb84b5..f963c76d 100644
--- a/sickgear/sgdatetime.py
+++ b/sickgear/sgdatetime.py
@@ -23,11 +23,11 @@ import sys
import sickgear
from dateutil import tz
-from six import integer_types, PY2, string_types
+from six import integer_types, string_types
# noinspection PyUnreachableCode
if False:
- from typing import Callable, Optional, Union
+ from typing import Optional, Union
date_presets = ('%Y-%m-%d',
'%a, %Y-%m-%d',
@@ -234,7 +234,7 @@ class SGDatetime(datetime.datetime):
"""
convert datetime to filetime
special handling for windows filetime issues
- for pre Windows 7 this can result in an exception for pre 1970 dates
+ on pre-Windows 7 this can result in an exception for pre-1970 dates
"""
obj = (dt, self)[self is not None] # type: datetime.datetime
if is_win:
@@ -283,21 +283,15 @@ class SGDatetime(datetime.datetime):
return (default, timestamp)[isinstance(timestamp, (float, integer_types))]
-if PY2:
- """
- Use `timestamp_near` for a timezone aware UTC timestamp in the near future or recent past.
-
- Under py3, using the faster variable assigned cpython callable, so py2 is set up to mimic the signature types.
- Note: the py3 callable is limited to datetime.datetime and does not work with datetime.date.
- """
- def _py2timestamp(dt=None):
+# noinspection PyUnreachableCode
+if False:
+ # just to trick PyCharm into correct type detection
+ # noinspection PyUnusedLocal
+ def timestamp_near(d_t):
# type: (datetime.datetime) -> float
- try:
- import time
- return int(time.mktime(dt.timetuple()))
- except (BaseException, Exception):
- return 0
- timestamp_near = _py2timestamp # type: Callable[[datetime.datetime], float]
-else:
- # py3 native timestamp uses milliseconds
- timestamp_near = datetime.datetime.timestamp # type: Callable[[datetime.datetime], float]
+ pass
+
+
+# py3 native timestamp() has sub-second precision (the removed py2 shim returned whole seconds)
+# noinspection PyRedeclaration
+timestamp_near = datetime.datetime.timestamp
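
With the py2 branch removed, `timestamp_near` collapses to the native `datetime.datetime.timestamp`; the `if False:` stub above exists only so IDE type checkers resolve the name. A small usage sketch, noting the constraint the deleted docstring warned about:

```python
import datetime

timestamp_near = datetime.datetime.timestamp

# naive datetimes are interpreted as local time; tz-aware ones are honoured
dt = datetime.datetime(2023, 2, 20, 23, 30)
print(timestamp_near(dt))  # float seconds since the epoch, sub-second precision

# the callable is bound to datetime.datetime, so a plain datetime.date,
# e.g. timestamp_near(datetime.date.today()), raises TypeError
```
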
diff --git a/sickgear/show_name_helpers.py b/sickgear/show_name_helpers.py
index a18e5878..0ee26627 100644
--- a/sickgear/show_name_helpers.py
+++ b/sickgear/show_name_helpers.py
@@ -28,7 +28,7 @@ from .name_parser.parser import InvalidNameException, InvalidShowException, Name
from .scene_exceptions import get_scene_exceptions
from sg_helpers import scantree
-from _23 import map_list, quote_plus
+from _23 import quote_plus
from six import iterkeys, itervalues
# noinspection PyUnreachableCode
@@ -237,7 +237,7 @@ def get_show_names_all_possible(show_obj, season=-1, scenify=True, spacer='.', f
show_names = list(set(
all_possible_show_names(show_obj, season=season, force_anime=force_anime))) # type: List[AnyStr]
if scenify:
- show_names = map_list(sanitize_scene_name, show_names)
+ show_names = list(map(sanitize_scene_name, show_names))
return url_encode(show_names, spacer)
@@ -264,7 +264,7 @@ def make_scene_season_search_string(show_obj, # type: sickgear.tv.TVShow
ep_obj_list = show_obj.get_all_episodes(ep_obj.season)
# get show qualities
- any_qualities, best_qualities = common.Quality.splitQuality(show_obj.quality)
+ any_qualities, best_qualities = common.Quality.split_quality(show_obj.quality)
# compile a list of all the episode numbers we need in this 'season'
season_strings = []
@@ -272,7 +272,7 @@ def make_scene_season_search_string(show_obj, # type: sickgear.tv.TVShow
# get quality of the episode
cur_composite_status = episode.status
- cur_status, cur_quality = common.Quality.splitCompositeStatus(cur_composite_status)
+ cur_status, cur_quality = common.Quality.split_composite_status(cur_composite_status)
if best_qualities:
highest_best_quality = max(best_qualities)
@@ -378,7 +378,7 @@ def all_possible_show_names(show_obj, season=-1, force_anime=False):
# type: (sickgear.tv.TVShow, int, bool) -> List[AnyStr]
"""
Figures out every possible variation of the name for a particular show. Includes TVDB name, TVRage name,
- country codes on the end, eg. "Show Name (AU)", and any scene exception names.
+ country codes on the end, e.g. "Show Name (AU)", and any scene exception names.
:param show_obj: a TVShow object that we should get the names of
:param season: season
@@ -387,7 +387,7 @@ def all_possible_show_names(show_obj, season=-1, force_anime=False):
"""
show_names = get_scene_exceptions(show_obj.tvid, show_obj.prodid, season=season)[:]
- if not show_names: # if we dont have any season specific exceptions fallback to generic exceptions
+ if not show_names: # if we don't have any season-specific exceptions, fall back to generic exceptions
season = -1
show_names = get_scene_exceptions(show_obj.tvid, show_obj.prodid, season=season)[:]
diff --git a/sickgear/show_queue.py b/sickgear/show_queue.py
index 70a6cc6d..03046c93 100644
--- a/sickgear/show_queue.py
+++ b/sickgear/show_queue.py
@@ -931,7 +931,7 @@ class QueueItemAdd(ShowQueueItem):
wanted_updates.append({'season': sr['season'], 'episode': sr['episode'],
'status': sr['status']})
elif sr['status'] not in [WANTED]:
- cur_status, cur_quality = Quality.splitCompositeStatus(int(sr['status']))
+ cur_status, cur_quality = Quality.split_composite_status(int(sr['status']))
if sickgear.WANTEDLIST_CACHE.get_wantedlist(
self.quality, self.upgrade_once, cur_quality, cur_status,
unaired=(sickgear.SEARCH_UNAIRED and not sickgear.UNAIRED_RECENT_SEARCH_ONLY)):
@@ -1155,7 +1155,7 @@ class QueueItemAdd(ShowQueueItem):
raise
# update internal name cache
- name_cache.buildNameCache(self.show_obj)
+ name_cache.build_name_cache(self.show_obj)
self.show_obj.load_episodes_from_db()
@@ -1446,7 +1446,7 @@ class QueueItemUpdate(ShowQueueItem):
for cur_season in db_ep_obj_list:
for cur_episode in db_ep_obj_list[cur_season]:
ep_obj = self.show_obj.get_episode(cur_season, cur_episode) # type: Optional[TVEpisode]
- status = sickgear.common.Quality.splitCompositeStatus(ep_obj.status)[0]
+ status = sickgear.common.Quality.split_composite_status(ep_obj.status)[0]
if self.switch or should_delete_episode(status):
if self.switch:
cl.append(self.show_obj.switch_ep_change_sql(
diff --git a/sickgear/show_updater.py b/sickgear/show_updater.py
index 901b431c..9d6970be 100644
--- a/sickgear/show_updater.py
+++ b/sickgear/show_updater.py
@@ -220,7 +220,7 @@ class ShowUpdater(object):
if len(pi_list):
sickgear.show_queue_scheduler.action.daily_update_running = True
- ui.ProgressIndicators.setIndicator('dailyUpdate', ui.QueueProgressIndicator('Daily Update', pi_list))
+ ui.ProgressIndicators.set_indicator('dailyUpdate', ui.QueueProgressIndicator('Daily Update', pi_list))
logger.log(u'Added all shows to show queue for full update')
diff --git a/sickgear/traktChecker.py b/sickgear/traktChecker.py
deleted file mode 100644
index 851ed124..00000000
--- a/sickgear/traktChecker.py
+++ /dev/null
@@ -1,222 +0,0 @@
-# Author: Frank Fenton
-#
-# This file is part of SickGear.
-#
-# SickGear is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# SickGear is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with SickGear. If not, see .
-
-import datetime
-import os
-import traceback
-
-import sickgear
-from . import helpers, logger, search_queue
-from .common import SKIPPED, WANTED
-from .indexers.indexer_config import TVINFO_TVRAGE
-
-
-class TraktChecker(object):
- def __init__(self):
- self.todoWanted = []
-
- def run(self, force=False):
- try:
- # add shows from trakt.tv watchlist
- if sickgear.TRAKT_USE_WATCHLIST:
- self.todoWanted = [] # its about to all get re-added
- if len(sickgear.ROOT_DIRS.split('|')) < 2:
- logger.log(u"No default root directory", logger.ERROR)
- return
- self.updateShows()
- self.updateEpisodes()
-
- # sync trakt.tv library with SickGear library
- if sickgear.TRAKT_SYNC:
- self.syncLibrary()
- except Exception:
- logger.log(traceback.format_exc(), logger.DEBUG)
-
- def findShow(self, tvid, prodid):
- library = TraktCall("user/library/shows/all.json/%API%/" + sickgear.TRAKT_USERNAME, sickgear.TRAKT_API, sickgear.TRAKT_USERNAME, sickgear.TRAKT_PASSWORD)
-
- if library == 'NULL':
- logger.log(u"No shows found in your library, aborting library update", logger.DEBUG)
- return
-
- if not library:
- logger.log(u"Could not connect to trakt service, aborting library check", logger.ERROR)
- return
-
- return filter(lambda x: int(prodid) in [int(x['tvdb_id']) or 0, int(x['tvrage_id'])] or 0, library)
-
- def syncLibrary(self):
- logger.log(u"Syncing Trakt.tv show library", logger.DEBUG)
-
- for cur_show_obj in sickgear.showList:
- self.addShowToTraktLibrary(cur_show_obj)
-
- def removeShowFromTraktLibrary(self, show_obj):
- data = {}
- if self.findShow(show_obj.tvid, show_obj.prodid):
- # URL parameters
- data['tvdb_id'] = helpers.mapIndexersToShow(show_obj)[1]
- data['title'] = show_obj.name
- data['year'] = show_obj.startyear
-
- if len(data):
- logger.log(u"Removing " + show_obj.name + " from trakt.tv library", logger.DEBUG)
- TraktCall("show/unlibrary/%API%", sickgear.TRAKT_API, sickgear.TRAKT_USERNAME, sickgear.TRAKT_PASSWORD,
- data)
-
- def addShowToTraktLibrary(self, show_obj):
- """
- Sends a request to trakt indicating that the given show and all its episodes is part of our library.
-
- show_obj: The TVShow object to add to trakt
- """
-
- data = {}
-
- if not self.findShow(show_obj.tvid, show_obj.prodid):
- # URL parameters
- data['tvdb_id'] = helpers.mapIndexersToShow(show_obj)[1]
- data['title'] = show_obj.name
- data['year'] = show_obj.startyear
-
- if len(data):
- logger.log(u"Adding " + show_obj.name + " to trakt.tv library", logger.DEBUG)
- TraktCall("show/library/%API%", sickgear.TRAKT_API, sickgear.TRAKT_USERNAME, sickgear.TRAKT_PASSWORD,
- data)
-
- def updateShows(self):
- logger.log(u"Starting trakt show watchlist check", logger.DEBUG)
- watchlist = TraktCall("user/watchlist/shows.json/%API%/" + sickgear.TRAKT_USERNAME, sickgear.TRAKT_API, sickgear.TRAKT_USERNAME, sickgear.TRAKT_PASSWORD)
-
- if watchlist == 'NULL':
- logger.log(u"No shows found in your watchlist, aborting watchlist update", logger.DEBUG)
- return
-
- if not watchlist:
- logger.log(u"Could not connect to trakt service, aborting watchlist update", logger.ERROR)
- return
-
- for show in watchlist:
- tvid = int(sickgear.TRAKT_DEFAULT_INDEXER)
- prodid = int(show[('tvdb_id', 'tvrage_id')[TVINFO_TVRAGE == tvid]])
-
- if int(sickgear.TRAKT_METHOD_ADD) != 2:
- self.addDefaultShow(tvid, prodid, show["title"], SKIPPED)
- else:
- self.addDefaultShow(tvid, prodid, show["title"], WANTED)
-
- if int(sickgear.TRAKT_METHOD_ADD) == 1:
- show_obj = helpers.find_show_by_id({tvid: prodid})
- if None is not show_obj:
- self.setEpisodeToWanted(show_obj, 1, 1)
- else:
- self.todoWanted.append((prodid, 1, 1))
-
- def updateEpisodes(self):
- """
- Sets episodes to wanted that are in trakt watchlist
- """
- logger.log(u"Starting trakt episode watchlist check", logger.DEBUG)
- watchlist = TraktCall("user/watchlist/episodes.json/%API%/" + sickgear.TRAKT_USERNAME, sickgear.TRAKT_API, sickgear.TRAKT_USERNAME, sickgear.TRAKT_PASSWORD)
-
- if watchlist == 'NULL':
- logger.log(u"No episodes found in your watchlist, aborting watchlist update", logger.DEBUG)
- return
-
- if not watchlist:
- logger.log(u"Could not connect to trakt service, aborting watchlist update", logger.ERROR)
- return
-
- for show in watchlist:
- tvid = int(sickgear.TRAKT_DEFAULT_INDEXER)
- prodid = int(show[('tvdb_id', 'tvrage_id')[TVINFO_TVRAGE == tvid]])
-
- self.addDefaultShow(tvid, prodid, show['title'], SKIPPED)
- show_obj = helpers.find_show_by_id({tvid: prodid})
-
- try:
- if show_obj and show_obj.tvid == tvid:
- for episode in show["episodes"]:
- if None is not show_obj:
- self.setEpisodeToWanted(show_obj, episode["season"], episode["number"])
- else:
- self.todoWanted.append((prodid, episode["season"], episode["number"]))
- except TypeError:
- logger.log(u"Could not parse the output from trakt for " + show["title"], logger.DEBUG)
-
- def addDefaultShow(self, tvid, prod_id, name, status):
- """
- Adds a new show with the default settings
- """
- if not helpers.find_show_by_id({int(tvid): int(prodid)}):
- logger.log(u"Adding show " + str(prod_id))
- root_dirs = sickgear.ROOT_DIRS.split('|')
-
- try:
- location = root_dirs[int(root_dirs[0]) + 1]
- except:
- location = None
-
- if location:
- showPath = os.path.join(location, helpers.sanitize_filename(name))
- dir_exists = helpers.make_dir(showPath)
- if not dir_exists:
- logger.log(u"Unable to create the folder " + showPath + ", can't add the show", logger.ERROR)
- return
- else:
- helpers.chmod_as_parent(showPath)
-
- sickgear.show_queue_scheduler.action.add_show(
- int(tvid), int(prod_id), showPath,
- quality=int(sickgear.QUALITY_DEFAULT),
- paused=sickgear.TRAKT_START_PAUSED, default_status=status,
- flatten_folders=int(sickgear.FLATTEN_FOLDERS_DEFAULT)
- )
- else:
- logger.log(u"There was an error creating the show, no root directory setting found", logger.ERROR)
- return
-
- def setEpisodeToWanted(self, show_obj, s, e):
- """
- Sets an episode to wanted, only is it is currently skipped
- """
- ep_obj = show_obj.get_episode(int(s), int(e))
- if ep_obj:
-
- with ep_obj.lock:
- if ep_obj.status != SKIPPED or ep_obj.airdate == datetime.date.fromordinal(1):
- return
-
- logger.log(u"Setting episode s" + str(s) + "e" + str(e) + " of show " + show_obj.name + " to wanted")
- # figure out what segment the episode is in and remember it so we can backlog it
-
- ep_obj.status = WANTED
- ep_obj.save_to_db()
-
- backlog_queue_item = search_queue.BacklogQueueItem(show_obj, [ep_obj])
- sickgear.search_queue_scheduler.action.add_item(backlog_queue_item)
-
- logger.log(u"Starting backlog for " + show_obj.name + " season " + str(
- s) + " episode " + str(e) + " because some eps were set to wanted")
-
- def manageNewShow(self, show_obj):
- logger.log(u"Checking if trakt watch list wants to search for episodes from new show " + show_obj.name,
- logger.DEBUG)
- episodes = [i for i in self.todoWanted if i[0] == show_obj.prodid]
- for episode in episodes:
- self.todoWanted.remove(episode)
- self.setEpisodeToWanted(show_obj, episode[1], episode[2])
diff --git a/sickgear/trakt_helpers.py b/sickgear/trakt_helpers.py
index acbbb398..b1a8314f 100644
--- a/sickgear/trakt_helpers.py
+++ b/sickgear/trakt_helpers.py
@@ -5,7 +5,7 @@ import re
import sickgear
from .helpers import try_int
-from _23 import decode_bytes, decode_str, list_items
+from _23 import decode_bytes, decode_str
from six import iteritems, text_type
@@ -51,7 +51,7 @@ def build_config_string(config):
:param config: dicts of Trakt account id, parent location
:return: string csv of parsed config kwargs for config file
"""
- return text_type(list_items(config))
+ return text_type(list(config.items()))
def trakt_collection_remove_account(account_id):
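
The `_23.list_items` shim goes the same way: under py3 it is simply `list(dict.items())`, and six's `text_type` is `str`. A tiny sketch with an illustrative account-id to location mapping:

```python
# Sketch: _23.list_items(d) was a shim for list(d.items())
config = {1234: '/media/shows'}  # illustrative Trakt account id -> parent location
print(str(list(config.items())))  # [(1234, '/media/shows')]
```
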
diff --git a/sickgear/tv.py b/sickgear/tv.py
index af779dbd..73143391 100644
--- a/sickgear/tv.py
+++ b/sickgear/tv.py
@@ -43,7 +43,7 @@ import sickgear
from . import db, helpers, history, image_cache, indexermapper, logger, \
name_cache, network_timezones, notifiers, postProcessor, subtitles
from .anime import AniGroupList
-from .classes import weakList
+from .classes import WeakList
from .common import Quality, statusStrings, \
ARCHIVED, DOWNLOADED, FAILED, IGNORED, SKIPPED, SNATCHED, SNATCHED_ANY, SNATCHED_PROPER, UNAIRED, UNKNOWN, WANTED, \
NAMING_DUPLICATE, NAMING_EXTEND, NAMING_LIMITED_EXTEND, NAMING_LIMITED_EXTEND_E_PREFIXED, NAMING_SEPARATED_REPEAT
@@ -63,8 +63,7 @@ from lib.tvinfo_base import RoleTypes, TVINFO_FACEBOOK, TVINFO_INSTAGRAM, TVINFO
from lib.tvinfo_base.exceptions import *
from sg_helpers import calc_age, int_to_time, remove_file_perm, time_to_int
-from _23 import filter_iter, filter_list, list_keys
-from six import integer_types, iteritems, itervalues, moves, PY2, string_types
+from six import integer_types, iteritems, itervalues, moves, string_types
# noinspection PyUnreachableCode
if False:
@@ -172,9 +171,9 @@ class TVidProdid(object):
if coreid_warnings:
logger.log('%s\n' % pre_msg +
'|>%s^-- Note: Bootstrap & Tornado startup functions stripped from traceback log.' %
- '|>'.join(filter_iter(lambda text: not re.search(r'(?i)bootstrap|traceback\.'
- r'format_stack|pydevd|tornado'
- r'|webserveinit', text),
+ '|>'.join(filter(lambda text: not re.search(r'(?i)bootstrap|traceback\.'
+ r'format_stack|pydevd|tornado'
+ r'|webserveinit', text),
traceback.format_stack(inspect.currentframe()))))
except IndexError:
pass
@@ -281,7 +280,7 @@ def usable_id(value):
def usable_rid(value):
# type: (Union[AnyStr]) -> Optional[AnyStr]
"""
- return value if is a id:format is valid
+ return value if it is a valid id:format
otherwise None if value fails basic id format validation
"""
if isinstance(value, string_types) and ':' in value:
@@ -379,7 +378,7 @@ class Person(Referential):
akas=None, # type: Set[AnyStr]
character_obj=None, # type: Character
tmp_character_obj=None # type: Character
- ): # type: (...) -> Person
+ ):
super(Person, self).__init__(sid)
@@ -453,7 +452,7 @@ class Person(Referential):
def reset(self, person_obj=None):
# type: (TVInfoPerson) -> None
"""
- reset all properties with the exception of: name, id, ids
+ reset all properties except: name, id, ids
:param person_obj: TVInfo Person object to reset to
"""
@@ -789,7 +788,9 @@ class Person(Referential):
if None is not rp:
if confirmed_on_src:
for i in (TVINFO_TRAKT, TVINFO_IMDB, TVINFO_TMDB, TVINFO_TVMAZE, TVINFO_TVDB):
- # in case it's the current source use it's id and lock if from being changed
+ if not rp.ids.get(i):
+ continue
+ # in case it's the current source, use its id and lock it from being changed
if cur_tv_info_src == i and rp.ids.get(i):
source_confirmed[i] = True
if rp.ids.get(i) != self.ids.get(i):
@@ -803,6 +804,8 @@ class Person(Referential):
self.dirty_ids = True
for i in (TVINFO_INSTAGRAM, TVINFO_TWITTER, TVINFO_FACEBOOK, TVINFO_WIKIPEDIA):
+ if not rp.social_ids.get(i):
+ continue
if rp.social_ids.get(i) and not self.ids.get(i) or \
(rp.social_ids.get(i) and rp.social_ids.get(i) != self.ids.get(i)):
self.ids[i] = rp.social_ids[i]
@@ -892,11 +895,12 @@ class Person(Referential):
]
if force or self.dirty_ids:
for s, v in iteritems(self.ids):
- cl.extend([
- ['UPDATE person_ids SET src_id = ? WHERE person_id = ? AND src = ?', [v, self.id, s]],
- ["INSERT INTO person_ids (src, src_id, person_id) SELECT %s, '%s', %s WHERE changes() == 0"
- % (s, v, self.id)]
- ])
+ if v:
+ cl.extend([
+ ['UPDATE person_ids SET src_id = ? WHERE person_id = ? AND src = ?', [v, self.id, s]],
+ ["INSERT INTO person_ids (src, src_id, person_id) SELECT %s, '%s', %s WHERE changes() == 0"
+ % (s, v, self.id)]
+ ])
if cl:
r_id = my_db.mass_action(cl)
if r_id and r_id[-1:][0]:
@@ -1399,8 +1403,8 @@ class TVShow(TVShowBase):
@cast_list.setter
def cast_list(self, value):
- # type: (weakList[Character]) -> None
- self._cast_list = None if not isinstance(value, weakList) else weakref.ref(value)
+ # type: (WeakList[Character]) -> None
+ self._cast_list = None if not isinstance(value, WeakList) else weakref.ref(value)
@property
def network_id(self):
@@ -1896,7 +1900,7 @@ class TVShow(TVShowBase):
bio=cur_row['c_bio'], ids=c_ids, image_url=cur_row['image_url'], person=[person],
persons_years=p_years, show_obj=self, sid=cur_row['c_id'],
thumb_url=cur_row['thumb_url'], updated=cur_row['cast_updated']))
- cast_list = weakList(c for c in old_cast or [] if c.id not in old_list)
+ cast_list = WeakList(c for c in old_cast or [] if c.id not in old_list)
self.cast_list = cast_list
return cast_list
@@ -1986,7 +1990,7 @@ class TVShow(TVShowBase):
return True
return False
- # In some situations self.status = None.. need to figure out where that is!
+ # In some situations self.status = None, need to figure out where that is!
if not self._status:
self.status = ''
logger.log('Status missing for show: [%s] with status: [%s]' %
@@ -2022,7 +2026,7 @@ class TVShow(TVShowBase):
last_airdate = datetime.date.fromordinal(sql_result[1][0]['airdate']) \
if sql_result and sql_result[1] else datetime.date.fromordinal(1)
- # if show is not 'Ended' and last episode aired less then 460 days ago
+ # if show is not 'Ended' and last episode aired less than 460 days ago
# or don't have an airdate for the last episode always update (status 'Continuing' or '')
update_days_limit = 2013
ended_limit = datetime.timedelta(days=update_days_limit)
@@ -2442,7 +2446,7 @@ class TVShow(TVShowBase):
logger.log('No episode number found in %s, ignoring it' % path, logger.ERROR)
return None
- # for now lets assume that any episode in the show dir belongs to that show
+ # for now let's assume that any episode in the show dir belongs to that show
season_number = parse_result.season_number if None is not parse_result.season_number else 1
episode_numbers = parse_result.episode_numbers
root_ep_obj = None
@@ -2467,7 +2471,7 @@ class TVShow(TVShowBase):
else:
# if there is a new file associated with this ep then re-check the quality
- status, quality = sickgear.common.Quality.splitCompositeStatus(ep_obj.status)
+ status, quality = sickgear.common.Quality.split_composite_status(ep_obj.status)
if IGNORED == status:
continue
@@ -2502,25 +2506,25 @@ class TVShow(TVShowBase):
# if user replaces a file, attempt to recheck the quality unless it's known to be the same file
if check_quality_again and not same_file:
- new_quality = Quality.nameQuality(path, self.is_anime)
+ new_quality = Quality.name_quality(path, self.is_anime)
if Quality.UNKNOWN == new_quality:
- new_quality = Quality.fileQuality(path)
+ new_quality = Quality.file_quality(path)
logger.log('Since this file was renamed, file %s was checked and quality "%s" found'
% (path, Quality.qualityStrings[new_quality]), logger.DEBUG)
- status, quality = sickgear.common.Quality.splitCompositeStatus(ep_obj.status)
+ status, quality = sickgear.common.Quality.split_composite_status(ep_obj.status)
if Quality.UNKNOWN != new_quality or status in (SKIPPED, UNAIRED):
- ep_obj.status = Quality.compositeStatus(DOWNLOADED, new_quality)
+ ep_obj.status = Quality.composite_status(DOWNLOADED, new_quality)
# check for status/quality changes as long as it's a new file
elif not same_file and sickgear.helpers.has_media_ext(path)\
and ep_obj.status not in Quality.DOWNLOADED + Quality.ARCHIVED + [IGNORED]:
- old_status, old_quality = Quality.splitCompositeStatus(ep_obj.status)
- new_quality = Quality.nameQuality(path, self.is_anime)
+ old_status, old_quality = Quality.split_composite_status(ep_obj.status)
+ new_quality = Quality.name_quality(path, self.is_anime)
if Quality.UNKNOWN == new_quality:
- new_quality = Quality.fileQuality(path)
+ new_quality = Quality.file_quality(path)
if Quality.UNKNOWN == new_quality:
- new_quality = Quality.assumeQuality(path)
+ new_quality = Quality.assume_quality(path)
new_status = None
@@ -2532,7 +2536,7 @@ class TVShow(TVShowBase):
logger.DEBUG)
new_status = DOWNLOADED
- # if it was snatched proper and we found a higher quality one then allow the status change
+ # if it was snatched proper, and we found a higher quality one then allow the status change
elif SNATCHED_PROPER == old_status and old_quality < new_quality:
logger.log('STATUS: this episode used to be snatched proper with quality %s but'
' a file exists with quality %s so setting the status to DOWNLOADED'
@@ -2546,18 +2550,18 @@ class TVShow(TVShowBase):
if None is not new_status:
with ep_obj.lock:
logger.log('STATUS: we have an associated file, so setting the status from %s to DOWNLOADED/%s'
- % (ep_obj.status, Quality.compositeStatus(new_status, new_quality)), logger.DEBUG)
- ep_obj.status = Quality.compositeStatus(new_status, new_quality)
+ % (ep_obj.status, Quality.composite_status(new_status, new_quality)), logger.DEBUG)
+ ep_obj.status = Quality.composite_status(new_status, new_quality)
elif same_file:
- status, quality = Quality.splitCompositeStatus(ep_obj.status)
+ status, quality = Quality.split_composite_status(ep_obj.status)
if status in (SKIPPED, UNAIRED):
- new_quality = Quality.nameQuality(path, self.is_anime)
+ new_quality = Quality.name_quality(path, self.is_anime)
if Quality.UNKNOWN == new_quality:
- new_quality = Quality.fileQuality(path)
+ new_quality = Quality.file_quality(path)
logger.log('Since this file has status: "%s", file %s was checked and quality "%s" found'
% (statusStrings[status], path, Quality.qualityStrings[new_quality]), logger.DEBUG)
- ep_obj.status = Quality.compositeStatus(DOWNLOADED, new_quality)
+ ep_obj.status = Quality.composite_status(DOWNLOADED, new_quality)
with ep_obj.lock:
result = ep_obj.get_sql()
@@ -2769,7 +2773,7 @@ class TVShow(TVShowBase):
:param scheduled_update:
:param switch:
"""
- # There's gotta be a better way of doing this but we don't wanna
+ # There's gotta be a better way of doing this, but we don't want to
# change the cache value elsewhere
if None is tvapi:
tvinfo_config = sickgear.TVInfoAPI(self.tvid).api_params.copy()
@@ -2896,7 +2900,7 @@ class TVShow(TVShowBase):
cast_list = self._load_cast_from_db()
remove_char_ids = {c.id for c in cast_list or []}
- cast_ordered = weakList()
+ cast_ordered = WeakList()
for ct, c_l in iteritems(show_info_cast): # type: (integer_types, List[TVInfoCharacter])
if ct not in (RoleTypes.ActorMain, RoleTypes.Host, RoleTypes.Interviewer, RoleTypes.Presenter):
continue
@@ -3152,9 +3156,9 @@ class TVShow(TVShowBase):
if isinstance(imdb_tv.get('numberOfEpisodes'), (int, string_types)):
imdb_info['episode_count'] = try_int(imdb_tv.get('numberOfEpisodes'), 1)
if isinstance(imdb_tv.get('genres'), (list, tuple)):
- imdb_info['genres'] = '|'.join(filter_iter(lambda _v: _v, imdb_tv.get('genres')))
+ imdb_info['genres'] = '|'.join(filter(lambda _v: _v, imdb_tv.get('genres')))
if isinstance(imdb_tv.get('origins'), list):
- imdb_info['country_codes'] = '|'.join(filter_iter(lambda _v: _v, imdb_tv.get('origins')))
+ imdb_info['country_codes'] = '|'.join(filter(lambda _v: _v, imdb_tv.get('origins')))
# certificate
if isinstance(imdb_certificates.get('certificates'), dict):
@@ -3256,7 +3260,7 @@ class TVShow(TVShowBase):
action = ('delete', 'trash')[sickgear.TRASH_REMOVE_SHOW]
# remove self from show list
- sickgear.showList = filter_list(lambda so: so.tvid_prodid != self.tvid_prodid, sickgear.showList)
+ sickgear.showList = list(filter(lambda so: so.tvid_prodid != self.tvid_prodid, sickgear.showList))
try:
del sickgear.showDict[self.sid_int]
except (BaseException, Exception):
@@ -3382,11 +3386,11 @@ class TVShow(TVShowBase):
# check if downloaded files still exist, update our data if this has changed
if 1 != sickgear.SKIP_REMOVED_FILES:
with ep_obj.lock:
- # if it used to have a file associated with it and it doesn't anymore then set it to IGNORED
+ # if it used to have a file associated with it, and it doesn't anymore then set it to IGNORED
if ep_obj.location and ep_obj.status in Quality.DOWNLOADED:
if ARCHIVED == sickgear.SKIP_REMOVED_FILES:
- ep_obj.status = Quality.compositeStatus(
- ARCHIVED, Quality.qualityDownloaded(ep_obj.status))
+ ep_obj.status = Quality.composite_status(
+ ARCHIVED, Quality.quality_downloaded(ep_obj.status))
else:
ep_obj.status = (sickgear.SKIP_REMOVED_FILES, IGNORED)[
not sickgear.SKIP_REMOVED_FILES]
@@ -3541,7 +3545,7 @@ class TVShow(TVShowBase):
sickgear.FANART_RATINGS[self.tvid_prodid] = rating
sickgear.save_config()
- name_cache.buildNameCache(self)
+ name_cache.build_name_cache(self)
self.reset_not_found_count()
old_sid_int = self.create_sid(old_tvid, old_prodid)
if old_sid_int != self.sid_int:
@@ -3676,7 +3680,7 @@ class TVShow(TVShowBase):
wq = getattr(self.sxe_ep_obj.get(season, {}).get(episode, {}), 'wanted_quality', None)
if None is not wq:
if quality in wq:
- cur_status, cur_quality = Quality.splitCompositeStatus(self.sxe_ep_obj[season][episode].status)
+ cur_status, cur_quality = Quality.split_composite_status(self.sxe_ep_obj[season][episode].status)
if cur_status in (WANTED, UNAIRED, SKIPPED, FAILED):
logger.log('Existing episode status is wanted/unaired/skipped/failed,'
' getting found episode', logger.DEBUG)
@@ -3696,7 +3700,7 @@ class TVShow(TVShowBase):
pass
# if the quality isn't one we want under any circumstances then just say no
- initial_qualities, archive_qualities = Quality.splitQuality(self._quality)
+ initial_qualities, archive_qualities = Quality.split_quality(self._quality)
all_qualities = list(set(initial_qualities + archive_qualities))
initial = '= (%s)' % ','.join([Quality.qualityStrings[qual] for qual in initial_qualities])
@@ -3721,7 +3725,7 @@ class TVShow(TVShowBase):
logger.log('Unable to find a matching episode in database, ignoring found episode', logger.DEBUG)
return False
- cur_status, cur_quality = Quality.splitCompositeStatus(int(sql_result[0]['status']))
+ cur_status, cur_quality = Quality.split_composite_status(int(sql_result[0]['status']))
ep_status_text = statusStrings[cur_status]
logger.log('Existing episode status: %s (%s)' % (statusStrings[cur_status], ep_status_text), logger.DEBUG)
@@ -4007,7 +4011,7 @@ class TVEpisode(TVEpisodeBase):
return
self.refresh_subtitles()
- # added the if because sometime it raises an error
+ # added the if because sometimes it raises an error
self.subtitles_searchcount = self.subtitles_searchcount + 1 if self.subtitles_searchcount else 1
self.subtitles_lastsearch = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
self.save_to_db()
@@ -4220,8 +4224,6 @@ class TVEpisode(TVEpisodeBase):
tzinfo = self._show_obj.timezone
elif isinstance(self._show_obj.network, string_types) and self._show_obj.network:
tzinfo = network_timezones.get_network_timezone(self._show_obj.network)
- if PY2:
- return SGDatetime.combine(self.airdate, ep_time).replace(tzinfo=tzinfo).timestamp_far()
return SGDatetime.combine(self.airdate, ep_time, tzinfo=tzinfo).timestamp_far()
return None
@@ -4290,7 +4292,7 @@ class TVEpisode(TVEpisodeBase):
except (BaseTVinfoEpisodenotfound, BaseTVinfoSeasonnotfound):
logger.log('Unable to find the episode on %s... has it been removed? Should I delete from db?' %
sickgear.TVInfoAPI(self.tvid).name, logger.DEBUG)
- # if I'm no longer on the Indexers but I once was then delete myself from the DB
+ # if I'm no longer on the Indexers, but I once was then delete myself from the DB
if -1 != self._epid and helpers.should_delete_episode(self._status):
self.delete_episode()
elif UNKNOWN == self._status:
@@ -4350,7 +4352,7 @@ class TVEpisode(TVEpisodeBase):
except (ValueError, IndexError):
logger.error('Malformed air date retrieved from %s (%s - %sx%s)' %
(sickgear.TVInfoAPI(self.tvid).name, self.show_obj.unique_name, season, episode))
- # if I'm incomplete on TVDB but I once was complete then just delete myself from the DB for now
+ # if I'm incomplete on TVDB, but I once was complete then just delete myself from the DB for now
if -1 != self._epid and helpers.should_delete_episode(self._status):
self.delete_episode()
elif UNKNOWN == self._status:
@@ -4482,7 +4484,7 @@ class TVEpisode(TVEpisodeBase):
# leave propers alone, you have to either post-process them or manually change them back
elif self._status not in Quality.SNATCHED_ANY + Quality.DOWNLOADED + Quality.ARCHIVED:
msg = '(1) Status changes from %s to ' % statusStrings[self._status]
- self.status = Quality.statusFromNameOrFile(self._location, anime=self._show_obj.is_anime)
+ self.status = Quality.status_from_name_or_file(self._location, anime=self._show_obj.is_anime)
logger.log('%s%s' % (msg, statusStrings[self._status]), logger.DEBUG)
# shouldn't get here probably
@@ -4511,7 +4513,7 @@ class TVEpisode(TVEpisodeBase):
if '' != self.location:
if UNKNOWN == self._status and sickgear.helpers.has_media_ext(self.location):
- status_quality = Quality.statusFromNameOrFile(self.location, anime=self._show_obj.is_anime)
+ status_quality = Quality.status_from_name_or_file(self.location, anime=self._show_obj.is_anime)
logger.log('(3) Status changes from %s to %s' % (self._status, status_quality), logger.DEBUG)
self.status = status_quality
@@ -4839,8 +4841,8 @@ class TVEpisode(TVEpisodeBase):
def _ep_name(self):
"""
:return: the name of the episode to use during renaming. Combines the names of related episodes.
- Eg. "Ep Name (1)" and "Ep Name (2)" becomes "Ep Name"
- "Ep Name" and "Other Ep Name" becomes "Ep Name & Other Ep Name"
+ E.g. "Ep Name (1)" and "Ep Name (2)" becomes "Ep Name"
+ "Ep Name" and "Other Ep Name" becomes "Ep Name & Other Ep Name"
:rtype: AnyStr
"""
@@ -4913,7 +4915,7 @@ class TVEpisode(TVEpisodeBase):
return ''
return parse_result.release_group
- ep_status, ep_qual = Quality.splitCompositeStatus(self._status)
+ ep_status, ep_qual = Quality.split_composite_status(self._status)
if sickgear.NAMING_STRIP_YEAR:
show_name = re.sub(r'\(\d+\)$', '', self._show_obj.name).rstrip()
@@ -4964,7 +4966,7 @@ class TVEpisode(TVEpisodeBase):
result_name = pattern
# do the replacements
- for cur_replacement in sorted(list_keys(replace_map), reverse=True):
+ for cur_replacement in sorted(list(replace_map), reverse=True):
result_name = result_name.replace(cur_replacement, helpers.sanitize_filename(replace_map[cur_replacement]))
result_name = result_name.replace(cur_replacement.lower(),
helpers.sanitize_filename(replace_map[cur_replacement].lower()))
@@ -5059,7 +5061,7 @@ class TVEpisode(TVEpisodeBase):
if not ep_sep or not ep_format:
continue
- # start with the ep string, eg. E03
+ # start with the ep string, e.g. E03
ep_string = self._format_string(ep_format.upper(), replace_map)
for cur_ep_obj in self.related_ep_obj:
@@ -5087,7 +5089,7 @@ class TVEpisode(TVEpisodeBase):
if 3 != anime_type:
absolute_number = (self._absolute_number, self._episode)[0 == self._absolute_number]
- if 0 != self._season: # dont set absolute numbers if we are on specials !
+ if 0 != self._season: # don't set absolute numbers if we are on specials !
if 1 == anime_type: # this crazy person wants both ! (note: +=)
ep_string += sep + '%(#)03d' % {'#': absolute_number}
elif 2 == anime_type: # total anime freak only need the absolute number ! (note: =)
@@ -5270,7 +5272,7 @@ class TVEpisode(TVEpisodeBase):
def airdate_modify_stamp(self):
"""
- Make the modify date and time of a file reflect the show air date and time.
+ Make a file's modify date and time reflect the show's air date and time.
Note: Also called from postProcessor
"""
diff --git a/sickgear/tv_base.py b/sickgear/tv_base.py
index b8a72466..d00dc284 100644
--- a/sickgear/tv_base.py
+++ b/sickgear/tv_base.py
@@ -20,7 +20,7 @@ import sickgear
from . import logger
from ._legacy_classes import LegacyTVShow, LegacyTVEpisode
from .common import UNKNOWN
-from .name_cache import buildNameCache
+from .name_cache import build_name_cache
from six import string_types
@@ -132,7 +132,7 @@ class TVShowBase(LegacyTVShow, TVBase):
_current_name = self._name
self.dirty_setter('_name')(self, *arg)
if _current_name != self._name:
- buildNameCache(self)
+ build_name_cache(self)
# imdbid = property(lambda self: self._imdbid, dirty_setter('_imdbid'))
@property
diff --git a/sickgear/tvcache.py b/sickgear/tvcache.py
index 6450b15d..cdcb4b8a 100644
--- a/sickgear/tvcache.py
+++ b/sickgear/tvcache.py
@@ -30,12 +30,10 @@ from .rssfeeds import RSSFeeds
from .sgdatetime import timestamp_near
from .tv import TVEpisode
-from _23 import filter_list, map_iter
-from six import PY2, text_type
-
# noinspection PyUnreachableCode
if False:
from typing import Any, AnyStr, Dict, List, Tuple, Union
+ from providers.generic import GenericProvider, NZBProvider, TorrentProvider
class CacheDBConnection(db.DBConnection):
@@ -44,7 +42,7 @@ class CacheDBConnection(db.DBConnection):
# Create the table if it's not already there
try:
- if not self.hasTable('lastUpdate'):
+ if not self.has_table('lastUpdate'):
self.action('CREATE TABLE lastUpdate (provider TEXT, time NUMERIC)')
except (BaseException, Exception) as e:
if ex(e) != 'table lastUpdate already exists':
@@ -53,7 +51,7 @@ class CacheDBConnection(db.DBConnection):
class TVCache(object):
def __init__(self, provider, interval=10):
- # type: (AnyStr, int) -> None
+ # type: (Union[GenericProvider, NZBProvider, TorrentProvider], int) -> None
self.provider = provider
self.providerID = self.provider.get_id()
self.providerDB = None
@@ -63,7 +61,7 @@ class TVCache(object):
def get_db():
return CacheDBConnection()
- def _clearCache(self):
+ def clear_cache(self):
if self.should_clear_cache():
my_db = self.get_db()
my_db.action('DELETE FROM provider_cache WHERE provider = ?', [self.providerID])
@@ -84,26 +82,13 @@ class TVCache(object):
data = None
return data
- def _checkAuth(self):
+ def check_auth(self):
# noinspection PyProtectedMember
return self.provider._check_auth()
- @staticmethod
- def _checkItemAuth(title, url):
- """
-
- :param title: title
- :type title: AnyStr
- :param url: url
- :type url: AnyStr
- :return:
- :rtype: bool
- """
- return True
-
- def updateCache(self, **kwargs):
+ def update_cache(self, **kwargs):
try:
- self._checkAuth()
+ self.check_auth()
except AuthException as e:
logger.log(u'Authentication error: ' + ex(e), logger.ERROR)
return []
@@ -113,13 +98,13 @@ class TVCache(object):
# clear cache
if data:
- self._clearCache()
+ self.clear_cache()
# parse data
cl = []
for item in data or []:
title, url = self._title_and_url(item)
- ci = self._parseItem(title, url)
+ ci = self.parse_item(title, url)
if None is not ci:
cl.append(ci)
@@ -131,13 +116,13 @@ class TVCache(object):
logger.log('Warning could not save cache value [%s], caught err: %s' % (cl, ex(e)))
# set updated as time the attempt to fetch data is
- self.setLastUpdate()
+ self.set_last_update()
def get_rss(self, url, **kwargs):
return RSSFeeds(self.provider).get_feed(url, **kwargs)
@staticmethod
- def _translateTitle(title):
+ def _translate_title(title):
"""
:param title: title
@@ -148,7 +133,7 @@ class TVCache(object):
return u'' + title.replace(' ', '.')
@staticmethod
- def _translateLinkURL(url):
+ def _translate_link_url(url):
"""
:param url: url
@@ -158,7 +143,7 @@ class TVCache(object):
"""
return url.replace('&', '&')
- def _parseItem(self, title, url):
+ def parse_item(self, title, url):
"""
:param title: title
@@ -168,18 +153,16 @@ class TVCache(object):
:return:
:rtype: None or List[AnyStr, List[Any]]
"""
- self._checkItemAuth(title, url)
-
if title and url:
- title = self._translateTitle(title)
- url = self._translateLinkURL(url)
+ title = self._translate_title(title)
+ url = self._translate_link_url(url)
return self.add_cache_entry(title, url)
logger.log('Data returned from the %s feed is incomplete, this result is unusable' % self.provider.name,
logger.DEBUG)
- def _getLastUpdate(self):
+ def _get_last_update(self):
"""
:return:
@@ -189,15 +172,15 @@ class TVCache(object):
sql_result = my_db.select('SELECT time FROM lastUpdate WHERE provider = ?', [self.providerID])
if sql_result:
- lastTime = int(sql_result[0]['time'])
- if lastTime > int(timestamp_near(datetime.datetime.now())):
- lastTime = 0
+ last_time = int(sql_result[0]['time'])
+ if last_time > int(timestamp_near(datetime.datetime.now())):
+ last_time = 0
else:
- lastTime = 0
+ last_time = 0
- return datetime.datetime.fromtimestamp(lastTime)
+ return datetime.datetime.fromtimestamp(last_time)
- def _getLastSearch(self):
+ def _get_last_search(self):
"""
:return:
@@ -207,15 +190,15 @@ class TVCache(object):
sql_result = my_db.select('SELECT time FROM lastSearch WHERE provider = ?', [self.providerID])
if sql_result:
- lastTime = int(sql_result[0]['time'])
- if lastTime > int(timestamp_near(datetime.datetime.now())):
- lastTime = 0
+ last_time = int(sql_result[0]['time'])
+ if last_time > int(timestamp_near(datetime.datetime.now())):
+ last_time = 0
else:
- lastTime = 0
+ last_time = 0
- return datetime.datetime.fromtimestamp(lastTime)
+ return datetime.datetime.fromtimestamp(last_time)
- def setLastUpdate(self, to_date=None):
+ def set_last_update(self, to_date=None):
"""
:param to_date: date time
@@ -229,7 +212,7 @@ class TVCache(object):
{'time': int(time.mktime(to_date.timetuple()))},
{'provider': self.providerID})
- def setLastSearch(self, to_date=None):
+ def _set_last_search(self, to_date=None):
"""
:param to_date: date time
@@ -243,8 +226,8 @@ class TVCache(object):
{'time': int(time.mktime(to_date.timetuple()))},
{'provider': self.providerID})
- lastUpdate = property(_getLastUpdate)
- lastSearch = property(_getLastSearch)
+ last_update = property(_get_last_update)
+ last_search = property(_get_last_search)
def should_update(self):
"""
@@ -253,7 +236,7 @@ class TVCache(object):
:rtype: bool
"""
# if we've updated recently then skip the update
- return datetime.datetime.now() - self.lastUpdate >= datetime.timedelta(minutes=self.update_iv)
+ return datetime.datetime.now() - self.last_update >= datetime.timedelta(minutes=self.update_iv)
def should_clear_cache(self):
"""
@@ -262,7 +245,7 @@ class TVCache(object):
:rtype: bool
"""
# if recent search hasn't used our previous results yet then don't clear the cache
- return self.lastSearch >= self.lastUpdate
+ return self.last_search >= self.last_update
def add_cache_entry(self,
name, # type: AnyStr
@@ -315,7 +298,7 @@ class TVCache(object):
if season_number and episode_numbers:
# store episodes as a separated string
- episode_text = '|%s|' % '|'.join(map_iter(str, episode_numbers))
+ episode_text = '|%s|' % '|'.join(map(str, episode_numbers))
# get the current timestamp
cur_timestamp = int(timestamp_near(datetime.datetime.now()))
@@ -323,9 +306,6 @@ class TVCache(object):
# get quality of release
quality = parse_result.quality
- if PY2 and not isinstance(name, text_type):
- name = text_type(name, 'utf-8', 'replace')
-
# get release group
release_group = parse_result.release_group
@@ -346,22 +326,22 @@ class TVCache(object):
url, cur_timestamp, quality, release_group, version,
parse_result.show_obj.tvid]]
- def searchCache(self,
- episode, # type: TVEpisode
- manual_search=False # type: bool
- ): # type: (...) -> List[SearchResult]
+ def search_cache(self,
+ episode, # type: TVEpisode
+ manual_search=False # type: bool
+ ): # type: (...) -> List[SearchResult]
"""
:param episode: episode object
:param manual_search: manual search
:return: found results or empty List
"""
- neededEps = self.findNeededEpisodes(episode, manual_search)
- if 0 != len(neededEps):
- return neededEps[episode]
+ needed_eps = self.find_needed_episodes(episode, manual_search)
+ if 0 != len(needed_eps):
+ return needed_eps[episode]
return []
- def listPropers(self, date=None):
+ def list_propers(self, date=None):
"""
:param date: date
@@ -376,16 +356,16 @@ class TVCache(object):
if date:
sql += ' AND time >= ' + str(int(time.mktime(date.timetuple())))
- return filter_list(lambda x: x['indexerid'] != 0, my_db.select(sql, [self.providerID]))
+ return list(filter(lambda x: x['indexerid'] != 0, my_db.select(sql, [self.providerID])))
- def findNeededEpisodes(self, ep_obj_list, manual_search=False):
+ def find_needed_episodes(self, ep_obj_list, manual_search=False):
# type: (Union[TVEpisode, List[TVEpisode]], bool) -> Dict[TVEpisode, SearchResult]
"""
:param ep_obj_list: episode object or list of episode objects
:param manual_search: manual search
"""
- neededEps = {}
+ needed_eps = {}
cl = []
my_db = self.get_db()
@@ -408,8 +388,8 @@ class TVCache(object):
sql_result = list(itertools.chain(*sql_result))
if not sql_result:
- self.setLastSearch()
- return neededEps
+ self._set_last_search()
+ return needed_eps
# for each cache entry
for cur_result in sql_result:
@@ -479,12 +459,12 @@ class TVCache(object):
check_is_repack=True)
# add it to the list
- if ep_obj not in neededEps:
- neededEps[ep_obj] = [result]
+ if ep_obj not in needed_eps:
+ needed_eps[ep_obj] = [result]
else:
- neededEps[ep_obj].append(result)
+ needed_eps[ep_obj].append(result)
# datetime stamp this search so cache gets cleared
- self.setLastSearch()
+ self._set_last_search()
- return neededEps
+ return needed_eps
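
Inside `add_cache_entry` above, `map_iter` becomes the builtin `map`, and matched episode numbers are still flattened into a pipe-delimited string before hitting the DB. A quick sketch of that encoding:

```python
# Sketch: episode numbers are stored in provider_cache as '|1|2|3|'
episode_numbers = [1, 2, 3]
episode_text = '|%s|' % '|'.join(map(str, episode_numbers))
assert episode_text == '|1|2|3|'
```
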
diff --git a/sickgear/ui.py b/sickgear/ui.py
index b03d9728..522a9093 100644
--- a/sickgear/ui.py
+++ b/sickgear/ui.py
@@ -117,7 +117,7 @@ class Notification(object):
class ProgressIndicator(object):
def __init__(self, percent_complete=0, current_status=None):
- self.percentComplete = percent_complete
+ self.percent_complete = percent_complete
self.currentStatus = {'title': ''} if None is current_status else current_status
@@ -128,20 +128,20 @@ class ProgressIndicators(object):
}
@staticmethod
- def getIndicator(name):
+ def get_indicator(name):
if name not in ProgressIndicators._pi:
return []
# if any of the progress indicators are done take them off the list
for curPI in ProgressIndicators._pi[name]:
- if None is not curPI and 100 == curPI.percentComplete():
+ if None is not curPI and 100 == curPI.percent_complete():
ProgressIndicators._pi[name].remove(curPI)
# return the list of progress indicators associated with this name
return ProgressIndicators._pi[name]
@staticmethod
- def setIndicator(name, indicator):
+ def set_indicator(name, indicator):
ProgressIndicators._pi[name].append(indicator)
@@ -154,16 +154,16 @@ class QueueProgressIndicator(object):
self.queueItemList = queue_item_list
self.name = name
- def numTotal(self):
+ def num_total(self):
return len(self.queueItemList)
- def numFinished(self):
+ def num_finished(self):
return len([x for x in self.queueItemList if not x.is_in_queue()])
- def numRemaining(self):
+ def num_remaining(self):
return len([x for x in self.queueItemList if x.is_in_queue()])
- def nextName(self):
+ def next_name(self):
for curItem in [
sickgear.show_queue_scheduler.action.currentItem] + sickgear.show_queue_scheduler.action.queue:
if curItem in self.queueItemList:
@@ -171,13 +171,13 @@ class QueueProgressIndicator(object):
return "Unknown"
- def percentComplete(self):
- numFinished = self.numFinished()
- numTotal = self.numTotal()
+ def percent_complete(self):
+ num_finished = self.num_finished()
+ num_total = self.num_total()
- if 0 == numTotal:
+ if 0 == num_total:
return 0
- return int(float(numFinished) / float(numTotal) * 100)
+ return int(float(num_finished) / float(num_total) * 100)
class LoadingTVShow(object):
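
The renamed `QueueProgressIndicator.percent_complete` keeps its integer-percentage semantics, guarding the zero-total case. A standalone sketch:

```python
# Sketch of the guarded integer-percent calculation
def percent_complete(num_finished, num_total):
    if 0 == num_total:
        return 0
    return int(float(num_finished) / float(num_total) * 100)

assert percent_complete(0, 0) == 0
assert percent_complete(3, 8) == 37  # truncates rather than rounds
```
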
diff --git a/sickgear/version_checker.py b/sickgear/version_checker.py
index 23609e71..7da64b0e 100644
--- a/sickgear/version_checker.py
+++ b/sickgear/version_checker.py
@@ -35,7 +35,6 @@ from sg_helpers import cmdline_runner, get_url
# noinspection PyUnresolvedReferences
from six.moves import urllib
from six import string_types
-from _23 import list_keys
# noinspection PyUnreachableCode
if False:
@@ -83,7 +82,7 @@ class PackagesUpdater(object):
ui.notifications.message(msg)
return False
- logger.log('Update(s) for %s found %s' % (self.install_type, list_keys(sickgear.UPDATES_TODO)))
+ logger.log('Update(s) for %s found %s' % (self.install_type, list(sickgear.UPDATES_TODO)))
# save updates_todo to config to be loaded after restart
sickgear.save_config()
diff --git a/sickgear/watchedstate.py b/sickgear/watchedstate.py
index 0ffbf41e..14454cbc 100644
--- a/sickgear/watchedstate.py
+++ b/sickgear/watchedstate.py
@@ -33,6 +33,7 @@ class WatchedStateUpdater(object):
return sickgear.watched_state_queue_scheduler.action.is_in_queue(self.queue_item)
def run(self):
+ # noinspection PyUnresolvedReferences
if self.is_enabled():
self.amActive = True
new_item = self.queue_item()
diff --git a/sickgear/watchedstate_queue.py b/sickgear/watchedstate_queue.py
index 7c5d83f5..81c8d614 100644
--- a/sickgear/watchedstate_queue.py
+++ b/sickgear/watchedstate_queue.py
@@ -48,7 +48,7 @@ class WatchedStateQueue(generic_queue.GenericQueue):
return length
- def add_item(self, item):
+ def add_item(self, item, **kwargs):
if isinstance(item, EmbyWatchedStateQueueItem) and not self.is_in_queue(EmbyWatchedStateQueueItem):
# emby watched state item
generic_queue.GenericQueue.add_item(self, item)
diff --git a/sickgear/webapi.py b/sickgear/webapi.py
index 691f2c6c..40246086 100644
--- a/sickgear/webapi.py
+++ b/sickgear/webapi.py
@@ -55,8 +55,8 @@ from .tv import TVEpisode, TVShow, TVidProdid
from .webserve import AddShows
import dateutil.parser
-from _23 import decode_str, list_keys, unquote_plus
-from six import integer_types, iteritems, iterkeys, PY2, string_types, text_type
+from _23 import decode_str, unquote_plus
+from six import integer_types, iteritems, iterkeys, string_types, text_type
# noinspection PyUnreachableCode
if False:
@@ -253,8 +253,6 @@ class Api(webserve.BaseHandler):
result = function(*ag)
return result
except Exception as e:
- if PY2:
- logger.log('traceback: %s' % traceback.format_exc(), logger.ERROR)
logger.log(ex(e), logger.ERROR)
raise e
@@ -793,7 +791,7 @@ def _mapQuality(show_obj):
anyQualities = []
bestQualities = []
- iqualityID, aqualityID = Quality.splitQuality(int(show_obj))
+ iqualityID, aqualityID = Quality.split_quality(int(show_obj))
if iqualityID:
for quality in iqualityID:
anyQualities.append(quality_map[quality])
@@ -1043,7 +1041,7 @@ class CMD_SickGearComingEpisodes(ApiCall):
ep['network'] and network_timezones.get_network_timezone(ep['network'], return_name=True)[1])
# remove all field we don't want for api response
- for cur_f in list_keys(ep):
+ for cur_f in list(ep):
if cur_f not in [ # fields to preserve
'absolute_number', 'air_by_date', 'airdate', 'airs', 'archive_firstmatch',
'classification', 'data_network', 'data_show_name',
@@ -1157,7 +1155,7 @@ class CMD_SickGearEpisode(ApiCall):
timezone, episode['timezone'] = network_timezones.get_network_timezone(show_obj.network, return_name=True)
episode['airdate'] = SGDatetime.sbfdate(SGDatetime.convert_to_setting(
network_timezones.parse_date_time(int(episode['airdate']), show_obj.airs, timezone)), d_preset=dateFormat)
- status, quality = Quality.splitCompositeStatus(int(episode["status"]))
+ status, quality = Quality.split_composite_status(int(episode["status"]))
episode["status"] = _get_status_Strings(status)
episode["quality"] = _get_quality_string(quality)
episode["file_size_human"] = _sizeof_fmt(episode["file_size"])
@@ -1226,7 +1224,7 @@ class CMD_SickGearEpisodeSearch(ApiCall):
# return the correct json value
if ep_queue_item.success:
- status, quality = Quality.splitCompositeStatus(ep_obj.status)
+ status, quality = Quality.split_composite_status(ep_obj.status)
# TODO: split quality and status?
return _responds(RESULT_SUCCESS, {"quality": _get_quality_string(quality)},
"Snatched (" + _get_quality_string(quality) + ")")
@@ -1350,7 +1348,7 @@ class CMD_SickGearEpisodeSetStatus(ApiCall):
continue
if None is not self.quality:
- ep_obj.status = Quality.compositeStatus(self.status, self.quality)
+ ep_obj.status = Quality.composite_status(self.status, self.quality)
else:
ep_obj.status = self.status
result = ep_obj.get_sql()
@@ -1669,7 +1667,7 @@ class CMD_SickGearHistory(ApiCall):
results = []
np = NameParser(True, testing=True, indexer_lookup=False, try_scene_exceptions=False)
for cur_result in sql_result:
- status, quality = Quality.splitCompositeStatus(int(cur_result["action"]))
+ status, quality = Quality.split_composite_status(int(cur_result["action"]))
if type_filter and status not in type_filter:
continue
status = _get_status_Strings(status)
@@ -2166,14 +2164,14 @@ class CMD_SickGearForceSearch(ApiCall):
result = None
if 'recent' == self.searchtype and not sickgear.search_queue_scheduler.action.is_recentsearch_in_progress() \
and not sickgear.recent_search_scheduler.action.amActive:
- result = sickgear.recent_search_scheduler.forceRun()
+ result = sickgear.recent_search_scheduler.force_run()
elif 'backlog' == self.searchtype and not sickgear.search_queue_scheduler.action.is_backlog_in_progress() \
and not sickgear.backlog_search_scheduler.action.amActive:
sickgear.backlog_search_scheduler.force_search(force_type=FORCED_BACKLOG)
result = True
elif 'proper' == self.searchtype and not sickgear.search_queue_scheduler.action.is_propersearch_in_progress() \
and not sickgear.proper_finder_scheduler.action.amActive:
- result = sickgear.proper_finder_scheduler.forceRun()
+ result = sickgear.proper_finder_scheduler.force_run()
if result:
return _responds(RESULT_SUCCESS, msg='%s search successfully forced' % self.searchtype)
return _responds(RESULT_FAILURE,
@@ -2668,7 +2666,7 @@ class CMD_SickGearSetDefaults(ApiCall):
aqualityID.append(quality_map[quality])
if iqualityID or aqualityID:
- sickgear.QUALITY_DEFAULT = Quality.combineQualities(iqualityID, aqualityID)
+ sickgear.QUALITY_DEFAULT = Quality.combine_qualities(iqualityID, aqualityID)
if self.status:
# convert the string status to a int
@@ -3367,7 +3365,7 @@ class CMD_SickGearShowAddExisting(ApiCall):
aqualityID.append(quality_map[quality])
if iqualityID or aqualityID:
- newQuality = Quality.combineQualities(iqualityID, aqualityID)
+ newQuality = Quality.combine_qualities(iqualityID, aqualityID)
sickgear.show_queue_scheduler.action.add_show(
int(self.tvid), int(self.prodid), self.location,
@@ -3473,7 +3471,7 @@ class CMD_SickGearShowAddNew(ApiCall):
aqualityID.append(quality_map[quality])
if iqualityID or aqualityID:
- newQuality = Quality.combineQualities(iqualityID, aqualityID)
+ newQuality = Quality.combine_qualities(iqualityID, aqualityID)
# use default status as a failsafe
newStatus = sickgear.STATUS_DEFAULT
@@ -4146,7 +4144,7 @@ class CMD_SickGearShowSeasons(ApiCall):
[self.tvid, self.prodid])
seasons = {} # type: Dict[int, Dict]
for cur_result in sql_result:
- status, quality = Quality.splitCompositeStatus(int(cur_result["status"]))
+ status, quality = Quality.split_composite_status(int(cur_result["status"]))
cur_result["status"] = _get_status_Strings(status)
cur_result["quality"] = _get_quality_string(quality)
timezone, cur_result['timezone'] = network_timezones.get_network_timezone(show_obj.network,
@@ -4179,7 +4177,7 @@ class CMD_SickGearShowSeasons(ApiCall):
for cur_result in sql_result:
curEpisode = int(cur_result["episode"])
del cur_result["episode"]
- status, quality = Quality.splitCompositeStatus(int(cur_result["status"]))
+ status, quality = Quality.split_composite_status(int(cur_result["status"]))
cur_result["status"] = _get_status_Strings(status)
cur_result["quality"] = _get_quality_string(quality)
timezone, cur_result['timezone'] = network_timezones.get_network_timezone(show_obj.network,
@@ -4264,7 +4262,7 @@ class CMD_SickGearShowSetQuality(ApiCall):
aqualityID.append(quality_map[quality])
if iqualityID or aqualityID:
- newQuality = Quality.combineQualities(iqualityID, aqualityID)
+ newQuality = Quality.combine_qualities(iqualityID, aqualityID)
show_obj.quality = newQuality
show_obj.upgrade_once = self.upgradeonce
@@ -4328,7 +4326,7 @@ class CMD_SickGearShowStats(ApiCall):
# add all the downloaded qualities
episode_qualities_counts_download = {"total": 0}
for statusCode in Quality.DOWNLOADED:
- status, quality = Quality.splitCompositeStatus(statusCode)
+ status, quality = Quality.split_composite_status(statusCode)
if quality in [Quality.NONE]:
continue
episode_qualities_counts_download[statusCode] = 0
@@ -4336,7 +4334,7 @@ class CMD_SickGearShowStats(ApiCall):
# add all snatched qualities
episode_qualities_counts_snatch = {"total": 0}
for statusCode in Quality.SNATCHED_ANY:
- status, quality = Quality.splitCompositeStatus(statusCode)
+ status, quality = Quality.split_composite_status(statusCode)
if quality in [Quality.NONE]:
continue
episode_qualities_counts_snatch[statusCode] = 0
@@ -4347,7 +4345,7 @@ class CMD_SickGearShowStats(ApiCall):
[self.prodid, self.tvid])
# the main loop that goes through all episodes
for cur_result in sql_result:
- status, quality = Quality.splitCompositeStatus(int(cur_result["status"]))
+ status, quality = Quality.split_composite_status(int(cur_result["status"]))
episode_status_counts_total["total"] += 1
@@ -4369,7 +4367,7 @@ class CMD_SickGearShowStats(ApiCall):
if "total" == statusCode:
episodes_stats["downloaded"]["total"] = episode_qualities_counts_download[statusCode]
continue
- status, quality = Quality.splitCompositeStatus(int(statusCode))
+ status, quality = Quality.split_composite_status(int(statusCode))
statusString = Quality.qualityStrings[quality].lower().replace(" ", "_").replace("(", "").replace(")", "")
episodes_stats["downloaded"][statusString] = episode_qualities_counts_download[statusCode]
@@ -4380,7 +4378,7 @@ class CMD_SickGearShowStats(ApiCall):
if "total" == statusCode:
episodes_stats["snatched"]["total"] = episode_qualities_counts_snatch[statusCode]
continue
- status, quality = Quality.splitCompositeStatus(int(statusCode))
+ status, quality = Quality.split_composite_status(int(statusCode))
statusString = Quality.qualityStrings[quality].lower().replace(" ", "_").replace("(", "").replace(")", "")
if Quality.qualityStrings[quality] in episodes_stats["snatched"]:
episodes_stats["snatched"][statusString] += episode_qualities_counts_snatch[statusCode]
@@ -4392,7 +4390,7 @@ class CMD_SickGearShowStats(ApiCall):
if "total" == statusCode:
episodes_stats["total"] = episode_status_counts_total[statusCode]
continue
- status, quality = Quality.splitCompositeStatus(int(statusCode))
+ status, quality = Quality.split_composite_status(int(statusCode))
statusString = statusStrings.statusStrings[statusCode].lower().replace(" ", "_").replace("(", "").replace(
")", "")
episodes_stats[statusString] = episode_status_counts_total[statusCode]
@@ -4655,7 +4653,7 @@ class CMD_SickGearShowsForceUpdate(ApiCall):
or sickgear.show_update_scheduler.action.amActive:
return _responds(RESULT_FAILURE, msg="show update already running.")
- result = sickgear.show_update_scheduler.forceRun()
+ result = sickgear.show_update_scheduler.force_run()
if result:
return _responds(RESULT_SUCCESS, msg="daily show update started")
return _responds(RESULT_FAILURE, msg="can't start show update currently")
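The webapi hunks above are dominated by renames from camelCase (`splitCompositeStatus`, `combineQualities`) to snake_case. For orientation, here is a minimal sketch of the composite-status packing these helpers operate on, assuming the long-standing SickBeard-style `status + 100 * quality` encoding; the names mirror the renamed API but the bodies are illustrative, not the project's verbatim source:

```python
DOWNLOADED = 4   # an example plain status code
SDTV = 1         # an example quality flag (flags assumed to be powers of two)

def composite_status(status, quality):
    # pack a plain status and a quality flag into a single integer
    return status + 100 * quality

def split_composite_status(status):
    # unpack: the quality is the largest flag x with status > 100 * x
    for x in sorted((1, 2, 4, 8), reverse=True):
        if status > 100 * x:
            return status - 100 * x, x
    return status, 0  # no quality encoded (Quality.NONE)

assert split_composite_status(composite_status(DOWNLOADED, SDTV)) == (DOWNLOADED, SDTV)
```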
diff --git a/sickgear/webserve.py b/sickgear/webserve.py
index 6439dfd5..6ccdec7f 100644
--- a/sickgear/webserve.py
+++ b/sickgear/webserve.py
@@ -19,6 +19,7 @@ from __future__ import with_statement, division
# noinspection PyProtectedMember
from mimetypes import MimeTypes
+from urllib.parse import urljoin
import base64
import copy
@@ -41,13 +42,21 @@ from json_helper import json_dumps, json_loads
import sg_helpers
from sg_helpers import remove_file, scantree, is_virtualenv
+from sg_futures import SgThreadPoolExecutor
+try:
+ from multiprocessing import cpu_count
+except ImportError:
+ # some platforms don't have multiprocessing
+ def cpu_count():
+ return None
+
import sickgear
from . import classes, clients, config, db, helpers, history, image_cache, logger, name_cache, naming, \
network_timezones, notifiers, nzbget, processTV, sab, scene_exceptions, search_queue, subtitles, ui
from .anime import AniGroupList, pull_anidb_groups, short_group_names
from .browser import folders_at_path
from .common import ARCHIVED, DOWNLOADED, FAILED, IGNORED, SKIPPED, SNATCHED, SNATCHED_ANY, UNAIRED, UNKNOWN, WANTED, \
- SD, HD720p, HD1080p, UHD2160p, Overview, Quality, qualityPresetStrings, statusStrings
+ SD, HD720p, HD1080p, UHD2160p, Overview, Quality, qualityPresetStrings, statusStrings
from .helpers import get_media_stats, has_image_ext, real_path, remove_article, remove_file_perm, starify
from .indexermapper import MapStatus, map_indexers_to_show, save_mapping
from .indexers.indexer_config import TVINFO_IMDB, TVINFO_TMDB, TVINFO_TRAKT, TVINFO_TVDB, TVINFO_TVMAZE, \
@@ -72,13 +81,9 @@ from unidecode import unidecode
import dateutil.parser
from tornado import gen, iostream
-# noinspection PyUnresolvedReferences
+from tornado.escape import utf8
from tornado.web import RequestHandler, StaticFileHandler, authenticated
from tornado.concurrent import run_on_executor
-# tornado.web.RequestHandler above is unresolved until...
-# 1) RouteHandler derives from RequestHandler instead of LegacyBaseHandler
-# 2) the following line is removed (plus the noinspection deleted)
-from ._legacy import LegacyBaseHandler
from lib import subliminal
from lib.cfscrape import CloudflareScraper
@@ -90,14 +95,15 @@ from lib.api_trakt.exceptions import TraktException, TraktAuthException
import lib.rarfile.rarfile as rarfile
-from _23 import decode_bytes, decode_str, filter_list, filter_iter, getargspec, list_keys, list_values, \
- map_consume, map_iter, map_list, map_none, ordered_dict, quote_plus, unquote_plus, urlparse
-from six import binary_type, integer_types, iteritems, iterkeys, itervalues, moves, PY2, string_types
+from _23 import decode_bytes, decode_str, getargspec, \
+ map_consume, map_none, quote_plus, unquote_plus, urlparse
+from six import binary_type, integer_types, iteritems, iterkeys, itervalues, moves, string_types
# noinspection PyUnreachableCode
if False:
from typing import Any, AnyStr, Dict, List, Optional, Set, Tuple
from sickgear.providers.generic import TorrentProvider
+ from tv import TVInfoShow
# noinspection PyAbstractClass
@@ -187,7 +193,50 @@ class BaseStaticFileHandler(StaticFileHandler):
self.set_header('X-Frame-Options', 'SAMEORIGIN')
-class RouteHandler(LegacyBaseHandler):
+class RouteHandler(RequestHandler):
+
+ executor = SgThreadPoolExecutor(thread_name_prefix='WEBSERVER', max_workers=min(32, (cpu_count() or 1) + 4))
+
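A side note on the pool size: `min(32, (cpu_count() or 1) + 4)` reproduces the default heuristic CPython 3.8+ applies to `ThreadPoolExecutor`, with `or 1` covering the stubbed `cpu_count` fallback above:

```python
# e.g. 8 cores -> min(32, 12) = 12 workers; no multiprocessing -> min(32, 5) = 5
max_workers = min(32, (cpu_count() or 1) + 4)
```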
+ def redirect(self, url, permanent=False, status=None):
+ """Send a redirect to the given (optionally relative) URL.
+
+ ----->>>>> NOTE: Removed self.finish <<<<<-----
+
+ If the ``status`` argument is specified, that value is used as the
+ HTTP status code; otherwise either 301 (permanent) or 302
+ (temporary) is chosen based on the ``permanent`` argument.
+ The default is 302 (temporary).
+ """
+ if not url.startswith(sickgear.WEB_ROOT):
+ url = sickgear.WEB_ROOT + url
+
+ # noinspection PyUnresolvedReferences
+ if self._headers_written:
+ raise Exception('Cannot redirect after headers have been written')
+ if status is None:
+ status = 301 if permanent else 302
+ else:
+ assert isinstance(status, int)
+ assert 300 <= status <= 399
+ self.set_status(status)
+ self.set_header('Location', urljoin(utf8(self.request.uri), utf8(url)))
+
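As the docstring flags, this override drops the `self.finish()` call made by Tornado's stock `redirect()`, so a coroutine handler can keep working after the redirect headers are set and finalize the response itself. A hypothetical subclass sketches the intended flow:

```python
from tornado import gen

class AuditedHandler(RouteHandler):  # hypothetical, for illustration only
    @gen.coroutine
    def get(self):
        self.redirect('/home/')  # sets status and Location, nothing more
        yield gen.moment         # the handler may continue working here
        self.finish()            # the response is finalized exactly once
```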
+ def write_error(self, status_code, **kwargs):
+ body = ''
+ try:
+ if self.request.body:
+ body = '\nRequest body: %s' % decode_str(self.request.body)
+ except (BaseException, Exception):
+ pass
+ logger.log('Sent %s error response to a `%s` request for `%s` with headers:\n%s%s' %
+ (status_code, self.request.method, self.request.path, self.request.headers, body), logger.WARNING)
+ # suppress traceback by removing 'exc_info' kwarg
+ if 'exc_info' in kwargs:
+ logger.log('Gracefully handled exception text:\n%s' % traceback.format_exception(*kwargs["exc_info"]),
+ logger.DEBUG)
+ del kwargs['exc_info']
+ return super(RouteHandler, self).write_error(status_code, **kwargs)
+
def data_received(self, *args):
pass
@@ -198,9 +247,7 @@ class RouteHandler(LegacyBaseHandler):
return [self.decode_data(d) for d in data]
if not isinstance(data, string_types):
return data
- if not PY2:
- return data.encode('latin1').decode('utf-8')
- return data.decode('utf-8')
+ return data.encode('latin1').decode('utf-8')
@gen.coroutine
def route_method(self, route, use_404=False, limit_route=None, xsrf_filter=True):
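The surviving branch of `decode_data` relies on a standard round trip: if UTF-8 bytes were decoded upstream as latin-1 (as happens with HTTP header and form data), re-encoding with latin-1 losslessly recovers the original bytes, which then decode as UTF-8. A quick self-contained check of the idea:

```python
raw = 'Café'.encode('utf-8')    # the bytes as sent on the wire
mangled = raw.decode('latin1')  # 'CafÃ©' - how they arrive after a latin-1 decode
assert mangled.encode('latin1').decode('utf-8') == 'Café'
```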
@@ -240,7 +287,7 @@ class RouteHandler(LegacyBaseHandler):
# no filtering for legacy and routes that depend on *args and **kwargs
result = yield self.async_call(method, request_kwargs) # method(**request_kwargs)
else:
- filter_kwargs = dict(filter_iter(lambda kv: kv[0] in method_args, iteritems(request_kwargs)))
+ filter_kwargs = dict(filter(lambda kv: kv[0] in method_args, iteritems(request_kwargs)))
result = yield self.async_call(method, filter_kwargs) # method(**filter_kwargs)
self.finish(result)
@@ -249,8 +296,6 @@ class RouteHandler(LegacyBaseHandler):
try:
return function(**kw)
except (BaseException, Exception) as e:
- if PY2:
- raise Exception(traceback.format_exc().replace('\n', ' '))
raise e
def page_not_found(self):
@@ -311,7 +356,7 @@ class BaseHandler(RouteHandler):
elif 'fanart' == which[0:6]:
image_file_name = [cache_obj.fanart_path(
*tvid_prodid_obj.tuple +
- ('%s' % (re.sub(r'.*?fanart_(\d+(?:\.\w{1,20})?\.\w{5,8}).*', r'\1.', which, 0, re.I)),))]
+ ('%s' % (re.sub(r'.*?fanart_(\d+(?:\.\w{1,20})?\.\w{5,8}).*', r'\1.', which, 0, re.I)),))]
for cur_name in image_file_name:
if os.path.isfile(cur_name):
@@ -622,7 +667,7 @@ class RepoHandler(BaseStaticFileHandler):
return self.index([('resource.language.en_gb/', 'English/')[self.kodi_is_legacy]])
def render_kodi_service_sickgear_watchedstate_updater_resources_language_english_index(self):
- return self.index([('strings.po', 'strings.xml')[self.kodi_is_legacy]])
+ return self.index([('strings.po', 'strings.xml')[self.kodi_is_legacy]])
def repo_sickgear_details(self):
return re.findall(r'(?si)addon\sid="(repository\.[^"]+)[^>]+version="([^"]+)',
@@ -879,9 +924,10 @@ class LogfileHandler(BaseHandler):
super(LogfileHandler, self).__init__(application, request, **kwargs)
self.lock = threading.Lock()
+ # noinspection PyUnusedLocal
@authenticated
@gen.coroutine
- def get(self, path, *args, **kwargs):
+ def get(self, *args, **kwargs):
logfile_name = logger.current_log_file()
try:
@@ -1131,7 +1177,7 @@ class MainHandler(WebHandler):
# make a dict out of the sql results
sql_result = [dict(row) for row in sql_result
- if Quality.splitCompositeStatus(helpers.try_int(row['status']))[0] not in
+ if Quality.split_composite_status(helpers.try_int(row['status']))[0] not in
SNATCHED_ANY + [DOWNLOADED, ARCHIVED, IGNORED, SKIPPED]]
# multi dimension sort
@@ -1182,8 +1228,8 @@ class MainHandler(WebHandler):
pass
if imdb_id:
sql_result[index]['imdb_url'] = sickgear.indexers.indexer_config.tvinfo_config[
- sickgear.indexers.indexer_config.TVINFO_IMDB][
- 'show_url'] % imdb_id
+ sickgear.indexers.indexer_config.TVINFO_IMDB][
+ 'show_url'] % imdb_id
else:
sql_result[index]['imdb_url'] = ''
@@ -1286,7 +1332,7 @@ class MainHandler(WebHandler):
now = datetime.datetime.now()
events = [
- ('recent', sickgear.recent_search_scheduler.timeLeft),
+ ('recent', sickgear.recent_search_scheduler.time_left),
('backlog', sickgear.backlog_search_scheduler.next_backlog_timeleft),
]
@@ -1393,7 +1439,7 @@ r.close()
if data:
my_db = db.DBConnection(row_type='dict')
- media_paths = map_list(lambda arg: os.path.basename(arg[1]['path_file']), iteritems(data))
+ media_paths = list(map(lambda arg: os.path.basename(arg[1]['path_file']), iteritems(data)))
def chunks(lines, n):
for c in range(0, len(lines), n):
@@ -1553,13 +1599,13 @@ class Home(MainHandler):
index = 0
if 'custom' == sickgear.SHOWLIST_TAGVIEW:
for name in sickgear.SHOW_TAGS:
- results = filter_list(lambda so: so.tag == name, sickgear.showList)
+ results = list(filter(lambda so: so.tag == name, sickgear.showList))
if results:
t.showlists.append(['container%s' % index, name, results])
index += 1
elif 'anime' == sickgear.SHOWLIST_TAGVIEW:
- show_results = filter_list(lambda so: not so.anime, sickgear.showList)
- anime_results = filter_list(lambda so: so.anime, sickgear.showList)
+ show_results = list(filter(lambda so: not so.anime, sickgear.showList))
+ anime_results = list(filter(lambda so: so.anime, sickgear.showList))
if show_results:
t.showlists.append(['container%s' % index, 'Show List', show_results])
index += 1
@@ -1904,7 +1950,7 @@ class Home(MainHandler):
' AND notify_list != ""',
[TVidProdid.glue])
notify_lists = {}
- for r in filter_iter(lambda x: x['notify_list'].strip(), rows):
+ for r in filter(lambda x: x['notify_list'].strip(), rows):
# noinspection PyTypeChecker
notify_lists[r['tvid_prodid']] = r['notify_list']
@@ -2000,7 +2046,7 @@ class Home(MainHandler):
if not line.strip():
continue
if line.startswith(' '):
- change_parts = re.findall(r'^[\W]+(.*)$', line)
+ change_parts = re.findall(r'^\W+(.*)$', line)
change['text'] += change_parts and (' %s' % change_parts[0].strip()) or ''
else:
if change:
@@ -2012,11 +2058,11 @@ class Home(MainHandler):
elif not max_rel:
break
elif line.startswith('### '):
- rel_data = re.findall(r'(?im)^###\W*([^\s]+)\W\(([^)]+)\)', line)
+ rel_data = re.findall(r'(?im)^###\W*(\S+)\W\(([^)]+)\)', line)
rel_data and output.append({'type': 'rel', 'ver': rel_data[0][0], 'date': rel_data[0][1]})
max_rel -= 1
elif line.startswith('# '):
- max_data = re.findall(r'^#\W*([\d]+)\W*$', line)
+ max_data = re.findall(r'^#\W*(\d+)\W*$', line)
max_rel = max_data and helpers.try_int(max_data[0], None) or 5
if change:
output.append(change)
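The regex edits in this changelog-parsing hunk are pure notation swaps (`[^\s]` → `\S`, `[\d]` → `\d`) with identical match behavior, as a quick check against a release heading confirms:

```python
import re

line = '### 3.27.8 (2023-02-20 23:30:00 UTC)'
assert (re.findall(r'(?im)^###\W*(\S+)\W\(([^)]+)\)', line)
        == re.findall(r'(?im)^###\W*([^\s]+)\W\(([^)]+)\)', line)
        == [('3.27.8', '2023-02-20 23:30:00 UTC')])
```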
@@ -2075,6 +2121,7 @@ class Home(MainHandler):
else:
self.redirect('/home/')
+ # noinspection PyUnusedLocal
def season_render(self, tvid_prodid=None, season=None, **kwargs):
response = {'success': False}
@@ -2265,7 +2312,7 @@ class Home(MainHandler):
del (ep_counts['totals'][0])
ep_counts['eps_all'] = sum(itervalues(ep_counts['totals']))
- ep_counts['eps_most'] = max(list_values(ep_counts['totals']) + [0])
+ ep_counts['eps_most'] = max(list(ep_counts['totals'].values()) + [0])
all_seasons = sorted(iterkeys(ep_counts['totals']), reverse=True)
t.lowest_season, t.highest_season = all_seasons and (all_seasons[-1], all_seasons[0]) or (0, 0)
@@ -2313,7 +2360,7 @@ class Home(MainHandler):
status_overview = show_obj.get_overview(row['status'])
if status_overview:
ep_counts[status_overview] += row['cnt']
- if ARCHIVED == Quality.splitCompositeStatus(row['status'])[0]:
+ if ARCHIVED == Quality.split_composite_status(row['status'])[0]:
ep_counts['archived'].setdefault(row['season'], 0)
ep_counts['archived'][row['season']] = row['cnt'] + ep_counts['archived'].get(row['season'], 0)
else:
@@ -2380,7 +2427,7 @@ class Home(MainHandler):
t.clean_show_name = quote_plus(sickgear.indexermapper.clean_show_name(show_obj.name))
- t.min_initial = Quality.get_quality_ui(min(Quality.splitQuality(show_obj.quality)[0]))
+ t.min_initial = Quality.get_quality_ui(min(Quality.split_quality(show_obj.quality)[0]))
t.show_obj.exceptions = scene_exceptions.get_scene_exceptions(show_obj.tvid, show_obj.prodid)
# noinspection PyUnresolvedReferences
t.all_scene_exceptions = show_obj.exceptions # normally Unresolved as not a class attribute, force set above
@@ -2426,7 +2473,7 @@ class Home(MainHandler):
sorted_show_list[i].unique_name = '%s (%s)' % (sorted_show_list[i].name, start_year)
dups[sorted_show_list[i].unique_name] = i
- name_cache.buildNameCache()
+ name_cache.build_name_cache()
@staticmethod
def sorted_show_lists():
@@ -2436,7 +2483,7 @@ class Home(MainHandler):
if 'custom' == sickgear.SHOWLIST_TAGVIEW:
sorted_show_lists = []
for tag in sickgear.SHOW_TAGS:
- results = filter_list(lambda _so: _so.tag == tag, sickgear.showList)
+ results = list(filter(lambda _so: _so.tag == tag, sickgear.showList))
if results:
sorted_show_lists.append([tag, sorted(results, key=lambda x: titler(x.unique_name))])
# handle orphaned shows
@@ -2581,12 +2628,12 @@ class Home(MainHandler):
for k, v in iteritems(new_ids):
if None is v.get('id') or None is v.get('status'):
continue
- if (show_obj.ids.get(k, {'id': 0}).get('id') != v.get('id') or
- (MapStatus.NO_AUTOMATIC_CHANGE == v.get('status') and
- MapStatus.NO_AUTOMATIC_CHANGE != show_obj.ids.get(
- k, {'status': MapStatus.NONE}).get('status')) or
- (MapStatus.NO_AUTOMATIC_CHANGE != v.get('status') and
- MapStatus.NO_AUTOMATIC_CHANGE == show_obj.ids.get(
+ if (show_obj.ids.get(k, {'id': 0}).get('id') != v.get('id')
+ or (MapStatus.NO_AUTOMATIC_CHANGE == v.get('status')
+ and MapStatus.NO_AUTOMATIC_CHANGE != show_obj.ids.get(
+ k, {'status': MapStatus.NONE}).get('status'))
+ or (MapStatus.NO_AUTOMATIC_CHANGE != v.get('status')
+ and MapStatus.NO_AUTOMATIC_CHANGE == show_obj.ids.get(
k, {'status': MapStatus.NONE}).get('status'))):
show_obj.ids[k]['id'] = (0, v['id'])[v['id'] >= 0]
show_obj.ids[k]['status'] = (MapStatus.NOT_FOUND, v['status'])[v['id'] != 0]
@@ -2841,7 +2888,7 @@ class Home(MainHandler):
errors = []
with show_obj.lock:
- show_obj.quality = Quality.combineQualities(map_list(int, any_qualities), map_list(int, best_qualities))
+ show_obj.quality = Quality.combine_qualities(list(map(int, any_qualities)), list(map(int, best_qualities)))
show_obj.upgrade_once = upgrade_once
# reversed for now
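`combine_qualities` folds the two quality checklists into one integer. A minimal sketch of the packing, assuming the conventional scheme of OR-ing the initial-quality flags into the low 16 bits and the upgrade-to flags into the high 16 (illustrative, not the project's verbatim source):

```python
from functools import reduce
from operator import or_

def combine_qualities(any_qualities, best_qualities):
    # low word: acceptable qualities; high word: qualities to upgrade toward
    return reduce(or_, any_qualities, 0) | (reduce(or_, best_qualities, 0) << 16)

def split_quality(quality):
    # recover both flag sets (flags assumed to be powers of two)
    any_q = [1 << n for n in range(16) if quality & (1 << n)]
    best_q = [1 << n for n in range(16) if (quality >> 16) & (1 << n)]
    return any_q, best_q

assert split_quality(combine_qualities([1, 2], [2])) == ([1, 2], [2])
```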
@@ -3036,6 +3083,7 @@ class Home(MainHandler):
self.redirect('/home/view-show?tvid_prodid=%s' % show_obj.tvid_prodid)
+ # noinspection PyUnusedLocal
def subtitle_show(self, tvid_prodid=None, force=0):
if None is tvid_prodid:
@@ -3054,6 +3102,7 @@ class Home(MainHandler):
self.redirect('/home/view-show?tvid_prodid=%s' % show_obj.tvid_prodid)
+ # noinspection PyUnusedLocal
def update_mb(self, tvid_prodid=None, **kwargs):
if notifiers.NotifierFactory().get('EMBY').update_library(
@@ -3119,7 +3168,7 @@ class Home(MainHandler):
return json_dumps({'result': 'error'})
return self._generic_message('Error', err_msg)
- min_initial = min(Quality.splitQuality(show_obj.quality)[0])
+ min_initial = min(Quality.split_quality(show_obj.quality)[0])
segments = {}
if None is not eps:
@@ -3161,12 +3210,12 @@ class Home(MainHandler):
if ARCHIVED == status:
if ep_obj.status in Quality.DOWNLOADED or direct:
- ep_obj.status = Quality.compositeStatus(
- ARCHIVED, (Quality.splitCompositeStatus(ep_obj.status)[1], min_initial)[use_default])
+ ep_obj.status = Quality.composite_status(
+ ARCHIVED, (Quality.split_composite_status(ep_obj.status)[1], min_initial)[use_default])
elif DOWNLOADED == status:
if ep_obj.status in Quality.ARCHIVED:
- ep_obj.status = Quality.compositeStatus(
- DOWNLOADED, Quality.splitCompositeStatus(ep_obj.status)[1])
+ ep_obj.status = Quality.composite_status(
+ DOWNLOADED, Quality.split_composite_status(ep_obj.status)[1])
else:
ep_obj.status = status
@@ -3252,12 +3301,12 @@ class Home(MainHandler):
for _cur_ep_obj in cur_ep_obj.related_ep_obj + [cur_ep_obj]:
if _cur_ep_obj in ep_obj_rename_list:
break
- ep_status, ep_qual = Quality.splitCompositeStatus(_cur_ep_obj.status)
+ ep_status, ep_qual = Quality.split_composite_status(_cur_ep_obj.status)
if not ep_qual:
continue
ep_obj_rename_list.append(cur_ep_obj)
else:
- ep_status, ep_qual = Quality.splitCompositeStatus(cur_ep_obj.status)
+ ep_status, ep_qual = Quality.split_composite_status(cur_ep_obj.status)
if not ep_qual:
continue
ep_obj_rename_list.append(cur_ep_obj)
@@ -3334,7 +3383,7 @@ class Home(MainHandler):
# retrieve the episode object and fail if we can't get one
ep_obj = self._get_episode(tvid_prodid, season, episode)
if not isinstance(ep_obj, str):
- if UNKNOWN == Quality.splitCompositeStatus(ep_obj.status)[0]:
+ if UNKNOWN == Quality.split_composite_status(ep_obj.status)[0]:
ep_obj.status = SKIPPED
# make a queue item for the TVEpisode and put it on the queue
@@ -3371,7 +3420,7 @@ class Home(MainHandler):
sickgear.search_queue.remove_old_fifo(sickgear.search_queue.MANUAL_SEARCH_HISTORY)
results = sickgear.search_queue.MANUAL_SEARCH_HISTORY
- for item in filter_iter(lambda q: hasattr(q, 'segment_ns'), queued):
+ for item in filter(lambda q: hasattr(q, 'segment_ns'), queued):
for ep_ns in item.segment_ns:
ep_data, uniq_sxe = self.prepare_episode(ep_ns, 'queued')
ep_data_list.append(ep_data)
@@ -3387,9 +3436,9 @@ class Home(MainHandler):
seen_eps.add(uniq_sxe)
episode_params = dict(searchstate='finished', retrystate=True, statusoverview=True)
- for item in filter_iter(lambda r: hasattr(r, 'segment_ns') and (
+ for item in filter(lambda r: hasattr(r, 'segment_ns') and (
not tvid_prodid or tvid_prodid == str(r.show_ns.tvid_prodid)), results):
- for ep_ns in filter_iter(
+ for ep_ns in filter(
lambda e: (e.show_ns.tvid, e.show_ns.prodid, e.season, e.episode) not in seen_eps, item.segment_ns):
ep_obj = getattr(ep_ns, 'ep_obj', None)
if not ep_obj:
@@ -3403,8 +3452,8 @@ class Home(MainHandler):
ep_data_list.append(ep_data)
seen_eps.add(uniq_sxe)
- for snatched in filter_iter(lambda s: ((s.tvid, s.prodid, s.season, s.episode) not in seen_eps),
- item.snatched_eps):
+ for snatched in filter(lambda s: ((s.tvid, s.prodid, s.season, s.episode) not in seen_eps),
+ item.snatched_eps):
ep_obj = getattr(snatched, 'ep_obj', None)
if not ep_obj:
continue
@@ -3439,9 +3488,9 @@ class Home(MainHandler):
"""
# Find the quality class for the episode
quality_class = Quality.qualityStrings[Quality.UNKNOWN]
- ep_status, ep_quality = Quality.splitCompositeStatus(ep_type.status)
+ ep_status, ep_quality = Quality.split_composite_status(ep_type.status)
for x in (SD, HD720p, HD1080p, UHD2160p):
- if ep_quality in Quality.splitQuality(x)[0]:
+ if ep_quality in Quality.split_quality(x)[0]:
quality_class = qualityPresetStrings[x]
break
@@ -3470,7 +3519,7 @@ class Home(MainHandler):
if isinstance(ep_obj, str):
return json_dumps({'result': 'failure'})
- # try do download subtitles for that episode
+ # try to download subtitles for that episode
try:
previous_subtitles = set([subliminal.language.Language(x) for x in ep_obj.subtitles])
ep_obj.subtitles = set([x.language for x in next(itervalues(ep_obj.download_subtitles()))])
@@ -3884,7 +3933,7 @@ class HomeProcessMedia(Home):
regexp = re.compile(r'(?i)<br[\s]*[/]?>', flags=re.UNICODE)
result = regexp.sub('\n', result)
if None is not quiet and 1 == int(quiet):
- regexp = re.compile(u'(?i)<a[^>]+>([^<]+)<[/]a>', flags=re.UNICODE)
+ regexp = re.compile(u'(?i)<a[^>]+>([^<]+)</a>', flags=re.UNICODE)
return u'%s' % regexp.sub(r'\1', result)
return self._generic_message('Postprocessing results', u'%s<br>' % result)
@@ -3941,12 +3990,12 @@ class AddShows(Home):
b_term = decode_str(used_search_term).strip()
terms = []
try:
- for cur_term in ([], [b_term.encode('utf-8')])[PY2] + [unidecode(b_term), b_term]:
+ for cur_term in [unidecode(b_term), b_term]:
if cur_term not in terms:
terms += [cur_term]
except (BaseException, Exception):
text = used_search_term.strip()
- terms = [text if not PY2 else text.encode('utf-8')]
+ terms = [text]
return set(s for s in set([used_search_term] + terms) if s)
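With the py2 encode branch gone, the term list reduces to the ASCII-folded form plus the original; `unidecode` (already imported in this module) provides the fold so providers that mishandle non-ASCII input still receive a usable query:

```python
from unidecode import unidecode

b_term = 'Die Küstenpiloten'
terms = [unidecode(b_term), b_term]  # ['Die Kustenpiloten', 'Die Küstenpiloten']
```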
@@ -3989,7 +4038,7 @@ class AddShows(Home):
r'(?P[^ ]+themoviedb\.org/tv/(?P\d+)[^ ]*)|'
r'(?P[^ ]+trakt\.tv/shows/(?P[^ /]+)[^ ]*)|'
r'(?P[^ ]+thetvdb\.com/series/(?P[^ /]+)[^ ]*)|'
- r'(?P[^ ]+thetvdb\.com/[^\d]+(?P[^ /]+)[^ ]*)|'
+ r'(?P[^ ]+thetvdb\.com/\D+(?P[^ /]+)[^ ]*)|'
r'(?P[^ ]+tvmaze\.com/shows/(?P\d+)/?[^ ]*)', search_term)
if id_check:
for cur_match in id_check:
@@ -4039,7 +4088,7 @@ class AddShows(Home):
t = sickgear.TVInfoAPI(cur_tvid).setup(**tvinfo_config)
results.setdefault(cur_tvid, {})
try:
- for cur_result in t.search_show(list(used_search_term), ids=ids_search_used):
+ for cur_result in t.search_show(list(used_search_term), ids=ids_search_used): # type: TVInfoShow
if TVINFO_TRAKT == cur_tvid and not cur_result['ids'].tvdb:
continue
tv_src_id = int(cur_result['id'])
@@ -4082,7 +4131,7 @@ class AddShows(Home):
for tvid, name in iteritems(sickgear.TVInfoAPI().all_sources)}
if TVINFO_TRAKT in results and TVINFO_TVDB in results:
- tvdb_ids = list_keys(results[TVINFO_TVDB])
+ tvdb_ids = list(results[TVINFO_TVDB])
results[TVINFO_TRAKT] = {k: v for k, v in iteritems(results[TVINFO_TRAKT]) if v['ids'].tvdb not in tvdb_ids}
def in_db(tvid, prod_id):
@@ -4397,9 +4446,9 @@ class AddShows(Home):
t.infosrc = sickgear.TVInfoAPI().search_sources
search_tvid = None
if use_show_name and 1 == show_name.count(':'): # if colon is found once
- search_tvid = filter_list(lambda x: bool(x),
+ search_tvid = list(filter(lambda x: bool(x),
[('%s:' % sickgear.TVInfoAPI(_tvid).config['slug']) in show_name and _tvid
- for _tvid, _ in iteritems(t.infosrc)])
+ for _tvid, _ in iteritems(t.infosrc)]))
search_tvid = 1 == len(search_tvid) and search_tvid[0]
t.provided_tvid = search_tvid or int(tvid or sickgear.TVINFO_DEFAULT)
t.infosrc_icons = [sickgear.TVInfoAPI(cur_tvid).config.get('icon') for cur_tvid in t.infosrc]
@@ -4530,7 +4579,7 @@ class AddShows(Home):
def info_anidb(self, ids, show_name):
- if not filter_list(lambda tvid_prodid: helpers.find_show_by_id(tvid_prodid), ids.split(' ')):
+ if not list(filter(lambda tvid_prodid: helpers.find_show_by_id(tvid_prodid), ids.split(' '))):
return self.new_show('|'.join(['', '', '', ' '.join([ids, show_name])]), use_show_name=True, is_anime=True)
@staticmethod
@@ -4617,8 +4666,8 @@ class AddShows(Home):
oldest, newest, oldest_dt, newest_dt = None, None, 9999999, 0
show_list = (data or {}).get('list', {}).get('items', {})
- idx_ids = dict(map_iter(lambda so: (so.imdbid, (so.tvid, so.prodid)),
- filter_iter(lambda _so: getattr(_so, 'imdbid', None), sickgear.showList)))
+ idx_ids = dict(map(lambda so: (so.imdbid, (so.tvid, so.prodid)),
+ filter(lambda _so: getattr(_so, 'imdbid', None), sickgear.showList)))
# list_id = (data or {}).get('list', {}).get('id', {})
for row in show_list:
@@ -4683,7 +4732,7 @@ class AddShows(Home):
def parse_imdb_html(self, html, filtered, kwargs):
- img_size = re.compile(r'(?im)(V1[^XY]+([XY]))(\d+)([^\d]+)(\d+)([^\d]+)(\d+)([^\d]+)(\d+)([^\d]+)(\d+)(.*?)$')
+ img_size = re.compile(r'(?im)(V1[^XY]+([XY]))(\d+)(\D+)(\d+)(\D+)(\d+)(\D+)(\d+)(\D+)(\d+)(.*?)$')
with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
show_list = soup.select('.lister-list')
@@ -4753,7 +4802,7 @@ class AddShows(Home):
show_obj = helpers.find_show_by_id({TVINFO_IMDB: int(ids['imdb'].replace('tt', ''))},
no_mapped_ids=False)
- for tvid in filter_iter(lambda _tvid: _tvid == show_obj.tvid, sickgear.TVInfoAPI().search_sources):
+ for tvid in filter(lambda _tvid: _tvid == show_obj.tvid, sickgear.TVInfoAPI().search_sources):
infosrc_slug, infosrc_url = (sickgear.TVInfoAPI(tvid).config[x] for x in
('slug', 'show_url'))
filtered[-1]['ids'][infosrc_slug] = show_obj.prodid
@@ -5114,7 +5163,7 @@ class AddShows(Home):
def info_trakt(self, ids, show_name):
- if not filter_list(lambda tvid_prodid: helpers.find_show_by_id(tvid_prodid), ids.split(' ')):
+ if not list(filter(lambda tvid_prodid: helpers.find_show_by_id(tvid_prodid), ids.split(' '))):
return self.new_show('|'.join(['', '', '', ' '.join([ids, show_name])]), use_show_name=True)
def ne_default(self):
@@ -5204,7 +5253,7 @@ class AddShows(Home):
channel_tag_copy = copy.copy(channel_tag)
if channel_tag_copy:
network = channel_tag_copy.a.extract().get_text(strip=True)
- date_info = re.sub(r'^[^\d]+', '', channel_tag_copy.get_text(strip=True))
+ date_info = re.sub(r'^\D+', '', channel_tag_copy.get_text(strip=True))
if date_info:
dt = dateutil.parser.parse((date_info, '%s.01.01' % date_info)[4 == len(date_info)])
@@ -5213,7 +5262,7 @@ class AddShows(Home):
and 'printed' in ' '.join(t.get('class', ''))]
if len(tag):
age_args = {}
- future = re.sub(r'[^\d]+(.*)', r'\1', tag[0].get_text(strip=True))
+ future = re.sub(r'\D+(.*)', r'\1', tag[0].get_text(strip=True))
for (dim, rcx) in rc:
value = helpers.try_int(rcx.sub(r'\1', future), None)
if value:
@@ -5241,7 +5290,7 @@ class AddShows(Home):
genres = row.find(class_='genre')
if genres:
- genres = re.sub(r',([^\s])', r', \1', genres.get_text(strip=True))
+ genres = re.sub(r',(\S)', r', \1', genres.get_text(strip=True))
overview = row.find(class_='summary')
if overview:
overview = overview.get_text(strip=True)
@@ -5428,7 +5477,7 @@ class AddShows(Home):
# noinspection PyUnusedLocal
def info_tvmaze(self, ids, show_name):
- if not filter_list(lambda tvid_prodid: helpers.find_show_by_id(tvid_prodid), ids.split(' ')):
+ if not list(filter(lambda tvid_prodid: helpers.find_show_by_id(tvid_prodid), ids.split(' '))):
return self.new_show('|'.join(['', '', '', ' '.join([ids, show_name])]), use_show_name=True)
def tvc_default(self):
@@ -5727,7 +5776,7 @@ class AddShows(Home):
dt_ordinal = 0
dt_string = ''
- date_tags = filter_list(lambda t: t.find('span'), row.find_all('div', class_='clamp-details'))
+ date_tags = list(filter(lambda t: t.find('span'), row.find_all('div', class_='clamp-details')))
if date_tags:
dt = dateutil.parser.parse(date_tags[0].get_text().strip())
dt_ordinal = dt.toordinal()
@@ -5840,11 +5889,11 @@ class AddShows(Home):
tvid_prodid_list = []
# first, process known ids
- for tvid, infosrc_slug in filter_iter(
+ for tvid, infosrc_slug in filter(
lambda tvid_slug: item['ids'].get(tvid_slug[1])
and not sickgear.TVInfoAPI(tvid_slug[0]).config.get('defunct'),
- map_iter(lambda _tvid: (_tvid, sickgear.TVInfoAPI(_tvid).config['slug']),
- iterkeys(sickgear.TVInfoAPI().all_sources))):
+ map(lambda _tvid: (_tvid, sickgear.TVInfoAPI(_tvid).config['slug']),
+ iterkeys(sickgear.TVInfoAPI().all_sources))):
try:
src_id = item['ids'][infosrc_slug]
tvid_prodid_list += ['%s:%s' % (infosrc_slug, src_id)]
@@ -5899,7 +5948,7 @@ class AddShows(Home):
known.append(item['show_id'])
t.all_shows.append(item)
- if any(filter_iter(lambda tp: tp in sickgear.BROWSELIST_HIDDEN, tvid_prodid_list)):
+ if any(filter(lambda tp: tp in sickgear.BROWSELIST_HIDDEN, tvid_prodid_list)):
item['hide'] = True
t.num_hidden += 1
@@ -6035,7 +6084,7 @@ class AddShows(Home):
any_qualities = [any_qualities]
if type(best_qualities) != list:
best_qualities = [best_qualities]
- new_quality = Quality.combineQualities(map_list(int, any_qualities), map_list(int, best_qualities))
+ new_quality = Quality.combine_qualities(list(map(int, any_qualities)), list(map(int, best_qualities)))
upgrade_once = config.checkbox_to_value(upgrade_once)
wanted_begin = config.minimax(wanted_begin, 0, -1, 10)
@@ -6230,7 +6279,7 @@ class Manage(MainHandler):
if cur_season not in result:
result[cur_season] = {}
- cur_quality = Quality.splitCompositeStatus(int(cur_result['status']))[1]
+ cur_quality = Quality.split_composite_status(int(cur_result['status']))[1]
result[cur_season][cur_episode] = {'name': cur_result['name'],
'airdateNever': 1000 > int(cur_result['airdate']),
'qualityCss': Quality.get_quality_css(cur_quality),
@@ -6250,9 +6299,9 @@ class Manage(MainHandler):
if event_sql_result:
for cur_result_event in event_sql_result:
if None is d_status and cur_result_event['action'] in Quality.DOWNLOADED:
- d_status, d_qual = Quality.splitCompositeStatus(cur_result_event['action'])
+ d_status, d_qual = Quality.split_composite_status(cur_result_event['action'])
if None is s_status and cur_result_event['action'] in Quality.SNATCHED_ANY:
- s_status, s_quality = Quality.splitCompositeStatus(cur_result_event['action'])
+ s_status, s_quality = Quality.split_composite_status(cur_result_event['action'])
aged = ((datetime.datetime.now() -
datetime.datetime.strptime(str(cur_result_event['date']),
sickgear.history.dateFormat))
@@ -6293,11 +6342,11 @@ class Manage(MainHandler):
if Quality.NONE == cur_quality:
return undo_from_history, change_to, status
- cur_status = Quality.splitCompositeStatus(int(cur_status))[0]
+ cur_status = Quality.split_composite_status(int(cur_status))[0]
if any([location]):
undo_from_history = True
change_to = statusStrings[DOWNLOADED]
- status = [Quality.compositeStatus(DOWNLOADED, d_qual or cur_quality)]
+ status = [Quality.composite_status(DOWNLOADED, d_qual or cur_quality)]
elif cur_status in Quality.SNATCHED_ANY + [IGNORED, SKIPPED, WANTED]:
if None is d_qual:
if cur_status not in [IGNORED, SKIPPED]:
@@ -6309,7 +6358,7 @@ class Manage(MainHandler):
or sickgear.SKIP_REMOVED_FILES in [ARCHIVED, IGNORED, SKIPPED]:
undo_from_history = True
change_to = '%s %s' % (statusStrings[ARCHIVED], Quality.qualityStrings[d_qual])
- status = [Quality.compositeStatus(ARCHIVED, d_qual)]
+ status = [Quality.composite_status(ARCHIVED, d_qual)]
elif sickgear.SKIP_REMOVED_FILES in [IGNORED, SKIPPED] \
and cur_status not in [IGNORED, SKIPPED]:
change_to = statusStrings[statusStrings[sickgear.SKIP_REMOVED_FILES]]
@@ -6403,8 +6452,7 @@ class Manage(MainHandler):
' AND season != 0'
' AND indexer = ? AND showid = ?',
status_list + tvid_prodid_list)
- what = (sql_result and '|'.join(map_iter(lambda r: '%sx%s' % (r['season'], r['episode']),
- sql_result))
+ what = (sql_result and '|'.join(map(lambda r: '%sx%s' % (r['season'], r['episode']), sql_result))
or None)
to = new_status
@@ -6562,7 +6610,8 @@ class Manage(MainHandler):
' WHERE indexer = ? AND showid = ?'
' AND season != 0 AND status LIKE \'%4\'',
TVidProdid(cur_tvid_prodid).list)
- to_download[cur_tvid_prodid] = map_list(lambda x: '%sx%s' % (x['season'], x['episode']), sql_result)
+ to_download[cur_tvid_prodid] = list(map(lambda x: '%sx%s' % (x['season'], x['episode']),
+ sql_result))
for epResult in to_download[cur_tvid_prodid]:
season, episode = epResult.split('x')
@@ -6897,7 +6946,7 @@ class Manage(MainHandler):
new_subtitles = 'on' if new_subtitles else 'off'
if 'keep' == quality_preset:
- any_qualities, best_qualities = Quality.splitQuality(show_obj.quality)
+ any_qualities, best_qualities = Quality.split_quality(show_obj.quality)
elif int(quality_preset):
best_qualities = []
@@ -7114,7 +7163,7 @@ class ManageSearch(Manage):
def retry_provider(provider=None):
if not provider:
return
- prov = [p for p in sickgear.providerList + sickgear.newznabProviderList if p.get_id() == provider]
+ prov = [p for p in sickgear.provider_list + sickgear.newznab_providers if p.get_id() == provider]
if not prov:
return
prov[0].retry_next()
@@ -7135,7 +7184,7 @@ class ManageSearch(Manage):
# force it to run the next time it looks
if not sickgear.search_queue_scheduler.action.is_recentsearch_in_progress():
- result = sickgear.recent_search_scheduler.forceRun()
+ result = sickgear.recent_search_scheduler.force_run()
if result:
logger.log(u'Recent search forced')
ui.notifications.message('Recent search started')
@@ -7146,7 +7195,7 @@ class ManageSearch(Manage):
def force_find_propers(self):
# force it to run the next time it looks
- result = sickgear.proper_finder_scheduler.forceRun()
+ result = sickgear.proper_finder_scheduler.force_run()
if result:
logger.log(u'Find propers search forced')
ui.notifications.message('Find propers search started')
@@ -7170,7 +7219,7 @@ class ShowTasks(Manage):
t = PageTemplate(web_handler=self, file='manage_showProcesses.tmpl')
t.queue_length = sickgear.show_queue_scheduler.action.queue_length()
t.people_queue = sickgear.people_queue_scheduler.action.queue_data()
- t.next_run = sickgear.show_update_scheduler.lastRun.replace(
+ t.next_run = sickgear.show_update_scheduler.last_run.replace(
hour=sickgear.show_update_scheduler.start_time.hour)
t.show_update_running = sickgear.show_queue_scheduler.action.is_show_update_running() \
or sickgear.show_update_scheduler.action.amActive
@@ -7256,7 +7305,7 @@ class ShowTasks(Manage):
def force_show_update(self):
- result = sickgear.show_update_scheduler.forceRun()
+ result = sickgear.show_update_scheduler.force_run()
if result:
logger.log(u'Show Update forced')
ui.notifications.message('Forced Show Update started')
@@ -7416,7 +7465,7 @@ class History(MainHandler):
r['status'] = r['status_w']
r['file_size'] = r['file_size_w']
- r['status'], r['quality'] = Quality.splitCompositeStatus(helpers.try_int(r['status']))
+ r['status'], r['quality'] = Quality.split_composite_status(helpers.try_int(r['status']))
r['season'], r['episode'] = '%02i' % r['season'], '%02i' % r['episode']
if r['tvep_id'] not in mru_count:
# depends on SELECT ORDER BY date_watched DESC to determine mru_count
@@ -7432,9 +7481,9 @@ class History(MainHandler):
elif 'stats' in sickgear.HISTORY_LAYOUT:
- prov_list = [p.name for p in (sickgear.providerList
- + sickgear.newznabProviderList
- + sickgear.torrentRssProviderList)]
+ prov_list = [p.name for p in (sickgear.provider_list
+ + sickgear.newznab_providers
+ + sickgear.torrent_rss_providers)]
# noinspection SqlResolve
sql = 'SELECT COUNT(1) AS count,' \
' MIN(DISTINCT date) AS earliest,' \
@@ -7461,12 +7510,12 @@ class History(MainHandler):
elif 'failures' in sickgear.HISTORY_LAYOUT:
- t.provider_fail_stats = filter_list(lambda stat: len(stat['fails']), [
+ t.provider_fail_stats = list(filter(lambda stat: len(stat['fails']), [
dict(name=p.name, id=p.get_id(), active=p.is_active(), prov_img=p.image_name(),
prov_id=p.get_id(), # 2020.03.17 legacy var, remove at future date
fails=p.fails.fails_sorted, next_try=p.get_next_try_time,
has_limit=getattr(p, 'has_limit', False), tmr_limit_time=p.tmr_limit_time)
- for p in sickgear.providerList + sickgear.newznabProviderList])
+ for p in sickgear.provider_list + sickgear.newznab_providers]))
t.provider_fail_cnt = len([p for p in t.provider_fail_stats if len(p['fails'])])
t.provider_fails = t.provider_fail_cnt # 2020.03.17 legacy var, remove at future date
@@ -7500,11 +7549,11 @@ class History(MainHandler):
return result
with sg_helpers.DOMAIN_FAILURES.lock:
- t.domain_fail_stats = filter_list(lambda stat: len(stat['fails']), [
+ t.domain_fail_stats = list(filter(lambda stat: len(stat['fails']), [
dict(name=k, id=sickgear.GenericProvider.make_id(k), img=img(k), cls=img(k, True),
fails=v.fails_sorted, next_try=v.get_next_try_time,
has_limit=getattr(v, 'has_limit', False), tmr_limit_time=v.tmr_limit_time)
- for k, v in iteritems(sg_helpers.DOMAIN_FAILURES.domain_list)])
+ for k, v in iteritems(sg_helpers.DOMAIN_FAILURES.domain_list)]))
t.domain_fail_cnt = len([d for d in t.domain_fail_stats if len(d['fails'])])
@@ -7658,7 +7707,7 @@ class History(MainHandler):
ParentId=folder_id,
Filters='IsPlayed',
format='json'), timeout=10, parse_json=True) or {}
- for d in filter_iter(lambda item: 'Episode' == item.get('Type', ''), items.get('Items')):
+ for d in filter(lambda item: 'Episode' == item.get('Type', ''), items.get('Items')):
try:
root_dir_found = False
path_file = d.get('Path')
@@ -7700,11 +7749,11 @@ class History(MainHandler):
if states:
# Prune user removed items that are no longer being returned by API
- media_paths = map_list(lambda arg: os.path.basename(arg[1]['path_file']), iteritems(states))
+ media_paths = list(map(lambda arg: os.path.basename(arg[1]['path_file']), iteritems(states)))
sql = 'FROM tv_episodes_watched WHERE hide=1 AND label LIKE "%%{Emby}"'
my_db = db.DBConnection(row_type='dict')
files = my_db.select('SELECT location %s' % sql)
- for i in filter_iter(lambda f: os.path.basename(f['location']) not in media_paths, files):
+ for i in filter(lambda f: os.path.basename(f['location']) not in media_paths, files):
loc = i.get('location')
if loc:
my_db.select('DELETE %s AND location="%s"' % (sql, loc))
@@ -7769,11 +7818,11 @@ class History(MainHandler):
if states:
# Prune user removed items that are no longer being returned by API
- media_paths = map_list(lambda arg: os.path.basename(arg[1]['path_file']), iteritems(states))
+ media_paths = list(map(lambda arg: os.path.basename(arg[1]['path_file']), iteritems(states)))
sql = 'FROM tv_episodes_watched WHERE hide=1 AND label LIKE "%%{Plex}"'
my_db = db.DBConnection(row_type='dict')
files = my_db.select('SELECT location %s' % sql)
- for i in filter_iter(lambda f: os.path.basename(f['location']) not in media_paths, files):
+ for i in filter(lambda f: os.path.basename(f['location']) not in media_paths, files):
loc = i.get('location')
if loc:
my_db.select('DELETE %s AND location="%s"' % (sql, loc))
@@ -7844,8 +7893,8 @@ class History(MainHandler):
for cur_result in sql_result:
show_obj = helpers.find_show_by_id(tvid_prodid_dict)
ep_obj = show_obj.get_episode(cur_result['season'], cur_result['episode'])
- for n in filter_iter(lambda x: x.name.lower() in ('emby', 'kodi', 'plex'),
- notifiers.NotifierFactory().get_enabled()):
+ for n in filter(lambda x: x.name.lower() in ('emby', 'kodi', 'plex'),
+ notifiers.NotifierFactory().get_enabled()):
if 'PLEX' == n.name:
if updating:
continue
@@ -7966,7 +8015,7 @@ class ConfigGeneral(Config):
seasons = [-1] + seasons[0:-1] # bubble -1
# prepare a seasonal ordered dict for output
- alts = ordered_dict([(season, {}) for season in seasons])
+ alts = dict([(season, {}) for season in seasons])
# add original show name
show_obj = sickgear.helpers.find_show_by_id(tvid_prodid, no_mapped_ids=True)
@@ -8015,7 +8064,7 @@ class ConfigGeneral(Config):
return json_dumps(dict(text='%s\n\n' % ui_output))
@staticmethod
- def generate_key():
+ def generate_key(*args, **kwargs):
""" Return a new randomized API_KEY
"""
# Create some values to seed md5
@@ -8023,8 +8072,10 @@ class ConfigGeneral(Config):
result = hashlib.new('md5', decode_bytes(seed)).hexdigest()
- # Return a hex digest of the md5, eg 49f68a5c8493ec2c0bf489821c21fc3b
- logger.log(u'New API generated')
+ # Return a hex digest of the md5, e.g. 49f68a5c8493ec2c0bf489821c21fc3b
+ app_name = kwargs.get('app_name')
+ app_name = '' if not app_name else ' for [%s]' % app_name
+ logger.log(u'New API generated%s' % app_name)
return result
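Condensed, the surviving `generate_key` recipe is: seed md5 with the current time plus a random float, then return the hex digest (the exact seed concatenation is an assumption, based on the `t`/`r` values visible in the removed `generateKey` below):

```python
import hashlib
import random
import time

seed = str(time.time()) + str(random.random())
api_key = hashlib.new('md5', seed.encode('utf-8')).hexdigest()
# -> 32 hex chars, e.g. '49f68a5c8493ec2c0bf489821c21fc3b'
```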
@@ -8057,8 +8108,8 @@ class ConfigGeneral(Config):
any_qualities = ([], any_qualities.split(','))[any(any_qualities)]
best_qualities = ([], best_qualities.split(','))[any(best_qualities)]
- sickgear.QUALITY_DEFAULT = int(Quality.combineQualities(map_list(int, any_qualities),
- map_list(int, best_qualities)))
+ sickgear.QUALITY_DEFAULT = int(Quality.combine_qualities(list(map(int, any_qualities)),
+ list(map(int, best_qualities))))
sickgear.WANTED_BEGIN_DEFAULT = config.minimax(default_wanted_begin, 0, -1, 10)
sickgear.WANTED_LATEST_DEFAULT = config.minimax(default_wanted_latest, 0, -1, 10)
sickgear.SHOW_TAG_DEFAULT = default_tag
@@ -8071,33 +8122,6 @@ class ConfigGeneral(Config):
sickgear.save_config()
- @staticmethod
- def generateKey(*args, **kwargs):
- """ Return a new randomized API_KEY
- """
-
- try:
- from hashlib import md5
- except ImportError:
- # noinspection PyUnresolvedReferences,PyCompatibility
- from md5 import md5
-
- # Create some values to seed md5
- t = str(time.time())
- r = str(random.random())
-
- # Create the md5 instance and give it the current time
- m = md5(decode_bytes(t))
-
- # Update the md5 instance with the random variable
- m.update(decode_bytes(r))
-
- # Return a hex digest of the md5, eg 49f68a5c8493ec2c0bf489821c21fc3b
- app_name = kwargs.get('app_name')
- app_name = '' if not app_name else ' for [%s]' % app_name
- logger.log(u'New apikey generated%s' % app_name)
- return m.hexdigest()
-
def create_apikey(self, app_name):
result = dict()
if not app_name:
@@ -8105,7 +8129,7 @@ class ConfigGeneral(Config):
elif app_name in [k[0] for k in sickgear.API_KEYS if k[0]]:
result['result'] = 'Failed: name is not unique'
else:
- api_key = self.generateKey(app_name=app_name)
+ api_key = self.generate_key(app_name=app_name)
if api_key in [k[1] for k in sickgear.API_KEYS if k[0]]:
result['result'] = 'Failed: apikey already exists, try again'
else:
@@ -8203,7 +8227,7 @@ class ConfigGeneral(Config):
sickgear.FANART_LIMIT = config.minimax(fanart_limit, 3, 0, 500)
sickgear.SHOWLIST_TAGVIEW = showlist_tagview
- # 'Show List' is the must have default fallback. Tags in use that are removed from config ui are restored,
+ # 'Show List' is the must-have default fallback. Tags in use that are removed from config ui are restored,
# not deleted. Deduped list order preservation is key to feature function.
my_db = db.DBConnection()
sql_result = my_db.select('SELECT DISTINCT tag FROM tv_shows')
@@ -8215,7 +8239,7 @@ class ConfigGeneral(Config):
results += [u'An attempt was prevented to remove a show list group name still in use']
dedupe = {}
sickgear.SHOW_TAGS = [dedupe.setdefault(item, item) for item in (cleanser + new_names + [u'Show List'])
- if item not in dedupe]
+ if item not in dedupe]
sickgear.HOME_SEARCH_FOCUS = config.checkbox_to_value(home_search_focus)
sickgear.USE_IMDB_INFO = config.checkbox_to_value(use_imdb_info)
@@ -8258,8 +8282,8 @@ class ConfigGeneral(Config):
sickgear.WEB_IPV64 = config.checkbox_to_value(web_ipv64)
sickgear.HANDLE_REVERSE_PROXY = config.checkbox_to_value(handle_reverse_proxy)
sickgear.SEND_SECURITY_HEADERS = config.checkbox_to_value(send_security_headers)
- hosts = ','.join(filter_iter(lambda name: not helpers.re_valid_hostname(with_allowed=False).match(name),
- config.clean_hosts(allowed_hosts).split(',')))
+ hosts = ','.join(filter(lambda name: not helpers.re_valid_hostname(with_allowed=False).match(name),
+ config.clean_hosts(allowed_hosts).split(',')))
if not hosts or self.request.host_name in hosts:
sickgear.ALLOWED_HOSTS = hosts
sickgear.ALLOW_ANYIP = config.checkbox_to_value(allow_anyip)
@@ -8399,9 +8423,9 @@ class ConfigSearch(Config):
sickgear.USENET_RETENTION = config.to_int(usenet_retention, default=500)
sickgear.IGNORE_WORDS, sickgear.IGNORE_WORDS_REGEX = helpers.split_word_str(ignore_words
- if ignore_words else '')
+ if ignore_words else '')
sickgear.REQUIRE_WORDS, sickgear.REQUIRE_WORDS_REGEX = helpers.split_word_str(require_words
- if require_words else '')
+ if require_words else '')
clean_ignore_require_words()
@@ -8410,7 +8434,7 @@ class ConfigSearch(Config):
sickgear.SEARCH_UNAIRED = bool(config.checkbox_to_value(search_unaired))
sickgear.UNAIRED_RECENT_SEARCH_ONLY = bool(config.checkbox_to_value(unaired_recent_search_only,
- value_off=1, value_on=0))
+ value_off=1, value_on=0))
sickgear.FLARESOLVERR_HOST = config.clean_url(flaresolverr_host)
sg_helpers.FLARESOLVERR_HOST = sickgear.FLARESOLVERR_HOST
@@ -8672,9 +8696,9 @@ class ConfigProviders(Config):
return json_dumps({'error': 'No Provider Name or url specified'})
provider_dict = dict(zip([sickgear.providers.generic_provider_name(x.get_id())
- for x in sickgear.newznabProviderList], sickgear.newznabProviderList))
+ for x in sickgear.newznab_providers], sickgear.newznab_providers))
provider_url_dict = dict(zip([sickgear.providers.generic_provider_url(x.url)
- for x in sickgear.newznabProviderList], sickgear.newznabProviderList))
+ for x in sickgear.newznab_providers], sickgear.newznab_providers))
temp_provider = newznab.NewznabProvider(name, config.clean_url(url))
@@ -8698,12 +8722,12 @@ class ConfigProviders(Config):
error = '\nNo provider %s specified' % error
return json_dumps({'success': False, 'error': error})
- if name in [n.name for n in sickgear.newznabProviderList if n.url == url]:
- provider = [n for n in sickgear.newznabProviderList if n.name == name][0]
+ if name in [n.name for n in sickgear.newznab_providers if n.url == url]:
+ provider = [n for n in sickgear.newznab_providers if n.name == name][0]
tv_categories = provider.clean_newznab_categories(provider.all_cats)
state = provider.is_enabled()
else:
- providers = dict(zip([x.get_id() for x in sickgear.newznabProviderList], sickgear.newznabProviderList))
+ providers = dict(zip([x.get_id() for x in sickgear.newznab_providers], sickgear.newznab_providers))
temp_provider = newznab.NewznabProvider(name, url, key)
if None is not key and starify(key, True):
temp_provider.key = providers[temp_provider.get_id()].key
@@ -8719,7 +8743,7 @@ class ConfigProviders(Config):
return json_dumps({'error': 'Invalid name specified'})
provider_dict = dict(
- zip([x.get_id() for x in sickgear.torrentRssProviderList], sickgear.torrentRssProviderList))
+ zip([x.get_id() for x in sickgear.torrent_rss_providers], sickgear.torrent_rss_providers))
temp_provider = rsstorrent.TorrentRssProvider(name, url, cookies)
@@ -8734,7 +8758,7 @@ class ConfigProviders(Config):
@staticmethod
def check_providers_ping():
- for p in sickgear.providers.sortedProviderList():
+ for p in sickgear.providers.sorted_sources():
if getattr(p, 'ping_iv', None):
if p.is_active() and (p.get_id() not in sickgear.provider_ping_thread_pool
or not sickgear.provider_ping_thread_pool[p.get_id()].is_alive()):
@@ -8752,7 +8776,7 @@ class ConfigProviders(Config):
pass
# stop removed providers
- prov = [n.get_id() for n in sickgear.providers.sortedProviderList()]
+ prov = [n.get_id() for n in sickgear.providers.sorted_sources()]
for p in [x for x in sickgear.provider_ping_thread_pool if x not in prov]:
sickgear.provider_ping_thread_pool[p].stop = True
try:
@@ -8768,7 +8792,7 @@ class ConfigProviders(Config):
provider_list = []
# add all the newznab info we have into our list
- newznab_sources = dict(zip([x.get_id() for x in sickgear.newznabProviderList], sickgear.newznabProviderList))
+ newznab_sources = dict(zip([x.get_id() for x in sickgear.newznab_providers], sickgear.newznab_providers))
active_ids = []
reload_page = False
if newznab_string:
@@ -8811,7 +8835,7 @@ class ConfigProviders(Config):
[k for k in nzb_src.may_filter
if config.checkbox_to_value(kwargs.get('%s_filter_%s' % (cur_id, k)))])
- for attr in filter_iter(lambda a: hasattr(nzb_src, a), [
+ for attr in filter(lambda a: hasattr(nzb_src, a), [
'search_fallback', 'enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog',
'scene_only', 'scene_loose', 'scene_loose_active', 'scene_rej_nuked', 'scene_nuked_active'
]):
@@ -8825,18 +8849,18 @@ class ConfigProviders(Config):
new_provider.enabled = True
_ = new_provider.caps # when adding a custom, trigger server_type update
new_provider.enabled = False
- sickgear.newznabProviderList.append(new_provider)
+ sickgear.newznab_providers.append(new_provider)
active_ids.append(cur_id)
# delete anything that is missing
if sickgear.USE_NZBS:
- for source in [x for x in sickgear.newznabProviderList if x.get_id() not in active_ids]:
- sickgear.newznabProviderList.remove(source)
+ for source in [x for x in sickgear.newznab_providers if x.get_id() not in active_ids]:
+ sickgear.newznab_providers.remove(source)
# add all the torrent RSS info we have into our list
- torrent_rss_sources = dict(zip([x.get_id() for x in sickgear.torrentRssProviderList],
- sickgear.torrentRssProviderList))
+ torrent_rss_sources = dict(zip([x.get_id() for x in sickgear.torrent_rss_providers],
+ sickgear.torrent_rss_providers))
active_ids = []
if torrentrss_string:
for curTorrentRssProviderStr in torrentrss_string.split('!!!'):
@@ -8872,19 +8896,19 @@ class ConfigProviders(Config):
if attr_check in kwargs:
setattr(torrss_src, attr, str(kwargs.get(attr_check) or '').strip())
else:
- sickgear.torrentRssProviderList.append(new_provider)
+ sickgear.torrent_rss_providers.append(new_provider)
active_ids.append(cur_id)
# delete anything that is missing
if sickgear.USE_TORRENTS:
- for source in [x for x in sickgear.torrentRssProviderList if x.get_id() not in active_ids]:
- sickgear.torrentRssProviderList.remove(source)
+ for source in [x for x in sickgear.torrent_rss_providers if x.get_id() not in active_ids]:
+ sickgear.torrent_rss_providers.remove(source)
# enable/disable states of source providers
provider_str_list = provider_order.split()
- sources = dict(zip([x.get_id() for x in sickgear.providers.sortedProviderList()],
- sickgear.providers.sortedProviderList()))
+ sources = dict(zip([x.get_id() for x in sickgear.providers.sorted_sources()],
+ sickgear.providers.sorted_sources()))
for cur_src_str in provider_str_list:
src_name, src_enabled = cur_src_str.split(':')
@@ -8908,7 +8932,7 @@ class ConfigProviders(Config):
torrent_rss_sources[src_name].enabled = src_enabled
# update torrent source settings
- for torrent_src in [src for src in sickgear.providers.sortedProviderList()
+ for torrent_src in [src for src in sickgear.providers.sorted_sources()
if sickgear.GenericProvider.TORRENT == src.providerType]: # type: TorrentProvider
src_id_prefix = torrent_src.get_id() + '_'
@@ -8925,12 +8949,12 @@ class ConfigProviders(Config):
elif not starify(key, True):
setattr(torrent_src, attr, key)
- for attr in filter_iter(lambda a: hasattr(torrent_src, a), [
+ for attr in filter(lambda a: hasattr(torrent_src, a), [
'username', 'uid', '_seed_ratio', 'scene_or_contain'
]):
setattr(torrent_src, attr, str(kwargs.get(src_id_prefix + attr.replace('_seed_', ''), '')).strip())
- for attr in filter_iter(lambda a: hasattr(torrent_src, a), [
+ for attr in filter(lambda a: hasattr(torrent_src, a), [
'minseed', 'minleech', 'seed_time'
]):
setattr(torrent_src, attr, config.to_int(str(kwargs.get(src_id_prefix + attr, '')).strip()))
@@ -8941,7 +8965,7 @@ class ConfigProviders(Config):
[k for k in getattr(torrent_src, 'may_filter', 'nop')
if config.checkbox_to_value(kwargs.get('%sfilter_%s' % (src_id_prefix, k)))])
- for attr in filter_iter(lambda a: hasattr(torrent_src, a), [
+ for attr in filter(lambda a: hasattr(torrent_src, a), [
'confirmed', 'freeleech', 'reject_m2ts', 'use_after_get_data', 'enable_recentsearch',
'enable_backlog', 'search_fallback', 'enable_scheduled_backlog',
'scene_only', 'scene_loose', 'scene_loose_active',
@@ -8949,13 +8973,13 @@ class ConfigProviders(Config):
]):
setattr(torrent_src, attr, config.checkbox_to_value(kwargs.get(src_id_prefix + attr)))
- for attr, default in filter_iter(lambda arg: hasattr(torrent_src, arg[0]), [
+ for attr, default in filter(lambda arg: hasattr(torrent_src, arg[0]), [
('search_mode', 'eponly'),
]):
setattr(torrent_src, attr, str(kwargs.get(src_id_prefix + attr) or default).strip())
# update nzb source settings
- for nzb_src in [src for src in sickgear.providers.sortedProviderList() if
+ for nzb_src in [src for src in sickgear.providers.sorted_sources() if
sickgear.GenericProvider.NZB == src.providerType]:
src_id_prefix = nzb_src.get_id() + '_'
@@ -8973,17 +8997,17 @@ class ConfigProviders(Config):
setattr(nzb_src, attr, config.checkbox_to_value(kwargs.get(src_id_prefix + attr)) or
not getattr(nzb_src, 'supports_backlog', True))
- for attr in filter_iter(lambda a: hasattr(nzb_src, a),
- ['search_fallback', 'enable_backlog', 'enable_scheduled_backlog',
- 'scene_only', 'scene_loose', 'scene_loose_active',
- 'scene_rej_nuked', 'scene_nuked_active']):
+ for attr in filter(lambda a: hasattr(nzb_src, a),
+ ['search_fallback', 'enable_backlog', 'enable_scheduled_backlog',
+ 'scene_only', 'scene_loose', 'scene_loose_active',
+ 'scene_rej_nuked', 'scene_nuked_active']):
setattr(nzb_src, attr, config.checkbox_to_value(kwargs.get(src_id_prefix + attr)))
for (attr, default) in [('scene_or_contain', ''), ('search_mode', 'eponly')]:
if hasattr(nzb_src, attr):
setattr(nzb_src, attr, str(kwargs.get(src_id_prefix + attr) or default).strip())
- sickgear.NEWZNAB_DATA = '!!!'.join([x.config_str() for x in sickgear.newznabProviderList])
+ sickgear.NEWZNAB_DATA = '!!!'.join([x.config_str() for x in sickgear.newznab_providers])
sickgear.PROVIDER_ORDER = provider_list
helpers.clear_unused_providers()
@@ -9491,17 +9515,11 @@ class EventLogs(MainHandler):
class WebFileBrowser(MainHandler):
def index(self, path='', include_files=False, **kwargs):
- """ prevent issues with requests using legacy params """
- include_files = include_files or kwargs.get('includeFiles') or False
- """ /legacy """
self.set_header('Content-Type', 'application/json')
return json_dumps(folders_at_path(path, True, bool(int(include_files))))
def complete(self, term, include_files=0, **kwargs):
- """ prevent issues with requests using legacy params """
- include_files = include_files or kwargs.get('includeFiles') or False
- """ /legacy """
self.set_header('Content-Type', 'application/json')
return json_dumps([entry['path'] for entry in folders_at_path(
@@ -9704,7 +9722,7 @@ class CachedImages(MainHandler):
:param tvid_prodid:
:param thumb: return thumb or normal as fallback
:param pid: optional person_id
- :param prefer_person: prefer person image if person_id is set and character has more then 1 person assigned
+ :param prefer_person: prefer person image if person_id is set and character has more than 1 person assigned
"""
_ = kwargs.get('oid') # suppress pyc non used var highlight, oid (original id) is a visual ui key
show_obj = tvid_prodid and helpers.find_show_by_id(tvid_prodid)
diff --git a/sickgear/webserveInit.py b/sickgear/webserveInit.py
index 8fd7b086..025afaa6 100644
--- a/sickgear/webserveInit.py
+++ b/sickgear/webserveInit.py
@@ -1,5 +1,5 @@
import os
-from sys import exc_info, platform
+from sys import exc_info
import threading
from tornado.ioloop import IOLoop
@@ -8,14 +8,9 @@ from tornado.routing import AnyMatches, Rule
from tornado.web import Application, _ApplicationRouter
from . import logger, webapi, webserve
-from ._legacy import LegacyConfigPostProcessing, LegacyHomeAddShows, \
- LegacyManageManageSearches, LegacyManageShowProcesses, LegacyErrorLogs
from .helpers import create_https_certificates, re_valid_hostname
import sickgear
-from _23 import PY38
-from six import PY2
-
# noinspection PyUnreachableCode
if False:
# noinspection PyUnresolvedReferences
@@ -219,22 +214,6 @@ class WebServer(threading.Thread):
(r'%s/api/builder(/?)(.*)' % self.options['web_root'], webserve.ApiBuilder),
(r'%s/api(/?.*)' % self.options['web_root'], webapi.Api),
# ----------------------------------------------------------------------------------------------------------
- # legacy deprecated Aug 2019
- (r'%s/home/addShows/?$' % self.options['web_root'], LegacyHomeAddShows),
- (r'%s/manage/manageSearches/?$' % self.options['web_root'], LegacyManageManageSearches),
- (r'%s/manage/showProcesses/?$' % self.options['web_root'], LegacyManageShowProcesses),
- (r'%s/config/postProcessing/?$' % self.options['web_root'], LegacyConfigPostProcessing),
- (r'%s/errorlogs/?$' % self.options['web_root'], LegacyErrorLogs),
- (r'%s/home/is_alive(/?.*)' % self.options['web_root'], webserve.IsAliveHandler),
- (r'%s/home/addShows(/?.*)' % self.options['web_root'], webserve.AddShows),
- (r'%s/manage/manageSearches(/?.*)' % self.options['web_root'], webserve.ManageSearch),
- (r'%s/manage/showProcesses(/?.*)' % self.options['web_root'], webserve.ShowTasks),
- (r'%s/config/postProcessing(/?.*)' % self.options['web_root'], webserve.ConfigMediaProcess),
- (r'%s/errorlogs(/?.*)' % self.options['web_root'], webserve.EventLogs),
- # ----------------------------------------------------------------------------------------------------------
- # legacy deprecated Aug 2019 - never remove as used in external scripts
- (r'%s/home/postprocess(/?.*)' % self.options['web_root'], webserve.HomeProcessMedia),
- (r'%s(/?update_watched_state_kodi/?)' % self.options['web_root'], webserve.NoXSRFHandler),
# regular catchall routes - keep here at the bottom
(r'%s/home(/?.*)' % self.options['web_root'], webserve.Home),
(r'%s/manage/(/?.*)' % self.options['web_root'], webserve.Manage),
@@ -255,14 +234,10 @@ class WebServer(threading.Thread):
logger.log(u'Starting SickGear on %s://%s:%s/' % (protocol, self.options['host'], self.options['port']))
# python 3 needs to start event loop first
- if not PY2:
- import asyncio
- if 'win32' == platform and PY38:
- # noinspection PyUnresolvedReferences
- asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
- asyncio.set_event_loop(asyncio.new_event_loop())
- from tornado.platform.asyncio import AnyThreadEventLoopPolicy
- asyncio.set_event_loop_policy(AnyThreadEventLoopPolicy())
+ import asyncio
+ asyncio.set_event_loop(asyncio.new_event_loop())
+ from tornado.platform.asyncio import AnyThreadEventLoopPolicy
+ asyncio.set_event_loop_policy(AnyThreadEventLoopPolicy())
try:
self.server = self.app.listen(self.options['port'], self.options['host'], ssl_options=ssl_options,
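
The removed branch only existed for py2 and the Windows/Python 3.8 Proactor-loop quirk; with py3-only support, a server thread just creates its own event loop and installs Tornado's `AnyThreadEventLoopPolicy`. A minimal standalone sketch of that pattern (handler and port are illustrative, not SickGear code):

```python
import asyncio
import threading

from tornado.ioloop import IOLoop
from tornado.platform.asyncio import AnyThreadEventLoopPolicy
from tornado.web import Application, RequestHandler


class Ping(RequestHandler):
    def get(self):
        self.write('pong')


def serve(port=8081):
    # a fresh event loop for this (non-main) thread, as in the startup above
    asyncio.set_event_loop(asyncio.new_event_loop())
    # let Tornado resolve a loop from any thread, not just the main one
    asyncio.set_event_loop_policy(AnyThreadEventLoopPolicy())
    Application([(r'/ping', Ping)]).listen(port)
    IOLoop.current().start()


threading.Thread(target=serve).start()
```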
diff --git a/tests/common_tests.py b/tests/common_tests.py
index 5c5ad53a..b254283b 100644
--- a/tests/common_tests.py
+++ b/tests/common_tests.py
@@ -135,7 +135,7 @@ class QualityTests(unittest.TestCase):
def check_quality_names(self, quality, cases):
for fn in cases:
- second = common.Quality.nameQuality(fn)
+ second = common.Quality.name_quality(fn)
self.assertEqual(quality, second, msg='fail [%s] != [%s] for case: %s' %
(Quality.qualityStrings[quality], Quality.qualityStrings[second], fn))
@@ -148,7 +148,7 @@ class QualityTests(unittest.TestCase):
def check_wantedquality_list(self, cases):
for show_quality, result in cases:
- sq = common.Quality.combineQualities(*show_quality)
+ sq = common.Quality.combine_qualities(*show_quality)
wd = common.WantedQualities()
_ = wd.get_wantedlist(sq, False, common.Quality.NONE, common.UNAIRED, manual=True)
for w, v in iteritems(wd):
@@ -158,7 +158,7 @@ class QualityTests(unittest.TestCase):
def check_wantedquality_get_wantedlist(self, cases):
for show_quality, result in cases:
- sq = common.Quality.combineQualities(*show_quality)
+ sq = common.Quality.combine_qualities(*show_quality)
wd = common.WantedQualities()
for case, wlist in result:
ka = {'qualities': sq}
@@ -169,7 +169,7 @@ class QualityTests(unittest.TestCase):
def check_sceneQuality(self, cases):
msg = 'Test case: "%s", actual: [%s] != expected: [%s]'
for show_name, result in cases:
- sq = common.Quality.sceneQuality(show_name[0], show_name[1])
+ sq = common.Quality.scene_quality(show_name[0], show_name[1])
self.assertEqual(result, sq, msg=msg % (show_name[0], Quality.qualityStrings[sq],
Quality.qualityStrings[result]))
@@ -177,8 +177,8 @@ class QualityTests(unittest.TestCase):
def test_SDTV(self):
- self.assertEqual(common.Quality.compositeStatus(common.DOWNLOADED, common.Quality.SDTV),
- common.Quality.statusFromName('Test.Show.S01E02-GROUP.mkv'))
+ self.assertEqual(common.Quality.composite_status(common.DOWNLOADED, common.Quality.SDTV),
+ common.Quality.status_from_name('Test.Show.S01E02-GROUP.mkv'))
def test_qualites(self):
self.longMessage = True
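
The renamed helpers above pack an episode status and a quality flag into a single integer. In the legacy Sick* scheme this is, roughly, `status + 100 * quality`; a toy round trip of that encoding (example constants, not the real `sickgear.common` values — the real `split_composite_status` scans the known quality flags rather than using modulo):

```python
# Toy sketch of the composite-status encoding exercised by these tests.
DOWNLOADED = 4   # example status code
SDTV = 1         # example quality flag


def composite_status(status, quality):
    # low two digits carry the status; hundreds and up carry the quality
    return status + 100 * quality


def split_composite_status(composite):
    # inverse for single-flag qualities; the real code walks known flags
    return composite % 100, composite // 100


assert split_composite_status(composite_status(DOWNLOADED, SDTV)) == (DOWNLOADED, SDTV)
```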
diff --git a/tests/helpers_tests.py b/tests/helpers_tests.py
index e78da024..e80e1827 100644
--- a/tests/helpers_tests.py
+++ b/tests/helpers_tests.py
@@ -65,7 +65,7 @@ class HelpersTests(unittest.TestCase):
((WANTED, Quality.NONE), True),
]
for c, b in test_cases:
- self.assertEqual(helpers.should_delete_episode(Quality.compositeStatus(*c)), b)
+ self.assertEqual(helpers.should_delete_episode(Quality.composite_status(*c)), b)
def test_encrypt(self):
helpers.unique_key1 = '0x12d48f154876c16164a1646'
diff --git a/tests/migration_tests.py b/tests/migration_tests.py
index 1682afdb..2a2d3a6c 100644
--- a/tests/migration_tests.py
+++ b/tests/migration_tests.py
@@ -48,7 +48,7 @@ class MigrationBasicTests(test.SickbeardTestDBCase):
update.execute()
sleep(0.1)
- db.MigrationCode(my_db)
+ db.migration_code(my_db)
my_db.close()
# force python to garbage collect all db connections, so that the file can be deleted
@@ -67,9 +67,9 @@ class MigrationBasicTests(test.SickbeardTestDBCase):
# 0 -> 31
class OldInitialSchema(db.SchemaUpgrade):
def execute(self):
- db.backup_database(self.connection, 'sickbeard.db', self.checkDBVersion())
+ db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
- if not self.hasTable('tv_shows') and not self.hasTable('db_version'):
+ if not self.has_table('tv_shows') and not self.has_table('db_version'):
queries = [
'CREATE TABLE db_version (db_version INTEGER);',
'CREATE TABLE history ('
@@ -105,7 +105,7 @@ class OldInitialSchema(db.SchemaUpgrade):
self.connection.action(query)
else:
- cur_db_version = self.checkDBVersion()
+ cur_db_version = self.call_check_db_version()
if cur_db_version < MIN_DB_VERSION:
logger.log_error_and_exit(
@@ -127,13 +127,13 @@ class OldInitialSchema(db.SchemaUpgrade):
' your database may be unusable due to their modifications.'
)
- return self.checkDBVersion()
+ return self.call_check_db_version()
class AddDefaultEpStatusToTvShows(db.SchemaUpgrade):
def execute(self):
- self.addColumn('tv_shows', 'default_ep_status', 'TEXT', '')
- self.setDBVersion(41, check_db_version=False)
+ self.add_column('tv_shows', 'default_ep_status', 'TEXT', '')
+ self.set_db_version(41, check_db_version=False)
if '__main__' == __name__:
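
After the rename, every migration step keeps the same shape: subclass `db.SchemaUpgrade`, back up, mutate the schema in `execute()`, then bump the version. A hypothetical step in the new naming (the helper names `has_table`, `add_column`, `set_db_version` and `call_check_db_version` come from the diff above; this particular upgrade is invented for illustration):

```python
from sickgear import db


class AddExampleFlagToTvShows(db.SchemaUpgrade):  # hypothetical upgrade step
    def execute(self):
        db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
        # add the new column with a default value, then record the new schema version
        self.add_column('tv_shows', 'example_flag', 'NUMERIC', 0)
        self.set_db_version(42, check_db_version=False)
        return self.call_check_db_version()
```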
diff --git a/tests/name_parser_tests.py b/tests/name_parser_tests.py
index daa49edc..6e052375 100644
--- a/tests/name_parser_tests.py
+++ b/tests/name_parser_tests.py
@@ -508,8 +508,8 @@ class MultiSceneNumbering(test.SickbeardTestDBCase):
)
my_db = db.DBConnection()
my_db.mass_action(c_l)
- name_cache.addNameToCache(e_t['show_obj']['name'], tvid=e_t['show_obj']['tvid'],
- prodid=e_t['show_obj']['prodid'])
+ name_cache.add_name_to_cache(e_t['show_obj']['name'], tvid=e_t['show_obj']['tvid'],
+ prodid=e_t['show_obj']['prodid'])
for _t in e_t['tests']:
try:
res = parser.NameParser(True, convert=True).parse(_t['parse_name'])
@@ -533,8 +533,8 @@ class EpisodeNameCases(unittest.TestCase):
e_obj.season = e_o['season']
e_obj.episode = e_o['number']
s.sxe_ep_obj.setdefault(e_obj.season, {})[e_obj.episode] = e_obj
- name_cache.addNameToCache(e_t['show_obj']['name'], tvid=e_t['show_obj']['tvid'],
- prodid=e_t['show_obj']['prodid'])
+ name_cache.add_name_to_cache(e_t['show_obj']['name'], tvid=e_t['show_obj']['tvid'],
+ prodid=e_t['show_obj']['prodid'])
try:
res = parser.NameParser(True).parse(e_t['parse_name'])
except (BaseException, Exception):
@@ -550,7 +550,7 @@ class InvalidCases(unittest.TestCase):
for s in [TVShowTest(name=rls_name, prodid=prodid, tvid=tvid, is_anime=is_anime)]:
sickgear.showList.append(s)
sickgear.showDict[s.sid_int] = s
- name_cache.addNameToCache(show_name, tvid=tvid, prodid=prodid)
+ name_cache.add_name_to_cache(show_name, tvid=tvid, prodid=prodid)
invalidexception = False
try:
_ = parser.NameParser(True).parse(rls_name)
@@ -939,7 +939,7 @@ class ExtraInfoNoNameTests(test.SickbeardTestDBCase):
sickgear.showList = [tvs]
sickgear.showDict = {tvs.sid_int: tvs}
name_cache.nameCache = {}
- name_cache.buildNameCache()
+ name_cache.build_name_cache()
np = parser.NameParser()
r = np.parse(case[2], cache_result=False)
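
These tests lean on a simple name-cache contract: a normalized show name maps to a `(tvid, prodid)` pair, with `(0, 0)` meaning cached but unmatched. A toy stand-in showing that contract (the real `sickgear.name_cache` sanitizes names more thoroughly and persists to the cache db):

```python
# Toy stand-in for sickgear.name_cache, for illustration only.
name_cache = {}


def add_name_to_cache(name, tvid=0, prodid=0):
    # the real module sanitizes the name (case, punctuation) before keying
    name_cache[name.lower()] = (tvid, prodid)


def retrieve_name_from_cache(name):
    # (tvid, prodid) if cached, else (None, None)
    return name_cache.get(name.lower(), (None, None))


add_name_to_cache('Cached Name', prodid=0)
assert (0, 0) == retrieve_name_from_cache('Cached Name')
```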
diff --git a/tests/pp_tests.py b/tests/pp_tests.py
index f6fd97a4..58e05bf5 100644
--- a/tests/pp_tests.py
+++ b/tests/pp_tests.py
@@ -27,7 +27,7 @@ import unittest
import sickgear
from sickgear.helpers import real_path
-from sickgear.name_cache import addNameToCache
+from sickgear.name_cache import add_name_to_cache
from sickgear.postProcessor import PostProcessor
from sickgear.processTV import ProcessTVShow
from sickgear.tv import TVEpisode, TVShow, logger
@@ -94,7 +94,7 @@ class PPBasicTests(test.SickbeardTestDBCase):
ep_obj.release_name = 'test setter'
ep_obj.save_to_db()
- addNameToCache('show name', tvid=TVINFO_TVDB, prodid=3)
+ add_name_to_cache('show name', tvid=TVINFO_TVDB, prodid=3)
sickgear.PROCESS_METHOD = 'move'
pp = PostProcessor(test.FILEPATH)
diff --git a/tests/scene_helpers_tests.py b/tests/scene_helpers_tests.py
index 2827522b..e49e27aa 100644
--- a/tests/scene_helpers_tests.py
+++ b/tests/scene_helpers_tests.py
@@ -75,7 +75,7 @@ class SceneExceptionTestCase(test.SickbeardTestDBCase):
sickgear.showDict[s.sid_int] = s
sickgear.webserve.Home.make_showlist_unique_names()
scene_exceptions.retrieve_exceptions()
- name_cache.buildNameCache()
+ name_cache.build_name_cache()
def test_sceneExceptionsEmpty(self):
self.assertEqual(scene_exceptions.get_scene_exceptions(0, 0), [])
@@ -99,7 +99,7 @@ class SceneExceptionTestCase(test.SickbeardTestDBCase):
sickgear.showList.append(s)
sickgear.showDict[s.sid_int] = s
scene_exceptions.retrieve_exceptions()
- name_cache.buildNameCache()
+ name_cache.build_name_cache()
self.assertEqual(scene_exceptions.get_scene_exception_by_name(u'ブラック・ラグーン'), [1, 79604, -1])
self.assertEqual(scene_exceptions.get_scene_exception_by_name(u'Burakku Ragūn'), [1, 79604, -1])
self.assertEqual(scene_exceptions.get_scene_exception_by_name('Rokka no Yuusha'), [1, 295243, -1])
@@ -114,11 +114,11 @@ class SceneExceptionTestCase(test.SickbeardTestDBCase):
my_db.action('DELETE FROM scene_exceptions WHERE 1=1')
# put something in the cache
- name_cache.addNameToCache('Cached Name', prodid=0)
+ name_cache.add_name_to_cache('Cached Name', prodid=0)
# updating should not clear the cache this time since our exceptions didn't change
scene_exceptions.retrieve_exceptions()
- self.assertEqual(name_cache.retrieveNameFromCache('Cached Name'), (0, 0))
+ self.assertEqual(name_cache.retrieve_name_from_cache('Cached Name'), (0, 0))
if '__main__' == __name__:
diff --git a/tests/show_tests.py b/tests/show_tests.py
index 06471da0..82bac9b0 100644
--- a/tests/show_tests.py
+++ b/tests/show_tests.py
@@ -31,7 +31,7 @@ from sickgear.tv import TVEpisode, TVShow
wanted_tests = [
dict(
name='Start and End',
- show=dict(indexer=1, indexerid=1, quality=Quality.combineQualities([Quality.SDTV], [])),
+ show=dict(indexer=1, indexerid=1, quality=Quality.combine_qualities([Quality.SDTV], [])),
episodes=[
dict(season=1, episode=1, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 1)),
dict(season=1, episode=2, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 1)),
@@ -64,7 +64,7 @@ wanted_tests = [
dict(
name='Start and End, entire season',
- show=dict(indexer=1, indexerid=10, quality=Quality.combineQualities([Quality.SDTV], [])),
+ show=dict(indexer=1, indexerid=10, quality=Quality.combine_qualities([Quality.SDTV], [])),
episodes=[
dict(season=1, episode=1, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 2)),
dict(season=1, episode=2, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 1)),
@@ -97,7 +97,7 @@ wanted_tests = [
dict(
name='Start, entire season',
- show=dict(indexer=1, indexerid=210, quality=Quality.combineQualities([Quality.SDTV], [])),
+ show=dict(indexer=1, indexerid=210, quality=Quality.combine_qualities([Quality.SDTV], [])),
episodes=[
dict(season=1, episode=1, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 2)),
dict(season=1, episode=2, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 1)),
@@ -130,7 +130,7 @@ wanted_tests = [
dict(
name='End only',
- show=dict(indexer=1, indexerid=2, quality=Quality.combineQualities([Quality.SDTV], [])),
+ show=dict(indexer=1, indexerid=2, quality=Quality.combine_qualities([Quality.SDTV], [])),
episodes=[
dict(season=1, episode=1, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 3)),
dict(season=1, episode=2, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 1)),
@@ -163,7 +163,7 @@ wanted_tests = [
dict(
name='End only, entire season',
- show=dict(indexer=1, indexerid=20, quality=Quality.combineQualities([Quality.SDTV], [])),
+ show=dict(indexer=1, indexerid=20, quality=Quality.combine_qualities([Quality.SDTV], [])),
episodes=[
dict(season=1, episode=1, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 4)),
dict(season=1, episode=2, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 1)),
@@ -196,7 +196,7 @@ wanted_tests = [
dict(
name='End only, multi season',
- show=dict(indexer=1, indexerid=3, quality=Quality.combineQualities([Quality.SDTV], [])),
+ show=dict(indexer=1, indexerid=3, quality=Quality.combine_qualities([Quality.SDTV], [])),
episodes=[
dict(season=1, episode=1, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 5)),
dict(season=1, episode=2, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 1)),
@@ -229,7 +229,7 @@ wanted_tests = [
dict(
name='End only, multi season, entire season',
- show=dict(indexer=1, indexerid=30, quality=Quality.combineQualities([Quality.SDTV], [])),
+ show=dict(indexer=1, indexerid=30, quality=Quality.combine_qualities([Quality.SDTV], [])),
episodes=[
dict(season=1, episode=1, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 6)),
dict(season=1, episode=2, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 1)),
@@ -262,7 +262,7 @@ wanted_tests = [
dict(
name='End only, multi season, cross season',
- show=dict(indexer=1, indexerid=33, quality=Quality.combineQualities([Quality.SDTV], [])),
+ show=dict(indexer=1, indexerid=33, quality=Quality.combine_qualities([Quality.SDTV], [])),
episodes=[
dict(season=1, episode=1, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 7)),
dict(season=1, episode=2, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 1)),
@@ -295,7 +295,7 @@ wanted_tests = [
dict(
name='all episodes unaired',
- show=dict(indexer=1, indexerid=35, quality=Quality.combineQualities([Quality.SDTV], [])),
+ show=dict(indexer=1, indexerid=35, quality=Quality.combine_qualities([Quality.SDTV], [])),
episodes=[
dict(season=1, episode=1, status=UNAIRED, quality=Quality.NONE, airdate=datetime.date.fromordinal(1)),
dict(season=1, episode=2, status=UNAIRED, quality=Quality.NONE, airdate=datetime.date.fromordinal(1)),
@@ -317,7 +317,7 @@ wanted_tests = [
dict(
name='no episodes',
- show=dict(indexer=1, indexerid=36, quality=Quality.combineQualities([Quality.SDTV], [])),
+ show=dict(indexer=1, indexerid=36, quality=Quality.combine_qualities([Quality.SDTV], [])),
episodes=[
],
start_wanted=7, end_wanted=3,
@@ -332,7 +332,7 @@ wanted_tests = [
dict(
name='no episodes, whole first season',
- show=dict(indexer=1, indexerid=37, quality=Quality.combineQualities([Quality.SDTV], [])),
+ show=dict(indexer=1, indexerid=37, quality=Quality.combine_qualities([Quality.SDTV], [])),
episodes=[
],
start_wanted=-1, end_wanted=0,
@@ -347,7 +347,7 @@ wanted_tests = [
dict(
name='no episodes, whole last season',
- show=dict(indexer=1, indexerid=38, quality=Quality.combineQualities([Quality.SDTV], [])),
+ show=dict(indexer=1, indexerid=38, quality=Quality.combine_qualities([Quality.SDTV], [])),
episodes=[
],
start_wanted=0, end_wanted=-1,
@@ -362,7 +362,7 @@ wanted_tests = [
dict(
name='no episodes, whole first and last season',
- show=dict(indexer=1, indexerid=39, quality=Quality.combineQualities([Quality.SDTV], [])),
+ show=dict(indexer=1, indexerid=39, quality=Quality.combine_qualities([Quality.SDTV], [])),
episodes=[
],
start_wanted=-1, end_wanted=-1,
@@ -408,7 +408,7 @@ class ShowAddTests(test.SickbeardTestDBCase):
show_obj.sxe_ep_obj[ep['season']] = {}
show_obj.sxe_ep_obj[ep['season']][ep['episode']] = TVEpisode(show_obj, ep['season'], ep['episode'])
episode = show_obj.sxe_ep_obj[ep['season']][ep['episode']]
- episode.status = Quality.compositeStatus(ep['status'], ep['quality'])
+ episode.status = Quality.composite_status(ep['status'], ep['quality'])
episode.airdate = ep['airdate']
episode.name = 'nothing'
episode.epid = ep_id
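
`combine_qualities` folds the two quality lists used throughout these cases into one integer: initial-quality flags OR'd into the low 16 bits, upgrade-to flags shifted into the high bits. A sketch of that packing (example power-of-two flags, not the real `Quality` values):

```python
from functools import reduce
from operator import or_

SDTV, HDTV, HDWEBDL = 1, 8, 64   # example power-of-two quality flags


def combine_qualities(init_qualities, upgrade_qualities):
    init = reduce(or_, init_qualities, 0)
    upgrade = reduce(or_, upgrade_qualities, 0)
    # low word: acceptable initial qualities; high word: upgrade targets
    return init | (upgrade << 16)


# 'SDTV initially, upgrade to HDWEBDL' packs into a single int
assert combine_qualities([SDTV], [HDWEBDL]) == SDTV | (HDWEBDL << 16)
```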
diff --git a/tests/snatch_tests.py b/tests/snatch_tests.py
index 6e75c8f6..5240becd 100644
--- a/tests/snatch_tests.py
+++ b/tests/snatch_tests.py
@@ -57,7 +57,7 @@ class SearchTest(test.SickbeardTestDBCase):
return True
def __init__(self, something):
- for provider in sickgear.providers.sortedProviderList():
+ for provider in sickgear.providers.sorted_sources():
provider.get_url = self._fake_getURL
#provider.isActive = self._fake_isActive
diff --git a/tests/test_lib.py b/tests/test_lib.py
index 59b255f4..e3fe8be3 100644
--- a/tests/test_lib.py
+++ b/tests/test_lib.py
@@ -91,8 +91,8 @@ sickgear.NAMING_SPORTS_PATTERN = ''
sickgear.NAMING_MULTI_EP = 1
sickgear.PROVIDER_ORDER = []
-sickgear.newznabProviderList = providers.getNewznabProviderList('')
-sickgear.providerList = providers.makeProviderList()
+sickgear.newznab_providers = providers.newznab_source_list('')
+sickgear.provider_list = providers.provider_modules()
sickgear.PROG_DIR = os.path.abspath('..')
# sickgear.DATA_DIR = os.path.join(sickgear.PROG_DIR, 'tests')
@@ -195,16 +195,16 @@ def setup_test_db():
"""upgrades the db to the latest version
"""
# upgrading the db
- db.MigrationCode(db.DBConnection())
+ db.migration_code(db.DBConnection())
# fix up any db problems
- db.sanityCheckDatabase(db.DBConnection(), mainDB.MainSanityCheck)
+ db.sanity_check_db(db.DBConnection(), mainDB.MainSanityCheck)
# and for cachedb too
- db.upgradeDatabase(db.DBConnection('cache.db'), cache_db.InitialSchema)
+ db.upgrade_database(db.DBConnection('cache.db'), cache_db.InitialSchema)
# and for faileddb too
- db.upgradeDatabase(db.DBConnection('failed.db'), failed_db.InitialSchema)
+ db.upgrade_database(db.DBConnection('failed.db'), failed_db.InitialSchema)
def teardown_test_db():
diff --git a/tests/webapi_tests.py b/tests/webapi_tests.py
index 7b5d410b..a7b1c9a9 100644
--- a/tests/webapi_tests.py
+++ b/tests/webapi_tests.py
@@ -75,7 +75,7 @@ test_shows = [
'quality_init': [], 'quality_upgrade': [],
'episodes': {
1: {
- 1: {'name': 'ep1', 'status': Quality.compositeStatus(DOWNLOADED, Quality.HDWEBDL),
+ 1: {'name': 'ep1', 'status': Quality.composite_status(DOWNLOADED, Quality.HDWEBDL),
'airdate': old_date, 'description': 'ep1 description'},
2: {'name': 'ep2', 'status': WANTED, 'airdate': last_week, 'description': 'ep2 description'},
3: {'name': 'ep3', 'status': WANTED, 'airdate': today, 'description': 'ep3 description'},
@@ -174,17 +174,17 @@ class WebAPICase(test.SickbeardTestDBCase):
sickgear.events = Events(None)
sickgear.show_queue_scheduler = scheduler.Scheduler(
show_queue.ShowQueue(),
- cycleTime=datetime.timedelta(seconds=3),
- threadName='SHOWQUEUE')
+ cycle_time=datetime.timedelta(seconds=3),
+ thread_name='SHOWQUEUE')
sickgear.search_queue_scheduler = scheduler.Scheduler(
search_queue.SearchQueue(),
- cycleTime=datetime.timedelta(seconds=3),
- threadName='SEARCHQUEUE')
+ cycle_time=datetime.timedelta(seconds=3),
+ thread_name='SEARCHQUEUE')
sickgear.backlog_search_scheduler = search_backlog.BacklogSearchScheduler(
search_backlog.BacklogSearcher(),
- cycleTime=datetime.timedelta(minutes=60),
+ cycle_time=datetime.timedelta(minutes=60),
run_delay=datetime.timedelta(minutes=60),
- threadName='BACKLOG')
+ thread_name='BACKLOG')
sickgear.indexermapper.indexer_list = [i for i in sickgear.indexers.indexer_api.TVInfoAPI().all_sources]
for root_dirs, path, expected in root_folder_tests:
sickgear.ROOT_DIRS = root_dirs
@@ -198,8 +198,8 @@ class WebAPICase(test.SickbeardTestDBCase):
elif k in show_obj.__dict__:
show_obj.__dict__[k] = v
if 'quality_init' in cur_show and cur_show['quality_init']:
- show_obj.quality = Quality.combineQualities(cur_show['quality_init'],
- cur_show.get('quality_upgrade', []))
+ show_obj.quality = Quality.combine_qualities(cur_show['quality_init'],
+ cur_show.get('quality_upgrade', []))
show_obj.dirty = True
show_obj.save_to_db(True)
@@ -216,7 +216,7 @@ class WebAPICase(test.SickbeardTestDBCase):
ep_obj.__dict__[k] = v
show_obj.sxe_ep_obj.setdefault(season, {})[ep] = ep_obj
ep_obj.save_to_db(True)
- status, quality = Quality.splitCompositeStatus(ep_obj.status)
+ status, quality = Quality.split_composite_status(ep_obj.status)
if status in (DOWNLOADED, SNATCHED):
s_r = SearchResult([ep_obj])
s_r.show_obj, s_r.quality, s_r.provider, s_r.name = \
@@ -240,8 +240,8 @@ class WebAPICase(test.SickbeardTestDBCase):
for cur_show in test_shows:
show_obj = sickgear.helpers.find_show_by_id({cur_show['tvid']: cur_show['prodid']})
if 'quality_init' in cur_show and cur_show['quality_init']:
- show_obj.quality = Quality.combineQualities(cur_show['quality_init'],
- cur_show.get('quality_upgrade', []))
+ show_obj.quality = Quality.combine_qualities(cur_show['quality_init'],
+ cur_show.get('quality_upgrade', []))
else:
show_obj.quality = int(sickgear.QUALITY_DEFAULT)
show_obj.upgrade_once = int(cur_show.get('upgrade_once', 0))
@@ -821,7 +821,7 @@ class WebAPICase(test.SickbeardTestDBCase):
if cur_quality:
params.update({'quality': cur_quality_str})
old_status = ep_obj.status
- status, quality = Quality.splitCompositeStatus(ep_obj.status)
+ status, quality = Quality.split_composite_status(ep_obj.status)
expect_fail = UNAIRED == status or (DOWNLOADED == status and not cur_quality)
expected_msg = (success_msg, failed_msg)[expect_fail]
data = self._request_from_api(webapi.CMD_SickGearEpisodeSetStatus, params=params)