diff --git a/CHANGES.md b/CHANGES.md index 10c13574..d9ba3777 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,4 +1,24 @@ -### 3.27.13 (2023-04-12 10:15:00 UTC) +### 3.28.0 (2023-04-12 13:05:00 UTC) + +* Update html5lib 1.1 (f87487a) to 1.2-dev (3e500bb) +* Update package resource API 63.2.0 (3ae44cd) to 67.5.1 (f51eccd) +* Update Tornado Web Server 6.2.0 (a4f08a3) to 6.3.0 (7186b86) +* Update urllib3 1.26.13 (25fbd5f) to 1.26.14 (a06c05c) +* Change remove calls to legacy py2 fix encoding function +* Change requirements for pure py3 +* Change codebase cleanups +* Change improve perf by using generators with `any` +* Change deprecate processEpisode used by nzbToMedia to advise how to configure API instead +* Change optionally add disk free space in response to three Web API endpoints +* Change increase API version number to 15 +* Add actually use mount points to get disk free space +* Add optional "freespace" parameter to endpoints: sg.getrootdirs, sg.addrootdir, sg.deleterootdir +* Change update help of affected endpoints +* Fix explicitly save rootdirs after adding or deleting via Web API +* Change add Rarbg UHD search category + + +### 3.27.13 (2023-04-12 10:15:00 UTC) * Change fix show id log output * Change handle exceptions thrown from pkg_resources parsing newly extended working set modules not even used by SG @@ -771,7 +791,7 @@ * Change allow Python 3.8.10 and 3.9.5 * Remove PiSexy provider -* Fix refreshShow, prevent another refresh of show if already in queue and not forced +* Fix refresh_show, prevent another refresh of show if already in queue and not forced * Fix webapi set scene season * Fix set path in all_tests for py2 * Fix webapi exception if no backlog was done before (CMD_SickGearCheckScheduler) @@ -1103,7 +1123,7 @@ * Add API response field `global exclude require` to sg.listrequirewords endpoint * Change improve Popen resource usage under py2 * Add overall failure monitoring to History/Connect fails (renamed from "Provider fails") -* Change log exception during updateCache in newznab +* Change log exception during update_cache in newznab * Change make Py3.9 preparations * Change anime "Available groups" to display "No groups listed..." when API is fine with no results instead of blank * Change improve clarity of anime group lists by using terms Allow list and Block list diff --git a/_cleaner.py b/_cleaner.py index bfc31345..60b21232 100644 --- a/_cleaner.py +++ b/_cleaner.py @@ -37,6 +37,9 @@ if old_magic != magic_number: # skip cleaned005 as used during dev by testers cleanups = [ + ['.cleaned009.tmp', r'lib\scandir', [ + r'lib\scandir\__pycache__', r'lib\scandir', + ]], ['.cleaned008.tmp', r'lib\tornado_py3', [ r'lib\bs4_py2\builder\__pycache__', r'lib\bs4_py2\builder', r'lib\bs4_py2', r'lib\bs4_py3\builder\__pycache__', r'lib\bs4_py3\builder', r'lib\bs4_py3', @@ -122,7 +125,7 @@ for cleaned_path, test_path, dir_list in cleanups: pass with io.open(cleaned_file, 'w+', encoding='utf-8') as fp: - fp.write(u'This file exists to prevent a rerun delete of *.pyc, *.pyo files') + fp.write('This file exists to prevent a rerun delete of *.pyc, *.pyo files') fp.flush() os.fsync(fp.fileno()) @@ -163,10 +166,10 @@ if not os.path.isfile(cleaned_file) or os.path.exists(test): swap_name = cleaned_file cleaned_file = danger_output danger_output = swap_name - msg = u'Failed (permissions?) to delete file(s). You must manually delete:\r\n%s' % '\r\n'.join(bad_files) + msg = 'Failed (permissions?) to delete file(s). 
You must manually delete:\r\n%s' % '\r\n'.join(bad_files) print(msg) else: - msg = u'This file exists to prevent a rerun delete of dead lib/html5lib files' + msg = 'This file exists to prevent a rerun delete of dead lib/html5lib files' with io.open(cleaned_file, 'w+', encoding='utf-8') as fp: fp.write(msg) diff --git a/gui/slick/interfaces/default/apiBuilder.tmpl b/gui/slick/interfaces/default/apiBuilder.tmpl index acf1fab1..4b696fb4 100644 --- a/gui/slick/interfaces/default/apiBuilder.tmpl +++ b/gui/slick/interfaces/default/apiBuilder.tmpl @@ -70,11 +70,11 @@ addList("Command", "Help", "?cmd=help", "sg.functions-list", "","", "default"); addOption("sg.functions-list", "$k", "&subject=$k", "", "", "#echo ('sb', 'sg')['sg' in $k]#") #end for addList("Command", "SickBeard.AddRootDir", "?cmd=sb.addrootdir", "sb.addrootdir"); -addList("Command", "SickGear.AddRootDir", "?cmd=sg.addrootdir", "sb.addrootdir"); +addList("Command", "SickGear.AddRootDir", "?cmd=sg.addrootdir", "sg.addrootdir"); addOption("Command", "SickBeard.CheckScheduler", "?cmd=sb.checkscheduler"); addOption("Command", "SickGear.CheckScheduler", "?cmd=sg.checkscheduler"); addList("Command", "SickBeard.DeleteRootDir", "?cmd=sb.deleterootdir", "sb.deleterootdir"); -addList("Command", "SickGear.DeleteRootDir", "?cmd=sg.deleterootdir", "sb.deleterootdir"); +addList("Command", "SickGear.DeleteRootDir", "?cmd=sg.deleterootdir", "sg.deleterootdir"); addOption("Command", "SickBeard.ForceSearch", "?cmd=sb.forcesearch"); addList("Command", "SickGear.ForceSearch", "?cmd=sg.forcesearch", "sg.forcesearch"); addOption("Command", "SickGear.SearchQueue", "?cmd=sg.searchqueue"); @@ -88,7 +88,7 @@ addList("Command", "SickGear.GetIndexers", "?cmd=sg.getindexers", "listindexers" addList("Command", "SickGear.GetIndexerIcon", "?cmd=sg.getindexericon", "getindexericon"); addList("Command", "SickGear.GetNetworkIcon", "?cmd=sg.getnetworkicon", "getnetworkicon"); addOption("Command", "SickBeard.GetRootDirs", "?cmd=sb.getrootdirs"); -addOption("Command", "SickGear.GetRootDirs", "?cmd=sg.getrootdirs"); +addList("Command", "SickGear.GetRootDirs", "?cmd=sg.getrootdirs", "sg.addfreespace"); addList("Command", "SickBeard.PauseBacklog", "?cmd=sb.pausebacklog", "sb.pausebacklog"); addList("Command", "SickGear.PauseBacklog", "?cmd=sg.pausebacklog", "sb.pausebacklog"); addOption("Command", "SickBeard.Ping", "?cmd=sb.ping"); @@ -621,10 +621,26 @@ addOption("sb.addrootdir-opt", "Optional Param", "", 1); addOption("sb.addrootdir-opt", "Default", "&default=1"); addOption("sb.addrootdir-opt", "Not Default", "&default=0"); -addOption("sb.deleterootdir", "C:\\Temp", "&location=C:\\Temp", "", 1); +addList("sg.addrootdir", "C:\\Temp", "&location=C:\\Temp", "sg.addrootdir-opt"); +addList("sg.addrootdir", "/usr/bin", "&location=/usr/bin/", "sg.addrootdir-opt"); +addList("sg.addrootdir", "S:\\Invalid_Location", "&location=S:\\Invalid_Location", "sg.addrootdir-opt"); + +addList("sg.addrootdir-opt", "Optional Param", "", "sg.addfreespace"); +addList("sg.addrootdir-opt", "Default", "&default=1", "sg.addfreespace"); +addList("sg.addrootdir-opt", "Not Default", "&default=0", "sg.addfreespace"); + +addOption("sb.deleterootdir", "C:\\Temp", "&location=C:\\Temp", 1); addOption("sb.deleterootdir", "/usr/bin", "&location=/usr/bin/"); addOption("sb.deleterootdir", "S:\\Invalid_Location", "&location=S:\\Invalid_Location"); +addList("sg.deleterootdir", "C:\\Temp", "&location=C:\\Temp", "sg.addfreespace"); +addList("sg.deleterootdir", "/usr/bin", "&location=/usr/bin/", "sg.addfreespace"); 
+addList("sg.deleterootdir", "S:\\Invalid_Location", "&location=S:\\Invalid_Location", "sg.addfreespace"); + +addOption("sg.addfreespace", "Optional Param", "", 1) +addOption("sg.addfreespace", "incl Freespace", "&freespace=1") +addOption("sg.addfreespace", "excl Freespace", "&freespace=0") + #for $cur_show_obj in $sortedShowList: addList("show.pause", "$cur_show_obj.name", "&indexerid=$cur_show_obj.prodid", "show.pause-opt"); #end for diff --git a/gui/slick/interfaces/default/cache.tmpl b/gui/slick/interfaces/default/cache.tmpl index 7b115f67..be3ba876 100644 --- a/gui/slick/interfaces/default/cache.tmpl +++ b/gui/slick/interfaces/default/cache.tmpl @@ -65,7 +65,7 @@ #for $hItem in $cacheResults: - #set $provider = $providers.getProviderClass($hItem['provider']) + #set $provider = $providers.get_by_id($hItem['provider']) #set $tip = '%s @ %s' % ($hItem['provider'], $SGDatetime.sbfdatetime($SGDatetime.fromtimestamp($hItem['time']))) #set $ver = $hItem['version'] #set $ver = ($ver, '')[-1 == $ver] diff --git a/gui/slick/interfaces/default/cast_person.tmpl b/gui/slick/interfaces/default/cast_person.tmpl index 3ce066a3..3d9b9568 100644 --- a/gui/slick/interfaces/default/cast_person.tmpl +++ b/gui/slick/interfaces/default/cast_person.tmpl @@ -182,7 +182,11 @@ def param(visible=True, rid=None, cache_person=None, cache_char=None, person=Non #end if #set $section_links = False +#set $all_sources = $TVInfoAPI().all_sources #for $cur_src, $cur_sid in sorted(iteritems($person.ids)) + #if $cur_src not in $all_sources: + #continue + #end if #if $TVInfoAPI($cur_src).config.get('people_url') #if not $section_links #set $section_links = True diff --git a/gui/slick/interfaces/default/config.tmpl b/gui/slick/interfaces/default/config.tmpl index 26b462e9..23d9b4ce 100644 --- a/gui/slick/interfaces/default/config.tmpl +++ b/gui/slick/interfaces/default/config.tmpl @@ -29,7 +29,7 @@ Config file:$sg_str('CONFIG_FILE') - Database file:$db.dbFilename() + Database file:$db.db_filename() #if $db.db_supports_backup Database backups:$backup_db_path #end if diff --git a/gui/slick/interfaces/default/config_general.tmpl b/gui/slick/interfaces/default/config_general.tmpl index 9f604721..e776220f 100644 --- a/gui/slick/interfaces/default/config_general.tmpl +++ b/gui/slick/interfaces/default/config_general.tmpl @@ -13,7 +13,6 @@ #from sickgear.sgdatetime import * <% def sg_var(varname, default=False): return getattr(sickgear, varname, default) %>#slurp# <% def sg_str(varname, default=''): return getattr(sickgear, varname, default) %>#slurp# -#from _23 import list_keys ## #set global $title = 'Config - General' #set global $header = 'General Settings' @@ -846,7 +845,7 @@ File logging level: ' response = requests.get(self.server_url + 'index.php') if response.status_code != 200: raise ServiceError('Initiate failed') - + match = re.search(login_pattern, response.content, re.IGNORECASE | re.DOTALL) if not match: raise ServiceError('Can not find unique id parameter on page') - + login_parameter = {'username': 'sickbeard', 'passwd': 'subliminal', 'remember': 'yes', @@ -77,7 +77,7 @@ class Itasa(ServiceBase): 'option': 'com_user', 'task': 'login', 'silent': 'true', - 'return': match.group(1), + 'return': match.group(1), match.group(2): match.group(3) } @@ -85,7 +85,7 @@ class Itasa(ServiceBase): r = self.session.post(self.server_url + 'index.php', data=login_parameter) if not re.search('logouticon.png', r.content, re.IGNORECASE | re.DOTALL): raise ServiceError('Itasa Login Failed') - + @cachedmethod def get_series_id(self, 
name): """Get the show page and cache every show found in it""" @@ -100,7 +100,7 @@ class Itasa(ServiceBase): series_id = int(match.group(1)) self.cache_for(self.get_series_id, args=(series_name,), result=series_id) return self.cached_value(self.get_series_id, args=(name,)) - + def get_episode_id(self, series, series_id, season, episode, quality): """Get the id subtitle for episode with the given quality""" @@ -115,14 +115,14 @@ class Itasa(ServiceBase): if seasons.text.lower().strip() == 'stagione %s' % str(season): season_link = seasons['href'] break - + if not season_link: logger.debug(u'Could not find season %s for series %s' % (series, str(season))) return None - + r = self.session.get(season_link) soup = BeautifulSoup(r.content, self.required_features) - + all_qualities = soup.find('div', attrs = {'id' : 'remositorycontainerlist'}) for qualities in all_qualities.find_all(href=re.compile('func=select')): if qualities.text.lower().strip() in self.quality_dict[quality]: @@ -131,11 +131,11 @@ class Itasa(ServiceBase): soup = BeautifulSoup(r.content, self.required_features) break - #If we want SDTV we are just on the right page so quality link will be None + #If we want SDTV we are just on the right page so quality link will be None if not quality == Quality.SDTV and not quality_link: logger.debug(u'Could not find a subtitle with required quality for series %s season %s' % (series, str(season))) return None - + all_episodes = soup.find('div', attrs = {'id' : 'remositoryfilelisting'}) for episodes in all_episodes.find_all(href=re.compile('func=fileinfo')): ep_string = "%(seasonnumber)dx%(episodenumber)02d" % {'seasonnumber': season, 'episodenumber': episode} @@ -144,12 +144,12 @@ class Itasa(ServiceBase): if match: episode_id = match.group(1) return episode_id - + return episode_id - + def list_checked(self, video, languages): return self.query(video.path or video.release, languages, get_keywords(video.guess), video.series, video.season, video.episode) - + def query(self, filepath, languages, keywords, series, season, episode): logger.debug(u'Getting subtitles for %s season %d episode %d with languages %r' % (series, season, episode, languages)) @@ -160,8 +160,8 @@ class Itasa(ServiceBase): except KeyError: logger.debug(u'Could not find series id for %s' % series) return [] - - episode_id = self.get_episode_id(series, series_id, season, episode, Quality.nameQuality(filepath)) + + episode_id = self.get_episode_id(series, series_id, season, episode, Quality.name_quality(filepath)) if not episode_id: logger.debug(u'Could not find subtitle for series %s' % series) return [] @@ -173,11 +173,11 @@ class Itasa(ServiceBase): sub_language = self.get_language('it') path = get_subtitle_path(filepath, sub_language, self.config.multi) subtitle = ResultSubtitle(path, sub_language, self.__class__.__name__.lower(), sub_link) - + return [subtitle] def download(self, subtitle): - + logger.info(u'Downloading %s in %s' % (subtitle.link, subtitle.path)) try: r = self.session.get(subtitle.link, headers={'Referer': self.server_url, 'User-Agent': self.user_agent}) @@ -204,13 +204,13 @@ class Itasa(ServiceBase): else: zipsub.close() raise DownloadFailedError('No subtitles found in zip file') - + zipsub.close() except Exception as e: if os.path.exists(subtitle.path): os.remove(subtitle.path) raise DownloadFailedError(str(e)) - + logger.debug(u'Download finished') - -Service = Itasa \ No newline at end of file + +Service = Itasa diff --git a/lib/subliminal/videos.py b/lib/subliminal/videos.py index 
84a8fa11..e83fd7c3 100644 --- a/lib/subliminal/videos.py +++ b/lib/subliminal/videos.py @@ -29,9 +29,6 @@ import struct from six import PY2, text_type from _23 import decode_str -# noinspection PyPep8Naming -import encodingKludge as ek - __all__ = ['EXTENSIONS', 'MIMETYPES', 'Video', 'Episode', 'Movie', 'UnknownVideo', 'scan', 'hash_opensubtitles', 'hash_thesubdb'] @@ -62,10 +59,10 @@ class Video(object): self._path = None self.hashes = {} self.subtitle_path = subtitle_path - + if PY2 and isinstance(path, text_type): path = path.encode('utf-8') - + if os.path.exists(path): self._path = path self.size = os.path.getsize(self._path) @@ -150,8 +147,8 @@ class Video(object): folder = '.' existing = [f for f in os.listdir(folder) if f.startswith(basename)] if self.subtitle_path: - subsDir = ek.ek(os.path.join, folder, self.subtitle_path) - if ek.ek(os.path.isdir, subsDir): + subsDir = os.path.join(folder, self.subtitle_path) + if os.path.isdir(subsDir): existing.extend([f for f in os.listdir(subsDir) if f.startswith(basename)]) for path in existing: for ext in subtitles.EXTENSIONS: @@ -232,7 +229,7 @@ def scan(entry, max_depth=3, scan_filter=None, depth=0): """ if PY2 and isinstance(entry, text_type): entry = entry.encode('utf-8') - + if depth > max_depth != 0: # we do not want to search the whole file system except if max_depth = 0 return [] if os.path.isdir(entry): # a dir? recurse diff --git a/lib/tornado/__init__.py b/lib/tornado/__init__.py index 39d7c44b..060b836a 100644 --- a/lib/tornado/__init__.py +++ b/lib/tornado/__init__.py @@ -22,5 +22,46 @@ # is zero for an official release, positive for a development branch, # or negative for a release candidate or beta (after the base version # number has been incremented) -version = "6.2" -version_info = (6, 2, 0, 0) +version = "6.3.dev1" +version_info = (6, 3, 0, -100) + +import importlib +import typing + +__all__ = [ + "auth", + "autoreload", + "concurrent", + "curl_httpclient", + "escape", + "gen", + "http1connection", + "httpclient", + "httpserver", + "httputil", + "ioloop", + "iostream", + "locale", + "locks", + "log", + "netutil", + "options", + "platform", + "process", + "queues", + "routing", + "simple_httpclient", + "tcpclient", + "tcpserver", + "template", + "testing", + "util", + "web", +] + + +# Copied from https://peps.python.org/pep-0562/ +def __getattr__(name: str) -> typing.Any: + if name in __all__: + return importlib.import_module("." 
+ name, __name__) + raise AttributeError(f"module {__name__!r} has no attribute {name!r}") diff --git a/lib/tornado/_locale_data.py b/lib/tornado/_locale_data.py index c706230e..7a5d2852 100644 --- a/lib/tornado/_locale_data.py +++ b/lib/tornado/_locale_data.py @@ -15,66 +15,66 @@ """Data used by the tornado.locale module.""" LOCALE_NAMES = { - "af_ZA": {"name_en": u"Afrikaans", "name": u"Afrikaans"}, - "am_ET": {"name_en": u"Amharic", "name": u"አማርኛ"}, - "ar_AR": {"name_en": u"Arabic", "name": u"العربية"}, - "bg_BG": {"name_en": u"Bulgarian", "name": u"Български"}, - "bn_IN": {"name_en": u"Bengali", "name": u"বাংলা"}, - "bs_BA": {"name_en": u"Bosnian", "name": u"Bosanski"}, - "ca_ES": {"name_en": u"Catalan", "name": u"Català"}, - "cs_CZ": {"name_en": u"Czech", "name": u"Čeština"}, - "cy_GB": {"name_en": u"Welsh", "name": u"Cymraeg"}, - "da_DK": {"name_en": u"Danish", "name": u"Dansk"}, - "de_DE": {"name_en": u"German", "name": u"Deutsch"}, - "el_GR": {"name_en": u"Greek", "name": u"Ελληνικά"}, - "en_GB": {"name_en": u"English (UK)", "name": u"English (UK)"}, - "en_US": {"name_en": u"English (US)", "name": u"English (US)"}, - "es_ES": {"name_en": u"Spanish (Spain)", "name": u"Español (España)"}, - "es_LA": {"name_en": u"Spanish", "name": u"Español"}, - "et_EE": {"name_en": u"Estonian", "name": u"Eesti"}, - "eu_ES": {"name_en": u"Basque", "name": u"Euskara"}, - "fa_IR": {"name_en": u"Persian", "name": u"فارسی"}, - "fi_FI": {"name_en": u"Finnish", "name": u"Suomi"}, - "fr_CA": {"name_en": u"French (Canada)", "name": u"Français (Canada)"}, - "fr_FR": {"name_en": u"French", "name": u"Français"}, - "ga_IE": {"name_en": u"Irish", "name": u"Gaeilge"}, - "gl_ES": {"name_en": u"Galician", "name": u"Galego"}, - "he_IL": {"name_en": u"Hebrew", "name": u"עברית"}, - "hi_IN": {"name_en": u"Hindi", "name": u"हिन्दी"}, - "hr_HR": {"name_en": u"Croatian", "name": u"Hrvatski"}, - "hu_HU": {"name_en": u"Hungarian", "name": u"Magyar"}, - "id_ID": {"name_en": u"Indonesian", "name": u"Bahasa Indonesia"}, - "is_IS": {"name_en": u"Icelandic", "name": u"Íslenska"}, - "it_IT": {"name_en": u"Italian", "name": u"Italiano"}, - "ja_JP": {"name_en": u"Japanese", "name": u"日本語"}, - "ko_KR": {"name_en": u"Korean", "name": u"한국어"}, - "lt_LT": {"name_en": u"Lithuanian", "name": u"Lietuvių"}, - "lv_LV": {"name_en": u"Latvian", "name": u"Latviešu"}, - "mk_MK": {"name_en": u"Macedonian", "name": u"Македонски"}, - "ml_IN": {"name_en": u"Malayalam", "name": u"മലയാളം"}, - "ms_MY": {"name_en": u"Malay", "name": u"Bahasa Melayu"}, - "nb_NO": {"name_en": u"Norwegian (bokmal)", "name": u"Norsk (bokmål)"}, - "nl_NL": {"name_en": u"Dutch", "name": u"Nederlands"}, - "nn_NO": {"name_en": u"Norwegian (nynorsk)", "name": u"Norsk (nynorsk)"}, - "pa_IN": {"name_en": u"Punjabi", "name": u"ਪੰਜਾਬੀ"}, - "pl_PL": {"name_en": u"Polish", "name": u"Polski"}, - "pt_BR": {"name_en": u"Portuguese (Brazil)", "name": u"Português (Brasil)"}, - "pt_PT": {"name_en": u"Portuguese (Portugal)", "name": u"Português (Portugal)"}, - "ro_RO": {"name_en": u"Romanian", "name": u"Română"}, - "ru_RU": {"name_en": u"Russian", "name": u"Русский"}, - "sk_SK": {"name_en": u"Slovak", "name": u"Slovenčina"}, - "sl_SI": {"name_en": u"Slovenian", "name": u"Slovenščina"}, - "sq_AL": {"name_en": u"Albanian", "name": u"Shqip"}, - "sr_RS": {"name_en": u"Serbian", "name": u"Српски"}, - "sv_SE": {"name_en": u"Swedish", "name": u"Svenska"}, - "sw_KE": {"name_en": u"Swahili", "name": u"Kiswahili"}, - "ta_IN": {"name_en": u"Tamil", "name": u"தமிழ்"}, - "te_IN": {"name_en": u"Telugu", 
"name": u"తెలుగు"}, - "th_TH": {"name_en": u"Thai", "name": u"ภาษาไทย"}, - "tl_PH": {"name_en": u"Filipino", "name": u"Filipino"}, - "tr_TR": {"name_en": u"Turkish", "name": u"Türkçe"}, - "uk_UA": {"name_en": u"Ukraini ", "name": u"Українська"}, - "vi_VN": {"name_en": u"Vietnamese", "name": u"Tiếng Việt"}, - "zh_CN": {"name_en": u"Chinese (Simplified)", "name": u"中文(简体)"}, - "zh_TW": {"name_en": u"Chinese (Traditional)", "name": u"中文(繁體)"}, + "af_ZA": {"name_en": "Afrikaans", "name": "Afrikaans"}, + "am_ET": {"name_en": "Amharic", "name": "አማርኛ"}, + "ar_AR": {"name_en": "Arabic", "name": "العربية"}, + "bg_BG": {"name_en": "Bulgarian", "name": "Български"}, + "bn_IN": {"name_en": "Bengali", "name": "বাংলা"}, + "bs_BA": {"name_en": "Bosnian", "name": "Bosanski"}, + "ca_ES": {"name_en": "Catalan", "name": "Català"}, + "cs_CZ": {"name_en": "Czech", "name": "Čeština"}, + "cy_GB": {"name_en": "Welsh", "name": "Cymraeg"}, + "da_DK": {"name_en": "Danish", "name": "Dansk"}, + "de_DE": {"name_en": "German", "name": "Deutsch"}, + "el_GR": {"name_en": "Greek", "name": "Ελληνικά"}, + "en_GB": {"name_en": "English (UK)", "name": "English (UK)"}, + "en_US": {"name_en": "English (US)", "name": "English (US)"}, + "es_ES": {"name_en": "Spanish (Spain)", "name": "Español (España)"}, + "es_LA": {"name_en": "Spanish", "name": "Español"}, + "et_EE": {"name_en": "Estonian", "name": "Eesti"}, + "eu_ES": {"name_en": "Basque", "name": "Euskara"}, + "fa_IR": {"name_en": "Persian", "name": "فارسی"}, + "fi_FI": {"name_en": "Finnish", "name": "Suomi"}, + "fr_CA": {"name_en": "French (Canada)", "name": "Français (Canada)"}, + "fr_FR": {"name_en": "French", "name": "Français"}, + "ga_IE": {"name_en": "Irish", "name": "Gaeilge"}, + "gl_ES": {"name_en": "Galician", "name": "Galego"}, + "he_IL": {"name_en": "Hebrew", "name": "עברית"}, + "hi_IN": {"name_en": "Hindi", "name": "हिन्दी"}, + "hr_HR": {"name_en": "Croatian", "name": "Hrvatski"}, + "hu_HU": {"name_en": "Hungarian", "name": "Magyar"}, + "id_ID": {"name_en": "Indonesian", "name": "Bahasa Indonesia"}, + "is_IS": {"name_en": "Icelandic", "name": "Íslenska"}, + "it_IT": {"name_en": "Italian", "name": "Italiano"}, + "ja_JP": {"name_en": "Japanese", "name": "日本語"}, + "ko_KR": {"name_en": "Korean", "name": "한국어"}, + "lt_LT": {"name_en": "Lithuanian", "name": "Lietuvių"}, + "lv_LV": {"name_en": "Latvian", "name": "Latviešu"}, + "mk_MK": {"name_en": "Macedonian", "name": "Македонски"}, + "ml_IN": {"name_en": "Malayalam", "name": "മലയാളം"}, + "ms_MY": {"name_en": "Malay", "name": "Bahasa Melayu"}, + "nb_NO": {"name_en": "Norwegian (bokmal)", "name": "Norsk (bokmål)"}, + "nl_NL": {"name_en": "Dutch", "name": "Nederlands"}, + "nn_NO": {"name_en": "Norwegian (nynorsk)", "name": "Norsk (nynorsk)"}, + "pa_IN": {"name_en": "Punjabi", "name": "ਪੰਜਾਬੀ"}, + "pl_PL": {"name_en": "Polish", "name": "Polski"}, + "pt_BR": {"name_en": "Portuguese (Brazil)", "name": "Português (Brasil)"}, + "pt_PT": {"name_en": "Portuguese (Portugal)", "name": "Português (Portugal)"}, + "ro_RO": {"name_en": "Romanian", "name": "Română"}, + "ru_RU": {"name_en": "Russian", "name": "Русский"}, + "sk_SK": {"name_en": "Slovak", "name": "Slovenčina"}, + "sl_SI": {"name_en": "Slovenian", "name": "Slovenščina"}, + "sq_AL": {"name_en": "Albanian", "name": "Shqip"}, + "sr_RS": {"name_en": "Serbian", "name": "Српски"}, + "sv_SE": {"name_en": "Swedish", "name": "Svenska"}, + "sw_KE": {"name_en": "Swahili", "name": "Kiswahili"}, + "ta_IN": {"name_en": "Tamil", "name": "தமிழ்"}, + "te_IN": {"name_en": "Telugu", "name": 
"తెలుగు"}, + "th_TH": {"name_en": "Thai", "name": "ภาษาไทย"}, + "tl_PH": {"name_en": "Filipino", "name": "Filipino"}, + "tr_TR": {"name_en": "Turkish", "name": "Türkçe"}, + "uk_UA": {"name_en": "Ukraini ", "name": "Українська"}, + "vi_VN": {"name_en": "Vietnamese", "name": "Tiếng Việt"}, + "zh_CN": {"name_en": "Chinese (Simplified)", "name": "中文(简体)"}, + "zh_TW": {"name_en": "Chinese (Traditional)", "name": "中文(繁體)"}, } diff --git a/lib/tornado/auth.py b/lib/tornado/auth.py index d1cf29b3..59501f56 100644 --- a/lib/tornado/auth.py +++ b/lib/tornado/auth.py @@ -42,7 +42,7 @@ Example usage for Google OAuth: user = await self.get_authenticated_user( redirect_uri='http://your.site.com/auth/google', code=self.get_argument('code')) - # Save the user with e.g. set_secure_cookie + # Save the user with e.g. set_signed_cookie else: self.authorize_redirect( redirect_uri='http://your.site.com/auth/google', @@ -136,7 +136,7 @@ class OpenIdMixin(object): args = dict( (k, v[-1]) for k, v in handler.request.arguments.items() ) # type: Dict[str, Union[str, bytes]] - args["openid.mode"] = u"check_authentication" + args["openid.mode"] = "check_authentication" url = self._OPENID_ENDPOINT # type: ignore if http_client is None: http_client = self.get_auth_http_client() @@ -211,14 +211,14 @@ class OpenIdMixin(object): for key in handler.request.arguments: if ( key.startswith("openid.ns.") - and handler.get_argument(key) == u"http://openid.net/srv/ax/1.0" + and handler.get_argument(key) == "http://openid.net/srv/ax/1.0" ): ax_ns = key[10:] break def get_ax_arg(uri: str) -> str: if not ax_ns: - return u"" + return "" prefix = "openid." + ax_ns + ".type." ax_name = None for name in handler.request.arguments.keys(): @@ -227,8 +227,8 @@ class OpenIdMixin(object): ax_name = "openid." + ax_ns + ".value." + part break if not ax_name: - return u"" - return handler.get_argument(ax_name, u"") + return "" + return handler.get_argument(ax_name, "") email = get_ax_arg("http://axschema.org/contact/email") name = get_ax_arg("http://axschema.org/namePerson") @@ -247,7 +247,7 @@ class OpenIdMixin(object): if name: user["name"] = name elif name_parts: - user["name"] = u" ".join(name_parts) + user["name"] = " ".join(name_parts) elif email: user["name"] = email.split("@")[0] if email: @@ -694,7 +694,7 @@ class TwitterMixin(OAuthMixin): async def get(self): if self.get_argument("oauth_token", None): user = await self.get_authenticated_user() - # Save the user using e.g. set_secure_cookie() + # Save the user using e.g. set_signed_cookie() else: await self.authorize_redirect() @@ -855,8 +855,28 @@ class GoogleOAuth2Mixin(OAuth2Mixin): _OAUTH_NO_CALLBACKS = False _OAUTH_SETTINGS_KEY = "google_oauth" + def get_google_oauth_settings(self) -> Dict[str, str]: + """Return the Google OAuth 2.0 credentials that you created with + [Google Cloud + Platform](https://console.cloud.google.com/apis/credentials). The dict + format is:: + + { + "key": "your_client_id", "secret": "your_client_secret" + } + + If your credentials are stored differently (e.g. in a db) you can + override this method for custom provision. + """ + handler = cast(RequestHandler, self) + return handler.settings[self._OAUTH_SETTINGS_KEY] + async def get_authenticated_user( - self, redirect_uri: str, code: str + self, + redirect_uri: str, + code: str, + client_id: Optional[str] = None, + client_secret: Optional[str] = None, ) -> Dict[str, Any]: """Handles the login for the Google user, returning an access token. 
@@ -883,11 +903,11 @@ class GoogleOAuth2Mixin(OAuth2Mixin): "https://www.googleapis.com/oauth2/v1/userinfo", access_token=access["access_token"]) # Save the user and access token with - # e.g. set_secure_cookie. + # e.g. set_signed_cookie. else: self.authorize_redirect( redirect_uri='http://your.site.com/auth/google', - client_id=self.settings['google_oauth']['key'], + client_id=self.get_google_oauth_settings()['key'], scope=['profile', 'email'], response_type='code', extra_params={'approval_prompt': 'auto'}) @@ -899,14 +919,20 @@ class GoogleOAuth2Mixin(OAuth2Mixin): The ``callback`` argument was removed. Use the returned awaitable object instead. """ # noqa: E501 - handler = cast(RequestHandler, self) + + if client_id is None or client_secret is None: + settings = self.get_google_oauth_settings() + if client_id is None: + client_id = settings["key"] + if client_secret is None: + client_secret = settings["secret"] http = self.get_auth_http_client() body = urllib.parse.urlencode( { "redirect_uri": redirect_uri, "code": code, - "client_id": handler.settings[self._OAUTH_SETTINGS_KEY]["key"], - "client_secret": handler.settings[self._OAUTH_SETTINGS_KEY]["secret"], + "client_id": client_id, + "client_secret": client_secret, "grant_type": "authorization_code", } ) @@ -951,7 +977,7 @@ class FacebookGraphMixin(OAuth2Mixin): client_id=self.settings["facebook_api_key"], client_secret=self.settings["facebook_secret"], code=self.get_argument("code")) - # Save the user with e.g. set_secure_cookie + # Save the user with e.g. set_signed_cookie else: self.authorize_redirect( redirect_uri='/auth/facebookgraph/', diff --git a/lib/tornado/curl_httpclient.py b/lib/tornado/curl_httpclient.py index 61b6b7a9..23320e48 100644 --- a/lib/tornado/curl_httpclient.py +++ b/lib/tornado/curl_httpclient.py @@ -36,11 +36,11 @@ from tornado.httpclient import ( ) from tornado.log import app_log -from typing import Dict, Any, Callable, Union, Tuple, Optional +from typing import Dict, Any, Callable, Union, Optional import typing if typing.TYPE_CHECKING: - from typing import Deque # noqa: F401 + from typing import Deque, Tuple # noqa: F401 curl_log = logging.getLogger("tornado.curl_httpclient") diff --git a/lib/tornado/escape.py b/lib/tornado/escape.py index 3cf7ff2e..55354c30 100644 --- a/lib/tornado/escape.py +++ b/lib/tornado/escape.py @@ -368,7 +368,7 @@ def linkify( # have a status bar, such as Safari by default) params += ' title="%s"' % href - return u'%s' % (href, params, url) + return '%s' % (href, params, url) # First HTML-escape so that our strings are all safe. # The regex is modified to avoid character entites other than & so diff --git a/lib/tornado/gen.py b/lib/tornado/gen.py index 1946ab91..4819b857 100644 --- a/lib/tornado/gen.py +++ b/lib/tornado/gen.py @@ -743,7 +743,7 @@ class Runner(object): self.running = False self.finished = False self.io_loop = IOLoop.current() - if self.handle_yield(first_yielded): + if self.ctx_run(self.handle_yield, first_yielded): gen = result_future = first_yielded = None # type: ignore self.ctx_run(self.run) @@ -763,21 +763,25 @@ class Runner(object): return self.future = None try: - exc_info = None - try: value = future.result() - except Exception: - exc_info = sys.exc_info() - future = None + except Exception as e: + # Save the exception for later. It's important that + # gen.throw() not be called inside this try/except block + # because that makes sys.exc_info behave unexpectedly. 
+ exc: Optional[Exception] = e + else: + exc = None + finally: + future = None - if exc_info is not None: + if exc is not None: try: - yielded = self.gen.throw(*exc_info) # type: ignore + yielded = self.gen.throw(exc) finally: - # Break up a reference to itself - # for faster GC on CPython. - exc_info = None + # Break up a circular reference for faster GC on + # CPython. + del exc else: yielded = self.gen.send(value) diff --git a/lib/tornado/ioloop.py b/lib/tornado/ioloop.py index 2c05755d..bcdcca09 100644 --- a/lib/tornado/ioloop.py +++ b/lib/tornado/ioloop.py @@ -83,7 +83,7 @@ class IOLoop(Configurable): import functools import socket - import tornado.ioloop + import tornado from tornado.iostream import IOStream async def handle_connection(connection, address): @@ -123,8 +123,7 @@ class IOLoop(Configurable): and instead initialize the `asyncio` event loop and use `IOLoop.current()`. In some cases, such as in test frameworks when initializing an `IOLoop` to be run in a secondary thread, it may be appropriate to construct - an `IOLoop` with ``IOLoop(make_current=False)``. Constructing an `IOLoop` - without the ``make_current=False`` argument is deprecated since Tornado 6.2. + an `IOLoop` with ``IOLoop(make_current=False)``. In general, an `IOLoop` cannot survive a fork or be shared across processes in any way. When multiple processes are being used, each process should @@ -145,12 +144,10 @@ class IOLoop(Configurable): cannot be used on Python 3 except to redundantly specify the `asyncio` event loop. - .. deprecated:: 6.2 - It is deprecated to create an event loop that is "current" but not - running. This means it is deprecated to pass - ``make_current=True`` to the ``IOLoop`` constructor, or to create - an ``IOLoop`` while no asyncio event loop is running unless - ``make_current=False`` is used. + .. versionchanged:: 6.3 + ``make_current=True`` is now the default when creating an IOLoop - + previously the default was to make the event loop current if there wasn't + already a current one. """ # These constants were originally based on constants from the epoll module. @@ -263,17 +260,20 @@ class IOLoop(Configurable): """ try: loop = asyncio.get_event_loop() - except (RuntimeError, AssertionError): + except RuntimeError: if not instance: return None - raise + # Create a new asyncio event loop for this thread. + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + try: return IOLoop._ioloop_for_asyncio[loop] except KeyError: if instance: from tornado.platform.asyncio import AsyncIOMainLoop - current = AsyncIOMainLoop(make_current=True) # type: Optional[IOLoop] + current = AsyncIOMainLoop() # type: Optional[IOLoop] else: current = None return current @@ -295,12 +295,17 @@ class IOLoop(Configurable): This method also sets the current `asyncio` event loop. .. deprecated:: 6.2 - The concept of an event loop that is "current" without - currently running is deprecated in asyncio since Python - 3.10. All related functionality in Tornado is also - deprecated. Instead, start the event loop with `asyncio.run` - before interacting with it. + Setting and clearing the current event loop through Tornado is + deprecated. Use ``asyncio.set_event_loop`` instead if you need this. """ + warnings.warn( + "make_current is deprecated; start the event loop first", + DeprecationWarning, + stacklevel=2, + ) + self._make_current() + + def _make_current(self) -> None: # The asyncio event loops override this method. 
raise NotImplementedError() @@ -344,16 +349,9 @@ class IOLoop(Configurable): return AsyncIOLoop - def initialize(self, make_current: Optional[bool] = None) -> None: - if make_current is None: - if IOLoop.current(instance=False) is None: - self.make_current() - elif make_current: - current = IOLoop.current(instance=False) - # AsyncIO loops can already be current by this point. - if current is not None and current is not self: - raise RuntimeError("current IOLoop already exists") - self.make_current() + def initialize(self, make_current: bool = True) -> None: + if make_current: + self._make_current() def close(self, all_fds: bool = False) -> None: """Closes the `IOLoop`, freeing any resources used. diff --git a/lib/tornado/iostream.py b/lib/tornado/iostream.py index 7f19a7fa..e7291263 100644 --- a/lib/tornado/iostream.py +++ b/lib/tornado/iostream.py @@ -195,11 +195,9 @@ class _StreamBuffer(object): pos += size size = 0 else: - # Amortized O(1) shrink for Python 2 pos += size - if len(b) <= 2 * pos: - del typing.cast(bytearray, b)[:pos] - pos = 0 + del typing.cast(bytearray, b)[:pos] + pos = 0 size = 0 assert size == 0 @@ -254,7 +252,6 @@ class BaseIOStream(object): self.max_write_buffer_size = max_write_buffer_size self.error = None # type: Optional[BaseException] self._read_buffer = bytearray() - self._read_buffer_pos = 0 self._read_buffer_size = 0 self._user_read_buffer = False self._after_user_read_buffer = None # type: Optional[bytearray] @@ -451,21 +448,17 @@ class BaseIOStream(object): available_bytes = self._read_buffer_size n = len(buf) if available_bytes >= n: - end = self._read_buffer_pos + n - buf[:] = memoryview(self._read_buffer)[self._read_buffer_pos : end] - del self._read_buffer[:end] + buf[:] = memoryview(self._read_buffer)[:n] + del self._read_buffer[:n] self._after_user_read_buffer = self._read_buffer elif available_bytes > 0: - buf[:available_bytes] = memoryview(self._read_buffer)[ - self._read_buffer_pos : - ] + buf[:available_bytes] = memoryview(self._read_buffer)[:] # Set up the supplied buffer as our temporary read buffer. # The original (if it had any data remaining) has been # saved for later. self._user_read_buffer = True self._read_buffer = buf - self._read_buffer_pos = 0 self._read_buffer_size = available_bytes self._read_bytes = n self._read_partial = partial @@ -818,7 +811,6 @@ class BaseIOStream(object): if self._user_read_buffer: self._read_buffer = self._after_user_read_buffer or bytearray() self._after_user_read_buffer = None - self._read_buffer_pos = 0 self._read_buffer_size = len(self._read_buffer) self._user_read_buffer = False result = size # type: Union[int, bytes] @@ -931,20 +923,17 @@ class BaseIOStream(object): # since large merges are relatively expensive and get undone in # _consume(). 
if self._read_buffer: - loc = self._read_buffer.find( - self._read_delimiter, self._read_buffer_pos - ) + loc = self._read_buffer.find(self._read_delimiter) if loc != -1: - loc -= self._read_buffer_pos delimiter_len = len(self._read_delimiter) self._check_max_bytes(self._read_delimiter, loc + delimiter_len) return loc + delimiter_len self._check_max_bytes(self._read_delimiter, self._read_buffer_size) elif self._read_regex is not None: if self._read_buffer: - m = self._read_regex.search(self._read_buffer, self._read_buffer_pos) + m = self._read_regex.search(self._read_buffer) if m is not None: - loc = m.end() - self._read_buffer_pos + loc = m.end() self._check_max_bytes(self._read_regex, loc) return loc self._check_max_bytes(self._read_regex, self._read_buffer_size) @@ -1001,19 +990,9 @@ class BaseIOStream(object): return b"" assert loc <= self._read_buffer_size # Slice the bytearray buffer into bytes, without intermediate copying - b = ( - memoryview(self._read_buffer)[ - self._read_buffer_pos : self._read_buffer_pos + loc - ] - ).tobytes() - self._read_buffer_pos += loc + b = (memoryview(self._read_buffer)[:loc]).tobytes() self._read_buffer_size -= loc - # Amortized O(1) shrink - # (this heuristic is implemented natively in Python 3.4+ - # but is replicated here for Python 2) - if self._read_buffer_pos > self._read_buffer_size: - del self._read_buffer[: self._read_buffer_pos] - self._read_buffer_pos = 0 + del self._read_buffer[:loc] return b def _check_closed(self) -> None: @@ -1092,9 +1071,8 @@ class IOStream(BaseIOStream): .. testcode:: - import tornado.ioloop - import tornado.iostream import socket + import tornado async def main(): s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0) diff --git a/lib/tornado/locale.py b/lib/tornado/locale.py index 533ce4d4..55072af2 100644 --- a/lib/tornado/locale.py +++ b/lib/tornado/locale.py @@ -268,7 +268,7 @@ class Locale(object): def __init__(self, code: str) -> None: self.code = code - self.name = LOCALE_NAMES.get(code, {}).get("name", u"Unknown") + self.name = LOCALE_NAMES.get(code, {}).get("name", "Unknown") self.rtl = False for prefix in ["fa", "ar", "he"]: if self.code.startswith(prefix): @@ -406,7 +406,7 @@ class Locale(object): str_time = "%d:%02d" % (local_date.hour, local_date.minute) elif self.code == "zh_CN": str_time = "%s%d:%02d" % ( - (u"\u4e0a\u5348", u"\u4e0b\u5348")[local_date.hour >= 12], + ("\u4e0a\u5348", "\u4e0b\u5348")[local_date.hour >= 12], local_date.hour % 12 or 12, local_date.minute, ) @@ -458,7 +458,7 @@ class Locale(object): return "" if len(parts) == 1: return parts[0] - comma = u" \u0648 " if self.code.startswith("fa") else u", " + comma = " \u0648 " if self.code.startswith("fa") else ", " return _("%(commas)s and %(last)s") % { "commas": comma.join(parts[:-1]), "last": parts[len(parts) - 1], diff --git a/lib/tornado/netutil.py b/lib/tornado/netutil.py index 069e9a6b..04db085a 100644 --- a/lib/tornado/netutil.py +++ b/lib/tornado/netutil.py @@ -44,10 +44,10 @@ if hasattr(ssl, "OP_NO_COMPRESSION"): # module-import time, the import lock is already held by the main thread, # leading to deadlock. Avoid it by caching the idna encoder on the main # thread now. -u"foo".encode("idna") +"foo".encode("idna") # For undiagnosed reasons, 'latin1' codec may also need to be preloaded. 
-u"foo".encode("latin1") +"foo".encode("latin1") # Default backlog used when calling sock.listen() _DEFAULT_BACKLOG = 128 @@ -115,7 +115,7 @@ def bind_sockets( sys.platform == "darwin" and address == "localhost" and af == socket.AF_INET6 - and sockaddr[3] != 0 + and sockaddr[3] != 0 # type: ignore ): # Mac OS X includes a link-local address fe80::1%lo0 in the # getaddrinfo results for 'localhost'. However, the firewall diff --git a/lib/tornado/options.py b/lib/tornado/options.py index e62f7efe..b8296691 100644 --- a/lib/tornado/options.py +++ b/lib/tornado/options.py @@ -56,7 +56,7 @@ Your ``main()`` method can parse the command line or parse a config file with either `parse_command_line` or `parse_config_file`:: import myapp.db, myapp.server - import tornado.options + import tornado if __name__ == '__main__': tornado.options.parse_command_line() @@ -427,7 +427,9 @@ class OptionParser(object): % (option.name, option.type.__name__) ) - if type(config[name]) == str and option.type != str: + if type(config[name]) == str and ( + option.type != str or option.multiple + ): option.parse(config[name]) else: option.set(config[name]) diff --git a/lib/tornado/platform/asyncio.py b/lib/tornado/platform/asyncio.py index ca671ac6..a15a74df 100644 --- a/lib/tornado/platform/asyncio.py +++ b/lib/tornado/platform/asyncio.py @@ -36,10 +36,10 @@ import warnings from tornado.gen import convert_yielded from tornado.ioloop import IOLoop, _Selectable -from typing import Any, TypeVar, Awaitable, Callable, Union, Optional, List, Tuple, Dict +from typing import Any, TypeVar, Awaitable, Callable, Union, Optional, List, Dict if typing.TYPE_CHECKING: - from typing import Set # noqa: F401 + from typing import Set, Tuple # noqa: F401 from typing_extensions import Protocol class _HasFileno(Protocol): @@ -74,20 +74,6 @@ def _atexit_callback() -> None: atexit.register(_atexit_callback) -if sys.version_info >= (3, 10): - - def _get_event_loop() -> asyncio.AbstractEventLoop: - try: - return asyncio.get_running_loop() - except RuntimeError: - pass - - return asyncio.get_event_loop_policy().get_event_loop() - - -else: - from asyncio import get_event_loop as _get_event_loop - class BaseAsyncIOLoop(IOLoop): def initialize( # type: ignore @@ -206,15 +192,7 @@ class BaseAsyncIOLoop(IOLoop): handler_func(fileobj, events) def start(self) -> None: - try: - old_loop = _get_event_loop() - except (RuntimeError, AssertionError): - old_loop = None # type: ignore - try: - asyncio.set_event_loop(self.asyncio_loop) - self.asyncio_loop.run_forever() - finally: - asyncio.set_event_loop(old_loop) + self.asyncio_loop.run_forever() def stop(self) -> None: self.asyncio_loop.stop() @@ -298,7 +276,7 @@ class AsyncIOMainLoop(BaseAsyncIOLoop): def initialize(self, **kwargs: Any) -> None: # type: ignore super().initialize(asyncio.get_event_loop(), **kwargs) - def make_current(self) -> None: + def _make_current(self) -> None: # AsyncIOMainLoop already refers to the current asyncio loop so # nothing to do here. 
pass @@ -349,12 +327,7 @@ class AsyncIOLoop(BaseAsyncIOLoop): self._clear_current() super().close(all_fds=all_fds) - def make_current(self) -> None: - warnings.warn( - "make_current is deprecated; start the event loop first", - DeprecationWarning, - stacklevel=2, - ) + def _make_current(self) -> None: if not self.is_current: try: self.old_asyncio = asyncio.get_event_loop() @@ -672,10 +645,18 @@ class AddThreadSelectorEventLoop(asyncio.AbstractEventLoop): self._writers[fd] = functools.partial(callback, *args) self._wake_selector() - def remove_reader(self, fd: "_FileDescriptorLike") -> None: - del self._readers[fd] + def remove_reader(self, fd: "_FileDescriptorLike") -> bool: + try: + del self._readers[fd] + except KeyError: + return False self._wake_selector() + return True - def remove_writer(self, fd: "_FileDescriptorLike") -> None: - del self._writers[fd] + def remove_writer(self, fd: "_FileDescriptorLike") -> bool: + try: + del self._writers[fd] + except KeyError: + return False self._wake_selector() + return True diff --git a/lib/tornado/platform/caresresolver.py b/lib/tornado/platform/caresresolver.py index 962f84f4..1ba45c9a 100644 --- a/lib/tornado/platform/caresresolver.py +++ b/lib/tornado/platform/caresresolver.py @@ -15,14 +15,15 @@ if typing.TYPE_CHECKING: class CaresResolver(Resolver): """Name resolver based on the c-ares library. - This is a non-blocking and non-threaded resolver. It may not produce - the same results as the system resolver, but can be used for non-blocking + This is a non-blocking and non-threaded resolver. It may not produce the + same results as the system resolver, but can be used for non-blocking resolution when threads cannot be used. - c-ares fails to resolve some names when ``family`` is ``AF_UNSPEC``, - so it is only recommended for use in ``AF_INET`` (i.e. IPv4). This is - the default for ``tornado.simple_httpclient``, but other libraries - may default to ``AF_UNSPEC``. + ``pycares`` will not return a mix of ``AF_INET`` and ``AF_INET6`` when + ``family`` is ``AF_UNSPEC``, so it is only recommended for use in + ``AF_INET`` (i.e. IPv4). This is the default for + ``tornado.simple_httpclient``, but other libraries may default to + ``AF_UNSPEC``. .. versionchanged:: 5.0 The ``io_loop`` argument (deprecated since version 4.1) has been removed. 
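
A minimal sketch of opting into ``CaresResolver`` as described above, assuming the optional ``pycares`` dependency is installed; host and port are illustrative::

    import asyncio
    import socket

    from tornado.netutil import Resolver


    async def main():
        # Select the c-ares backed implementation (requires pycares).
        Resolver.configure('tornado.platform.caresresolver.CaresResolver')
        resolver = Resolver()
        # Per the note above, stick to AF_INET; AF_UNSPEC will not return
        # a mix of IPv4 and IPv6 results.
        addrinfo = await resolver.resolve('example.org', 80, family=socket.AF_INET)
        print(addrinfo)

    asyncio.run(main())
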
diff --git a/lib/tornado/queues.py b/lib/tornado/queues.py index 32132e16..1358d0ec 100644 --- a/lib/tornado/queues.py +++ b/lib/tornado/queues.py @@ -381,7 +381,7 @@ class PriorityQueue(Queue): def _put(self, item: _T) -> None: heapq.heappush(self._queue, item) - def _get(self) -> _T: + def _get(self) -> _T: # type: ignore[type-var] return heapq.heappop(self._queue) @@ -418,5 +418,5 @@ class LifoQueue(Queue): def _put(self, item: _T) -> None: self._queue.append(item) - def _get(self) -> _T: + def _get(self) -> _T: # type: ignore[type-var] return self._queue.pop() diff --git a/lib/tornado/simple_httpclient.py b/lib/tornado/simple_httpclient.py index 3a1aa53d..2460863f 100644 --- a/lib/tornado/simple_httpclient.py +++ b/lib/tornado/simple_httpclient.py @@ -547,7 +547,7 @@ class _HTTPConnection(httputil.HTTPMessageDelegate): value: Optional[BaseException], tb: Optional[TracebackType], ) -> bool: - if self.final_callback: + if self.final_callback is not None: self._remove_timeout() if isinstance(value, StreamClosedError): if value.real_error is None: diff --git a/lib/tornado/tcpclient.py b/lib/tornado/tcpclient.py index e2d682ea..0a829062 100644 --- a/lib/tornado/tcpclient.py +++ b/lib/tornado/tcpclient.py @@ -21,6 +21,7 @@ import socket import numbers import datetime import ssl +import typing from tornado.concurrent import Future, future_add_done_callback from tornado.ioloop import IOLoop @@ -29,7 +30,10 @@ from tornado import gen from tornado.netutil import Resolver from tornado.gen import TimeoutError -from typing import Any, Union, Dict, Tuple, List, Callable, Iterator, Optional, Set +from typing import Any, Union, Dict, Tuple, List, Callable, Iterator, Optional + +if typing.TYPE_CHECKING: + from typing import Set # noqa(F401) _INITIAL_CONNECT_TIMEOUT = 0.3 diff --git a/lib/tornado/tcpserver.py b/lib/tornado/tcpserver.py index 183aac21..deab8f2a 100644 --- a/lib/tornado/tcpserver.py +++ b/lib/tornado/tcpserver.py @@ -246,9 +246,7 @@ class TCPServer(object): .. deprecated:: 6.2 Use either ``listen()`` or ``add_sockets()`` instead of ``bind()`` - and ``start()``. The ``bind()/start()`` pattern depends on - interfaces that have been deprecated in Python 3.10 and will be - removed in future versions of Python. + and ``start()``. """ sockets = bind_sockets( port, @@ -295,9 +293,7 @@ class TCPServer(object): .. deprecated:: 6.2 Use either ``listen()`` or ``add_sockets()`` instead of ``bind()`` - and ``start()``. The ``bind()/start()`` pattern depends on - interfaces that have been deprecated in Python 3.10 and will be - removed in future versions of Python. + and ``start()``. """ assert not self._started self._started = True diff --git a/lib/tornado/testing.py b/lib/tornado/testing.py index 688464f0..9bfadf45 100644 --- a/lib/tornado/testing.py +++ b/lib/tornado/testing.py @@ -135,7 +135,8 @@ class AsyncTestCase(unittest.TestCase): By default, a new `.IOLoop` is constructed for each test and is available as ``self.io_loop``. If the code being tested requires a - global `.IOLoop`, subclasses should override `get_new_ioloop` to return it. + reused global `.IOLoop`, subclasses should override `get_new_ioloop` to return it, + although this is deprecated as of Tornado 6.3. The `.IOLoop`'s ``start`` and ``stop`` methods should not be called directly. Instead, use `self.stop ` and `self.wait @@ -162,17 +163,6 @@ class AsyncTestCase(unittest.TestCase): response = self.wait() # Test contents of response self.assertIn("FriendFeed", response.body) - - .. 
deprecated:: 6.2 - - AsyncTestCase and AsyncHTTPTestCase are deprecated due to changes - in future versions of Python (after 3.10). The interfaces used - in this class are incompatible with the deprecation and intended - removal of certain methods related to the idea of a "current" - event loop while no event loop is actually running. Use - `unittest.IsolatedAsyncioTestCase` instead. Note that this class - does not emit DeprecationWarnings until better migration guidance - can be provided. """ def __init__(self, methodName: str = "runTest") -> None: @@ -193,49 +183,22 @@ class AsyncTestCase(unittest.TestCase): self._test_generator = None # type: Optional[Union[Generator, Coroutine]] def setUp(self) -> None: - setup_with_context_manager(self, warnings.catch_warnings()) - warnings.filterwarnings( - "ignore", - message="There is no current event loop", - category=DeprecationWarning, - module=r"tornado\..*", - ) + py_ver = sys.version_info + if ((3, 10, 0) <= py_ver < (3, 10, 9)) or ((3, 11, 0) <= py_ver <= (3, 11, 1)): + # Early releases in the Python 3.10 and 3.1 series had deprecation + # warnings that were later reverted; we must suppress them here. + setup_with_context_manager(self, warnings.catch_warnings()) + warnings.filterwarnings( + "ignore", + message="There is no current event loop", + category=DeprecationWarning, + module=r"tornado\..*", + ) super().setUp() - # NOTE: this code attempts to navigate deprecation warnings introduced - # in Python 3.10. The idea of an implicit current event loop is - # deprecated in that version, with the intention that tests like this - # explicitly create a new event loop and run on it. However, other - # packages such as pytest-asyncio (as of version 0.16.0) still rely on - # the implicit current event loop and we want to be compatible with them - # (even when run on 3.10, but not, of course, on the future version of - # python that removes the get/set_event_loop methods completely). - # - # Deprecation warnings were introduced inconsistently: - # asyncio.get_event_loop warns, but - # asyncio.get_event_loop_policy().get_event_loop does not. Similarly, - # none of the set_event_loop methods warn, although comments on - # https://bugs.python.org/issue39529 indicate that they are also - # intended for future removal. - # - # Therefore, we first attempt to access the event loop with the - # (non-warning) policy method, and if it fails, fall back to creating a - # new event loop. We do not have effective test coverage of the - # new event loop case; this will have to be watched when/if - # get_event_loop is actually removed. 
- self.should_close_asyncio_loop = False - try: - self.asyncio_loop = asyncio.get_event_loop_policy().get_event_loop() - except Exception: - self.asyncio_loop = asyncio.new_event_loop() - self.should_close_asyncio_loop = True - - async def get_loop() -> IOLoop: - return self.get_new_ioloop() - - self.io_loop = self.asyncio_loop.run_until_complete(get_loop()) - with warnings.catch_warnings(): - warnings.simplefilter("ignore", DeprecationWarning) - self.io_loop.make_current() + if type(self).get_new_ioloop is not AsyncTestCase.get_new_ioloop: + warnings.warn("get_new_ioloop is deprecated", DeprecationWarning) + self.io_loop = self.get_new_ioloop() + asyncio.set_event_loop(self.io_loop.asyncio_loop) # type: ignore[attr-defined] def tearDown(self) -> None: # Native coroutines tend to produce warnings if they're not @@ -270,17 +233,13 @@ class AsyncTestCase(unittest.TestCase): # Clean up Subprocess, so it can be used again with a new ioloop. Subprocess.uninitialize() - with warnings.catch_warnings(): - warnings.simplefilter("ignore", DeprecationWarning) - self.io_loop.clear_current() + asyncio.set_event_loop(None) if not isinstance(self.io_loop, _NON_OWNED_IOLOOPS): # Try to clean up any file descriptors left open in the ioloop. # This avoids leaks, especially when tests are run repeatedly # in the same process with autoreload (because curl does not # set FD_CLOEXEC on its file descriptors) self.io_loop.close(all_fds=True) - if self.should_close_asyncio_loop: - self.asyncio_loop.close() super().tearDown() # In case an exception escaped or the StackContext caught an exception # when there wasn't a wait() to re-raise it, do so here. @@ -298,6 +257,9 @@ class AsyncTestCase(unittest.TestCase): singletons using the default `.IOLoop`) or if a per-test event loop is being provided by another system (such as ``pytest-asyncio``). + + .. deprecated:: 6.3 + This method will be removed in Tornado 7.0. """ return IOLoop(make_current=False) @@ -435,10 +397,6 @@ class AsyncHTTPTestCase(AsyncTestCase): like ``http_client.fetch()``, into a synchronous operation. If you need to do other asynchronous operations in tests, you'll probably need to use ``stop()`` and ``wait()`` yourself. - - .. deprecated:: 6.2 - `AsyncTestCase` and `AsyncHTTPTestCase` are deprecated due to changes - in Python 3.10; see comments on `AsyncTestCase` for more details. """ def setUp(self) -> None: @@ -672,7 +630,7 @@ def gen_test( # noqa: F811 if self._test_generator is not None and getattr( self._test_generator, "cr_running", True ): - self._test_generator.throw(type(e), e) + self._test_generator.throw(e) # In case the test contains an overly broad except # clause, we may get back here. # Coroutine was stopped or didn't raise a useful stack trace, @@ -724,28 +682,37 @@ class ExpectLog(logging.Filter): ) -> None: """Constructs an ExpectLog context manager. - :param logger: Logger object (or name of logger) to watch. Pass - an empty string to watch the root logger. - :param regex: Regular expression to match. Any log entries on - the specified logger that match this regex will be suppressed. - :param required: If true, an exception will be raised if the end of - the ``with`` statement is reached without matching any log entries. + :param logger: Logger object (or name of logger) to watch. Pass an + empty string to watch the root logger. + :param regex: Regular expression to match. Any log entries on the + specified logger that match this regex will be suppressed. 
+ :param required: If true, an exception will be raised if the end of the + ``with`` statement is reached without matching any log entries. :param level: A constant from the ``logging`` module indicating the expected log level. If this parameter is provided, only log messages at this level will be considered to match. Additionally, the - supplied ``logger`` will have its level adjusted if necessary - (for the duration of the ``ExpectLog`` to enable the expected - message. + supplied ``logger`` will have its level adjusted if necessary (for + the duration of the ``ExpectLog`` to enable the expected message. .. versionchanged:: 6.1 Added the ``level`` parameter. + + .. deprecated:: 6.3 + In Tornado 7.0, only ``WARNING`` and higher logging levels will be + matched by default. To match ``INFO`` and lower levels, the ``level`` + argument must be used. This is changing to minimize differences + between ``tornado.testing.main`` (which enables ``INFO`` logs by + default) and most other test runners (including those in IDEs) + which have ``INFO`` logs disabled by default. """ if isinstance(logger, basestring_type): logger = logging.getLogger(logger) self.logger = logger self.regex = re.compile(regex) self.required = required - self.matched = False + # matched and deprecated_level_matched are a counter for the respective event. + self.matched = 0 + self.deprecated_level_matched = 0 self.logged_stack = False self.level = level self.orig_level = None # type: Optional[int] @@ -755,13 +722,20 @@ class ExpectLog(logging.Filter): self.logged_stack = True message = record.getMessage() if self.regex.match(message): + if self.level is None and record.levelno < logging.WARNING: + # We're inside the logging machinery here so generating a DeprecationWarning + # here won't be reported cleanly (if warnings-as-errors is enabled, the error + # just gets swallowed by the logging module), and even if it were it would + # have the wrong stack trace. Just remember this fact and report it in + # __exit__ instead. + self.deprecated_level_matched += 1 if self.level is not None and record.levelno != self.level: app_log.warning( "Got expected log message %r at unexpected level (%s vs %s)" % (message, logging.getLevelName(self.level), record.levelname) ) return True - self.matched = True + self.matched += 1 return False return True @@ -783,6 +757,15 @@ class ExpectLog(logging.Filter): self.logger.removeFilter(self) if not typ and self.required and not self.matched: raise Exception("did not get expected log message") + if ( + not typ + and self.required + and (self.deprecated_level_matched >= self.matched) + ): + warnings.warn( + "ExpectLog matched at INFO or below without level argument", + DeprecationWarning, + ) # From https://nedbatchelder.com/blog/201508/using_context_managers_in_test_setup.html diff --git a/lib/tornado/web.py b/lib/tornado/web.py index cd6a81b4..18634d89 100644 --- a/lib/tornado/web.py +++ b/lib/tornado/web.py @@ -23,7 +23,7 @@ Here is a simple "Hello, world" example app: .. testcode:: import asyncio - import tornado.web + import tornado class MainHandler(tornado.web.RequestHandler): def get(self): @@ -166,7 +166,7 @@ May be overridden by passing a ``version`` keyword argument. """ DEFAULT_SIGNED_VALUE_MIN_VERSION = 1 -"""The oldest signed value accepted by `.RequestHandler.get_secure_cookie`. +"""The oldest signed value accepted by `.RequestHandler.get_signed_cookie`. May be overridden by passing a ``min_version`` keyword argument. 
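
Given the ``ExpectLog`` deprecation note above (from Tornado 7.0 only ``WARNING`` and higher match by default), tests that assert on informational output should pass ``level`` explicitly. A minimal sketch; test name, logger, and message are illustrative::

    import logging

    from tornado.log import app_log
    from tornado.testing import ExpectLog


    def test_worker_startup_message():
        # Passing level= keeps this match working once INFO is no longer
        # matched by default, and avoids the new DeprecationWarning emitted
        # when an INFO-or-below message matches without a level argument.
        with ExpectLog(app_log, r'.*worker started.*', level=logging.INFO):
            app_log.info('worker started')
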
@@ -210,7 +210,7 @@ class RequestHandler(object): self, application: "Application", request: httputil.HTTPServerRequest, - **kwargs: Any + **kwargs: Any, ) -> None: super().__init__() @@ -603,21 +603,28 @@ class RequestHandler(object): expires: Optional[Union[float, Tuple, datetime.datetime]] = None, path: str = "/", expires_days: Optional[float] = None, - **kwargs: Any + # Keyword-only args start here for historical reasons. + *, + max_age: Optional[int] = None, + httponly: bool = False, + secure: bool = False, + samesite: Optional[str] = None, ) -> None: """Sets an outgoing cookie name/value with the given options. Newly-set cookies are not immediately visible via `get_cookie`; they are not present until the next request. - expires may be a numeric timestamp as returned by `time.time`, - a time tuple as returned by `time.gmtime`, or a - `datetime.datetime` object. + Most arguments are passed directly to `http.cookies.Morsel` directly. + See https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Set-Cookie + for more information. + + ``expires`` may be a numeric timestamp as returned by `time.time`, + a time tuple as returned by `time.gmtime`, or a + `datetime.datetime` object. ``expires_days`` is provided as a convenience + to set an expiration time in days from today (if both are set, ``expires`` + is used). - Additional keyword arguments are set on the cookies.Morsel - directly. - See https://docs.python.org/3/library/http.cookies.html#http.cookies.Morsel - for available attributes. """ # The cookie library only accepts type str, in both python 2 and 3 name = escape.native_str(name) @@ -641,56 +648,82 @@ class RequestHandler(object): morsel["expires"] = httputil.format_timestamp(expires) if path: morsel["path"] = path - for k, v in kwargs.items(): - if k == "max_age": - k = "max-age" + if max_age: + # Note change from _ to -. + morsel["max-age"] = str(max_age) + if httponly: + # Note that SimpleCookie ignores the value here. The presense of an + # httponly (or secure) key is treated as true. + morsel["httponly"] = True + if secure: + morsel["secure"] = True + if samesite: + morsel["samesite"] = samesite - # skip falsy values for httponly and secure flags because - # SimpleCookie sets them regardless - if k in ["httponly", "secure"] and not v: - continue - - morsel[k] = v - - def clear_cookie( - self, name: str, path: str = "/", domain: Optional[str] = None - ) -> None: + def clear_cookie(self, name: str, **kwargs: Any) -> None: """Deletes the cookie with the given name. - Due to limitations of the cookie protocol, you must pass the same - path and domain to clear a cookie as were used when that cookie - was set (but there is no way to find out on the server side - which values were used for a given cookie). + This method accepts the same arguments as `set_cookie`, except for + ``expires`` and ``max_age``. Clearing a cookie requires the same + ``domain`` and ``path`` arguments as when it was set. In some cases the + ``samesite`` and ``secure`` arguments are also required to match. Other + arguments are ignored. Similar to `set_cookie`, the effect of this method will not be seen until the following request. + + .. versionchanged:: 6.3 + + Now accepts all keyword arguments that ``set_cookie`` does. + The ``samesite`` and ``secure`` flags have recently become + required for clearing ``samesite="none"`` cookies. 
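A usage sketch of the reworked `set_cookie`/`clear_cookie` signatures above; the handler class and cookie name are hypothetical, not part of this diff:

```python
import tornado.web


class SessionHintHandler(tornado.web.RequestHandler):
    def get(self) -> None:
        # max_age, httponly, secure and samesite are now explicit
        # keyword-only parameters rather than **kwargs forwarded to Morsel.
        self.set_cookie(
            "session_hint", "abc123",
            max_age=3600, httponly=True, secure=True, samesite="None",
        )

    def delete(self) -> None:
        # Clearing a samesite="None" cookie must repeat the samesite and
        # secure flags; expires and max_age are rejected by clear_cookie().
        self.clear_cookie("session_hint", secure=True, samesite="None")
```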
""" + for excluded_arg in ["expires", "max_age"]: + if excluded_arg in kwargs: + raise TypeError( + f"clear_cookie() got an unexpected keyword argument '{excluded_arg}'" + ) expires = datetime.datetime.utcnow() - datetime.timedelta(days=365) - self.set_cookie(name, value="", path=path, expires=expires, domain=domain) + self.set_cookie(name, value="", expires=expires, **kwargs) - def clear_all_cookies(self, path: str = "/", domain: Optional[str] = None) -> None: - """Deletes all the cookies the user sent with this request. + def clear_all_cookies(self, **kwargs: Any) -> None: + """Attempt to delete all the cookies the user sent with this request. - See `clear_cookie` for more information on the path and domain - parameters. + See `clear_cookie` for more information on keyword arguments. Due to + limitations of the cookie protocol, it is impossible to determine on the + server side which values are necessary for the ``domain``, ``path``, + ``samesite``, or ``secure`` arguments, this method can only be + successful if you consistently use the same values for these arguments + when setting cookies. - Similar to `set_cookie`, the effect of this method will not be - seen until the following request. + Similar to `set_cookie`, the effect of this method will not be seen + until the following request. .. versionchanged:: 3.2 Added the ``path`` and ``domain`` parameters. + + .. versionchanged:: 6.3 + + Now accepts all keyword arguments that ``set_cookie`` does. + + .. deprecated:: 6.3 + + The increasingly complex rules governing cookies have made it + impossible for a ``clear_all_cookies`` method to work reliably + since all we know about cookies are their names. Applications + should generally use ``clear_cookie`` one at a time instead. """ for name in self.request.cookies: - self.clear_cookie(name, path=path, domain=domain) + self.clear_cookie(name, **kwargs) - def set_secure_cookie( + def set_signed_cookie( self, name: str, value: Union[str, bytes], expires_days: Optional[float] = 30, version: Optional[int] = None, - **kwargs: Any + **kwargs: Any, ) -> None: """Signs and timestamps a cookie so it cannot be forged. @@ -698,11 +731,11 @@ class RequestHandler(object): to use this method. It should be a long, random sequence of bytes to be used as the HMAC secret for the signature. - To read a cookie set with this method, use `get_secure_cookie()`. + To read a cookie set with this method, use `get_signed_cookie()`. Note that the ``expires_days`` parameter sets the lifetime of the cookie in the browser, but is independent of the ``max_age_days`` - parameter to `get_secure_cookie`. + parameter to `get_signed_cookie`. A value of None limits the lifetime to the current browser session. Secure cookies may contain arbitrary byte values, not just unicode @@ -715,22 +748,30 @@ class RequestHandler(object): Added the ``version`` argument. Introduced cookie version 2 and made it the default. + + .. versionchanged:: 6.3 + + Renamed from ``set_secure_cookie`` to ``set_signed_cookie`` to + avoid confusion with other uses of "secure" in cookie attributes + and prefixes. The old name remains as an alias. """ self.set_cookie( name, self.create_signed_value(name, value, version=version), expires_days=expires_days, - **kwargs + **kwargs, ) + set_secure_cookie = set_signed_cookie + def create_signed_value( self, name: str, value: Union[str, bytes], version: Optional[int] = None ) -> bytes: """Signs and timestamps a string so it cannot be forged. 
- Normally used via set_secure_cookie, but provided as a separate + Normally used via set_signed_cookie, but provided as a separate method for non-cookie uses. To decode a value not stored - as a cookie use the optional value argument to get_secure_cookie. + as a cookie use the optional value argument to get_signed_cookie. .. versionchanged:: 3.2.1 @@ -749,7 +790,7 @@ class RequestHandler(object): secret, name, value, version=version, key_version=key_version ) - def get_secure_cookie( + def get_signed_cookie( self, name: str, value: Optional[str] = None, @@ -763,12 +804,19 @@ class RequestHandler(object): Similar to `get_cookie`, this method only returns cookies that were present in the request. It does not see outgoing cookies set by - `set_secure_cookie` in this handler. + `set_signed_cookie` in this handler. .. versionchanged:: 3.2.1 Added the ``min_version`` argument. Introduced cookie version 2; both versions 1 and 2 are accepted by default. + + .. versionchanged:: 6.3 + + Renamed from ``get_secure_cookie`` to ``get_signed_cookie`` to + avoid confusion with other uses of "secure" in cookie attributes + and prefixes. The old name remains as an alias. + """ self.require_setting("cookie_secret", "secure cookies") if value is None: @@ -781,12 +829,22 @@ class RequestHandler(object): min_version=min_version, ) - def get_secure_cookie_key_version( + get_secure_cookie = get_signed_cookie + + def get_signed_cookie_key_version( self, name: str, value: Optional[str] = None ) -> Optional[int]: """Returns the signing key version of the secure cookie. The version is returned as int. + + .. versionchanged:: 6.3 + + Renamed from ``get_secure_cookie_key_version`` to + ``set_signed_cookie_key_version`` to avoid confusion with other + uses of "secure" in cookie attributes and prefixes. The old name + remains as an alias. + """ self.require_setting("cookie_secret", "secure cookies") if value is None: @@ -795,6 +853,8 @@ class RequestHandler(object): return None return get_signature_key_version(value) + get_secure_cookie_key_version = get_signed_cookie_key_version + def redirect( self, url: str, permanent: bool = False, status: Optional[int] = None ) -> None: @@ -1321,7 +1381,7 @@ class RequestHandler(object): and is cached for future access:: def get_current_user(self): - user_cookie = self.get_secure_cookie("user") + user_cookie = self.get_signed_cookie("user") if user_cookie: return json.loads(user_cookie) return None @@ -1331,7 +1391,7 @@ class RequestHandler(object): @gen.coroutine def prepare(self): - user_id_cookie = self.get_secure_cookie("user_id") + user_id_cookie = self.get_signed_cookie("user_id") if user_id_cookie: self.current_user = yield load_user(user_id_cookie) @@ -1643,7 +1703,7 @@ class RequestHandler(object): # Find all weak and strong etag values from If-None-Match header # because RFC 7232 allows multiple etag values in a single header. 
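A brief sketch of the renamed signed-cookie helpers in use, mirroring the `get_current_user` docstring example in this hunk; the route, cookie name, and secret are placeholders:

```python
import json

import tornado.web


class BaseHandler(tornado.web.RequestHandler):
    def get_current_user(self):
        # get_signed_cookie() is the Tornado 6.3 name; get_secure_cookie
        # remains available as an alias.
        user_cookie = self.get_signed_cookie("user")
        if user_cookie:
            return json.loads(user_cookie)
        return None


class LoginHandler(BaseHandler):
    def post(self):
        # set_signed_cookie() replaces set_secure_cookie (also aliased) and
        # requires the application's cookie_secret setting.
        self.set_signed_cookie("user", json.dumps({"name": self.get_argument("name")}))
        self.redirect("/")


application = tornado.web.Application(
    [(r"/login", LoginHandler)],
    cookie_secret="replace-with-a-long-random-value",
)
```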
etags = re.findall( - br'\*|(?:W/)?"[^"]*"', utf8(self.request.headers.get("If-None-Match", "")) + rb'\*|(?:W/)?"[^"]*"', utf8(self.request.headers.get("If-None-Match", "")) ) if not computed_etag or not etags: return False @@ -1676,20 +1736,16 @@ class RequestHandler(object): ) # If XSRF cookies are turned on, reject form submissions without # the proper cookie - if ( - self.request.method - not in ( - "GET", - "HEAD", - "OPTIONS", - ) - and self.application.settings.get("xsrf_cookies") - ): + if self.request.method not in ( + "GET", + "HEAD", + "OPTIONS", + ) and self.application.settings.get("xsrf_cookies"): self.check_xsrf_cookie() result = self.prepare() if result is not None: - result = await result + result = await result # type: ignore if self._prepared_future is not None: # Tell the Application we've finished with prepare() # and are ready for the body to arrive. @@ -1848,7 +1904,7 @@ def stream_request_body(cls: Type[_RequestHandlerType]) -> Type[_RequestHandlerT * The regular HTTP method (``post``, ``put``, etc) will be called after the entire body has been read. - See the `file receiver demo `_ + See the `file receiver demo `_ for example usage. """ # noqa: E501 if not issubclass(cls, RequestHandler): @@ -2046,7 +2102,7 @@ class Application(ReversibleRouter): handlers: Optional[_RuleList] = None, default_host: Optional[str] = None, transforms: Optional[List[Type["OutputTransform"]]] = None, - **settings: Any + **settings: Any, ) -> None: if transforms is None: self.transforms = [] # type: List[Type[OutputTransform]] @@ -2106,7 +2162,7 @@ class Application(ReversibleRouter): backlog: int = tornado.netutil._DEFAULT_BACKLOG, flags: Optional[int] = None, reuse_port: bool = False, - **kwargs: Any + **kwargs: Any, ) -> HTTPServer: """Starts an HTTP server for this application on the given port. @@ -2393,7 +2449,7 @@ class HTTPError(Exception): status_code: int = 500, log_message: Optional[str] = None, *args: Any, - **kwargs: Any + **kwargs: Any, ) -> None: self.status_code = status_code self.log_message = log_message @@ -3441,7 +3497,7 @@ def create_signed_value( # A leading version number in decimal # with no leading zeros, followed by a pipe. 
-_signed_value_version_re = re.compile(br"^([1-9][0-9]*)\|(.*)$") +_signed_value_version_re = re.compile(rb"^([1-9][0-9]*)\|(.*)$") def _get_version(value: bytes) -> int: diff --git a/lib/tornado/websocket.py b/lib/tornado/websocket.py index 82c29d84..1d42e10b 100644 --- a/lib/tornado/websocket.py +++ b/lib/tornado/websocket.py @@ -23,7 +23,6 @@ import hashlib import os import sys import struct -import tornado.escape import tornado.web from urllib.parse import urlparse import zlib @@ -34,6 +33,7 @@ from tornado import gen, httpclient, httputil from tornado.ioloop import IOLoop, PeriodicCallback from tornado.iostream import StreamClosedError, IOStream from tornado.log import gen_log, app_log +from tornado.netutil import Resolver from tornado import simple_httpclient from tornado.queues import Queue from tornado.tcpclient import TCPClient @@ -822,7 +822,7 @@ class WebSocketProtocol13(WebSocketProtocol): self._masked_frame = None self._frame_mask = None # type: Optional[bytes] self._frame_length = None - self._fragmented_message_buffer = None # type: Optional[bytes] + self._fragmented_message_buffer = None # type: Optional[bytearray] self._fragmented_message_opcode = None self._waiting = None # type: object self._compression_options = params.compression_options @@ -1177,10 +1177,10 @@ class WebSocketProtocol13(WebSocketProtocol): # nothing to continue self._abort() return - self._fragmented_message_buffer += data + self._fragmented_message_buffer.extend(data) if is_final_frame: opcode = self._fragmented_message_opcode - data = self._fragmented_message_buffer + data = bytes(self._fragmented_message_buffer) self._fragmented_message_buffer = None else: # start of new data message if self._fragmented_message_buffer is not None: @@ -1189,7 +1189,7 @@ class WebSocketProtocol13(WebSocketProtocol): return if not is_final_frame: self._fragmented_message_opcode = opcode - self._fragmented_message_buffer = data + self._fragmented_message_buffer = bytearray(data) if is_final_frame: handled_future = self._handle_message(opcode, data) @@ -1362,6 +1362,7 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection): ping_timeout: Optional[float] = None, max_message_size: int = _default_max_message_size, subprotocols: Optional[List[str]] = [], + resolver: Optional[Resolver] = None, ) -> None: self.connect_future = Future() # type: Future[WebSocketClientConnection] self.read_queue = Queue(1) # type: Queue[Union[None, str, bytes]] @@ -1402,7 +1403,7 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection): # Websocket connection is currently unable to follow redirects request.follow_redirects = False - self.tcp_client = TCPClient() + self.tcp_client = TCPClient(resolver=resolver) super().__init__( None, request, diff --git a/lib/tornado/wsgi.py b/lib/tornado/wsgi.py index c60f152d..32641be3 100644 --- a/lib/tornado/wsgi.py +++ b/lib/tornado/wsgi.py @@ -27,12 +27,15 @@ container. """ -import sys +import concurrent.futures from io import BytesIO import tornado +import sys +from tornado.concurrent import dummy_executor from tornado import escape from tornado import httputil +from tornado.ioloop import IOLoop from tornado.log import access_log from typing import List, Tuple, Optional, Callable, Any, Dict, Text @@ -54,20 +57,28 @@ def to_wsgi_str(s: bytes) -> str: class WSGIContainer(object): - r"""Makes a WSGI-compatible function runnable on Tornado's HTTP server. + r"""Makes a WSGI-compatible application runnable on Tornado's HTTP server. .. 
warning:: WSGI is a *synchronous* interface, while Tornado's concurrency model - is based on single-threaded asynchronous execution. This means that - running a WSGI app with Tornado's `WSGIContainer` is *less scalable* - than running the same app in a multi-threaded WSGI server like - ``gunicorn`` or ``uwsgi``. Use `WSGIContainer` only when there are - benefits to combining Tornado and WSGI in the same process that - outweigh the reduced scalability. + is based on single-threaded *asynchronous* execution. Many of Tornado's + distinguishing features are not available in WSGI mode, including efficient + long-polling and websockets. The primary purpose of `WSGIContainer` is + to support both WSGI applications and native Tornado ``RequestHandlers`` in + a single process. WSGI-only applications are likely to be better off + with a dedicated WSGI server such as ``gunicorn`` or ``uwsgi``. - Wrap a WSGI function in a `WSGIContainer` and pass it to `.HTTPServer` to - run it. For example:: + Wrap a WSGI application in a `WSGIContainer` to make it implement the Tornado + `.HTTPServer` ``request_callback`` interface. The `WSGIContainer` object can + then be passed to classes from the `tornado.routing` module, + `tornado.web.FallbackHandler`, or to `.HTTPServer` directly. + + This class is intended to let other frameworks (Django, Flask, etc) + run on the Tornado HTTP server and I/O loop. + + Realistic usage will be more complicated, but the simplest possible example uses a + hand-written WSGI application with `.HTTPServer`:: def simple_app(environ, start_response): status = "200 OK" @@ -83,18 +94,46 @@ class WSGIContainer(object): asyncio.run(main()) - This class is intended to let other frameworks (Django, web.py, etc) - run on the Tornado HTTP server and I/O loop. + The recommended pattern is to use the `tornado.routing` module to set up routing + rules between your WSGI application and, typically, a `tornado.web.Application`. + Alternatively, `tornado.web.Application` can be used as the top-level router + and `tornado.web.FallbackHandler` can embed a `WSGIContainer` within it. - The `tornado.web.FallbackHandler` class is often useful for mixing - Tornado and WSGI apps in the same server. See - https://github.com/bdarnell/django-tornado-demo for a complete example. + If the ``executor`` argument is provided, the WSGI application will be executed + on that executor. This must be an instance of `concurrent.futures.Executor`, + typically a ``ThreadPoolExecutor`` (``ProcessPoolExecutor`` is not supported). + If no ``executor`` is given, the application will run on the event loop thread in + Tornado 6.3; this will change to use an internal thread pool by default in + Tornado 7.0. + + .. warning:: + By default, the WSGI application is executed on the event loop's thread. This + limits the server to one request at a time (per process), making it less scalable + than most other WSGI servers. It is therefore highly recommended that you pass + a ``ThreadPoolExecutor`` when constructing the `WSGIContainer`, after verifying + that your application is thread-safe. The default will change to use a + ``ThreadPoolExecutor`` in Tornado 7.0. + + .. versionadded:: 6.3 + The ``executor`` parameter. + + .. deprecated:: 6.3 + The default behavior of running the WSGI application on the event loop thread + is deprecated and will change in Tornado 7.0 to use a thread pool by default. 
""" - def __init__(self, wsgi_application: "WSGIAppType") -> None: + def __init__( + self, + wsgi_application: "WSGIAppType", + executor: Optional[concurrent.futures.Executor] = None, + ) -> None: self.wsgi_application = wsgi_application + self.executor = dummy_executor if executor is None else executor def __call__(self, request: httputil.HTTPServerRequest) -> None: + IOLoop.current().spawn_callback(self.handle_request, request) + + async def handle_request(self, request: httputil.HTTPServerRequest) -> None: data = {} # type: Dict[str, Any] response = [] # type: List[bytes] @@ -113,15 +152,33 @@ class WSGIContainer(object): data["headers"] = headers return response.append - app_response = self.wsgi_application( - WSGIContainer.environ(request), start_response + loop = IOLoop.current() + app_response = await loop.run_in_executor( + self.executor, + self.wsgi_application, + self.environ(request), + start_response, ) try: - response.extend(app_response) - body = b"".join(response) + app_response_iter = iter(app_response) + + def next_chunk() -> Optional[bytes]: + try: + return next(app_response_iter) + except StopIteration: + # StopIteration is special and is not allowed to pass through + # coroutines normally. + return None + + while True: + chunk = await loop.run_in_executor(self.executor, next_chunk) + if chunk is None: + break + response.append(chunk) finally: if hasattr(app_response, "close"): app_response.close() # type: ignore + body = b"".join(response) if not data: raise Exception("WSGI app did not call start_response") @@ -147,9 +204,12 @@ class WSGIContainer(object): request.connection.finish() self._log(status_code, request) - @staticmethod - def environ(request: httputil.HTTPServerRequest) -> Dict[Text, Any]: - """Converts a `tornado.httputil.HTTPServerRequest` to a WSGI environment.""" + def environ(self, request: httputil.HTTPServerRequest) -> Dict[Text, Any]: + """Converts a `tornado.httputil.HTTPServerRequest` to a WSGI environment. + + .. versionchanged:: 6.3 + No longer a static method. 
+ """ hostport = request.host.split(":") if len(hostport) == 2: host = hostport[0] @@ -172,7 +232,7 @@ class WSGIContainer(object): "wsgi.url_scheme": request.protocol, "wsgi.input": BytesIO(escape.utf8(request.body)), "wsgi.errors": sys.stderr, - "wsgi.multithread": False, + "wsgi.multithread": self.executor is not dummy_executor, "wsgi.multiprocess": True, "wsgi.run_once": False, } diff --git a/lib/tvinfo_base/base.py b/lib/tvinfo_base/base.py index a1d57bc4..7173aded 100644 --- a/lib/tvinfo_base/base.py +++ b/lib/tvinfo_base/base.py @@ -8,7 +8,6 @@ import time from exceptions_helper import ex from six import integer_types, iteritems, iterkeys, string_types, text_type -from _23 import list_items, list_values from lib.tvinfo_base.exceptions import * from sg_helpers import calc_age, make_path @@ -53,7 +52,7 @@ tv_src_names = { TVINFO_IMDB: 'imdb', TVINFO_TRAKT: 'trakt', TVINFO_TMDB: 'tmdb', - TVINFO_TVDB_SLUG : 'tvdb slug', + TVINFO_TVDB_SLUG: 'tvdb slug', TVINFO_TRAKT_SLUG: 'trakt slug', TVINFO_SLUG: 'generic slug', @@ -67,7 +66,7 @@ tv_src_names = { log = logging.getLogger('TVInfo') log.addHandler(logging.NullHandler()) -TVInfoShowContainer = {} # type: Dict[ShowContainer] +TVInfoShowContainer = {} # type: Dict[str, ShowContainer] class ShowContainer(dict): @@ -94,7 +93,7 @@ class ShowContainer(dict): if acquired_lock: try: current_time = time.time() - for k, v in list_items(self): + for k, v in list(self.items()): if self.max_age < current_time - v[1]: lock_acquired = self[k].lock.acquire(False) if lock_acquired: @@ -125,7 +124,7 @@ class TVInfoIDs(object): trakt=None, # type: integer_types rage=None, # type: integer_types ids=None # type: Dict[int, integer_types] - ): # type: (...) -> TVInfoIDs + ): ids = ids or {} self.tvdb = tvdb or ids.get(TVINFO_TVDB) self.tmdb = tmdb or ids.get(TVINFO_TMDB) @@ -156,7 +155,7 @@ class TVInfoIDs(object): class TVInfoSocialIDs(object): def __init__(self, twitter=None, instagram=None, facebook=None, wikipedia=None, ids=None): - # type: (str_int, str_int, str_int, str_int, Dict[int, str_int]) -> TVInfoSocialIDs + # type: (str_int, str_int, str_int, str_int, Dict[int, str_int]) -> None ids = ids or {} self.twitter = twitter or ids.get(TVINFO_TWITTER) self.instagram = instagram or ids.get(TVINFO_INSTAGRAM) @@ -231,7 +230,7 @@ class TVInfoImage(object): lang=None, height=None, width=None, aspect_ratio=None): self.img_id = img_id # type: Optional[integer_types] self.image_type = image_type # type: integer_types - self.sizes = sizes # type: Dict[TVInfoImageSize, AnyStr] + self.sizes = sizes # type: Dict[int, AnyStr] self.type_str = type_str # type: AnyStr self.main_image = main_image # type: bool self.rating = rating # type: Optional[Union[float, integer_types]] @@ -243,7 +242,7 @@ class TVInfoImage(object): def __str__(self): return '' % (TVInfoImageType.reverse_str.get(self.image_type, 'unknown'), - ', '.join(TVInfoImageSize.reverse_str.get(s, 'unkown') for s in self.sizes)) + ', '.join(TVInfoImageSize.reverse_str.get(s, 'unknown') for s in self.sizes)) __repr__ = __str__ @@ -409,7 +408,7 @@ class TVInfoShow(dict): match, and so on. """ results = [] - for cur_season in list_values(self): + for cur_season in self.values(): searchresult = cur_season.search(term=term, key=key) if 0 != len(searchresult): results.extend(searchresult) @@ -487,7 +486,7 @@ class TVInfoSeason(dict): instances. 
""" results = [] - for ep in list_values(self): + for ep in self.values(): searchresult = ep.search(term=term, key=key) if None is not searchresult: results.append(searchresult) @@ -679,7 +678,7 @@ class PersonBase(dict): ids=None, # type: Dict thumb_url=None, # type: AnyStr **kwargs # type: Dict - ): # type: (...) -> PersonBase + ): super(PersonBase, self).__init__(**kwargs) self.id = p_id # type: Optional[integer_types] self.name = name # type: Optional[AnyStr] @@ -769,7 +768,7 @@ class TVInfoPerson(PersonBase): real_name=None, # type: AnyStr akas=None, # type: Set[AnyStr] **kwargs # type: Dict - ): # type: (...) -> TVInfoPerson + ): super(TVInfoPerson, self).__init__( p_id=p_id, name=name, image=image, thumb_url=thumb_url, bio=bio, gender=gender, birthdate=birthdate, deathdate=deathdate, country=country, images=images, @@ -795,7 +794,7 @@ class TVInfoPerson(PersonBase): class TVInfoCharacter(PersonBase): def __init__(self, person=None, voice=None, plays_self=None, regular=None, show=None, start_year=None, end_year=None, **kwargs): - # type: (List[TVInfoPerson], bool, bool, bool, TVInfoShow, int, int, Dict) -> TVInfoCharacter + # type: (List[TVInfoPerson], bool, bool, bool, TVInfoShow, int, int, Dict) -> None super(TVInfoCharacter, self).__init__(**kwargs) self.person = person # type: List[TVInfoPerson] self.voice = voice # type: Optional[bool] diff --git a/lib/urllib3/_version.py b/lib/urllib3/_version.py index 308d7f28..7c031661 100644 --- a/lib/urllib3/_version.py +++ b/lib/urllib3/_version.py @@ -1,2 +1,2 @@ # This file is protected via CODEOWNERS -__version__ = "1.26.13" +__version__ = "1.26.14" diff --git a/lib/urllib3/connection.py b/lib/urllib3/connection.py index 10fb36c4..54b96b19 100644 --- a/lib/urllib3/connection.py +++ b/lib/urllib3/connection.py @@ -229,6 +229,11 @@ class HTTPConnection(_HTTPConnection, object): ) def request(self, method, url, body=None, headers=None): + # Update the inner socket's timeout value to send the request. + # This only triggers if the connection is re-used. + if getattr(self, "sock", None) is not None: + self.sock.settimeout(self.timeout) + if headers is None: headers = {} else: diff --git a/lib/urllib3/connectionpool.py b/lib/urllib3/connectionpool.py index 70873927..c23d736b 100644 --- a/lib/urllib3/connectionpool.py +++ b/lib/urllib3/connectionpool.py @@ -379,7 +379,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): timeout_obj = self._get_timeout(timeout) timeout_obj.start_connect() - conn.timeout = timeout_obj.connect_timeout + conn.timeout = Timeout.resolve_default_timeout(timeout_obj.connect_timeout) # Trigger any extra validation we need to do. 
try: diff --git a/lib/urllib3/util/timeout.py b/lib/urllib3/util/timeout.py index ff69593b..78e18a62 100644 --- a/lib/urllib3/util/timeout.py +++ b/lib/urllib3/util/timeout.py @@ -2,9 +2,8 @@ from __future__ import absolute_import import time -# The default socket timeout, used by httplib to indicate that no timeout was -# specified by the user -from socket import _GLOBAL_DEFAULT_TIMEOUT +# The default socket timeout, used by httplib to indicate that no timeout was; specified by the user +from socket import _GLOBAL_DEFAULT_TIMEOUT, getdefaulttimeout from ..exceptions import TimeoutStateError @@ -116,6 +115,10 @@ class Timeout(object): # __str__ provided for backwards compatibility __str__ = __repr__ + @classmethod + def resolve_default_timeout(cls, timeout): + return getdefaulttimeout() if timeout is cls.DEFAULT_TIMEOUT else timeout + @classmethod def _validate_timeout(cls, value, name): """Check that a timeout attribute is valid. diff --git a/lib/urllib3/util/url.py b/lib/urllib3/util/url.py index 3a169a43..e5682d3b 100644 --- a/lib/urllib3/util/url.py +++ b/lib/urllib3/util/url.py @@ -50,7 +50,7 @@ _variations = [ "(?:(?:%(hex)s:){0,6}%(hex)s)?::", ] -UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._!\-~" +UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._\-~" IPV6_PAT = "(?:" + "|".join([x % _subs for x in _variations]) + ")" ZONE_ID_PAT = "(?:%25|%)(?:[" + UNRESERVED_PAT + "]|%[a-fA-F0-9]{2})+" IPV6_ADDRZ_PAT = r"\[" + IPV6_PAT + r"(?:" + ZONE_ID_PAT + r")?\]" @@ -303,7 +303,7 @@ def _normalize_host(host, scheme): def _idna_encode(name): - if name and any([ord(x) > 128 for x in name]): + if name and any(ord(x) >= 128 for x in name): try: import idna except ImportError: diff --git a/recommended.txt b/recommended.txt index fab6cb83..726b5ba7 100644 --- a/recommended.txt +++ b/recommended.txt @@ -1,22 +1,16 @@ --extra-index-url https://gitlab+deploy-token-1599941:UNupqjtDab_zxNzvP2gA@gitlab.com/api/v4/projects/279215/packages/pypi/simple -cffi >= 1.15.1 -cryptography != 38.0.2; '3.7' <= python_version and 'Windows' == platform_system -cryptography != 38.0.2; '3.7' <= python_version and 'Linux' == platform_system and ('x86_64' == platform_machine or 'aarch64' == platform_machine) -cryptography <= 3.3.2; '3.7' <= python_version and 'Linux' == platform_system and 'x86_64' != platform_machine and 'aarch64' != platform_machine -cryptography != 38.0.2; '3.7' <= python_version and 'Darwin' == platform_system and 'x86_64' == platform_machine -cryptography <= 3.3.2; '3.7' <= python_version and 'Darwin' == platform_system and 'x86_64' != platform_machine -cryptography <= 3.3.2; '3.0' > python_version +cffi +cryptography != 38.0.2; 'Windows' == platform_system +cryptography != 38.0.2; 'Linux' == platform_system and ('x86_64' == platform_machine or 'aarch64' == platform_machine) +cryptography <= 3.3.2; 'Linux' == platform_system and 'x86_64' != platform_machine and 'aarch64' != platform_machine +cryptography != 38.0.2; 'Darwin' == platform_system and 'x86_64' == platform_machine +cryptography <= 3.3.2; 'Darwin' == platform_system and 'x86_64' != platform_machine lxml >= 4.9.2; 'Windows' == platform_system lxml; 'Windows' != platform_system -orjson; '3.7' <= python_version and 'Windows' == platform_system -orjson; '3.7' <= python_version and 'Linux' == platform_system and ('x86_64' == platform_machine or 'aarch64' == platform_machine or 'armv7l' == platform_machine) -pip >= 22.2.2; '3.7' <= python_version -pip <= 20.3.4; '3.0' > 
python_version -Levenshtein >= 0.20.5; '3.11' >= python_version and '3.7' <= python_version -rapidfuzz < 3.0.0; '3.7' <= python_version -python-Levenshtein == 0.12.0; '3.0' > python_version -regex >= 2022.9.13; '3.11' >= python_version and '3.7' <= python_version -regex <= 2020.10.28; '3.0' > python_version -scandir >= 1.10.0; '3.0' > python_version -setuptools >= 65.2.0; '3.7' <= python_version -setuptools <= 44.1.1; '3.0' > python_version +orjson; 'Windows' == platform_system +orjson; 'Linux' == platform_system and ('x86_64' == platform_machine or 'aarch64' == platform_machine or 'armv7l' == platform_machine) +pip +Levenshtein +rapidfuzz < 3.0.0 +regex +setuptools diff --git a/requirements.txt b/requirements.txt index c09c9121..2abbce53 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1 @@ -Cheetah3 <= 3.2.6, != 3.2.6.post2; '3.0' > python_version -CT3; '3.7' <= python_version +CT3 diff --git a/sickgear.py b/sickgear.py index 3f36de17..056fcd3b 100755 --- a/sickgear.py +++ b/sickgear.py @@ -43,7 +43,8 @@ versions = [((3, 7, 1), (3, 8, 16)), ((3, 9, 0), (3, 9, 2)), ((3, 9, 4), (3, 9, 16)), ((3, 10, 0), (3, 11, 3))] # inclusive version ranges if not any(list(map(lambda v: v[0] <= sys.version_info[:3] <= v[1], versions))) and not int(os.environ.get('PYT', 0)): - print('Python %s.%s.%s detected.' % sys.version_info[:3]) + major, minor, micro = sys.version_info[:3] + print('Python %s.%s.%s detected.' % (major, minor, micro)) print('Sorry, SickGear requires a Python version %s' % ', '.join(map( lambda r: '%s - %s' % tuple(map(lambda v: str(v).replace(',', '.')[1:-1], r)), versions))) sys.exit(1) @@ -90,7 +91,7 @@ from multiprocessing import freeze_support from configobj import ConfigObj # noinspection PyPep8Naming -from encodingKludge import EXIT_BAD_ENCODING, SYS_ENCODING +from encodingKludge import SYS_ENCODING from exceptions_helper import ex import sickgear from sickgear import db, logger, name_cache, network_timezones @@ -189,7 +190,7 @@ class SickGear(object): rc.load_msg = load_msg rc.run(max_v) else: - print(u'ERROR: Could not download Rollback Module.') + print('ERROR: Could not download Rollback Module.') except (BaseException, Exception): pass @@ -200,10 +201,6 @@ class SickGear(object): sickgear.PROG_DIR = os.path.dirname(sickgear.MY_FULLNAME) sickgear.DATA_DIR = sickgear.PROG_DIR sickgear.MY_ARGS = sys.argv[1:] - if EXIT_BAD_ENCODING: - print('Sorry, you MUST add the SickGear folder to the PYTHONPATH environment variable') - print('or find another way to force Python to use %s for string encoding.' % SYS_ENCODING) - sys.exit(1) sickgear.SYS_ENCODING = SYS_ENCODING legacy_runner = globals().get('_legacy_sickgear_runner') if not legacy_runner: @@ -229,7 +226,7 @@ class SickGear(object): if o in ('-h', '--help'): sys.exit(self.help_message()) - # For now we'll just silence the logging + # For now, we'll just silence the logging if o in ('-q', '--quiet'): self.console_logging = False @@ -293,13 +290,13 @@ class SickGear(object): if self.run_as_daemon: pid_dir = os.path.dirname(self.pid_file) if not os.access(pid_dir, os.F_OK): - sys.exit(u"PID dir: %s doesn't exist. Exiting." % pid_dir) + sys.exit(f"PID dir: {pid_dir} doesn't exist. Exiting.") if not os.access(pid_dir, os.W_OK): - sys.exit(u'PID dir: %s must be writable (write permissions). Exiting.' % pid_dir) + sys.exit(f'PID dir: {pid_dir} must be writable (write permissions). Exiting.') else: if self.console_logging: - print(u'Not running in daemon mode. 
PID file creation disabled') + print('Not running in daemon mode. PID file creation disabled') self.create_pid = False @@ -312,27 +309,27 @@ class SickGear(object): try: os.makedirs(sickgear.DATA_DIR, 0o744) except os.error: - sys.exit(u'Unable to create data directory: %s Exiting.' % sickgear.DATA_DIR) + sys.exit(f'Unable to create data directory: {sickgear.DATA_DIR} Exiting.') # Make sure we can write to the data dir if not os.access(sickgear.DATA_DIR, os.W_OK): - sys.exit(u'Data directory: %s must be writable (write permissions). Exiting.' % sickgear.DATA_DIR) + sys.exit(f'Data directory: {sickgear.DATA_DIR} must be writable (write permissions). Exiting.') # Make sure we can write to the config file if not os.access(sickgear.CONFIG_FILE, os.W_OK): if os.path.isfile(sickgear.CONFIG_FILE): - sys.exit(u'Config file: %s must be writeable (write permissions). Exiting.' % sickgear.CONFIG_FILE) + sys.exit(f'Config file: {sickgear.CONFIG_FILE} must be writeable (write permissions). Exiting.') elif not os.access(os.path.dirname(sickgear.CONFIG_FILE), os.W_OK): - sys.exit(u'Config file directory: %s must be writeable (write permissions). Exiting' - % os.path.dirname(sickgear.CONFIG_FILE)) + sys.exit(f'Config file directory: {os.path.dirname(sickgear.CONFIG_FILE)}' + f' must be writeable (write permissions). Exiting') os.chdir(sickgear.DATA_DIR) if self.console_logging: - print(u'Starting up SickGear from %s' % sickgear.CONFIG_FILE) + print(f'Starting up SickGear from {sickgear.CONFIG_FILE}') # Load the config and publish it to the sickgear package if not os.path.isfile(sickgear.CONFIG_FILE): - print(u'Unable to find "%s", all settings will be default!' % sickgear.CONFIG_FILE) + print(f'Unable to find "{sickgear.CONFIG_FILE}", all settings will be default!') sickgear.CFG = ConfigObj(sickgear.CONFIG_FILE) try: @@ -356,7 +353,7 @@ class SickGear(object): sickgear.initialize(console_logging=self.console_logging) if self.forced_port: - logger.log(u'Forcing web server to port %s' % self.forced_port) + logger.log(f'Forcing web server to port {self.forced_port}') self.start_port = self.forced_port else: self.start_port = sickgear.WEB_PORT @@ -406,12 +403,11 @@ class SickGear(object): self.webserver.wait_server_start() sickgear.started = True except (BaseException, Exception): - logger.log(u'Unable to start web server, is something else running on port %d?' 
% self.start_port, - logger.ERROR) + logger.error(f'Unable to start web server, is something else running on port {self.start_port:d}?') if self.run_as_systemd: self.exit(0) if sickgear.LAUNCH_BROWSER and not self.no_launch: - logger.log(u'Launching browser and exiting', logger.ERROR) + logger.error('Launching browser and exiting') sickgear.launch_browser(self.start_port) self.exit(1) @@ -431,7 +427,7 @@ class SickGear(object): ('sickbeard.db', sickgear.mainDB.MIN_DB_VERSION, sickgear.mainDB.MAX_DB_VERSION, sickgear.mainDB.TEST_BASE_VERSION, 'MainDb') ]: - cur_db_version = db.DBConnection(d).checkDBVersion() + cur_db_version = db.DBConnection(d).check_db_version() # handling of standalone TEST db versions load_msg = 'Downgrading %s to production version' % d @@ -440,47 +436,46 @@ class SickGear(object): print('Your [%s] database version (%s) is a test db version and doesn\'t match SickGear required ' 'version (%s), downgrading to production db' % (d, cur_db_version, max_v)) self.execute_rollback(mo, max_v, load_msg) - cur_db_version = db.DBConnection(d).checkDBVersion() + cur_db_version = db.DBConnection(d).check_db_version() if 100000 <= cur_db_version: - print(u'Rollback to production failed.') - sys.exit(u'If you have used other forks, your database may be unusable due to their changes') + print('Rollback to production failed.') + sys.exit('If you have used other forks, your database may be unusable due to their changes') if 100000 <= max_v and None is not base_v: max_v = base_v # set max_v to the needed base production db for test_db - print(u'Rollback to production of [%s] successful.' % d) + print(f'Rollback to production of [{d}] successful.') sickgear.classes.loading_msg.set_msg_progress(load_msg, 'Finished') - # handling of production version higher then current base of test db + # handling of production version higher than current base of test db if isinstance(base_v, integer_types) and max_v >= 100000 > cur_db_version > base_v: sickgear.classes.loading_msg.set_msg_progress(load_msg, 'Rollback') print('Your [%s] database version (%s) is a db version and doesn\'t match SickGear required ' 'version (%s), downgrading to production base db' % (d, cur_db_version, max_v)) self.execute_rollback(mo, base_v, load_msg) - cur_db_version = db.DBConnection(d).checkDBVersion() + cur_db_version = db.DBConnection(d).check_db_version() if 100000 <= cur_db_version: - print(u'Rollback to production base failed.') - sys.exit(u'If you have used other forks, your database may be unusable due to their changes') + print('Rollback to production base failed.') + sys.exit('If you have used other forks, your database may be unusable due to their changes') if 100000 <= max_v and None is not base_v: max_v = base_v # set max_v to the needed base production db for test_db - print(u'Rollback to production base of [%s] successful.' 
% d) + print(f'Rollback to production base of [{d}] successful.') sickgear.classes.loading_msg.set_msg_progress(load_msg, 'Finished') # handling of production db versions if 0 < cur_db_version < 100000: if cur_db_version < min_v: - print(u'Your [%s] database version (%s) is too old to migrate from with this version of SickGear' - % (d, cur_db_version)) - sys.exit(u'Upgrade using a previous version of SG first,' - + u' or start with no database file to begin fresh') + print(f'Your [{d}] database version ({cur_db_version})' + f' is too old to migrate from with this version of SickGear') + sys.exit('Upgrade using a previous version of SG first,' + ' or start with no database file to begin fresh') if cur_db_version > max_v: sickgear.classes.loading_msg.set_msg_progress(load_msg, 'Rollback') - print(u'Your [%s] database version (%s) has been incremented past' - u' what this version of SickGear supports. Trying to rollback now. Please wait...' % - (d, cur_db_version)) + print(f'Your [{d}] database version ({cur_db_version}) has been incremented past what this' + f' version of SickGear supports. Trying to rollback now. Please wait...') self.execute_rollback(mo, max_v, load_msg) - if db.DBConnection(d).checkDBVersion() > max_v: - print(u'Rollback failed.') - sys.exit(u'If you have used other forks, your database may be unusable due to their changes') - print(u'Rollback of [%s] successful.' % d) + if db.DBConnection(d).check_db_version() > max_v: + print('Rollback failed.') + sys.exit('If you have used other forks, your database may be unusable due to their changes') + print(f'Rollback of [{d}] successful.') sickgear.classes.loading_msg.set_msg_progress(load_msg, 'Finished') # migrate the config if it needs it @@ -504,9 +499,9 @@ class SickGear(object): if os.path.exists(restore_dir): sickgear.classes.loading_msg.message = 'Restoring files' if self.restore(restore_dir, sickgear.DATA_DIR): - logger.log(u'Restore successful...') + logger.log('Restore successful...') else: - logger.log_error_and_exit(u'Restore FAILED!') + logger.log_error_and_exit('Restore FAILED!') # refresh network timezones sickgear.classes.loading_msg.message = 'Checking network timezones' @@ -557,7 +552,7 @@ class SickGear(object): # Build internal name cache sickgear.classes.loading_msg.message = 'Build name cache' - name_cache.buildNameCache() + name_cache.build_name_cache() # load all ids from xem sickgear.classes.loading_msg.message = 'Loading xem data' @@ -672,7 +667,7 @@ class SickGear(object): # Write pid if self.create_pid: pid = str(os.getpid()) - logger.log(u'Writing PID: %s to %s' % (pid, self.pid_file)) + logger.log(f'Writing PID: {pid} to {self.pid_file}') try: os.fdopen(os.open(self.pid_file, os.O_CREAT | os.O_WRONLY, 0o644), 'w').write('%s\n' % pid) except (BaseException, Exception) as er: @@ -708,7 +703,7 @@ class SickGear(object): Populates the showList with shows from the database """ - logger.log(u'Loading initial show list') + logger.log('Loading initial show list') my_db = db.DBConnection(row_type='dict') sql_result = my_db.select( @@ -752,8 +747,7 @@ class SickGear(object): sickgear.showDict[show_obj.sid_int] = show_obj _ = show_obj.ids except (BaseException, Exception) as err: - logger.log('There was an error creating the show in %s: %s' % ( - cur_result['location'], ex(err)), logger.ERROR) + logger.error('There was an error creating the show in %s: %s' % (cur_result['location'], ex(err))) sickgear.webserve.Home.make_showlist_unique_names() @staticmethod @@ -772,6 +766,7 @@ class SickGear(object): return 
False def shutdown(self, ev_type): + logger.debug(f'Shutdown ev_type:{ev_type}, sickgear.started:{sickgear.started}') if sickgear.started: # stop all tasks sickgear.halt() @@ -803,13 +798,13 @@ class SickGear(object): popen_list += sickgear.MY_ARGS if self.run_as_systemd: - logger.log(u'Restarting SickGear with exit(1) handler and %s' % popen_list) + logger.log(f'Restarting SickGear with exit(1) handler and {popen_list}') logger.close() self.exit(1) if '--nolaunch' not in popen_list: popen_list += ['--nolaunch'] - logger.log(u'Restarting SickGear with %s' % popen_list) + logger.log(f'Restarting SickGear with {popen_list}') logger.close() from _23 import Popen with Popen(popen_list, cwd=os.getcwd()): @@ -820,7 +815,7 @@ class SickGear(object): @staticmethod def exit(code): - # noinspection PyProtectedMember + # noinspection PyProtectedMember,PyUnresolvedReferences os._exit(code) diff --git a/sickgear/__init__.py b/sickgear/__init__.py index c8e3ff06..305952a2 100644 --- a/sickgear/__init__.py +++ b/sickgear/__init__.py @@ -34,11 +34,9 @@ import threading import uuid import zlib -# noinspection PyPep8Naming -import encodingKludge as ek from . import classes, db, helpers, image_cache, indexermapper, logger, metadata, naming, people_queue, providers, \ scene_exceptions, scene_numbering, scheduler, search_backlog, search_propers, search_queue, search_recent, \ - show_queue, show_updater, subtitles, trakt_helpers, traktChecker, version_checker, watchedstate_queue + show_queue, show_updater, subtitles, trakt_helpers, version_checker, watchedstate_queue from . import auto_post_processer, properFinder # must come after the above imports from .common import SD, SKIPPED, USER_AGENT from .config import check_section, check_setting_int, check_setting_str, ConfigMigrator, minimax @@ -57,8 +55,8 @@ from browser_ua import get_ua from configobj import ConfigObj from api_trakt import TraktAPI -from _23 import b64encodestring, decode_bytes, filter_iter, list_items, map_list, ordered_dict, scandir -from six import iteritems, PY2, string_types +from _23 import b64encodestring, decode_bytes, scandir +from six import iteritems, string_types import sg_helpers # noinspection PyUnreachableCode @@ -121,9 +119,9 @@ REMOVE_FILENAME_CHARS = None IMPORT_DEFAULT_CHECKED_SHOWS = 0 # /non ui settings -providerList = [] -newznabProviderList = [] -torrentRssProviderList = [] +provider_list = [] +newznab_providers = [] +torrent_rss_providers = [] metadata_provider_dict = {} MODULE_UPDATE_STRING = None @@ -513,7 +511,7 @@ ANIDB_USE_MYLIST = False ADBA_CONNECTION = None # type: Connection ANIME_TREAT_AS_HDTV = False -GUI_NAME = None +GUI_NAME = '' DEFAULT_HOME = None FANART_LIMIT = None FANART_PANEL = None @@ -657,7 +655,7 @@ def initialize(console_logging=True): def init_stage_1(console_logging): # Misc - global showList, showDict, switched_shows, providerList, newznabProviderList, torrentRssProviderList, \ + global showList, showDict, switched_shows, provider_list, newznab_providers, torrent_rss_providers, \ WEB_HOST, WEB_ROOT, ACTUAL_CACHE_DIR, CACHE_DIR, ZONEINFO_DIR, ADD_SHOWS_WO_DIR, ADD_SHOWS_METALANG, \ CREATE_MISSING_SHOW_DIRS, SHOW_DIRS_WITH_DOTS, \ RECENTSEARCH_STARTUP, NAMING_FORCE_FOLDERS, SOCKET_TIMEOUT, DEBUG, TVINFO_DEFAULT, \ @@ -668,7 +666,7 @@ def init_stage_1(console_logging): # Add Show Defaults global QUALITY_DEFAULT, WANTED_BEGIN_DEFAULT, WANTED_LATEST_DEFAULT, SHOW_TAG_DEFAULT, PAUSE_DEFAULT, \ STATUS_DEFAULT, SCENE_DEFAULT, SUBTITLES_DEFAULT, FLATTEN_FOLDERS_DEFAULT, ANIME_DEFAULT - # Post processing + 
# Post-processing global KEEP_PROCESSED_DIR, PROCESS_LAST_DIR, PROCESS_LAST_METHOD, PROCESS_LAST_CLEANUP # Views global GUI_NAME, HOME_LAYOUT, FOOTER_TIME_LAYOUT, POSTER_SORTBY, POSTER_SORTDIR, DISPLAY_SHOW_SPECIALS, \ @@ -805,15 +803,15 @@ def init_stage_1(console_logging): CACHE_DIR = ACTUAL_CACHE_DIR if not helpers.make_dir(CACHE_DIR): - logger.log(u'!!! Creating local cache dir failed, using system default', logger.ERROR) + logger.error('!!! creating local cache dir failed, using system default') CACHE_DIR = None # clean cache folders if CACHE_DIR: helpers.clear_cache() - ZONEINFO_DIR = ek.ek(os.path.join, CACHE_DIR, 'zoneinfo') - if not ek.ek(os.path.isdir, ZONEINFO_DIR) and not helpers.make_path(ZONEINFO_DIR): - logger.log(u'!!! Creating local zoneinfo dir failed', logger.ERROR) + ZONEINFO_DIR = os.path.join(CACHE_DIR, 'zoneinfo') + if not os.path.isdir(ZONEINFO_DIR) and not helpers.make_path(ZONEINFO_DIR): + logger.error('!!! creating local zoneinfo dir failed') sg_helpers.CACHE_DIR = CACHE_DIR sg_helpers.DATA_DIR = DATA_DIR @@ -832,7 +830,7 @@ def init_stage_1(console_logging): TRIM_ZERO = bool(check_setting_int(CFG, 'GUI', 'trim_zero', 0)) DATE_PRESET = check_setting_str(CFG, 'GUI', 'date_preset', '%x') TIME_PRESET_W_SECONDS = check_setting_str(CFG, 'GUI', 'time_preset', '%I:%M:%S %p') - TIME_PRESET = TIME_PRESET_W_SECONDS.replace(u':%S', u'') + TIME_PRESET = TIME_PRESET_W_SECONDS.replace(':%S', '') TIMEZONE_DISPLAY = check_setting_str(CFG, 'GUI', 'timezone_display', 'network') SHOW_TAGS = check_setting_str(CFG, 'GUI', 'show_tags', 'Show List').split(',') SHOW_TAG_DEFAULT = check_setting_str(CFG, 'GUI', 'show_tag_default', @@ -844,7 +842,7 @@ def init_stage_1(console_logging): LOG_DIR = os.path.normpath(os.path.join(DATA_DIR, ACTUAL_LOG_DIR)) if not helpers.make_dir(LOG_DIR): - logger.log(u'!!! No log folder, logging to screen only!', logger.ERROR) + logger.error('!!! 
no log folder, logging to screen only!') FILE_LOGGING_PRESET = check_setting_str(CFG, 'General', 'file_logging_preset', 'DEBUG') if bool(check_setting_int(CFG, 'General', 'file_logging_db', 0)): @@ -1054,8 +1052,8 @@ def init_stage_1(console_logging): NZBGET_SKIP_PM = bool(check_setting_int(CFG, 'NZBGet', 'nzbget_skip_process_media', 0)) try: - ng_script_file = ek.ek(os.path.join, ek.ek(os.path.dirname, ek.ek(os.path.dirname, __file__)), - 'autoProcessTV', 'SickGear-NG', 'SickGear-NG.py') + ng_script_file = os.path.join(os.path.dirname(os.path.dirname(__file__)), + 'autoProcessTV', 'SickGear-NG', 'SickGear-NG.py') with io.open(ng_script_file, 'r', encoding='utf8') as ng: text = ng.read() NZBGET_SCRIPT_VERSION = re.search(r""".*version: (\d+\.\d+)""", text, flags=re.M).group(1) @@ -1355,10 +1353,10 @@ def init_stage_1(console_logging): EPISODE_VIEW_MISSED_RANGE = check_setting_int(CFG, 'GUI', 'episode_view_missed_range', 7) HISTORY_LAYOUT = check_setting_str(CFG, 'GUI', 'history_layout', 'detailed') - BROWSELIST_HIDDEN = map_list( + BROWSELIST_HIDDEN = list(map( lambda y: TVidProdid.glue in y and y or '%s%s%s' % ( (TVINFO_TVDB, TVINFO_IMDB)[bool(helpers.parse_imdb_id(y))], TVidProdid.glue, y), - [x.strip() for x in check_setting_str(CFG, 'GUI', 'browselist_hidden', '').split('|~|') if x.strip()]) + [x.strip() for x in check_setting_str(CFG, 'GUI', 'browselist_hidden', '').split('|~|') if x.strip()])) BROWSELIST_MRU = sg_helpers.ast_eval(check_setting_str(CFG, 'GUI', 'browselist_prefs', None), {}) BACKUP_DB_PATH = check_setting_str(CFG, 'Backup', 'backup_db_path', '') @@ -1372,16 +1370,16 @@ def init_stage_1(console_logging): sg_helpers.DOMAIN_FAILURES.load_from_db() # initialize NZB and TORRENT providers - providerList = providers.makeProviderList() + provider_list = providers.provider_modules() NEWZNAB_DATA = check_setting_str(CFG, 'Newznab', 'newznab_data', '') - newznabProviderList = providers.getNewznabProviderList(NEWZNAB_DATA) + newznab_providers = providers.newznab_source_list(NEWZNAB_DATA) torrentrss_data = check_setting_str(CFG, 'TorrentRss', 'torrentrss_data', '') - torrentRssProviderList = providers.getTorrentRssProviderList(torrentrss_data) + torrent_rss_providers = providers.torrent_rss_source_list(torrentrss_data) # dynamically load provider settings - for torrent_prov in [curProvider for curProvider in providers.sortedProviderList() + for torrent_prov in [curProvider for curProvider in providers.sorted_sources() if GenericProvider.TORRENT == curProvider.providerType]: prov_id = torrent_prov.get_id() prov_id_uc = torrent_prov.get_id().upper() @@ -1426,7 +1424,7 @@ def init_stage_1(console_logging): elif isinstance(default, int): setattr(torrent_prov, attr, check_setting_int(CFG, prov_id_uc, attr_check, default)) - for nzb_prov in [curProvider for curProvider in providers.sortedProviderList() + for nzb_prov in [curProvider for curProvider in providers.sorted_sources() if GenericProvider.NZB == curProvider.providerType]: prov_id = nzb_prov.get_id() prov_id_uc = nzb_prov.get_id().upper() @@ -1452,10 +1450,10 @@ def init_stage_1(console_logging): setattr(nzb_prov, attr, check_setting_str(CFG, prov_id_uc, attr_check, default)) elif isinstance(default, int): setattr(nzb_prov, attr, check_setting_int(CFG, prov_id_uc, attr_check, default)) - for cur_provider in filter_iter(lambda p: abs(zlib.crc32(decode_bytes(p.name))) + 40000400 in ( + for cur_provider in filter(lambda p: abs(zlib.crc32(decode_bytes(p.name))) + 40000400 in ( 1449593765, 1597250020, 1524942228, 160758496, 2925374331 
) or (p.url and abs(zlib.crc32(decode_bytes(re.sub(r'[./]', '', p.url[-10:])))) + 40000400 in ( - 2417143804,)), providers.sortedProviderList()): + 2417143804,)), providers.sorted_sources()): header = {'User-Agent': get_ua()} if hasattr(cur_provider, 'nn'): cur_provider.nn = False @@ -1490,7 +1488,7 @@ def init_stage_1(console_logging): ('docker/other', 'snap')['snap' in CUR_COMMIT_HASH] if not os.path.isfile(CONFIG_FILE): - logger.log(u'Unable to find \'%s\', all settings will be default!' % CONFIG_FILE, logger.DEBUG) + logger.debug(f'Unable to find \'{CONFIG_FILE}\', all settings will be default!') update_config = True # Get expected config version @@ -1507,24 +1505,6 @@ def init_stage_1(console_logging): pass logger.sb_log_instance.init_logging(console_logging=console_logging) - if PY2: - try: - import _scandir - except ImportError: - _scandir = None - - try: - import ctypes - except ImportError: - ctypes = None - - if None is not _scandir and None is not ctypes and not getattr(_scandir, 'DirEntry', None): - MODULE_UPDATE_STRING = \ - 'Your scandir binary module is outdated, using the slow but newer Python module.' \ - '
Upgrade the binary at a command prompt with' \ - ' # python -m pip install -U scandir' \ - '
Important: You must Shutdown SickGear before upgrading' - showList = [] showDict = {} @@ -1559,19 +1539,19 @@ def init_stage_2(): # initialize main database my_db = db.DBConnection() - db.MigrationCode(my_db) + db.migration_code(my_db) # initialize the cache database my_db = db.DBConnection('cache.db') - db.upgradeDatabase(my_db, cache_db.InitialSchema) + db.upgrade_database(my_db, cache_db.InitialSchema) # initialize the failed downloads database my_db = db.DBConnection('failed.db') - db.upgradeDatabase(my_db, failed_db.InitialSchema) + db.upgrade_database(my_db, failed_db.InitialSchema) # fix up any db problems my_db = db.DBConnection() - db.sanityCheckDatabase(my_db, mainDB.MainSanityCheck) + db.sanity_check_db(my_db, mainDB.MainSanityCheck) # initialize metadata_providers metadata_provider_dict = metadata.get_metadata_generator_dict() @@ -1594,40 +1574,40 @@ def init_stage_2(): update_now = datetime.timedelta(minutes=0) update_software_scheduler = scheduler.Scheduler( version_checker.SoftwareUpdater(), - cycleTime=datetime.timedelta(hours=UPDATE_INTERVAL), - threadName='SOFTWAREUPDATER', + cycle_time=datetime.timedelta(hours=UPDATE_INTERVAL), + thread_name='SOFTWAREUPDATER', silent=False) update_packages_scheduler = scheduler.Scheduler( version_checker.PackagesUpdater(), - cycleTime=datetime.timedelta(hours=UPDATE_PACKAGES_INTERVAL), + cycle_time=datetime.timedelta(hours=UPDATE_PACKAGES_INTERVAL), # run_delay=datetime.timedelta(minutes=2), - threadName='PACKAGESUPDATER', + thread_name='PACKAGESUPDATER', silent=False) show_queue_scheduler = scheduler.Scheduler( show_queue.ShowQueue(), - cycleTime=datetime.timedelta(seconds=3), - threadName='SHOWQUEUE') + cycle_time=datetime.timedelta(seconds=3), + thread_name='SHOWQUEUE') show_update_scheduler = scheduler.Scheduler( show_updater.ShowUpdater(), - cycleTime=datetime.timedelta(hours=1), + cycle_time=datetime.timedelta(hours=1), start_time=datetime.time(hour=SHOW_UPDATE_HOUR), - threadName='SHOWUPDATER', - prevent_cycle_run=show_queue_scheduler.action.isShowUpdateRunning) # 3AM + thread_name='SHOWUPDATER', + prevent_cycle_run=show_queue_scheduler.action.is_show_update_running) # 3AM people_queue_scheduler = scheduler.Scheduler( people_queue.PeopleQueue(), - cycleTime=datetime.timedelta(seconds=3), - threadName='PEOPLEQUEUE' + cycle_time=datetime.timedelta(seconds=3), + thread_name='PEOPLEQUEUE' ) # searchers search_queue_scheduler = scheduler.Scheduler( search_queue.SearchQueue(), - cycleTime=datetime.timedelta(seconds=3), - threadName='SEARCHQUEUE') + cycle_time=datetime.timedelta(seconds=3), + thread_name='SEARCHQUEUE') init_search_delay = int(os.environ.get('INIT_SEARCH_DELAY', 0)) @@ -1635,13 +1615,13 @@ def init_stage_2(): update_interval = datetime.timedelta(minutes=(RECENTSEARCH_INTERVAL, 1)[4499 == RECENTSEARCH_INTERVAL]) recent_search_scheduler = scheduler.Scheduler( search_recent.RecentSearcher(), - cycleTime=update_interval, + cycle_time=update_interval, run_delay=update_now if RECENTSEARCH_STARTUP else datetime.timedelta(minutes=init_search_delay or 5), - threadName='RECENTSEARCHER', + thread_name='RECENTSEARCHER', prevent_cycle_run=search_queue_scheduler.action.is_recentsearch_in_progress) - if [x for x in providers.sortedProviderList() if x.is_active() and - getattr(x, 'enable_backlog', None) and GenericProvider.NZB == x.providerType]: + if [x for x in providers.sorted_sources() + if x.is_active() and getattr(x, 'enable_backlog', None) and GenericProvider.NZB == x.providerType]: nextbacklogpossible = 
datetime.datetime.fromtimestamp( search_backlog.BacklogSearcher().last_runtime) + datetime.timedelta(hours=23) now = datetime.datetime.now() @@ -1657,9 +1637,9 @@ def init_stage_2(): backlogdelay = 10 backlog_search_scheduler = search_backlog.BacklogSearchScheduler( search_backlog.BacklogSearcher(), - cycleTime=datetime.timedelta(minutes=get_backlog_cycle_time()), + cycle_time=datetime.timedelta(minutes=get_backlog_cycle_time()), run_delay=datetime.timedelta(minutes=init_search_delay or backlogdelay), - threadName='BACKLOG', + thread_name='BACKLOG', prevent_cycle_run=search_queue_scheduler.action.is_standard_backlog_in_progress) propers_searcher = search_propers.ProperSearcher() @@ -1672,26 +1652,22 @@ def init_stage_2(): proper_finder_scheduler = scheduler.Scheduler( propers_searcher, - cycleTime=datetime.timedelta(days=1), + cycle_time=datetime.timedelta(days=1), run_delay=datetime.timedelta(minutes=init_search_delay or properdelay), - threadName='FINDPROPERS', + thread_name='FINDPROPERS', prevent_cycle_run=search_queue_scheduler.action.is_propersearch_in_progress) # processors media_process_scheduler = scheduler.Scheduler( auto_post_processer.PostProcesser(), - cycleTime=datetime.timedelta(minutes=MEDIAPROCESS_INTERVAL), - threadName='POSTPROCESSER', + cycle_time=datetime.timedelta(minutes=MEDIAPROCESS_INTERVAL), + thread_name='POSTPROCESSER', silent=not PROCESS_AUTOMATICALLY) - """ - trakt_checker_scheduler = scheduler.Scheduler( - traktChecker.TraktChecker(), cycleTime=datetime.timedelta(hours=1), - threadName='TRAKTCHECKER', silent=not USE_TRAKT) - """ + subtitles_finder_scheduler = scheduler.Scheduler( subtitles.SubtitlesFinder(), - cycleTime=datetime.timedelta(hours=SUBTITLES_FINDER_INTERVAL), - threadName='FINDSUBTITLES', + cycle_time=datetime.timedelta(hours=SUBTITLES_FINDER_INTERVAL), + thread_name='FINDSUBTITLES', silent=not USE_SUBTITLES) background_mapping_task = threading.Thread(name='MAPPINGSUPDATER', target=indexermapper.load_mapped_ids, @@ -1699,28 +1675,28 @@ def init_stage_2(): watched_state_queue_scheduler = scheduler.Scheduler( watchedstate_queue.WatchedStateQueue(), - cycleTime=datetime.timedelta(seconds=3), - threadName='WATCHEDSTATEQUEUE') + cycle_time=datetime.timedelta(seconds=3), + thread_name='WATCHEDSTATEQUEUE') emby_watched_state_scheduler = scheduler.Scheduler( EmbyWatchedStateUpdater(), - cycleTime=datetime.timedelta(minutes=EMBY_WATCHEDSTATE_INTERVAL), + cycle_time=datetime.timedelta(minutes=EMBY_WATCHEDSTATE_INTERVAL), run_delay=datetime.timedelta(minutes=5), - threadName='EMBYWATCHEDSTATE') + thread_name='EMBYWATCHEDSTATE') plex_watched_state_scheduler = scheduler.Scheduler( PlexWatchedStateUpdater(), - cycleTime=datetime.timedelta(minutes=PLEX_WATCHEDSTATE_INTERVAL), + cycle_time=datetime.timedelta(minutes=PLEX_WATCHEDSTATE_INTERVAL), run_delay=datetime.timedelta(minutes=5), - threadName='PLEXWATCHEDSTATE') + thread_name='PLEXWATCHEDSTATE') MEMCACHE['history_tab_limit'] = 11 MEMCACHE['history_tab'] = History.menu_tab(MEMCACHE['history_tab_limit']) try: - for f in ek.ek(scandir, ek.ek(os.path.join, PROG_DIR, 'gui', GUI_NAME, 'images', 'flags')): + for f in scandir(os.path.join(PROG_DIR, 'gui', GUI_NAME, 'images', 'flags')): if f.is_file(): - MEMCACHE_FLAG_IMAGES[ek.ek(os.path.splitext, f.name)[0].lower()] = True + MEMCACHE_FLAG_IMAGES[os.path.splitext(f.name)[0].lower()] = True except (BaseException, Exception): pass @@ -1752,7 +1728,7 @@ def start(): and True is not TVInfoAPI(i).config.get('people_only')] background_mapping_task.start() - for p in 
providers.sortedProviderList(): + for p in providers.sorted_sources(): if p.is_active() and getattr(p, 'ping_iv', None): # noinspection PyProtectedMember provider_ping_thread_pool[p.get_id()] = threading.Thread( @@ -1771,19 +1747,20 @@ def restart(soft=True, update_pkg=None): if update_pkg: MY_ARGS.append('--update-pkg') + logger.log('Trigger event restart') events.put(events.SystemEvent.RESTART) else: halt() save_all() - logger.log(u'Re-initializing all data') + logger.log('Re-initializing all data') initialize() def sig_handler(signum=None, _=None): is_ctrlbreak = 'win32' == sys.platform and signal.SIGBREAK == signum - msg = u'Signal "%s" found' % (signal.SIGINT == signum and 'CTRL-C' or is_ctrlbreak and 'CTRL+BREAK' or - signal.SIGTERM == signum and 'Termination' or signum) + msg = 'Signal "%s" found' % (signal.SIGINT == signum and 'CTRL-C' or is_ctrlbreak and 'CTRL+BREAK' or + signal.SIGTERM == signum and 'Termination' or signum) if None is signum or signum in (signal.SIGINT, signal.SIGTERM) or is_ctrlbreak: logger.log('%s, saving and exiting...' % msg) events.put(events.SystemEvent.SHUTDOWN) @@ -1794,8 +1771,10 @@ def sig_handler(signum=None, _=None): def halt(): global __INITIALIZED__, started + logger.debug('Check INIT_LOCK on halt') with INIT_LOCK: + logger.debug(f'Check __INITIALIZED__ on halt: {__INITIALIZED__}') if __INITIALIZED__: logger.log('Exiting threads') @@ -1852,12 +1831,12 @@ def save_all(): global showList # write all shows - logger.log(u'Saving all shows to the database') + logger.log('Saving all shows to the database') for show_obj in showList: # type: tv.TVShow show_obj.save_to_db() # save config - logger.log(u'Saving config file to disk') + logger.log('Saving config file to disk') save_config() @@ -1865,9 +1844,9 @@ def save_config(): new_config = ConfigObj() new_config.filename = CONFIG_FILE - # For passwords you must include the word `password` in the item_name and + # For passwords, you must include the word `password` in the item_name and # add `helpers.encrypt(ITEM_NAME, ENCRYPTION_VERSION)` in save_config() - new_config['General'] = ordered_dict() + new_config['General'] = dict() s_z = check_setting_int(CFG, 'General', 'stack_size', 0) if s_z: new_config['General']['stack_size'] = s_z @@ -1929,8 +1908,9 @@ def save_config(): new_config['General']['flatten_folders_default'] = int(FLATTEN_FOLDERS_DEFAULT) new_config['General']['anime_default'] = int(ANIME_DEFAULT) new_config['General']['provider_order'] = ' '.join(PROVIDER_ORDER) - new_config['General']['provider_homes'] = '%s' % dict([(pid, v) for pid, v in list_items(PROVIDER_HOMES) if pid in [ - p.get_id() for p in [x for x in providers.sortedProviderList() if GenericProvider.TORRENT == x.providerType]]]) + new_config['General']['provider_homes'] = '%s' % dict([(pid, v) for pid, v in list(PROVIDER_HOMES.items()) + if pid in [ + p.get_id() for p in [x for x in providers.sorted_sources() if GenericProvider.TORRENT == x.providerType]]]) new_config['General']['update_notify'] = int(UPDATE_NOTIFY) new_config['General']['update_auto'] = int(UPDATE_AUTO) new_config['General']['update_interval'] = int(UPDATE_INTERVAL) @@ -2016,7 +1996,7 @@ def save_config(): new_config['Backup']['backup_db_max_count'] = BACKUP_DB_MAX_COUNT default_not_zero = ('enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog', 'use_after_get_data') - for src in filter_iter(lambda px: GenericProvider.TORRENT == px.providerType, providers.sortedProviderList()): + for src in filter(lambda px: GenericProvider.TORRENT == px.providerType, 
providers.sorted_sources()): src_id = src.get_id() src_id_uc = src_id.upper() new_config[src_id_uc] = {} @@ -2054,19 +2034,19 @@ def save_config(): del new_config[src_id_uc] default_not_zero = ('enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog') - for src in filter_iter(lambda px: GenericProvider.NZB == px.providerType, providers.sortedProviderList()): + for src in filter(lambda px: GenericProvider.NZB == px.providerType, providers.sorted_sources()): src_id = src.get_id() src_id_uc = src.get_id().upper() new_config[src_id_uc] = {} if int(src.enabled): new_config[src_id_uc][src_id] = int(src.enabled) - for attr in filter_iter(lambda _a: None is not getattr(src, _a, None), - ('api_key', 'digest', 'username', 'search_mode')): + for attr in filter(lambda _a: None is not getattr(src, _a, None), + ('api_key', 'digest', 'username', 'search_mode')): if 'search_mode' != attr or 'eponly' != getattr(src, attr): new_config[src_id_uc]['%s_%s' % (src_id, attr)] = getattr(src, attr) - for attr in filter_iter(lambda _a: None is not getattr(src, _a, None), ( + for attr in filter(lambda _a: None is not getattr(src, _a, None), ( 'enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog', 'scene_only', 'scene_loose', 'scene_loose_active', 'scene_rej_nuked', 'scene_nuked_active', @@ -2282,7 +2262,7 @@ def save_config(): cfg_lc = cfg.lower() cfg_keys += [cfg] new_config[cfg] = {} - for (k, v) in filter_iter(lambda arg: any([arg[1]]) or ( + for (k, v) in filter(lambda arg: any([arg[1]]) or ( # allow saving where item value default is non-zero but 0 is a required setting value cfg_lc in ('kodi', 'xbmc', 'synoindex', 'nzbget', 'torrent', 'telegram') and arg[0] in ('always_on', 'priority', 'send_image')) @@ -2322,13 +2302,13 @@ def save_config(): new_config[notifier]['%s_notify_onsubtitledownload' % notifier.lower()] = int(onsubtitledownload) # remove empty stanzas - for k in filter_iter(lambda c: not new_config[c], cfg_keys): + for k in filter(lambda c: not new_config[c], cfg_keys): del new_config[k] new_config['Newznab'] = {} new_config['Newznab']['newznab_data'] = NEWZNAB_DATA - torrent_rss = '!!!'.join([x.config_str() for x in torrentRssProviderList]) + torrent_rss = '!!!'.join([x.config_str() for x in torrent_rss_providers]) if torrent_rss: new_config['TorrentRss'] = {} new_config['TorrentRss']['torrentrss_data'] = torrent_rss @@ -2420,4 +2400,4 @@ def launch_browser(start_port=None): try: webbrowser.open(browser_url, 1, True) except (BaseException, Exception): - logger.log('Unable to launch a browser', logger.ERROR) + logger.error('Unable to launch a browser') diff --git a/sickgear/_legacy.py b/sickgear/_legacy.py deleted file mode 100644 index 1727a4cb..00000000 --- a/sickgear/_legacy.py +++ /dev/null @@ -1,828 +0,0 @@ -# coding=utf-8 -# -# This file is part of SickGear. -# -# SickGear is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# SickGear is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with SickGear. If not, see . 
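
The deletion of sickgear/_legacy.py here removes the thin compatibility layer that kept the old camelCase routes and parameter names alive: each stub either forwarded its arguments to the snake_case replacement handler or issued a permanent redirect to the renamed URL. For an external script that still calls one of the retired endpoints, the same forwarding pattern can be reproduced outside the core. The snippet below is only a minimal sketch using stock Tornado, not the project's own code; the class name LegacyEpisodeView and the make_app helper are invented for illustration, while the /episodeView to /daily-schedule mapping mirrors one of the stubs removed in this patch.

    from tornado import web

    class LegacyEpisodeView(web.RequestHandler):
        # Hypothetical shim: permanently redirect a retired camelCase route
        # to its snake_case replacement.
        def get(self, *args, **kwargs):
            self.redirect('/daily-schedule', permanent=True)

    def make_app():
        # The old URL stays routable, but callers land on the new handler's path.
        return web.Application([(r'/episodeView', LegacyEpisodeView)])

As in the removed module, answering with a permanent (301) redirect lets bookmarks and third-party callers migrate to the new routes without breaking immediately.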
- -# -# This file contains deprecated routes and parameters -# Eventually, this file and its use will be removed from SG core. -# -import threading -import traceback - -import sickgear -from . import logger -from .indexers.indexer_config import TVINFO_IMDB, TVINFO_TVDB -from .tv import TVidProdid - -from requests.compat import urljoin -from tornado import gen -from tornado.escape import utf8 -from tornado.web import RequestHandler - -from _23 import decode_str, filter_iter -from six import iteritems -from sg_futures import SgThreadPoolExecutor -try: - from multiprocessing import cpu_count -except ImportError: - # some platforms don't have multiprocessing - def cpu_count(): - return None - -""" deprecated_item, remove in 2020 = 8 items """ -""" prevent issues with requests using legacy params = 3 items""" -# TODO: deprecated items, find the above comments and remove in 2020 - - -class LegacyBase(RequestHandler): - - # todo: move to RouteHandler after removing _legacy module - executor = SgThreadPoolExecutor(thread_name_prefix='WEBSERVER', max_workers=min(32, (cpu_count() or 1) + 4)) - - # todo: move to RouteHandler after removing _legacy module - def redirect(self, url, permanent=False, status=None): - """Send a redirect to the given (optionally relative) URL. - - ----->>>>> NOTE: Removed self.finish <<<<<----- - - If the ``status`` argument is specified, that value is used as the - HTTP status code; otherwise either 301 (permanent) or 302 - (temporary) is chosen based on the ``permanent`` argument. - The default is 302 (temporary). - """ - if not url.startswith(sickgear.WEB_ROOT): - url = sickgear.WEB_ROOT + url - - # noinspection PyUnresolvedReferences - if self._headers_written: - raise Exception('Cannot redirect after headers have been written') - if status is None: - status = 301 if permanent else 302 - else: - assert isinstance(status, int) - assert 300 <= status <= 399 - self.set_status(status) - self.set_header('Location', urljoin(utf8(self.request.uri), - utf8(url))) - - # todo: move to RouteHandler after removing _legacy module - def write_error(self, status_code, **kwargs): - body = '' - try: - if self.request.body: - body = '\nRequest body: %s' % decode_str(self.request.body) - except (BaseException, Exception): - pass - logger.log('Sent %s error response to a `%s` request for `%s` with headers:\n%s%s' % - (status_code, self.request.method, self.request.path, self.request.headers, body), logger.WARNING) - # suppress traceback by removing 'exc_info' kwarg - if 'exc_info' in kwargs: - logger.log('Gracefully handled exception text:\n%s' % traceback.format_exception(*kwargs["exc_info"]), - logger.DEBUG) - del kwargs['exc_info'] - return super(LegacyBase, self).write_error(status_code, **kwargs) - - def data_received(self, *args): - pass - - -class LegacyBaseHandler(LegacyBase): - - def redirect_args(self, new_url, exclude=(None,), **kwargs): - args = '&'.join(['%s=%s' % (k, v) for (k, v) in - filter_iter(lambda arg: arg[1] not in exclude, iteritems(kwargs))]) - self.redirect('%s%s' % (new_url, ('', '?' 
+ args)[bool(args)]), permanent=True) - - """ deprecated from BaseHandler ------------------------------------------------------------------------------------ - """ - def getImage(self, *args, **kwargs): - return self.get_image(*args, **kwargs) - - def get_image(self, *args, **kwargs): - # abstract method - pass - - def showPoster(self, show=None, **kwargs): - # test: /showPoster/?show=73141&which=poster_thumb - return self.show_poster(TVidProdid(show)(), **kwargs) - - def show_poster(self, *args, **kwargs): - # abstract method - pass - - """ deprecated from MainHandler ------------------------------------------------------------------------------------ - """ - def episodeView(self, **kwargs): - self.redirect_args('/daily-schedule', exclude=(None, False), **kwargs) - - def setHomeLayout(self, *args, **kwargs): - return self.set_layout_view_shows(*args, **kwargs) - - def set_layout_view_shows(self, *args, **kwargs): - # abstract method - pass - - def setPosterSortBy(self, *args): - return self.set_poster_sortby(*args) - - @staticmethod - def set_poster_sortby(*args): - # abstract method - pass - - def setPosterSortDir(self, *args): - return self.set_poster_sortdir(*args) - - @staticmethod - def set_poster_sortdir(*args): - # abstract method - pass - - def setEpisodeViewLayout(self, *args): - return self.set_layout_daily_schedule(*args) - - def set_layout_daily_schedule(self, *args): - # abstract method - pass - - def toggleEpisodeViewDisplayPaused(self): - return self.toggle_display_paused_daily_schedule() - - # completely deprecated for the three way state set_ function - # def toggle_display_paused_daily_schedule(self): - # # abstract method - # pass - - def toggle_display_paused_daily_schedule(self): - - return self.set_display_paused_daily_schedule(not sickgear.EPISODE_VIEW_DISPLAY_PAUSED) - - def set_display_paused_daily_schedule(self, *args, **kwargs): - # abstract method - pass - - def setEpisodeViewCards(self, *args, **kwargs): - return self.set_cards_daily_schedule(*args, **kwargs) - - def set_cards_daily_schedule(self, *args, **kwargs): - # abstract method - pass - - def setEpisodeViewSort(self, *args, **kwargs): - return self.set_sort_daily_schedule(*args, **kwargs) - - def set_sort_daily_schedule(self, *args, **kwargs): - # abstract method - pass - - def getFooterTime(self, *args, **kwargs): - return self.get_footer_time(*args, **kwargs) - - @staticmethod - def get_footer_time(*args, **kwargs): - # abstract method - pass - - def toggleDisplayShowSpecials(self, **kwargs): - return self.toggle_specials_view_show(TVidProdid(kwargs.get('show'))()) - - def toggle_specials_view_show(self, *args): - # abstract method - pass - - def setHistoryLayout(self, *args): - return self.set_layout_history(*args) - - def set_layout_history(self, *args): - # abstract method - pass - - """ deprecated from Home ------------------------------------------------------------------------------------------- - """ - def showlistView(self): - self.redirect('/view-shows', permanent=True) - - def viewchanges(self): - self.redirect('/home/view-changes', permanent=True) - - def displayShow(self, **kwargs): - self.migrate_redir('view-show', **kwargs) - - def editShow(self, **kwargs): - kwargs['any_qualities'] = kwargs.pop('anyQualities', None) - kwargs['best_qualities'] = kwargs.pop('bestQualities', None) - kwargs['exceptions_list'] = kwargs.pop('exceptions_list', None) - kwargs['direct_call'] = kwargs.pop('directCall', False) - kwargs['tvinfo_lang'] = kwargs.pop('indexerLang', None) - kwargs['subs'] = 
kwargs.pop('subtitles', None) - self.migrate_redir('edit-show', **kwargs) - - def testRename(self, **kwargs): - self.migrate_redir('rename-media', **kwargs) - - def migrate_redir(self, new_url, **kwargs): - kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))() - self.redirect_args('/home/%s' % new_url, exclude=(None, False), **kwargs) - - def setStatus(self, **kwargs): - kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))() - return self.set_show_status(**kwargs) - - def set_show_status(self, **kwargs): - # abstract method - pass - - def branchCheckout(self, *args): - return self.branch_checkout(*args) - - def branch_checkout(self, *args): - # abstract method - pass - - def pullRequestCheckout(self, *args): - return self.pull_request_checkout(*args) - - def pull_request_checkout(self, *args): - # abstract method - pass - - def display_season(self, **kwargs): - kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))() - return self.season_render(**kwargs) - - def season_render(self, **kwargs): - # abstract method - pass - - def plotDetails(self, show, *args): - return self.plot_details(TVidProdid(show)(), *args) - - @staticmethod - def plot_details(*args): - # abstract method - pass - - def sceneExceptions(self, show): - return self.scene_exceptions(TVidProdid(show)()) - - @staticmethod - def scene_exceptions(*args): - # abstract method - pass - - def saveMapping(self, show, **kwargs): - kwargs['m_tvid'] = kwargs.pop('mindexer', 0) - kwargs['m_prodid'] = kwargs.pop('mindexerid', 0) - return self.save_mapping(TVidProdid(show)(), **kwargs) - - def save_mapping(self, *args, **kwargs): - # abstract method - pass - - def forceMapping(self, show, **kwargs): - return self.force_mapping(TVidProdid(show)(), **kwargs) - - @staticmethod - def force_mapping(*args, **kwargs): - # abstract method - pass - - def deleteShow(self, **kwargs): - kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))() - return self.delete_show(**kwargs) - - def delete_show(self, *args, **kwargs): - # abstract method - pass - - def refreshShow(self, **kwargs): - kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))() - return self.refresh_show(**kwargs) - - def refresh_show(self, *args, **kwargs): - # abstract method - pass - - def updateShow(self, **kwargs): - kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))() - return self.update_show(**kwargs) - - def update_show(self, *args, **kwargs): - # abstract method - pass - - def subtitleShow(self, **kwargs): - kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))() - return self.subtitle_show(**kwargs) - - def subtitle_show(self, *args, **kwargs): - # abstract method - pass - - def doRename(self, **kwargs): - kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))() - return self.do_rename(**kwargs) - - def do_rename(self, *args, **kwargs): - # abstract method - pass - - def episode_search(self, **kwargs): - kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))() - return self.search_episode(**kwargs) - - def search_episode(self, *args, **kwargs): - # abstract method - pass - - def searchEpisodeSubtitles(self, **kwargs): - kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))() - return self.search_episode_subtitles(**kwargs) - - def search_episode_subtitles(self, *args, **kwargs): - # abstract method - pass - - def setSceneNumbering(self, **kwargs): - return self.set_scene_numbering( - tvid_prodid={kwargs.pop('indexer', ''): kwargs.pop('show', '')}, - for_season=kwargs.get('forSeason'), for_episode=kwargs.get('forEpisode'), - 
scene_season=kwargs.get('sceneSeason'), scene_episode=kwargs.get('sceneEpisode'), - scene_absolute=kwargs.get('sceneAbsolute')) - - @staticmethod - def set_scene_numbering(*args, **kwargs): - # abstract method - pass - - def update_emby(self, **kwargs): - kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))() - return self.update_mb(**kwargs) - - def update_mb(self, *args, **kwargs): - # abstract method - pass - - def search_q_progress(self, **kwargs): - kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))() - return self.search_q_status(**kwargs) - - def search_q_status(self, *args, **kwargs): - # abstract method - pass - - """ deprecated from NewHomeAddShows i.e. HomeAddShows -------------------------------------------------------------- - """ - def addExistingShows(self, **kwargs): - kwargs['prompt_for_settings'] = kwargs.pop('promptForSettings', None) - self.redirect_args('/add-shows/add-existing-shows', **kwargs) - - def addAniDBShow(self, **kwargs): - self.migrate_redir_add_shows('info-anidb', TVINFO_TVDB, **kwargs) - - def addIMDbShow(self, **kwargs): - self.migrate_redir_add_shows('info-imdb', TVINFO_IMDB, **kwargs) - - def addTraktShow(self, **kwargs): - self.migrate_redir_add_shows('info-trakt', TVINFO_TVDB, **kwargs) - - def migrate_redir_add_shows(self, new_url, tvinfo, **kwargs): - prodid = kwargs.pop('indexer_id', None) - if prodid: - kwargs['ids'] = prodid - if TVINFO_TVDB == tvinfo and prodid: - kwargs['ids'] = TVidProdid({tvinfo: prodid})() - kwargs['show_name'] = kwargs.pop('showName', None) - self.redirect_args('/add-shows/%s' % new_url, **kwargs) - - def getIndexerLanguages(self): - return self.get_infosrc_languages() - - @staticmethod - def get_infosrc_languages(): - # abstract method - pass - - def searchIndexersForShowName(self, *args, **kwargs): - return self.search_tvinfo_for_showname(*args, **kwargs) - - def search_tvinfo_for_showname(self, *args, **kwargs): - # abstract method - pass - - def massAddTable(self, **kwargs): - return self.mass_add_table( - root_dir=kwargs.pop('rootDir', None), **kwargs) - - def mass_add_table(self, *args, **kwargs): - # abstract method - pass - - def addNewShow(self, **kwargs): - return self.add_new_show( - provided_tvid=kwargs.pop('providedIndexer', None), - which_series=kwargs.pop('whichSeries', None), - tvinfo_lang=kwargs.pop('indexerLang', 'en'), - root_dir=kwargs.pop('rootDir', None), - default_status=kwargs.pop('defaultStatus', None), - any_qualities=kwargs.pop('anyQualities', None), - best_qualities=kwargs.pop('bestQualities', None), - subs=kwargs.pop('subtitles', None), - full_show_path=kwargs.pop('fullShowPath', None), - skip_show=kwargs.pop('skipShow', None), - **kwargs) - - def add_new_show(self, *args, **kwargs): - # abstract method - pass - - """ deprecated from ConfigGeneral ---------------------------------------------------------------------------------- - """ - def generateKey(self): - return self.generate_key() - - @staticmethod - def generate_key(): - # abstract method - pass - - def saveRootDirs(self, **kwargs): - return self.save_root_dirs(root_dir_string=kwargs.get('rootDirString')) - - @staticmethod - def save_root_dirs(**kwargs): - # abstract method - pass - - def saveResultPrefs(self, **kwargs): - return self.save_result_prefs(**kwargs) - - @staticmethod - def save_result_prefs(**kwargs): - # abstract method - pass - - def saveAddShowDefaults(self, *args, **kwargs): - return self.save_add_show_defaults(*args, **kwargs) - - @staticmethod - def save_add_show_defaults(*args, **kwargs): - # abstract 
method - pass - - def saveGeneral(self, **kwargs): - return self.save_general(**kwargs) - - def save_general(self, **kwargs): - # abstract method - pass - - """ deprecated from ConfigSearch ----------------------------------------------------------------------------------- - """ - def saveSearch(self, **kwargs): - return self.save_search(**kwargs) - - def save_search(self, **kwargs): - # abstract method - pass - - """ deprecated from ConfigProviders -------------------------------------------------------------------------------- - """ - def canAddNewznabProvider(self, *args): - return self.can_add_newznab_provider(*args) - - @staticmethod - def can_add_newznab_provider(*args): - # abstract method - pass - - def getNewznabCategories(self, *args): - return self.get_newznab_categories(*args) - - @staticmethod - def get_newznab_categories(*args): - # abstract method - pass - - def canAddTorrentRssProvider(self, *args): - return self.can_add_torrent_rss_provider(*args) - - @staticmethod - def can_add_torrent_rss_provider(*args): - # abstract method - pass - - def checkProvidersPing(self): - return self.check_providers_ping() - - @staticmethod - def check_providers_ping(): - # abstract method - pass - - def saveProviders(self, *args, **kwargs): - return self.save_providers(*args, **kwargs) - - def save_providers(self, *args, **kwargs): - # abstract method - pass - - """ deprecated from ConfigPostProcessing --------------------------------------------------------------------------- - """ - def savePostProcessing(self, **kwargs): - return self.save_post_processing(**kwargs) - - def save_post_processing(self, **kwargs): - # abstract method - pass - - def testNaming(self, *args, **kwargs): - return self.test_naming(*args, **kwargs) - - @staticmethod - def test_naming(*args, **kwargs): - # abstract method - pass - - def isNamingValid(self, *args, **kwargs): - return self.is_naming_valid(*args, **kwargs) - - @staticmethod - def is_naming_valid(*args, **kwargs): - # abstract method - pass - - def isRarSupported(self): - return self.is_rar_supported() - - @staticmethod - def is_rar_supported(): - # abstract method - pass - - """ deprecated from ConfigSubtitles -------------------------------------------------------------------------------- - """ - def saveSubtitles(self, **kwargs): - return self.save_subtitles(**kwargs) - - def save_subtitles(self, **kwargs): - # abstract method - pass - - """ deprecated from ConfigAnime ------------------------------------------------------------------------------------ - """ - def saveAnime(self, **kwargs): - return self.save_anime(**kwargs) - - def save_anime(self, **kwargs): - # abstract method - pass - - """ deprecated from Manage ----------------------------------------------------------------------------------------- - """ - def episode_statuses(self, **kwargs): - self.redirect_args('/manage/episode-overview', **kwargs) - - def subtitleMissed(self, **kwargs): - kwargs['which_subs'] = kwargs.pop('whichSubs', None) - self.redirect_args('/manage/subtitle_missed', **kwargs) - - def show_episode_statuses(self, **kwargs): - return self.get_status_episodes(TVidProdid(kwargs.get('indexer_id'))(), kwargs.get('which_status')) - - @staticmethod - def get_status_episodes(*args): - # abstract method - pass - - def showSubtitleMissed(self, **kwargs): - return self.show_subtitle_missed(TVidProdid(kwargs.get('indexer_id'))(), kwargs.get('whichSubs')) - - @staticmethod - def show_subtitle_missed(*args): - # abstract method - pass - - def downloadSubtitleMissed(self, **kwargs): - 
return self.download_subtitle_missed(**kwargs) - - def download_subtitle_missed(self, **kwargs): - # abstract method - pass - - def backlogShow(self, **kwargs): - return self.backlog_show(TVidProdid(kwargs.get('indexer_id'))()) - - def backlog_show(self, *args): - # abstract method - pass - - def backlogOverview(self): - self.redirect('/manage/backlog_overview', permanent=True) - - def massEdit(self, **kwargs): - return self.mass_edit(to_edit=kwargs.get('toEdit')) - - def mass_edit(self, **kwargs): - # abstract method - pass - - def massEditSubmit(self, **kwargs): - kwargs['to_edit'] = kwargs.pop('toEdit', None) - kwargs['subs'] = kwargs.pop('subtitles', None) - kwargs['any_qualities'] = kwargs.pop('anyQualities', None) - kwargs['best_qualities'] = kwargs.pop('bestQualities', None) - return self.mass_edit_submit(**kwargs) - - def mass_edit_submit(self, **kwargs): - # abstract method - pass - - def bulkChange(self, **kwargs): - return self.bulk_change( - to_update=kwargs.get('toUpdate'), to_refresh=kwargs.get('toRefresh'), - to_rename=kwargs.get('toRename'), to_delete=kwargs.get('toDelete'), to_remove=kwargs.get('toRemove'), - to_metadata=kwargs.get('toMetadata'), to_subtitle=kwargs.get('toSubtitle')) - - def bulk_change(self, **kwargs): - # abstract method - pass - - def failedDownloads(self, **kwargs): - kwargs['to_remove'] = kwargs.pop('toRemove', None) - return self.failed_downloads(**kwargs) - - def failed_downloads(self, **kwargs): - # abstract method - pass - - """ deprecated from ManageSearches --------------------------------------------------------------------------------- - """ - def retryProvider(self, **kwargs): - return self.retry_provider(**kwargs) - - @staticmethod - def retry_provider(**kwargs): - # abstract method - pass - - def forceVersionCheck(self): - return self.check_update() - - def check_update(self): - # abstract method - pass - - def forceBacklog(self): - return self.force_backlog() - - def force_backlog(self): - # abstract method - pass - - def forceSearch(self): - return self.force_search() - - def force_search(self): - # abstract method - pass - - def forceFindPropers(self): - return self.force_find_propers() - - def force_find_propers(self): - # abstract method - pass - - def pauseBacklog(self, **kwargs): - return self.pause_backlog(**kwargs) - - def pause_backlog(self, **kwargs): - # abstract method - pass - - """ deprecated from ShowProcesses ---------------------------------------------------------------------------------- - """ - def forceShowUpdate(self): - return self.force_show_update() - - def force_show_update(self): - # abstract method - pass - - """ deprecated from History ---------------------------------------------------------------------------------------- - """ - def clearHistory(self): - return self.clear_history() - - def clear_history(self): - # abstract method - pass - - def trimHistory(self): - return self.trim_history() - - def trim_history(self): - # abstract method - pass - - """ deprecated from ErrorLogs -------------------------------------------------------------------------------------- - """ - def clearerrors(self): - self.redirect('/errors/clear-log') - - def viewlog(self, **kwargs): - self.redirect_args('/events/view-log/', **kwargs) - - def downloadlog(self): - return self.download_log() - - def download_log(self): - # abstract method - pass - - """ ------------------------------------------------------------------------------------------------------------ """ - """ 
------------------------------------------------------------------------------------------------------------ """ - """ end of base deprecated function stubs """ - """ ------------------------------------------------------------------------------------------------------------ """ - """ ------------------------------------------------------------------------------------------------------------ """ - - -class LegacyRouteHandler(RequestHandler): - - def data_received(self, *args): - pass - - def __init__(self, *arg, **kwargs): - super(LegacyRouteHandler, self).__init__(*arg, **kwargs) - self.lock = threading.Lock() - - def set_default_headers(self): - self.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0') - self.set_header('X-Robots-Tag', 'noindex, nofollow, noarchive, nocache, noodp, noydir, noimageindex, nosnippet') - if sickgear.SEND_SECURITY_HEADERS: - self.set_header('X-Frame-Options', 'SAMEORIGIN') - - # noinspection PyUnusedLocal - @gen.coroutine - def get(self, *args, **kwargs): - getattr(self, 'index')() - - def redirect(self, url, permanent=False, status=None): - if not url.startswith(sickgear.WEB_ROOT): - url = sickgear.WEB_ROOT + url - - super(LegacyRouteHandler, self).redirect(url, permanent, status) - - -class LegacyManageManageSearches(LegacyRouteHandler): - - """ deprecated from ManageSearches --------------------------------------------------------------------------------- - """ - def index(self): - self.redirect('/manage/search-tasks/', permanent=True) - - -class LegacyManageShowProcesses(LegacyRouteHandler): - - """ deprecated from ManageShowProcesses ---------------------------------------------------------------------------- - """ - def index(self): - self.redirect('/manage/show-tasks/', permanent=True) - - -class LegacyConfigPostProcessing(LegacyRouteHandler): - - """ deprecated from ConfigPostProcessing --------------------------------------------------------------------------- - """ - def index(self): - self.redirect('/config/media-process/', permanent=True) - - -class LegacyHomeAddShows(LegacyRouteHandler): - - """ deprecated from NewHomeAddShows i.e. 
HomeAddShows -------------------------------------------------------------- - """ - def index(self): - self.redirect('/add-shows/', permanent=True) - - -class LegacyErrorLogs(LegacyRouteHandler): - - """ deprecated from ErrorLogs -------------------------------------------------------------------------------------- - """ - def index(self): - self.redirect('/events/', permanent=True) diff --git a/sickgear/anime.py b/sickgear/anime.py index 48347021..b5f0938f 100644 --- a/sickgear/anime.py +++ b/sickgear/anime.py @@ -19,8 +19,6 @@ import os import adba from adba.aniDBresponses import LoginFirstResponse -# noinspection PyPep8Naming -import encodingKludge as ek from exceptions_helper import ex import sickgear @@ -54,7 +52,7 @@ class AniGroupList(object): self.load() def load(self): - logger.log(u'Building allow amd block list for %s' % self.tvid_prodid, logger.DEBUG) + logger.debug(f'Building allow amd block list for {self.tvid_prodid}') self.allowlist = self._load_list('allowlist') self.blocklist = self._load_list('blocklist') @@ -76,8 +74,7 @@ class AniGroupList(object): for cur_result in sql_result: groups.append(cur_result['keyword']) - logger.log('AniPermsList: %s loaded keywords from %s: %s' % (self.tvid_prodid, table, groups), - logger.DEBUG) + logger.debug('AniPermsList: %s loaded keywords from %s: %s' % (self.tvid_prodid, table, groups)) return groups @@ -90,7 +87,7 @@ class AniGroupList(object): self._del_all_keywords('allowlist') self._add_keywords('allowlist', values) self.allowlist = values - logger.log('Allowlist set to: %s' % self.allowlist, logger.DEBUG) + logger.debug('Allowlist set to: %s' % self.allowlist) def set_block_keywords(self, values): # type: (List[AnyStr]) -> None @@ -101,7 +98,7 @@ class AniGroupList(object): self._del_all_keywords('blocklist') self._add_keywords('blocklist', values) self.blocklist = values - logger.log('Blocklist set to: %s' % self.blocklist, logger.DEBUG) + logger.debug('Blocklist set to: %s' % self.blocklist) def _del_all_keywords(self, table): # type: (AnyStr) -> None @@ -135,15 +132,14 @@ class AniGroupList(object): :return: True or False """ if not result.release_group: - logger.log('Failed to detect release group, invalid result', logger.DEBUG) + logger.debug('Failed to detect release group, invalid result') return False allowed = result.release_group.lower() in [x.lower() for x in self.allowlist] or not self.allowlist blocked = result.release_group.lower() in [x.lower() for x in self.blocklist] - logger.log('Result %sallowed%s in block list. Parsed group name: "%s" from result "%s"' % - (('not ', '')[allowed], (', but', ' and not')[not blocked], result.release_group, result.name), - logger.DEBUG) + logger.debug(f'Result {("not ", "")[allowed]}allowed{(", but", " and not")[not blocked]} in block list.' + f' Parsed group name: "{result.release_group}" from result "{result.name}"') return allowed and not blocked @@ -182,7 +178,7 @@ def short_group_names(groups): def anidb_cache_dir(): # type: (...) -> Optional[AnyStr] - cache_dir = ek.ek(os.path.join, sickgear.CACHE_DIR or get_system_temp_dir(), 'anidb') + cache_dir = os.path.join(sickgear.CACHE_DIR or get_system_temp_dir(), 'anidb') if not make_path(cache_dir): cache_dir = None return cache_dir @@ -195,29 +191,29 @@ def create_anidb_obj(**kwargs): def set_up_anidb_connection(): if not sickgear.USE_ANIDB: - logger.log(u'Usage of anidb disabled. Skipping', logger.DEBUG) + logger.debug('Usage of anidb disabled. 
Skipping') return False if not sickgear.ANIDB_USERNAME and not sickgear.ANIDB_PASSWORD: - logger.log(u'anidb username and/or password are not set. Aborting anidb lookup.', logger.DEBUG) + logger.debug('anidb username and/or password are not set. Aborting anidb lookup.') return False if not sickgear.ADBA_CONNECTION: - # anidb_logger = (lambda x: logger.log('ANIDB: ' + str(x)), logger.DEBUG) + # anidb_logger = (lambda x: logger.debug('ANIDB: ' + str(x))) sickgear.ADBA_CONNECTION = adba.Connection(keepAlive=True) # , log=anidb_logger) auth = False try: auth = sickgear.ADBA_CONNECTION.authed() except (BaseException, Exception) as e: - logger.log(u'exception msg: ' + ex(e)) + logger.log(f'exception msg: {ex(e)}') pass if not auth: try: sickgear.ADBA_CONNECTION.auth(sickgear.ANIDB_USERNAME, sickgear.ANIDB_PASSWORD) except (BaseException, Exception) as e: - logger.log(u'exception msg: ' + ex(e)) + logger.log(f'exception msg: {ex(e)}') return False else: return True @@ -232,7 +228,7 @@ def pull_anidb_groups(show_name): anime = create_anidb_obj(name=show_name) return anime.get_groups() except (BaseException, Exception) as e: - logger.log(u'Anidb exception: %s' % ex(e), logger.DEBUG) + logger.debug(f'Anidb exception: {ex(e)}') return False @@ -260,7 +256,7 @@ def push_anidb_mylist(filepath, anidb_episode): log = ('Adding the file to the anidb mylist', logger.DEBUG) result = True except (BaseException, Exception) as e: - log = (u'exception msg: %s' % ex(e), logger.MESSAGE) + log = (f'exception msg: {ex(e)}', logger.MESSAGE) result = False return result, log diff --git a/sickgear/auto_post_processer.py b/sickgear/auto_post_processer.py index 5e9f265e..124e8b4a 100644 --- a/sickgear/auto_post_processer.py +++ b/sickgear/auto_post_processer.py @@ -16,9 +16,6 @@ import os.path -# noinspection PyPep8Naming -import encodingKludge as ek - import sickgear from . import logger, processTV @@ -40,14 +37,13 @@ class PostProcesser(object): @staticmethod def _main(): - if not ek.ek(os.path.isdir, sickgear.TV_DOWNLOAD_DIR): - logger.log(u"Automatic post-processing attempted but dir %s doesn't exist" % sickgear.TV_DOWNLOAD_DIR, - logger.ERROR) + if not os.path.isdir(sickgear.TV_DOWNLOAD_DIR): + logger.error('Automatic post-processing attempted but dir %s doesn\'t exist' % sickgear.TV_DOWNLOAD_DIR) return - if not ek.ek(os.path.isabs, sickgear.TV_DOWNLOAD_DIR): - logger.log(u'Automatic post-processing attempted but dir %s is relative ' - '(and probably not what you really want to process)' % sickgear.TV_DOWNLOAD_DIR, logger.ERROR) + if not os.path.isabs(sickgear.TV_DOWNLOAD_DIR): + logger.error('Automatic post-processing attempted but dir %s is relative ' + '(and probably not what you really want to process)' % sickgear.TV_DOWNLOAD_DIR) return processTV.processDir(sickgear.TV_DOWNLOAD_DIR, is_basedir=True) diff --git a/sickgear/browser.py b/sickgear/browser.py index 1dc1a60b..6a902537 100644 --- a/sickgear/browser.py +++ b/sickgear/browser.py @@ -17,8 +17,6 @@ import os import string -# noinspection PyPep8Naming -import encodingKludge as ek from exceptions_helper import ex from . 
import logger @@ -31,7 +29,7 @@ if 'nt' == os.name: # adapted from # http://stackoverflow.com/questions/827371/is-there-a-way-to-list-all-the-available-drive-letters-in-python/827490 -def getWinDrives(): +def get_win_drives(): """ Return list of detected drives """ assert 'nt' == os.name @@ -45,33 +43,24 @@ def getWinDrives(): return drives -def foldersAtPath(path, include_parent=False, include_files=False, **kwargs): - """ deprecated_item, remove in 2020 """ - """ prevent issues with requests using legacy params """ - include_parent = include_parent or kwargs.get('includeParent') or False - include_files = include_files or kwargs.get('includeFiles') or False - """ /legacy """ - return folders_at_path(path, include_parent, include_files) - - def folders_at_path(path, include_parent=False, include_files=False): """ Returns a list of dictionaries with the folders contained at the given path Give the empty string as the path to list the contents of the root path - under Unix this means "/", on Windows this will be a list of drive letters) + under Unix this means "/", (on Windows this will be a list of drive letters) """ # walk up the tree until we find a valid path - while path and not ek.ek(os.path.isdir, path): - if path == ek.ek(os.path.dirname, path): + while path and not os.path.isdir(path): + if path == os.path.dirname(path): path = '' break else: - path = ek.ek(os.path.dirname, path) + path = os.path.dirname(path) if '' == path: if 'nt' == os.name: entries = [{'currentPath': r'\My Computer'}] - for letter in getWinDrives(): + for letter in get_win_drives(): letter_path = '%s:\\' % letter entries.append({'name': letter_path, 'path': letter_path}) return entries @@ -79,8 +68,8 @@ def folders_at_path(path, include_parent=False, include_files=False): path = '/' # fix up the path and find the parent - path = ek.ek(os.path.abspath, ek.ek(os.path.normpath, path)) - parent_path = ek.ek(os.path.dirname, path) + path = os.path.abspath(os.path.normpath(path)) + parent_path = os.path.dirname(path) # if we're at the root then the next step is the meta-node showing our drive letters if 'nt' == os.name and path == parent_path: @@ -89,10 +78,10 @@ def folders_at_path(path, include_parent=False, include_files=False): try: file_list = get_file_list(path, include_files) except OSError as e: - logger.log('Unable to open %s: %r / %s' % (path, e, ex(e)), logger.WARNING) + logger.warning('Unable to open %s: %r / %s' % (path, e, ex(e))) file_list = get_file_list(parent_path, include_files) - file_list = sorted(file_list, key=lambda x: ek.ek(os.path.basename, x['name']).lower()) + file_list = sorted(file_list, key=lambda x: os.path.basename(x['name']).lower()) entries = [{'currentPath': path}] if include_parent and path != parent_path: diff --git a/sickgear/classes.py b/sickgear/classes.py index 3cdecae0..f6e5e564 100644 --- a/sickgear/classes.py +++ b/sickgear/classes.py @@ -25,7 +25,7 @@ import sickgear from ._legacy_classes import LegacySearchResult, LegacyProper from .common import Quality -from six import integer_types, iteritems, PY2, string_types +from six import integer_types, iteritems, string_types # noinspection PyUnreachableCode if False: @@ -155,7 +155,7 @@ class SearchResult(LegacySearchResult): class NZBSearchResult(SearchResult): """ - Regular NZB result with an URL to the NZB + Regular NZB result with a URL to the NZB """ resultType = 'nzb' @@ -169,7 +169,7 @@ class NZBDataSearchResult(SearchResult): class TorrentSearchResult(SearchResult): """ - Torrent result with an URL to the torrent + 
Torrent result with a URL to the torrent """ resultType = 'torrent' @@ -195,7 +195,7 @@ class ShowInfoFilter(object): return isinstance(show_info, dict) \ and 'seriesname' in show_info \ and isinstance(show_info['seriesname'], string_types) \ - and any([x.search(show_info['seriesname']) for x in self.bad_names]) + and any(x.search(show_info['seriesname']) for x in self.bad_names) @staticmethod def _fix_firstaired(show_info): @@ -359,41 +359,11 @@ class OrderedDefaultdict(OrderedDict): args = (self.default_factory,) if self.default_factory else () return self.__class__, args, None, None, iteritems(self) - if PY2: - # backport from python 3 - def move_to_end(self, key, last=True): - """Move an existing element to the end (or beginning if last==False). + def first_key(self): + return next(iter(self)) - Raises KeyError if the element does not exist. - When last=True, acts like a fast version of self[key]=self.pop(key). - - """ - link_prev, link_next, key = link = getattr(self, '_OrderedDict__map')[key] - link_prev[1] = link_next - link_next[0] = link_prev - root = getattr(self, '_OrderedDict__root') - if last: - last = root[0] - link[0] = last - link[1] = root - last[1] = root[0] = link - else: - first = root[1] - link[0] = root - link[1] = first - root[1] = first[0] = link - - def first_key(self): - return getattr(self, '_OrderedDict__root')[1][2] - - def last_key(self): - return getattr(self, '_OrderedDict__root')[0][2] - else: - def first_key(self): - return next(iter(self)) - - def last_key(self): - return next(reversed(self)) + def last_key(self): + return next(reversed(self)) class ImageUrlList(list): @@ -455,61 +425,14 @@ class EnvVar(object): pass def __getitem__(self, key): - return os.environ(key) + return os.environ[key] @staticmethod def get(key, default=None): return os.environ.get(key, default) -if not PY2: - sickgear.ENV = EnvVar() - -elif 'nt' == os.name: - from ctypes import windll, create_unicode_buffer - - # noinspection PyCompatibility - class WinEnvVar(EnvVar): - - @staticmethod - def get_environment_variable(name): - # noinspection PyUnresolvedReferences - name = unicode(name) # ensures string argument is unicode - n = windll.kernel32.GetEnvironmentVariableW(name, None, 0) - env_value = None - if n: - buf = create_unicode_buffer(u'\0' * n) - windll.kernel32.GetEnvironmentVariableW(name, buf, n) - env_value = buf.value - return env_value - - def __getitem__(self, key): - return self.get_environment_variable(key) - - def get(self, key, default=None): - r = self.get_environment_variable(key) - return r if None is not r else default - - sickgear.ENV = WinEnvVar() -else: - # noinspection PyCompatibility - class LinuxEnvVar(EnvVar): - # noinspection PyMissingConstructor - def __init__(self, environ): - self.environ = environ - - def __getitem__(self, key): - v = self.environ.get(key) - try: - return v if not isinstance(v, str) else v.decode(sickgear.SYS_ENCODING) - except (UnicodeDecodeError, UnicodeEncodeError): - return v - - def get(self, key, default=None): - v = self[key] - return v if None is not v else default - - sickgear.ENV = LinuxEnvVar(os.environ) +sickgear.ENV = EnvVar() # backport from python 3 @@ -533,7 +456,7 @@ class SimpleNamespace(object): # list that supports weak reference -class weakList(list): +class WeakList(list): __slots__ = ('__weakref__',) diff --git a/sickgear/clients/deluge.py b/sickgear/clients/deluge.py index 411315a8..b137f66d 100644 --- a/sickgear/clients/deluge.py +++ b/sickgear/clients/deluge.py @@ -52,7 +52,7 @@ class 
DelugeAPI(GenericClient): if not connected: hosts = self._post_json({'method': 'web.get_hosts', 'params': [], 'id': 11}) if 0 == len(hosts): - logger.log('%s: WebUI does not contain daemons' % self.name, logger.ERROR) + logger.error('%s: WebUI does not contain daemons' % self.name) return None self._post_json({'method': 'web.connect', 'params': [hosts[0][0]], 'id': 11}, False) @@ -60,7 +60,7 @@ class DelugeAPI(GenericClient): connected = self._post_json({'method': 'web.connected', 'params': [], 'id': 10}) if not connected: - logger.log('%s: WebUI could not connect to daemon' % self.name, logger.ERROR) + logger.error('%s: WebUI could not connect to daemon' % self.name) return None except RequestException: return None @@ -94,7 +94,7 @@ class DelugeAPI(GenericClient): label = sickgear.TORRENT_LABEL if ' ' in label: - logger.log('%s: Invalid label. Label must not contain a space' % self.name, logger.ERROR) + logger.error('%s: Invalid label. Label must not contain a space' % self.name) return False if label: @@ -106,22 +106,21 @@ class DelugeAPI(GenericClient): if None is not labels: if label not in labels: - logger.log('%s: %s label does not exist in Deluge we must add it' % (self.name, label), - logger.DEBUG) + logger.debug('%s: %s label does not exist in Deluge we must add it' % (self.name, label)) self._request_json({ 'method': 'label.add', 'params': [label], 'id': 4}) - logger.log('%s: %s label added to Deluge' % (self.name, label), logger.DEBUG) + logger.debug('%s: %s label added to Deluge' % (self.name, label)) # add label to torrent self._request_json({ 'method': 'label.set_torrent', 'params': [result.hash, label], 'id': 5}) - logger.log('%s: %s label added to torrent' % (self.name, label), logger.DEBUG) + logger.debug('%s: %s label added to torrent' % (self.name, label)) else: - logger.log('%s: label plugin not detected' % self.name, logger.DEBUG) + logger.debug('%s: label plugin not detected' % self.name) return False return True diff --git a/sickgear/clients/download_station.py b/sickgear/clients/download_station.py index 3147ab58..42111909 100644 --- a/sickgear/clients/download_station.py +++ b/sickgear/clients/download_station.py @@ -17,16 +17,15 @@ # Uses the Synology Download Station API: # http://download.synology.com/download/Document/DeveloperGuide/Synology_Download_Station_Web_API.pdf -from datetime import datetime import re import time from .generic import GenericClient from .. import logger -from ..sgdatetime import timestamp_near +from ..sgdatetime import SGDatetime import sickgear -from _23 import filter_iter, filter_list, map_list, unquote_plus +from _23 import unquote_plus from six import string_types # noinspection PyUnreachableCode @@ -71,7 +70,7 @@ class DownloadStationAPI(GenericClient): # type: (AnyStr) -> None out = '%s%s: %s' % (self.name, (' replied with', '')['Could not' in msg], msg) self._errmsg = '
%s.' % out - logger.log(out, logger.ERROR) + logger.error(out) def _error_task(self, response): @@ -96,21 +95,21 @@ class DownloadStationAPI(GenericClient): id=t['id'], title=t['title'], total_size=t.get('size') or 0, added_ts=d.get('create_time'), last_completed_ts=d.get('completed_time'), last_started_ts=d.get('started_time'), seed_elapsed_secs=d.get('seedelapsed'), - wanted_size=sum(map_list(lambda tf: wanted(tf) and tf.get('size') or 0, f)) or None, - wanted_down=sum(map_list(lambda tf: wanted(tf) and downloaded(tf) or 0, f)) or None, + wanted_size=sum(list(map(lambda tf: wanted(tf) and tf.get('size') or 0, f))) or None, + wanted_down=sum(list(map(lambda tf: wanted(tf) and downloaded(tf) or 0, f))) or None, tally_down=downloaded(tx), tally_up=tx.get('size_uploaded'), - state='done' if re.search('finish', t['status']) else ('seed', 'down')[any(filter_list( - lambda tf: wanted(tf) and (downloaded(tf, -1) < tf.get('size', 0)), f))] + state='done' if re.search('finish', t['status']) else ('seed', 'down')[any(list(filter( + lambda tf: wanted(tf) and (downloaded(tf, -1) < tf.get('size', 0)), f)))] )) # only available during "download" and "seeding" file_list = (lambda t: t.get('additional', {}).get('file', {})) valid_stat = (lambda ti: not ti.get('error') and isinstance(ti.get('status'), string_types) - and sum(map_list(lambda tf: wanted(tf) and downloaded(tf) or 0, file_list(ti)))) - result = map_list(lambda t: base_state( + and sum(list(map(lambda tf: wanted(tf) and downloaded(tf) or 0, file_list(ti))))) + result = list(map(lambda t: base_state( t, t.get('additional', {}).get('detail', {}), t.get('additional', {}).get('transfer', {}), file_list(t)), - filter_list(lambda t: t['status'] in ('downloading', 'seeding', 'finished') and valid_stat(t), - tasks)) + list(filter(lambda t: t['status'] in ('downloading', 'seeding', 'finished') and valid_stat(t), + tasks)))) return result @@ -133,13 +132,13 @@ class DownloadStationAPI(GenericClient): t_params=dict(additional='detail,file,transfer'))['data']['tasks'] else: # noinspection PyUnresolvedReferences - tasks = (filter_list(lambda d: d.get('id') == rid, self._testdata), self._testdata)[not rid] + tasks = (list(filter(lambda d: d.get('id') == rid, self._testdata)), self._testdata)[not rid] result += tasks and (isinstance(tasks, list) and tasks or (isinstance(tasks, dict) and [tasks])) \ or ([], [{'error': True, 'id': rid}])[err] except (BaseException, Exception): if getinfo: result += [dict(error=True, id=rid)] - for t in filter_iter(lambda d: isinstance(d.get('title'), string_types) and d.get('title'), result): + for t in filter(lambda d: isinstance(d.get('title'), string_types) and d.get('title'), result): t['title'] = unquote_plus(t.get('title')) return result @@ -164,8 +163,8 @@ class DownloadStationAPI(GenericClient): # type: (Union[AnyStr, list]) -> Union[bool, list] """ Pause item(s) - :param ids: Id(s) to pause - :return: True/Falsy if success/failure else Id(s) that failed to be paused + :param ids: ID(s) to pause + :return: True/Falsy if success/failure else ID(s) that failed to be paused """ return self._action( 'pause', ids, @@ -177,8 +176,8 @@ class DownloadStationAPI(GenericClient): # type: (Union[AnyStr, list]) -> Union[bool, list] """ Resume task(s) in client - :param ids: Id(s) to act on - :return: True if success, Id(s) that could not be resumed, else Falsy if failure + :param ids: ID(s) to act on + :return: True if success, ID(s) that could not be resumed, else Falsy if failure """ return self._perform_task( 'resume', ids, @@ 
-190,8 +189,8 @@ class DownloadStationAPI(GenericClient): # type: (Union[AnyStr, list]) -> Union[bool, list] """ Delete task(s) from client - :param ids: Id(s) to act on - :return: True if success, Id(s) that could not be deleted, else Falsy if failure + :param ids: ID(s) to act on + :return: True if success, ID(s) that could not be deleted, else Falsy if failure """ return self._perform_task( 'delete', ids, @@ -205,13 +204,13 @@ class DownloadStationAPI(GenericClient): """ Set up and send a method to client :param method: Either `resume` or `delete` - :param ids: Id(s) to perform method on + :param ids: ID(s) to perform method on :param filter_func: Call back function to filter tasks as failed or erroneous :param pause_first: True if task should be paused prior to invoking method - :return: True if success, Id(s) that could not be acted upon, else Falsy if failure + :return: True if success, ID(s) that could not be acted upon, else Falsy if failure """ if isinstance(ids, (string_types, list)): - rids = ids if isinstance(ids, list) else map_list(lambda x: x.strip(), ids.split(',')) + rids = ids if isinstance(ids, list) else list(map(lambda x: x.strip(), ids.split(','))) result = pause_first and self._pause_torrent(rids) # get items not paused result = (isinstance(result, list) and result or []) @@ -225,7 +224,7 @@ class DownloadStationAPI(GenericClient): if isinstance(ids, (string_types, list)): item = dict(fail=[], ignore=[]) - for task in filter_iter(filter_func, self._tinf(ids, err=True)): + for task in filter(filter_func, self._tinf(ids, err=True)): item[('fail', 'ignore')[self._ignore_state(task)]] += [task.get('id')] # retry items not acted on @@ -234,10 +233,10 @@ class DownloadStationAPI(GenericClient): i = 0 while retry_ids: for i in tries: - logger.log('%s: retry %s %s item(s) in %ss' % (self.name, act, len(item['fail']), i), logger.DEBUG) + logger.debug('%s: retry %s %s item(s) in %ss' % (self.name, act, len(item['fail']), i)) time.sleep(i) item['fail'] = [] - for task in filter_iter(filter_func, self._tinf(retry_ids, err=True)): + for task in filter(filter_func, self._tinf(retry_ids, err=True)): item[('fail', 'ignore')[self._ignore_state(task)]] += [task.get('id')] if not item['fail']: @@ -246,8 +245,8 @@ class DownloadStationAPI(GenericClient): retry_ids = item['fail'] else: if max(tries) == i: - logger.log('%s: failed to %s %s item(s) after %s tries over %s mins, aborted' % - (self.name, act, len(item['fail']), len(tries), sum(tries) / 60), logger.DEBUG) + logger.debug('%s: failed to %s %s item(s) after %s tries over %s mins, aborted' % + (self.name, act, len(item['fail']), len(tries), sum(tries) / 60)) return (item['fail'] + item['ignore']) or True @@ -256,20 +255,20 @@ class DownloadStationAPI(GenericClient): """ Add magnet to client (overridden class function) :param search_result: A populated search result object - :return: Id of task in client, True if added but no ID, else Falsy if nothing added + :return: ID of task in client, True if added but no ID, else Falsy if nothing added """ if 3 <= self._task_version: return self._add_torrent(uri={'uri': search_result.url}) - logger.log('%s: the API at %s doesn\'t support torrent magnet, download skipped' % - (self.name, self.host), logger.WARNING) + logger.warning('%s: the API at %s doesn\'t support torrent magnet, download skipped' % + (self.name, self.host)) def _add_torrent_file(self, search_result): # type: (TorrentSearchResult) -> Union[AnyStr, bool] """ Add file to client (overridden class function) :param 
search_result: A populated search result object - :return: Id of task in client, True if added but no ID, else Falsy if nothing added + :return: ID of task in client, True if added but no ID, else Falsy if nothing added """ return self._add_torrent( files={'file': ('%s.torrent' % re.sub(r'(\.torrent)+$', '', search_result.name), search_result.content)}) @@ -280,7 +279,7 @@ class DownloadStationAPI(GenericClient): Create client task :param uri: URI param for client API :param files: file param for client API - :return: Id of task in client, True if created but no id found, else Falsy if nothing created + :return: ID of task in client, True if created but no id found, else Falsy if nothing created """ if self._testmode: # noinspection PyUnresolvedReferences @@ -298,12 +297,12 @@ class DownloadStationAPI(GenericClient): if 1 < self._task_version and sickgear.TORRENT_PATH: params['destination'] = re.sub(r'^/(volume\d*/)?', '', sickgear.TORRENT_PATH) - task_stamp = int(timestamp_near(datetime.now())) + task_stamp = SGDatetime.timestamp_near() response = self._client_request('create', t_params=params, files=files) # noinspection PyUnresolvedReferences if response and response.get('success'): for s in (1, 3, 5, 10, 15, 30, 60): - tasks = filter_list(lambda t: task_stamp <= t['additional']['detail']['create_time'], self._tinf()) + tasks = list(filter(lambda t: task_stamp <= t['additional']['detail']['create_time'], self._tinf())) try: return str(self._client_has(tasks, uri, files)[0].get('id')) except IndexError: @@ -324,8 +323,8 @@ class DownloadStationAPI(GenericClient): if uri or files: u = isinstance(uri, dict) and (uri.get('uri', '') or '').lower() or None f = isinstance(files, dict) and (files.get('file', [''])[0]).lower() or None - result = filter_list(lambda t: u and t['additional']['detail']['uri'].lower() == u - or f and t['additional']['detail']['uri'].lower() in f, tasks) + result = list(filter(lambda t: u and t['additional']['detail']['uri'].lower() == u + or f and t['additional']['detail']['uri'].lower() in f, tasks)) return result def _client_request(self, method, t_id=None, t_params=None, files=None): @@ -360,7 +359,7 @@ class DownloadStationAPI(GenericClient): return self._error_task(response) if None is not t_id and None is t_params and 'create' != method: - return filter_list(lambda r: r.get('error'), response.get('data', {})) or True + return list(filter(lambda r: r.get('error'), response.get('data', {}))) or True return response diff --git a/sickgear/clients/generic.py b/sickgear/clients/generic.py index a9200e5b..4ecc5084 100644 --- a/sickgear/clients/generic.py +++ b/sickgear/clients/generic.py @@ -51,7 +51,7 @@ class GenericClient(object): seg = seg[0:c - (len(sample) - 2)] + sample output += ['%s: request %s= %s%s%s' % (self.name, arg, ('', '..')[bool(i)], seg, ('', '..')[i != nch])] - logger.log(output, logger.DEBUG) + logger.debug(output) def _request(self, method='get', params=None, data=None, files=None, **kwargs): @@ -61,7 +61,7 @@ class GenericClient(object): self.last_time = time.time() if not self._get_auth(): - logger.log('%s: Authentication failed' % self.name, logger.ERROR) + logger.error('%s: Authentication failed' % self.name) return False # self._log_request_details(method, params, data, files, **kwargs) @@ -70,31 +70,30 @@ class GenericClient(object): response = self.session.__getattribute__(method)(self.url, params=params, data=data, files=files, timeout=kwargs.pop('timeout', 120), verify=False, **kwargs) except requests.exceptions.ConnectionError as e: - 
logger.log('%s: Unable to connect %s' % (self.name, ex(e)), logger.ERROR) + logger.error('%s: Unable to connect %s' % (self.name, ex(e))) return False except (requests.exceptions.MissingSchema, requests.exceptions.InvalidURL): - logger.log('%s: Invalid host' % self.name, logger.ERROR) + logger.error('%s: Invalid host' % self.name) return False except requests.exceptions.HTTPError as e: - logger.log('%s: Invalid HTTP request %s' % (self.name, ex(e)), logger.ERROR) + logger.error('%s: Invalid HTTP request %s' % (self.name, ex(e))) return False except requests.exceptions.Timeout as e: - logger.log('%s: Connection timeout %s' % (self.name, ex(e)), logger.ERROR) + logger.error('%s: Connection timeout %s' % (self.name, ex(e))) return False except (BaseException, Exception) as e: - logger.log('%s: Unknown exception raised when sending torrent to %s: %s' % (self.name, self.name, ex(e)), - logger.ERROR) + logger.error('%s: Unknown exception raised when sending torrent to %s: %s' % (self.name, self.name, ex(e))) return False if 401 == response.status_code: - logger.log('%s: Invalid username or password, check your config' % self.name, logger.ERROR) + logger.error('%s: Invalid username or password, check your config' % self.name) return False if response.status_code in http_error_code: - logger.log('%s: %s' % (self.name, http_error_code[response.status_code]), logger.DEBUG) + logger.debug('%s: %s' % (self.name, http_error_code[response.status_code])) return False - logger.log('%s: Response to %s request is %s' % (self.name, method.upper(), response.text), logger.DEBUG) + logger.debug('%s: Response to %s request is %s' % (self.name, method.upper(), response.text)) return response @@ -129,7 +128,7 @@ class GenericClient(object): def _add_torrent_file(self, result): """ This should be overridden to return the True/False from the client - when a torrent is added via result.content (only .torrent file) + when a torrent is added via `result.content` (only .torrent file) """ return False @@ -179,9 +178,9 @@ class GenericClient(object): """ This should be overridden to resume task(s) in client - :param ids: Id(s) to act on + :param ids: ID(s) to act on :type ids: list or string - :return: True if success, Id(s) that could not be resumed, else Falsy if failure + :return: True if success, ID(s) that could not be resumed, else Falsy if failure :rtype: bool or list """ return False @@ -189,9 +188,9 @@ class GenericClient(object): def _delete_torrent(self, ids): """ This should be overridden to delete task(s) from client - :param ids: Id(s) to act on + :param ids: ID(s) to act on :type ids: list or string - :return: True if success, Id(s) that could not be deleted, else Falsy if failure + :return: True if success, ID(s) that could not be deleted, else Falsy if failure :rtype: bool or list """ return False @@ -200,7 +199,7 @@ class GenericClient(object): def _get_torrent_hash(result): if result.url.startswith('magnet'): - result.hash = re.findall(r'urn:btih:([\w]{32,40})', result.url)[0] + result.hash = re.findall(r'urn:btih:(\w{32,40})', result.url)[0] if 32 == len(result.hash): result.hash = make_btih(result.hash).lower() else: @@ -213,10 +212,10 @@ class GenericClient(object): r_code = False - logger.log('Calling %s client' % self.name, logger.DEBUG) + logger.debug('Calling %s client' % self.name) if not self._get_auth(): - logger.log('%s: Authentication failed' % self.name, logger.ERROR) + logger.error('%s: Authentication failed' % self.name) return r_code try: @@ -225,8 +224,8 @@ class GenericClient(object): 
result = self._get_torrent_hash(result) except (BaseException, Exception) as e: - logger.log('Bad torrent data: hash is %s for [%s]' % (result.hash, result.name), logger.ERROR) - logger.log('Exception raised when checking torrent data: %s' % (ex(e)), logger.DEBUG) + logger.error('Bad torrent data: hash is %s for [%s]' % (result.hash, result.name)) + logger.debug('Exception raised when checking torrent data: %s' % (ex(e))) return r_code try: @@ -237,30 +236,30 @@ class GenericClient(object): self.created_id = isinstance(r_code, string_types) and r_code or None if not r_code: - logger.log('%s: Unable to send torrent to client' % self.name, logger.ERROR) + logger.error('%s: Unable to send torrent to client' % self.name) return False if not self._set_torrent_pause(result): - logger.log('%s: Unable to set the pause for torrent' % self.name, logger.ERROR) + logger.error('%s: Unable to set the pause for torrent' % self.name) if not self._set_torrent_label(result): - logger.log('%s: Unable to set the label for torrent' % self.name, logger.ERROR) + logger.error('%s: Unable to set the label for torrent' % self.name) if not self._set_torrent_ratio(result): - logger.log('%s: Unable to set the ratio for torrent' % self.name, logger.ERROR) + logger.error('%s: Unable to set the ratio for torrent' % self.name) if not self._set_torrent_seed_time(result): - logger.log('%s: Unable to set the seed time for torrent' % self.name, logger.ERROR) + logger.error('%s: Unable to set the seed time for torrent' % self.name) if not self._set_torrent_path(result): - logger.log('%s: Unable to set the path for torrent' % self.name, logger.ERROR) + logger.error('%s: Unable to set the path for torrent' % self.name) if 0 != result.priority and not self._set_torrent_priority(result): - logger.log('%s: Unable to set priority for torrent' % self.name, logger.ERROR) + logger.error('%s: Unable to set priority for torrent' % self.name) except (BaseException, Exception) as e: - logger.log('%s: Failed sending torrent: %s - %s' % (self.name, result.name, result.hash), logger.ERROR) - logger.log('%s: Exception raised when sending torrent: %s' % (self.name, ex(e)), logger.DEBUG) + logger.error('%s: Failed sending torrent: %s - %s' % (self.name, result.name, result.hash)) + logger.debug('%s: Exception raised when sending torrent: %s' % (self.name, ex(e))) return r_code diff --git a/sickgear/clients/qbittorrent.py b/sickgear/clients/qbittorrent.py index f0aa5ebd..36737f59 100644 --- a/sickgear/clients/qbittorrent.py +++ b/sickgear/clients/qbittorrent.py @@ -14,19 +14,18 @@ # You should have received a copy of the GNU General Public License # along with SickGear. If not, see . -from datetime import datetime import re import time from .generic import GenericClient from .. 
import logger from ..helpers import get_url, try_int -from ..sgdatetime import timestamp_near +from ..sgdatetime import SGDatetime import sickgear from requests.exceptions import HTTPError -from _23 import filter_iter, filter_list, map_list, unquote_plus +from _23 import unquote_plus from six import string_types # noinspection PyUnreachableCode @@ -58,9 +57,9 @@ class QbittorrentAPI(GenericClient): id=t['hash'], title=t['name'], total_size=gp.get('total_size') or 0, added_ts=gp.get('addition_date'), last_completed_ts=gp.get('completion_date'), last_started_ts=None, seed_elapsed_secs=gp.get('seeding_time'), - wanted_size=sum(map_list(lambda tf: wanted(tf) and tf.get('size') or 0, f)) or None, - wanted_down=sum(map_list(lambda tf: wanted(tf) and downloaded(tf) or 0, f)) or None, - tally_down=sum(map_list(lambda tf: downloaded(tf) or 0, f)) or None, + wanted_size=sum(list(map(lambda tf: wanted(tf) and tf.get('size') or 0, f))) or None, + wanted_down=sum(list(map(lambda tf: wanted(tf) and downloaded(tf) or 0, f))) or None, + tally_down=sum(list(map(lambda tf: downloaded(tf) or 0, f))) or None, tally_up=gp.get('total_uploaded'), state='done' if 'pausedUP' == t.get('state') else ('down', 'seed')['up' in t.get('state').lower()] )) @@ -68,10 +67,10 @@ class QbittorrentAPI(GenericClient): ('torrents/files', 'query/propertiesFiles/%s' % ti['hash'])[not self.api_ns], params=({'hash': ti['hash']}, {})[not self.api_ns], json=True) or {}) valid_stat = (lambda ti: not self._ignore_state(ti) - and sum(map_list(lambda tf: wanted(tf) and downloaded(tf) or 0, file_list(ti)))) - result = map_list(lambda t: base_state(t, self._tinf(t['hash'])[0], file_list(t)), - filter_list(lambda t: re.search('(?i)queue|stall|(up|down)load|pausedUP', t['state']) and - valid_stat(t), self._tinf(ids, False))) + and sum(list(map(lambda tf: wanted(tf) and downloaded(tf) or 0, file_list(ti))))) + result = list(map(lambda t: base_state(t, self._tinf(t['hash'])[0], file_list(t)), + list(filter(lambda t: re.search('(?i)queue|stall|(up|down)load|pausedUP', t['state']) and + valid_stat(t), self._tinf(ids, False))))) return result @@ -109,8 +108,7 @@ class QbittorrentAPI(GenericClient): except (BaseException, Exception): if getinfo: result += [dict(error=True, id=rid)] - for t in filter_iter(lambda d: isinstance(d.get('name'), string_types) and d.get('name'), - (result, [])[getinfo]): + for t in filter(lambda d: isinstance(d.get('name'), string_types) and d.get('name'), (result, [])[getinfo]): t['name'] = unquote_plus(t.get('name')) return result @@ -148,7 +146,7 @@ class QbittorrentAPI(GenericClient): """ Set maximal priority in queue to torrent task :param ids: ID(s) to promote - :return: True/Falsy if success/failure else Id(s) that failed to be changed + :return: True/Falsy if success/failure else ID(s) that failed to be changed """ def _maxpri_filter(t): mark_fail = True @@ -169,7 +167,7 @@ class QbittorrentAPI(GenericClient): task = self._tinf(t.get('hash'), use_props=False, err=True)[0] return 1 < task.get('priority') or self._ignore_state(task) # then mark fail elif isinstance(response, string_types) and 'queueing' in response.lower(): - logger.log('%s: %s' % (self.name, response), logger.ERROR) + logger.error('%s: %s' % (self.name, response)) return not mark_fail return mark_fail @@ -180,7 +178,7 @@ class QbittorrentAPI(GenericClient): """ Set label/category to torrent task :param ids: ID(s) to change - :return: True/Falsy if success/failure else Id(s) that failed to be changed + :return: True/Falsy if success/failure else 
ID(s) that failed to be changed """ def _label_filter(t): mark_fail = True @@ -196,7 +194,7 @@ class QbittorrentAPI(GenericClient): task = self._tinf(t.get('hash'), use_props=False, err=True)[0] return label not in task.get('category') or self._ignore_state(task) # then mark fail elif isinstance(response, string_types) and 'incorrect' in response.lower(): - logger.log('%s: %s. "%s" isn\'t known to qB' % (self.name, response, label), logger.ERROR) + logger.error('%s: %s. "%s" isn\'t known to qB' % (self.name, response, label)) return not mark_fail return mark_fail @@ -206,8 +204,8 @@ class QbittorrentAPI(GenericClient): # type: (Union[AnyStr, list]) -> Union[bool, list] """ Pause item(s) - :param ids: Id(s) to pause - :return: True/Falsy if success/failure else Id(s) that failed to be paused + :param ids: ID(s) to pause + :return: True/Falsy if success/failure else ID(s) that failed to be paused """ def _pause_filter(t): mark_fail = True @@ -253,8 +251,8 @@ class QbittorrentAPI(GenericClient): # type: (Union[AnyStr, list]) -> Union[bool, list] """ Resume task(s) in client - :param ids: Id(s) to act on - :return: True if success, Id(s) that could not be resumed, else Falsy if failure + :param ids: ID(s) to act on + :return: True if success, ID(s) that could not be resumed, else Falsy if failure """ return self._perform_task( 'resume', ids, @@ -268,8 +266,8 @@ class QbittorrentAPI(GenericClient): # type: (Union[AnyStr, list]) -> Union[bool, list] """ Delete task(s) from client - :param ids: Id(s) to act on - :return: True if success, Id(s) that could not be deleted, else Falsy if failure + :param ids: ID(s) to act on + :return: True if success, ID(s) that could not be deleted, else Falsy if failure """ return self._perform_task( 'delete', ids, @@ -284,13 +282,13 @@ class QbittorrentAPI(GenericClient): """ Set up and send a method to client :param method: Either `resume` or `delete` - :param ids: Id(s) to perform method on + :param ids: ID(s) to perform method on :param filter_func: Call back function passed to _action that will filter tasks as failed or erroneous :param pause_first: True if task should be paused prior to invoking method - :return: True if success, Id(s) that could not be acted upon, else Falsy if failure + :return: True if success, ID(s) that could not be acted upon, else Falsy if failure """ if isinstance(ids, (string_types, list)): - rids = ids if isinstance(ids, list) else map_list(lambda x: x.strip(), ids.split(',')) + rids = ids if isinstance(ids, list) else list(map(lambda x: x.strip(), ids.split(','))) result = pause_first and self._pause_torrent(rids) # get items not paused result = (isinstance(result, list) and result or []) @@ -304,7 +302,7 @@ class QbittorrentAPI(GenericClient): if isinstance(ids, (string_types, list)): item = dict(fail=[], ignore=[]) - for task in filter_iter(filter_func, self._tinf(ids, use_props=False, err=True)): + for task in filter(filter_func, self._tinf(ids, use_props=False, err=True)): item[('fail', 'ignore')[self._ignore_state(task)]] += [task.get('hash')] # retry items that are not acted on @@ -313,10 +311,10 @@ class QbittorrentAPI(GenericClient): i = 0 while retry_ids: for i in tries: - logger.log('%s: retry %s %s item(s) in %ss' % (self.name, act, len(item['fail']), i), logger.DEBUG) + logger.debug('%s: retry %s %s item(s) in %ss' % (self.name, act, len(item['fail']), i)) time.sleep(i) item['fail'] = [] - for task in filter_iter(filter_func, self._tinf(retry_ids, use_props=False, err=True)): + for task in filter(filter_func, 
self._tinf(retry_ids, use_props=False, err=True)): item[('fail', 'ignore')[self._ignore_state(task)]] += [task.get('hash')] if not item['fail']: @@ -325,8 +323,8 @@ class QbittorrentAPI(GenericClient): retry_ids = item['fail'] else: if max(tries) == i: - logger.log('%s: failed to %s %s item(s) after %s tries over %s mins, aborted' % - (self.name, act, len(item['fail']), len(tries), sum(tries) / 60), logger.DEBUG) + logger.debug('%s: failed to %s %s item(s) after %s tries over %s mins, aborted' % + (self.name, act, len(item['fail']), len(tries), sum(tries) / 60)) return (item['fail'] + item['ignore']) or True @@ -357,7 +355,7 @@ class QbittorrentAPI(GenericClient): :return: True if created, else Falsy if nothing created """ if self._tinf(data.hash): - logger.log('Could not create task, the hash is already in use', logger.ERROR) + logger.error('Could not create task, the hash is already in use') return label = sickgear.TORRENT_LABEL.replace(' ', '_') @@ -373,12 +371,12 @@ class QbittorrentAPI(GenericClient): else: kwargs = dict(post_data=params, files={'torrents': ('%s.torrent' % data.name, data.content)}) - task_stamp = int(timestamp_near(datetime.now())) + task_stamp = SGDatetime.timestamp_near() response = self._client_request(('torrents/add', 'command/%s' % cmd)[not self.api_ns], **kwargs) if True is response: for s in (1, 3, 5, 10, 15, 30, 60): - if filter_list(lambda t: task_stamp <= t['addition_date'], self._tinf(data.hash)): + if list(filter(lambda t: task_stamp <= t['addition_date'], self._tinf(data.hash))): return data.hash time.sleep(s) return True @@ -396,13 +394,13 @@ class QbittorrentAPI(GenericClient): """ Send a request to client :param cmd: Api task to invoke - :param kwargs: keyword arguments to pass thru to helpers getURL function + :param kwargs: keyword arguments to pass through to helpers getURL function :return: JSON decoded response dict, True if success and no response body, Text error or None if failure, """ authless = bool(re.search('(?i)login|version', cmd)) if authless or self.auth: if not authless and not self._get_auth(): - logger.log('%s: Authentication failed' % self.name, logger.ERROR) + logger.error('%s: Authentication failed' % self.name) return # self._log_request_details('%s%s' % (self.api_ns, cmd.strip('/')), **kwargs) @@ -432,7 +430,7 @@ class QbittorrentAPI(GenericClient): self.api_ns = 'api/v2/' response = self._client_request('auth/login', post_data=post_data, raise_status_code=True) if isinstance(response, string_types) and 'banned' in response.lower(): - logger.log('%s: %s' % (self.name, response), logger.ERROR) + logger.error('%s: %s' % (self.name, response)) response = False elif not response: self.api_ns = '' diff --git a/sickgear/clients/rtorrent.py b/sickgear/clients/rtorrent.py index b728c11f..4643fa46 100644 --- a/sickgear/clients/rtorrent.py +++ b/sickgear/clients/rtorrent.py @@ -43,7 +43,7 @@ class RtorrentAPI(GenericClient): if self.auth: try: if self.auth.has_local_id(data.hash): - logger.log('%s: Item already exists %s' % (self.name, data.name), logger.WARNING) + logger.warning('%s: Item already exists %s' % (self.name, data.name)) raise custom_var = (1, sickgear.TORRENT_LABEL_VAR or '')[0 <= sickgear.TORRENT_LABEL_VAR <= 5] @@ -62,8 +62,8 @@ class RtorrentAPI(GenericClient): if torrent and sickgear.TORRENT_LABEL: label = torrent.get_custom(custom_var) if sickgear.TORRENT_LABEL != label: - logger.log('%s: could not change custom%s label value \'%s\' to \'%s\' for %s' % ( - self.name, custom_var, label, sickgear.TORRENT_LABEL, 
torrent.name), logger.WARNING) + logger.warning('%s: could not change custom%s label value \'%s\' to \'%s\' for %s' % ( + self.name, custom_var, label, sickgear.TORRENT_LABEL, torrent.name)) except (BaseException, Exception): pass @@ -90,7 +90,7 @@ class RtorrentAPI(GenericClient): # try: # if ratio > 0: # - # # Explicitly set all group options to ensure it is setup correctly + # # Explicitly set all group options to ensure it is set up correctly # group.set_upload('1M') # group.set_min(ratio) # group.set_max(ratio) diff --git a/sickgear/clients/transmission.py b/sickgear/clients/transmission.py index 5c8074bb..02a9b8da 100644 --- a/sickgear/clients/transmission.py +++ b/sickgear/clients/transmission.py @@ -84,9 +84,9 @@ class TransmissionAPI(GenericClient): def _add_torrent(self, t_object): - # populate blankable and download_dir + # populate blanked and download_dir if not self._get_auth(): - logger.log('%s: Authentication failed' % self.name, logger.ERROR) + logger.error('%s: Authentication failed' % self.name) return False download_dir = None @@ -95,7 +95,7 @@ class TransmissionAPI(GenericClient): elif self.download_dir: download_dir = self.download_dir else: - logger.log('Path required for Transmission Downloaded files location', logger.ERROR) + logger.error('Path required for Transmission Downloaded files location') if not download_dir and not self.blankable: return False diff --git a/sickgear/clients/utorrent.py b/sickgear/clients/utorrent.py index 749a3efc..24d4a8e0 100644 --- a/sickgear/clients/utorrent.py +++ b/sickgear/clients/utorrent.py @@ -24,17 +24,17 @@ from _23 import urlencode from six import iteritems -class uTorrentAPI(GenericClient): +class UtorrentAPI(GenericClient): def __init__(self, host=None, username=None, password=None): - super(uTorrentAPI, self).__init__('uTorrent', host, username, password) + super(UtorrentAPI, self).__init__('uTorrent', host, username, password) self.url = self.host + 'gui/' def _request(self, method='get', params=None, files=None, **kwargs): params = {} if None is params else params - return super(uTorrentAPI, self)._request( + return super(UtorrentAPI, self)._request( method=method, params='token={0:s}&{1:s}'.format(self.auth, '&'.join( ['%s' % urlencode(dict([[key, str(value)]])) @@ -128,4 +128,4 @@ class uTorrentAPI(GenericClient): return self._request(params=params) -api = uTorrentAPI() +api = UtorrentAPI() diff --git a/sickgear/common.py b/sickgear/common.py index a0e5a382..ae5ecaa2 100644 --- a/sickgear/common.py +++ b/sickgear/common.py @@ -25,7 +25,6 @@ import uuid import sickgear -from _23 import map_list from six import integer_types, iterkeys, string_types # noinspection PyUnresolvedReferences @@ -181,7 +180,7 @@ class Quality(object): return Quality.qualityStrings[quality].replace('SD DVD', 'SD DVD/BR/BD') @staticmethod - def _getStatusStrings(status): + def _get_status_strings(status): """ :param status: status @@ -189,14 +188,14 @@ class Quality(object): :return: :rtype: AnyStr """ - toReturn = {} + to_return = {} for _x in Quality.qualityStrings: - toReturn[Quality.compositeStatus(status, _x)] = '%s (%s)' % ( + to_return[Quality.composite_status(status, _x)] = '%s (%s)' % ( Quality.statusPrefixes[status], Quality.qualityStrings[_x]) - return toReturn + return to_return @staticmethod - def combineQualities(any_qualities, best_qualities): + def combine_qualities(any_qualities, best_qualities): # type: (List[int], List[int]) -> int """ @@ -212,7 +211,7 @@ class Quality(object): return any_quality | (best_quality << 16) 
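# A minimal sketch of the bitmask packing used by combine_qualities above and
# reversed by split_quality below, assuming quality flags are powers of two:
# initial ("any") qualities occupy the low 16 bits, upgrade ("best") qualities
# the high 16 bits. pack_qualities/unpack_qualities are illustrative names only,
# not SickGear API.
def pack_qualities(initial, upgrade):
    mask = 0
    for q in initial:
        mask |= q             # low word: initial/any qualities
    for q in upgrade:
        mask |= q << 16       # high word: upgrade/best qualities
    return mask

def unpack_qualities(mask):
    initial = [1 << n for n in range(16) if mask & (1 << n)]
    upgrade = [1 << n for n in range(16) if mask & (1 << (n + 16))]
    return sorted(initial), sorted(upgrade)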
@staticmethod - def splitQuality(quality): + def split_quality(quality): # type: (int) -> Tuple[List[int], List[int]] """ @@ -229,10 +228,10 @@ class Quality(object): return sorted(any_qualities), sorted(best_qualities) @staticmethod - def nameQuality(name, anime=False): + def name_quality(name, anime=False): """ Return The quality from an episode File renamed by SickGear - If no quality is achieved it will try sceneQuality regex + If no quality is achieved it will try scene_quality regex :param name: name :type name: AnyStr :param anime: is anmie @@ -241,9 +240,7 @@ class Quality(object): :rtype: int """ - # noinspection PyPep8Naming - import encodingKludge as ek - name = ek.ek(os.path.basename, name) + name = os.path.basename(name) # if we have our exact text then assume we put it there for _x in sorted(iterkeys(Quality.qualityStrings), reverse=True): @@ -251,7 +248,7 @@ class Quality(object): continue if Quality.NONE == _x: # Last chance - return Quality.sceneQuality(name, anime) + return Quality.scene_quality(name, anime) regex = r'\W' + Quality.qualityStrings[_x].replace(' ', r'\W') + r'\W' regex_match = re.search(regex, name, re.I) @@ -259,7 +256,7 @@ class Quality(object): return _x @staticmethod - def sceneQuality(name, anime=False): + def scene_quality(name, anime=False): """ Return The quality from the scene episode File :param name: name @@ -269,10 +266,8 @@ class Quality(object): :return: :rtype: int """ - # noinspection PyPep8Naming - import encodingKludge as ek from sickgear import logger - name = ek.ek(os.path.basename, name) + name = os.path.basename(name) name_has = (lambda quality_list, func=all: func([re.search(q, name, re.I) for q in quality_list])) @@ -305,7 +300,7 @@ class Quality(object): if not hd_options and full_hd: return Quality.FULLHDBLURAY if sickgear.ANIME_TREAT_AS_HDTV: - logger.log(u'Treating file: %s with "unknown" quality as HDTV per user settings' % name, logger.DEBUG) + logger.debug(f'Treating file: {name} with "unknown" quality as HDTV per user settings') return Quality.HDTV return Quality.UNKNOWN @@ -355,7 +350,7 @@ class Quality(object): return Quality.UNKNOWN @staticmethod - def fileQuality(filename): + def file_quality(filename): """ :param filename: filename @@ -363,11 +358,9 @@ class Quality(object): :return: :rtype: int """ - # noinspection PyPep8Naming - import encodingKludge as ek from exceptions_helper import ex from sickgear import logger - if ek.ek(os.path.isfile, filename): + if os.path.isfile(filename): from hachoir.parser import createParser from hachoir.metadata import extractMetadata @@ -376,12 +369,12 @@ class Quality(object): parser = height = None msg = 'Hachoir can\'t parse file "%s" content quality because it found error: %s' try: - parser = ek.ek(createParser, filename) + parser = createParser(filename) except InputStreamError as e: - logger.log(msg % (filename, ex(e)), logger.WARNING) + logger.warning(msg % (filename, ex(e))) except (BaseException, Exception) as e: - logger.log(msg % (filename, ex(e)), logger.ERROR) - logger.log(traceback.format_exc(), logger.ERROR) + logger.error(msg % (filename, ex(e))) + logger.error(traceback.format_exc()) if parser: extract = None @@ -392,7 +385,7 @@ class Quality(object): parser.parse_comments = False extract = extractMetadata(parser, **args) except (BaseException, Exception) as e: - logger.log(msg % (filename, ex(e)), logger.WARNING) + logger.warning(msg % (filename, ex(e))) if extract: try: height = extract.get('height') @@ -416,7 +409,7 @@ class Quality(object): return Quality.UNKNOWN 
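# A minimal sketch of the py3 cleanup applied throughout this commit: the `_23`
# compatibility helpers map_list/filter_list/filter_iter give way to the builtins,
# and logger.log(msg, logger.DEBUG) becomes logger.debug(msg). Sample values below
# are illustrative only.
ids = ' 123, 456 ,789'
rids = list(map(lambda x: x.strip(), ids.split(',')))  # was: map_list(lambda x: x.strip(), ...)
kept = list(filter(lambda x: x, rids))                  # was: filter_list(lambda x: x, ...)
lazy = filter(lambda x: x, rids)                        # was: filter_iter(...), a plain iterator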
@staticmethod - def assumeQuality(name): + def assume_quality(name): """ :param name: name @@ -431,7 +424,7 @@ class Quality(object): return Quality.UNKNOWN @staticmethod - def compositeStatus(status, quality): + def composite_status(status, quality): """ :param status: status @@ -444,7 +437,7 @@ class Quality(object): return status + 100 * quality @staticmethod - def qualityDownloaded(status): + def quality_downloaded(status): # type: (int) -> int """ @@ -456,7 +449,7 @@ class Quality(object): return (status - DOWNLOADED) // 100 @staticmethod - def splitCompositeStatus(status): + def split_composite_status(status): # type: (int) -> Tuple[int, int] """Returns a tuple containing (status, quality) :param status: status @@ -471,7 +464,7 @@ class Quality(object): return status, Quality.NONE @staticmethod - def statusFromName(name, assume=True, anime=False): + def status_from_name(name, assume=True, anime=False): """ :param name: name @@ -483,13 +476,13 @@ class Quality(object): :return: :rtype: int or long """ - quality = Quality.nameQuality(name, anime) + quality = Quality.name_quality(name, anime) if assume and Quality.UNKNOWN == quality: - quality = Quality.assumeQuality(name) - return Quality.compositeStatus(DOWNLOADED, quality) + quality = Quality.assume_quality(name) + return Quality.composite_status(DOWNLOADED, quality) @staticmethod - def statusFromNameOrFile(file_path, assume=True, anime=False): + def status_from_name_or_file(file_path, assume=True, anime=False): """ :param file_path: file path @@ -501,12 +494,12 @@ class Quality(object): :return: :rtype: int or long """ - quality = Quality.nameQuality(file_path, anime) + quality = Quality.name_quality(file_path, anime) if Quality.UNKNOWN == quality: - quality = Quality.fileQuality(file_path) + quality = Quality.file_quality(file_path) if assume and Quality.UNKNOWN == quality: - quality = Quality.assumeQuality(file_path) - return Quality.compositeStatus(DOWNLOADED, quality) + quality = Quality.assume_quality(file_path) + return Quality.composite_status(DOWNLOADED, quality) SNATCHED = None SNATCHED_PROPER = None @@ -526,7 +519,7 @@ class WantedQualities(dict): super(WantedQualities, self).__init__(**kwargs) def _generate_wantedlist(self, qualities): - initial_qualities, upgrade_qualities = Quality.splitQuality(qualities) + initial_qualities, upgrade_qualities = Quality.split_quality(qualities) max_initial_quality = max(initial_qualities or [Quality.NONE]) min_upgrade_quality = min(upgrade_qualities or [1 << 16]) self[qualities] = {0: {self.bothlists: False, self.wantedlist: initial_qualities, self.upgradelist: False}} @@ -573,23 +566,23 @@ for (attr_name, qual_val) in [ ('SNATCHED', SNATCHED), ('SNATCHED_PROPER', SNATCHED_PROPER), ('SNATCHED_BEST', SNATCHED_BEST), ('DOWNLOADED', DOWNLOADED), ('ARCHIVED', ARCHIVED), ('FAILED', FAILED), ]: - setattr(Quality, attr_name, map_list(lambda qk: Quality.compositeStatus(qual_val, qk), - iterkeys(Quality.qualityStrings))) + setattr(Quality, attr_name, list(map(lambda qk: Quality.composite_status(qual_val, qk), + iterkeys(Quality.qualityStrings)))) Quality.SNATCHED_ANY = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST -SD = Quality.combineQualities([Quality.SDTV, Quality.SDDVD], []) -HD = Quality.combineQualities( +SD = Quality.combine_qualities([Quality.SDTV, Quality.SDDVD], []) +HD = Quality.combine_qualities( [Quality.HDTV, Quality.FULLHDTV, Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.HDBLURAY, Quality.FULLHDBLURAY], []) # HD720p + HD1080p -HD720p = 
Quality.combineQualities([Quality.HDTV, Quality.HDWEBDL, Quality.HDBLURAY], []) -HD1080p = Quality.combineQualities([Quality.FULLHDTV, Quality.FULLHDWEBDL, Quality.FULLHDBLURAY], []) -UHD2160p = Quality.combineQualities([Quality.UHD4KWEB, Quality.UHD4KBLURAY], []) -ANY = Quality.combineQualities( +HD720p = Quality.combine_qualities([Quality.HDTV, Quality.HDWEBDL, Quality.HDBLURAY], []) +HD1080p = Quality.combine_qualities([Quality.FULLHDTV, Quality.FULLHDWEBDL, Quality.FULLHDBLURAY], []) +UHD2160p = Quality.combine_qualities([Quality.UHD4KWEB, Quality.UHD4KBLURAY], []) +ANY = Quality.combine_qualities( [Quality.SDTV, Quality.SDDVD, Quality.HDTV, Quality.FULLHDTV, Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.HDBLURAY, Quality.FULLHDBLURAY, Quality.UNKNOWN], []) # SD + HD # legacy template, can't remove due to reference in mainDB upgrade? -BEST = Quality.combineQualities([Quality.SDTV, Quality.HDTV, Quality.HDWEBDL], [Quality.HDTV]) +BEST = Quality.combine_qualities([Quality.SDTV, Quality.HDTV, Quality.HDWEBDL], [Quality.HDTV]) qualityPresets = (SD, HD, HD720p, HD1080p, UHD2160p, ANY) @@ -618,7 +611,7 @@ class StatusStrings(object): def __getitem__(self, name): if name in Quality.SNATCHED_ANY + Quality.DOWNLOADED + Quality.ARCHIVED: - status, quality = Quality.splitCompositeStatus(name) + status, quality = Quality.split_composite_status(name) if quality == Quality.NONE: return self.statusStrings[status] return '%s (%s)' % (self.statusStrings[status], Quality.qualityStrings[quality]) @@ -714,7 +707,7 @@ class NeededQualities(object): """ from sickgear.tv import TVShow if isinstance(show_obj, TVShow): - init, upgrade = Quality.splitQuality(show_obj.quality) + init, upgrade = Quality.split_quality(show_obj.quality) all_qual = set(init + upgrade) need_sd = need_hd = need_uhd = need_webdl = False for wanted_qualities in all_qual: @@ -752,9 +745,9 @@ class NeededQualities(object): else: if not self.need_sd and min(wanted_qualities) <= NeededQualities.max_sd: self.need_sd = True - if not self.need_hd and any([i in NeededQualities.hd_qualities for i in wanted_qualities]): + if not self.need_hd and any(i in NeededQualities.hd_qualities for i in wanted_qualities): self.need_hd = True - if not self.need_webdl and any([i in NeededQualities.webdl_qualities for i in wanted_qualities]): + if not self.need_webdl and any(i in NeededQualities.webdl_qualities for i in wanted_qualities): self.need_webdl = True if not self.need_uhd and max(wanted_qualities) > NeededQualities.max_hd: self.need_uhd = True diff --git a/sickgear/config.py b/sickgear/config.py index 759bc0a5..c7adc21a 100644 --- a/sickgear/config.py +++ b/sickgear/config.py @@ -18,15 +18,12 @@ import datetime import os.path import re -# noinspection PyPep8Naming -import encodingKludge as ek - import sickgear import sickgear.providers from . 
import db, helpers, logger, naming from lib.api_trakt import TraktAPI -from _23 import filter_list, urlsplit, urlunsplit +from _23 import urlsplit, urlunsplit from six import string_types @@ -59,7 +56,7 @@ def change_https_cert(https_cert): if os.path.normpath(sickgear.HTTPS_CERT) != os.path.normpath(https_cert): if helpers.make_dir(os.path.dirname(os.path.abspath(https_cert))): sickgear.HTTPS_CERT = os.path.normpath(https_cert) - logger.log(u'Changed https cert path to %s' % https_cert) + logger.log(f'Changed https cert path to {https_cert}') else: return False @@ -74,7 +71,7 @@ def change_https_key(https_key): if os.path.normpath(sickgear.HTTPS_KEY) != os.path.normpath(https_key): if helpers.make_dir(os.path.dirname(os.path.abspath(https_key))): sickgear.HTTPS_KEY = os.path.normpath(https_key) - logger.log(u'Changed https key path to %s' % https_key) + logger.log(f'Changed https key path to {https_key}') else: return False @@ -92,7 +89,7 @@ def change_log_dir(log_dir, web_log): sickgear.LOG_DIR = abs_log_dir logger.sb_log_instance.init_logging() - logger.log(u'Initialized new log file in %s' % sickgear.LOG_DIR) + logger.log(f'Initialized new log file in {sickgear.LOG_DIR}') log_dir_changed = True else: @@ -112,7 +109,7 @@ def change_nzb_dir(nzb_dir): if os.path.normpath(sickgear.NZB_DIR) != os.path.normpath(nzb_dir): if helpers.make_dir(nzb_dir): sickgear.NZB_DIR = os.path.normpath(nzb_dir) - logger.log(u'Changed NZB folder to %s' % nzb_dir) + logger.log(f'Changed NZB folder to {nzb_dir}') else: return False @@ -127,7 +124,7 @@ def change_torrent_dir(torrent_dir): if os.path.normpath(sickgear.TORRENT_DIR) != os.path.normpath(torrent_dir): if helpers.make_dir(torrent_dir): sickgear.TORRENT_DIR = os.path.normpath(torrent_dir) - logger.log(u'Changed torrent folder to %s' % torrent_dir) + logger.log(f'Changed torrent folder to {torrent_dir}') else: return False @@ -142,7 +139,7 @@ def change_tv_download_dir(tv_download_dir): if os.path.normpath(sickgear.TV_DOWNLOAD_DIR) != os.path.normpath(tv_download_dir): if helpers.make_dir(tv_download_dir): sickgear.TV_DOWNLOAD_DIR = os.path.normpath(tv_download_dir) - logger.log(u'Changed TV download folder to %s' % tv_download_dir) + logger.log(f'Changed TV download folder to {tv_download_dir}') else: return False @@ -155,7 +152,7 @@ def schedule_mediaprocess(iv): if sickgear.MEDIAPROCESS_INTERVAL < sickgear.MIN_MEDIAPROCESS_INTERVAL: sickgear.MEDIAPROCESS_INTERVAL = sickgear.MIN_MEDIAPROCESS_INTERVAL - sickgear.media_process_scheduler.cycleTime = datetime.timedelta(minutes=sickgear.MEDIAPROCESS_INTERVAL) + sickgear.media_process_scheduler.cycle_time = datetime.timedelta(minutes=sickgear.MEDIAPROCESS_INTERVAL) sickgear.media_process_scheduler.set_paused_state() @@ -165,14 +162,14 @@ def schedule_recentsearch(iv): if sickgear.RECENTSEARCH_INTERVAL < sickgear.MIN_RECENTSEARCH_INTERVAL: sickgear.RECENTSEARCH_INTERVAL = sickgear.MIN_RECENTSEARCH_INTERVAL - sickgear.recent_search_scheduler.cycleTime = datetime.timedelta(minutes=sickgear.RECENTSEARCH_INTERVAL) + sickgear.recent_search_scheduler.cycle_time = datetime.timedelta(minutes=sickgear.RECENTSEARCH_INTERVAL) def schedule_backlog(iv): sickgear.BACKLOG_PERIOD = minimax(iv, sickgear.DEFAULT_BACKLOG_PERIOD, - sickgear.MIN_BACKLOG_PERIOD, sickgear.MAX_BACKLOG_PERIOD) + sickgear.MIN_BACKLOG_PERIOD, sickgear.MAX_BACKLOG_PERIOD) - sickgear.backlog_search_scheduler.action.cycleTime = sickgear.BACKLOG_PERIOD + sickgear.backlog_search_scheduler.action.cycle_time = sickgear.BACKLOG_PERIOD def 
schedule_update_software(iv): @@ -181,7 +178,7 @@ def schedule_update_software(iv): if sickgear.UPDATE_INTERVAL < sickgear.MIN_UPDATE_INTERVAL: sickgear.UPDATE_INTERVAL = sickgear.MIN_UPDATE_INTERVAL - sickgear.update_software_scheduler.cycleTime = datetime.timedelta(hours=sickgear.UPDATE_INTERVAL) + sickgear.update_software_scheduler.cycle_time = datetime.timedelta(hours=sickgear.UPDATE_INTERVAL) def schedule_update_software_notify(update_notify): @@ -198,10 +195,10 @@ def schedule_update_software_notify(update_notify): def schedule_update_packages(iv): sickgear.UPDATE_PACKAGES_INTERVAL = minimax(iv, sickgear.DEFAULT_UPDATE_PACKAGES_INTERVAL, - sickgear.MIN_UPDATE_PACKAGES_INTERVAL, - sickgear.MAX_UPDATE_PACKAGES_INTERVAL) + sickgear.MIN_UPDATE_PACKAGES_INTERVAL, + sickgear.MAX_UPDATE_PACKAGES_INTERVAL) - sickgear.update_packages_scheduler.cycleTime = datetime.timedelta(hours=sickgear.UPDATE_PACKAGES_INTERVAL) + sickgear.update_packages_scheduler.cycle_time = datetime.timedelta(hours=sickgear.UPDATE_PACKAGES_INTERVAL) def schedule_update_packages_notify(update_packages_notify): @@ -231,15 +228,6 @@ def schedule_trakt(use_trakt): return sickgear.USE_TRAKT = use_trakt - # if sickgear.USE_TRAKT: - # sickgear.trakt_checker_scheduler.start() - # else: - # sickgear.trakt_checker_scheduler.stop() - # logger.log(u'Waiting for the TRAKTCHECKER thread to exit') - # try: - # sickgear.trakt_checker_scheduler.join(10) - # except: - # pass def schedule_subtitles(use_subtitles): @@ -253,7 +241,7 @@ def schedule_emby_watched(emby_watched_interval): 0, sickgear.MAX_WATCHEDSTATE_INTERVAL) if emby_watched_iv and emby_watched_iv != sickgear.EMBY_WATCHEDSTATE_INTERVAL: sickgear.EMBY_WATCHEDSTATE_INTERVAL = emby_watched_iv - sickgear.emby_watched_state_scheduler.cycleTime = datetime.timedelta(minutes=emby_watched_iv) + sickgear.emby_watched_state_scheduler.cycle_time = datetime.timedelta(minutes=emby_watched_iv) sickgear.EMBY_WATCHEDSTATE_SCHEDULED = bool(emby_watched_iv) sickgear.emby_watched_state_scheduler.set_paused_state() @@ -264,7 +252,7 @@ def schedule_plex_watched(plex_watched_interval): 0, sickgear.MAX_WATCHEDSTATE_INTERVAL) if plex_watched_iv and plex_watched_iv != sickgear.PLEX_WATCHEDSTATE_INTERVAL: sickgear.PLEX_WATCHEDSTATE_INTERVAL = plex_watched_iv - sickgear.plex_watched_state_scheduler.cycleTime = datetime.timedelta(minutes=plex_watched_iv) + sickgear.plex_watched_state_scheduler.cycle_time = datetime.timedelta(minutes=plex_watched_iv) sickgear.PLEX_WATCHEDSTATE_SCHEDULED = bool(plex_watched_iv) sickgear.plex_watched_state_scheduler.set_paused_state() @@ -348,7 +336,7 @@ def clean_hosts(hosts, default_port=None, allow_base=False): def clean_url(url, add_slash=True): - """ Returns an cleaned url starting with a scheme and folder with trailing '/' or an empty string """ + """ Returns a cleaned url starting with a scheme and folder with trailing '/' or an empty string """ if url and url.strip(): @@ -360,7 +348,7 @@ def clean_url(url, add_slash=True): scheme, netloc, path, query, fragment = urlsplit(url, 'http') if not path.endswith('/'): - basename, ext = ek.ek(os.path.splitext, ek.ek(os.path.basename, path)) + basename, ext = os.path.splitext(os.path.basename(path)) if not ext and add_slash: path += '/' @@ -419,7 +407,7 @@ def check_setting_int(config, cfg_name, item_name, def_val): except (BaseException, Exception): config[cfg_name] = {} config[cfg_name][item_name] = my_val - logger.log('%s -> %s' % (item_name, my_val), logger.DEBUG) + logger.debug('%s -> %s' % (item_name, my_val)) return 
my_val @@ -434,13 +422,13 @@ def check_setting_float(config, cfg_name, item_name, def_val): config[cfg_name] = {} config[cfg_name][item_name] = my_val - logger.log('%s -> %s' % (item_name, my_val), logger.DEBUG) + logger.debug('%s -> %s' % (item_name, my_val)) return my_val def check_setting_str(config, cfg_name, item_name, def_val, log=True): """ - For passwords you must include the word `password` in the item_name and + For passwords, you must include the word `password` in the item_name and add `helpers.encrypt(ITEM_NAME, ENCRYPTION_VERSION)` in save_config() """ @@ -461,9 +449,9 @@ def check_setting_str(config, cfg_name, item_name, def_val, log=True): config[cfg_name][item_name] = helpers.encrypt(my_val, encryption_version) if log: - logger.log('%s -> %s' % (item_name, my_val), logger.DEBUG) + logger.debug('%s -> %s' % (item_name, my_val)) else: - logger.log('%s -> ******' % item_name, logger.DEBUG) + logger.debug('%s -> ******' % item_name) return (my_val, def_val)['None' == my_val] @@ -509,9 +497,10 @@ class ConfigMigrator(object): if self.config_version > self.expected_config_version: logger.log_error_and_exit( - u'Your config version (%s) has been incremented past what this version of SickGear supports (%s).\n' - 'If you have used other forks or a newer version of SickGear, your config file may be unusable due to ' - 'their modifications.' % (self.config_version, self.expected_config_version)) + f'Your config version ({self.config_version})' + f' has been incremented past what this version of SickGear supports ({self.expected_config_version}).\n' + f'If you have used other forks or a newer version of SickGear,' + f' your config file may be unusable due to their modifications.') sickgear.CONFIG_VERSION = self.config_version @@ -523,20 +512,20 @@ class ConfigMigrator(object): else: migration_name = '' - logger.log(u'Backing up config before upgrade') + logger.log('Backing up config before upgrade') if not helpers.backup_versioned_file(sickgear.CONFIG_FILE, self.config_version): - logger.log_error_and_exit(u'Config backup failed, abort upgrading config') + logger.log_error_and_exit('Config backup failed, abort upgrading config') else: - logger.log(u'Proceeding with upgrade') + logger.log('Proceeding with upgrade') # do the migration, expect a method named _migrate_v - logger.log(u'Migrating config up to version %s %s' % (next_version, migration_name)) + logger.log(f'Migrating config up to version {next_version} {migration_name}') getattr(self, '_migrate_v%s' % next_version)() self.config_version = next_version # save new config after migration sickgear.CONFIG_VERSION = self.config_version - logger.log(u'Saving config file to disk') + logger.log('Saving config file to disk') sickgear.save_config() @staticmethod @@ -581,17 +570,17 @@ class ConfigMigrator(object): new_season_format = str(new_season_format).replace('09', '%0S') new_season_format = new_season_format.replace('9', '%S') - logger.log(u'Changed season folder format from %s to %s, prepending it to your naming config' % - (old_season_format, new_season_format)) + logger.log(f'Changed season folder format from {old_season_format} to {new_season_format},' + f' prepending it to your naming config') sickgear.NAMING_PATTERN = new_season_format + os.sep + sickgear.NAMING_PATTERN except (TypeError, ValueError): - logger.log(u'Can not change %s to new season format' % old_season_format, logger.ERROR) + logger.error(f'Can not change {old_season_format} to new season format') # if no shows had it on then don't flatten any shows and don't 
put season folders in the config else: - logger.log(u'No shows were using season folders before so I am disabling flattening on all shows') + logger.log('No shows were using season folders before so I am disabling flattening on all shows') # don't flatten any shows at all my_db.action('UPDATE tv_shows SET flatten_folders = 0 WHERE 1=1') @@ -665,7 +654,7 @@ class ConfigMigrator(object): Reads in the old naming settings from your config and generates a new config template from them. """ # get the old settings from the file and store them in the new variable names - for prov in [curProvider for curProvider in sickgear.providers.sortedProviderList() + for prov in [curProvider for curProvider in sickgear.providers.sorted_sources() if 'omgwtfnzbs' == curProvider.name]: prov.username = check_setting_str(self.config_obj, 'omgwtfnzbs', 'omgwtfnzbs_uid', '') prov.api_key = check_setting_str(self.config_obj, 'omgwtfnzbs', 'omgwtfnzbs_key', '') @@ -684,8 +673,7 @@ class ConfigMigrator(object): try: name, url, key, enabled = cur_provider_data.split('|') except ValueError: - logger.log(u'Skipping Newznab provider string: "%s", incorrect format' % cur_provider_data, - logger.ERROR) + logger.error(f'Skipping Newznab provider string: "{cur_provider_data}", incorrect format') continue cat_ids = '5030,5040,5060' @@ -739,7 +727,7 @@ class ConfigMigrator(object): cur_metadata = metadata.split('|') # if target has the old number of values, do upgrade if 6 == len(cur_metadata): - logger.log(u'Upgrading ' + metadata_name + ' metadata, old value: ' + metadata) + logger.log('Upgrading ' + metadata_name + ' metadata, old value: ' + metadata) cur_metadata.insert(4, '0') cur_metadata.append('0') cur_metadata.append('0') @@ -752,15 +740,15 @@ class ConfigMigrator(object): cur_metadata[4], cur_metadata[3] = cur_metadata[3], '0' # write new format metadata = '|'.join(cur_metadata) - logger.log(u'Upgrading %s metadata, new value: %s' % (metadata_name, metadata)) + logger.log(f'Upgrading {metadata_name} metadata, new value: {metadata}') elif 10 == len(cur_metadata): metadata = '|'.join(cur_metadata) - logger.log(u'Keeping %s metadata, value: %s' % (metadata_name, metadata)) + logger.log(f'Keeping {metadata_name} metadata, value: {metadata}') else: - logger.log(u'Skipping %s: "%s", incorrect format' % (metadata_name, metadata), logger.ERROR) + logger.error(f'Skipping {metadata_name}: "{metadata}", incorrect format') metadata = '0|0|0|0|0|0|0|0|0|0' - logger.log(u'Setting %s metadata, new value: %s' % (metadata_name, metadata)) + logger.log(f'Setting {metadata_name} metadata, new value: {metadata}') return metadata @@ -776,13 +764,13 @@ class ConfigMigrator(object): # Migration v6: Rename daily search to recent search def _migrate_v6(self): sickgear.RECENTSEARCH_INTERVAL = check_setting_int(self.config_obj, 'General', 'dailysearch_frequency', - sickgear.DEFAULT_RECENTSEARCH_INTERVAL) + sickgear.DEFAULT_RECENTSEARCH_INTERVAL) sickgear.RECENTSEARCH_STARTUP = bool(check_setting_int(self.config_obj, 'General', 'dailysearch_startup', 1)) if sickgear.RECENTSEARCH_INTERVAL < sickgear.MIN_RECENTSEARCH_INTERVAL: sickgear.RECENTSEARCH_INTERVAL = sickgear.MIN_RECENTSEARCH_INTERVAL - for curProvider in sickgear.providers.sortedProviderList(): + for curProvider in sickgear.providers.sorted_sources(): if hasattr(curProvider, 'enable_recentsearch'): curProvider.enable_recentsearch = bool(check_setting_int( self.config_obj, curProvider.get_id().upper(), curProvider.get_id() + '_enable_dailysearch', 1)) @@ -834,7 +822,7 @@ class 
ConfigMigrator(object): # Migration v15: Transmithe.net variables def _migrate_v15(self): try: - neb = filter_list(lambda p: 'Nebulance' in p.name, sickgear.providers.sortedProviderList())[0] + neb = list(filter(lambda p: 'Nebulance' in p.name, sickgear.providers.sorted_sources()))[0] except (BaseException, Exception): return # get the old settings from the file and store them in the new variable names @@ -857,14 +845,14 @@ class ConfigMigrator(object): # Migration v16: Purge old cache image folder name @staticmethod def _migrate_v16(): - if sickgear.CACHE_DIR and ek.ek(os.path.isdir, sickgear.CACHE_DIR): + if sickgear.CACHE_DIR and os.path.isdir(sickgear.CACHE_DIR): cache_default = sickgear.CACHE_DIR dead_paths = ['anidb', 'imdb', 'trakt'] for path in dead_paths: sickgear.CACHE_DIR = '%s/images/%s' % (cache_default, path) helpers.clear_cache(True) try: - ek.ek(os.rmdir, sickgear.CACHE_DIR) + os.rmdir(sickgear.CACHE_DIR) except OSError: pass sickgear.CACHE_DIR = cache_default diff --git a/sickgear/databases/cache_db.py b/sickgear/databases/cache_db.py index 87e7ea98..2332af24 100644 --- a/sickgear/databases/cache_db.py +++ b/sickgear/databases/cache_db.py @@ -96,16 +96,16 @@ class InitialSchema(db.SchemaUpgrade): ]) def test(self): - return self.hasTable('lastUpdate') + return self.has_table('lastUpdate') def execute(self): self.do_query(self.queries[next(iter(self.queries))]) - self.setDBVersion(MIN_DB_VERSION, check_db_version=False) + self.set_db_version(MIN_DB_VERSION, check_db_version=False) class ConsolidateProviders(InitialSchema): def test(self): - return 1 < self.checkDBVersion() + return 1 < self.call_check_db_version() def execute(self): keep_tables = {'lastUpdate', 'lastSearch', 'db_version', @@ -113,13 +113,13 @@ class ConsolidateProviders(InitialSchema): # old provider_cache is dropped before re-creation # noinspection SqlResolve self.do_query(['DROP TABLE [provider_cache]'] + self.queries['consolidate_providers'] + - ['DROP TABLE [%s]' % t for t in (set(self.listTables()) - keep_tables)]) + ['DROP TABLE [%s]' % t for t in (set(self.list_tables()) - keep_tables)]) self.finish(True) class AddBacklogParts(ConsolidateProviders): def test(self): - return 2 < self.checkDBVersion() + return 2 < self.call_check_db_version() def execute(self): # noinspection SqlResolve @@ -130,7 +130,7 @@ class AddBacklogParts(ConsolidateProviders): class AddProviderFailureHandling(AddBacklogParts): def test(self): - return 3 < self.checkDBVersion() + return 3 < self.call_check_db_version() def execute(self): self.do_query(self.queries['add_provider_fails']) @@ -139,17 +139,17 @@ class AddProviderFailureHandling(AddBacklogParts): class AddIndexerToTables(AddProviderFailureHandling): def test(self): - return 4 < self.checkDBVersion() + return 4 < self.call_check_db_version() def execute(self): self.do_query(self.queries['add_indexer_to_tables']) - self.addColumn('provider_cache', 'indexer', 'NUMERIC') + self.add_column('provider_cache', 'indexer', 'NUMERIC') self.finish() class AddGenericFailureHandling(AddBacklogParts): def test(self): - return 5 < self.checkDBVersion() + return 5 < self.call_check_db_version() def execute(self): self.do_query(self.queries['connection_fails']) @@ -158,7 +158,7 @@ class AddGenericFailureHandling(AddBacklogParts): class AddSaveQueues(AddGenericFailureHandling): def test(self): - return 6 < self.checkDBVersion() + return 6 < self.call_check_db_version() def execute(self): self.do_query(self.queries['save_queues']) diff --git a/sickgear/databases/failed_db.py 
b/sickgear/databases/failed_db.py index 03f66c0a..60d760a8 100644 --- a/sickgear/databases/failed_db.py +++ b/sickgear/databases/failed_db.py @@ -28,7 +28,7 @@ TEST_BASE_VERSION = None # the base production db version, only needed for TEST # Add new migrations at the bottom of the list; subclass the previous migration. class InitialSchema(db.SchemaUpgrade): def test(self): - return self.hasTable('failed') + return self.has_table('failed') def execute(self): queries = [ @@ -45,18 +45,18 @@ class InitialSchema(db.SchemaUpgrade): class SizeAndProvider(InitialSchema): def test(self): - return self.hasColumn('failed', 'size') and self.hasColumn('failed', 'provider') + return self.has_column('failed', 'size') and self.has_column('failed', 'provider') def execute(self): - self.addColumn('failed', 'size') - self.addColumn('failed', 'provider', 'TEXT', '') + self.add_column('failed', 'size') + self.add_column('failed', 'provider', 'TEXT', '') class History(SizeAndProvider): """Snatch history that can't be modified by the user""" def test(self): - return self.hasTable('history') + return self.has_table('history') def execute(self): self.connection.action('CREATE TABLE history (date NUMERIC, ' + @@ -67,21 +67,21 @@ class HistoryStatus(History): """Store episode status before snatch to revert to if necessary""" def test(self): - return self.hasColumn('history', 'old_status') + return self.has_column('history', 'old_status') def execute(self): - self.addColumn('history', 'old_status', 'NUMERIC', Quality.NONE) - self.addColumn('history', 'showid', 'NUMERIC', '-1') - self.addColumn('history', 'season', 'NUMERIC', '-1') - self.addColumn('history', 'episode', 'NUMERIC', '-1') + self.add_column('history', 'old_status', 'NUMERIC', Quality.NONE) + self.add_column('history', 'showid', 'NUMERIC', '-1') + self.add_column('history', 'season', 'NUMERIC', '-1') + self.add_column('history', 'episode', 'NUMERIC', '-1') class AddIndexerToTables(HistoryStatus): def test(self): - return self.hasColumn('history', 'indexer') + return self.has_column('history', 'indexer') def execute(self): - self.addColumn('history', 'indexer', 'NUMERIC') + self.add_column('history', 'indexer', 'NUMERIC') main_db = db.DBConnection('sickbeard.db') show_ids = {s['prod_id']: s['tv_id'] for s in @@ -91,15 +91,15 @@ class AddIndexerToTables(HistoryStatus): cl.append(['UPDATE history SET indexer = ? WHERE showid = ?', [i, s_id]]) self.connection.mass_action(cl) - if self.connection.hasTable('backup_history'): + if self.connection.has_table('backup_history'): self.connection.action( 'REPLACE INTO history ' '(date, size, `release`, provider, old_status, showid, season, episode, indexer)' ' SELECT' ' date, size, `release`, provider, old_status, showid, season, episode, indexer' ' FROM backup_history') - self.connection.removeTable('backup_history') + self.connection.remove_table('backup_history') self.connection.action('VACUUM') - self.setDBVersion(2, check_db_version=False) + self.set_db_version(2, check_db_version=False) diff --git a/sickgear/databases/mainDB.py b/sickgear/databases/mainDB.py index 8c50d3ba..b5fbcc93 100644 --- a/sickgear/databases/mainDB.py +++ b/sickgear/databases/mainDB.py @@ -21,8 +21,6 @@ import re from .. 
import db, common, logger from ..name_parser.parser import NameParser, InvalidNameException, InvalidShowException import sickgear -# noinspection PyPep8Naming -import encodingKludge as ek from six import iteritems @@ -88,7 +86,7 @@ class MainSanityCheck(db.DBSanityCheck): if 0 < len(cl): self.connection.mass_action(cl) - logger.log(u'Performing a vacuum on the database.', logger.DEBUG) + logger.debug('Performing a vacuum on the database.') self.connection.upgrade_log(fix_msg % 'VACUUM') self.connection.action('VACUUM') self.connection.upgrade_log(fix_msg % 'finished') @@ -105,7 +103,7 @@ class MainSanityCheck(db.DBSanityCheck): # This func would break with multi tv info sources and without tvid, so added check min db version to mitigate # Also, tv_show table had a unique index added at some time to prevent further dupes, # therefore, this func is kept to cleanse legacy data given that it's redundant for new row insertions - if self.connection.checkDBVersion() < 20004: + if self.connection.check_db_version() < 20004: sql_result = self.connection.select( 'SELECT show_id, %(col)s, COUNT(%(col)s) AS count FROM tv_shows GROUP BY %(col)s HAVING count > 1' @@ -113,8 +111,7 @@ class MainSanityCheck(db.DBSanityCheck): for cur_result in sql_result: - logger.log(u'Duplicate show detected! %s: %s count: %s' % ( - column, cur_result[column], cur_result['count']), logger.DEBUG) + logger.debug(f'Duplicate show detected! {column}: {cur_result[column]} count: {cur_result["count"]}') cur_dupe_results = self.connection.select( 'SELECT show_id, ' + column + ' FROM tv_shows WHERE ' + column + ' = ? LIMIT ?', @@ -123,22 +120,22 @@ class MainSanityCheck(db.DBSanityCheck): cl = [] for cur_dupe_id in cur_dupe_results: - logger.log(u'Deleting duplicate show with %s: %s show_id: %s' % ( - column, cur_dupe_id[column], cur_dupe_id['show_id'])) + logger.log(f'Deleting duplicate show with {column}: {cur_dupe_id[column]}' + f' show_id: {cur_dupe_id["show_id"]}') cl.append(['DELETE FROM tv_shows WHERE show_id = ?', [cur_dupe_id['show_id']]]) if 0 < len(cl): self.connection.mass_action(cl) else: - logger.log(u'No duplicate show, check passed') + logger.log('No duplicate show, check passed') def fix_duplicate_episodes(self): # This func would break with multi tv info sources and without tvid, so added check min db version to mitigate # Also, tv_show table had a unique index added at some time to prevent further dupes, # therefore, this func is kept to cleanse legacy data given that it's redundant for new row insertions - if self.connection.checkDBVersion() < 20007: + if self.connection.check_db_version() < 20007: sql_result = self.connection.select( 'SELECT indexer AS tv_id, showid AS prod_id, season, episode, COUNT(showid) as count' @@ -148,9 +145,9 @@ class MainSanityCheck(db.DBSanityCheck): for cur_result in sql_result: - logger.log(u'Duplicate episode detected! prod_id: %s season: %s episode: %s count: %s' % - (cur_result['prod_id'], cur_result['season'], cur_result['episode'], - cur_result['count']), logger.DEBUG) + logger.debug(f'Duplicate episode detected! 
prod_id: {cur_result["prod_id"]}' + f' season: {cur_result["season"]} episode: {cur_result["episode"]}' + f' count: {cur_result["count"]}') cur_dupe_results = self.connection.select( 'SELECT episode_id' @@ -165,14 +162,14 @@ class MainSanityCheck(db.DBSanityCheck): cl = [] for cur_dupe_id in cur_dupe_results: - logger.log(u'Deleting duplicate episode with episode_id: %s' % cur_dupe_id['episode_id']) - cl.append(['DELETE FROM tv_episodes WHERE episode_id = ?', [cur_dupe_id['episode_id']]]) + logger.log(f'Deleting duplicate episode with episode_id: {cur_dupe_id["episode_id"]}') + cl.append(['DELETE FROM tv_episodes WHERE episode_id = ?', [cur_dupe_id["episode_id"]]]) if 0 < len(cl): self.connection.mass_action(cl) else: - logger.log(u'No duplicate episode, check passed') + logger.log('No duplicate episode, check passed') def fix_orphan_episodes(self): @@ -184,16 +181,16 @@ class MainSanityCheck(db.DBSanityCheck): cl = [] for cur_result in sql_result: - logger.log(u'Orphan episode detected! episode_id: %s showid: %s' % ( - cur_result['episode_id'], cur_result['showid']), logger.DEBUG) - logger.log(u'Deleting orphan episode with episode_id: %s' % cur_result['episode_id']) + logger.debug(f'Orphan episode detected! episode_id: {cur_result["episode_id"]}' + f' showid: {cur_result["showid"]}') + logger.log(f'Deleting orphan episode with episode_id: {cur_result["episode_id"]}') cl.append(['DELETE FROM tv_episodes WHERE episode_id = ?', [cur_result['episode_id']]]) if 0 < len(cl): self.connection.mass_action(cl) else: - logger.log(u'No orphan episodes, check passed') + logger.log('No orphan episodes, check passed') def fix_missing_table_indexes(self): if not self.connection.select('PRAGMA index_info("idx_indexer_id")'): @@ -217,18 +214,18 @@ class MainSanityCheck(db.DBSanityCheck): logger.log('Updating TV Episode table with index idx_sta_epi_sta_air') self.connection.action('CREATE INDEX idx_sta_epi_sta_air ON tv_episodes (season, episode, status, airdate)') - if not self.connection.hasIndex('tv_episodes', 'idx_tv_ep_ids'): + if not self.connection.has_index('tv_episodes', 'idx_tv_ep_ids'): logger.log('Updating TV Episode table with index idx_tv_ep_ids') self.connection.action('CREATE INDEX idx_tv_ep_ids ON tv_episodes (indexer, showid)') - if not self.connection.hasIndex('tv_episodes', 'idx_tv_episodes_unique'): + if not self.connection.has_index('tv_episodes', 'idx_tv_episodes_unique'): self.connection.action('CREATE UNIQUE INDEX idx_tv_episodes_unique ON ' 'tv_episodes(indexer,showid,season,episode)') - allowtbl, blocktbl = (('allow', 'block'), ('white', 'black'))[not self.connection.hasTable('blocklist')] + allowtbl, blocktbl = (('allow', 'block'), ('white', 'black'))[not self.connection.has_table('blocklist')] for t in [('%slist' % allowtbl, 'show_id'), ('%slist' % blocktbl, 'show_id'), ('history', 'showid'), ('scene_exceptions', 'indexer_id')]: - if not self.connection.hasIndex('%s' % t[0], 'idx_id_indexer_%s' % t[0]): + if not self.connection.has_index('%s' % t[0], 'idx_id_indexer_%s' % t[0]): # noinspection SqlResolve self.connection.action('CREATE INDEX idx_id_indexer_%s ON %s (indexer, %s)' % (t[0], t[0], t[1])) @@ -242,9 +239,9 @@ class MainSanityCheck(db.DBSanityCheck): cl = [] for cur_result in sql_result: - logger.log(u'UNAIRED episode detected! episode_id: %s showid: %s' % ( - cur_result['episode_id'], cur_result['showid']), logger.DEBUG) - logger.log(u'Fixing unaired episode status with episode_id: %s' % cur_result['episode_id']) + logger.debug(f'UNAIRED episode detected! 
episode_id: {cur_result["episode_id"]}' + f' showid: {cur_result["showid"]}') + logger.log(f'Fixing unaired episode status with episode_id: {cur_result["episode_id"]}') cl.append(['UPDATE tv_episodes SET status = ? WHERE episode_id = ?', [common.UNAIRED, cur_result['episode_id']]]) @@ -252,7 +249,7 @@ class MainSanityCheck(db.DBSanityCheck): self.connection.mass_action(cl) else: - logger.log(u'No UNAIRED episodes, check passed') + logger.log('No UNAIRED episodes, check passed') def fix_scene_exceptions(self): @@ -311,9 +308,9 @@ class InitialSchema(db.SchemaUpgrade): # Add new migrations at the bottom of the list; subclass the previous migration. # 0 -> 20009 def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) - if not self.hasTable('tv_shows') and not self.hasTable('db_version'): + if not self.has_table('tv_shows') and not self.has_table('db_version'): queries = [ # anime allow and block list 'CREATE TABLE allowlist (show_id INTEGER, range TEXT, keyword TEXT, indexer NUMERIC)', @@ -385,27 +382,23 @@ class InitialSchema(db.SchemaUpgrade): self.connection.action(query) else: - cur_db_version = self.checkDBVersion() + cur_db_version = self.call_check_db_version() if cur_db_version < MIN_DB_VERSION: logger.log_error_and_exit( - u'Your database version (' + str(cur_db_version) - + ') is too old to migrate from what this version of SickGear supports (' - + str(MIN_DB_VERSION) + ').' + "\n" + f'Your database version ({cur_db_version}) is too old to migrate from' + f' what this version of SickGear supports ({MIN_DB_VERSION}).\n' + 'Upgrade using a previous version (tag) build 496 to build 501 of SickGear' - ' first or remove database file to begin fresh.' - ) + ' first or remove database file to begin fresh.') if cur_db_version > MAX_DB_VERSION: logger.log_error_and_exit( - u'Your database version (' + str(cur_db_version) - + ') has been incremented past what this version of SickGear supports (' - + str(MAX_DB_VERSION) + ').\n' + f'Your database version ({cur_db_version}) has been incremented past' + f' what this version of SickGear supports ({MAX_DB_VERSION}).\n' + 'If you have used other forks of SickGear,' - ' your database may be unusable due to their modifications.' 
- ) + ' your database may be unusable due to their modifications.') - return self.checkDBVersion() + return self.call_check_db_version() # 9 -> 10 @@ -415,25 +408,25 @@ class AddSizeAndSceneNameFields(db.SchemaUpgrade): This func is only for 9->10 where older db columns exist, those columns have since changed """ - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) - if not self.hasColumn('tv_episodes', 'file_size'): - self.addColumn('tv_episodes', 'file_size') + if not self.has_column('tv_episodes', 'file_size'): + self.add_column('tv_episodes', 'file_size') - if not self.hasColumn('tv_episodes', 'release_name'): - self.addColumn('tv_episodes', 'release_name', 'TEXT', '') + if not self.has_column('tv_episodes', 'release_name'): + self.add_column('tv_episodes', 'release_name', 'TEXT', '') sql_result = self.connection.select('SELECT episode_id, location, file_size FROM tv_episodes') - self.upgrade_log(u'Adding file size to all episodes in DB, please be patient') + self.upgrade_log('Adding file size to all episodes in DB, please be patient') for cur_result in sql_result: if not cur_result['location']: continue # if there is no size yet then populate it for us if (not cur_result['file_size'] or not int(cur_result['file_size'])) \ - and ek.ek(os.path.isfile, cur_result['location']): - cur_size = ek.ek(os.path.getsize, cur_result['location']) + and os.path.isfile(cur_result['location']): + cur_size = os.path.getsize(cur_result['location']) self.connection.action('UPDATE tv_episodes SET file_size = ? WHERE episode_id = ?', [cur_size, int(cur_result['episode_id'])]) @@ -441,7 +434,7 @@ class AddSizeAndSceneNameFields(db.SchemaUpgrade): # noinspection SqlRedundantOrderingDirection history_sql_result = self.connection.select('SELECT * FROM history WHERE provider != -1 ORDER BY date ASC') - self.upgrade_log(u'Adding release name to all episodes still in history') + self.upgrade_log('Adding release name to all episodes still in history') for cur_result in history_sql_result: # find the associated download, if there isn't one then ignore it # noinspection SqlResolve @@ -451,12 +444,12 @@ class AddSizeAndSceneNameFields(db.SchemaUpgrade): ' WHERE provider = -1 AND showid = ? AND season = ? AND episode = ? AND date > ?', [cur_result['showid'], cur_result['season'], cur_result['episode'], cur_result['date']]) if not download_sql_result: - self.upgrade_log(u'Found a snatch in the history for ' + cur_result['resource'] - + ' but couldn\'t find the associated download, skipping it', logger.DEBUG) + self.upgrade_log(f'Found a snatch in the history for {cur_result["resource"]}' + f' but couldn\'t find the associated download, skipping it', logger.DEBUG) continue nzb_name = cur_result['resource'] - file_name = ek.ek(os.path.basename, download_sql_result[0]['resource']) + file_name = os.path.basename(download_sql_result[0]['resource']) # take the extension off the filename, it's not needed if '.' in file_name: @@ -470,13 +463,12 @@ class AddSizeAndSceneNameFields(db.SchemaUpgrade): ' WHERE showid = ? AND season = ? AND episode = ? 
AND location != ""', [cur_result['showid'], cur_result['season'], cur_result['episode']]) if not sql_result: - logger.log( - u'The episode ' + nzb_name + ' was found in history but doesn\'t exist on disk anymore, skipping', - logger.DEBUG) + logger.debug(f'The episode {nzb_name} was found in history but doesn\'t exist on disk anymore,' + f' skipping') continue # get the status/quality of the existing ep and make sure it's what we expect - ep_status, ep_quality = common.Quality.splitCompositeStatus(int(sql_result[0]['status'])) + ep_status, ep_quality = common.Quality.split_composite_status(int(sql_result[0]['status'])) if ep_status != common.DOWNLOADED: continue @@ -485,7 +477,7 @@ class AddSizeAndSceneNameFields(db.SchemaUpgrade): # make sure this is actually a real release name and not a season pack or something for cur_name in (nzb_name, file_name): - logger.log(u'Checking if ' + cur_name + ' is actually a good release name', logger.DEBUG) + logger.debug(f'Checking if {cur_name} is actually a good release name') try: np = NameParser(False) parse_result = np.parse(cur_name) @@ -505,10 +497,10 @@ class AddSizeAndSceneNameFields(db.SchemaUpgrade): ' FROM tv_episodes' ' WHERE release_name = ""') - self.upgrade_log(u'Adding release name to all episodes with obvious scene filenames') + self.upgrade_log('Adding release name to all episodes with obvious scene filenames') for cur_result in empty_sql_result: - ep_file_name = ek.ek(os.path.basename, cur_result['location']) + ep_file_name = os.path.basename(cur_result['location']) ep_file_name = os.path.splitext(ep_file_name)[0] # only want to find real scene names here so anything with a space in it is out @@ -524,20 +516,18 @@ class AddSizeAndSceneNameFields(db.SchemaUpgrade): if not parse_result.release_group: continue - logger.log( - u'Name ' + ep_file_name + ' gave release group of ' + parse_result.release_group + ', seems valid', - logger.DEBUG) + logger.debug(f'Name {ep_file_name} gave release group of {parse_result.release_group}, seems valid') self.connection.action('UPDATE tv_episodes SET release_name = ? 
WHERE episode_id = ?', [ep_file_name, cur_result['episode_id']]) - self.incDBVersion() - return self.checkDBVersion() + self.inc_db_version() + return self.call_check_db_version() # 10 -> 11 class RenameSeasonFolders(db.SchemaUpgrade): def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) # rename the column self.connection.action('ALTER TABLE tv_shows RENAME TO tmp_tv_shows') @@ -560,8 +550,8 @@ class RenameSeasonFolders(db.SchemaUpgrade): # noinspection SqlResolve self.connection.action('DROP TABLE tmp_tv_shows') - self.incDBVersion() - return self.checkDBVersion() + self.inc_db_version() + return self.call_check_db_version() # 11 -> 12 @@ -583,8 +573,8 @@ class Add1080pAndRawHDQualities(db.SchemaUpgrade): """ def _update_status(self, old_status): - (status, quality) = common.Quality.splitCompositeStatus(old_status) - return common.Quality.compositeStatus(status, self._update_quality(quality)) + (status, quality) = common.Quality.split_composite_status(old_status) + return common.Quality.composite_status(status, self._update_quality(quality)) @staticmethod def _update_quality(old_quality): @@ -630,30 +620,30 @@ class Add1080pAndRawHDQualities(db.SchemaUpgrade): return result def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) # update the default quality so we dont grab the wrong qualities after migration sickgear.QUALITY_DEFAULT = self._update_composite_qualities(sickgear.QUALITY_DEFAULT) sickgear.save_config() # upgrade previous HD to HD720p -- shift previous qualities to new placevalues - old_hd = common.Quality.combineQualities( + old_hd = common.Quality.combine_qualities( [common.Quality.HDTV, common.Quality.HDWEBDL >> 2, common.Quality.HDBLURAY >> 3], []) - new_hd = common.Quality.combineQualities([common.Quality.HDTV, common.Quality.HDWEBDL, - common.Quality.HDBLURAY], []) + new_hd = common.Quality.combine_qualities([common.Quality.HDTV, common.Quality.HDWEBDL, + common.Quality.HDBLURAY], []) # update ANY -- shift existing qualities and add new 1080p qualities, # note that rawHD was not added to the ANY template - old_any = common.Quality.combineQualities( + old_any = common.Quality.combine_qualities( [common.Quality.SDTV, common.Quality.SDDVD, common.Quality.HDTV, common.Quality.HDWEBDL >> 2, common.Quality.HDBLURAY >> 3, common.Quality.UNKNOWN], []) - new_any = common.Quality.combineQualities( + new_any = common.Quality.combine_qualities( [common.Quality.SDTV, common.Quality.SDDVD, common.Quality.HDTV, common.Quality.FULLHDTV, common.Quality.HDWEBDL, common.Quality.FULLHDWEBDL, common.Quality.HDBLURAY, common.Quality.FULLHDBLURAY, common.Quality.UNKNOWN], []) # update qualities (including templates) - self.upgrade_log(u'[1/4] Updating pre-defined templates and the quality for each show...') + self.upgrade_log('[1/4] Updating pre-defined templates and the quality for each show...') cl = [] shows = self.connection.select('SELECT * FROM tv_shows') for cur_show in shows: @@ -668,7 +658,7 @@ class Add1080pAndRawHDQualities(db.SchemaUpgrade): # update status that are are within the old hdwebdl # (1<<3 which is 8) and better -- exclude unknown (1<<15 which is 32768) - self.upgrade_log(u'[2/4] Updating the status for the episodes within each show...') + self.upgrade_log('[2/4] Updating the status for the episodes within each show...') 
cl = [] sql_result = self.connection.select('SELECT * FROM tv_episodes WHERE status < 3276800 AND status >= 800') for cur_result in sql_result: @@ -680,7 +670,7 @@ class Add1080pAndRawHDQualities(db.SchemaUpgrade): # may not always coordinate together # update previous history so it shows the correct action - self.upgrade_log(u'[3/4] Updating history to reflect the correct action...') + self.upgrade_log('[3/4] Updating history to reflect the correct action...') cl = [] # noinspection SqlResolve history_action = self.connection.select('SELECT * FROM history WHERE action < 3276800 AND action >= 800') @@ -690,7 +680,7 @@ class Add1080pAndRawHDQualities(db.SchemaUpgrade): self.connection.mass_action(cl) # update previous history so it shows the correct quality - self.upgrade_log(u'[4/4] Updating history to reflect the correct quality...') + self.upgrade_log('[4/4] Updating history to reflect the correct quality...') cl = [] # noinspection SqlResolve history_quality = self.connection.select('SELECT * FROM history WHERE quality < 32768 AND quality >= 8') @@ -699,12 +689,12 @@ class Add1080pAndRawHDQualities(db.SchemaUpgrade): [self._update_quality(cur_entry['quality']), cur_entry['showid'], cur_entry['date']]]) self.connection.mass_action(cl) - self.incDBVersion() + self.inc_db_version() # cleanup and reduce db if any previous data was removed - self.upgrade_log(u'Performing a vacuum on the database.', logger.DEBUG) + self.upgrade_log('Performing a vacuum on the database.', logger.DEBUG) self.connection.action('VACUUM') - return self.checkDBVersion() + return self.call_check_db_version() # 12 -> 13 @@ -712,20 +702,20 @@ class AddShowidTvdbidIndex(db.SchemaUpgrade): # Adding index on tvdb_id (tv_shows) and showid (tv_episodes) to speed up searches/queries def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) - self.upgrade_log(u'Checking for duplicate shows before adding unique index.') + self.upgrade_log('Checking for duplicate shows before adding unique index.') MainSanityCheck(self.connection).fix_duplicate_shows('tvdb_id') - self.upgrade_log(u'Adding index on tvdb_id (tv_shows) and showid (tv_episodes) to speed up searches/queries.') - if not self.hasTable('idx_showid'): + self.upgrade_log('Adding index on tvdb_id (tv_shows) and showid (tv_episodes) to speed up searches/queries.') + if not self.has_table('idx_showid'): self.connection.action('CREATE INDEX idx_showid ON tv_episodes (showid);') - if not self.hasTable('idx_tvdb_id'): + if not self.has_table('idx_tvdb_id'): # noinspection SqlResolve self.connection.action('CREATE UNIQUE INDEX idx_tvdb_id ON tv_shows (tvdb_id);') - self.incDBVersion() - return self.checkDBVersion() + self.inc_db_version() + return self.call_check_db_version() # 13 -> 14 @@ -733,23 +723,23 @@ class AddLastUpdateTVDB(db.SchemaUpgrade): # Adding column last_update_tvdb to tv_shows for controlling nightly updates def execute(self): - if not self.hasColumn('tv_shows', 'last_update_tvdb'): - self.upgrade_log(u'Adding column last_update_tvdb to tv_shows') - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) - self.addColumn('tv_shows', 'last_update_tvdb', default=1) + if not self.has_column('tv_shows', 'last_update_tvdb'): + self.upgrade_log('Adding column last_update_tvdb to tv_shows') + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) + self.add_column('tv_shows', 'last_update_tvdb', 
default=1) - self.incDBVersion() - return self.checkDBVersion() + self.inc_db_version() + return self.call_check_db_version() # 14 -> 15 class AddDBIncreaseTo15(db.SchemaUpgrade): def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) - self.upgrade_log(u'Bumping database version to v%s' % self.checkDBVersion()) - self.incDBVersion() - return self.checkDBVersion() + self.upgrade_log(f'Bumping database version to v{self.call_check_db_version()}') + self.inc_db_version() + return self.call_check_db_version() # 15 -> 16 @@ -757,122 +747,122 @@ class AddIMDbInfo(db.SchemaUpgrade): def execute(self): db_backed_up = False - if not self.hasTable('imdb_info'): - self.upgrade_log(u'Creating IMDb table imdb_info') + if not self.has_table('imdb_info'): + self.upgrade_log('Creating IMDb table imdb_info') - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db_backed_up = True self.connection.action( 'CREATE TABLE imdb_info (tvdb_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC,' ' akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT,' ' rating TEXT, votes INTEGER, last_update NUMERIC)') - if not self.hasColumn('tv_shows', 'imdb_id'): - self.upgrade_log(u'Adding IMDb column imdb_id to tv_shows') + if not self.has_column('tv_shows', 'imdb_id'): + self.upgrade_log('Adding IMDb column imdb_id to tv_shows') if not db_backed_up: - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) - self.addColumn('tv_shows', 'imdb_id') + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) + self.add_column('tv_shows', 'imdb_id') - self.incDBVersion() - return self.checkDBVersion() + self.inc_db_version() + return self.call_check_db_version() # 16 -> 17 class AddProperNamingSupport(db.SchemaUpgrade): def execute(self): - if not self.hasColumn('tv_shows', 'imdb_id')\ - and self.hasColumn('tv_shows', 'rls_require_words')\ - and self.hasColumn('tv_shows', 'rls_ignore_words'): - return self.setDBVersion(5816) + if not self.has_column('tv_shows', 'imdb_id')\ + and self.has_column('tv_shows', 'rls_require_words')\ + and self.has_column('tv_shows', 'rls_ignore_words'): + return self.set_db_version(5816) - if not self.hasColumn('tv_episodes', 'is_proper'): - self.upgrade_log(u'Adding column is_proper to tv_episodes') - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) - self.addColumn('tv_episodes', 'is_proper') + if not self.has_column('tv_episodes', 'is_proper'): + self.upgrade_log('Adding column is_proper to tv_episodes') + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) + self.add_column('tv_episodes', 'is_proper') - self.incDBVersion() - return self.checkDBVersion() + self.inc_db_version() + return self.call_check_db_version() # 17 -> 18 class AddEmailSubscriptionTable(db.SchemaUpgrade): def execute(self): - if not self.hasColumn('tv_episodes', 'is_proper')\ - and self.hasColumn('tv_shows', 'rls_require_words')\ - and self.hasColumn('tv_shows', 'rls_ignore_words')\ - and self.hasColumn('tv_shows', 'skip_notices'): - return self.setDBVersion(5817) + if not self.has_column('tv_episodes', 'is_proper')\ + and self.has_column('tv_shows', 'rls_require_words')\ + and self.has_column('tv_shows', 'rls_ignore_words')\ + and 
self.has_column('tv_shows', 'skip_notices'): + return self.set_db_version(5817) - if not self.hasColumn('tv_shows', 'notify_list'): - self.upgrade_log(u'Adding column notify_list to tv_shows') - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) - self.addColumn('tv_shows', 'notify_list', 'TEXT', None) + if not self.has_column('tv_shows', 'notify_list'): + self.upgrade_log('Adding column notify_list to tv_shows') + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) + self.add_column('tv_shows', 'notify_list', 'TEXT', None) - self.incDBVersion() - return self.checkDBVersion() + self.inc_db_version() + return self.call_check_db_version() # 18 -> 19 class AddProperSearch(db.SchemaUpgrade): def execute(self): - if not self.hasColumn('tv_episodes', 'is_proper'): - return self.setDBVersion(12) + if not self.has_column('tv_episodes', 'is_proper'): + return self.set_db_version(12) - if not self.hasColumn('tv_shows', 'notify_list')\ - and self.hasColumn('tv_shows', 'rls_require_words')\ - and self.hasColumn('tv_shows', 'rls_ignore_words')\ - and self.hasColumn('tv_shows', 'skip_notices')\ - and self.hasColumn('history', 'source'): - return self.setDBVersion(5818) + if not self.has_column('tv_shows', 'notify_list')\ + and self.has_column('tv_shows', 'rls_require_words')\ + and self.has_column('tv_shows', 'rls_ignore_words')\ + and self.has_column('tv_shows', 'skip_notices')\ + and self.has_column('history', 'source'): + return self.set_db_version(5818) - if not self.hasColumn('info', 'last_proper_search'): - self.upgrade_log(u'Adding column last_proper_search to info') - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) - self.addColumn('info', 'last_proper_search', default=1) + if not self.has_column('info', 'last_proper_search'): + self.upgrade_log('Adding column last_proper_search to info') + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) + self.add_column('info', 'last_proper_search', default=1) - self.incDBVersion() - return self.checkDBVersion() + self.inc_db_version() + return self.call_check_db_version() # 19 -> 20 class AddDvdOrderOption(db.SchemaUpgrade): def execute(self): - if not self.hasColumn('tv_shows', 'dvdorder'): - self.upgrade_log(u'Adding column dvdorder to tv_shows') - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) - self.addColumn('tv_shows', 'dvdorder', 'NUMERIC', '0') + if not self.has_column('tv_shows', 'dvdorder'): + self.upgrade_log('Adding column dvdorder to tv_shows') + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) + self.add_column('tv_shows', 'dvdorder', 'NUMERIC', '0') - self.incDBVersion() - return self.checkDBVersion() + self.inc_db_version() + return self.call_check_db_version() # 20 -> 21 class AddSubtitlesSupport(db.SchemaUpgrade): def execute(self): - if not self.hasColumn('tv_shows', 'subtitles'): - self.upgrade_log(u'Adding subtitles to tv_shows and tv_episodes') - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) - self.addColumn('tv_shows', 'subtitles') - self.addColumn('tv_episodes', 'subtitles', 'TEXT', '') - self.addColumn('tv_episodes', 'subtitles_searchcount') - self.addColumn('tv_episodes', 'subtitles_lastsearch', 'TIMESTAMP', str(datetime.datetime.min)) + if not self.has_column('tv_shows', 'subtitles'): + self.upgrade_log('Adding subtitles to tv_shows and tv_episodes') + db.backup_database(self.connection, 'sickbeard.db', 
self.call_check_db_version()) + self.add_column('tv_shows', 'subtitles') + self.add_column('tv_episodes', 'subtitles', 'TEXT', '') + self.add_column('tv_episodes', 'subtitles_searchcount') + self.add_column('tv_episodes', 'subtitles_lastsearch', 'TIMESTAMP', str(datetime.datetime.min)) - self.incDBVersion() - return self.checkDBVersion() + self.inc_db_version() + return self.call_check_db_version() # 21 -> 22 class ConvertTVShowsToIndexerScheme(db.SchemaUpgrade): def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) - self.upgrade_log(u'Converting TV Shows table to Indexer Scheme...') + self.upgrade_log('Converting TV Shows table to Indexer Scheme...') - if self.hasTable('tmp_tv_shows'): - self.upgrade_log(u'Removing temp tv show tables left behind from previous updates...') + if self.has_table('tmp_tv_shows'): + self.upgrade_log('Removing temp tv show tables left behind from previous updates...') # noinspection SqlResolve self.connection.action('DROP TABLE tmp_tv_shows') @@ -901,19 +891,19 @@ class ConvertTVShowsToIndexerScheme(db.SchemaUpgrade): # noinspection SqlConstantCondition self.connection.action('UPDATE tv_shows SET indexer = 1 WHERE 1=1') - self.incDBVersion() - return self.checkDBVersion() + self.inc_db_version() + return self.call_check_db_version() # 22 -> 23 class ConvertTVEpisodesToIndexerScheme(db.SchemaUpgrade): def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) - self.upgrade_log(u'Converting TV Episodes table to Indexer Scheme...') + self.upgrade_log('Converting TV Episodes table to Indexer Scheme...') - if self.hasTable('tmp_tv_episodes'): - self.upgrade_log(u'Removing temp tv episode tables left behind from previous updates...') + if self.has_table('tmp_tv_episodes'): + self.upgrade_log('Removing temp tv episode tables left behind from previous updates...') # noinspection SqlResolve self.connection.action('DROP TABLE tmp_tv_episodes') @@ -942,19 +932,19 @@ class ConvertTVEpisodesToIndexerScheme(db.SchemaUpgrade): # noinspection SqlConstantCondition self.connection.action('UPDATE tv_episodes SET indexer = 1 WHERE 1=1') - self.incDBVersion() - return self.checkDBVersion() + self.inc_db_version() + return self.call_check_db_version() # 23 -> 24 class ConvertIMDBInfoToIndexerScheme(db.SchemaUpgrade): def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) - self.upgrade_log(u'Converting IMDb Info table to Indexer Scheme...') + self.upgrade_log('Converting IMDb Info table to Indexer Scheme...') - if self.hasTable('tmp_imdb_info'): - self.upgrade_log(u'Removing temp imdb info tables left behind from previous updates...') + if self.has_table('tmp_imdb_info'): + self.upgrade_log('Removing temp imdb info tables left behind from previous updates...') # noinspection SqlResolve self.connection.action('DROP TABLE tmp_imdb_info') @@ -971,19 +961,19 @@ class ConvertIMDBInfoToIndexerScheme(db.SchemaUpgrade): # noinspection SqlResolve self.connection.action('DROP TABLE tmp_imdb_info') - self.incDBVersion() - return self.checkDBVersion() + self.inc_db_version() + return self.call_check_db_version() # 24 -> 25 class ConvertInfoToIndexerScheme(db.SchemaUpgrade): def execute(self): - 
db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) - self.upgrade_log(u'Converting Info table to Indexer Scheme...') + self.upgrade_log('Converting Info table to Indexer Scheme...') - if self.hasTable('tmp_info'): - self.upgrade_log(u'Removing temp info tables left behind from previous updates...') + if self.has_table('tmp_info'): + self.upgrade_log('Removing temp info tables left behind from previous updates...') # noinspection SqlResolve self.connection.action('DROP TABLE tmp_info') @@ -997,48 +987,48 @@ class ConvertInfoToIndexerScheme(db.SchemaUpgrade): # noinspection SqlResolve self.connection.action('DROP TABLE tmp_info') - self.incDBVersion() - return self.checkDBVersion() + self.inc_db_version() + return self.call_check_db_version() # 25 -> 26 class AddArchiveFirstMatchOption(db.SchemaUpgrade): def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) - if not self.hasColumn('tv_shows', 'archive_firstmatch'): - self.upgrade_log(u'Adding column archive_firstmatch to tv_shows') - self.addColumn('tv_shows', 'archive_firstmatch', 'NUMERIC', '0') + if not self.has_column('tv_shows', 'archive_firstmatch'): + self.upgrade_log('Adding column archive_firstmatch to tv_shows') + self.add_column('tv_shows', 'archive_firstmatch', 'NUMERIC', '0') - self.incDBVersion() - return self.checkDBVersion() + self.inc_db_version() + return self.call_check_db_version() # 26 -> 27 class AddSceneNumbering(db.SchemaUpgrade): def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) - if self.hasTable('scene_numbering'): + if self.has_table('scene_numbering'): self.connection.action('DROP TABLE scene_numbering') - self.upgrade_log(u'Upgrading table scene_numbering ...') + self.upgrade_log('Upgrading table scene_numbering ...') self.connection.action( 'CREATE TABLE scene_numbering (indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER,' ' scene_season INTEGER, scene_episode INTEGER,' ' PRIMARY KEY (indexer_id,season,episode))') - self.incDBVersion() - return self.checkDBVersion() + self.inc_db_version() + return self.call_check_db_version() # 27 -> 28 class ConvertIndexerToInteger(db.SchemaUpgrade): def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) cl = [] - self.upgrade_log(u'Converting Indexer to Integer ...') + self.upgrade_log('Converting Indexer to Integer ...') cl.append(['UPDATE tv_shows SET indexer = ? WHERE LOWER(indexer) = ?', ['1', 'tvdb']]) cl.append(['UPDATE tv_shows SET indexer = ? WHERE LOWER(indexer) = ?', ['2', 'tvrage']]) cl.append(['UPDATE tv_episodes SET indexer = ? 
WHERE LOWER(indexer) = ?', ['1', 'tvdb']]) @@ -1048,50 +1038,50 @@ class ConvertIndexerToInteger(db.SchemaUpgrade): self.connection.mass_action(cl) - self.incDBVersion() - return self.checkDBVersion() + self.inc_db_version() + return self.call_check_db_version() # 28 -> 29 class AddRequireAndIgnoreWords(db.SchemaUpgrade): # Adding column rls_require_words and rls_ignore_words to tv_shows def execute(self): - if self.hasColumn('tv_shows', 'rls_require_words') and self.hasColumn('tv_shows', 'rls_ignore_words'): - self.incDBVersion() - return self.checkDBVersion() + if self.has_column('tv_shows', 'rls_require_words') and self.has_column('tv_shows', 'rls_ignore_words'): + self.inc_db_version() + return self.call_check_db_version() db_backed_up = False - if not self.hasColumn('tv_shows', 'rls_require_words'): - self.upgrade_log(u'Adding column rls_require_words to tv_shows') - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + if not self.has_column('tv_shows', 'rls_require_words'): + self.upgrade_log('Adding column rls_require_words to tv_shows') + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db_backed_up = True - self.addColumn('tv_shows', 'rls_require_words', 'TEXT', '') + self.add_column('tv_shows', 'rls_require_words', 'TEXT', '') - if not self.hasColumn('tv_shows', 'rls_ignore_words'): - self.upgrade_log(u'Adding column rls_ignore_words to tv_shows') + if not self.has_column('tv_shows', 'rls_ignore_words'): + self.upgrade_log('Adding column rls_ignore_words to tv_shows') if not db_backed_up: - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) - self.addColumn('tv_shows', 'rls_ignore_words', 'TEXT', '') + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) + self.add_column('tv_shows', 'rls_ignore_words', 'TEXT', '') - self.incDBVersion() - return self.checkDBVersion() + self.inc_db_version() + return self.call_check_db_version() # 29 -> 30 class AddSportsOption(db.SchemaUpgrade): def execute(self): db_backed_up = False - if not self.hasColumn('tv_shows', 'sports'): - self.upgrade_log(u'Adding column sports to tv_shows') - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + if not self.has_column('tv_shows', 'sports'): + self.upgrade_log('Adding column sports to tv_shows') + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db_backed_up = True - self.addColumn('tv_shows', 'sports', 'NUMERIC', '0') + self.add_column('tv_shows', 'sports', 'NUMERIC', '0') - if self.hasColumn('tv_shows', 'air_by_date') and self.hasColumn('tv_shows', 'sports'): + if self.has_column('tv_shows', 'air_by_date') and self.has_column('tv_shows', 'sports'): # update sports column - self.upgrade_log(u'[4/4] Updating tv_shows to reflect the correct sports value...') + self.upgrade_log('[4/4] Updating tv_shows to reflect the correct sports value...') if not db_backed_up: - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) cl = [] history_quality = self.connection.select( 'SELECT * FROM tv_shows WHERE LOWER(classification) = "sports" AND air_by_date = 1 AND sports = 0') @@ -1101,162 +1091,162 @@ class AddSportsOption(db.SchemaUpgrade): cl.append(['UPDATE tv_shows SET air_by_date = 0 WHERE show_id = ?', [cur_entry['show_id']]]) self.connection.mass_action(cl) - self.incDBVersion() - return self.checkDBVersion() + self.inc_db_version() + 
return self.call_check_db_version() # 30 -> 31 class AddSceneNumberingToTvEpisodes(db.SchemaUpgrade): def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) - self.upgrade_log(u'Adding columns scene_season and scene_episode to tvepisodes') - self.addColumn('tv_episodes', 'scene_season', 'NUMERIC', 'NULL') - self.addColumn('tv_episodes', 'scene_episode', 'NUMERIC', 'NULL') + self.upgrade_log('Adding columns scene_season and scene_episode to tvepisodes') + self.add_column('tv_episodes', 'scene_season', 'NUMERIC', 'NULL') + self.add_column('tv_episodes', 'scene_episode', 'NUMERIC', 'NULL') - self.incDBVersion() - return self.checkDBVersion() + self.inc_db_version() + return self.call_check_db_version() # 31 -> 32 class AddAnimeTVShow(db.SchemaUpgrade): def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) - self.upgrade_log(u'Adding column anime to tv_episodes') - self.addColumn('tv_shows', 'anime', 'NUMERIC', '0') + self.upgrade_log('Adding column anime to tv_episodes') + self.add_column('tv_shows', 'anime', 'NUMERIC', '0') - self.incDBVersion() - return self.checkDBVersion() + self.inc_db_version() + return self.call_check_db_version() # 32 -> 33 class AddAbsoluteNumbering(db.SchemaUpgrade): def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) - self.upgrade_log(u'Adding column absolute_number to tv_episodes') - self.addColumn('tv_episodes', 'absolute_number', 'NUMERIC', '0') + self.upgrade_log('Adding column absolute_number to tv_episodes') + self.add_column('tv_episodes', 'absolute_number', 'NUMERIC', '0') - self.incDBVersion() - return self.checkDBVersion() + self.inc_db_version() + return self.call_check_db_version() # 33 -> 34 class AddSceneAbsoluteNumbering(db.SchemaUpgrade): def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) - self.upgrade_log(u'Adding columns absolute_number and scene_absolute_number to scene_numbering') - self.addColumn('scene_numbering', 'absolute_number', 'NUMERIC', '0') - self.addColumn('scene_numbering', 'scene_absolute_number', 'NUMERIC', '0') + self.upgrade_log('Adding columns absolute_number and scene_absolute_number to scene_numbering') + self.add_column('scene_numbering', 'absolute_number', 'NUMERIC', '0') + self.add_column('scene_numbering', 'scene_absolute_number', 'NUMERIC', '0') - self.incDBVersion() - return self.checkDBVersion() + self.inc_db_version() + return self.call_check_db_version() # 34 -> 35 class AddAnimeAllowlistBlocklist(db.SchemaUpgrade): def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) cl = [['CREATE TABLE allowlist (show_id INTEGER, range TEXT, keyword TEXT, indexer NUMERIC)'], ['CREATE TABLE blocklist (show_id INTEGER, range TEXT, keyword TEXT, indexer NUMERIC)']] - self.upgrade_log(u'Creating tables for anime allow and block lists') + self.upgrade_log('Creating tables for anime allow and block lists') self.connection.mass_action(cl) - self.incDBVersion() - return self.checkDBVersion() + 
self.inc_db_version() + return self.call_check_db_version() # 35 -> 36 class AddSceneAbsoluteNumbering2(db.SchemaUpgrade): def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) - self.upgrade_log(u'Adding column scene_absolute_number to tv_episodes') - self.addColumn('tv_episodes', 'scene_absolute_number', 'NUMERIC', '0') + self.upgrade_log('Adding column scene_absolute_number to tv_episodes') + self.add_column('tv_episodes', 'scene_absolute_number', 'NUMERIC', '0') - self.incDBVersion() - return self.checkDBVersion() + self.inc_db_version() + return self.call_check_db_version() # 36 -> 37 class AddXemRefresh(db.SchemaUpgrade): def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) - self.upgrade_log(u'Creating table xem_refresh') + self.upgrade_log('Creating table xem_refresh') self.connection.action( 'CREATE TABLE xem_refresh (indexer TEXT, indexer_id INTEGER PRIMARY KEY, last_refreshed INTEGER)') - self.incDBVersion() - return self.checkDBVersion() + self.inc_db_version() + return self.call_check_db_version() # 37 -> 38 class AddSceneToTvShows(db.SchemaUpgrade): def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) - self.upgrade_log(u'Adding column scene to tv_shows') - self.addColumn('tv_shows', 'scene', 'NUMERIC', '0') + self.upgrade_log('Adding column scene to tv_shows') + self.add_column('tv_shows', 'scene', 'NUMERIC', '0') - self.incDBVersion() - return self.checkDBVersion() + self.inc_db_version() + return self.call_check_db_version() # 38 -> 39 class AddIndexerMapping(db.SchemaUpgrade): def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) - if self.hasTable('indexer_mapping'): + if self.has_table('indexer_mapping'): self.connection.action('DROP TABLE indexer_mapping') - self.upgrade_log(u'Adding table indexer_mapping') + self.upgrade_log('Adding table indexer_mapping') self.connection.action( 'CREATE TABLE indexer_mapping (indexer_id INTEGER, indexer NUMERIC, mindexer_id INTEGER, mindexer NUMERIC,' ' PRIMARY KEY (indexer_id, indexer))') - self.incDBVersion() - return self.checkDBVersion() + self.inc_db_version() + return self.call_check_db_version() # 39 -> 40 class AddVersionToTvEpisodes(db.SchemaUpgrade): def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) - self.upgrade_log(u'Adding columns release_group and version to tv_episodes') - self.addColumn('tv_episodes', 'release_group', 'TEXT', '') - self.addColumn('tv_episodes', 'version', 'NUMERIC', '-1') + self.upgrade_log('Adding columns release_group and version to tv_episodes') + self.add_column('tv_episodes', 'release_group', 'TEXT', '') + self.add_column('tv_episodes', 'version', 'NUMERIC', '-1') - self.upgrade_log(u'Adding column version to history') - self.addColumn('history', 'version', 'NUMERIC', '-1') + self.upgrade_log('Adding column version to history') + self.add_column('history', 'version', 'NUMERIC', '-1') - self.incDBVersion() - return self.checkDBVersion() + self.inc_db_version() + 
return self.call_check_db_version() # 40 -> 10000 class BumpDatabaseVersion(db.SchemaUpgrade): def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) - self.upgrade_log(u'Bumping database version') + self.upgrade_log('Bumping database version') - return self.setDBVersion(10000) + return self.set_db_version(10000) # 41,42 -> 10001 class Migrate41(db.SchemaUpgrade): def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) - self.upgrade_log(u'Bumping database version') + self.upgrade_log('Bumping database version') - return self.setDBVersion(10001) + return self.set_db_version(10001) # 43,44 -> 10001 @@ -1266,153 +1256,153 @@ class Migrate43(db.SchemaUpgrade): db_backed_up = False db_chg = None table = 'tmdb_info' - if self.hasTable(table): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + if self.has_table(table): + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db_backed_up = True - self.upgrade_log(u'Dropping redundant table tmdb_info') + self.upgrade_log('Dropping redundant table tmdb_info') # noinspection SqlResolve self.connection.action('DROP TABLE [%s]' % table) db_chg = True - if self.hasColumn('tv_shows', 'tmdb_id'): + if self.has_column('tv_shows', 'tmdb_id'): if not db_backed_up: - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db_backed_up = True - self.upgrade_log(u'Dropping redundant tmdb_info refs') - self.dropColumn('tv_shows', 'tmdb_id') + self.upgrade_log('Dropping redundant tmdb_info refs') + self.drop_columns('tv_shows', 'tmdb_id') db_chg = True - if not self.hasTable('db_version'): + if not self.has_table('db_version'): if not db_backed_up: - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) self.connection.action('PRAGMA user_version = 0') self.connection.action('CREATE TABLE db_version (db_version INTEGER);') self.connection.action('INSERT INTO db_version (db_version) VALUES (0);') if not db_chg: - self.upgrade_log(u'Bumping database version') + self.upgrade_log('Bumping database version') - return self.setDBVersion(10001) + return self.set_db_version(10001) # 4301 -> 10002 class Migrate4301(db.SchemaUpgrade): def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) - self.upgrade_log(u'Bumping database version') + self.upgrade_log('Bumping database version') - return self.setDBVersion(10002) + return self.set_db_version(10002) # 4302,4400 -> 10003 class Migrate4302(db.SchemaUpgrade): def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) - self.upgrade_log(u'Bumping database version') + self.upgrade_log('Bumping database version') - return self.setDBVersion(10003) + return self.set_db_version(10003) # 5816 - 5818 -> 15 class MigrateUpstream(db.SchemaUpgrade): def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 
'sickbeard.db', self.call_check_db_version()) - self.upgrade_log(u'Migrate SickBeard db v%s into v15' % str(self.checkDBVersion()).replace('58', '')) + self.upgrade_log(f'Migrate SickBeard db v{self.call_check_db_version().replace("58", "")} into v15') - return self.setDBVersion(15) + return self.set_db_version(15) # 10000 -> 20000 class SickGearDatabaseVersion(db.SchemaUpgrade): def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) - self.upgrade_log(u'Bumping database version to new SickGear standards') + self.upgrade_log('Bumping database version to new SickGear standards') - return self.setDBVersion(20000) + return self.set_db_version(20000) # 10001 -> 10000 class RemoveDefaultEpStatusFromTvShows(db.SchemaUpgrade): def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) - self.upgrade_log(u'Dropping redundant column default_ep_status from tv_shows') - self.dropColumn('tv_shows', 'default_ep_status') + self.upgrade_log('Dropping redundant column default_ep_status from tv_shows') + self.drop_columns('tv_shows', 'default_ep_status') - return self.setDBVersion(10000) + return self.set_db_version(10000) # 10002 -> 10001 class RemoveMinorDBVersion(db.SchemaUpgrade): def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) - self.upgrade_log(u'Dropping redundant column db_minor_version from db_version') - self.dropColumn('db_version', 'db_minor_version') + self.upgrade_log('Dropping redundant column db_minor_version from db_version') + self.drop_columns('db_version', 'db_minor_version') - return self.setDBVersion(10001) + return self.set_db_version(10001) # 10003 -> 10002 class RemoveMetadataSub(db.SchemaUpgrade): def execute(self): - if self.hasColumn('tv_shows', 'sub_use_sr_metadata'): - self.upgrade_log(u'Dropping redundant column metadata sub') - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) - self.dropColumn('tv_shows', 'sub_use_sr_metadata') + if self.has_column('tv_shows', 'sub_use_sr_metadata'): + self.upgrade_log('Dropping redundant column metadata sub') + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) + self.drop_columns('tv_shows', 'sub_use_sr_metadata') - return self.setDBVersion(10002) + return self.set_db_version(10002) # 20000 -> 20001 class DBIncreaseTo20001(db.SchemaUpgrade): def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) - self.upgrade_log(u'Bumping database version to force a backup before new database code') + self.upgrade_log('Bumping database version to force a backup before new database code') self.connection.action('VACUUM') - self.upgrade_log(u'Performed a vacuum on the database', logger.DEBUG) + self.upgrade_log('Performed a vacuum on the database', logger.DEBUG) - return self.setDBVersion(20001) + return self.set_db_version(20001) # 20001 -> 20002 class AddTvShowOverview(db.SchemaUpgrade): def execute(self): - if not self.hasColumn('tv_shows', 'overview'): - self.upgrade_log(u'Adding column overview to tv_shows') - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) - 
self.addColumn('tv_shows', 'overview', 'TEXT', '') + if not self.has_column('tv_shows', 'overview'): + self.upgrade_log('Adding column overview to tv_shows') + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) + self.add_column('tv_shows', 'overview', 'TEXT', '') - return self.setDBVersion(20002) + return self.set_db_version(20002) # 20002 -> 20003 class AddTvShowTags(db.SchemaUpgrade): def execute(self): - if not self.hasColumn('tv_shows', 'tag'): - self.upgrade_log(u'Adding tag to tv_shows') + if not self.has_column('tv_shows', 'tag'): + self.upgrade_log('Adding tag to tv_shows') - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) - self.addColumn('tv_shows', 'tag', 'TEXT', 'Show List') + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) + self.add_column('tv_shows', 'tag', 'TEXT', 'Show List') - return self.setDBVersion(20003) + return self.set_db_version(20003) # 20003 -> 20004 class ChangeMapIndexer(db.SchemaUpgrade): def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) - if self.hasTable('indexer_mapping'): + if self.has_table('indexer_mapping'): self.connection.action('DROP TABLE indexer_mapping') - self.upgrade_log(u'Changing table indexer_mapping') + self.upgrade_log('Changing table indexer_mapping') self.connection.action( 'CREATE TABLE indexer_mapping (indexer_id INTEGER, indexer NUMERIC, mindexer_id INTEGER NOT NULL,' ' mindexer NUMERIC, date NUMERIC NOT NULL DEFAULT 0, status INTEGER NOT NULL DEFAULT 0,' @@ -1420,22 +1410,22 @@ class ChangeMapIndexer(db.SchemaUpgrade): self.connection.action('CREATE INDEX IF NOT EXISTS idx_mapping ON indexer_mapping (indexer_id, indexer)') - if not self.hasColumn('info', 'last_run_backlog'): + if not self.has_column('info', 'last_run_backlog'): self.upgrade_log('Adding last_run_backlog to info') - self.addColumn('info', 'last_run_backlog', 'NUMERIC', 1) + self.add_column('info', 'last_run_backlog', 'NUMERIC', 1) - self.upgrade_log(u'Moving table scene_exceptions from cache.db to sickbeard.db') - if self.hasTable('scene_exceptions_refresh'): + self.upgrade_log('Moving table scene_exceptions from cache.db to sickbeard.db') + if self.has_table('scene_exceptions_refresh'): self.connection.action('DROP TABLE scene_exceptions_refresh') self.connection.action('CREATE TABLE scene_exceptions_refresh (list TEXT PRIMARY KEY, last_refreshed INTEGER)') - if self.hasTable('scene_exceptions'): + if self.has_table('scene_exceptions'): self.connection.action('DROP TABLE scene_exceptions') self.connection.action('CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY,' ' indexer_id INTEGER KEY, show_name TEXT, season NUMERIC, custom NUMERIC)') try: cachedb = db.DBConnection(filename='cache.db') - if cachedb.hasTable('scene_exceptions'): + if cachedb.has_table('scene_exceptions'): sql_result = cachedb.action('SELECT * FROM scene_exceptions') cs = [] for cur_result in sql_result: @@ -1454,7 +1444,7 @@ class ChangeMapIndexer(db.SchemaUpgrade): 'scene_exceptions', 'scene_exceptions_refresh', 'info', 'indexer_mapping', 'db_version', 'history', 'imdb_info', 'lastUpdate', 'scene_numbering', 'tv_episodes', 'tv_shows', 'xem_refresh'} - current_tables = set(self.listTables()) + current_tables = set(self.list_tables()) remove_tables = list(current_tables - keep_tables) for table in remove_tables: # noinspection SqlResolve @@ -1462,97 +1452,97 @@ 
class ChangeMapIndexer(db.SchemaUpgrade): self.connection.action('VACUUM') - return self.setDBVersion(20004) + return self.set_db_version(20004) # 20004 -> 20005 class AddShowNotFoundCounter(db.SchemaUpgrade): def execute(self): - if not self.hasTable('tv_shows_not_found'): - self.upgrade_log(u'Adding table tv_shows_not_found') + if not self.has_table('tv_shows_not_found'): + self.upgrade_log('Adding table tv_shows_not_found') - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) self.connection.action( 'CREATE TABLE tv_shows_not_found (indexer NUMERIC NOT NULL, indexer_id NUMERIC NOT NULL,' ' fail_count NUMERIC NOT NULL DEFAULT 0, last_check NUMERIC NOT NULL, last_success NUMERIC,' ' PRIMARY KEY (indexer_id, indexer))') - return self.setDBVersion(20005) + return self.set_db_version(20005) # 20005 -> 20006 class AddFlagTable(db.SchemaUpgrade): def execute(self): - if not self.hasTable('flags'): - self.upgrade_log(u'Adding table flags') + if not self.has_table('flags'): + self.upgrade_log('Adding table flags') - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) self.connection.action('CREATE TABLE flags (flag PRIMARY KEY NOT NULL )') - return self.setDBVersion(20006) + return self.set_db_version(20006) # 20006 -> 20007 class DBIncreaseTo20007(db.SchemaUpgrade): def execute(self): - self.upgrade_log(u'Bumping database version') + self.upgrade_log('Bumping database version') - return self.setDBVersion(20007) + return self.set_db_version(20007) # 20007 -> 20008 class AddWebdlTypesTable(db.SchemaUpgrade): def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) self.connection.action('CREATE TABLE webdl_types (dname TEXT NOT NULL , regex TEXT NOT NULL )') - return self.setDBVersion(20008) + return self.set_db_version(20008) # 20008 -> 20009 class AddWatched(db.SchemaUpgrade): def execute(self): # remove old table from version 20007 - if self.hasTable('tv_episodes_watched') and not self.hasColumn('tv_episodes_watched', 'clientep_id'): + if self.has_table('tv_episodes_watched') and not self.has_column('tv_episodes_watched', 'clientep_id'): self.connection.action('DROP TABLE tv_episodes_watched') self.connection.action('VACUUM') - if not self.hasTable('tv_episodes_watched'): - self.upgrade_log(u'Adding table tv_episodes_watched') + if not self.has_table('tv_episodes_watched'): + self.upgrade_log('Adding table tv_episodes_watched') - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) self.connection.action( 'CREATE TABLE tv_episodes_watched (tvep_id NUMERIC NOT NULL, clientep_id TEXT, label TEXT,' ' played NUMERIC DEFAULT 0 NOT NULL, date_watched NUMERIC NOT NULL, date_added NUMERIC,' ' status NUMERIC, location TEXT, file_size NUMERIC, hide INT default 0 not null)' ) - return self.setDBVersion(20009) + return self.set_db_version(20009) # 20009 -> 20010 class AddPrune(db.SchemaUpgrade): def execute(self): - if not self.hasColumn('tv_shows', 'prune'): + if not self.has_column('tv_shows', 'prune'): self.upgrade_log('Adding prune to tv_shows') - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) - self.addColumn('tv_shows', 'prune', 
'INT', 0) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) + self.add_column('tv_shows', 'prune', 'INT', 0) - return self.setDBVersion(20010) + return self.set_db_version(20010) # 20010 -> 20011 class AddIndexerToTables(db.SchemaUpgrade): def execute(self): sickgear.helpers.upgrade_new_naming() - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) show_ids = {s['prod_id']: s['tv_id'] for s in self.connection.select('SELECT indexer AS tv_id, indexer_id AS prod_id FROM tv_shows')} - allowtbl, blocktbl = (('allow', 'block'), ('white', 'black'))[not self.connection.hasTable('blocklist')] + allowtbl, blocktbl = (('allow', 'block'), ('white', 'black'))[not self.connection.has_table('blocklist')] allowtbl, blocktbl = '%slist' % allowtbl, '%slist' % blocktbl columns = {allowtbl: 'show_id, range, keyword, indexer', blocktbl: 'show_id, range, keyword, indexer', @@ -1562,9 +1552,9 @@ class AddIndexerToTables(db.SchemaUpgrade): # add missing indexer column for t in [(allowtbl, 'show_id'), (blocktbl, 'show_id'), ('history', 'showid'), ('scene_exceptions', 'indexer_id')]: - if not self.hasColumn(t[0], 'indexer'): - self.upgrade_log(u'Adding TV info support to %s table' % t[0]) - self.addColumn(t[0], 'indexer') + if not self.has_column(t[0], 'indexer'): + self.upgrade_log(f'Adding TV info support to {t[0]} table') + self.add_column(t[0], 'indexer') cl = [] for s_id, i in iteritems(show_ids): # noinspection SqlResolve @@ -1580,11 +1570,11 @@ class AddIndexerToTables(db.SchemaUpgrade): if 0 < self.connection.connection.total_changes: self.upgrade_log('Removed orphaned data from %s' % t[0]) - if self.connection.hasTable('backup_%s' % t[0]): + if self.connection.has_table('backup_%s' % t[0]): self.upgrade_log('Adding backup data to %s' % t[0]) self.connection.action('REPLACE INTO %s SELECT %s FROM %s' % ('%s (%s)' % (t[0], columns[t[0]]), columns[t[0]], 'backup_%s' % t[0])) - self.connection.removeTable('backup_%s' % t[0]) + self.connection.remove_table('backup_%s' % t[0]) # recreate tables that have wrong primary key = indexer_id without indexer self.upgrade_log('Adding TV info support to scene_numbering') @@ -1628,7 +1618,7 @@ class AddIndexerToTables(db.SchemaUpgrade): self.connection.mass_action(cl) self.connection.action('CREATE INDEX idx_id_indexer_imdb_info ON imdb_info (indexer,indexer_id)') - if self.connection.hasTable('backup_imdb_info'): + if self.connection.has_table('backup_imdb_info'): self.upgrade_log('Adding backup data to imdb_info') # noinspection SqlResolve self.connection.action('REPLACE INTO imdb_info (indexer, indexer_id, imdb_id, title, year, akas, ' @@ -1636,29 +1626,29 @@ class AddIndexerToTables(db.SchemaUpgrade): 'last_update) SELECT indexer, indexer_id, imdb_id, title, year, akas, runtimes, ' 'genres, countries, country_codes, certificates, rating, votes, last_update ' 'FROM backup_imdb_info') - self.connection.removeTable('backup_imdb_info') + self.connection.remove_table('backup_imdb_info') # remove an index of an no longer existing column self.upgrade_log('Changing/Re-Creating Indexes') - if self.connection.hasIndex('tv_shows', 'idx_tvdb_id'): - self.connection.removeIndex('tv_shows', 'idx_tvdb_id') + if self.connection.has_index('tv_shows', 'idx_tvdb_id'): + self.connection.remove_index('tv_shows', 'idx_tvdb_id') - if self.connection.hasIndex('tv_shows', 'idx_indexer_id'): - self.connection.removeIndex('tv_shows', 'idx_indexer_id') + 
if self.connection.has_index('tv_shows', 'idx_indexer_id'): + self.connection.remove_index('tv_shows', 'idx_indexer_id') self.connection.action('CREATE UNIQUE INDEX idx_indexer_id ON tv_shows (indexer,indexer_id)') - if self.connection.hasIndex('tv_episodes', 'idx_showid'): - self.connection.removeIndex('tv_episodes', 'idx_showid') + if self.connection.has_index('tv_episodes', 'idx_showid'): + self.connection.remove_index('tv_episodes', 'idx_showid') - if self.connection.hasIndex('tv_episodes', 'idx_tv_episodes_showid_airdate'): - self.connection.removeIndex('tv_episodes', 'idx_tv_episodes_showid_airdate') + if self.connection.has_index('tv_episodes', 'idx_tv_episodes_showid_airdate'): + self.connection.remove_index('tv_episodes', 'idx_tv_episodes_showid_airdate') self.connection.action('CREATE INDEX idx_tv_episodes_showid_airdate ON tv_episodes(indexer,showid,airdate)') - if not self.connection.hasIndex('tv_episodes', 'idx_tv_episodes_unique'): + if not self.connection.has_index('tv_episodes', 'idx_tv_episodes_unique'): self.connection.action('CREATE UNIQUE INDEX idx_tv_episodes_unique ON ' 'tv_episodes(indexer,showid,season,episode)') - if self.connection.hasTable('backup_tv_episodes'): + if self.connection.has_table('backup_tv_episodes'): self.upgrade_log('Adding backup data to tv_episodes') # noinspection SqlResolve self.connection.action('REPLACE INTO tv_episodes (episode_id, showid, indexerid, indexer, name, season, ' @@ -1670,9 +1660,9 @@ class AddIndexerToTables(db.SchemaUpgrade): 'file_size, release_name, subtitles, subtitles_searchcount, subtitles_lastsearch, ' 'is_proper, scene_season, scene_episode, absolute_number, scene_absolute_number, ' 'release_group, version FROM backup_tv_episodes') - self.connection.removeTable('backup_tv_episodes') + self.connection.remove_table('backup_tv_episodes') - if self.connection.hasTable('backup_tv_shows'): + if self.connection.has_table('backup_tv_shows'): self.upgrade_log('Adding backup data to tv_shows') # noinspection SqlResolve self.connection.action('REPLACE INTO tv_shows (show_id, indexer_id, indexer, show_name, location, ' @@ -1686,25 +1676,25 @@ class AddIndexerToTables(db.SchemaUpgrade): 'notify_list, imdb_id, last_update_indexer, dvdorder, archive_firstmatch, ' 'rls_require_words, rls_ignore_words, sports, anime, scene, overview, tag, prune ' 'FROM backup_tv_shows') - self.connection.removeTable('backup_tv_shows') + self.connection.remove_table('backup_tv_shows') self.connection.action('VACUUM') - return self.setDBVersion(20011) + return self.set_db_version(20011) # 20011 -> 20012 class AddShowExludeGlobals(db.SchemaUpgrade): def execute(self): - if not self.hasColumn('tv_shows', 'rls_global_exclude_ignore'): + if not self.has_column('tv_shows', 'rls_global_exclude_ignore'): self.upgrade_log('Adding rls_global_exclude_ignore, rls_global_exclude_require to tv_shows') - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) - self.addColumn('tv_shows', 'rls_global_exclude_ignore', data_type='TEXT', default='') - self.addColumn('tv_shows', 'rls_global_exclude_require', data_type='TEXT', default='') + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) + self.add_column('tv_shows', 'rls_global_exclude_ignore', data_type='TEXT', default='') + self.add_column('tv_shows', 'rls_global_exclude_require', data_type='TEXT', default='') - if self.hasTable('tv_shows_exclude_backup'): + if self.has_table('tv_shows_exclude_backup'): self.upgrade_log('Adding rls_global_exclude_ignore, 
rls_global_exclude_require from backup to tv_shows') # noinspection SqlResolve self.connection.mass_action([['UPDATE tv_shows SET rls_global_exclude_ignore = ' @@ -1719,15 +1709,15 @@ class AddShowExludeGlobals(db.SchemaUpgrade): ['DROP TABLE tv_shows_exclude_backup'] ]) - return self.setDBVersion(20012) + return self.set_db_version(20012) # 20012 -> 20013 class RenameAllowBlockListTables(db.SchemaUpgrade): def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) - if not self.connection.hasTable('blocklist'): + if not self.connection.has_table('blocklist'): self.upgrade_log('Renaming allow/block list tables') for old, new in (('black', 'block'), ('white', 'allow')): @@ -1740,19 +1730,19 @@ class RenameAllowBlockListTables(db.SchemaUpgrade): ['DROP TABLE tmp_%slist' % new] ]) - return self.setDBVersion(20013) + return self.set_db_version(20013) # 20013 -> 20014 class AddHistoryHideColumn(db.SchemaUpgrade): def execute(self): - db.backup_database(self.connection , 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) - if not self.hasColumn('history', 'hide'): + if not self.has_column('history', 'hide'): self.upgrade_log('Adding hide column to history') - self.addColumn('history', 'hide', default=0, set_default=True) + self.add_column('history', 'hide', default=0, set_default=True) - if self.hasTable('history_hide_backup'): + if self.has_table('history_hide_backup'): self.upgrade_log('Restoring hide status in history from backup') # noinspection SqlResolve self.connection.mass_action([ @@ -1767,30 +1757,30 @@ class AddHistoryHideColumn(db.SchemaUpgrade): ['DROP TABLE history_hide_backup'] ]) - return self.setDBVersion(20014) + return self.set_db_version(20014) # 20014 -> 20015 class ChangeShowData(db.SchemaUpgrade): def execute(self): - db.backup_database(self.connection, 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) self.upgrade_log('Adding new data columns to tv_shows') - self.addColumns('tv_shows', [('timezone', 'TEXT', ''), ('airtime', 'NUMERIC'), - ('network_country', 'TEXT', ''), ('network_country_code', 'TEXT', ''), - ('network_id', 'NUMERIC'), ('network_is_stream', 'INTEGER'), - ('src_update_timestamp', 'INTEGER')]) + self.add_columns('tv_shows', [('timezone', 'TEXT', ''), ('airtime', 'NUMERIC'), + ('network_country', 'TEXT', ''), ('network_country_code', 'TEXT', ''), + ('network_id', 'NUMERIC'), ('network_is_stream', 'INTEGER'), + ('src_update_timestamp', 'INTEGER')]) self.upgrade_log('Adding new data columns to tv_episodes') - self.addColumns('tv_episodes', [('timezone', 'TEXT', ''), ('airtime', 'NUMERIC'), - ('runtime', 'NUMERIC', 0), ('timestamp', 'NUMERIC'), - ('network', 'TEXT', ''), ('network_country', 'TEXT', ''), - ('network_country_code', 'TEXT', ''), ('network_id', 'NUMERIC'), - ('network_is_stream', 'INTEGER')]) + self.add_columns('tv_episodes', [('timezone', 'TEXT', ''), ('airtime', 'NUMERIC'), + ('runtime', 'NUMERIC', 0), ('timestamp', 'NUMERIC'), + ('network', 'TEXT', ''), ('network_country', 'TEXT', ''), + ('network_country_code', 'TEXT', ''), ('network_id', 'NUMERIC'), + ('network_is_stream', 'INTEGER')]) - if not self.hasColumn('imdb_info', 'is_mini_series'): + if not self.has_column('imdb_info', 'is_mini_series'): self.upgrade_log('Adding new data columns to imdb_info') - 
self.addColumns('imdb_info', [('is_mini_series', 'INTEGER', 0), ('episode_count', 'NUMERIC')]) + self.add_columns('imdb_info', [('is_mini_series', 'INTEGER', 0), ('episode_count', 'NUMERIC')]) self.upgrade_log('Adding Character and Persons tables') @@ -1986,7 +1976,7 @@ class ChangeShowData(db.SchemaUpgrade): self.connection.mass_action(cl) self.connection.action('VACUUM') - return self.setDBVersion(20015) + return self.set_db_version(20015) # 20015 -> 20016 @@ -1999,7 +1989,7 @@ class ChangeTmdbID(db.SchemaUpgrade): self.upgrade_log('Renaming tmdb images') # noinspection PyProtectedMember for _dir in (ImageCache._persons_dir(), ImageCache._characters_dir()): - for _f in ek.ek(scantree, _dir): # type: DirEntry + for _f in scantree(_dir): # type: DirEntry if not _f.is_file(follow_symlinks=False): continue try: @@ -2010,14 +2000,14 @@ class ChangeTmdbID(db.SchemaUpgrade): continue try: move_file(_f.path, - ek.ek(os.path.join, ek.ek(os.path.dirname, _f.path), + os.path.join(os.path.dirname(_f.path), re.sub('^%s-' % img_src, '%s-' % cache_img_src[(img_src, TVINFO_TMDB)[TVINFO_TMDB_OLD == img_src]], _f.name))) except (BaseException, Exception): pass - db.backup_database(self.connection, 'sickbeard.db', self.checkDBVersion()) - has_tmdb_backups = all(self.hasTable(_r) for _r in + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) + has_tmdb_backups = all(self.has_table(_r) for _r in ('backup_tmdb_tv_shows', 'backup_tmdb_tv_episodes', 'backup_tmdb_indexer_mapping')) if has_tmdb_backups: self.upgrade_log('Checking for dupe shows in backup tables') @@ -2093,8 +2083,8 @@ class ChangeTmdbID(db.SchemaUpgrade): ['REPLACE INTO indexer_mapping (indexer_id, indexer, mindexer_id, mindexer, date, status)' ' SELECT indexer_id, indexer, mindexer_id, mindexer, date, status FROM backup_tmdb_indexer_mapping'], ])[has_tmdb_backups]) - [self.connection.removeTable(_t) for _t in ('backup_tmdb_tv_shows', 'backup_tmdb_tv_episodes', + [self.connection.remove_table(_t) for _t in ('backup_tmdb_tv_shows', 'backup_tmdb_tv_episodes', 'backup_tmdb_indexer_mapping')] - return self.setDBVersion(20016) + return self.set_db_version(20016) diff --git a/sickgear/db.py b/sickgear/db.py index 5cfc0fc5..b20485fa 100644 --- a/sickgear/db.py +++ b/sickgear/db.py @@ -24,21 +24,20 @@ import sqlite3 import threading import time -# noinspection PyPep8Naming -import encodingKludge as ek from exceptions_helper import ex import sickgear from . import logger, sgdatetime -from .sgdatetime import timestamp_near +from .sgdatetime import SGDatetime from sg_helpers import make_path, compress_file, remove_file_perm, scantree -from _23 import filter_iter, filter_list, list_values, scandir +from _23 import scandir from six import iterkeys, iteritems, itervalues # noinspection PyUnreachableCode if False: + # noinspection PyUnresolvedReferences from typing import Any, AnyStr, Dict, List, Optional, Tuple, Union @@ -49,7 +48,7 @@ db_support_upsert = (3, 25, 0) <= sqlite3.sqlite_version_info # type: bool db_supports_backup = hasattr(sqlite3.Connection, 'backup') and (3, 6, 11) <= sqlite3.sqlite_version_info # type: bool -def dbFilename(filename='sickbeard.db', suffix=None): +def db_filename(filename='sickbeard.db', suffix=None): # type: (AnyStr, Optional[AnyStr]) -> AnyStr """ @param filename: The sqlite database filename to use. 
If not specified, @@ -60,7 +59,7 @@ def dbFilename(filename='sickbeard.db', suffix=None): """ if suffix: filename = '%s.%s' % (filename, suffix) - return ek.ek(os.path.join, sickgear.DATA_DIR, filename) + return os.path.join(sickgear.DATA_DIR, filename) def mass_upsert_sql(table_name, value_dict, key_dict, sanitise=True): @@ -72,7 +71,7 @@ def mass_upsert_sql(table_name, value_dict, key_dict, sanitise=True): :param value_dict: dict of values to be set {'table_fieldname': value} :param key_dict: dict of restrains for update {'table_fieldname': value} :param sanitise: True to remove k, v pairs in keyDict from valueDict as they must not exist in both. - This option has a performance hit so it's best to remove key_dict keys from value_dict and set this False instead. + This option has a performance hit, so it's best to remove key_dict keys from value_dict and set this False instead. :type sanitise: Boolean :return: list of 2 sql command """ @@ -82,12 +81,12 @@ def mass_upsert_sql(table_name, value_dict, key_dict, sanitise=True): # sanity: remove k, v pairs in keyDict from valueDict if sanitise: - value_dict = dict(filter_iter(lambda k: k[0] not in key_dict, iteritems(value_dict))) + value_dict = dict(filter(lambda k: k[0] not in key_dict, iteritems(value_dict))) # noinspection SqlResolve cl.append(['UPDATE [%s] SET %s WHERE %s' % (table_name, ', '.join(gen_params(value_dict)), ' AND '.join(gen_params(key_dict))), - list_values(value_dict) + list_values(key_dict)]) + list(value_dict.values()) + list(key_dict.values())]) # noinspection SqlResolve cl.append(['INSERT INTO [' + table_name + '] (' + @@ -106,9 +105,9 @@ class DBConnection(object): from . import helpers self.new_db = False - db_src = dbFilename(filename) + db_src = db_filename(filename) if not os.path.isfile(db_src): - db_alt = dbFilename('sickrage.db') + db_alt = db_filename('sickrage.db') if os.path.isfile(db_alt): helpers.copy_file(db_alt, db_src) @@ -133,20 +132,21 @@ class DBConnection(object): :return: success, message """ if not db_supports_backup: - logger.log('this python sqlite3 version doesn\'t support backups', logger.DEBUG) + logger.debug('this python sqlite3 version doesn\'t support backups') return False, 'this python sqlite3 version doesn\'t support backups' - if not ek.ek(os.path.isdir, target): - logger.log('Backup target invalid', logger.ERROR) + if not os.path.isdir(target): + logger.error('Backup target invalid') return False, 'Backup target invalid' - target_db = ek.ek(os.path.join, target, (backup_filename, self.filename)[None is backup_filename]) - if ek.ek(os.path.exists, target_db): - logger.log('Backup target file already exists', logger.ERROR) + target_db = os.path.join(target, (backup_filename, self.filename)[None is backup_filename]) + if os.path.exists(target_db): + logger.error('Backup target file already exists') return False, 'Backup target file already exists' + # noinspection PyUnusedLocal def progress(status, remaining, total): - logger.log('Copied %s of %s pages...' % (total - remaining, total), logger.DEBUG) + logger.debug('Copied %s of %s pages...' 
% (total - remaining, total)) backup_con = None @@ -156,9 +156,9 @@ class DBConnection(object): with backup_con: with db_lock: self.connection.backup(backup_con, progress=progress) - logger.log('%s backup successful' % self.filename, logger.DEBUG) + logger.debug('%s backup successful' % self.filename) except sqlite3.Error as error: - logger.log("Error while taking backup: %s" % ex(error), logger.ERROR) + logger.error("Error while taking backup: %s" % ex(error)) return False, 'Backup failed' finally: if backup_con: @@ -169,11 +169,11 @@ class DBConnection(object): return True, 'Backup successful' - def checkDBVersion(self): + def check_db_version(self): # type: (...) -> int try: - if self.hasTable('db_version'): + if self.has_table('db_version'): result = self.select('SELECT db_version FROM db_version') else: version = self.select('PRAGMA user_version')[0]['user_version'] @@ -187,7 +187,7 @@ class DBConnection(object): if result: version = int(result[0]['db_version']) - if 10000 > version and self.hasColumn('db_version', 'db_minor_version'): + if 10000 > version and self.has_column('db_version', 'db_minor_version'): # noinspection SqlResolve minor = self.select('SELECT db_minor_version FROM db_version') return version * 100 + int(minor[0]['db_minor_version']) @@ -226,8 +226,8 @@ class DBConnection(object): self.connection.commit() if 0 < affected: - logger.debug(u'Transaction with %s queries executed affected at least %i row%s' % ( - len(queries), affected, helpers.maybe_plural(affected))) + logger.debug(f'Transaction with {len(queries)} queries executed affected at least {affected:d}' + f' row{helpers.maybe_plural(affected)}') return sql_result except sqlite3.OperationalError as e: sql_result = [] @@ -239,7 +239,7 @@ class DBConnection(object): except sqlite3.DatabaseError as e: if self.connection: self.connection.rollback() - logger.error(u'Fatal error executing query: ' + ex(e)) + logger.error(f'Fatal error executing query: {ex(e)}') raise return sql_result @@ -248,10 +248,10 @@ class DBConnection(object): def action_error(e): if 'unable to open database file' in e.args[0] or 'database is locked' in e.args[0]: - logger.log(u'DB error: ' + ex(e), logger.WARNING) + logger.warning(f'DB error: {ex(e)}') time.sleep(1) return True - logger.log(u'DB error: ' + ex(e), logger.ERROR) + logger.error(f'DB error: {ex(e)}') def action(self, query, args=None): # type: (AnyStr, Optional[List, Tuple]) -> Optional[Union[List, sqlite3.Cursor]] @@ -280,7 +280,7 @@ class DBConnection(object): raise attempt += 1 except sqlite3.DatabaseError as e: - logger.log(u'Fatal error executing query: ' + ex(e), logger.ERROR) + logger.error(f'Fatal error executing query: {ex(e)}') raise return sql_result @@ -306,16 +306,16 @@ class DBConnection(object): query = 'UPDATE [%s] SET %s WHERE %s' % ( table_name, ', '.join(gen_params(value_dict)), ' AND '.join(gen_params(key_dict))) - self.action(query, list_values(value_dict) + list_values(key_dict)) + self.action(query, list(value_dict.values()) + list(key_dict.values())) if self.connection.total_changes == changes_before: # noinspection SqlResolve query = 'INSERT INTO [' + table_name + ']' \ + ' (%s)' % ', '.join(itertools.chain(iterkeys(value_dict), iterkeys(key_dict))) \ + ' VALUES (%s)' % ', '.join(['?'] * (len(value_dict) + len(key_dict))) - self.action(query, list_values(value_dict) + list_values(key_dict)) + self.action(query, list(value_dict.values()) + list(key_dict.values())) - def tableInfo(self, table_name): + def table_info(self, table_name): # type: (AnyStr) -> 
Dict[AnyStr, Dict[AnyStr, AnyStr]] # FIXME ? binding is not supported here, but I cannot find a way to escape a string manually @@ -333,38 +333,32 @@ class DBConnection(object): d[col[0]] = row[idx] return d - def hasTable(self, table_name): + def has_table(self, table_name): # type: (AnyStr) -> bool return 0 < len(self.select('SELECT 1 FROM sqlite_master WHERE name = ?;', (table_name,))) - def hasColumn(self, table_name, column): + def has_column(self, table_name, column): # type: (AnyStr, AnyStr) -> bool - return column in self.tableInfo(table_name) + return column in self.table_info(table_name) - def hasIndex(self, table_name, index): + def has_index(self, table_name, index): # type: (AnyStr, AnyStr) -> bool - sqlResults = self.select('PRAGMA index_list([%s])' % table_name) - for result in sqlResults: + sql_results = self.select('PRAGMA index_list([%s])' % table_name) + for result in sql_results: if result['name'] == index: return True return False - def removeIndex(self, table, name): + def remove_index(self, table, name): # type: (AnyStr, AnyStr) -> None - if self.hasIndex(table, name): + if self.has_index(table, name): self.action('DROP INDEX' + ' [%s]' % name) - def removeTable(self, name): + def remove_table(self, name): # type: (AnyStr) -> None - if self.hasTable(name): + if self.has_table(name): self.action('DROP TABLE' + ' [%s]' % name) - # noinspection SqlResolve - def addColumn(self, table, column, data_type='NUMERIC', default=0): - # type: (AnyStr, AnyStr, AnyStr, Any) -> None - self.action('ALTER TABLE [%s] ADD %s %s' % (table, column, data_type)) - self.action('UPDATE [%s] SET %s = ?' % (table, column), (default,)) - def has_flag(self, flag_name): # type: (AnyStr) -> bool sql_result = self.select('SELECT flag FROM flags WHERE flag = ?', [flag_name]) @@ -417,7 +411,7 @@ class DBConnection(object): logger.load_log('Upgrading %s' % self.filename, to_log, log_level) -def sanityCheckDatabase(connection, sanity_check): +def sanity_check_db(connection, sanity_check): sanity_check(connection).check() @@ -429,36 +423,36 @@ class DBSanityCheck(object): pass -def upgradeDatabase(connection, schema): - logger.log(u'Checking database structure...', logger.MESSAGE) +def upgrade_database(connection, schema): + logger.log('Checking database structure...', logger.MESSAGE) connection.is_upgrading = False - connection.new_db = 0 == connection.checkDBVersion() - _processUpgrade(connection, schema) + connection.new_db = 0 == connection.check_db_version() + _process_upgrade(connection, schema) if connection.is_upgrading: connection.upgrade_log('Finished') -def prettyName(class_name): +def _pretty_name(class_name): # type: (AnyStr) -> AnyStr return ' '.join([x.group() for x in re.finditer('([A-Z])([a-z0-9]+)', class_name)]) -def restoreDatabase(filename, version): - logger.log(u'Restoring database before trying upgrade again') - if not sickgear.helpers.restore_versioned_file(dbFilename(filename=filename, suffix='v%s' % version), version): - logger.log_error_and_exit(u'Database restore failed, abort upgrading database') +def _restore_database(filename, version): + logger.log('Restoring database before trying upgrade again') + if not sickgear.helpers.restore_versioned_file(db_filename(filename=filename, suffix='v%s' % version), version): + logger.log_error_and_exit('Database restore failed, abort upgrading database') return False return True -def _processUpgrade(connection, upgrade_class): +def _process_upgrade(connection, upgrade_class): instance = upgrade_class(connection) - logger.log('Checking 
%s database upgrade' % prettyName(upgrade_class.__name__), logger.DEBUG) + logger.debug('Checking %s database upgrade' % _pretty_name(upgrade_class.__name__)) if not instance.test(): connection.is_upgrading = True - connection.upgrade_log(getattr(upgrade_class, 'pretty_name', None) or prettyName(upgrade_class.__name__)) - logger.log('Database upgrade required: %s' % prettyName(upgrade_class.__name__), logger.MESSAGE) - db_version = connection.checkDBVersion() + connection.upgrade_log(getattr(upgrade_class, 'pretty_name', None) or _pretty_name(upgrade_class.__name__)) + logger.log('Database upgrade required: %s' % _pretty_name(upgrade_class.__name__), logger.MESSAGE) + db_version = connection.check_db_version() try: # only do backup if it's not a new db 0 < db_version and backup_database(connection, connection.filename, db_version) @@ -470,19 +464,19 @@ def _processUpgrade(connection, upgrade_class): # close db before attempting restore connection.close() - if restoreDatabase(connection.filename, db_version): + if _restore_database(connection.filename, db_version): logger.log_error_and_exit('Successfully restored database version: %s' % db_version) else: logger.log_error_and_exit('Failed to restore database version: %s' % db_version) else: logger.log_error_and_exit('Database upgrade failed, can\'t determine old db version, not restoring.') - logger.log('%s upgrade completed' % upgrade_class.__name__, logger.DEBUG) + logger.debug('%s upgrade completed' % upgrade_class.__name__) else: - logger.log('%s upgrade not required' % upgrade_class.__name__, logger.DEBUG) + logger.debug('%s upgrade not required' % upgrade_class.__name__) for upgradeSubClass in upgrade_class.__subclasses__(): - _processUpgrade(connection, upgradeSubClass) + _process_upgrade(connection, upgradeSubClass) # Base migration class. All future DB changes should be subclassed from this class @@ -490,11 +484,11 @@ class SchemaUpgrade(object): def __init__(self, connection, **kwargs): self.connection = connection - def hasTable(self, table_name): + def has_table(self, table_name): return 0 < len(self.connection.select('SELECT 1 FROM sqlite_master WHERE name = ?;', (table_name,))) - def hasColumn(self, table_name, column): - return column in self.connection.tableInfo(table_name) + def has_column(self, table_name, column): + return column in self.connection.table_info(table_name) def list_tables(self): # type: (...) -> List[AnyStr] @@ -513,13 +507,13 @@ class SchemaUpgrade(object): ['index'])] # noinspection SqlResolve - def addColumn(self, table, column, data_type='NUMERIC', default=0, set_default=False): + def add_column(self, table, column, data_type='NUMERIC', default=0, set_default=False): self.connection.action('ALTER TABLE [%s] ADD %s %s%s' % (table, column, data_type, ('', ' DEFAULT "%s"' % default)[set_default])) self.connection.action('UPDATE [%s] SET %s = ?' 
% (table, column), (default,)) # noinspection SqlResolve - def addColumns(self, table, column_list=None): + def add_columns(self, table, column_list=None): # type: (AnyStr, List) -> None if isinstance(column_list, list): sql = [] @@ -537,25 +531,21 @@ class SchemaUpgrade(object): if sql: self.connection.mass_action(sql) - def dropColumn(self, table, columns): - # type: (AnyStr, AnyStr) -> None - self.drop_columns(table, columns) - def drop_columns(self, table, column): # type: (AnyStr, Union[AnyStr, List[AnyStr]]) -> None # get old table columns and store the ones we want to keep result = self.connection.select('pragma table_info([%s])' % table) columns_list = ([column], column)[isinstance(column, list)] - keptColumns = filter_list(lambda col: col['name'] not in columns_list, result) + kept_columns = list(filter(lambda col: col['name'] not in columns_list, result)) - keptColumnsNames = [] + kept_columns_names = [] final = [] pk = [] # copy the old table schema, column by column - for column in keptColumns: + for column in kept_columns: - keptColumnsNames.append(column['name']) + kept_columns_names.append(column['name']) cl = [column['name'], column['type']] @@ -576,7 +566,7 @@ class SchemaUpgrade(object): # join all the table column creation fields final = ', '.join(final) - keptColumnsNames = ', '.join(keptColumnsNames) + kept_columns_names = ', '.join(kept_columns_names) # generate sql for the new table creation if 0 == len(pk): @@ -588,12 +578,12 @@ class SchemaUpgrade(object): # create new temporary table and copy the old table data across, barring the removed column self.connection.action(sql) # noinspection SqlResolve - self.connection.action('INSERT INTO [%s_new] SELECT %s FROM [%s]' % (table, keptColumnsNames, table)) + self.connection.action('INSERT INTO [%s_new] SELECT %s FROM [%s]' % (table, kept_columns_names, table)) # copy the old indexes from the old table result = self.connection.select("SELECT sql FROM sqlite_master WHERE tbl_name=? AND type='index'", [table]) - # remove the old table and rename the new table to take it's place + # remove the old table and rename the new table to take its place # noinspection SqlResolve self.connection.action('DROP TABLE [%s]' % table) # noinspection SqlResolve @@ -607,22 +597,19 @@ class SchemaUpgrade(object): # vacuum the db as we will have a lot of space to reclaim after dropping tables self.connection.action('VACUUM') - def checkDBVersion(self): - return self.connection.checkDBVersion() + def call_check_db_version(self): + return self.connection.check_db_version() - def incDBVersion(self): - new_version = self.checkDBVersion() + 1 + def inc_db_version(self): + new_version = self.call_check_db_version() + 1 # noinspection SqlConstantCondition self.connection.action('UPDATE db_version SET db_version = ? WHERE 1=1', [new_version]) return new_version - def setDBVersion(self, new_version, check_db_version=True): + def set_db_version(self, new_version, check_db_version=True): # noinspection SqlConstantCondition self.connection.action('UPDATE db_version SET db_version = ? 
WHERE 1=1', [new_version]) - return check_db_version and self.checkDBVersion() - - def listTables(self): - return self.list_tables() + return check_db_version and self.call_check_db_version() def do_query(self, queries): if not isinstance(queries, list): @@ -632,23 +619,23 @@ class SchemaUpgrade(object): for query in queries: tbl_name = re.findall(r'(?i)DROP.*?TABLE.*?\[?([^\s\]]+)', query) - if tbl_name and not self.hasTable(tbl_name[0]): + if tbl_name and not self.has_table(tbl_name[0]): continue tbl_name = re.findall(r'(?i)CREATE.*?TABLE.*?\s([^\s(]+)\s*\(', query) - if tbl_name and self.hasTable(tbl_name[0]): + if tbl_name and self.has_table(tbl_name[0]): continue self.connection.action(query) def finish(self, tbl_dropped=False): if tbl_dropped: self.connection.action('VACUUM') - self.incDBVersion() + self.inc_db_version() def upgrade_log(self, *args, **kwargs): self.connection.upgrade_log(*args, **kwargs) -def MigrationCode(my_db): +def migration_code(my_db): schema = { 0: sickgear.mainDB.InitialSchema, 9: sickgear.mainDB.AddSizeAndSceneNameFields, @@ -721,17 +708,17 @@ def MigrationCode(my_db): # 20002: sickgear.mainDB.AddCoolSickGearFeature3, } - db_version = my_db.checkDBVersion() + db_version = my_db.check_db_version() my_db.new_db = 0 == db_version - logger.log(u'Detected database version: v%s' % db_version, logger.DEBUG) + logger.debug(f'Detected database version: v{db_version}') if not (db_version in schema): if db_version == sickgear.mainDB.MAX_DB_VERSION: - logger.log(u'Database schema is up-to-date, no upgrade required') + logger.log('Database schema is up-to-date, no upgrade required') elif 10000 > db_version: - logger.log_error_and_exit(u'SickGear does not currently support upgrading from this database version') + logger.log_error_and_exit('SickGear does not currently support upgrading from this database version') else: - logger.log_error_and_exit(u'Invalid database version') + logger.log_error_and_exit('Invalid database version') else: @@ -746,26 +733,26 @@ def MigrationCode(my_db): cleanup_old_db_backups(my_db.filename) except (BaseException, Exception) as e: my_db.close() - logger.log(u'Failed to update database with error: %s attempting recovery...' 
% ex(e), logger.ERROR) + logger.error(f'Failed to update database with error: {ex(e)} attempting recovery...') - if restoreDatabase(my_db.filename, db_version): + if _restore_database(my_db.filename, db_version): # initialize the main SB database - logger.log_error_and_exit(u'Successfully restored database version: %s' % db_version) + logger.log_error_and_exit(f'Successfully restored database version: {db_version}') else: - logger.log_error_and_exit(u'Failed to restore database version: %s' % db_version) + logger.log_error_and_exit(f'Failed to restore database version: {db_version}') my_db.upgrade_log('Finished') def cleanup_old_db_backups(filename): try: - d, filename = ek.ek(os.path.split, filename) + d, filename = os.path.split(filename) if not d: d = sickgear.DATA_DIR - for f in filter_iter(lambda fn: fn.is_file() and filename in fn.name and - re.search(r'\.db(\.v\d+)?\.r\d+$', fn.name), - ek.ek(scandir, d)): + for f in filter(lambda fn: fn.is_file() and filename in fn.name and + re.search(r'\.db(\.v\d+)?\.r\d+$', fn.name), + scandir(d)): try: - ek.ek(os.unlink, f.path) + os.unlink(f.path) except (BaseException, Exception): pass except (BaseException, Exception): @@ -778,11 +765,11 @@ def backup_database(db_connection, filename, version): logger.debug('new db, no backup required') return - logger.log(u'Backing up database before upgrade') - if not sickgear.helpers.backup_versioned_file(dbFilename(filename), version): - logger.log_error_and_exit(u'Database backup failed, abort upgrading database') + logger.log('Backing up database before upgrade') + if not sickgear.helpers.backup_versioned_file(db_filename(filename), version): + logger.log_error_and_exit('Database backup failed, abort upgrading database') else: - logger.log(u'Proceeding with upgrade') + logger.log('Proceeding with upgrade') def get_rollback_module(): @@ -843,18 +830,18 @@ def backup_all_dbs(target, compress=True, prefer_7z=True): optional compress with zip or 7z (python 3 only, external lib py7zr required) 7z falls back to zip if py7zr is not available - :param target: target folder to backup to + :param target: target folder for backup db :param compress: compress db backups :param prefer_7z: prefer 7z compression if available :return: success, message """ if not make_path(target): - logger.log('Failed to create db backup dir', logger.ERROR) + logger.error('Failed to create db backup dir') return False, 'Failed to create db backup dir' my_db = DBConnection('cache.db') last_backup = my_db.select('SELECT time FROM lastUpdate WHERE provider = ?', ['sickgear_db_backup']) if last_backup: - now_stamp = int(timestamp_near(datetime.datetime.now())) + now_stamp = SGDatetime.timestamp_near() the_time = int(last_backup[0]['time']) # only backup every 23 hours if now_stamp - the_time < 60 * 60 * 23: @@ -870,7 +857,7 @@ def backup_all_dbs(target, compress=True, prefer_7z=True): if not success: return False, msg if compress: - full_path = ek.ek(os.path.join, target, name) + full_path = os.path.join(target, name) if not compress_file(full_path, '%s.db' % cur_db, prefer_7z=prefer_7z): return False, 'Failure to compress backup' delete_old_db_backups(target) diff --git a/sickgear/event_queue.py b/sickgear/event_queue.py index d9a42daa..0c894e00 100644 --- a/sickgear/event_queue.py +++ b/sickgear/event_queue.py @@ -32,8 +32,18 @@ class Events(threading.Thread): try: # get event type etype = self.queue.get(True, 1) + except moves.queue.Empty: + etype = 'Empty' + except(BaseException, Exception): + etype = None + if etype in 
(self.SystemEvent.RESTART, self.SystemEvent.SHUTDOWN, None, 'Empty'): + if etype in ('Empty',): + continue + from sickgear import logger + logger.debug(f'Callback {self.callback.__name__}(event type:{etype})') - # perform callback if we got a event type + try: + # perform callback if we got an event type self.callback(etype) # event completed diff --git a/sickgear/failedProcessor.py b/sickgear/failedProcessor.py index 3ebf1844..7af0483b 100644 --- a/sickgear/failedProcessor.py +++ b/sickgear/failedProcessor.py @@ -67,30 +67,33 @@ class FailedProcessor(LegacyFailedProcessor): :return: success :type: bool or None """ - self._log(u'Failed download detected: (%s, %s)' % (self.nzb_name, self.dir_name)) + self._log(f'Failed download detected: ({self.nzb_name}, {self.dir_name})') - releaseName = show_name_helpers.determineReleaseName(self.dir_name, self.nzb_name) - if None is releaseName: - self._log(u'Warning: unable to find a valid release name.', logger.WARNING) + release_name = show_name_helpers.determine_release_name(self.dir_name, self.nzb_name) + if None is release_name: + self._log('Warning: unable to find a valid release name.', logger.WARNING) raise exceptions_helper.FailedProcessingFailed() try: parser = NameParser(False, show_obj=self.show_obj, convert=True) - parsed = parser.parse(releaseName) + parsed = parser.parse(release_name) except InvalidNameException: - self._log(u'Error: release name is invalid: ' + releaseName, logger.DEBUG) + self._log(f'Error: release name is invalid: {release_name}', logger.DEBUG) raise exceptions_helper.FailedProcessingFailed() except InvalidShowException: - self._log(u'Error: unable to parse release name %s into a valid show' % releaseName, logger.DEBUG) + self._log(f'Error: unable to parse release name {release_name} into a valid show', logger.DEBUG) raise exceptions_helper.FailedProcessingFailed() - logger.log(u"name_parser info: ", logger.DEBUG) - logger.log(u" - " + str(parsed.series_name), logger.DEBUG) - logger.log(u" - " + str(parsed.season_number), logger.DEBUG) - logger.log(u" - " + str(parsed.episode_numbers), logger.DEBUG) - logger.log(u" - " + str(parsed.extra_info), logger.DEBUG) - logger.log(u" - " + str(parsed.release_group), logger.DEBUG) - logger.log(u" - " + str(parsed.air_date), logger.DEBUG) + for cur_msg in ( + 'name_parser info: ', + f' - {parsed.series_name}', + f' - {parsed.season_number}', + f' - {parsed.episode_numbers}', + f' - {parsed.extra_info}', + f' - {parsed.release_group}', + f' - {parsed.air_date}' + ): + logger.debug(cur_msg) for episode in parsed.episode_numbers: segment = parsed.show_obj.get_episode(parsed.season_number, episode) diff --git a/sickgear/failed_history.py b/sickgear/failed_history.py index 5af91a6d..9afa6884 100644 --- a/sickgear/failed_history.py +++ b/sickgear/failed_history.py @@ -25,7 +25,6 @@ from .history import dateFormat from exceptions_helper import EpisodeNotFoundException, ex from _23 import unquote -from six import PY2, text_type # noinspection PyUnresolvedReferences # noinspection PyUnreachableCode @@ -83,10 +82,6 @@ def prepare_failed_name(release): fixed = re.sub(r'[.\-+ ]', '_', fixed) - # noinspection PyUnresolvedReferences - if PY2 and not isinstance(fixed, unicode): - fixed = text_type(fixed, 'utf-8', 'replace') - return fixed @@ -104,21 +99,20 @@ def add_failed(release): sql_result = db_select('SELECT * FROM history t WHERE t.release=?', [release]) if not any(sql_result): - logger.log('Release not found in failed.db snatch history', logger.WARNING) + logger.warning('Release not 
found in failed.db snatch history') elif 1 < len(sql_result): - logger.log('Multiple logged snatches found for release in failed.db', logger.WARNING) + logger.warning('Multiple logged snatches found for release in failed.db') sizes = len(set([x['size'] for x in sql_result])) providers = len(set([x['provider'] for x in sql_result])) if 1 == sizes: - logger.log('However, they\'re all the same size. Continuing with found size', logger.WARNING) + logger.warning('However, they\'re all the same size. Continuing with found size') size = sql_result[0]['size'] else: - logger.log( - 'They also vary in size. Deleting logged snatches and recording this release with no size/provider', - logger.WARNING) + logger.warning( + 'They also vary in size. Deleting logged snatches and recording this release with no size/provider') for cur_result in sql_result: remove_snatched(cur_result['release'], cur_result['size'], cur_result['provider']) @@ -165,12 +159,12 @@ def set_episode_failed(ep_obj): """ try: with ep_obj.lock: - quality = Quality.splitCompositeStatus(ep_obj.status)[1] - ep_obj.status = Quality.compositeStatus(FAILED, quality) + quality = Quality.split_composite_status(ep_obj.status)[1] + ep_obj.status = Quality.composite_status(FAILED, quality) ep_obj.save_to_db() except EpisodeNotFoundException as e: - logger.log('Unable to get episode, please set its status manually: %s' % ex(e), logger.WARNING) + logger.warning('Unable to get episode, please set its status manually: %s' % ex(e)) def remove_failed(release): @@ -236,19 +230,19 @@ def revert_episode(ep_obj): if ep_obj.episode in history_eps: status_revert = history_eps[ep_obj.episode]['old_status'] - status, quality = Quality.splitCompositeStatus(status_revert) + status, quality = Quality.split_composite_status(status_revert) logger.log('Found in failed.db history with status: %s quality: %s' % ( statusStrings[status], Quality.qualityStrings[quality])) else: status_revert = WANTED - logger.log('Episode not found in failed.db history. Setting it to WANTED', logger.WARNING) + logger.warning('Episode not found in failed.db history. Setting it to WANTED') ep_obj.status = status_revert ep_obj.save_to_db() except EpisodeNotFoundException as e: - logger.log('Unable to create episode, please set its status manually: %s' % ex(e), logger.WARNING) + logger.warning('Unable to create episode, please set its status manually: %s' % ex(e)) def find_old_status(ep_obj): @@ -294,8 +288,7 @@ def find_release(ep_obj): db_action('DELETE FROM history WHERE %s=? AND %s!=?' % ('`release`', '`date`'), [release, r['date']]) # Found a previously failed release - logger.log('Found failed.db history release %sx%s: [%s]' % ( - ep_obj.season, ep_obj.episode, release), logger.DEBUG) + logger.debug(f'Found failed.db history release {ep_obj.season}x{ep_obj.episode}: [{release}]') else: release = None provider = None diff --git a/sickgear/generic_queue.py b/sickgear/generic_queue.py index b29d0ebc..11c77bbe 100644 --- a/sickgear/generic_queue.py +++ b/sickgear/generic_queue.py @@ -89,7 +89,7 @@ class GenericQueue(object): my_db = db.DBConnection('cache.db') my_db.mass_action(cl) except (BaseException, Exception) as e: - logger.log('Exception saving queue %s to db: %s' % (self.__class__.__name__, ex(e)), logger.ERROR) + logger.error('Exception saving queue %s to db: %s' % (self.__class__.__name__, ex(e))) def _clear_sql(self): # type: (...) 
-> List[List] @@ -103,7 +103,7 @@ class GenericQueue(object): my_db = db.DBConnection('cache.db') my_db.mass_action(item_sql) except (BaseException, Exception) as e: - logger.log('Exception saving item %s to db: %s' % (item, ex(e)), logger.ERROR) + logger.error('Exception saving item %s to db: %s' % (item, ex(e))) def delete_item(self, item, finished_run=False): # type: (Union[QueueItem, CastQueueItem], bool) -> None @@ -119,7 +119,7 @@ class GenericQueue(object): my_db = db.DBConnection('cache.db') my_db.mass_action(item_sql) except (BaseException, Exception) as e: - logger.log('Exception deleting item %s from db: %s' % (item, ex(e)), logger.ERROR) + logger.error('Exception deleting item %s from db: %s' % (item, ex(e))) def _get_item_sql(self, item): # type: (Union[QueueItem, CastQueueItem]) -> List[List] @@ -175,7 +175,7 @@ class GenericQueue(object): """ clear queue excluding internal defined types - :param action_types: only clear all of given action type + :param action_types: only clear supplied action types """ if not isinstance(action_types, list): action_types = [action_types] @@ -211,12 +211,12 @@ class GenericQueue(object): my_db.mass_action(del_main_sql) def pause(self): - logger.log(u'Pausing queue') + logger.log('Pausing queue') if self.lock: self.min_priority = 999999999999 def unpause(self): - logger.log(u'Unpausing queue') + logger.log('Unpausing queue') with self.lock: self.min_priority = 0 @@ -258,7 +258,7 @@ class GenericQueue(object): if 0 == len(self.events[event_type]): del self.events[event_type] except (BaseException, Exception) as e: - logger.log('Error removing event method from queue: %s' % ex(e), logger.ERROR) + logger.error('Error removing event method from queue: %s' % ex(e)) def execute_events(self, event_type, *args, **kwargs): # type: (int, Tuple, Dict) -> None @@ -267,7 +267,7 @@ class GenericQueue(object): try: event(*args, **kwargs) except (BaseException, Exception) as e: - logger.log('Error executing Event: %s' % ex(e), logger.ERROR) + logger.error('Error executing Event: %s' % ex(e)) def run(self): diff --git a/sickgear/gh_api.py b/sickgear/gh_api.py index bca5e589..7c78c4ed 100644 --- a/sickgear/gh_api.py +++ b/sickgear/gh_api.py @@ -23,7 +23,7 @@ if False: class GitHub(object): """ - Simple api wrapper for the Github API v3. Currently only supports the small thing that SB + Simple api wrapper for the GitHub API v3. Currently only supports the small thing that SB needs it for - list of commits. """ @@ -34,7 +34,7 @@ class GitHub(object): self.branch = branch @staticmethod - def _access_API(path, params=None): + def _access_api(path, params=None): """ Access the API at the path given and with the optional params given. @@ -49,55 +49,57 @@ class GitHub(object): if params and type(params) is dict: url += '?' + '&'.join([str(x) + '=' + str(params[x]) for x in params]) - parsedJSON = helpers.get_url(url, parse_json=True) - if not parsedJSON: + parsed_json = helpers.get_url(url, parse_json=True) + if not parsed_json: return [] - return parsedJSON + return parsed_json def commits(self): """ Get a list of the 100 most recent commits from the specified user/repo/branch, starting from HEAD. - user: The github username of the person whose repo you're querying + user: The GitHub username of the person whose repo you're querying repo: The repo name to query branch: Optional, the branch name to show commits from - Returns a deserialized json object containing the commit info. 
See http://developer.github.com/v3/repos/commits/ + Returns a deserialized json object containing the commit info. + See https://developer.github.com/v3/repos/commits/ """ - access_API = self._access_API(['repos', self.github_repo_user, self.github_repo, 'commits'], + access_api = self._access_api(['repos', self.github_repo_user, self.github_repo, 'commits'], params={'per_page': 100, 'sha': self.branch}) - return access_API + return access_api def compare(self, base, head, per_page=1): """ Uses the API to get a list of compares between base and head. - user: The github username of the person whose repo you're querying + user: The GitHub username of the person whose repo you're querying repo: The repo name to query base: Start compare from branch head: Current commit sha or branch name to compare per_page: number of items per page - Returns a deserialized json object containing the compare info. See http://developer.github.com/v3/repos/commits + Returns a deserialized json object containing the compare info. + See https://developer.github.com/v3/repos/commits """ - access_API = self._access_API( + access_api = self._access_api( ['repos', self.github_repo_user, self.github_repo, 'compare', base + '...' + head], params={'per_page': per_page}) - return access_API + return access_api def branches(self): - access_API = self._access_API( + access_api = self._access_api( ['repos', self.github_repo_user, self.github_repo, 'branches'], params={'per_page': 100}) - return access_API + return access_api def pull_requests(self): - access_API = self._access_API( + access_api = self._access_api( ['repos', self.github_repo_user, self.github_repo, 'pulls'], params={'per_page': 100}) # type: Optional[Dict] pulls = [] - for x in access_API: + for x in access_api: try: pull = PullRequest(x['head']['ref'], x['number']) pulls.append((repr(pull), pull.fetch_name())) diff --git a/sickgear/helpers.py b/sickgear/helpers.py index 2baa137e..117ec034 100644 --- a/sickgear/helpers.py +++ b/sickgear/helpers.py @@ -34,10 +34,8 @@ import sickgear from . 
import db, logger, notifiers from .common import cpu_presets, mediaExtensions, Overview, Quality, statusStrings, subtitleExtensions, \ ARCHIVED, DOWNLOADED, FAILED, IGNORED, SKIPPED, SNATCHED_ANY, SUBTITLED, UNAIRED, UNKNOWN, WANTED -from .sgdatetime import timestamp_near +from .sgdatetime import SGDatetime from lib.tvinfo_base.exceptions import * -# noinspection PyPep8Naming -import encodingKludge as ek from exceptions_helper import ex, MultipleShowObjectsException import dateutil.parser @@ -45,8 +43,9 @@ import requests import requests.exceptions import subliminal from lxml_etree import etree, is_lxml +from base64 import decodebytes as b64decodebytes, encodebytes as b64encodebytes -from _23 import b64decodebytes, b64encodebytes, decode_bytes, decode_str, filter_iter, scandir +from _23 import decode_bytes, decode_str, scandir from six import iteritems, string_types, text_type # noinspection PyUnresolvedReferences from six.moves import zip @@ -58,13 +57,18 @@ from sg_helpers import chmod_as_parent, clean_data, copy_file, download_file, fi get_url, indent_xml, make_path, maybe_plural, md5_for_text, move_file, proxy_setting, remove_file, \ remove_file_perm, replace_extension, sanitize_filename, scantree, touch_file, try_int, try_ord, write_file +# deprecated item, remove in 2020, kept here as rollback uses it +copyFile = copy_file +moveFile = move_file +tryInt = try_int # one legacy custom provider is keeping this signature here + # noinspection PyUnreachableCode if False: # noinspection PyUnresolvedReferences from typing import Any, AnyStr, Dict, Generator, NoReturn, Iterable, Iterator, List, Optional, Set, Tuple, Union from .tv import TVShow # the following workaround hack resolves a pyc resolution bug - from .name_cache import retrieveNameFromCache + from .name_cache import retrieve_name_from_cache from six import integer_types RE_XML_ENCODING = re.compile(r'^(<\?xml[^>]+)\s+(encoding\s*=\s*[\"\'][^\"\']*[\"\'])(\s*\?>|)', re.U) @@ -171,7 +175,7 @@ def has_image_ext(filename): :rtype: bool """ try: - if ek.ek(os.path.splitext, filename)[1].lower() in ['.bmp', '.gif', '.jpeg', '.jpg', '.png', '.webp']: + if os.path.splitext(filename)[1].lower() in ['.bmp', '.gif', '.jpeg', '.jpg', '.png', '.webp']: return True except (BaseException, Exception): pass @@ -251,9 +255,9 @@ def make_dir(path): :return: success of creation :rtype: bool """ - if not ek.ek(os.path.isdir, path): + if not os.path.isdir(path): try: - ek.ek(os.makedirs, path) + os.makedirs(path) # do a Synology library update notifiers.NotifierFactory().get('SYNOINDEX').addFolder(path) except OSError: @@ -320,19 +324,20 @@ def search_infosrc_for_show_id(reg_show_name, tvid=None, prodid=None, ui=None): return None, None, None -def sizeof_fmt(num): +def sizeof_fmt(number, digits=1, sep=' '): + # type: (int, int, AnyStr) -> AnyStr """ - format given bytes to human readable string + format given bytes to human-readable text - :param num: number - :type num: int or long - :return: human readable formatted string - :rtype: AnyStr + :param number: value to convert + :param digits: number of digits after decimal point + :param sep: seperater of value and dimension + :return: human-readable formatted text """ - for x in ['bytes', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB']: - if 1024.0 > num: - return "%3.1f %s" % (num, x) - num /= 1024.0 + for cur_dimension in ['bytes', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB']: + if 1024.0 > number: + return f'{number:3.{digits}f}{sep}{cur_dimension}' + number /= 1024.0 def list_media_files(path): 
@@ -346,37 +351,13 @@ def list_media_files(path): result = [] if path: if [direntry for direntry in scantree(path, include=[r'\.sickgearignore'], filter_kind=False, recurse=False)]: - logger.log('Skipping folder "%s" because it contains ".sickgearignore"' % path, logger.DEBUG) + logger.debug('Skipping folder "%s" because it contains ".sickgearignore"' % path) else: result = [direntry.path for direntry in scantree(path, exclude=['Extras'], filter_kind=False) if has_media_ext(direntry.name)] return result -def copyFile(src_file, dest_file): - """ deprecated_item, remove in 2020, kept here as rollback uses it - :param src_file: source file - :type src_file: AnyStr - :param dest_file: destination file - :type dest_file: AnyStr - :return: nothing - :rtype: None - """ - return copy_file(src_file, dest_file) - - -def moveFile(src_file, dest_file): - """ deprecated_item, remove in 2020, kept here as rollback uses it - :param src_file: source file - :type src_file: AnyStr - :param dest_file: destination file - :type dest_file: AnyStr - :return: nothing - :rtype: None - """ - return move_file(src_file, dest_file) - - def link(src_file, dest_file): """ @@ -391,7 +372,7 @@ def link(src_file, dest_file): if 0 == ctypes.windll.kernel32.CreateHardLinkW(text_type(dest_file), text_type(src_file), 0): raise ctypes.WinError() else: - ek.ek(os.link, src_file, dest_file) + os.link(src_file, dest_file) def hardlink_file(src_file, dest_file): @@ -403,11 +384,10 @@ def hardlink_file(src_file, dest_file): :type dest_file: AnyStr """ try: - ek.ek(link, src_file, dest_file) + link(src_file, dest_file) fix_set_group_id(dest_file) except (BaseException, Exception) as e: - logger.log(u"Failed to create hardlink of %s at %s: %s. Copying instead." % (src_file, dest_file, ex(e)), - logger.ERROR) + logger.error(f'Failed to create hardlink of {src_file} at {dest_file}: {ex(e)}. Copying instead.') copy_file(src_file, dest_file) @@ -423,10 +403,10 @@ def symlink(src_file, dest_file): import ctypes if ctypes.windll.kernel32.CreateSymbolicLinkW( - text_type(dest_file), text_type(src_file), 1 if ek.ek(os.path.isdir, src_file) else 0) in [0, 1280]: + text_type(dest_file), text_type(src_file), 1 if os.path.isdir(src_file) else 0) in [0, 1280]: raise ctypes.WinError() else: - ek.ek(os.symlink, src_file, dest_file) + os.symlink(src_file, dest_file) def move_and_symlink_file(src_file, dest_file): @@ -438,11 +418,11 @@ def move_and_symlink_file(src_file, dest_file): :type dest_file: AnyStr """ try: - ek.ek(shutil.move, src_file, dest_file) + shutil.move(src_file, dest_file) fix_set_group_id(dest_file) - ek.ek(symlink, dest_file, src_file) + symlink(dest_file, src_file) except (BaseException, Exception): - logger.log(u"Failed to create symlink of %s at %s. Copying instead" % (src_file, dest_file), logger.ERROR) + logger.error(f'Failed to create symlink of {src_file} at {dest_file}. 
Copying instead') copy_file(src_file, dest_file) @@ -461,11 +441,11 @@ def rename_ep_file(cur_path, new_path, old_path_length=0): :rtype: bool """ - # new_dest_dir, new_dest_name = ek.ek(os.path.split, new_path) + # new_dest_dir, new_dest_name = os.path.split(new_path) if 0 == old_path_length or len(cur_path) < old_path_length: # approach from the right - cur_file_name, cur_file_ext = ek.ek(os.path.splitext, cur_path) + cur_file_name, cur_file_ext = os.path.splitext(cur_path) else: # approach from the left cur_file_ext = cur_path[old_path_length:] @@ -473,7 +453,7 @@ def rename_ep_file(cur_path, new_path, old_path_length=0): if cur_file_ext[1:] in subtitleExtensions: # Extract subtitle language from filename - sublang = ek.ek(os.path.splitext, cur_file_name)[1][1:] + sublang = os.path.splitext(cur_file_name)[1][1:] # Check if the language extracted from filename is a valid language try: @@ -485,18 +465,18 @@ def rename_ep_file(cur_path, new_path, old_path_length=0): # put the extension on the incoming file new_path += cur_file_ext - make_path(ek.ek(os.path.dirname, new_path), syno=True) + make_path(os.path.dirname(new_path), syno=True) # move the file try: - logger.log(u'Renaming file from %s to %s' % (cur_path, new_path)) - ek.ek(shutil.move, cur_path, new_path) + logger.log(f'Renaming file from {cur_path} to {new_path}') + shutil.move(cur_path, new_path) except (OSError, IOError) as e: - logger.log(u"Failed renaming " + cur_path + " to " + new_path + ": " + ex(e), logger.ERROR) + logger.error(f'Failed renaming {cur_path} to {new_path}: {ex(e)}') return False # clean up any old folders that are empty - delete_empty_folders(ek.ek(os.path.dirname, cur_path)) + delete_empty_folders(os.path.dirname(cur_path)) return True @@ -514,25 +494,25 @@ def delete_empty_folders(check_empty_dir, keep_dir=None): # treat check_empty_dir as empty when it only contains these items ignore_items = [] - logger.log(u"Trying to clean any empty folders under " + check_empty_dir) + logger.log(f'Trying to clean any empty folders under {check_empty_dir}') # as long as the folder exists and doesn't contain any files, delete it - while ek.ek(os.path.isdir, check_empty_dir) and check_empty_dir != keep_dir: - check_files = ek.ek(os.listdir, check_empty_dir) + while os.path.isdir(check_empty_dir) and check_empty_dir != keep_dir: + check_files = os.listdir(check_empty_dir) if not check_files or (len(check_files) <= len(ignore_items) and all( [check_file in ignore_items for check_file in check_files])): # directory is empty or contains only ignore_items try: - logger.log(u"Deleting empty folder: " + check_empty_dir) + logger.log(f"Deleting empty folder: {check_empty_dir}") # need shutil.rmtree when ignore_items is really implemented - ek.ek(os.rmdir, check_empty_dir) + os.rmdir(check_empty_dir) # do a Synology library update notifiers.NotifierFactory().get('SYNOINDEX').deleteFolder(check_empty_dir) except OSError as e: - logger.log(u"Unable to delete " + check_empty_dir + ": " + repr(e) + " / " + ex(e), logger.WARNING) + logger.warning(f'Unable to delete {check_empty_dir}: {repr(e)} / {ex(e)}') break - check_empty_dir = ek.ek(os.path.dirname, check_empty_dir) + check_empty_dir = os.path.dirname(check_empty_dir) else: break @@ -560,12 +540,10 @@ def get_absolute_number_from_season_and_episode(show_obj, season, episode): if 1 == len(sql_result): absolute_number = int(sql_result[0]["absolute_number"]) - logger.log( - "Found absolute_number:" + str(absolute_number) + " by " + str(season) + "x" + str(episode), - logger.DEBUG) + 
logger.debug(f'Found absolute_number:{absolute_number} by {season}x{episode}') else: logger.debug('No entries for absolute number in show: %s found using %sx%s' % - (show_obj.unique_name, str(season), str(episode))) + (show_obj.unique_name, str(season), str(episode))) return absolute_number @@ -601,14 +579,14 @@ def sanitize_scene_name(name): :rtype: AnyStr """ if name: - bad_chars = u',:()£\'!?\u2019' + bad_chars = ',:()£\'!?\u2019' # strip out any bad chars name = re.sub(r'[%s]' % bad_chars, '', name, flags=re.U) # tidy up stuff that doesn't belong in scene names name = re.sub(r'(-?\s|/)', '.', name).replace('&', 'and') - name = re.sub(r"\.\.*", '.', name).rstrip('.') + name = re.sub(r"\.+", '.', name).rstrip('.') return name return '' @@ -655,7 +633,7 @@ def parse_xml(data, del_xmlns=False): try: parsed_xml = etree.fromstring(data) except (BaseException, Exception) as e: - logger.log(u"Error trying to parse xml data. Error: " + ex(e), logger.DEBUG) + logger.debug(f"Error trying to parse xml data. Error: {ex(e)}") parsed_xml = None return parsed_xml @@ -675,40 +653,40 @@ def backup_versioned_file(old_file, version): new_file = '%s.v%s' % (old_file, version) - if ek.ek(os.path.isfile, new_file): + if os.path.isfile(new_file): changed_old_db = False for back_nr in range(1, 10000): alt_name = '%s.r%s' % (new_file, back_nr) - if not ek.ek(os.path.isfile, alt_name): + if not os.path.isfile(alt_name): try: shutil.move(new_file, alt_name) changed_old_db = True break except (BaseException, Exception): - if ek.ek(os.path.isfile, new_file): + if os.path.isfile(new_file): continue - logger.log('could not rename old backup db file', logger.WARNING) + logger.warning('could not rename old backup db file') if not changed_old_db: raise Exception('can\'t create a backup of db') - while not ek.ek(os.path.isfile, new_file): - if not ek.ek(os.path.isfile, old_file) or 0 == get_size(old_file): - logger.log(u'No need to create backup', logger.DEBUG) + while not os.path.isfile(new_file): + if not os.path.isfile(old_file) or 0 == get_size(old_file): + logger.debug('No need to create backup') break try: - logger.log(u'Trying to back up %s to %s' % (old_file, new_file), logger.DEBUG) + logger.debug(f'Trying to back up {old_file} to {new_file}') shutil.copy(old_file, new_file) - logger.log(u'Backup done', logger.DEBUG) + logger.debug('Backup done') break except (BaseException, Exception) as e: - logger.log(u'Error while trying to back up %s to %s : %s' % (old_file, new_file, ex(e)), logger.WARNING) + logger.warning(f'Error while trying to back up {old_file} to {new_file} : {ex(e)}') num_tries += 1 time.sleep(3) - logger.log(u'Trying again.', logger.DEBUG) + logger.debug('Trying again.') if 3 <= num_tries: - logger.log(u'Unable to back up %s to %s please do it manually.' % (old_file, new_file), logger.ERROR) + logger.error(f'Unable to back up {old_file} to {new_file} please do it manually.') return False return True @@ -724,56 +702,45 @@ def restore_versioned_file(backup_file, version): :return: success :rtype: bool """ - numTries = 0 + num_tries = 0 - new_file, backup_version = ek.ek(os.path.splitext, backup_file) + new_file, backup_version = os.path.splitext(backup_file) restore_file = new_file + '.' 
+ 'v' + str(version) - if not ek.ek(os.path.isfile, new_file): - logger.log(u"Not restoring, " + new_file + " doesn't exist", logger.DEBUG) + if not os.path.isfile(new_file): + logger.debug(f'Not restoring, {new_file} doesn\'t exist') return False try: - logger.log( - u"Trying to backup " + new_file + " to " + new_file + "." + "r" + str(version) + " before restoring backup", - logger.DEBUG) + logger.debug(f'Trying to backup {new_file} to {new_file}.r{version} before restoring backup') shutil.move(new_file, new_file + '.' + 'r' + str(version)) except (BaseException, Exception) as e: - logger.log( - u"Error while trying to backup DB file " + restore_file + " before proceeding with restore: " + ex(e), - logger.WARNING) + logger.warning(f'Error while trying to backup DB file {restore_file} before proceeding with restore: {ex(e)}') return False - while not ek.ek(os.path.isfile, new_file): - if not ek.ek(os.path.isfile, restore_file): - logger.log(u"Not restoring, " + restore_file + " doesn't exist", logger.DEBUG) + while not os.path.isfile(new_file): + if not os.path.isfile(restore_file): + logger.debug(f'Not restoring, {restore_file} doesn\'t exist') break try: - logger.log(u"Trying to restore " + restore_file + " to " + new_file, logger.DEBUG) + logger.debug(f'Trying to restore {restore_file} to {new_file}') shutil.copy(restore_file, new_file) - logger.log(u"Restore done", logger.DEBUG) + logger.debug('Restore done') break except (BaseException, Exception) as e: - logger.log(u"Error while trying to restore " + restore_file + ": " + ex(e), logger.WARNING) - numTries += 1 + logger.warning(f'Error while trying to restore {restore_file}: {ex(e)}') + num_tries += 1 time.sleep(1) - logger.log(u"Trying again.", logger.DEBUG) + logger.debug('Trying again.') - if 10 <= numTries: - logger.log(u"Unable to restore " + restore_file + " to " + new_file + " please do it manually.", - logger.ERROR) + if 10 <= num_tries: + logger.error(f'Unable to restore {restore_file} to {new_file} please do it manually.') return False return True -# one legacy custom provider is keeping this signature here, -# a monkey patch could fix that so that this can be removed -def tryInt(s, s_default=0): - return try_int(s, s_default) - - # try to convert to float, return default on failure def try_float(s, s_default=0.0): try: @@ -955,7 +922,7 @@ def get_show(name, try_scene_exceptions=False): show_obj = None try: - tvid, prodid = sickgear.name_cache.retrieveNameFromCache(name) + tvid, prodid = sickgear.name_cache.retrieve_name_from_cache(name) if tvid and prodid: show_obj = find_show_by_id({tvid: prodid}) @@ -964,7 +931,7 @@ def get_show(name, try_scene_exceptions=False): if tvid and prodid: show_obj = find_show_by_id({tvid: prodid}) except (BaseException, Exception) as e: - logger.log(u'Error when attempting to find show: ' + name + ' in SickGear: ' + ex(e), logger.DEBUG) + logger.debug(f'Error when attempting to find show: {name} in SickGear: {ex(e)}') return show_obj @@ -978,8 +945,8 @@ def is_hidden_folder(folder): :return: Returns True if folder is hidden :rtype: bool """ - if ek.ek(os.path.isdir, folder): - if ek.ek(os.path.basename, folder).startswith('.'): + if os.path.isdir(folder): + if os.path.basename(folder).startswith('.'): return True return False @@ -994,7 +961,7 @@ def real_path(path): :return: the canonicalized absolute pathname :rtype: AnyStr """ - return ek.ek(os.path.normpath, ek.ek(os.path.normcase, ek.ek(os.path.realpath, ek.ek(os.path.expanduser, path)))) + return 
os.path.normpath(os.path.normcase(os.path.realpath(os.path.expanduser(path)))) def validate_show(show_obj, season=None, episode=None): @@ -1040,7 +1007,7 @@ def clear_cache(force=False): """ # clean out cache directory, remove everything > 12 hours old dirty = None - del_time = int(timestamp_near((datetime.datetime.now() - datetime.timedelta(hours=12)))) + del_time = SGDatetime.timestamp_near(td=datetime.timedelta(hours=12)) direntry_args = dict(follow_symlinks=False) for direntry in scantree(sickgear.CACHE_DIR, ['images|rss|zoneinfo'], follow_symlinks=True): if direntry.is_file(**direntry_args) and (force or del_time > direntry.stat(**direntry_args).st_mtime): @@ -1048,12 +1015,13 @@ def clear_cache(force=False): elif direntry.is_dir(**direntry_args) and direntry.name not in ['cheetah', 'sessions', 'indexers']: dirty = dirty or False try: - ek.ek(os.rmdir, direntry.path) + os.rmdir(direntry.path) except OSError: dirty = True - logger.log(u'%s from cache folder %s' % ((('Found items not removed', 'Found items removed')[not dirty], - 'No items found to remove')[None is dirty], sickgear.CACHE_DIR)) + logger.log( + f'{(("Found items not removed", "Found items removed")[not dirty], "No items found to remove")[None is dirty]}' + f' from cache folder {sickgear.CACHE_DIR}') def human(size): @@ -1098,8 +1066,8 @@ def get_size(start_path='.'): :return: size in bytes :rtype: int or long """ - if ek.ek(os.path.isfile, start_path): - return ek.ek(os.path.getsize, start_path) + if os.path.isfile(start_path): + return os.path.getsize(start_path) try: return sum(map((lambda x: x.stat(follow_symlinks=False).st_size), scantree(start_path))) except OSError: @@ -1115,14 +1083,14 @@ def get_media_stats(start_path='.'): :param start_path: path to scan """ - if ek.ek(os.path.isdir, start_path): + if os.path.isdir(start_path): sizes = sorted(map(lambda y: y.stat(follow_symlinks=False).st_size, filter(lambda x: has_media_ext(x.name), scantree(start_path)))) if sizes: return len(sizes), sizes[0], sizes[-1], int(sum(sizes) / len(sizes)) - elif ek.ek(os.path.isfile, start_path): - size = ek.ek(os.path.getsize, start_path) + elif os.path.isfile(start_path): + size = os.path.getsize(start_path) return 1, size, size, size return 0, 0, 0, 0 @@ -1137,7 +1105,7 @@ def remove_article(text=''): :return: text without articles :rtype: AnyStr """ - return re.sub(r'(?i)^(?:(?:A(?!\s+to)n?)|The)\s(\w)', r'\1', text) + return re.sub(r'(?i)^(?:A(?!\s+to)n?|The)\s(\w)', r'\1', text) def re_valid_hostname(with_allowed=True): @@ -1285,7 +1253,7 @@ def check_port(host, port, timeout=1.0): def clear_unused_providers(): - providers = [x.cache.providerID for x in sickgear.providers.sortedProviderList() if x.is_active()] + providers = [x.cache.providerID for x in sickgear.providers.sorted_sources() if x.is_active()] if providers: my_db = db.DBConnection('cache.db') @@ -1299,7 +1267,7 @@ def make_search_segment_html_string(segment, max_eps=5): segment = [segment] if segment and len(segment) > max_eps: seasons = [x for x in set([x.season for x in segment])] - seg_str = u'Season%s: ' % maybe_plural(len(seasons)) + seg_str = f'Season{maybe_plural(len(seasons))}: ' divider = '' for x in seasons: eps = [str(s.episode) for s in segment if x == s.season] @@ -1309,7 +1277,7 @@ def make_search_segment_html_string(segment, max_eps=5): divider = ', ' elif segment: episode_numbers = ['S%sE%s' % (str(x.season).zfill(2), str(x.episode).zfill(2)) for x in segment] - seg_str = u'Episode%s: %s' % (maybe_plural(len(episode_numbers)), ', 
'.join(episode_numbers)) + seg_str = f'Episode{maybe_plural(len(episode_numbers))}: {", ".join(episode_numbers)}' return seg_str @@ -1319,7 +1287,7 @@ def has_anime(): :rtype: bool """ # noinspection PyTypeChecker - return False if not sickgear.showList else any(filter_iter(lambda show: show.is_anime, sickgear.showList)) + return False if not sickgear.showList else any(filter(lambda show: show.is_anime, sickgear.showList)) def cpu_sleep(): @@ -1332,11 +1300,11 @@ def cleanup_cache(): Delete old cached files """ delete_not_changed_in( - [ek.ek(os.path.join, sickgear.CACHE_DIR, 'images', 'browse', 'thumb', x) + [os.path.join(sickgear.CACHE_DIR, 'images', 'browse', 'thumb', x) for x in ['anidb', 'imdb', 'trakt', 'tvdb']] + - [ek.ek(os.path.join, sickgear.CACHE_DIR, 'images', x) + [os.path.join(sickgear.CACHE_DIR, 'images', x) for x in ['characters', 'person']] + - [ek.ek(os.path.join, sickgear.CACHE_DIR, 'tvinfo_cache')]) + [os.path.join(sickgear.CACHE_DIR, 'tvinfo_cache')]) def delete_not_changed_in(paths, days=30, minutes=0): @@ -1350,7 +1318,7 @@ def delete_not_changed_in(paths, days=30, minutes=0): :param minutes: Purge files not modified in this number of minutes (default: 0 minutes) :return: tuple; number of files that qualify for deletion, number of qualifying files that failed to be deleted """ - del_time = int(timestamp_near((datetime.datetime.now() - datetime.timedelta(days=days, minutes=minutes)))) + del_time = SGDatetime.timestamp_near(td=datetime.timedelta(days=days, minutes=minutes)) errors = 0 qualified = 0 for cur_path in (paths, [paths])[not isinstance(paths, list)]: @@ -1375,10 +1343,10 @@ def set_file_timestamp(filename, min_age=3, new_time=None): :param new_time: :type new_time: None or int """ - min_time = int(timestamp_near((datetime.datetime.now() - datetime.timedelta(days=min_age)))) + min_time = SGDatetime.timestamp_near(td=datetime.timedelta(days=min_age)) try: - if ek.ek(os.path.isfile, filename) and ek.ek(os.path.getmtime, filename) < min_time: - ek.ek(os.utime, filename, new_time) + if os.path.isfile(filename) and os.path.getmtime(filename) < min_time: + os.utime(filename, new_time) except (BaseException, Exception): pass @@ -1392,10 +1360,10 @@ def should_delete_episode(status): :return: should be deleted :rtype: bool """ - s = Quality.splitCompositeStatus(status)[0] + s = Quality.split_composite_status(status)[0] if s not in SNATCHED_ANY + [DOWNLOADED, ARCHIVED, IGNORED]: return True - logger.log('not safe to delete episode from db because of status: %s' % statusStrings[s], logger.DEBUG) + logger.debug('not safe to delete episode from db because of status: %s' % statusStrings[s]) return False @@ -1407,7 +1375,7 @@ def is_link(filepath): :return: True or False """ if 'win32' == sys.platform: - if not ek.ek(os.path.exists, filepath): + if not os.path.exists(filepath): return False import ctypes @@ -1417,36 +1385,56 @@ def is_link(filepath): attr = ctypes.windll.kernel32.GetFileAttributesW(text_type(filepath)) return invalid_file_attributes != attr and 0 != attr & file_attribute_reparse_point - return ek.ek(os.path.islink, filepath) + return os.path.islink(filepath) + + +def find_mount_point(path): + # type: (AnyStr) -> AnyStr + """ + returns the mount point for the given path + + :param path: to find the mount path + :return: mount point for path or path if no mount + """ + result = path + if os.path.exists(path): + result = os.path.realpath(os.path.abspath(path)) + try: + while not os.path.ismount(result): + new_path = os.path.dirname(result) + if new_path == 
result: + # return input path if mount point not found + return path + result = new_path + except (BaseException, Exception): + return path + return result def df(): + # type: (...) -> Tuple[List[Tuple[AnyStr, AnyStr]], bool] """ Return disk free space at known parent locations :return: string path, string value that is formatted size - :rtype: Tuple[List[Tuple[AnyStr, AnyStr]], bool] """ result = [] - min_output = True + min_output = True # flag ui to output minimal (e.g. vol: size, vol: size) if sickgear.ROOT_DIRS and sickgear.DISPLAY_FREESPACE: targets = [] - for path in sickgear.ROOT_DIRS.split('|')[1:]: - location_parts = os.path.splitdrive(path) - target = location_parts[0] - if 'win32' == sys.platform: - if not re.match('(?i)[a-z]:(?:\\\\)?$', target): - # simple drive letter not found, fallback to full path - target = path - min_output = False - elif sys.platform.startswith(('linux', 'darwin', 'sunos5')) or 'bsd' in sys.platform: - target = path + for cur_target in filter(lambda _t: _t and _t not in targets, + map(find_mount_point, sickgear.ROOT_DIRS.split('|')[1:])): + targets += [cur_target] + free = freespace(cur_target) + if 'win32' == sys.platform and None is not free: + cur_target = os.path.splitdrive(cur_target)[0] + if any(['win32' == sys.platform and not re.match('(?i)[a-z]:(\\\\)?$', cur_target), + # Windows, if a simple drive letter isn't found, fallback to full path. On Linux, full path is used + # trigger ui to output long paths instead of minimal volume letters layout + sys.platform.startswith(('linux', 'darwin', 'sunos5')), 'bsd' in sys.platform]): min_output = False - if target and target not in targets: - targets += [target] - free = freespace(path) - if None is not free: - result += [(target, sizeof_fmt(free).replace(' ', ''))] + result += [(cur_target, 'unavailable' if None is free else sizeof_fmt(free, sep=''))] + return result, min_output @@ -1496,11 +1484,11 @@ def path_mapper(search, replace, subject): :rtype: Tuple[AnyStr, bool] """ delim = '/!~!/' - search = re.sub(r'[\\]', delim, search) - replace = re.sub(r'[\\]', delim, replace) - path = re.sub(r'[\\]', delim, subject) + search = re.sub(r'\\', delim, search) + replace = re.sub(r'\\', delim, replace) + path = re.sub(r'\\', delim, subject) result = re.sub('(?i)^%s' % search, replace, path) - result = ek.ek(os.path.normpath, re.sub(delim, '/', result)) + result = os.path.normpath(re.sub(delim, '/', result)) return result, result != subject @@ -1516,7 +1504,7 @@ def get_overview(ep_status, show_quality, upgrade_once, split_snatch=False): :type split_snatch: bool :return: constant from classes Overview """ - status, quality = Quality.splitCompositeStatus(ep_status) + status, quality = Quality.split_composite_status(ep_status) if ARCHIVED == status: return Overview.GOOD if WANTED == status: @@ -1532,7 +1520,7 @@ def get_overview(ep_status, show_quality, upgrade_once, split_snatch=False): if not split_snatch and status in SNATCHED_ANY: return Overview.SNATCHED - void, best_qualities = Quality.splitQuality(show_quality) + void, best_qualities = Quality.split_quality(show_quality) # if re-downloads aren't wanted then mark it "good" if there is anything if not len(best_qualities): return Overview.GOOD @@ -1559,7 +1547,7 @@ def generate_show_dir_name(root_dir, show_name): san_show_name = san_show_name.replace(' ', '.') if None is root_dir: return san_show_name - return ek.ek(os.path.join, root_dir, san_show_name) + return os.path.join(root_dir, san_show_name) def count_files_dirs(base_dir): @@ -1572,9 +1560,9 @@ 
def count_files_dirs(base_dir): """ f = d = 0 try: - files = ek.ek(scandir, base_dir) + files = scandir(base_dir) except OSError as e: - logger.log('Unable to count files %s / %s' % (repr(e), ex(e)), logger.WARNING) + logger.warning('Unable to count files %s / %s' % (repr(e), ex(e))) else: for e in files: if e.is_file(): @@ -1603,8 +1591,8 @@ def upgrade_new_naming(): sickgear.CFG.setdefault('GUI', {})['fanart_ratings'] = '%s' % ne sickgear.CFG.write() - image_cache_dir = ek.ek(os.path.join, sickgear.CACHE_DIR, 'images') - bp_match = re.compile(r'(\d+)\.((?:banner|poster|(?:(?:\d+(?:\.\w*)?\.(?:\w{5,8}))\.)?fanart)\.jpg)', flags=re.I) + image_cache_dir = os.path.join(sickgear.CACHE_DIR, 'images') + bp_match = re.compile(r'(\d+)\.((?:banner|poster|(?:\d+(?:\.\w*)?\.\w{5,8}\.)?fanart)\.jpg)', flags=re.I) def _set_progress(p_msg, c, s): ps = None @@ -1618,14 +1606,14 @@ def upgrade_new_naming(): sickgear.classes.loading_msg.set_msg_progress(p_msg, '{:6.2f}%'.format(ps)) for d in ['', 'thumbnails']: - bd = ek.ek(os.path.join, image_cache_dir, d) - if ek.ek(os.path.isdir, bd): + bd = os.path.join(image_cache_dir, d) + if os.path.isdir(bd): fc, dc = count_files_dirs(bd) step = fc / float(100) cf = 0 p_text = 'Upgrading %s' % (d, 'banner/poster')[not d] _set_progress(p_text, 0, 0) - for entry in ek.ek(scandir, bd): + for entry in scandir(bd): if entry.is_file(): cf += 1 _set_progress(p_text, cf, step) @@ -1634,23 +1622,22 @@ def upgrade_new_naming(): old_id = int(b_s.group(1)) tvid = show_list.get(old_id) if tvid: - nb_dir = ek.ek(os.path.join, sickgear.CACHE_DIR, 'images', 'shows', - '%s-%s' % (tvid, old_id), d) - if not ek.ek(os.path.isdir, nb_dir): + nb_dir = os.path.join(sickgear.CACHE_DIR, 'images', 'shows', '%s-%s' % (tvid, old_id), d) + if not os.path.isdir(nb_dir): try: - ek.ek(os.makedirs, nb_dir) + os.makedirs(nb_dir) except (BaseException, Exception): pass - new_name = ek.ek(os.path.join, nb_dir, bp_match.sub(r'\2', entry.name)) + new_name = os.path.join(nb_dir, bp_match.sub(r'\2', entry.name)) try: move_file(entry.path, new_name) except (BaseException, Exception) as e: - logger.log('Unable to rename %s to %s: %s / %s' - % (entry.path, new_name, repr(e), ex(e)), logger.WARNING) + logger.warning('Unable to rename %s to %s: %s / %s' + % (entry.path, new_name, repr(e), ex(e))) else: # clean up files without reference in db try: - ek.ek(os.remove, entry.path) + os.remove(entry.path) except (BaseException, Exception): pass elif entry.is_dir(): @@ -1664,9 +1651,9 @@ def upgrade_new_naming(): p_text = 'Upgrading fanart' _set_progress(p_text, 0, 0) try: - entries = ek.ek(scandir, entry.path) + entries = scandir(entry.path) except OSError as e: - logger.log('Unable to stat dirs %s / %s' % (repr(e), ex(e)), logger.WARNING) + logger.warning('Unable to stat dirs %s / %s' % (repr(e), ex(e))) continue for d_entry in entries: if d_entry.is_dir(): @@ -1676,20 +1663,18 @@ def upgrade_new_naming(): if old_id: new_id = show_list.get(old_id) if new_id: - new_dir_name = ek.ek(os.path.join, sickgear.CACHE_DIR, 'images', 'shows', - '%s-%s' % (new_id, old_id), 'fanart') + new_dir_name = os.path.join(sickgear.CACHE_DIR, 'images', 'shows', + '%s-%s' % (new_id, old_id), 'fanart') try: move_file(d_entry.path, new_dir_name) except (BaseException, Exception) as e: - logger.log('Unable to rename %s to %s: %s / %s' % - (d_entry.path, new_dir_name, repr(e), ex(e)), logger.WARNING) - if ek.ek(os.path.isdir, new_dir_name): + logger.warning(f'Unable to rename {d_entry.path} to {new_dir_name}:' + f' {repr(e)} / {ex(e)}') 
+ if os.path.isdir(new_dir_name): try: - f_n = filter_iter(lambda fn: fn.is_file(), - ek.ek(scandir, new_dir_name)) + f_n = filter(lambda fn: fn.is_file(), scandir(new_dir_name)) except OSError as e: - logger.log('Unable to rename %s / %s' % (repr(e), ex(e)), - logger.WARNING) + logger.warning('Unable to rename %s / %s' % (repr(e), ex(e))) else: rename_args = [] # noinspection PyTypeChecker @@ -1700,24 +1685,24 @@ def upgrade_new_naming(): try: move_file(*args) except (BaseException, Exception) as e: - logger.log('Unable to rename %s to %s: %s / %s' % - (args[0], args[1], repr(e), ex(e)), logger.WARNING) + logger.warning(f'Unable to rename {args[0]} to {args[1]}:' + f' {repr(e)} / {ex(e)}') else: try: - ek.ek(shutil.rmtree, d_entry.path) + shutil.rmtree(d_entry.path) except (BaseException, Exception): pass try: - ek.ek(shutil.rmtree, d_entry.path) + shutil.rmtree(d_entry.path) except (BaseException, Exception): pass try: - ek.ek(os.rmdir, entry.path) + os.rmdir(entry.path) except (BaseException, Exception): pass if 'thumbnails' == d: try: - ek.ek(os.rmdir, bd) + os.rmdir(bd) except (BaseException, Exception): pass _set_progress(p_text, 0, 1) @@ -1757,11 +1742,11 @@ def normalise_chars(text): :return: Text with entities replaced :rtype: AnyStr """ - result = text.replace(u'\u2010', u'-').replace(u'\u2011', u'-').replace(u'\u2012', u'-') \ - .replace(u'\u2013', u'-').replace(u'\u2014', u'-').replace(u'\u2015', u'-') \ - .replace(u'\u2018', u"'").replace(u'\u2019', u"'") \ - .replace(u'\u201c', u'\"').replace(u'\u201d', u'\"') \ - .replace(u'\u0020', u' ').replace(u'\u00a0', u' ') + result = text.replace('\u2010', '-').replace('\u2011', '-').replace('\u2012', '-') \ + .replace('\u2013', '-').replace('\u2014', '-').replace('\u2015', '-') \ + .replace('\u2018', "'").replace('\u2019', "'") \ + .replace('\u201c', '\"').replace('\u201d', '\"') \ + .replace('\u0020', ' ').replace('\u00a0', ' ') return result diff --git a/sickgear/history.py b/sickgear/history.py index 844088c5..209b3e9a 100644 --- a/sickgear/history.py +++ b/sickgear/history.py @@ -22,8 +22,6 @@ from .common import FAILED, SNATCHED, SNATCHED_PROPER, SUBTITLED, Quality from .name_parser.parser import NameParser import sickgear -from six import PY2, text_type - # noinspection PyUnreachableCode if False: from typing import Any, AnyStr @@ -47,9 +45,6 @@ def _log_history_item(action, tvid, prodid, season, episode, quality, resource, """ log_date = datetime.datetime.now().strftime(dateFormat) - if PY2 and not isinstance(resource, text_type): - resource = text_type(resource, 'utf-8', 'replace') - my_db = db.DBConnection() my_db.action( 'INSERT INTO history' @@ -77,7 +72,7 @@ def log_snatch(search_result): else: provider = 'unknown' - action = Quality.compositeStatus((SNATCHED, SNATCHED_PROPER)[is_proper], search_result.quality) + action = Quality.composite_status((SNATCHED, SNATCHED_PROPER)[is_proper], search_result.quality) resource = search_result.name @@ -125,8 +120,8 @@ def log_subtitle(tvid, prodid, season, episode, status, subtitle_result): """ resource = subtitle_result.path provider = subtitle_result.service - status, quality = Quality.splitCompositeStatus(status) - action = Quality.compositeStatus(SUBTITLED, quality) + status, quality = Quality.split_composite_status(status) + action = Quality.composite_status(SUBTITLED, quality) _log_history_item(action, tvid, prodid, season, episode, quality, resource, provider) @@ -140,8 +135,8 @@ def log_failed(ep_obj, release, provider=None): :param release: release :param provider: provider 
name """ - status, quality = Quality.splitCompositeStatus(ep_obj.status) - action = Quality.compositeStatus(FAILED, quality) + status, quality = Quality.split_composite_status(ep_obj.status) + action = Quality.composite_status(FAILED, quality) _log_history_item(action, ep_obj.show_obj.tvid, ep_obj.show_obj.prodid, ep_obj.season, ep_obj.episode, quality, release, provider) @@ -215,7 +210,7 @@ def history_snatched_proper_fix(): continue if 0 < Quality.get_proper_level(pr.extra_info_no_name(), pr.version, pr.is_anime): cl.append(['UPDATE history SET action = ? WHERE rowid = ?', - [Quality.compositeStatus(SNATCHED_PROPER, int(r['quality'])), + [Quality.composite_status(SNATCHED_PROPER, int(r['quality'])), r['rowid']]]) if cl: my_db.mass_action(cl) diff --git a/sickgear/image_cache.py b/sickgear/image_cache.py index 2f6b5b38..09f8910c 100644 --- a/sickgear/image_cache.py +++ b/sickgear/image_cache.py @@ -20,17 +20,14 @@ import os.path import re import zlib -# noinspection PyPep8Naming -import encodingKludge as ek import exceptions_helper from exceptions_helper import ex import sickgear import sg_helpers from . import db, logger from .metadata.generic import GenericMetadata -from .sgdatetime import timestamp_near +from .sgdatetime import SGDatetime from .indexers.indexer_config import TVINFO_TVDB, TVINFO_TVMAZE, TVINFO_TMDB, TVINFO_IMDB -from lib.tvinfo_base.exceptions import * from six import itervalues, iteritems @@ -56,9 +53,9 @@ class ImageCache(object): characters_dir = None # type: Optional[AnyStr] def __init__(self): - if None is ImageCache.base_dir and ek.ek(os.path.exists, sickgear.CACHE_DIR): - ImageCache.base_dir = ek.ek(os.path.abspath, ek.ek(os.path.join, sickgear.CACHE_DIR, 'images')) - ImageCache.shows_dir = ek.ek(os.path.abspath, ek.ek(os.path.join, self.base_dir, 'shows')) + if None is ImageCache.base_dir and os.path.exists(sickgear.CACHE_DIR): + ImageCache.base_dir = os.path.abspath(os.path.join(sickgear.CACHE_DIR, 'images')) + ImageCache.shows_dir = os.path.abspath(os.path.join(self.base_dir, 'shows')) ImageCache.persons_dir = self._persons_dir() ImageCache.characters_dir = self._characters_dir() @@ -70,17 +67,17 @@ class ImageCache(object): # """ # Builds up the full path to the image cache directory # """ - # return ek.ek(os.path.abspath, ek.ek(os.path.join, sickgear.CACHE_DIR, 'images')) + # return os.path.abspath(os.path.join(sickgear.CACHE_DIR, 'images')) @staticmethod def _persons_dir(): # type: (...) -> AnyStr - return ek.ek(os.path.join, sickgear.CACHE_DIR, 'images', 'person') + return os.path.join(sickgear.CACHE_DIR, 'images', 'person') @staticmethod def _characters_dir(): # type: (...) 
-> AnyStr - return ek.ek(os.path.join, sickgear.CACHE_DIR, 'images', 'characters') + return os.path.join(sickgear.CACHE_DIR, 'images', 'characters') def _fanart_dir(self, tvid=None, prodid=None): # type: (int, int) -> AnyStr @@ -95,7 +92,7 @@ class ImageCache(object): :rtype: AnyStr or None """ if None not in (tvid, prodid): - return ek.ek(os.path.abspath, ek.ek(os.path.join, self.shows_dir, '%s-%s' % (tvid, prodid), 'fanart')) + return os.path.abspath(os.path.join(self.shows_dir, '%s-%s' % (tvid, prodid), 'fanart')) def _thumbnails_dir(self, tvid, prodid): # type: (int, int) -> AnyStr @@ -109,7 +106,7 @@ class ImageCache(object): :return: path :rtype: AnyStr """ - return ek.ek(os.path.abspath, ek.ek(os.path.join, self.shows_dir, '%s-%s' % (tvid, prodid), 'thumbnails')) + return os.path.abspath(os.path.join(self.shows_dir, '%s-%s' % (tvid, prodid), 'thumbnails')) @staticmethod def _person_base_name(person_obj): @@ -134,7 +131,7 @@ class ImageCache(object): :param base_path: """ filename = '%s.jpg' % base_path or self._person_base_name(person_obj) - return ek.ek(os.path.join, self.persons_dir, filename) + return os.path.join(self.persons_dir, filename) def person_thumb_path(self, person_obj, base_path=None): # type: (Optional[Person], AnyStr) -> AnyStr @@ -144,7 +141,7 @@ class ImageCache(object): :param base_path: """ filename = '%s_thumb.jpg' % base_path or self._person_base_name(person_obj) - return ek.ek(os.path.join, self.persons_dir, filename) + return os.path.join(self.persons_dir, filename) def person_both_paths(self, person_obj): # type: (Person) -> Tuple[AnyStr, AnyStr] @@ -164,7 +161,7 @@ class ImageCache(object): :param base_path: """ filename = '%s.jpg' % base_path or self._character_base_name(character_obj, show_obj) - return ek.ek(os.path.join, self.characters_dir, filename) + return os.path.join(self.characters_dir, filename) def character_thumb_path(self, character_obj, show_obj, base_path=None): # type: (Optional[Character], Optional[TVShow], AnyStr) -> AnyStr @@ -175,7 +172,7 @@ class ImageCache(object): :param base_path: """ filename = '%s_thumb.jpg' % base_path or self._character_base_name(character_obj, show_obj) - return ek.ek(os.path.join, self.characters_dir, filename) + return os.path.join(self.characters_dir, filename) def character_both_path(self, character_obj, show_obj=None, tvid=None, proid=None, person_obj=None): # type: (Character, TVShow, integer_types, integer_types, Person) -> Tuple[AnyStr, AnyStr] @@ -208,7 +205,7 @@ class ImageCache(object): :return: a full path to the cached poster file for the given tvid prodid :rtype: AnyStr """ - return ek.ek(os.path.join, self.shows_dir, '%s-%s' % (tvid, prodid), 'poster.jpg') + return os.path.join(self.shows_dir, '%s-%s' % (tvid, prodid), 'poster.jpg') def banner_path(self, tvid, prodid): # type: (int, int) -> AnyStr @@ -222,7 +219,7 @@ class ImageCache(object): :return: a full path to the cached banner file for the given tvid prodid :rtype: AnyStr """ - return ek.ek(os.path.join, self.shows_dir, '%s-%s' % (tvid, prodid), 'banner.jpg') + return os.path.join(self.shows_dir, '%s-%s' % (tvid, prodid), 'banner.jpg') def fanart_path(self, tvid, prodid, prefix=''): # type: (int, int, Optional[AnyStr]) -> AnyStr @@ -238,7 +235,7 @@ class ImageCache(object): :return: a full path to the cached fanart file for the given tvid prodid :rtype: AnyStr """ - return ek.ek(os.path.join, self._fanart_dir(tvid, prodid), '%s%s' % (prefix, 'fanart.jpg')) + return os.path.join(self._fanart_dir(tvid, prodid), '%s%s' % (prefix, 
'fanart.jpg')) def poster_thumb_path(self, tvid, prodid): # type: (int, int) -> AnyStr @@ -252,7 +249,7 @@ class ImageCache(object): :return: a full path to the cached poster file for the given tvid prodid :rtype: AnyStr """ - return ek.ek(os.path.join, self._thumbnails_dir(tvid, prodid), 'poster.jpg') + return os.path.join(self._thumbnails_dir(tvid, prodid), 'poster.jpg') def banner_thumb_path(self, tvid, prodid): # type: (int, int) -> AnyStr @@ -266,7 +263,7 @@ class ImageCache(object): :return: a full path to the cached poster file for the given tvid prodid :rtype: AnyStr """ - return ek.ek(os.path.join, self._thumbnails_dir(tvid, prodid), 'banner.jpg') + return os.path.join(self._thumbnails_dir(tvid, prodid), 'banner.jpg') @staticmethod def has_file(image_file): @@ -274,15 +271,15 @@ class ImageCache(object): """ :param image_file: image file :type image_file: AnyStr - :return: true if a image_file exists + :return: true if an image_file exists :rtype: bool """ result = [] - for filename in ek.ek(glob.glob, image_file): - result.append(ek.ek(os.path.isfile, filename) and filename) - logger.log(u'Found cached %s' % filename, logger.DEBUG) + for filename in glob.glob(image_file): + result.append(os.path.isfile(filename) and filename) + logger.debug(f'Found cached {filename}') - not any(result) and logger.log(u'No cache for %s' % image_file, logger.DEBUG) + not any(result) and logger.debug(f'No cache for {image_file}') return any(result) def has_poster(self, tvid, prodid): @@ -367,8 +364,8 @@ class ImageCache(object): :param image: image file or data :param is_binary: is data instead of path """ - if not is_binary and not ek.ek(os.path.isfile, image): - logger.warning(u'File not found to determine image type of %s' % image) + if not is_binary and not os.path.isfile(image): + logger.warning(f'File not found to determine image type of {image}') return if not image: logger.warning('No Image Data to determinate image type') @@ -384,7 +381,7 @@ class ImageCache(object): img_parser.parse_photoshop_content = False img_metadata = extractMetadata(img_parser) except (BaseException, Exception) as e: - logger.debug(u'Unable to extract metadata from %s, not using file. Error: %s' % (image, ex(e))) + logger.debug(f'Unable to extract metadata from {image}, not using file. 
Error: {ex(e)}') return if not img_metadata: @@ -392,7 +389,7 @@ class ImageCache(object): msg = 'Image Data' else: msg = image - logger.debug(u'Unable to extract metadata from %s, not using file' % msg) + logger.debug(f'Unable to extract metadata from {msg}, not using file') return width = img_metadata.get('width') @@ -444,9 +441,9 @@ class ImageCache(object): logger.debug(msg_success % 'fanart') return self.FANART - logger.warning(u'Skipped image with fanart aspect ratio but less than 500 pixels wide') + logger.warning('Skipped image with fanart aspect ratio but less than 500 pixels wide') else: - logger.warning(u'Skipped image with useless ratio %s' % img_ratio) + logger.warning(f'Skipped image with useless ratio {img_ratio}') def should_refresh(self, image_type=None, provider='local'): # type: (int, Optional[AnyStr]) -> bool @@ -468,7 +465,7 @@ class ImageCache(object): minutes_iv = 60 * 3 # daily_interval = 60 * 60 * 23 iv = minutes_iv - now_stamp = int(timestamp_near(datetime.datetime.now())) + now_stamp = SGDatetime.timestamp_near() the_time = int(sql_result[0]['time']) return now_stamp - the_time > iv @@ -485,7 +482,7 @@ class ImageCache(object): """ my_db = db.DBConnection('cache.db') my_db.upsert('lastUpdate', - {'time': int(timestamp_near(datetime.datetime.now()))}, + {'time': SGDatetime.timestamp_near()}, {'provider': 'imsg_%s_%s' % ((image_type, self.FANART)[None is image_type], provider)}) def _cache_image_from_file(self, image_path, img_type, tvid, prodid, prefix='', move_file=False): @@ -525,13 +522,13 @@ class ImageCache(object): dest_path = self.fanart_path(*id_args + (prefix,)).replace('.fanart.jpg', '.%s.fanart.jpg' % crc) fanart_dir = [self._fanart_dir(*id_args)] else: - logger.log(u'Invalid cache image type: ' + str(img_type), logger.ERROR) + logger.error(f'Invalid cache image type: {img_type}') return False for cache_dir in [self.shows_dir, self._thumbnails_dir(*id_args)] + fanart_dir: sg_helpers.make_path(cache_dir) - logger.log(u'%sing from %s to %s' % (('Copy', 'Mov')[move_file], image_path, dest_path)) + logger.log(f'{("Copy", "Mov")[move_file]}ing from {image_path} to {dest_path}') # copy poster, banner as thumb, even if moved we need to duplicate the images if img_type in (self.POSTER, self.BANNER) and dest_thumb_path: sg_helpers.copy_file(image_path, dest_thumb_path) @@ -540,7 +537,7 @@ class ImageCache(object): else: sg_helpers.copy_file(image_path, dest_path) - return ek.ek(os.path.isfile, dest_path) and dest_path or None + return os.path.isfile(dest_path) and dest_path or None def _cache_info_source_images(self, show_obj, img_type, num_files=0, max_files=500, force=False, show_infos=None): # type: (TVShow, int, int, int, bool, ShowInfosDict) -> bool @@ -577,7 +574,7 @@ class ImageCache(object): img_type_name = 'banner_thumb' dest_path = self.banner_thumb_path(*arg_tvid_prodid) else: - logger.log(u'Invalid cache image type: ' + str(img_type), logger.ERROR) + logger.error(f'Invalid cache image type: {img_type}') return False # retrieve the image from TV info source using the generic metadata class @@ -588,7 +585,7 @@ class ImageCache(object): return False crcs = [] - for cache_file_name in ek.ek(glob.glob, dest_path): + for cache_file_name in glob.glob(dest_path): with open(cache_file_name, mode='rb') as resource: crc = '%05X' % (zlib.crc32(resource.read()) & 0xFFFFFFFF) if crc not in crcs: @@ -627,11 +624,10 @@ class ImageCache(object): success += (0, 1)[result] if num_files > max_files: break - total = len(ek.ek(glob.glob, dest_path)) - logger.log(u'Saved 
%s fanart images%s. Cached %s of max %s fanart file%s' - % (success, - ('', ' from ' + ', '.join([x for x in list(set(sources))]))[0 < len(sources)], - total, sickgear.FANART_LIMIT, sg_helpers.maybe_plural(total))) + total = len(glob.glob(dest_path)) + logger.log(f'Saved {success} fanart images' + f'{("", " from " + ", ".join([x for x in list(set(sources))]))[0 < len(sources)]}.' + f' Cached {total} of max {sickgear.FANART_LIMIT} fanart file{sg_helpers.maybe_plural(total)}') return bool(success) image_urls = metadata_generator.retrieve_show_image(img_type_name, show_obj, return_links=True, @@ -655,11 +651,11 @@ class ImageCache(object): if thumb_img_data: thumb_result = metadata_generator.write_image(thumb_img_data, dest_thumb_path, force=True) if not thumb_result: - thumb_result = metadata_generator.write_image(img_data, dest_thumb_path, force=True) + metadata_generator.write_image(img_data, dest_thumb_path, force=True) break if result: - logger.log(u'Saved image type %s' % img_type_name) + logger.log(f'Saved image type {img_type_name}') return result def fill_cache(self, show_obj, force=False): @@ -686,7 +682,7 @@ class ImageCache(object): self.BANNER_THUMB: not self.has_banner_thumbnail(*arg_tvid_prodid) or force} if not any(itervalues(need_images)): - logger.log(u'%s: No new cache images needed. Done.' % show_obj.tvid_prodid) + logger.log(f'{show_obj.tvid_prodid}: No new cache images needed. Done.') return show_infos = GenericMetadata.gen_show_infos_dict(show_obj) @@ -696,12 +692,12 @@ class ImageCache(object): cache_path = self.fanart_path(*arg_tvid_prodid).replace('fanart.jpg', '') # num_images = len(fnmatch.filter(os.listdir(cache_path), '*.jpg')) - for cache_dir in ek.ek(glob.glob, cache_path): + for cache_dir in glob.glob(cache_path): if show_obj.tvid_prodid in sickgear.FANART_RATINGS: del (sickgear.FANART_RATINGS[show_obj.tvid_prodid]) result = sg_helpers.remove_file(cache_dir, tree=True) if result: - logger.log(u'%s cache file %s' % (result, cache_dir), logger.DEBUG) + logger.debug(f'{result} cache file {cache_dir}') try: checked_files = [] @@ -712,16 +708,16 @@ class ImageCache(object): needed = [] if any([need_images[self.POSTER], need_images[self.BANNER]]): poster_path = cur_provider.get_poster_path(show_obj) - if poster_path not in checked_files and ek.ek(os.path.isfile, poster_path): + if poster_path not in checked_files and os.path.isfile(poster_path): needed += [[False, poster_path]] if need_images[self.FANART]: fanart_path = cur_provider.get_fanart_path(show_obj) - if fanart_path not in checked_files and ek.ek(os.path.isfile, fanart_path): + if fanart_path not in checked_files and os.path.isfile(fanart_path): needed += [[True, fanart_path]] if 0 == len(needed): break - logger.log(u'Checking for images from optional %s metadata' % cur_provider.name, logger.DEBUG) + logger.debug(f'Checking for images from optional {cur_provider.name} metadata') for all_meta_provs, path_file in needed: checked_files += [path_file] @@ -738,9 +734,10 @@ class ImageCache(object): if None is cur_file_type: continue - logger.log(u'Checking if image %s (type %s needs metadata: %s)' - % (cache_file_name, str(cur_file_type), - ('No', 'Yes')[True is need_images[cur_file_type]]), logger.DEBUG) + logger.debug(f'Checking if image {cache_file_name} ' + f'(type {str(cur_file_type)}' + f' needs metadata: {("No", "Yes")[True is need_images[cur_file_type]]}' + f')') if need_images.get(cur_file_type): need_images[cur_file_type] = ( @@ -749,8 +746,8 @@ class ImageCache(object): if self.FANART == cur_file_type 
and \ (not sickgear.FANART_LIMIT or sickgear.FANART_LIMIT < need_images[cur_file_type]): continue - logger.log(u'Caching image found in the show directory to the image cache: %s, type %s' - % (cache_file_name, cur_file_type), logger.DEBUG) + logger.debug(f'Caching image found in the show directory to the image cache: {cache_file_name},' + f' type {cur_file_type}') self._cache_image_from_file( cache_file_name, cur_file_type, @@ -758,7 +755,7 @@ class ImageCache(object): isinstance(need_images[cur_file_type], bool)],)) except exceptions_helper.ShowDirNotFoundException: - logger.log(u'Unable to search for images in show directory because it doesn\'t exist', logger.WARNING) + logger.warning('Unable to search for images in show directory because it doesn\'t exist') # download images from TV info sources for image_type, name_type in [[self.POSTER, 'Poster'], [self.BANNER, 'Banner'], [self.FANART, 'Fanart']]: @@ -766,12 +763,12 @@ class ImageCache(object): if not max_files or max_files < need_images[image_type]: continue - logger.log(u'Seeing if we still need an image of type %s: %s' - % (name_type, ('No', 'Yes')[True is need_images[image_type]]), logger.DEBUG) + logger.debug(f'Seeing if we still need an image of type {name_type}:' + f' {("No", "Yes")[True is need_images[image_type]]}') if need_images[image_type]: file_num = (need_images[image_type] + 1, 1)[isinstance(need_images[image_type], bool)] if file_num <= max_files: self._cache_info_source_images(show_obj, image_type, file_num, max_files, force=force, show_infos=show_infos) - logger.log(u'Done cache check') + logger.log('Done cache check') diff --git a/sickgear/indexermapper.py b/sickgear/indexermapper.py index b5eafcf8..fcb924c2 100644 --- a/sickgear/indexermapper.py +++ b/sickgear/indexermapper.py @@ -26,8 +26,7 @@ import sickgear from lib.dateutil.parser import parse -from _23 import unidecode -from six import iteritems, moves, string_types, PY2 +from six import iteritems, moves, string_types # noinspection PyUnreachableCode if False: @@ -133,7 +132,7 @@ def confirm_show(premiere_date, shows_premiere, expected_name, show_name): # type: (Optional[datetime.date], Optional[Union[AnyStr, datetime.date]], AnyStr, AnyStr) -> bool """ confirm show possible confirmations: - 1. premiere dates are less then 2 days apart + 1. premiere dates are less than 2 days apart 2. 
show name is the same and premiere year is 1 year or less apart :param premiere_date: expected show premiere date @@ -178,9 +177,7 @@ def clean_show_name(showname): :return: :rtype: AnyStr """ - if not PY2: - return re.sub(r'[(\s]*(?:19|20)\d\d[)\s]*$', '', showname) - return re.sub(r'[(\s]*(?:19|20)\d\d[)\s]*$', '', unidecode(showname)) + return re.sub(r'[(\s]*(?:19|20)\d\d[)\s]*$', '', showname) def get_show_name_date(show_obj): @@ -255,7 +252,7 @@ def map_indexers_to_show(show_obj, update=False, force=False, recheck=False, im_ all_ids_srcs = [src_tv_id] + [s for s in (TVINFO_TRAKT, TVINFO_TMDB, TVINFO_TVMAZE, TVINFO_TVDB, TVINFO_IMDB) if s != src_tv_id] searched, confirmed = {}, False - for r in moves.range(len(all_ids_srcs)): + for _ in moves.range(len(all_ids_srcs)): search_done = False for i in all_ids_srcs: if new_ids.verified.get(i): @@ -411,7 +408,7 @@ def load_mapped_ids(**kwargs): cur_show_obj.ids = sickgear.indexermapper.map_indexers_to_show(cur_show_obj, **n_kargs) except (BaseException, Exception): logger.debug('Error loading mapped id\'s for show: %s' % cur_show_obj.unique_name) - logger.log('Traceback: %s' % traceback.format_exc(), logger.ERROR) + logger.error('Traceback: %s' % traceback.format_exc()) logger.log('TV info mappings loaded') diff --git a/sickgear/indexers/indexer_api.py b/sickgear/indexers/indexer_api.py index 3d3e7b64..c5ee5f65 100644 --- a/sickgear/indexers/indexer_api.py +++ b/sickgear/indexers/indexer_api.py @@ -16,12 +16,9 @@ import os from .indexer_config import init_config, tvinfo_config -from sg_helpers import make_path, proxy_setting +from sg_helpers import proxy_setting import sickgear from lib.tvinfo_base import TVInfoBase -import encodingKludge as ek - -from _23 import list_values # noinspection PyUnreachableCode if False: @@ -41,8 +38,7 @@ class TVInfoAPI(object): if tvinfo_config[self.tvid]['active'] or ('no_dummy' in kwargs and True is kwargs['no_dummy']): if 'no_dummy' in kwargs: kwargs.pop('no_dummy') - indexer_cache_dir = ek.ek(os.path.join, sickgear.CACHE_DIR, 'tvinfo_cache', - tvinfo_config[self.tvid]['name']) + indexer_cache_dir = os.path.join(sickgear.CACHE_DIR, 'tvinfo_cache', tvinfo_config[self.tvid]['name']) kwargs['diskcache_dir'] = indexer_cache_dir return tvinfo_config[self.tvid]['module'](*args, **kwargs) else: @@ -85,13 +81,13 @@ class TVInfoAPI(object): @property def sources(self): # type: () -> Dict[int, AnyStr] - return dict([(int(x['id']), x['name']) for x in list_values(tvinfo_config) if not x['mapped_only'] and + return dict([(int(x['id']), x['name']) for x in list(tvinfo_config.values()) if not x['mapped_only'] and True is not x.get('fallback') and True is not x.get('people_only')]) @property def search_sources(self): # type: () -> Dict[int, AnyStr] - return dict([(int(x['id']), x['name']) for x in list_values(tvinfo_config) if not x['mapped_only'] and + return dict([(int(x['id']), x['name']) for x in list(tvinfo_config.values()) if not x['mapped_only'] and x.get('active') and not x.get('defunct') and True is not x.get('fallback') and True is not x.get('people_only')]) @@ -101,7 +97,7 @@ class TVInfoAPI(object): """ :return: return all indexers including mapped only indexers excluding fallback indexers """ - return dict([(int(x['id']), x['name']) for x in list_values(tvinfo_config) if True is not x.get('fallback') + return dict([(int(x['id']), x['name']) for x in list(tvinfo_config.values()) if True is not x.get('fallback') and True is not x.get('people_only')]) @property @@ -110,9 +106,9 @@ class TVInfoAPI(object): """ 
:return: return all fallback indexers """ - return dict([(int(x['id']), x['name']) for x in list_values(tvinfo_config) if True is x.get('fallback')]) + return dict([(int(x['id']), x['name']) for x in list(tvinfo_config.values()) if True is x.get('fallback')]) @property def xem_supported_sources(self): # type: () -> Dict[int, AnyStr] - return dict([(int(x['id']), x['name']) for x in list_values(tvinfo_config) if x.get('xem_origin')]) + return dict([(int(x['id']), x['name']) for x in list(tvinfo_config.values()) if x.get('xem_origin')]) diff --git a/sickgear/logger.py b/sickgear/logger.py index 3aa6791d..1ecbca02 100644 --- a/sickgear/logger.py +++ b/sickgear/logger.py @@ -31,7 +31,7 @@ from logging.handlers import TimedRotatingFileHandler import sickgear from . import classes -from .sgdatetime import timestamp_near +from .sgdatetime import SGDatetime from sg_helpers import md5_for_text, remove_file_perm # noinspection PyUnreachableCode @@ -51,7 +51,7 @@ MESSAGE = logging.INFO DEBUG = logging.DEBUG DB = 5 -reverseNames = {u'ERROR': ERROR, u'WARNING': WARNING, u'INFO': MESSAGE, u'DEBUG': DEBUG, u'DB': DB} +reverseNames = {'ERROR': ERROR, 'WARNING': WARNING, 'INFO': MESSAGE, 'DEBUG': DEBUG, 'DB': DB} # suppress output with this handler @@ -198,7 +198,7 @@ class SBRotatingLogHandler(object): mem_key = 'logger' for to_log in log_list: log_id = md5_for_text(to_log) - now = int(timestamp_near(datetime.datetime.now())) + now = SGDatetime.timestamp_near() expired = now > sickgear.MEMCACHE.get(mem_key, {}).get(log_id, 0) sickgear.MEMCACHE[mem_key] = {} sickgear.MEMCACHE[mem_key][log_id] = 2 + now @@ -263,8 +263,8 @@ class SBRotatingLogHandler(object): buf = fh.read(min(remaining_size, buf_size)) remaining_size -= buf_size lines = buf.split('\n') - # the first line of the buffer is probably not a complete line so - # we'll save it and append it to the last line of the next buffer + # the first line of the buffer is probably not a complete line, + # so save it and append it to the last line of the next buffer # we read if None is not segment: # if the previous chunk starts right from the beginning of line @@ -337,9 +337,8 @@ class TimedCompressedRotatingFileHandler(TimedRotatingFileHandler): except AttributeError: pass - import encodingKludge try: - encodingKludge.ek(os.rename, self.baseFilename, dfn) + os.rename(self.baseFilename, dfn) except (BaseException, Exception): pass @@ -360,9 +359,8 @@ class TimedCompressedRotatingFileHandler(TimedRotatingFileHandler): if 0 < self.backupCount: # find the oldest log file and delete it # phase out files named sickgear.log in favour of sickgear.logs over backup_count days - all_names = encodingKludge.ek(glob.glob, file_name + '_*') + \ - encodingKludge.ek(glob.glob, encodingKludge.ek(os.path.join, encodingKludge.ek( - os.path.dirname, file_name), 'sickbeard_*')) + all_names = glob.glob(file_name + '_*') \ + + glob.glob(os.path.join(os.path.dirname(file_name), 'sickbeard_*')) if len(all_names) > self.backupCount: all_names.sort() self.delete_logfile(all_names[0]) diff --git a/sickgear/metadata/__init__.py b/sickgear/metadata/__init__.py index 8e1a4315..1dbf73ad 100644 --- a/sickgear/metadata/__init__.py +++ b/sickgear/metadata/__init__.py @@ -19,14 +19,13 @@ __all__ = ['generic', 'helpers', 'kodi', 'mede8er', 'mediabrowser', 'ps3', 'tivo import sys from . 
import kodi, mede8er, mediabrowser, ps3, tivo, wdtv, xbmc, xbmc_12plus -from _23 import filter_list def available_generators(): - return filter_list(lambda x: x not in ('generic', 'helpers'), __all__) + return list(filter(lambda x: x not in ('generic', 'helpers'), __all__)) -def _getMetadataModule(name): +def _get_metadata_module(name): name = name.lower() prefix = "sickgear.metadata." if name in __all__ and prefix + name in sys.modules: @@ -34,8 +33,8 @@ def _getMetadataModule(name): return None -def _getMetadataClass(name): - module = _getMetadataModule(name) +def _get_metadata_class(name): + module = _get_metadata_module(name) if not module: return None @@ -46,10 +45,10 @@ def _getMetadataClass(name): def get_metadata_generator_dict(): result = {} for cur_generator_id in available_generators(): - cur_generator = _getMetadataClass(cur_generator_id) + cur_generator = _get_metadata_class(cur_generator_id) if not cur_generator: continue result[cur_generator.name] = cur_generator return result - + diff --git a/sickgear/metadata/generic.py b/sickgear/metadata/generic.py index 906a065b..33cd8e1c 100644 --- a/sickgear/metadata/generic.py +++ b/sickgear/metadata/generic.py @@ -30,14 +30,11 @@ from ..indexers.indexer_config import TVINFO_TVDB, TVINFO_TMDB from lib.tvinfo_base import TVInfoImage, TVInfoImageType, TVInfoImageSize from lib.tvinfo_base.exceptions import * import sickgear -# noinspection PyPep8Naming -import encodingKludge as ek from exceptions_helper import ex from lib.fanart.core import Request as fanartRequest import lib.fanart as fanart from lxml_etree import etree -from _23 import filter_iter, list_keys from six import iteritems, itervalues, string_types # noinspection PyUnreachableCode @@ -127,13 +124,13 @@ class GenericMetadata(object): def get_id(self): # type: (...) 
-> AnyStr - return GenericMetadata.makeID(self.name) + return GenericMetadata.make_id(self.name) @staticmethod - def makeID(name): + def make_id(name): # type: (AnyStr) -> AnyStr name_id = re.sub("[+]", "plus", name) - name_id = re.sub(r"[^\w\d_]", "_", name_id).lower() + name_id = re.sub(r"[^\w_]", "_", name_id).lower() return name_id def set_config(self, string): @@ -151,71 +148,69 @@ class GenericMetadata(object): self.season_all_banner = config_list[9] def _has_show_metadata(self, show_obj): - # type: (sickgear.tv.TVShow) -> AnyStr - result = ek.ek(os.path.isfile, self.get_show_file_path(show_obj)) - logger.log(u"Checking if " + self.get_show_file_path(show_obj) + " exists: " + str(result), logger.DEBUG) + # type: (sickgear.tv.TVShow) -> bool + result = os.path.isfile(self.get_show_file_path(show_obj)) + logger.debug(f'Checking if {self.get_show_file_path(show_obj)} exists: {result}') return result def has_episode_metadata(self, ep_obj): - # type: (sickgear.tv.TVEpisode) -> AnyStr - result = ek.ek(os.path.isfile, self.get_episode_file_path(ep_obj)) - logger.log(u"Checking if " + self.get_episode_file_path(ep_obj) + " exists: " + str(result), logger.DEBUG) + # type: (sickgear.tv.TVEpisode) -> bool + result = os.path.isfile(self.get_episode_file_path(ep_obj)) + logger.debug(f'Checking if {self.get_episode_file_path(ep_obj)} exists: {result}') return result def _has_fanart(self, show_obj): - # type: (sickgear.tv.TVShow) -> AnyStr - result = ek.ek(os.path.isfile, self.get_fanart_path(show_obj)) - logger.log(u"Checking if " + self.get_fanart_path(show_obj) + " exists: " + str(result), logger.DEBUG) + # type: (sickgear.tv.TVShow) -> bool + result = os.path.isfile(self.get_fanart_path(show_obj)) + logger.debug(f'Checking if {self.get_fanart_path(show_obj)} exists: {result}') return result def _has_poster(self, show_obj): - # type: (sickgear.tv.TVShow) -> AnyStr - result = ek.ek(os.path.isfile, self.get_poster_path(show_obj)) - logger.log(u"Checking if " + self.get_poster_path(show_obj) + " exists: " + str(result), logger.DEBUG) + # type: (sickgear.tv.TVShow) -> bool + result = os.path.isfile(self.get_poster_path(show_obj)) + logger.debug(f'Checking if {self.get_poster_path(show_obj)} exists: {result}') return result def _has_banner(self, show_obj): - # type: (sickgear.tv.TVShow) -> AnyStr - result = ek.ek(os.path.isfile, self.get_banner_path(show_obj)) - logger.log(u"Checking if " + self.get_banner_path(show_obj) + " exists: " + str(result), logger.DEBUG) + # type: (sickgear.tv.TVShow) -> bool + result = os.path.isfile(self.get_banner_path(show_obj)) + logger.debug(f'Checking if {self.get_banner_path(show_obj)} exists: {result}') return result def has_episode_thumb(self, ep_obj): - # type: (sickgear.tv.TVEpisode) -> AnyStr + # type: (sickgear.tv.TVEpisode) -> bool location = self.get_episode_thumb_path(ep_obj) - result = None is not location and ek.ek(os.path.isfile, location) + result = None is not location and os.path.isfile(location) if location: - logger.log(u"Checking if " + location + " exists: " + str(result), logger.DEBUG) + logger.debug(f'Checking if {location} exists: {result}') return result def _has_season_poster(self, show_obj, season): - # type: (sickgear.tv.TVShow,int) -> AnyStr + # type: (sickgear.tv.TVShow,int) -> bool location = self.get_season_poster_path(show_obj, season) - result = None is not location and ek.ek(os.path.isfile, location) + result = None is not location and os.path.isfile(location) if location: - logger.log(u"Checking if " + location + " exists: " + 
str(result), logger.DEBUG) + logger.debug(f'Checking if {location} exists: {result}') return result def _has_season_banner(self, show_obj, season): - # type: (sickgear.tv.TVShow,int) -> AnyStr + # type: (sickgear.tv.TVShow,int) -> bool location = self.get_season_banner_path(show_obj, season) - result = None is not location and ek.ek(os.path.isfile, location) + result = None is not location and os.path.isfile(location) if location: - logger.log(u"Checking if " + location + " exists: " + str(result), logger.DEBUG) + logger.debug(f'Checking if {location} exists: {result}') return result def _has_season_all_poster(self, show_obj): - # type: (sickgear.tv.TVShow) -> AnyStr - result = ek.ek(os.path.isfile, self.get_season_all_poster_path(show_obj)) - logger.log(u"Checking if " + self.get_season_all_poster_path(show_obj) + " exists: " + str(result), - logger.DEBUG) + # type: (sickgear.tv.TVShow) -> bool + result = os.path.isfile(self.get_season_all_poster_path(show_obj)) + logger.debug(f'Checking if {self.get_season_all_poster_path(show_obj)} exists: {result}') return result def _has_season_all_banner(self, show_obj): - # type: (sickgear.tv.TVShow) -> AnyStr - result = ek.ek(os.path.isfile, self.get_season_all_banner_path(show_obj)) - logger.log(u"Checking if " + self.get_season_all_banner_path(show_obj) + " exists: " + str(result), - logger.DEBUG) + # type: (sickgear.tv.TVShow) -> bool + result = os.path.isfile(self.get_season_all_banner_path(show_obj)) + logger.debug(f'Checking if {self.get_season_all_banner_path(show_obj)} exists: {result}') return result @staticmethod @@ -245,7 +240,7 @@ class GenericMetadata(object): def get_show_file_path(self, show_obj): # type: (sickgear.tv.TVShow) -> AnyStr - return ek.ek(os.path.join, show_obj.location, self._show_metadata_filename) + return os.path.join(show_obj.location, self._show_metadata_filename) def get_episode_file_path(self, ep_obj): # type: (sickgear.tv.TVEpisode) -> AnyStr @@ -253,15 +248,15 @@ class GenericMetadata(object): def get_fanart_path(self, show_obj): # type: (sickgear.tv.TVShow) -> AnyStr - return ek.ek(os.path.join, show_obj.location, self.fanart_name) + return os.path.join(show_obj.location, self.fanart_name) def get_poster_path(self, show_obj): # type: (sickgear.tv.TVShow) -> AnyStr - return ek.ek(os.path.join, show_obj.location, self.poster_name) + return os.path.join(show_obj.location, self.poster_name) def get_banner_path(self, show_obj): # type: (sickgear.tv.TVShow) -> AnyStr - return ek.ek(os.path.join, show_obj.location, self.banner_name) + return os.path.join(show_obj.location, self.banner_name) def get_episode_thumb_path(self, ep_obj): # type: (sickgear.tv.TVEpisode) -> Optional[AnyStr] @@ -269,7 +264,7 @@ class GenericMetadata(object): Returns the path where the episode thumbnail should be stored. 
ep_obj: a TVEpisode instance for which to create the thumbnail """ - if ek.ek(os.path.isfile, ep_obj.location): + if os.path.isfile(ep_obj.location): tbn_filename = ep_obj.location.rpartition('.') @@ -296,7 +291,7 @@ class GenericMetadata(object): else: season_poster_filename = 'season' + str(season).zfill(2) - return ek.ek(os.path.join, show_obj.location, season_poster_filename + '-poster.jpg') + return os.path.join(show_obj.location, season_poster_filename + '-poster.jpg') def get_season_banner_path(self, show_obj, season): # type: (sickgear.tv.TVShow, int) -> AnyStr @@ -314,15 +309,15 @@ class GenericMetadata(object): else: season_banner_filename = 'season' + str(season).zfill(2) - return ek.ek(os.path.join, show_obj.location, season_banner_filename + '-banner.jpg') + return os.path.join(show_obj.location, season_banner_filename + '-banner.jpg') def get_season_all_poster_path(self, show_obj): # type: (sickgear.tv.TVShow) -> AnyStr - return ek.ek(os.path.join, show_obj.location, self.season_all_poster_name) + return os.path.join(show_obj.location, self.season_all_poster_name) def get_season_all_banner_path(self, show_obj): # type: (sickgear.tv.TVShow) -> AnyStr - return ek.ek(os.path.join, show_obj.location, self.season_all_banner_name) + return os.path.join(show_obj.location, self.season_all_banner_name) def _show_data(self, show_obj): # type: (sickgear.tv.TVShow) -> Optional[Union[bool, etree.Element]] @@ -346,8 +341,7 @@ class GenericMetadata(object): isinstance(getattr(fetched_show_info, 'data', None), (list, dict)) and 'seriesname' in getattr(fetched_show_info, 'data', [])) and \ not hasattr(fetched_show_info, 'seriesname'): - logger.log(u'Show %s not found on %s ' % - (show_obj.name, sickgear.TVInfoAPI(show_obj.tvid).name), logger.WARNING) + logger.warning(f'Show {show_obj.name} not found on {sickgear.TVInfoAPI(show_obj.tvid).name} ') return False return True @@ -367,8 +361,8 @@ class GenericMetadata(object): try: result = self.write_show_file(show_obj) except BaseTVinfoError as e: - logger.log('Unable to find useful show metadata for %s on %s: %s' % ( - self.name, sickgear.TVInfoAPI(show_obj.tvid).name, ex(e)), logger.WARNING) + logger.warning(f'Unable to find useful show metadata for {self.name}' + f' on {sickgear.TVInfoAPI(show_obj.tvid).name}: {ex(e)}') return result @@ -376,24 +370,23 @@ class GenericMetadata(object): # type: (sickgear.tv.TVEpisode, bool) -> bool result = False if self.episode_metadata and ep_obj and (not self.has_episode_metadata(ep_obj) or force): - logger.log('Metadata provider %s creating episode metadata for %s' % (self.name, ep_obj.pretty_name()), - logger.DEBUG) + logger.debug('Metadata provider %s creating episode metadata for %s' % (self.name, ep_obj.pretty_name())) try: result = self.write_ep_file(ep_obj) except BaseTVinfoError as e: - logger.log('Unable to find useful episode metadata for %s on %s: %s' % ( - self.name, sickgear.TVInfoAPI(ep_obj.show_obj.tvid).name, ex(e)), logger.WARNING) + logger.warning(f'Unable to find useful episode metadata for {self.name}' + f' on {sickgear.TVInfoAPI(ep_obj.show_obj.tvid).name}: {ex(e)}') return result def update_show_indexer_metadata(self, show_obj): # type: (sickgear.tv.TVShow) -> bool if self.show_metadata and show_obj and self._has_show_metadata(show_obj): - logger.debug(u'Metadata provider %s updating show indexer metadata file for %s' % ( - self.name, show_obj.unique_name)) + logger.debug(f'Metadata provider {self.name}' + f' updating show indexer metadata file for {show_obj.unique_name}') nfo_file_path 
= self.get_show_file_path(show_obj) - with ek.ek(io.open, nfo_file_path, 'r', encoding='utf8') as xmlFileObj: + with io.open(nfo_file_path, 'r', encoding='utf8') as xmlFileObj: show_xml = etree.ElementTree(file=xmlFileObj) tvid = show_xml.find('indexer') @@ -422,29 +415,28 @@ class GenericMetadata(object): def create_fanart(self, show_obj): # type: (sickgear.tv.TVShow) -> bool if self.fanart and show_obj and not self._has_fanart(show_obj): - logger.debug(u'Metadata provider %s creating fanart for %s' % (self.name, show_obj.unique_name)) + logger.debug(f'Metadata provider {self.name} creating fanart for {show_obj.unique_name}') return self.save_fanart(show_obj) return False def create_poster(self, show_obj): # type: (sickgear.tv.TVShow) -> bool if self.poster and show_obj and not self._has_poster(show_obj): - logger.debug(u'Metadata provider %s creating poster for %s' % (self.name, show_obj.unique_name)) + logger.debug(f'Metadata provider {self.name} creating poster for {show_obj.unique_name}') return self.save_poster(show_obj) return False def create_banner(self, show_obj): # type: (sickgear.tv.TVShow) -> bool if self.banner and show_obj and not self._has_banner(show_obj): - logger.debug(u'Metadata provider %s creating banner for %s' % (self.name, show_obj.unique_name)) + logger.debug(f'Metadata provider {self.name} creating banner for {show_obj.unique_name}') return self.save_banner(show_obj) return False def create_episode_thumb(self, ep_obj): # type: (sickgear.tv.TVEpisode) -> bool if self.episode_thumbnails and ep_obj and not self.has_episode_thumb(ep_obj): - logger.log(u"Metadata provider " + self.name + " creating episode thumbnail for " + ep_obj.pretty_name(), - logger.DEBUG) + logger.debug(f'Metadata provider {self.name} creating episode thumbnail for {ep_obj.pretty_name()}') return self.save_thumbnail(ep_obj) return False @@ -454,8 +446,7 @@ class GenericMetadata(object): result = [] for season, _ in iteritems(show_obj.sxe_ep_obj): if not self._has_season_poster(show_obj, season): - logger.debug(u'Metadata provider %s creating season posters for %s' % ( - self.name, show_obj.unique_name)) + logger.debug(f'Metadata provider {self.name} creating season posters for {show_obj.unique_name}') result = result + [self.save_season_posters(show_obj, season)] return all(result) return False @@ -466,8 +457,7 @@ class GenericMetadata(object): result = [] for season, _ in iteritems(show_obj.sxe_ep_obj): if not self._has_season_banner(show_obj, season): - logger.debug(u'Metadata provider %s creating season banners for %s' % ( - self.name, show_obj.unique_name)) + logger.debug(f'Metadata provider {self.name} creating season banners for {show_obj.unique_name}') result = result + [self.save_season_banners(show_obj, season)] return all(result) return False @@ -475,16 +465,14 @@ class GenericMetadata(object): def create_season_all_poster(self, show_obj): # type: (sickgear.tv.TVShow) -> bool if self.season_all_poster and show_obj and not self._has_season_all_poster(show_obj): - logger.debug(u'Metadata provider %s creating season all posters for %s' % ( - self.name, show_obj.unique_name)) + logger.debug(f'Metadata provider {self.name} creating season all posters for {show_obj.unique_name}') return self.save_season_all_poster(show_obj) return False def create_season_all_banner(self, show_obj): # type: (sickgear.tv.TVShow) -> bool if self.season_all_banner and show_obj and not self._has_season_all_banner(show_obj): - logger.debug(u'Metadata provider %s creating season all banner for %s' % ( - self.name, 
show_obj.unique_name)) + logger.debug(f'Metadata provider {self.name} creating season all banner for {show_obj.unique_name}') return self.save_season_all_banner(show_obj) return False @@ -561,7 +549,7 @@ class GenericMetadata(object): nfo_file_path = self.get_show_file_path(show_obj) - logger.log(u'Writing show metadata file: %s' % nfo_file_path, logger.DEBUG) + logger.debug(f'Writing show metadata file: {nfo_file_path}') return sg_helpers.write_file(nfo_file_path, data, xmltree=True, utf8=True) @@ -590,7 +578,7 @@ class GenericMetadata(object): nfo_file_path = self.get_episode_file_path(ep_obj) - logger.log(u'Writing episode metadata file: %s' % nfo_file_path, logger.DEBUG) + logger.debug(f'Writing episode metadata file: {nfo_file_path}') return sg_helpers.write_file(nfo_file_path, data, xmltree=True, utf8=True) @@ -607,17 +595,17 @@ class GenericMetadata(object): file_path = self.get_episode_thumb_path(ep_obj) if not file_path: - logger.log(u"Unable to find a file path to use for this thumbnail, not generating it", logger.DEBUG) + logger.debug('Unable to find a file path to use for this thumbnail, not generating it') return False thumb_url = self._get_episode_thumb_url(ep_obj) # if we can't find one then give up if not thumb_url: - logger.log(u"No thumb is available for this episode, not creating a thumb", logger.DEBUG) + logger.debug('No thumb is available for this episode, not creating a thumb') return False - thumb_data = metadata_helpers.getShowImage(thumb_url, show_name=ep_obj.show_obj.name) + thumb_data = metadata_helpers.get_show_image(thumb_url, show_name=ep_obj.show_obj.name) result = self._write_image(thumb_data, file_path) @@ -645,7 +633,7 @@ class GenericMetadata(object): img_cache_type=sickgear.image_cache.ImageCache.FANART) if not fanart_data: - logger.log(u"No fanart image was retrieved, unable to write fanart", logger.DEBUG) + logger.debug('No fanart image was retrieved, unable to write fanart') return False return self._write_image(fanart_data, fanart_path) @@ -666,7 +654,7 @@ class GenericMetadata(object): img_cache_type=sickgear.image_cache.ImageCache.POSTER) if not poster_data: - logger.log(u"No show poster image was retrieved, unable to write poster", logger.DEBUG) + logger.debug('No show poster image was retrieved, unable to write poster') return False return self._write_image(poster_data, poster_path) @@ -687,7 +675,7 @@ class GenericMetadata(object): img_cache_type=sickgear.image_cache.ImageCache.BANNER) if not banner_data: - logger.log(u"No show banner image was retrieved, unable to write banner", logger.DEBUG) + logger.debug('No show banner image was retrieved, unable to write banner') return False return self._write_image(banner_data, banner_path) @@ -715,20 +703,19 @@ class GenericMetadata(object): if 0 == len(cur_season_art): continue - # Just grab whatever's there for now + # Just grab whatever is there for now art_id, season_url = cur_season_art.popitem() season_poster_file_path = self.get_season_poster_path(show_obj, cur_season) if not season_poster_file_path: - logger.log(u'Path for season ' + str(cur_season) + ' came back blank, skipping this season', - logger.DEBUG) + logger.debug(f'Path for season {cur_season} came back blank, skipping this season') continue - season_data = metadata_helpers.getShowImage(season_url, show_name=show_obj.name) + season_data = metadata_helpers.get_show_image(season_url, show_name=show_obj.name) if not season_data: - logger.log(u'No season poster data available, skipping this season', logger.DEBUG) + logger.debug('No season 
poster data available, skipping this season') continue result = result + [self._write_image(season_data, season_poster_file_path)] @@ -760,20 +747,19 @@ class GenericMetadata(object): if 0 == len(cur_season_art): continue - # Just grab whatever's there for now + # Just grab whatever is there for now art_id, season_url = cur_season_art.popitem() season_banner_file_path = self.get_season_banner_path(show_obj, cur_season) if not season_banner_file_path: - logger.log(u'Path for season ' + str(cur_season) + ' came back blank, skipping this season', - logger.DEBUG) + logger.debug(f'Path for season {cur_season} came back blank, skipping this season') continue - season_data = metadata_helpers.getShowImage(season_url, show_name=show_obj.name) + season_data = metadata_helpers.get_show_image(season_url, show_name=show_obj.name) if not season_data: - logger.log(u'No season banner data available, skipping this season', logger.DEBUG) + logger.debug('No season banner data available, skipping this season') continue result = result + [self._write_image(season_data, season_banner_file_path)] @@ -791,7 +777,7 @@ class GenericMetadata(object): img_cache_type=sickgear.image_cache.ImageCache.POSTER) if not poster_data: - logger.log(u"No show poster image was retrieved, unable to write season all poster", logger.DEBUG) + logger.debug('No show poster image was retrieved, unable to write season all poster') return False return self._write_image(poster_data, poster_path) @@ -805,7 +791,7 @@ class GenericMetadata(object): img_cache_type=sickgear.image_cache.ImageCache.BANNER) if not banner_data: - logger.log(u"No show banner image was retrieved, unable to write season all banner", logger.DEBUG) + logger.debug('No show banner image was retrieved, unable to write season all banner') return False return self._write_image(banner_data, banner_path) @@ -822,30 +808,28 @@ class GenericMetadata(object): """ # don't bother overwriting it - if not force and ek.ek(os.path.isfile, image_path): - logger.log(u"Image already exists, not downloading", logger.DEBUG) + if not force and os.path.isfile(image_path): + logger.debug('Image already exists, not downloading') return False if not image_data: - logger.log(u"Unable to retrieve image, skipping", logger.WARNING) + logger.warning('Unable to retrieve image, skipping') return False - image_dir = ek.ek(os.path.dirname, image_path) + image_dir = os.path.dirname(image_path) try: - if not ek.ek(os.path.isdir, image_dir): - logger.log(u"Metadata dir didn't exist, creating it at " + image_dir, logger.DEBUG) - ek.ek(os.makedirs, image_dir) + if not os.path.isdir(image_dir): + logger.debug(f'Metadata dir didn\'t exist, creating it at {image_dir}') + os.makedirs(image_dir) sg_helpers.chmod_as_parent(image_dir) - outFile = ek.ek(open, image_path, 'wb') - outFile.write(image_data) - outFile.close() + out_file = open(image_path, 'wb') + out_file.write(image_data) + out_file.close() sg_helpers.chmod_as_parent(image_path) except IOError as e: - logger.log( - u"Unable to write image to " + image_path + " - are you sure the show folder is writable? " + ex(e), - logger.ERROR) + logger.error(f'Unable to write image to {image_path} - are you sure the show folder is writable? 
{ex(e)}') return False return True @@ -858,7 +842,7 @@ class GenericMetadata(object): def _get_show_info(tv_id): try: show_lang = show_obj.lang - # There's gotta be a better way of doing this but we don't wanna + # There's gotta be a better way of doing this, but we don't want to # change the language value elsewhere tvinfo_config = sickgear.TVInfoAPI(tv_id).api_params.copy() tvinfo_config['fanart'] = True @@ -873,11 +857,11 @@ class GenericMetadata(object): return t.get_show((show_obj.ids[tv_id]['id'], show_obj.prodid)[tv_src == show_obj.tvid], load_episodes=False, banners=True, posters=True, fanart=True, language=show_obj.lang) except (BaseTVinfoError, IOError) as e: - logger.log(u"Unable to look up show on " + sickgear.TVInfoAPI( - tv_id).name + ", not downloading images: " + ex(e), logger.WARNING) + logger.warning(f'Unable to look up show on {sickgear.TVInfoAPI(tv_id).name},' + f' not downloading images: {ex(e)}') # todo: when tmdb is added as tv source remove the hardcoded TVINFO_TMDB - for tv_src in list(OrderedDict.fromkeys([show_obj.tvid] + list_keys(sickgear.TVInfoAPI().search_sources) + + for tv_src in list(OrderedDict.fromkeys([show_obj.tvid] + list(sickgear.TVInfoAPI().search_sources) + [TVINFO_TMDB])): if tv_src != show_obj.tvid and not show_obj.ids.get(tv_src, {}).get('id'): continue @@ -904,13 +888,13 @@ class GenericMetadata(object): try: alt_url = '%swww.%s%s' % re.findall( - r'(https?://)(?:artworks\.)?(thetvdb\.[^/]+/banners/[^\d]+[^.]+)(?:_t)(.*)', _url)[0][0:3] + r'(https?://)(?:artworks\.)?(thetvdb\.[^/]+/banners/\D+[^.]+)_t(.*)', _url)[0][0:3] if alt_url not in _urls[0]: _urls[1].append(alt_url) except (IndexError, Exception): try: alt_url = '%sartworks.%s_t%s' % re.findall( - r'(https?://)(?:www\.)?(thetvdb\.[^/]+/banners/[^\d]+[^.]+)(.*)', _url)[0][0:3] + r'(https?://)(?:www\.)?(thetvdb\.[^/]+/banners/\D+[^.]+)(.*)', _url)[0][0:3] if alt_url not in _urls[0]: _urls[1].append(alt_url) except (IndexError, Exception): @@ -1011,7 +995,7 @@ class GenericMetadata(object): thumb_url = _de_dupe(thumb_url) if not thumb_url: thumb_url = img_url - yield (img_url, thumb_url) + yield img_url, thumb_url elif img_url: yield img_url @@ -1046,8 +1030,8 @@ class GenericMetadata(object): image_type = 'fanart' if image_type not in ('poster', 'banner', 'fanart', 'poster_thumb', 'banner_thumb'): - logger.log(u"Invalid image type " + str(image_type) + ", couldn't find it in the " + sickgear.TVInfoAPI( - show_obj.tvid).name + " object", logger.ERROR) + logger.error(f'Invalid image type {image_type}, couldn\'t find it in the' + f' {sickgear.TVInfoAPI(show_obj.tvid).name} object') return image_urls = self._retrieve_image_urls(show_obj, image_type, show_infos) @@ -1062,7 +1046,7 @@ class GenericMetadata(object): if image_type in ('poster', 'banner'): if isinstance(image_url, tuple): image_url = image_url[0] - img_data = metadata_helpers.getShowImage(image_url, which, show_obj.name) + img_data = metadata_helpers.get_show_image(image_url, which, show_obj.name) if img_cache_type and img_cache_type != image_cache.which_type(img_data, is_binary=True): img_data = None continue @@ -1086,7 +1070,7 @@ class GenericMetadata(object): result = {} try: - # There's gotta be a better way of doing this but we don't wanna + # There's gotta be a better way of doing this, but we don't want to # change the language value elsewhere tvinfo_config = sickgear.TVInfoAPI(show_obj.tvid).api_params.copy() tvinfo_config[image_type] = True @@ -1098,8 +1082,8 @@ class GenericMetadata(object): t = 
sickgear.TVInfoAPI(show_obj.tvid).setup(**tvinfo_config) tvinfo_obj_show = t.get_show(show_obj.prodid, language=show_obj.lang) except (BaseTVinfoError, IOError) as e: - logger.log(u'Unable to look up show on ' + sickgear.TVInfoAPI( - show_obj.tvid).name + ', not downloading images: ' + ex(e), logger.WARNING) + logger.warning(f'Unable to look up show on {sickgear.TVInfoAPI(show_obj.tvid).name},' + f' not downloading images: {ex(e)}') return result if not self._valid_show(tvinfo_obj_show, show_obj): @@ -1114,7 +1098,7 @@ class GenericMetadata(object): return result - def retrieveShowMetadata(self, folder): + def retrieve_show_metadata(self, folder): # type: (AnyStr) -> Union[Tuple[int, int, AnyStr], Tuple[None, None, None]] """ Used only when mass adding Existing Shows, @@ -1125,39 +1109,37 @@ class GenericMetadata(object): empty_return = (None, None, None) - metadata_path = ek.ek(os.path.join, folder, self._show_metadata_filename) + metadata_path = os.path.join(folder, self._show_metadata_filename) - if not ek.ek(os.path.isdir, folder) or not ek.ek(os.path.isfile, metadata_path): - logger.log(u"Can't load the metadata file from " + repr(metadata_path) + ", it doesn't exist", logger.DEBUG) + if not os.path.isdir(folder) or not os.path.isfile(metadata_path): + logger.debug(f'Can\'t load the metadata file from {repr(metadata_path)}, it doesn\'t exist') return empty_return - logger.log(u"Loading show info from metadata file in " + folder, logger.DEBUG) + logger.debug(f'Loading show info from metadata file in {folder}') try: - with ek.ek(io.open, metadata_path, 'r', encoding='utf8') as xmlFileObj: - showXML = etree.ElementTree(file=xmlFileObj) + with io.open(metadata_path, 'r', encoding='utf8') as xmlFileObj: + show_xml = etree.ElementTree(file=xmlFileObj) - if None is showXML.findtext('title') \ - or all(None is _f for _f in (showXML.find('//uniqueid[@type]'), - showXML.findtext('tvdbid'), - showXML.findtext('id'), - showXML.findtext('indexer'))): - logger.log(u"Invalid info in tvshow.nfo (missing name or id):" - + str(showXML.findtext('title')) + ' ' - + str(showXML.findtext('indexer')) + ' ' - + str(showXML.findtext('tvdbid')) + ' ' - + str(showXML.findtext('id'))) + if None is show_xml.findtext('title') \ + or all(None is _f for _f in (show_xml.find('//uniqueid[@type]'), + show_xml.findtext('tvdbid'), + show_xml.findtext('id'), + show_xml.findtext('indexer'))): + logger.log(f'Invalid info in tvshow.nfo (missing name or id):' + f'{show_xml.findtext("title")} {show_xml.findtext("indexer")} ' + f'{show_xml.findtext("tvdbid")} {show_xml.findtext("id")}') return empty_return - name = showXML.findtext('title') + name = show_xml.findtext('title') try: - tvid = int(showXML.findtext('indexer')) + tvid = int(show_xml.findtext('indexer')) except (BaseException, Exception): tvid = None # handle v2 format of .nfo file - default_source = showXML.find('//uniqueid[@default="true"]') + default_source = show_xml.find('//uniqueid[@default="true"]') if None is not default_source: use_tvid = default_source.attrib.get('type') or tvid if isinstance(use_tvid, string_types): @@ -1167,32 +1149,30 @@ class GenericMetadata(object): if use_tvid and None is not prodid: return use_tvid, prodid, name - prodid = showXML.find('//uniqueid[@type="tvdb"]') + prodid = show_xml.find('//uniqueid[@type="tvdb"]') if None is not prodid: prodid = int(prodid.text) tvid = TVINFO_TVDB - elif None is not showXML.findtext('tvdbid'): - prodid = int(showXML.findtext('tvdbid')) + elif None is not show_xml.findtext('tvdbid'): + prodid = 
int(show_xml.findtext('tvdbid')) tvid = TVINFO_TVDB - elif None is not showXML.findtext('id'): - prodid = int(showXML.findtext('id')) + elif None is not show_xml.findtext('id'): + prodid = int(show_xml.findtext('id')) try: - tvid = TVINFO_TVDB if [s for s in showXML.findall('.//*') + tvid = TVINFO_TVDB if [s for s in show_xml.findall('.//*') if s.text and -1 != s.text.find('thetvdb.com')] else tvid except (BaseException, Exception): pass else: - logger.log(u"Empty or field in NFO, unable to find a ID", logger.WARNING) + logger.warning('Empty or field in NFO, unable to find an ID') return empty_return if None is prodid: - logger.log(u"Invalid Show ID (%s), not using metadata file" % prodid, logger.WARNING) + logger.warning(f'Invalid Show ID ({prodid}), not using metadata file') return empty_return except (BaseException, Exception) as e: - logger.log( - u"There was an error parsing your existing metadata file: '" + metadata_path + "' error: " + ex(e), - logger.WARNING) + logger.warning(f'There was an error parsing your existing metadata file: "{metadata_path}" error: {ex(e)}') return empty_return return tvid, prodid, name @@ -1206,7 +1186,7 @@ class GenericMetadata(object): except (BaseException, Exception): pass - logger.log(u'Could not find any %s images on Fanart.tv for %s' % (image_type, show_obj.name), logger.DEBUG) + logger.debug(f'Could not find any {image_type} images on Fanart.tv for {show_obj.name}') @staticmethod def _fanart_urls(tvdb_id, image_type='banner', lang='en', thumb=False): @@ -1223,9 +1203,9 @@ class GenericMetadata(object): resp = request.response() itemlist = [] dedupe = [] - for art in filter_iter(lambda i: 10 < len(i.get('url', '')) and (lang == i.get('lang', '')[0:2]), - # remove "[0:2]" ... to strictly use only data where "en" is at source - resp[types[image_type]]): # type: dict + for art in filter(lambda i: 10 < len(i.get('url', '')) and (lang == i.get('lang', '')[0:2]), + # remove "[0:2]" ... 
to strictly use only data where "en" is at source + resp[types[image_type]]): # type: dict try: url = (art['url'], art['url'].replace('/fanart/', '/preview/'))[thumb] if url not in dedupe: diff --git a/sickgear/metadata/helpers.py b/sickgear/metadata/helpers.py index fe046379..0aac20c8 100644 --- a/sickgear/metadata/helpers.py +++ b/sickgear/metadata/helpers.py @@ -22,7 +22,7 @@ if False: from typing import AnyStr, Optional -def getShowImage(url, img_num=None, show_name=None, supress_log=False): +def get_show_image(url, img_num=None, show_name=None, supress_log=False): # type: (AnyStr, Optional[int], Optional[AnyStr], bool) -> Optional[bytes] """ @@ -42,7 +42,7 @@ def getShowImage(url, img_num=None, show_name=None, supress_log=False): # if they provided a fanart number try to use it instead temp_url = url if None is img_num else url.split('-')[0] + '-' + str(img_num) + '.jpg' - logger.log(u'Fetching image from ' + temp_url, logger.DEBUG) + logger.debug(f'Fetching image from {temp_url}') from sickgear import FLARESOLVERR_HOST, MEMCACHE MEMCACHE.setdefault('cookies', {}) @@ -51,8 +51,8 @@ def getShowImage(url, img_num=None, show_name=None, supress_log=False): if None is image_data: if supress_log: return - logger.log('There was an error trying to retrieve the image%s, aborting' % - ('', ' for show: %s' % show_name)[None is not show_name], logger.WARNING) + logger.warning(f'There was an error trying to retrieve the image' + f'{("", " for show: %s" % show_name)[None is not show_name]}, aborting') return return image_data diff --git a/sickgear/metadata/kodi.py b/sickgear/metadata/kodi.py index dcd873cb..f0787a36 100644 --- a/sickgear/metadata/kodi.py +++ b/sickgear/metadata/kodi.py @@ -25,13 +25,11 @@ import sg_helpers from ..indexers.indexer_config import TVINFO_IMDB, TVINFO_TVDB from lib.tvinfo_base.exceptions import * import sickgear -# noinspection PyPep8Naming -import encodingKludge as ek import exceptions_helper from exceptions_helper import ex from lxml_etree import etree -from _23 import decode_str, map_iter +from _23 import decode_str from six import string_types # noinspection PyUnreachableCode @@ -109,7 +107,7 @@ class KODIMetadata(generic.GenericMetadata): show_obj: a TVShow instance to create the NFO for """ - show_ID = show_obj.prodid + show_id = show_obj.prodid show_lang = show_obj.lang tvinfo_config = sickgear.TVInfoAPI(show_obj.tvid).api_params.copy() @@ -129,13 +127,11 @@ class KODIMetadata(generic.GenericMetadata): try: show_info = t.get_show(show_obj.prodid, language=show_obj.lang) except BaseTVinfoShownotfound as e: - logger.log('Unable to find show with id %s on %s, skipping it' % (show_ID, sickgear.TVInfoAPI( - show_obj.tvid).name), logger.ERROR) + logger.error(f'Unable to find show with id {show_id} on {sickgear.TVInfoAPI(show_obj.tvid).name},' + f' skipping it') raise e except BaseTVinfoError as e: - logger.log( - '%s is down, can\'t use its data to add this show' % sickgear.TVInfoAPI(show_obj.tvid).name, - logger.ERROR) + logger.error(f'{sickgear.TVInfoAPI(show_obj.tvid).name} is down, can\'t use its data to add this show') raise e if not self._valid_show(show_info, show_obj): @@ -143,8 +139,8 @@ class KODIMetadata(generic.GenericMetadata): # check for title and id if None is getattr(show_info, 'seriesname', None) or None is getattr(show_info, 'id', None): - logger.log('Incomplete info for show with id %s on %s, skipping it' % (show_ID, sickgear.TVInfoAPI( - show_obj.tvid).name), logger.ERROR) + logger.error(f'Incomplete info for show with id {show_id} on 
{sickgear.TVInfoAPI(show_obj.tvid).name},' + f' skipping it') return False title = etree.SubElement(tv_node, 'title') @@ -159,7 +155,7 @@ class KODIMetadata(generic.GenericMetadata): has_id = False tvdb_id = None - for tvid, slug in map_iter( + for tvid, slug in map( lambda _tvid: (_tvid, sickgear.TVInfoAPI(_tvid).config.get('kodi_slug')), list(sickgear.TVInfoAPI().all_sources)): mid = slug and show_obj.ids[tvid].get('id') @@ -173,8 +169,8 @@ class KODIMetadata(generic.GenericMetadata): uniqueid = etree.SubElement(tv_node, 'uniqueid', **kwargs) uniqueid.text = '%s%s' % (('', 'tt')[TVINFO_IMDB == tvid], mid) if not has_id: - logger.log('Incomplete info for show with id %s on %s, skipping it' % (show_ID, sickgear.TVInfoAPI( - show_obj.tvid).name), logger.ERROR) + logger.error(f'Incomplete info for show with id {show_id} on {sickgear.TVInfoAPI(show_obj.tvid).name},' + f' skipping it') return False ratings = etree.SubElement(tv_node, 'ratings') @@ -237,7 +233,7 @@ class KODIMetadata(generic.GenericMetadata): nfo_file_path = self.get_show_file_path(show_obj) - logger.log(u'Writing Kodi metadata file: %s' % nfo_file_path, logger.DEBUG) + logger.debug(f'Writing Kodi metadata file: {nfo_file_path}') data = '\n%s' % data return sg_helpers.write_file(nfo_file_path, data, utf8=True) @@ -263,7 +259,7 @@ class KODIMetadata(generic.GenericMetadata): nfo_file_path = self.get_episode_file_path(ep_obj) - logger.log(u'Writing episode metadata file: %s' % nfo_file_path, logger.DEBUG) + logger.debug(f'Writing episode metadata file: {nfo_file_path}') return sg_helpers.write_file(nfo_file_path, data, xmltree=True, xml_header=True, utf8=True) @@ -294,8 +290,8 @@ class KODIMetadata(generic.GenericMetadata): except BaseTVinfoShownotfound as e: raise exceptions_helper.ShowNotFoundException(ex(e)) except BaseTVinfoError as e: - logger.log('Unable to connect to %s while creating meta files - skipping - %s' % (sickgear.TVInfoAPI( - ep_obj.show_obj.tvid).name, ex(e)), logger.ERROR) + logger.error(f'Unable to connect to {sickgear.TVInfoAPI(ep_obj.show_obj.tvid).name}' + f' while creating meta files - skipping - {ex(e)}') return if not self._valid_show(show_info, ep_obj.show_obj): @@ -320,10 +316,10 @@ class KODIMetadata(generic.GenericMetadata): ep_info['firstaired'] = str(datetime.date.fromordinal(1)) if None is getattr(ep_info, 'episodename', None): - logger.log(u'Not generating nfo because the episode has no title', logger.DEBUG) + logger.debug('Not generating nfo because the episode has no title') return None - logger.log('Creating metadata for episode %sx%s' % (ep_obj.season, ep_obj.episode), logger.DEBUG) + logger.debug('Creating metadata for episode %sx%s' % (ep_obj.season, ep_obj.episode)) if 1 < len(ep_obj_list_to_write): ep_node = etree.SubElement(root_node, 'episodedetails') @@ -472,8 +468,8 @@ def remove_default_attr(*args, **kwargs): if nfo_path: # show try: - if ek.ek(os.path.isfile, nfo_path): - with ek.ek(io.open, nfo_path, 'r', encoding='utf8') as xml_file_obj: + if os.path.isfile(nfo_path): + with io.open(nfo_path, 'r', encoding='utf8') as xml_file_obj: xmltree = etree.ElementTree(file=xml_file_obj) # remove default="" attributes @@ -519,8 +515,8 @@ def remove_default_attr(*args, **kwargs): try: changed = False nfo_path = kodi.get_episode_file_path(cur_ep_obj) - if nfo_path and ek.ek(os.path.isfile, nfo_path): - with ek.ek(io.open, nfo_path, 'r', encoding='utf8') as xml_file_obj: + if nfo_path and os.path.isfile(nfo_path): + with io.open(nfo_path, 'r', encoding='utf8') as xml_file_obj: xmltree = 
etree.ElementTree(file=xml_file_obj) # remove default="" attributes @@ -573,8 +569,8 @@ def rebuild_nfo(*args, **kwargs): try: nfo_path = kodi.get_show_file_path(cur_show_obj) - if nfo_path and ek.ek(os.path.isfile, nfo_path): - with ek.ek(io.open, nfo_path, 'r', encoding='utf8') as xml_file_obj: + if nfo_path and os.path.isfile(nfo_path): + with io.open(nfo_path, 'r', encoding='utf8') as xml_file_obj: xmltree = etree.ElementTree(file=xml_file_obj) # check xml keys exist to validate file as type Kodi episode or tvshow .nfo diff --git a/sickgear/metadata/mede8er.py b/sickgear/metadata/mede8er.py index 37a52e65..62c0ec9b 100644 --- a/sickgear/metadata/mede8er.py +++ b/sickgear/metadata/mede8er.py @@ -127,10 +127,10 @@ class Mede8erMetadata(mediabrowser.MediaBrowserMetadata): try: show_info = t.get_show(show_obj.prodid, language=show_obj.lang) except BaseTVinfoShownotfound as e: - logger.log(u'Unable to find show with id ' + str(show_obj.prodid) + ' on tvdb, skipping it', logger.ERROR) + logger.error(f'Unable to find show with id {show_obj.prodid} on tvdb, skipping it') raise e except BaseTVinfoError as e: - logger.log(u'TVDB is down, can\'t use its data to make the NFO', logger.ERROR) + logger.error(f'TVDB is down, can\'t use its data to make the NFO') raise e if not self._valid_show(show_info, show_obj): @@ -142,12 +142,12 @@ class Mede8erMetadata(mediabrowser.MediaBrowserMetadata): or '' == show_info['seriesname'] \ or None is show_info['id'] \ or '' == show_info['id']: - logger.log('Incomplete info for show with id %s on %s, skipping it' % - (show_obj.prodid, sickgear.TVInfoAPI(show_obj.tvid).name), logger.ERROR) + logger.error(f'Incomplete info for show with id {show_obj.prodid}' + f' on {sickgear.TVInfoAPI(show_obj.tvid).name}, skipping it') return False except BaseTVinfoAttributenotfound: - logger.log('Incomplete info for show with id %s on %s, skipping it' % - (show_obj.prodid, sickgear.TVInfoAPI(show_obj.tvid).name), logger.ERROR) + logger.error(f'Incomplete info for show with id {show_obj.prodid}' + f' on {sickgear.TVInfoAPI(show_obj.tvid).name}, skipping it') return False SeriesName = etree.SubElement(tv_node, 'title') @@ -241,8 +241,8 @@ class Mede8erMetadata(mediabrowser.MediaBrowserMetadata): except BaseTVinfoShownotfound as e: raise exceptions_helper.ShowNotFoundException(ex(e)) except BaseTVinfoError as e: - logger.log('Unable to connect to %s while creating meta files - skipping - %s' % - (sickgear.TVInfoAPI(ep_obj.show_obj.tvid).name, ex(e)), logger.ERROR) + logger.error(f'Unable to connect to {sickgear.TVInfoAPI(ep_obj.show_obj.tvid).name}' + f' while creating meta files - skipping - {ex(e)}') return False if not self._valid_show(show_info, ep_obj.show_obj): @@ -261,8 +261,8 @@ class Mede8erMetadata(mediabrowser.MediaBrowserMetadata): try: ep_info = show_info[cur_ep_obj.season][cur_ep_obj.episode] except (BaseException, Exception): - logger.log(u'Unable to find episode %sx%s on tvdb... has it been removed? Should I delete from db?' % - (cur_ep_obj.season, cur_ep_obj.episode)) + logger.log(f'Unable to find episode {cur_ep_obj.season}x{cur_ep_obj.episode} on tvdb...' + f' has it been removed? Should it be deleted from the db?') return None if cur_ep_obj == ep_obj: diff --git a/sickgear/metadata/mediabrowser.py b/sickgear/metadata/mediabrowser.py index 5f4f7e29..5ae2cd60 100644 --- a/sickgear/metadata/mediabrowser.py +++ b/sickgear/metadata/mediabrowser.py @@ -24,8 +24,6 @@ from .. 
import logger import sg_helpers from lib.tvinfo_base.exceptions import * import sickgear -# noinspection PyPep8Naming -import encodingKludge as ek import exceptions_helper from exceptions_helper import ex from lxml_etree import etree @@ -98,7 +96,7 @@ class MediaBrowserMetadata(generic.GenericMetadata): self.eg_season_all_banner = "not supported" # type: AnyStr # Override with empty methods for unsupported features - def retrieveShowMetadata(self, folder): + def retrieve_show_metadata(self, folder): # type: (AnyStr) -> Tuple[None, None, None] # while show metadata is generated, it is not supported for our lookup return None, None, None @@ -120,12 +118,12 @@ class MediaBrowserMetadata(generic.GenericMetadata): ep_obj: a TVEpisode object to get the path for """ - if ek.ek(os.path.isfile, ep_obj.location): - xml_file_name = sg_helpers.replace_extension(ek.ek(os.path.basename, ep_obj.location), self._ep_nfo_extension) - metadata_dir_name = ek.ek(os.path.join, ek.ek(os.path.dirname, ep_obj.location), 'metadata') - xml_file_path = ek.ek(os.path.join, metadata_dir_name, xml_file_name) + if os.path.isfile(ep_obj.location): + xml_file_name = sg_helpers.replace_extension(os.path.basename(ep_obj.location), self._ep_nfo_extension) + metadata_dir_name = os.path.join(os.path.dirname(ep_obj.location), 'metadata') + xml_file_path = os.path.join(metadata_dir_name, xml_file_name) else: - logger.log(u"Episode location doesn't exist: " + str(ep_obj.location), logger.DEBUG) + logger.debug(f'Episode location doesn\'t exist: {ep_obj.location}') return '' return xml_file_path @@ -139,10 +137,10 @@ class MediaBrowserMetadata(generic.GenericMetadata): ep_obj: a TVEpisode object to get the path from """ - if ek.ek(os.path.isfile, ep_obj.location): - metadata_dir_name = ek.ek(os.path.join, ek.ek(os.path.dirname, ep_obj.location), 'metadata') - tbn_file_name = sg_helpers.replace_extension(ek.ek(os.path.basename, ep_obj.location), 'jpg') - return ek.ek(os.path.join, metadata_dir_name, tbn_file_name) + if os.path.isfile(ep_obj.location): + metadata_dir_name = os.path.join(os.path.dirname(ep_obj.location), 'metadata') + tbn_file_name = sg_helpers.replace_extension(os.path.basename(ep_obj.location), 'jpg') + return os.path.join(metadata_dir_name, tbn_file_name) def get_season_poster_path(self, show_obj, season): # type: (sickgear.tv.TVShow, int) -> Optional[AnyStr] @@ -152,8 +150,7 @@ class MediaBrowserMetadata(generic.GenericMetadata): If no season folder exists, None is returned """ - dir_list = [x for x in ek.ek(os.listdir, show_obj.location) if - ek.ek(os.path.isdir, ek.ek(os.path.join, show_obj.location, x))] + dir_list = [x for x in os.listdir(show_obj.location) if os.path.isdir(os.path.join(show_obj.location, x))] season_dir_regex = r'^Season\s+(\d+)$' @@ -178,12 +175,12 @@ class MediaBrowserMetadata(generic.GenericMetadata): break if not season_dir: - logger.log(u"Unable to find a season dir for season " + str(season), logger.DEBUG) + logger.debug(f'Unable to find a season dir for season {season}') return None - logger.log(u"Using " + str(season_dir) + "/folder.jpg as season dir for season " + str(season), logger.DEBUG) + logger.debug(f'Using {season_dir}/folder.jpg as season dir for season {season}') - return ek.ek(os.path.join, show_obj.location, season_dir, 'folder.jpg') + return os.path.join(show_obj.location, season_dir, 'folder.jpg') def get_season_banner_path(self, show_obj, season): # type: (sickgear.tv.TVShow, int) -> Optional[AnyStr] @@ -193,8 +190,7 @@ class 
MediaBrowserMetadata(generic.GenericMetadata): If no season folder exists, None is returned """ - dir_list = [x for x in ek.ek(os.listdir, show_obj.location) if - ek.ek(os.path.isdir, ek.ek(os.path.join, show_obj.location, x))] + dir_list = [x for x in os.listdir(show_obj.location) if os.path.isdir(os.path.join(show_obj.location, x))] season_dir_regex = r'^Season\s+(\d+)$' @@ -219,12 +215,12 @@ class MediaBrowserMetadata(generic.GenericMetadata): break if not season_dir: - logger.log(u"Unable to find a season dir for season " + str(season), logger.DEBUG) + logger.debug(f'Unable to find a season dir for season {season}') return None - logger.log(u"Using " + str(season_dir) + "/banner.jpg as season dir for season " + str(season), logger.DEBUG) + logger.debug(f'Using {season_dir}/banner.jpg as season dir for season {season}') - return ek.ek(os.path.join, show_obj.location, season_dir, 'banner.jpg') + return os.path.join(show_obj.location, season_dir, 'banner.jpg') def _show_data(self, show_obj): # type: (sickgear.tv.TVShow) -> Optional[Union[bool, etree.Element]] @@ -256,12 +252,11 @@ class MediaBrowserMetadata(generic.GenericMetadata): try: show_info = t.get_show(show_obj.prodid, language=show_obj.lang) except BaseTVinfoShownotfound as e: - logger.log("Unable to find show with id %s on %s, skipping it" % - (show_obj.prodid, sickgear.TVInfoAPI(show_obj.tvid).name), logger.ERROR) + logger.error(f'Unable to find show with id {show_obj.prodid} ' + f'on {sickgear.TVInfoAPI(show_obj.tvid).name}, skipping it') raise e except BaseTVinfoError as e: - logger.log("%s is down, can't use its data to make the NFO" % sickgear.TVInfoAPI(show_obj.tvid).name, - logger.ERROR) + logger.error('%s is down, can\'t use its data to make the NFO' % sickgear.TVInfoAPI(show_obj.tvid).name) raise e if not self._valid_show(show_info, show_obj): @@ -269,8 +264,8 @@ class MediaBrowserMetadata(generic.GenericMetadata): # check for title and id if None is getattr(show_info, 'seriesname', None) or None is getattr(show_info, 'id', None): - logger.log("Incomplete info for show with id %s on %s, skipping it" % - (show_obj.prodid, sickgear.TVInfoAPI(show_obj.tvid).name), logger.ERROR) + logger.error(f'Incomplete info for show with id {show_obj.prodid}' + f' on {sickgear.TVInfoAPI(show_obj.tvid).name}, skipping it') return False prodid = etree.SubElement(tv_node, "id") @@ -419,8 +414,8 @@ class MediaBrowserMetadata(generic.GenericMetadata): except BaseTVinfoShownotfound as e: raise exceptions_helper.ShowNotFoundException(ex(e)) except BaseTVinfoError as e: - logger.log("Unable to connect to %s while creating meta files - skipping - %s" % - (sickgear.TVInfoAPI(ep_obj.show_obj.tvid).name, ex(e)), logger.ERROR) + logger.error(f'Unable to connect to {sickgear.TVInfoAPI(ep_obj.show_obj.tvid).name}' + f' while creating meta files - skipping - {ex(e)}') return False if not self._valid_show(show_info, ep_obj.show_obj): diff --git a/sickgear/metadata/ps3.py b/sickgear/metadata/ps3.py index 4e91cdb2..8941cbc8 100644 --- a/sickgear/metadata/ps3.py +++ b/sickgear/metadata/ps3.py @@ -17,8 +17,6 @@ import os from . 
import generic -# noinspection PyPep8Naming -import encodingKludge as ek import sickgear # noinspection PyUnreachableCode @@ -79,7 +77,7 @@ class PS3Metadata(generic.GenericMetadata): self.eg_season_all_banner = "not supported" # type: AnyStr # Override with empty methods for unsupported features - def retrieveShowMetadata(self, folder): + def retrieve_show_metadata(self, folder): # type: (AnyStr) -> Tuple[None, None, None] # no show metadata generated, we abort this lookup function return None, None, None @@ -132,7 +130,7 @@ class PS3Metadata(generic.GenericMetadata): ep_obj: a TVEpisode instance for which to create the thumbnail """ - if ek.ek(os.path.isfile, ep_obj.location): + if os.path.isfile(ep_obj.location): tbn_filename = ep_obj.location + ".cover.jpg" else: return None diff --git a/sickgear/metadata/tivo.py b/sickgear/metadata/tivo.py index e93b4ace..9d749bee 100644 --- a/sickgear/metadata/tivo.py +++ b/sickgear/metadata/tivo.py @@ -25,8 +25,6 @@ from .. import logger import sg_helpers from lib.tvinfo_base.exceptions import * import sickgear -# noinspection PyPep8Naming -import encodingKludge as ek import exceptions_helper from exceptions_helper import ex @@ -89,7 +87,7 @@ class TIVOMetadata(generic.GenericMetadata): self.eg_season_all_banner = "not supported" # type: AnyStr # Override with empty methods for unsupported features - def retrieveShowMetadata(self, folder): + def retrieve_show_metadata(self, folder): # type: (AnyStr) -> Tuple[None, None, None] # no show metadata generated, we abort this lookup function return None, None, None @@ -155,12 +153,12 @@ class TIVOMetadata(generic.GenericMetadata): ep_obj: a TVEpisode object to get the path for """ - if ek.ek(os.path.isfile, ep_obj.location): - metadata_file_name = ek.ek(os.path.basename, ep_obj.location) + "." + self._ep_nfo_extension - metadata_dir_name = ek.ek(os.path.join, ek.ek(os.path.dirname, ep_obj.location), '.meta') - metadata_file_path = ek.ek(os.path.join, metadata_dir_name, metadata_file_name) + if os.path.isfile(ep_obj.location): + metadata_file_name = os.path.basename(ep_obj.location) + "." 
+ self._ep_nfo_extension + metadata_dir_name = os.path.join(os.path.dirname(ep_obj.location), '.meta') + metadata_file_path = os.path.join(metadata_dir_name, metadata_file_name) else: - logger.log(u"Episode location doesn't exist: " + str(ep_obj.location), logger.DEBUG) + logger.debug(f'Episode location doesn\'t exist: {ep_obj.location}') return '' return metadata_file_path @@ -205,8 +203,8 @@ class TIVOMetadata(generic.GenericMetadata): except BaseTVinfoShownotfound as e: raise exceptions_helper.ShowNotFoundException(ex(e)) except BaseTVinfoError as e: - logger.log("Unable to connect to %s while creating meta files - skipping - %s" % - (sickgear.TVInfoAPI(ep_obj.show_obj.tvid).name, ex(e)), logger.ERROR) + logger.error(f'Unable to connect to {sickgear.TVInfoAPI(ep_obj.show_obj.tvid).name}' + f' while creating meta files - skipping - {ex(e)}') return False if not self._valid_show(show_info, ep_obj.show_obj): @@ -253,10 +251,10 @@ class TIVOMetadata(generic.GenericMetadata): # Write the synopsis of the video here sanitizedDescription = cur_ep_obj.description # Replace double curly quotes - sanitizedDescription = sanitizedDescription.replace(u"\u201c", "\"").replace(u"\u201d", "\"") + sanitizedDescription = sanitizedDescription.replace('\u201c', '"').replace('\u201d', '"') # Replace single curly quotes - sanitizedDescription = sanitizedDescription.replace(u"\u2018", "'").replace(u"\u2019", "'").replace( - u"\u02BC", "'") + sanitizedDescription = sanitizedDescription.replace('\u2018', '\'').replace('\u2019', '\'').replace( + '\u02BC', '\'') data += ("description : " + sanitizedDescription + "\n") @@ -335,25 +333,24 @@ class TIVOMetadata(generic.GenericMetadata): return False nfo_file_path = self.get_episode_file_path(ep_obj) - nfo_file_dir = ek.ek(os.path.dirname, nfo_file_path) + nfo_file_dir = os.path.dirname(nfo_file_path) try: - if not ek.ek(os.path.isdir, nfo_file_dir): - logger.log(u"Metadata dir didn't exist, creating it at " + nfo_file_dir, logger.DEBUG) - ek.ek(os.makedirs, nfo_file_dir) + if not os.path.isdir(nfo_file_dir): + logger.debug(f'Metadata dir didn\'t exist, creating it at {nfo_file_dir}') + os.makedirs(nfo_file_dir) sg_helpers.chmod_as_parent(nfo_file_dir) - logger.log(u"Writing episode nfo file to " + nfo_file_path, logger.DEBUG) + logger.debug(f'Writing episode nfo file to {nfo_file_path}') - with ek.ek(open, nfo_file_path, 'w') as nfo_file: + with open(nfo_file_path, 'w') as nfo_file: # Calling encode directly, b/c often descriptions have wonky characters. nfo_file.write(data.encode("utf-8")) sg_helpers.chmod_as_parent(nfo_file_path) except EnvironmentError as e: - logger.log(u"Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e), - logger.ERROR) + logger.error(f'Unable to write file to {nfo_file_path} - are you sure the folder is writable? {ex(e)}') return False return True diff --git a/sickgear/metadata/wdtv.py b/sickgear/metadata/wdtv.py index 23385dcb..b0c87c92 100644 --- a/sickgear/metadata/wdtv.py +++ b/sickgear/metadata/wdtv.py @@ -24,8 +24,6 @@ from .. 
import logger import sg_helpers from lib.tvinfo_base.exceptions import * import sickgear -# noinspection PyPep8Naming -import encodingKludge as ek import exceptions_helper from exceptions_helper import ex from lxml_etree import etree @@ -92,7 +90,7 @@ class WDTVMetadata(generic.GenericMetadata): self.eg_season_all_banner = "not supported" # type: AnyStr # Override with empty methods for unsupported features - def retrieveShowMetadata(self, folder): + def retrieve_show_metadata(self, folder): # type: (AnyStr) -> Tuple[None, None, None] # no show metadata generated, we abort this lookup function return None, None, None @@ -137,7 +135,7 @@ class WDTVMetadata(generic.GenericMetadata): ep_obj: a TVEpisode instance for which to create the thumbnail """ - if ek.ek(os.path.isfile, ep_obj.location): + if os.path.isfile(ep_obj.location): return sg_helpers.replace_extension(ep_obj.location, 'metathumb') def get_season_poster_path(self, show_obj, season): @@ -148,8 +146,7 @@ class WDTVMetadata(generic.GenericMetadata): If no season folder exists, None is returned """ - dir_list = [x for x in ek.ek(os.listdir, show_obj.location) if - ek.ek(os.path.isdir, ek.ek(os.path.join, show_obj.location, x))] + dir_list = [x for x in os.listdir(show_obj.location) if os.path.isdir(os.path.join(show_obj.location, x))] season_dir_regex = r'^Season\s+(\d+)$' @@ -171,12 +168,12 @@ class WDTVMetadata(generic.GenericMetadata): break if not season_dir: - logger.log(u"Unable to find a season dir for season " + str(season), logger.DEBUG) + logger.debug(f'Unable to find a season dir for season {season}') return None - logger.log(u"Using " + str(season_dir) + "/folder.jpg as season dir for season " + str(season), logger.DEBUG) + logger.debug(f'Using {season_dir}/folder.jpg as season dir for season {season}') - return ek.ek(os.path.join, show_obj.location, season_dir, 'folder.jpg') + return os.path.join(show_obj.location, season_dir, 'folder.jpg') def _ep_data(self, ep_obj): # type: (sickgear.tv.TVEpisode) -> Optional[Union[bool, etree.Element]] @@ -207,8 +204,8 @@ class WDTVMetadata(generic.GenericMetadata): except BaseTVinfoShownotfound as e: raise exceptions_helper.ShowNotFoundException(ex(e)) except BaseTVinfoError as e: - logger.log("Unable to connect to %s while creating meta files - skipping - %s" % - (sickgear.TVInfoAPI(ep_obj.show_obj.tvid).name, ex(e)), logger.ERROR) + logger.error(f'Unable to connect to {sickgear.TVInfoAPI(ep_obj.show_obj.tvid).name}' + f' while creating meta files - skipping - {ex(e)}') return False if not self._valid_show(show_info, ep_obj.show_obj): diff --git a/sickgear/metadata/xbmc.py b/sickgear/metadata/xbmc.py index 99445335..ae5de5a3 100644 --- a/sickgear/metadata/xbmc.py +++ b/sickgear/metadata/xbmc.py @@ -20,8 +20,6 @@ import os from . 
import generic, xbmc_12plus import sg_helpers import sickgear -# noinspection PyPep8Naming -import encodingKludge as ek # noinspection PyUnreachableCode if False: @@ -104,7 +102,7 @@ class XBMCMetadata(xbmc_12plus.XBMC12PlusMetadata): ep_obj: a TVEpisode instance for which to create the thumbnail """ - if ek.ek(os.path.isfile, ep_obj.location): + if os.path.isfile(ep_obj.location): tbn_filename = sg_helpers.replace_extension(ep_obj.location, 'tbn') else: return None @@ -127,7 +125,7 @@ class XBMCMetadata(xbmc_12plus.XBMC12PlusMetadata): else: season_poster_filename = 'season' + str(season).zfill(2) - return ek.ek(os.path.join, show_obj.location, season_poster_filename + '.tbn') + return os.path.join(show_obj.location, season_poster_filename + '.tbn') # present a standard "interface" from the module diff --git a/sickgear/metadata/xbmc_12plus.py b/sickgear/metadata/xbmc_12plus.py index 7ffc76a2..2721d291 100644 --- a/sickgear/metadata/xbmc_12plus.py +++ b/sickgear/metadata/xbmc_12plus.py @@ -123,12 +123,11 @@ class XBMC12PlusMetadata(generic.GenericMetadata): try: show_info = t.get_show(show_id, language=show_lang) except BaseTVinfoShownotfound as e: - logger.log('Unable to find show with id %s on %s, skipping it' % - (show_id, sickgear.TVInfoAPI(show_obj.tvid).name), logger.ERROR) + logger.error(f'Unable to find show with id {show_id} on {sickgear.TVInfoAPI(show_obj.tvid).name},' + f' skipping it') raise e except BaseTVinfoError as e: - logger.log('%s is down, can\'t use its data to add this show' % sickgear.TVInfoAPI(show_obj.tvid).name, - logger.ERROR) + logger.error('%s is down, can\'t use its data to add this show' % sickgear.TVInfoAPI(show_obj.tvid).name) raise e if not self._valid_show(show_info, show_obj): @@ -136,8 +135,8 @@ class XBMC12PlusMetadata(generic.GenericMetadata): # check for title and id if None is getattr(show_info, 'seriesname', None) or None is getattr(show_info, 'id', None): - logger.log('Incomplete info for show with id %s on %s, skipping it' % - (show_id, sickgear.TVInfoAPI(show_obj.tvid).name), logger.ERROR) + logger.error(f'Incomplete info for show with id {show_id} on {sickgear.TVInfoAPI(show_obj.tvid).name},' + f' skipping it') return False title = etree.SubElement(tv_node, 'title') @@ -227,8 +226,9 @@ class XBMC12PlusMetadata(generic.GenericMetadata): except BaseTVinfoShownotfound as e: raise exceptions_helper.ShowNotFoundException(ex(e)) except BaseTVinfoError as e: - logger.log('Unable to connect to %s while creating meta files - skipping - %s' % - (sickgear.TVInfoAPI(ep_obj.show_obj.tvid).name, ex(e)), logger.ERROR) + logger.error( + f'Unable to connect to {sickgear.TVInfoAPI(ep_obj.show_obj.tvid).name} while creating meta files' + f' - skipping - {ex(e)}') return if not self._valid_show(show_info, ep_obj.show_obj): @@ -249,17 +249,17 @@ class XBMC12PlusMetadata(generic.GenericMetadata): (cur_ep_obj.season, cur_ep_obj.episode, sickgear.TVInfoAPI(ep_obj.show_obj.tvid).name)) return None except (BaseException, Exception): - logger.log(u'Not generating nfo because failed to fetched tv info data at this time', logger.DEBUG) + logger.debug('Not generating nfo because failed to fetch tv info data at this time') return None if None is getattr(ep_info, 'firstaired', None): ep_info['firstaired'] = str(datetime.date.fromordinal(1)) if None is getattr(ep_info, 'episodename', None): - logger.log(u'Not generating nfo because the ep has no title', logger.DEBUG) + logger.debug('Not generating nfo because the ep has no title') return None - logger.log(u'Creating metadata 
for episode ' + str(ep_obj.season) + 'x' + str(ep_obj.episode), logger.DEBUG) + logger.debug(f'Creating metadata for episode {ep_obj.season}x{ep_obj.episode}') if 1 < len(ep_obj_list_to_write): episode = etree.SubElement(rootNode, 'episodedetails') diff --git a/sickgear/name_cache.py b/sickgear/name_cache.py index 1ecac246..c7225a27 100644 --- a/sickgear/name_cache.py +++ b/sickgear/name_cache.py @@ -32,7 +32,7 @@ sceneNameCache = {} nameCacheLock = threading.Lock() -def addNameToCache(name, tvid=0, prodid=0, season=-1): +def add_name_to_cache(name, tvid=0, prodid=0, season=-1): """Adds the show & tvdb id to the namecache :param name: the show name to cache @@ -41,7 +41,7 @@ def addNameToCache(name, tvid=0, prodid=0, season=-1): :type tvid: int :param prodid: the production id that this show should be cached with (can be None/0 for unknown) :type prodid: int or long - :param season: the season the the name exception belongs to. -1 for generic exception + :param season: the season the name exception belongs to. -1 for generic exception :type season: int """ global nameCache @@ -53,7 +53,7 @@ def addNameToCache(name, tvid=0, prodid=0, season=-1): nameCache[name] = [int(tvid), int(prodid), season] -def retrieveNameFromCache(name): +def retrieve_name_from_cache(name): # type: (AnyStr) -> Union[Tuple[int, int], Tuple[None, None]] """Looks up the given name in the name cache @@ -71,7 +71,7 @@ def retrieveNameFromCache(name): return None, None -def buildNameCache(show_obj=None, update_only_scene=False): +def build_name_cache(show_obj=None, update_only_scene=False): # type: (Optional[Union[TVShow, TVShowBase]], bool) -> None """Adds all new name exceptions to the namecache memory and flushes any removed name exceptions @@ -104,7 +104,7 @@ def buildNameCache(show_obj=None, update_only_scene=False): for cur_so in sickgear.showList if cur_so]) sceneNameCache = {} - cacheDB = db.DBConnection() + cache_db = db.DBConnection() cache_results = [] if update_only_scene: @@ -117,7 +117,7 @@ def buildNameCache(show_obj=None, update_only_scene=False): tmp_scene_name_cache = sceneNameCache.copy() for t, s in iteritems(show_ids): - cache_results += cacheDB.select( + cache_results += cache_db.select( 'SELECT show_name, indexer AS tv_id, indexer_id AS prod_id, season' ' FROM scene_exceptions' ' WHERE indexer = %s AND indexer_id IN (%s)' % (t, ','.join(['%s' % i for i in s]))) diff --git a/sickgear/name_parser/parser.py b/sickgear/name_parser/parser.py index cd2d5eb6..0132056c 100644 --- a/sickgear/name_parser/parser.py +++ b/sickgear/name_parser/parser.py @@ -32,8 +32,6 @@ except ImportError: regex = None from . import regexes -# noinspection PyPep8Naming -import encodingKludge as ek from exceptions_helper import ex import sickgear from .. import common, db, helpers, logger, scene_exceptions, scene_numbering @@ -41,8 +39,8 @@ from lib.tvinfo_base.exceptions import * from ..classes import OrderedDefaultdict from .._legacy_classes import LegacyParseResult -from _23 import decode_str, list_keys, list_range -from six import iteritems, iterkeys, itervalues, PY2, string_types, text_type +from _23 import decode_str, list_range +from six import iteritems, iterkeys, itervalues, string_types, text_type # noinspection PyUnreachableCode if False: @@ -100,7 +98,7 @@ class NameParser(object): cur_pattern = strip_comment.sub('', cur_pattern) cur_regex = re.compile('(?x)' + cur_pattern, re.VERBOSE | re.IGNORECASE) except re.error as errormsg: - logger.log(u'WARNING: Invalid episode_pattern, %s. 
%s' % (errormsg, cur_pattern)) + logger.log(f'WARNING: Invalid episode_pattern, {errormsg}. {cur_pattern}') else: cls.compiled_regexes[index].append([cur_pattern_num, cur_pattern_name, cur_regex]) index += 1 @@ -168,7 +166,7 @@ class NameParser(object): result.which_regex = [cur_regex_name] result.score = 0 - cur_regex_num - named_groups = list_keys(match.groupdict()) + named_groups = list(match.groupdict()) if 'series_name' in named_groups: result.series_name = match.group('series_name') @@ -262,7 +260,7 @@ class NameParser(object): if 'extra_info' in named_groups: tmp_extra_info = match.group('extra_info') - # Show.S04.Special or Show.S05.Part.2.Extras is almost certainly not every episode in the season + # Show.S04.Special or Show.S05.Part.2.Extras are almost certainly not every episode in the season if tmp_extra_info and 'season_only' == cur_regex_name and re.search( r'([. _-]|^)(special|extra)s?\w*([. _-]|$)', tmp_extra_info, re.I): continue @@ -294,7 +292,7 @@ class NameParser(object): matches.append(result) if len(matches): - # pick best match with highest score based on placement + # pick best match with the highest score based on placement best_result = max(sorted(matches, reverse=True, key=lambda x: x.which_regex), key=lambda x: x.score) show_obj = None @@ -330,7 +328,7 @@ class NameParser(object): # get quality new_name = helpers.remove_non_release_groups(name, show_obj.is_anime) - best_result.quality = common.Quality.nameQuality(new_name, show_obj.is_anime) + best_result.quality = common.Quality.name_quality(new_name, show_obj.is_anime) new_episode_numbers = [] new_season_numbers = [] @@ -383,13 +381,12 @@ class NameParser(object): season_number = int(ep_obj['seasonnumber']) episode_numbers = [int(ep_obj['episodenumber'])] - except BaseTVinfoEpisodenotfound as e: - logger.warning(u'Unable to find episode with date %s for show %s, skipping' % - (best_result.air_date, show_obj.unique_name)) + except BaseTVinfoEpisodenotfound: + logger.warning(f'Unable to find episode with date {best_result.air_date}' + f' for show {show_obj.unique_name}, skipping') episode_numbers = [] except BaseTVinfoError as e: - logger.log(u'Unable to contact ' + sickgear.TVInfoAPI(show_obj.tvid).name - + ': ' + ex(e), logger.WARNING) + logger.warning(f'Unable to contact {sickgear.TVInfoAPI(show_obj.tvid).name}: {ex(e)}') episode_numbers = [] for epNo in episode_numbers: @@ -455,7 +452,7 @@ class NameParser(object): 'SickGear does not support this. ' 'Sorry.' 
% (str(new_season_numbers))) - # I guess it's possible that we'd have duplicate episodes too, so lets + # I guess it's possible that we'd have duplicate episodes too, so let's # eliminate them new_episode_numbers = list(set(new_episode_numbers)) new_episode_numbers.sort() @@ -472,9 +469,8 @@ class NameParser(object): best_result.season_number = new_season_numbers[0] if self.convert and show_obj.is_scene: - logger.log(u'Converted parsed result %s into %s' - % (best_result.original_name, decode_str(str(best_result), errors='xmlcharrefreplace')), - logger.DEBUG) + logger.debug(f'Converted parsed result {best_result.original_name}' + f' into {decode_str(best_result, errors="xmlcharrefreplace")}') helpers.cpu_sleep() @@ -504,23 +500,20 @@ class NameParser(object): if not second: return getattr(first, attr) - a = getattr(first, attr, []) - b = getattr(second, attr) + first_val = getattr(first, attr, []) + second_val = getattr(second, attr) - # if a is good use it - if None is not a or (isinstance(a, list) and len(a)): - return a + # if first_val is good use it + if None is not first_val or (isinstance(first_val, list) and len(first_val)): + return first_val # if not use b (if b isn't set it'll just be default) - return b + return second_val @staticmethod - def _unicodify(obj, encoding='utf-8'): - if PY2 and isinstance(obj, string_types): - if not isinstance(obj, text_type): - obj = text_type(obj, encoding, 'replace') - if not PY2 and isinstance(obj, text_type): + def _unicodify(obj, encoding='utf8'): + if isinstance(obj, text_type): try: - return obj.encode('latin1').decode('utf8') + return obj.encode('latin1').decode(encoding) except (BaseException, Exception): pass return obj @@ -583,7 +576,7 @@ class NameParser(object): return cached # break it into parts if there are any (dirname, file name, extension) - dir_name, file_name = ek.ek(os.path.split, name) + dir_name, file_name = os.path.split(name) if self.file_name: base_file_name = helpers.remove_extension(file_name) @@ -598,7 +591,7 @@ class NameParser(object): file_name_result = self._parse_string(base_file_name) # use only the direct parent dir - dir_name = ek.ek(os.path.basename, dir_name) + dir_name = os.path.basename(dir_name) # parse the dirname for extra info if needed dir_name_result = self._parse_string(dir_name) @@ -653,7 +646,7 @@ class NameParser(object): and any('anime' in wr for wr in final_result.which_regex) == bool(final_result.show_obj.is_anime): name_parser_cache.add(name, final_result) - logger.log(u'Parsed %s into %s' % (name, final_result), logger.DEBUG) + logger.debug(f'Parsed {name} into {final_result}') return final_result @@ -755,15 +748,13 @@ class ParseResult(LegacyParseResult): self.release_group, self.air_date, tuple(self.ab_episode_numbers))) def __str__(self): - if not PY2: - return self.__unicode__() - return self.__unicode__().encode('utf-8', errors='ignore') + return self.__unicode__() def __unicode__(self): if None is not self.series_name: - to_return = self.series_name + u' - ' + to_return = f'{self.series_name} - ' else: - to_return = u'' + to_return = '' if None is not self.season_number: to_return += 'S' + str(self.season_number) if self.episode_numbers and len(self.episode_numbers): @@ -872,7 +863,7 @@ class NameParserCache(object): key = self._previous_parsed.first_key() del self._previous_parsed[key] except KeyError: - logger.log('Could not remove old NameParserCache entry: %s' % key, logger.DEBUG) + logger.debug('Could not remove old NameParserCache entry: %s' % key) def get(self, name): # type: 
(AnyStr) -> ParseResult @@ -885,7 +876,7 @@ class NameParserCache(object): """ with self.lock: if name in self._previous_parsed: - logger.log('Using cached parse result for: ' + name, logger.DEBUG) + logger.debug('Using cached parse result for: ' + name) self._previous_parsed.move_to_end(name) return self._previous_parsed[name] diff --git a/sickgear/name_parser/regexes.py b/sickgear/name_parser/regexes.py index 9a6b30db..85df55a5 100644 --- a/sickgear/name_parser/regexes.py +++ b/sickgear/name_parser/regexes.py @@ -14,7 +14,7 @@ # You should have received a copy of the GNU General Public License # along with SickGear. If not, see . -# all regexes are case insensitive +# all regexes are case-insensitive normal_regexes = [ ('garbage_name', diff --git a/sickgear/naming.py b/sickgear/naming.py index 0bddae7d..3d2378b2 100644 --- a/sickgear/naming.py +++ b/sickgear/naming.py @@ -22,9 +22,6 @@ from . import common, logger, tv from .common import Quality, DOWNLOADED from .name_parser.parser import NameParser -# noinspection PyPep8Naming -import encodingKludge as ek - # noinspection PyUnreachableCode if False: from typing import AnyStr, Dict, List @@ -112,7 +109,7 @@ class TVEpisodeSample(tv.TVEpisode): self.scene_absolute_number = absolute_number # type: int self._airdate = datetime.date(2010, 3, 9) # type: datetime.date self.show_obj = TVShowSample() # type: TVShowSample - self._status = Quality.compositeStatus(common.DOWNLOADED, common.Quality.SDTV) # type: int + self._status = Quality.composite_status(common.DOWNLOADED, common.Quality.SDTV) # type: int self._release_name = 'Show.Name.S02E03.HDTV.XviD-RLSGROUP' # type: AnyStr self._is_proper = True # type: bool self._version = 2 # type: int @@ -168,11 +165,11 @@ def check_valid_naming(pattern=None, multi=None, anime_type=None): if None is anime_type: anime_type = sickgear.NAMING_ANIME - logger.log(u'Checking whether the pattern %s is valid for a single episode' % pattern, logger.DEBUG) + logger.debug(f'Checking whether the pattern {pattern} is valid for a single episode') valid = validate_name(pattern, None, anime_type) if None is not multi: - logger.log(u'Checking whether the pattern %s is valid for a multi episode' % pattern, logger.DEBUG) + logger.debug(f'Checking whether the pattern {pattern} is valid for a multi episode') valid = valid and validate_name(pattern, multi, anime_type) return valid @@ -191,7 +188,7 @@ def check_valid_abd_naming(pattern=None): if None is pattern: pattern = sickgear.NAMING_PATTERN - logger.log(u'Checking whether the pattern %s is valid for an air-by-date episode' % pattern, logger.DEBUG) + logger.debug(f'Checking whether the pattern {pattern} is valid for an air-by-date episode') valid = validate_name(pattern, abd=True) return valid @@ -199,7 +196,7 @@ def check_valid_abd_naming(pattern=None): def check_valid_sports_naming(pattern=None): """ - Checks if the name is can be parsed back to its original form for an sports format. + Checks if the name is can be parsed back to its original form for a sports format. Returns true if the naming is valid, false if not. 
:param pattern: String Naming Pattern @@ -210,7 +207,7 @@ def check_valid_sports_naming(pattern=None): if None is pattern: pattern = sickgear.NAMING_PATTERN - logger.log(u'Checking whether the pattern %s is valid for an sports episode' % pattern, logger.DEBUG) + logger.debug(f'Checking whether the pattern {pattern} is valid for an sports episode') valid = validate_name(pattern, sports=True) return valid @@ -236,43 +233,43 @@ def validate_name(pattern, multi=None, anime_type=None, file_only=False, abd=Fal """ sample_ep_obj = generate_sample_ep(multi, abd, sports, anime_type=anime_type) - new_name = u'%s.ext' % sample_ep_obj.formatted_filename(pattern, multi, anime_type) + new_name = f'{sample_ep_obj.formatted_filename(pattern, multi, anime_type)}.ext' new_path = sample_ep_obj.formatted_dir(pattern, multi) if not file_only: - new_name = ek.ek(os.path.join, new_path, new_name) + new_name = os.path.join(new_path, new_name) if not new_name: - logger.log(u'Unable to create a name out of %s' % pattern, logger.DEBUG) + logger.debug(f'Unable to create a name out of {pattern}') return False - logger.log(u'Trying to parse %s' % new_name, logger.DEBUG) + logger.debug(f'Trying to parse {new_name}') parser = NameParser(True, show_obj=sample_ep_obj.show_obj, naming_pattern=True) try: result = parser.parse(new_name) except (BaseException, Exception): - logger.log(u'Unable to parse %s, not valid' % new_name, logger.DEBUG) + logger.debug(f'Unable to parse {new_name}, not valid') return False - logger.log(u'The name %s parsed into %s' % (new_name, result), logger.DEBUG) + logger.debug(f'The name {new_name} parsed into {result}') if abd or sports: if result.air_date != sample_ep_obj.airdate: - logger.log(u'Air date incorrect in parsed episode, pattern isn\'t valid', logger.DEBUG) + logger.debug('Air date incorrect in parsed episode, pattern isn\'t valid') return False elif 3 == anime_type: if result.season_number != sample_ep_obj.season: - logger.log(u'Season number incorrect in parsed episode, pattern isn\'t valid', logger.DEBUG) + logger.debug('Season number incorrect in parsed episode, pattern isn\'t valid') return False if result.episode_numbers != [x.episode for x in [sample_ep_obj] + sample_ep_obj.related_ep_obj]: - logger.log(u'Episode numbering incorrect in parsed episode, pattern isn\'t valid', logger.DEBUG) + logger.debug('Episode numbering incorrect in parsed episode, pattern isn\'t valid') return False else: if len(result.ab_episode_numbers) \ and result.ab_episode_numbers != [x.absolute_number for x in [sample_ep_obj] + sample_ep_obj.related_ep_obj]: - logger.log(u'Absolute numbering incorrect in parsed episode, pattern isn\'t valid', logger.DEBUG) + logger.debug('Absolute numbering incorrect in parsed episode, pattern isn\'t valid') return False return True @@ -297,7 +294,7 @@ def generate_sample_ep(multi=None, abd=False, sports=False, anime=False, anime_t # make a fake episode object sample_ep_obj = TVEpisodeSample(2, 3, 3, 'Ep Name') - sample_ep_obj._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV) + sample_ep_obj._status = Quality.composite_status(DOWNLOADED, Quality.HDTV) sample_ep_obj._airdate = datetime.date(2011, 3, 9) if abd: @@ -316,14 +313,14 @@ def generate_sample_ep(multi=None, abd=False, sports=False, anime=False, anime_t if None is not multi: sample_ep_obj._name = 'Ep Name (1)' second_ep = TVEpisodeSample(2, 4, 4, 'Ep Name (2)') - second_ep._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV) + second_ep._status = Quality.composite_status(DOWNLOADED, Quality.HDTV) 
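# Illustrative sketch (not part of the patch): the encodingKludge removal seen in the
# hunks above swaps wrapped calls such as ek.ek(os.path.join, new_path, new_name) for
# direct stdlib calls, which is safe on the now py3-only codebase. Values below are
# hypothetical examples, not taken from SickGear.
import os

show_dir = '/media/tv/Show Name'        # assumed example path
file_name = 'Show.Name.S02E03.mkv'      # assumed example file name

# before (py2-era):  new_name = ek.ek(os.path.join, show_dir, file_name)
# after  (py3-only): call os.path directly
new_name = os.path.join(show_dir, file_name)
parent_dir = os.path.basename(os.path.dirname(new_name))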
normal_naming = not anime or 3 == anime_type release_name = sample_ep_obj._release_name = second_ep._release_name = \ ('Show.Name.003-004.HDTV.XviD-RLSGROUP', 'Show.Name.S02E03E04E05.HDTV.XviD-RLSGROUP')[normal_naming] sample_ep_obj.related_ep_obj.append(second_ep) if normal_naming: third_ep = TVEpisodeSample(2, 5, 5, 'Ep Name (3)') - third_ep._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV) + third_ep._status = Quality.composite_status(DOWNLOADED, Quality.HDTV) third_ep._release_name = release_name sample_ep_obj.related_ep_obj.append(third_ep) else: diff --git a/sickgear/network_timezones.py b/sickgear/network_timezones.py index 9dda4d22..abedde55 100644 --- a/sickgear/network_timezones.py +++ b/sickgear/network_timezones.py @@ -25,21 +25,18 @@ import sickgear from . import db, helpers, logger from sg_helpers import int_to_time -# noinspection PyPep8Naming -import encodingKludge as ek from lib.dateutil import tz, zoneinfo from lib.tzlocal import get_localzone from sg_helpers import remove_file_perm, scantree -from six import integer_types, iteritems, string_types, PY2 -from _23 import list_keys +from six import integer_types, iteritems, string_types # noinspection PyUnreachableCode if False: from _23 import DirEntry from typing import AnyStr, Optional, Tuple, Union -# regex to parse time (12/24 hour format) +# regex to parse time (12/24-hour format) time_regex = re.compile(r'(\d{1,2})(([:.](\d{2}))? ?([PA][. ]? ?M)|[:.](\d{2}))\b', flags=re.I) am_regex = re.compile(r'(A[. ]? ?M)', flags=re.I) pm_regex = re.compile(r'(P[. ]? ?M)', flags=re.I) @@ -126,8 +123,8 @@ def get_utc(): pass if isinstance(utc, datetime.tzinfo): return utc - tz_utc_file = ek.ek(os.path.join, ek.ek(os.path.dirname, zoneinfo.__file__), 'Greenwich') - if ek.ek(os.path.isfile, tz_utc_file): + tz_utc_file = os.path.join(os.path.dirname(zoneinfo.__file__), 'Greenwich') + if os.path.isfile(tz_utc_file): return tz.tzfile(tz_utc_file) @@ -154,14 +151,14 @@ def _remove_old_zoneinfo(): """ if None is not zoneinfo.ZONEFILENAME: current_file = helpers.real_path( - ek.ek(os.path.join, sickgear.ZONEINFO_DIR, ek.ek(os.path.basename, zoneinfo.ZONEFILENAME))) + os.path.join(sickgear.ZONEINFO_DIR, os.path.basename(zoneinfo.ZONEFILENAME))) for entry in chain.from_iterable([scantree(helpers.real_path(_dir), include=r'\.tar\.gz$', filter_kind=False) for _dir in (sickgear.ZONEINFO_DIR, )]): # type: DirEntry if current_file != entry.path: if remove_file_perm(entry.path, log_err=False): - logger.log(u'Delete unneeded old zoneinfo File: %s' % entry.path) + logger.log(f'Delete unneeded old zoneinfo File: {entry.path}') else: - logger.log(u'Unable to delete: %s' % entry.path, logger.ERROR) + logger.error(f'Unable to delete: {entry.path}') def _update_zoneinfo(): @@ -177,24 +174,23 @@ def _update_zoneinfo(): url_data = helpers.get_url(url) if None is url_data: update_last_retry() - # when None is urlData, trouble connecting to github - logger.log(u'Fetching zoneinfo.txt failed, this can happen from time to time. Unable to get URL: %s' % url, - logger.WARNING) + # when None is urlData, trouble connecting to GitHub + logger.warning(f'Fetching zoneinfo.txt failed, this can happen from time to time. 
Unable to get URL: {url}') return reset_last_retry() try: - (new_zoneinfo, zoneinfo_md5) = url_data.strip().rsplit(u' ') + (new_zoneinfo, zoneinfo_md5) = url_data.strip().rsplit(' ') except (BaseException, Exception): - logger.log('Fetching zoneinfo.txt failed, update contains unparsable data: %s' % url_data, logger.DEBUG) + logger.debug('Fetching zoneinfo.txt failed, update contains unparsable data: %s' % url_data) return current_file = zoneinfo.ZONEFILENAME if None is not current_file: - current_file = ek.ek(os.path.basename, current_file) - zonefile = helpers.real_path(ek.ek(os.path.join, sickgear.ZONEINFO_DIR, current_file)) - zonemetadata = None if not ek.ek(os.path.isfile, zonefile) else \ + current_file = os.path.basename(current_file) + zonefile = helpers.real_path(os.path.join(sickgear.ZONEINFO_DIR, current_file)) + zonemetadata = None if not os.path.isfile(zonefile) else \ zoneinfo.ZoneInfoFile(zoneinfo.getzoneinfofile_stream()).metadata newtz_regex = re.search(r'(\d{4}[^.]+)', new_zoneinfo) @@ -209,31 +205,31 @@ def _update_zoneinfo(): return # load the new zoneinfo - url_tar = u'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/%s' % new_zoneinfo + url_tar = f'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/{new_zoneinfo}' zonefile_tmp = re.sub(r'\.tar\.gz$', '.tmp', zonefile) if not remove_file_perm(zonefile_tmp, log_err=False): - logger.log(u'Unable to delete: %s' % zonefile_tmp, logger.ERROR) + logger.error(f'Unable to delete: {zonefile_tmp}') return if not helpers.download_file(url_tar, zonefile_tmp): return - if not ek.ek(os.path.exists, zonefile_tmp): - logger.log(u'Download of %s failed.' % zonefile_tmp, logger.ERROR) + if not os.path.exists(zonefile_tmp): + logger.error(f'Download of {zonefile_tmp} failed.') return new_hash = str(helpers.md5_for_file(zonefile_tmp)) if zoneinfo_md5.upper() == new_hash.upper(): - logger.log(u'Updating timezone info with new one: %s' % new_zoneinfo, logger.MESSAGE) + logger.log(f'Updating timezone info with new one: {new_zoneinfo}', logger.MESSAGE) try: # remove the old zoneinfo file if None is not current_file: remove_file_perm(zonefile) # rename downloaded file - ek.ek(os.rename, zonefile_tmp, zonefile) + os.rename(zonefile_tmp, zonefile) setattr(zoneinfo, '_CLASS_ZONE_INSTANCE', list()) tz.gettz.cache_clear() from dateutil.zoneinfo import get_zonefile_instance @@ -248,7 +244,7 @@ def _update_zoneinfo(): return else: remove_file_perm(zonefile_tmp, log_err=False) - logger.log(u'MD5 hash does not match: %s File: %s' % (zoneinfo_md5.upper(), new_hash.upper()), logger.ERROR) + logger.error(f'MD5 hash does not match: {zoneinfo_md5.upper()} File: {new_hash.upper()}') return @@ -266,14 +262,14 @@ def update_network_dict(): network_tz_data = {} - # network timezones are stored on github pages + # network timezones are stored on GitHub pages url = 'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/network_timezones.txt' url_data = helpers.get_url(url) if url_data in (None, ''): update_last_retry() - # When None is urlData, trouble connecting to github - logger.debug(u'Updating network timezones failed, this can happen from time to time. URL: %s' % url) + # When None is urlData, trouble connecting to GitHub + logger.debug(f'Updating network timezones failed, this can happen from time to time. 
URL: {url}') load_network_dict(load=False) return @@ -282,7 +278,7 @@ def update_network_dict(): try: for line in url_data.splitlines(): try: - (name, tzone) = line.strip().rsplit(u':', 1) + (name, tzone) = line.strip().rsplit(':', 1) except (BaseException, Exception): continue if None is name or None is tzone: @@ -416,7 +412,7 @@ def parse_time(time_of_day): hour = helpers.try_int(time_parsed.group(1)) mins = helpers.try_int(time_parsed.group(4)) ampm = time_parsed.group(5) - # convert am/pm to 24 hour clock + # convert am/pm to 24-hour clock if None is not ampm: if None is not pm_regex.search(ampm) and 12 != hour: hour += 12 @@ -508,21 +504,21 @@ def _load_network_conversions(): conversions_in = [] - # network conversions are stored on github pages + # network conversions are stored on GitHub pages url = 'https://raw.githubusercontent.com/prinz23/sg_network_conversions/master/conversions.txt' url_data = helpers.get_url(url) if url_data in (None, ''): update_last_retry() - # when no url_data, trouble connecting to github - logger.debug(u'Updating network conversions failed, this can happen from time to time. URL: %s' % url) + # when no url_data, trouble connecting to GitHub + logger.debug(f'Updating network conversions failed, this can happen from time to time. URL: {url}') return reset_last_retry() try: for line in url_data.splitlines(): - (tvdb_network, tvrage_network, tvrage_country) = line.strip().rsplit(u'::', 2) + (tvdb_network, tvrage_network, tvrage_country) = line.strip().rsplit('::', 2) if not (tvdb_network and tvrage_network and tvrage_country): continue conversions_in.append( @@ -549,7 +545,7 @@ def _load_network_conversions(): # remove deleted records if 0 < len(conversions_db): - network_name = list_keys(conversions_db) + network_name = list(conversions_db) cl.append(['DELETE FROM network_conversions WHERE tvdb_network' ' IN (%s)' % ','.join(['?'] * len(network_name)), network_name]) @@ -612,7 +608,6 @@ def get_episode_time(d, # type: int return SGDatetime.from_timestamp(ep_timestamp, tzinfo=tzinfo, tz_aware=True, local_time=False) except OverflowError: logger.debug('Invalid timestamp: %s, using fallback' % ep_timestamp) - ep_timestamp = None ep_time = None if isinstance(ep_airtime, integer_types): @@ -635,8 +630,6 @@ def get_episode_time(d, # type: int if d and None is not ep_time and None is not tzinfo: ep_date = datetime.date.fromordinal(helpers.try_int(d)) - if PY2: - return datetime.datetime.combine(ep_date, ep_time).replace(tzinfo=tzinfo) return datetime.datetime.combine(ep_date, ep_time, tzinfo) return parse_date_time(d, t, tzinfo) diff --git a/sickgear/notifiers/__init__.py b/sickgear/notifiers/__init__.py index 342e18e8..b35ae421 100644 --- a/sickgear/notifiers/__init__.py +++ b/sickgear/notifiers/__init__.py @@ -24,10 +24,6 @@ from . 
import emby, kodi, plex, xbmc, \ discord, emailnotify, gitter, libnotify, growl, prowl, slack, telegram, trakt import sickgear -# noinspection PyPep8Naming -import encodingKludge as ek - -from _23 import filter_iter, list_values class NotifierFactory(object): @@ -70,32 +66,27 @@ class NotifierFactory(object): :return: ID String :rtype: String """ - for n in filter_iter(lambda v: v.is_enabled(), - list_values(self.notifiers)): + for n in filter(lambda v: v.is_enabled(), list(self.notifiers.values())): yield n.id() @property def enabled_onsnatch(self): - for n in filter_iter(lambda v: v.is_enabled() and v.is_enabled_onsnatch(), - list_values(self.notifiers)): + for n in filter(lambda v: v.is_enabled() and v.is_enabled_onsnatch(), list(self.notifiers.values())): yield n.id() @property def enabled_ondownload(self): - for n in filter_iter(lambda v: v.is_enabled() and v.is_enabled_ondownload(), - list_values(self.notifiers)): + for n in filter(lambda v: v.is_enabled() and v.is_enabled_ondownload(), list(self.notifiers.values())): yield n.id() @property def enabled_onsubtitledownload(self): - for n in filter_iter(lambda v: v.is_enabled() and v.is_enabled_onsubtitledownload(), - list_values(self.notifiers)): + for n in filter(lambda v: v.is_enabled() and v.is_enabled_onsubtitledownload(), list(self.notifiers.values())): yield n.id() @property def enabled_library(self): - for n in filter_iter(lambda v: v.is_enabled() and v.is_enabled_library(), - list_values(self.notifiers)): + for n in filter(lambda v: v.is_enabled() and v.is_enabled_library(), list(self.notifiers.values())): yield n.id() def get(self, nid): @@ -159,7 +150,7 @@ def notify_update_library(ep_obj, flush_q=False): continue shows.add(show_name) else: - parent_dir = re.sub(r'[/\\]+%s.*' % show_name, '', ek.ek(os.path.dirname, location)) + parent_dir = re.sub(r'[/\\]+%s.*' % show_name, '', os.path.dirname(location)) parent_dir = re.sub(r'^(.{,2})[/\\]', '', parent_dir) if parent_dir in locations: continue diff --git a/sickgear/notifiers/boxcar2.py b/sickgear/notifiers/boxcar2.py index c8d8f16f..b6d2ecd2 100644 --- a/sickgear/notifiers/boxcar2.py +++ b/sickgear/notifiers/boxcar2.py @@ -72,7 +72,7 @@ class Boxcar2Notifier(Notifier): except urllib.error.HTTPError as e: if not hasattr(e, 'code'): - self._log_error(u'Notification failed: %s' % ex(e)) + self._log_error(f'Notification failed: {ex(e)}') else: result = 'Notification failed. 
Error code: %s' % e.code
                 self._log_error(result)
@@ -91,7 +91,7 @@ class Boxcar2Notifier(Notifier):
                 result = 'Wrong data sent to Boxcar'
                 self._log_error(result)
         except urllib.error.URLError as e:
-            self._log_error(u'Notification failed: %s' % ex(e))
+            self._log_error(f'Notification failed: {ex(e)}')
         return self._choose((True, 'Failed to send notification: %s' % result)[bool(result)], not bool(result))
diff --git a/sickgear/notifiers/emailnotify.py b/sickgear/notifiers/emailnotify.py
index 68c51bbd..d4dab8f5 100644
--- a/sickgear/notifiers/emailnotify.py
+++ b/sickgear/notifiers/emailnotify.py
@@ -44,8 +44,8 @@ class EmailNotifier(Notifier):
         use_tls = 1 == sickgear.helpers.try_int(use_tls)
         login = any(user) and any(pwd)
-        self._log_debug(u'Sendmail HOST: %s; PORT: %s; LOGIN: %s, TLS: %s, USER: %s, FROM: %s, TO: %s' % (
-            host, port, login, use_tls, user, smtp_from, to))
+        self._log_debug(f'Sendmail HOST: {host}; PORT: {port};'
+                        f' LOGIN: {login}, TLS: {use_tls}, USER: {user}, FROM: {smtp_from}, TO: {to}')
         try:
             srv = smtplib.SMTP(host, int(port))
@@ -54,16 +54,16 @@
             if use_tls or login:
                 srv.ehlo()
-                self._log_debug(u'Sent initial EHLO command')
+                self._log_debug('Sent initial EHLO command')
             if use_tls:
                 srv.starttls()
                 srv.ehlo()
-                self._log_debug(u'Sent STARTTLS and EHLO command')
+                self._log_debug('Sent STARTTLS and EHLO command')
             if login:
                 srv.login(user, pwd)
-                self._log_debug(u'Sent LOGIN command')
+                self._log_debug('Sent LOGIN command')
             srv.sendmail(smtp_from, to, msg.as_string())
             srv.quit()
@@ -101,10 +101,10 @@
         show_name = body.split(' - ')[0]
         to = self._get_recipients(show_name)
         if not any(to):
-            self._log_warning(u'No email recipients to notify, skipping')
+            self._log_warning('No email recipients to notify, skipping')
             return
-        self._log_debug(u'Email recipients to notify: %s' % to)
+        self._log_debug(f'Email recipients to notify: {to}')
         try:
             msg = MIMEMultipart('alternative')
@@ -131,9 +131,9 @@
             msg['Date'] = formatdate(localtime=True)
             if self._sendmail(sickgear.EMAIL_HOST, sickgear.EMAIL_PORT, sickgear.EMAIL_FROM, sickgear.EMAIL_TLS,
                               sickgear.EMAIL_USER, sickgear.EMAIL_PASSWORD, to, msg):
-                self._log_debug(u'%s notification sent to [%s] for "%s"' % (title, to, body))
+                self._log_debug(f'{title} notification sent to [{to}] for "{body}"')
             else:
-                self._log_error(u'%s notification ERROR: %s' % (title, self.last_err))
+                self._log_error(f'{title} notification ERROR: {self.last_err}')
     def test_notify(self, host, port, smtp_from, use_tls, user, pwd, to):
         self._testing = True
diff --git a/sickgear/notifiers/emby.py b/sickgear/notifiers/emby.py
index 249c6639..91e3e27e 100644
--- a/sickgear/notifiers/emby.py
+++ b/sickgear/notifiers/emby.py
@@ -21,7 +21,7 @@ from .generic import Notifier
 from json_helper import json_loads
 import sickgear
-from _23 import decode_bytes, decode_str, map_list
+from _23 import decode_bytes, decode_str
 class EmbyNotifier(Notifier):
@@ -50,7 +50,7 @@
             timeout=20, hooks=dict(response=self._cb_response), json=True)
         return self.response and self.response.get('ok') and 200 == self.response.get('status_code') and \
-            version <= map_list(lambda x: int(x), (response and response.get('Version') or '0.0.0.0').split('.'))
+            version <= list(map(lambda x: int(x), (response and response.get('Version') or '0.0.0.0').split('.')))
     def update_library(self, show_obj=None, **kwargs):
         """ Update library function
hosts, keys, message = self._check_config() if not hosts: - self._log_warning(u'Issue with hosts or api keys, check your settings') + self._log_warning('Issue with hosts or api keys, check your settings') return False from sickgear.indexers import indexer_config @@ -98,10 +98,10 @@ class EmbyNotifier(Notifier): timeout=20, hooks=dict(response=self._cb_response), **args) # Emby will initiate a LibraryMonitor path refresh one minute after this success if self.response and 204 == self.response.get('status_code') and self.response.get('ok'): - self._log(u'Success: update %s sent to host %s in a library updated call' % (mode_to_log, cur_host)) + self._log(f'Success: update {mode_to_log} sent to host {cur_host} in a library updated call') continue elif self.response and 401 == self.response.get('status_code'): - self._log_warning(u'Failed to authenticate with %s' % cur_host) + self._log_warning(f'Failed to authenticate with {cur_host}') elif self.response and 404 == self.response.get('status_code'): self.response = None sickgear.helpers.get_url( @@ -109,16 +109,16 @@ class EmbyNotifier(Notifier): headers={'Content-type': 'application/json', 'X-MediaBrowser-Token': keys[i]}, timeout=20, hooks=dict(response=self._cb_response), post_json={'Path': '', 'UpdateType': ''}) if self.response and 204 == self.response.get('status_code') and self.response.get('ok'): - self._log(u'Success: fallback to sending Library/Media/Updated call' - u' to scan all shows at host %s' % cur_host) + self._log(f'Success: fallback to sending Library/Media/Updated call' + f' to scan all shows at host {cur_host}') continue - self._log_debug(u'Warning, Library update responded 404 not found and' - u' fallback to new /Library/Media/Updated api call failed at %s' % cur_host) + self._log_debug(f'Warning, Library update responded 404 not found and' + f' fallback to new /Library/Media/Updated api call failed at {cur_host}') elif not response and not self.response or not self.response.get('ok'): - self._log_warning(u'Warning, could not connect with server at %s' % cur_host) + self._log_warning(f'Warning, could not connect with server at {cur_host}') else: - self._log_debug(u'Warning, unknown response %sfrom %s, can most likely be ignored' - % (self.response and '%s ' % self.response.get('status_code') or '', cur_host)) + self._log_debug(f'Warning, unknown response %sfrom {cur_host}, can most likely be ignored' + % (self.response and '%s ' % self.response.get('status_code') or '')) total_success = False return total_success @@ -181,7 +181,7 @@ class EmbyNotifier(Notifier): if len(hosts) != len(apikeys): message = ('Not enough Api keys for hosts', 'More Api keys than hosts')[len(apikeys) > len(hosts)] - self._log_warning(u'%s, check your settings' % message) + self._log_warning(f'{message}, check your settings') return False, False, message return hosts, apikeys, 'OK' @@ -215,12 +215,12 @@ class EmbyNotifier(Notifier): if self.response and 401 == self.response.get('status_code'): success = False message += ['Fail: Cannot authenticate API key with %s' % cur_host] - self._log_warning(u'Failed to authenticate with %s' % cur_host) + self._log_warning(f'Failed to authenticate with {cur_host}') continue elif not response and not self.response or not self.response.get('ok'): success = False message += ['Fail: No supported Emby server found at %s' % cur_host] - self._log_warning(u'Warning, could not connect with server at ' + cur_host) + self._log_warning(f'Warning, could not connect with server at {cur_host}') continue message += ['OK: %s' % 
cur_host] diff --git a/sickgear/notifiers/generic.py b/sickgear/notifiers/generic.py index ce4d98f2..a9f14ff8 100644 --- a/sickgear/notifiers/generic.py +++ b/sickgear/notifiers/generic.py @@ -25,7 +25,7 @@ notify_strings = dict( git_updated='SickGear updated', git_updated_text='SickGear updated to commit#: ', test_title='SickGear notification test', - test_body=u'Success testing %s settings from SickGear ʕ•ᴥ•ʔ', + test_body='Success testing %s settings from SickGear ʕ•ᴥ•ʔ', ) @@ -40,7 +40,7 @@ class BaseNotifier(object): return 'https://raw.githubusercontent.com/SickGear/SickGear/main/gui/slick/images/ico/' + self.sg_logo_file def _log(self, msg, level=logger.MESSAGE): - logger.log(u'%s: %s' % (self.name, msg), level) + logger.log(f'{self.name}: {msg}', level) def _log_debug(self, msg): self._log(msg, logger.DEBUG) @@ -108,7 +108,7 @@ class BaseNotifier(object): @staticmethod def _body_only(title, body): # don't use title with updates or testing, as only one str is used - return body if 'SickGear' in title else u'%s: %s' % (title, body.replace('#: ', '# ')) + return body if 'SickGear' in title else f'{title}: {body.replace("#: ", "# ")}' class Notifier(BaseNotifier): @@ -136,7 +136,7 @@ class Notifier(BaseNotifier): self._pre_notify('git_updated', notify_strings['git_updated_text'] + new_version, **kwargs) def _pre_notify(self, notify_string, message, *args, **kwargs): - self._log_debug(u'Sending notification "%s"' % (self._body_only(notify_strings[notify_string], message))) + self._log_debug(f'Sending notification "{self._body_only(notify_strings[notify_string], message)}"') try: return self._notify(notify_strings[notify_string], message, *args, **kwargs) except (BaseException, Exception): diff --git a/sickgear/notifiers/growl.py b/sickgear/notifiers/growl.py index 4eaa872e..ffc51c9a 100644 --- a/sickgear/notifiers/growl.py +++ b/sickgear/notifiers/growl.py @@ -94,7 +94,7 @@ class GrowlNotifier(Notifier): success = True except (BaseException, Exception) as e: - self._log_warning(u'Unable to send growl to %s:%s - %s' % (opts['host'], opts['port'], ex(e))) + self._log_warning(f'Unable to send growl to {opts["host"]}:{opts["port"]} - {ex(e)}') return success diff --git a/sickgear/notifiers/kodi.py b/sickgear/notifiers/kodi.py index 7b652f17..5fb783e8 100644 --- a/sickgear/notifiers/kodi.py +++ b/sickgear/notifiers/kodi.py @@ -94,7 +94,7 @@ class KodiNotifier(Notifier): Returns: True if processing succeeded with no issues else False if any issues found """ if not sickgear.KODI_HOST: - self._log_warning(u'No Kodi hosts specified, check your settings') + self._log_warning('No Kodi hosts specified, check your settings') return False # either update each host, or only attempt to update until one successful result @@ -108,7 +108,7 @@ class KodiNotifier(Notifier): response = self._send_json(cur_host, dict(method='Profiles.GetCurrentProfile')) if self.response and 401 == self.response.get('status_code'): - self._log_debug(u'Failed to authenticate with %s' % cur_host) + self._log_debug(f'Failed to authenticate with {cur_host}') continue if not response: self._maybe_log_failed_detection(cur_host) @@ -117,7 +117,7 @@ class KodiNotifier(Notifier): if self._send_library_update(cur_host, show_name): only_first.update(dict(profile=response.get('label') or 'Master', host=cur_host)) self._log('Success: profile;' + - u'"%(profile)s" at%(first)s host;%(host)s updated%(show)s%(first_note)s' % only_first) + '"%(profile)s" at%(first)s host;%(host)s updated%(show)s%(first_note)s' % only_first) else: 
self._maybe_log_failed_detection(cur_host) result += 1 @@ -148,10 +148,10 @@ class KodiNotifier(Notifier): failed_msg = 'Single show update failed,' if sickgear.KODI_UPDATE_FULL: - self._log_debug(u'%s falling back to full update' % failed_msg) + self._log_debug(f'{failed_msg} falling back to full update') return __method_update(host) - self._log_debug(u'%s consider enabling "Perform full library update" in config/notifications' % failed_msg) + self._log_debug(f'{failed_msg} consider enabling "Perform full library update" in config/notifications') return False ############################################################################## @@ -169,7 +169,7 @@ class KodiNotifier(Notifier): """ if not host: - self._log_warning(u'No host specified, aborting update') + self._log_warning('No host specified, aborting update') return False args = {} @@ -198,14 +198,14 @@ class KodiNotifier(Notifier): """ if not host: - self._log_warning(u'No host specified, aborting update') + self._log_warning('No host specified, aborting update') return False - self._log_debug(u'Updating library via HTTP method for host: %s' % host) + self._log_debug(f'Updating library via HTTP method for host: {host}') # if we're doing per-show if show_name: - self._log_debug(u'Updating library via HTTP method for show %s' % show_name) + self._log_debug(f'Updating library via HTTP method for show {show_name}') # noinspection SqlResolve path_sql = 'SELECT path.strPath' \ @@ -223,29 +223,28 @@ class KodiNotifier(Notifier): # sql used to grab path(s) response = self._send(host, {'command': 'QueryVideoDatabase(%s)' % path_sql}) if not response: - self._log_debug(u'Invalid response for %s on %s' % (show_name, host)) + self._log_debug(f'Invalid response for {show_name} on {host}') return False try: et = etree.fromstring(quote(response, ':\\/<>')) except SyntaxError as e: - self._log_error(u'Unable to parse XML in response: %s' % ex(e)) + self._log_error(f'Unable to parse XML in response: {ex(e)}') return False paths = et.findall('.//field') if not paths: - self._log_debug(u'No valid path found for %s on %s' % (show_name, host)) + self._log_debug(f'No valid path found for {show_name} on {host}') return False for path in paths: # we do not need it double-encoded, gawd this is dumb un_enc_path = decode_str(unquote(path.text), sickgear.SYS_ENCODING) - self._log_debug(u'Updating %s on %s at %s' % (show_name, host, un_enc_path)) + self._log_debug(f'Updating {show_name} on {host} at {un_enc_path}') if not self._send( host, dict(command='ExecBuiltIn', parameter='Kodi.updatelibrary(video, %s)' % un_enc_path)): - self._log_error(u'Update of show directory failed for %s on %s at %s' - % (show_name, host, un_enc_path)) + self._log_error(f'Update of show directory failed for {show_name} on {host} at {un_enc_path}') return False # sleep for a few seconds just to be sure kodi has a chance to finish each directory @@ -253,10 +252,10 @@ class KodiNotifier(Notifier): time.sleep(5) # do a full update if requested else: - self._log_debug(u'Full library update on host: %s' % host) + self._log_debug(f'Full library update on host: {host}') if not self._send(host, dict(command='ExecBuiltIn', parameter='Kodi.updatelibrary(video)')): - self._log_error(u'Failed full library update on: %s' % host) + self._log_error(f'Failed full library update on: {host}') return False return True @@ -277,7 +276,7 @@ class KodiNotifier(Notifier): result = {} if not host: - self._log_warning(u'No host specified, aborting update') + self._log_warning('No host specified, aborting 
update') return result if isinstance(command, dict): @@ -300,8 +299,8 @@ class KodiNotifier(Notifier): if not response.get('error'): return 'OK' == response.get('result') and {'OK': True} or response.get('result') - self._log_error(u'API error; %s from %s in response to command: %s' - % (json_dumps(response['error']), host, json_dumps(command))) + self._log_error(f'API error; {json_dumps(response["error"])} from {host}' + f' in response to command: {json_dumps(command)}') return result def _update_json(self, host=None, show_name=None): @@ -317,12 +316,12 @@ class KodiNotifier(Notifier): """ if not host: - self._log_warning(u'No host specified, aborting update') + self._log_warning('No host specified, aborting update') return False # if we're doing per-show if show_name: - self._log_debug(u'JSON library update. Host: %s Show: %s' % (host, show_name)) + self._log_debug(f'JSON library update. Host: {host} Show: {show_name}') # try fetching tvshowid using show_name with a fallback to getting show list show_name = unquote_plus(show_name) @@ -339,7 +338,7 @@ class KodiNotifier(Notifier): break if not shows: - self._log_debug(u'No items in GetTVShows response') + self._log_debug('No items in GetTVShows response') return False tvshowid = -1 @@ -354,7 +353,7 @@ class KodiNotifier(Notifier): # we didn't find the show (exact match), thus revert to just doing a full update if enabled if -1 == tvshowid: - self._log_debug(u'Doesn\'t have "%s" in it\'s known shows, full library update required' % show_name) + self._log_debug(f'Doesn\'t have "{show_name}" in it\'s known shows, full library update required') return False # lookup tv-show path if we don't already know it @@ -365,24 +364,24 @@ class KodiNotifier(Notifier): path = 'tvshowdetails' in response and response['tvshowdetails'].get('file', '') or '' if not len(path): - self._log_warning(u'No valid path found for %s with ID: %s on %s' % (show_name, tvshowid, host)) + self._log_warning(f'No valid path found for {show_name} with ID: {tvshowid} on {host}') return False - self._log_debug(u'Updating %s on %s at %s' % (show_name, host, path)) + self._log_debug(f'Updating {show_name} on {host} at {path}') command = dict(method='VideoLibrary.Scan', params={'directory': '%s' % json_dumps(path)[1:-1].replace('\\\\', '\\')}) response_scan = self._send_json(host, command) if not response_scan.get('OK'): - self._log_error(u'Update of show directory failed for %s on %s at %s response: %s' % - (show_name, host, path, response_scan)) + self._log_error(f'Update of show directory failed for {show_name} on {host} at {path}' + f' response: {response_scan}') return False # do a full update if requested else: - self._log_debug(u'Full library update on host: %s' % host) + self._log_debug(f'Full library update on host: {host}') response_scan = self._send_json(host, dict(method='VideoLibrary.Scan')) if not response_scan.get('OK'): - self._log_error(u'Failed full library update on: %s response: %s' % (host, response_scan)) + self._log_error(f'Failed full library update on: {host} response: {response_scan}') return False return True @@ -400,7 +399,7 @@ class KodiNotifier(Notifier): def _maybe_log_failed_detection(self, host, msg='connect to'): - self._maybe_log(u'Failed to %s %s, check device(s) and config' % (msg, host), logger.ERROR) + self._maybe_log(f'Failed to {msg} {host}, check device(s) and config', logger.ERROR) def _notify(self, title, body, hosts=None, username=None, password=None, **kwargs): """ Internal wrapper for the notify_snatch and notify_download functions 
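# Illustrative sketch of the logging migration applied throughout these notifier hunks:
# legacy u'' %-formatting routed through logger.log(..., logger.DEBUG) becomes an f-string
# passed to a level-named helper such as logger.debug(...). Uses stdlib logging and
# hypothetical example values; the real SickGear logger wrapper is assumed, not shown.
import logging

logger = logging.getLogger('sickgear.notifiers.example')   # hypothetical logger name
show_name, cur_host = 'Show Name', '192.168.0.10:8080'     # assumed example values

# before (py2-era):
#     logger.log(u'Updating %s on %s' % (show_name, cur_host), logger.DEBUG)
# after (py3-only):
logger.debug(f'Updating {show_name} on {cur_host}')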
@@ -429,20 +428,20 @@ class KodiNotifier(Notifier): if self.response and 401 == self.response.get('status_code'): success = False message += ['Fail: Cannot authenticate with %s' % cur_host] - self._log_debug(u'Failed to authenticate with %s' % cur_host) + self._log_debug(f'Failed to authenticate with {cur_host}') elif not api_version: success = False message += ['Fail: No supported Kodi found at %s' % cur_host] self._maybe_log_failed_detection(cur_host, 'connect and detect version for') else: if 4 >= api_version: - self._log_debug(u'Detected %sversion <= 11, using HTTP API' - % self.prefix and ' ' + self.prefix.capitalize()) + self._log_debug(f'Detected {self.prefix and " " + self.prefix.capitalize()}version <= 11,' + f' using HTTP API') __method_send = self._send command = dict(command='ExecBuiltIn', parameter='Notification(%s,%s)' % (title, body)) else: - self._log_debug(u'Detected version >= 12, using JSON API') + self._log_debug('Detected version >= 12, using JSON API') __method_send = self._send_json command = dict(method='GUI.ShowNotification', params=dict( [('title', title), ('message', body), ('image', self._sg_logo_url)] diff --git a/sickgear/notifiers/libnotify.py b/sickgear/notifiers/libnotify.py index c4a82ffb..9dc47547 100644 --- a/sickgear/notifiers/libnotify.py +++ b/sickgear/notifiers/libnotify.py @@ -44,14 +44,14 @@ def diagnose(): try: bus = dbus.SessionBus() except dbus.DBusException as e: - return (u'Error: unable to connect to D-Bus session bus: %s. ' - u'Are you running SickGear in a desktop session?') % (cgi.escape(e),) + return (f'Error: unable to connect to D-Bus session bus: {cgi.escape(e)}.' + f' Are you running SickGear in a desktop session?') try: bus.get_object('org.freedesktop.Notifications', '/org/freedesktop/Notifications') except dbus.DBusException as e: - return (u'Error: there doesn\'t seem to be a notification daemon available: %s ' - u'Try installing notification-daemon or notify-osd.') % (cgi.escape(e),) + return (f'Error: there doesn\'t seem to be a notification daemon available: {cgi.escape(e)}.' + f' Try installing notification-daemon or notify-osd.') return 'Error: Unable to send notification.' @@ -71,18 +71,18 @@ class LibnotifyNotifier(Notifier): # noinspection PyPackageRequirements import pynotify except ImportError: - self._log_error(u'Unable to import pynotify. libnotify notifications won\'t work') + self._log_error("Unable to import pynotify. libnotify notifications won't work") return False try: # noinspection PyPackageRequirements from gi.repository import GObject except ImportError: - self._log_error(u'Unable to import GObject from gi.repository. Cannot catch a GError in display') + self._log_error('Unable to import GObject from gi.repository. Cannot catch a GError in display') return False if not pynotify.init('SickGear'): - self._log_error(u'Initialization of pynotify failed. libnotify notifications won\'t work') + self._log_error('Initialization of pynotify failed. 
libnotify notifications won\'t work') return False self.pynotify = pynotify diff --git a/sickgear/notifiers/nmj.py b/sickgear/notifiers/nmj.py index 03be6551..fef80f01 100644 --- a/sickgear/notifiers/nmj.py +++ b/sickgear/notifiers/nmj.py @@ -43,11 +43,11 @@ class NMJNotifier(BaseNotifier): try: terminal = telnetlib.Telnet(host) except (BaseException, Exception): - self._log_warning(u'Unable to get a telnet session to %s' % host) + self._log_warning(f'Unable to get a telnet session to {host}') if result: # tell the terminal to output the necessary info to the screen so we can search it later - self._log_debug(u'Connected to %s via telnet' % host) + self._log_debug(f'Connected to {host} via telnet') terminal.read_until('sh-3.00# ') terminal.write('cat /tmp/source\n') terminal.write('cat /tmp/netshare\n') @@ -57,11 +57,11 @@ class NMJNotifier(BaseNotifier): match = re.search(r'(.+\.db)\r\n?(.+)(?=sh-3.00# cat /tmp/netshare)', tnoutput) # if we found the database in the terminal output then save that database to the config if not match: - self._log_warning(u'Could not get current NMJ database on %s, NMJ is probably not running!' % host) + self._log_warning(f'Could not get current NMJ database on {host}, NMJ is probably not running!') else: database = match.group(1) device = match.group(2) - self._log_debug(u'Found NMJ database %s on device %s' % (database, device)) + self._log_debug(f'Found NMJ database {database} on device {device}') sickgear.NMJ_DATABASE = database # if the device is a remote host then try to parse the mounting URL and save it to the config if device.startswith('NETWORK_SHARE/'): @@ -72,7 +72,7 @@ class NMJNotifier(BaseNotifier): 'but could not get the mounting url') else: mount = match.group().replace('127.0.0.1', host) - self._log_debug(u'Found mounting url on the Popcorn Hour in configuration: %s' % mount) + self._log_debug(f'Found mounting url on the Popcorn Hour in configuration: {mount}') sickgear.NMJ_MOUNT = mount result = True @@ -96,23 +96,23 @@ class NMJNotifier(BaseNotifier): database = self._choose(database, sickgear.NMJ_DATABASE) mount = self._choose(mount, sickgear.NMJ_MOUNT) - self._log_debug(u'Sending scan command for NMJ ') + self._log_debug('Sending scan command for NMJ') # if a mount URL is provided then attempt to open a handle to that URL if mount: try: req = urllib.request.Request(mount) - self._log_debug(u'Try to mount network drive via url: %s' % mount) + self._log_debug(f'Try to mount network drive via url: {mount}') http_response_obj = urllib.request.urlopen(req) # PY2 http_response_obj has no `with` context manager http_response_obj.close() except IOError as e: if hasattr(e, 'reason'): - self._log_warning(u'Could not contact Popcorn Hour on host %s: %s' % (host, e.reason)) + self._log_warning(f'Could not contact Popcorn Hour on host {host}: {e.reason}') elif hasattr(e, 'code'): - self._log_warning(u'Problem with Popcorn Hour on host %s: %s' % (host, e.code)) + self._log_warning(f'Problem with Popcorn Hour on host {host}: {e.code}') return False except (BaseException, Exception) as e: - self._log_error(u'Unknown exception: ' + ex(e)) + self._log_error(f'Unknown exception: {ex(e)}') return False # build up the request URL and parameters @@ -123,18 +123,18 @@ class NMJNotifier(BaseNotifier): # send the request to the server try: req = urllib.request.Request(update_url) - self._log_debug(u'Sending scan update command via url: %s' % update_url) + self._log_debug(f'Sending scan update command via url: {update_url}') http_response_obj = 
urllib.request.urlopen(req) response = http_response_obj.read() http_response_obj.close() except IOError as e: if hasattr(e, 'reason'): - self._log_warning(u'Could not contact Popcorn Hour on host %s: %s' % (host, e.reason)) + self._log_warning(f'Could not contact Popcorn Hour on host {host}: {e.reason}') elif hasattr(e, 'code'): - self._log_warning(u'Problem with Popcorn Hour on host %s: %s' % (host, e.code)) + self._log_warning(f'Problem with Popcorn Hour on host {host}: {e.code}') return False except (BaseException, Exception) as e: - self._log_error(u'Unknown exception: ' + ex(e)) + self._log_error(f'Unknown exception: {ex(e)}') return False # try to parse the resulting XML @@ -142,15 +142,15 @@ class NMJNotifier(BaseNotifier): et = etree.fromstring(response) result = et.findtext('returnValue') except SyntaxError as e: - self._log_error(u'Unable to parse XML returned from the Popcorn Hour: %s' % ex(e)) + self._log_error(f'Unable to parse XML returned from the Popcorn Hour: {ex(e)}') return False - # if the result was a number then consider that an error + # if the result was a number, then consider that an error if 0 < int(result): - self._log_error(u'Popcorn Hour returned an errorcode: %s' % result) + self._log_error(f'Popcorn Hour returned an errorcode: {result}') return False - self._log(u'NMJ started background scan') + self._log('NMJ started background scan') return True def _notify(self, host=None, database=None, mount=None, **kwargs): diff --git a/sickgear/notifiers/nmjv2.py b/sickgear/notifiers/nmjv2.py index 8303bae9..654d69a1 100644 --- a/sickgear/notifiers/nmjv2.py +++ b/sickgear/notifiers/nmjv2.py @@ -78,7 +78,7 @@ class NMJv2Notifier(BaseNotifier): result = True except IOError as e: - self._log_warning(u'Couldn\'t contact popcorn hour on host %s: %s' % (host, ex(e))) + self._log_warning(f'Couldn\'t contact popcorn hour on host {host}: {ex(e)}') if result: return '{"message": "Success, NMJ Database found at: %(host)s", "database": "%(database)s"}' % { @@ -100,7 +100,7 @@ class NMJv2Notifier(BaseNotifier): host = self._choose(host, sickgear.NMJv2_HOST) - self._log_debug(u'Sending scan command for NMJ ') + self._log_debug('Sending scan command for NMJ ') # if a host is provided then attempt to open a handle to that URL try: @@ -108,11 +108,11 @@ class NMJv2Notifier(BaseNotifier): url_scandir = '%s%s%s' % (base_url, 'metadata_database?', urlencode( dict(arg0='update_scandir', arg1=sickgear.NMJv2_DATABASE, arg2='', arg3='update_all'))) - self._log_debug(u'Scan update command sent to host: %s' % host) + self._log_debug(f'Scan update command sent to host: {host}') url_updatedb = '%s%s%s' % (base_url, 'metadata_database?', urlencode( dict(arg0='scanner_start', arg1=sickgear.NMJv2_DATABASE, arg2='background', arg3=''))) - self._log_debug(u'Try to mount network drive via url: %s' % host) + self._log_debug(f'Try to mount network drive via url: {host}') prereq = urllib.request.Request(url_scandir) req = urllib.request.Request(url_updatedb) @@ -127,24 +127,24 @@ class NMJv2Notifier(BaseNotifier): response2 = http_response_obj2.read() http_response_obj2.close() except IOError as e: - self._log_warning(u'Couldn\'t contact popcorn hour on host %s: %s' % (host, ex(e))) + self._log_warning(f'Couldn\'t contact popcorn hour on host {host}: {ex(e)}') return False try: et = etree.fromstring(response1) result1 = et.findtext('returnValue') except SyntaxError as e: - self._log_error(u'Unable to parse XML returned from the Popcorn Hour: update_scandir, %s' % ex(e)) + self._log_error(f'Unable to 
parse XML returned from the Popcorn Hour: update_scandir, {ex(e)}') return False try: et = etree.fromstring(response2) result2 = et.findtext('returnValue') except SyntaxError as e: - self._log_error(u'Unable to parse XML returned from the Popcorn Hour: scanner_start, %s' % ex(e)) + self._log_error(f'Unable to parse XML returned from the Popcorn Hour: scanner_start, {ex(e)}') return False - # if the result was a number then consider that an error + # if the result was a number, then consider that an error error_codes = ['8', '11', '22', '49', '50', '51', '60'] error_messages = ['Invalid parameter(s)/argument(s)', 'Invalid database path', @@ -155,15 +155,15 @@ class NMJv2Notifier(BaseNotifier): 'Read only file system'] if 0 < int(result1): index = error_codes.index(result1) - self._log_error(u'Popcorn Hour returned an error: %s' % (error_messages[index])) + self._log_error(f'Popcorn Hour returned an error: {error_messages[index]}') return False elif 0 < int(result2): index = error_codes.index(result2) - self._log_error(u'Popcorn Hour returned an error: %s' % (error_messages[index])) + self._log_error(f'Popcorn Hour returned an error: {error_messages[index]}') return False - self._log(u'NMJv2 started background scan') + self._log('NMJv2 started background scan') return True def _notify(self, host=None, **kwargs): diff --git a/sickgear/notifiers/plex.py b/sickgear/notifiers/plex.py index f9ad58cf..38e3417a 100644 --- a/sickgear/notifiers/plex.py +++ b/sickgear/notifiers/plex.py @@ -18,11 +18,10 @@ import re from .generic import Notifier import sickgear -from encodingKludge import fixStupidEncodings from exceptions_helper import ex -from _23 import b64encodestring, decode_str, etree, filter_iter, list_values, unquote_plus, urlencode -from six import iteritems, text_type, PY2 +from _23 import b64encodestring, decode_str, etree, unquote_plus, urlencode +from six import iteritems # noinspection PyUnresolvedReferences from six.moves import urllib @@ -46,34 +45,33 @@ class PLEXNotifier(Notifier): """ if not host: - self._log_error(u'No host specified, check your settings') + self._log_error('No host specified, check your settings') return False for key in command: - if not PY2 or type(command[key]) == text_type: - command[key] = command[key].encode('utf-8') + command[key] = command[key].encode('utf-8') enc_command = urlencode(command) - self._log_debug(u'Encoded API command: ' + enc_command) + self._log_debug(f'Encoded API command: {enc_command}') url = 'http://%s/xbmcCmds/xbmcHttp/?%s' % (host, enc_command) try: req = urllib.request.Request(url) if password: req.add_header('Authorization', 'Basic %s' % b64encodestring('%s:%s' % (username, password))) - self._log_debug(u'Contacting (with auth header) via url: ' + url) + self._log_debug(f'Contacting (with auth header) via url: {url}') else: - self._log_debug(u'Contacting via url: ' + url) + self._log_debug(f'Contacting via url: {url}') http_response_obj = urllib.request.urlopen(req) # PY2 http_response_obj has no `with` context manager result = decode_str(http_response_obj.read(), sickgear.SYS_ENCODING) http_response_obj.close() - self._log_debug(u'HTTP response: ' + result.replace('\n', '')) + self._log_debug('HTTP response: ' + result.replace('\n', '')) return True except (urllib.error.URLError, IOError) as e: - self._log_warning(u'Couldn\'t contact Plex at ' + fixStupidEncodings(url) + ' ' + ex(e)) + self._log_warning(f'Couldn\'t contact Plex at {url} {ex(e)}') return False @staticmethod @@ -115,7 +113,7 @@ class PLEXNotifier(Notifier): results = 
[] for cur_host in [x.strip() for x in host.split(',')]: cur_host = unquote_plus(cur_host) - self._log(u'Sending notification to \'%s\'' % cur_host) + self._log(f'Sending notification to \'{cur_host}\'') result = self._send_to_plex(command, cur_host, username, password) results += [self._choose(('%s Plex client ... %s' % (('Successful test notice sent to', 'Failed test for')[not result], cur_host)), result)] @@ -150,7 +148,7 @@ class PLEXNotifier(Notifier): """ host = self._choose(host, sickgear.PLEX_SERVER_HOST) if not host: - msg = u'No Plex Media Server host specified, check your settings' + msg = 'No Plex Media Server host specified, check your settings' self._log_debug(msg) return '%sFail: %s' % (('', '
')[self._testing], msg) @@ -161,7 +159,7 @@ class PLEXNotifier(Notifier): token_arg = None if username and password: - self._log_debug(u'Fetching plex.tv credentials for user: ' + username) + self._log_debug('Fetching plex.tv credentials for user: ' + username) req = urllib.request.Request('https://plex.tv/users/sign_in.xml', data=b'') req.add_header('Authorization', 'Basic %s' % b64encodestring('%s:%s' % (username, password))) req.add_header('X-Plex-Device-Name', 'SickGear') @@ -178,10 +176,10 @@ class PLEXNotifier(Notifier): token_arg = '?X-Plex-Token=' + token except urllib.error.URLError as e: - self._log(u'Error fetching credentials from plex.tv for user %s: %s' % (username, ex(e))) + self._log(f'Error fetching credentials from plex.tv for user {username}: {ex(e)}') except (ValueError, IndexError) as e: - self._log(u'Error parsing plex.tv response: ' + ex(e)) + self._log('Error parsing plex.tv response: ' + ex(e)) file_location = location if None is not location else '' if None is ep_obj else ep_obj.location host_validate = self._get_host_list(host, all([token_arg])) @@ -200,11 +198,11 @@ class PLEXNotifier(Notifier): sections = response.findall('.//Directory') if not sections: - self._log(u'Plex Media Server not running on: ' + cur_host) + self._log('Plex Media Server not running on: ' + cur_host) hosts_failed.append(cur_host) continue - for section in filter_iter(lambda x: 'show' == x.attrib['type'], sections): + for section in filter(lambda x: 'show' == x.attrib['type'], sections): if str(section.attrib['key']) in hosts_all: continue keyed_host = [(str(section.attrib['key']), cur_host)] @@ -234,32 +232,28 @@ class PLEXNotifier(Notifier): host_list.append(cur_host) else: hosts_failed.append(cur_host) - self._log_error(u'Error updating library section for Plex Media Server: %s' % cur_host) + self._log_error(f'Error updating library section for Plex Media Server: {cur_host}') if len(hosts_failed) == len(host_validate): - self._log(u'No successful Plex host updated') + self._log('No successful Plex host updated') return 'Fail no successful Plex host updated: %s' % ', '.join([host for host in hosts_failed]) else: hosts = ', '.join(set(host_list)) if len(hosts_match): - self._log(u'Hosts updating where TV section paths match the downloaded show: %s' % hosts) + self._log(f'Hosts updating where TV section paths match the downloaded show: {hosts}') else: - self._log(u'Updating all hosts with TV sections: %s' % hosts) + self._log(f'Updating all hosts with TV sections: {hosts}') return '' hosts = [ - host.replace('http://', '') for host in filter_iter(lambda x: x.startswith('http:'), - list_values(hosts_all))] + host.replace('http://', '') for host in filter(lambda x: x.startswith('http:'), list(hosts_all.values()))] secured = [ - host.replace('https://', '') for host in filter_iter(lambda x: x.startswith('https:'), - list_values(hosts_all))] + host.replace('https://', '') for host in filter(lambda x: x.startswith('https:'), list(hosts_all.values()))] failed = ', '.join([ - host.replace('http://', '') for host in filter_iter(lambda x: x.startswith('http:'), - hosts_failed)]) - failed_secured = ', '.join(filter_iter( + host.replace('http://', '') for host in filter(lambda x: x.startswith('http:'), hosts_failed)]) + failed_secured = ', '.join(filter( lambda x: x not in hosts, - [host.replace('https://', '') for host in filter_iter(lambda x: x.startswith('https:'), - hosts_failed)])) + [host.replace('https://', '') for host in filter(lambda x: x.startswith('https:'), hosts_failed)])) return '
' + '
'.join([result for result in [ ('', 'Fail: username/password when fetching credentials from plex.tv')[False is token_arg], diff --git a/sickgear/notifiers/prowl.py b/sickgear/notifiers/prowl.py index 876d66c0..c7b6a92b 100644 --- a/sickgear/notifiers/prowl.py +++ b/sickgear/notifiers/prowl.py @@ -52,7 +52,7 @@ class ProwlNotifier(Notifier): if 200 != response.status: if 401 == response.status: - result = u'Authentication, %s (bad API key?)' % response.reason + result = f'Authentication, {response.reason} (bad API key?)' else: result = 'Http response code "%s"' % response.status diff --git a/sickgear/notifiers/pushalot.py b/sickgear/notifiers/pushalot.py index 50772f09..3b28526e 100644 --- a/sickgear/notifiers/pushalot.py +++ b/sickgear/notifiers/pushalot.py @@ -30,7 +30,7 @@ class PushalotNotifier(Notifier): pushalot_auth_token = self._choose(pushalot_auth_token, sickgear.PUSHALOT_AUTHORIZATIONTOKEN) - self._log_debug(u'Title: %s, Message: %s, API: %s' % (title, body, pushalot_auth_token)) + self._log_debug(f'Title: {title}, Message: {body}, API: {pushalot_auth_token}') http_handler = moves.http_client.HTTPSConnection('pushalot.com') @@ -49,7 +49,7 @@ class PushalotNotifier(Notifier): if 200 != response.status: if 410 == response.status: - result = u'Authentication, %s (bad API key?)' % response.reason + result = f'Authentication, {response.reason} (bad API key?)' else: result = 'Http response code "%s"' % response.status diff --git a/sickgear/notifiers/pushbullet.py b/sickgear/notifiers/pushbullet.py index 1057bc68..bdda0bc5 100644 --- a/sickgear/notifiers/pushbullet.py +++ b/sickgear/notifiers/pushbullet.py @@ -69,7 +69,7 @@ class PushbulletNotifier(Notifier): result = resp.json()['error']['message'] except (BaseException, Exception): result = 'no response' - self._log_warning(u'%s' % result) + self._log_warning(f'{result}') return self._choose((True, 'Failed to send notification: %s' % result)[bool(result)], not bool(result)) diff --git a/sickgear/notifiers/pytivo.py b/sickgear/notifiers/pytivo.py index 9a512d3d..a380a347 100644 --- a/sickgear/notifiers/pytivo.py +++ b/sickgear/notifiers/pytivo.py @@ -18,8 +18,6 @@ import os from .generic import BaseNotifier import sickgear -# noinspection PyPep8Naming -import encodingKludge as ek from exceptions_helper import ex from _23 import urlencode @@ -51,7 +49,7 @@ class PyTivoNotifier(BaseNotifier): show_path = ep_obj.show_obj.location show_name = ep_obj.show_obj.name - root_show_and_season = ek.ek(os.path.dirname, ep_obj.location) + root_show_and_season = os.path.dirname(ep_obj.location) abs_path = ep_obj.location # Some show names have colons in them which are illegal in a path location, so strip them out. 
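# Illustrative sketch of the _23 compat-helper removal visible in the plex/emby/trakt
# hunks: filter_iter, map_list, list_values and list_keys give way to the py3 built-ins.
# The sample data below is hypothetical, not from SickGear.
notifiers = {'kodi': 'enabled', 'plex': 'disabled'}     # assumed example mapping

# before (py2/py3 compat):
#     enabled = filter_iter(lambda v: 'enabled' == v, list_values(notifiers))
# after (py3-only): the built-ins return iterators/views directly
enabled = list(filter(lambda v: 'enabled' == v, notifiers.values()))
names = list(notifiers)                                 # replaces list_keys(notifiers)
version = list(map(int, '6.3.0.0'.split('.')))          # replaces map_list(int, ...)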
@@ -68,7 +66,7 @@ class PyTivoNotifier(BaseNotifier): request_url = 'http://%s/TiVoConnect?%s' % (host, urlencode( dict(Command='Push', Container=container, File=file_path, tsn=tsn))) - self._log_debug(u'Requesting ' + request_url) + self._log_debug(f'Requesting {request_url}') request = urllib.request.Request(request_url) @@ -78,17 +76,17 @@ class PyTivoNotifier(BaseNotifier): except urllib.error.HTTPError as e: if hasattr(e, 'reason'): - self._log_error(u'Error, failed to reach a server - ' + e.reason) + self._log_error('Error, failed to reach a server - ' + e.reason) return False elif hasattr(e, 'code'): - self._log_error(u'Error, the server couldn\'t fulfill the request - ' + e.code) + self._log_error('Error, the server couldn\'t fulfill the request - ' + e.code) return False except (BaseException, Exception) as e: - self._log_error(u'Unknown exception: ' + ex(e)) + self._log_error(f'Unknown exception: {ex(e)}') return False - self._log(u'Successfully requested transfer of file') + self._log('Successfully requested transfer of file') return True diff --git a/sickgear/notifiers/synoindex.py b/sickgear/notifiers/synoindex.py index 8e0c9fbf..32f6e089 100644 --- a/sickgear/notifiers/synoindex.py +++ b/sickgear/notifiers/synoindex.py @@ -18,8 +18,6 @@ import os from .generic import BaseNotifier -# noinspection PyPep8Naming -import encodingKludge as ek from exceptions_helper import ex from sg_helpers import cmdline_runner @@ -34,18 +32,17 @@ class SynoIndexNotifier(BaseNotifier): self._move_object(old_file, new_file) def _cmdline_run(self, synoindex_cmd): - self._log_debug(u'Executing command ' + str(synoindex_cmd)) - self._log_debug(u'Absolute path to command: ' + ek.ek(os.path.abspath, synoindex_cmd[0])) + self._log_debug(f'Executing command {str(synoindex_cmd)}') + self._log_debug(f'Absolute path to command: {os.path.abspath(synoindex_cmd[0])}') try: output, err, exit_status = cmdline_runner(synoindex_cmd) - self._log_debug(u'Script result: %s' % output) + self._log_debug(f'Script result: {output}') except (BaseException, Exception) as e: self._log_error('Unable to run synoindex: %s' % ex(e)) def _move_object(self, old_path, new_path): if self.is_enabled(): - self._cmdline_run(['/usr/syno/bin/synoindex', '-N', ek.ek(os.path.abspath, new_path), - ek.ek(os.path.abspath, old_path)]) + self._cmdline_run(['/usr/syno/bin/synoindex', '-N', os.path.abspath(new_path), os.path.abspath(old_path)]) def deleteFolder(self, cur_path): self._make_object('-D', cur_path) @@ -61,7 +58,7 @@ class SynoIndexNotifier(BaseNotifier): def _make_object(self, cmd_arg, cur_path): if self.is_enabled(): - self._cmdline_run(['/usr/syno/bin/synoindex', cmd_arg, ek.ek(os.path.abspath, cur_path)]) + self._cmdline_run(['/usr/syno/bin/synoindex', cmd_arg, os.path.abspath(cur_path)]) def update_library(self, ep_obj=None, **kwargs): self.addFile(ep_obj.location) diff --git a/sickgear/notifiers/synologynotifier.py b/sickgear/notifiers/synologynotifier.py index bc6a6d9b..51242a04 100644 --- a/sickgear/notifiers/synologynotifier.py +++ b/sickgear/notifiers/synologynotifier.py @@ -18,8 +18,6 @@ import os from .generic import Notifier -# noinspection PyPep8Naming -import encodingKludge as ek from exceptions_helper import ex from sg_helpers import cmdline_runner @@ -29,11 +27,11 @@ class SynologyNotifier(Notifier): def _notify(self, title, body, **kwargs): synodsmnotify_cmd = ['/usr/syno/bin/synodsmnotify', '@administrators', title, body] - self._log(u'Executing command ' + str(synodsmnotify_cmd)) - self._log_debug(u'Absolute path 
to command: ' + ek.ek(os.path.abspath, synodsmnotify_cmd[0])) + self._log(f'Executing command {synodsmnotify_cmd}') + self._log_debug(f'Absolute path to command: {os.path.abspath(synodsmnotify_cmd[0])}') try: output, err, exit_status = cmdline_runner(synodsmnotify_cmd) - self._log_debug(u'Script result: %s' % output) + self._log_debug(f'Script result: {output}') except (BaseException, Exception) as e: self._log('Unable to run synodsmnotify: %s' % ex(e)) diff --git a/sickgear/notifiers/telegram.py b/sickgear/notifiers/telegram.py index 6cd31518..96d86319 100644 --- a/sickgear/notifiers/telegram.py +++ b/sickgear/notifiers/telegram.py @@ -22,8 +22,6 @@ import re from ..common import USER_AGENT from .generic import Notifier -# noinspection PyPep8Naming -import encodingKludge as ek from exceptions_helper import ex import sickgear from sickgear.image_cache import ImageCache @@ -42,8 +40,8 @@ class TelegramNotifier(Notifier): access_token = self._choose(access_token, sickgear.TELEGRAM_ACCESS_TOKEN) cid = self._choose(chatid, sickgear.TELEGRAM_CHATID) try: - msg = self._body_only(('' if not title else u'%s' % title), body) - msg = msg.replace(u'%s: ' % title, u'%s:\r\n' % ('SickGear ' + title, title)[use_icon]) + msg = self._body_only(('' if not title else f'{title}'), body) + msg = msg.replace(f'{title}: ', f'{("SickGear " + title, title)[use_icon]}:\r\n') # HTML spaces ( ) and tabs ( ) aren't supported # See https://core.telegram.org/bots/api#html-style msg = re.sub('(?i) ?', ' ', msg) @@ -51,11 +49,11 @@ class TelegramNotifier(Notifier): msg = re.sub('(?i) ?', ' ', msg) if use_icon: - image_path = ek.ek(os.path.join, sickgear.PROG_DIR, 'gui', 'slick', 'images', 'banner_thumb.jpg') + image_path = os.path.join(sickgear.PROG_DIR, 'gui', 'slick', 'images', 'banner_thumb.jpg') if not self._testing: show_obj = ep_obj.show_obj banner_path = ImageCache().banner_thumb_path(show_obj.tvid, show_obj.prodid) - if ek.ek(os.path.isfile, banner_path): + if os.path.isfile(banner_path): image_path = banner_path with open(image_path, 'rb') as f: diff --git a/sickgear/notifiers/trakt.py b/sickgear/notifiers/trakt.py index dcd2a28a..cb24c4ff 100644 --- a/sickgear/notifiers/trakt.py +++ b/sickgear/notifiers/trakt.py @@ -22,7 +22,6 @@ import sickgear from lib.api_trakt import TraktAPI, exceptions from exceptions_helper import ConnectionSkipException -from _23 import list_keys from six import iteritems # noinspection PyUnreachableCode @@ -38,7 +37,7 @@ class TraktNotifier(BaseNotifier): def is_enabled_library(cls): if sickgear.TRAKT_ACCOUNTS: for tid, locations in iteritems(sickgear.TRAKT_UPDATE_COLLECTION): - if tid in list_keys(sickgear.TRAKT_ACCOUNTS): + if tid in list(sickgear.TRAKT_ACCOUNTS): return True return False @@ -89,7 +88,7 @@ class TraktNotifier(BaseNotifier): data['shows'][0]['seasons'][0]['episodes'].append({'number': cur_ep_obj.episode}) for tid, locations in iteritems(sickgear.TRAKT_UPDATE_COLLECTION): - if tid not in list_keys(sickgear.TRAKT_ACCOUNTS): + if tid not in list(sickgear.TRAKT_ACCOUNTS): continue for loc in locations: if not ep_obj.location.startswith('%s%s' % (loc.rstrip(os.path.sep), os.path.sep)): diff --git a/sickgear/notifiers/xbmc.py b/sickgear/notifiers/xbmc.py index 3b18c51c..8dcae9ce 100644 --- a/sickgear/notifiers/xbmc.py +++ b/sickgear/notifiers/xbmc.py @@ -20,11 +20,9 @@ import time from .generic import Notifier import sickgear from exceptions_helper import ex -from encodingKludge import fixStupidEncodings from json_helper import json_dumps, json_load from _23 import 
b64encodestring, decode_str, etree, quote, unquote, unquote_plus, urlencode -from six import PY2, text_type # noinspection PyUnresolvedReferences from six.moves import urllib @@ -104,26 +102,26 @@ class XBMCNotifier(Notifier): """ - self._log(u'Sending request to update library for host: "%s"' % host) + self._log(f'Sending request to update library for host: "{host}"') xbmcapi = self._get_xbmc_version(host, sickgear.XBMC_USERNAME, sickgear.XBMC_PASSWORD) if xbmcapi: if 4 >= xbmcapi: # try to update for just the show, if it fails, do full update if enabled if not self._update_library_http(host, show_name) and sickgear.XBMC_UPDATE_FULL: - self._log_warning(u'Single show update failed, falling back to full update') + self._log_warning('Single show update failed, falling back to full update') return self._update_library_http(host) else: return True else: # try to update for just the show, if it fails, do full update if enabled if not self._update_library_json(host, show_name) and sickgear.XBMC_UPDATE_FULL: - self._log_warning(u'Single show update failed, falling back to full update') + self._log_warning('Single show update failed, falling back to full update') return self._update_library_json(host) else: return True - self._log_debug(u'Failed to detect version for "%s", check configuration and try again' % host) + self._log_debug(f'Failed to detect version for "{host}", check configuration and try again') return False # ############################################################################# @@ -144,18 +142,17 @@ class XBMCNotifier(Notifier): """ if not host: - self._log_debug(u'No host passed, aborting update') + self._log_debug('No host passed, aborting update') return False username = self._choose(username, sickgear.XBMC_USERNAME) password = self._choose(password, sickgear.XBMC_PASSWORD) for key in command: - if not PY2 or type(command[key]) == text_type: - command[key] = command[key].encode('utf-8') + command[key] = command[key].encode('utf-8') enc_command = urlencode(command) - self._log_debug(u'Encoded API command: ' + enc_command) + self._log_debug('Encoded API command: ' + enc_command) url = 'http://%s/xbmcCmds/xbmcHttp/?%s' % (host, enc_command) try: @@ -163,19 +160,19 @@ class XBMCNotifier(Notifier): # if we have a password, use authentication if password: req.add_header('Authorization', 'Basic %s' % b64encodestring('%s:%s' % (username, password))) - self._log_debug(u'Contacting (with auth header) via url: ' + fixStupidEncodings(url)) + self._log_debug(f'Contacting (with auth header) via url: {url}') else: - self._log_debug(u'Contacting via url: ' + fixStupidEncodings(url)) + self._log_debug(f'Contacting via url: {url}') http_response_obj = urllib.request.urlopen(req) # PY2 http_response_obj has no `with` context manager result = decode_str(http_response_obj.read(), sickgear.SYS_ENCODING) http_response_obj.close() - self._log_debug(u'HTTP response: ' + result.replace('\n', '')) + self._log_debug('HTTP response: ' + result.replace('\n', '')) return result except (urllib.error.URLError, IOError) as e: - self._log_warning(u'Couldn\'t contact HTTP at %s %s' % (fixStupidEncodings(url), ex(e))) + self._log_warning(f'Couldn\'t contact HTTP at {url} {ex(e)}') return False def _update_library_http(self, host=None, show_name=None): @@ -194,14 +191,14 @@ class XBMCNotifier(Notifier): """ if not host: - self._log_debug(u'No host passed, aborting update') + self._log_debug('No host passed, aborting update') return False - self._log_debug(u'Updating XMBC library via HTTP method for host: ' + 
host) + self._log_debug('Updating XMBC library via HTTP method for host: ' + host) # if we're doing per-show if show_name: - self._log_debug(u'Updating library via HTTP method for show ' + show_name) + self._log_debug('Updating library via HTTP method for show ' + show_name) # noinspection SqlResolve path_sql = 'select path.strPath' \ @@ -227,30 +224,30 @@ class XBMCNotifier(Notifier): self._send_to_xbmc(reset_command, host) if not sql_xml: - self._log_debug(u'Invalid response for ' + show_name + ' on ' + host) + self._log_debug('Invalid response for ' + show_name + ' on ' + host) return False enc_sql_xml = quote(sql_xml, ':\\/<>') try: et = etree.fromstring(enc_sql_xml) except SyntaxError as e: - self._log_error(u'Unable to parse XML response: ' + ex(e)) + self._log_error(f'Unable to parse XML response: {ex(e)}') return False paths = et.findall('.//field') if not paths: - self._log_debug(u'No valid paths found for ' + show_name + ' on ' + host) + self._log_debug('No valid paths found for ' + show_name + ' on ' + host) return False for path in paths: # we do not need it double-encoded, gawd this is dumb un_enc_path = decode_str(unquote(path.text), sickgear.SYS_ENCODING) - self._log_debug(u'Updating ' + show_name + ' on ' + host + ' at ' + un_enc_path) + self._log_debug('Updating ' + show_name + ' on ' + host + ' at ' + un_enc_path) update_command = dict(command='ExecBuiltIn', parameter='XBMC.updatelibrary(video, %s)' % un_enc_path) request = self._send_to_xbmc(update_command, host) if not request: - self._log_error(u'Update of show directory failed on ' + show_name + self._log_error('Update of show directory failed on ' + show_name + ' on ' + host + ' at ' + un_enc_path) return False # sleep for a few seconds just to be sure xbmc has a chance to finish each directory @@ -258,12 +255,12 @@ class XBMCNotifier(Notifier): time.sleep(5) # do a full update if requested else: - self._log(u'Doing full library update on host: ' + host) + self._log('Doing full library update on host: ' + host) update_command = {'command': 'ExecBuiltIn', 'parameter': 'XBMC.updatelibrary(video)'} request = self._send_to_xbmc(update_command, host) if not request: - self._log_error(u'Full Library update failed on: ' + host) + self._log_error('Full Library update failed on: ' + host) return False return True @@ -287,14 +284,14 @@ class XBMCNotifier(Notifier): """ if not host: - self._log_debug(u'No host passed, aborting update') + self._log_debug('No host passed, aborting update') return False username = self._choose(username, sickgear.XBMC_USERNAME) password = self._choose(password, sickgear.XBMC_PASSWORD) command = command.encode('utf-8') - self._log_debug(u'JSON command: ' + command) + self._log_debug('JSON command: ' + command) url = 'http://%s/jsonrpc' % host try: @@ -303,28 +300,28 @@ class XBMCNotifier(Notifier): # if we have a password, use authentication if password: req.add_header('Authorization', 'Basic %s' % b64encodestring('%s:%s' % (username, password))) - self._log_debug(u'Contacting (with auth header) via url: ' + fixStupidEncodings(url)) + self._log_debug(f'Contacting (with auth header) via url: {url}') else: - self._log_debug(u'Contacting via url: ' + fixStupidEncodings(url)) + self._log_debug(f'Contacting via url: {url}') try: http_response_obj = urllib.request.urlopen(req) # PY2 http_response_obj has no `with` context manager except urllib.error.URLError as e: - self._log_warning(u'Error while trying to retrieve API version for "%s": %s' % (host, ex(e))) + self._log_warning(f'Error while trying to 
retrieve API version for "{host}": {ex(e)}') return False # parse the json result try: result = json_load(http_response_obj) http_response_obj.close() - self._log_debug(u'JSON response: ' + str(result)) + self._log_debug(f'JSON response: {result}') return result # need to return response for parsing except ValueError: - self._log_warning(u'Unable to decode JSON: ' + http_response_obj) + self._log_warning('Unable to decode JSON: ' + http_response_obj) return False except IOError as e: - self._log_warning(u'Couldn\'t contact JSON API at ' + fixStupidEncodings(url) + ' ' + ex(e)) + self._log_warning(f'Couldn\'t contact JSON API at {url} {ex(e)}') return False def _update_library_json(self, host=None, show_name=None): @@ -343,15 +340,15 @@ class XBMCNotifier(Notifier): """ if not host: - self._log_debug(u'No host passed, aborting update') + self._log_debug('No host passed, aborting update') return False - self._log(u'Updating XMBC library via JSON method for host: ' + host) + self._log('Updating XMBC library via JSON method for host: ' + host) # if we're doing per-show if show_name: tvshowid = -1 - self._log_debug(u'Updating library via JSON method for show ' + show_name) + self._log_debug('Updating library via JSON method for show ' + show_name) # get tvshowid by showName shows_command = '{"jsonrpc":"2.0","method":"VideoLibrary.GetTVShows","id":1}' @@ -360,7 +357,7 @@ class XBMCNotifier(Notifier): if shows_response and 'result' in shows_response and 'tvshows' in shows_response['result']: shows = shows_response['result']['tvshows'] else: - self._log_debug(u'No tvshows in TV show list') + self._log_debug('No tvshows in TV show list') return False for show in shows: @@ -373,7 +370,7 @@ class XBMCNotifier(Notifier): # we didn't find the show (exact match), thus revert to just doing a full update if enabled if -1 == tvshowid: - self._log_debug(u'Exact show name not matched in TV show list') + self._log_debug('Exact show name not matched in TV show list') return False # lookup tv-show path @@ -382,19 +379,19 @@ class XBMCNotifier(Notifier): path_response = self._send_to_xbmc_json(path_command, host) path = path_response['result']['tvshowdetails']['file'] - self._log_debug(u'Received Show: ' + show_name + ' with ID: ' + str(tvshowid) + ' Path: ' + path) + self._log_debug('Received Show: ' + show_name + ' with ID: ' + str(tvshowid) + ' Path: ' + path) if 1 > len(path): - self._log_warning(u'No valid path found for ' + show_name + ' with ID: ' + self._log_warning('No valid path found for ' + show_name + ' with ID: ' + str(tvshowid) + ' on ' + host) return False - self._log_debug(u'Updating ' + show_name + ' on ' + host + ' at ' + path) + self._log_debug('Updating ' + show_name + ' on ' + host + ' at ' + path) update_command = '{"jsonrpc":"2.0","method":"VideoLibrary.Scan","params":{"directory":%s},"id":1}' % ( json_dumps(path)) request = self._send_to_xbmc_json(update_command, host) if not request: - self._log_error(u'Update of show directory failed on ' + show_name + ' on ' + host + ' at ' + path) + self._log_error('Update of show directory failed on ' + show_name + ' on ' + host + ' at ' + path) return False # catch if there was an error in the returned request @@ -402,18 +399,18 @@ class XBMCNotifier(Notifier): for r in request: if 'error' in r: self._log_error( - u'Error while attempting to update show directory for ' + show_name + 'Error while attempting to update show directory for ' + show_name + ' on ' + host + ' at ' + path) return False # do a full update if requested else: - 
self._log(u'Doing Full Library update on host: ' + host) + self._log('Doing Full Library update on host: ' + host) update_command = '{"jsonrpc":"2.0","method":"VideoLibrary.Scan","id":1}' request = self._send_to_xbmc_json(update_command, host, sickgear.XBMC_USERNAME, sickgear.XBMC_PASSWORD) if not request: - self._log_error(u'Full Library update failed on: ' + host) + self._log_error('Full Library update failed on: ' + host) return False return True @@ -444,12 +441,12 @@ class XBMCNotifier(Notifier): for cur_host in [x.strip() for x in hosts.split(',')]: cur_host = unquote_plus(cur_host) - self._log(u'Sending notification to "%s"' % cur_host) + self._log(f'Sending notification to "{cur_host}"') xbmcapi = self._get_xbmc_version(cur_host, username, password) if xbmcapi: if 4 >= xbmcapi: - self._log_debug(u'Detected version <= 11, using HTTP API') + self._log_debug('Detected version <= 11, using HTTP API') command = dict(command='ExecBuiltIn', parameter='Notification(' + title.encode('utf-8') + ',' + body.encode('utf-8') + ')') notify_result = self._send_to_xbmc(command, cur_host, username, password) @@ -457,7 +454,7 @@ class XBMCNotifier(Notifier): result += [cur_host + ':' + str(notify_result)] success |= 'OK' in notify_result or success else: - self._log_debug(u'Detected version >= 12, using JSON API') + self._log_debug('Detected version >= 12, using JSON API') command = '{"jsonrpc":"2.0","method":"GUI.ShowNotification",' \ '"params":{"title":"%s","message":"%s", "image": "%s"},"id":1}' % \ (title.encode('utf-8'), body.encode('utf-8'), self._sg_logo_url) @@ -467,7 +464,7 @@ class XBMCNotifier(Notifier): success |= 'OK' in notify_result or success else: if sickgear.XBMC_ALWAYS_ON or self._testing: - self._log_error(u'Failed to detect version for "%s", check configuration and try again' % cur_host) + self._log_error(f'Failed to detect version for "{cur_host}", check configuration and try again') result += [cur_host + ':No response'] success = False @@ -491,7 +488,7 @@ class XBMCNotifier(Notifier): """ if not sickgear.XBMC_HOST: - self._log_debug(u'No hosts specified, check your settings') + self._log_debug('No hosts specified, check your settings') return False # either update each host, or only attempt to update until one successful result @@ -499,11 +496,11 @@ class XBMCNotifier(Notifier): for host in [x.strip() for x in sickgear.XBMC_HOST.split(',')]: if self._send_update_library(host, show_name): if sickgear.XBMC_UPDATE_ONLYFIRST: - self._log_debug(u'Successfully updated "%s", stopped sending update library commands' % host) + self._log_debug(f'Successfully updated "{host}", stopped sending update library commands') return True else: if sickgear.XBMC_ALWAYS_ON: - self._log_error(u'Failed to detect version for "%s", check configuration and try again' % host) + self._log_error(f'Failed to detect version for "{host}", check configuration and try again') result = result + 1 # needed for the 'update xbmc' submenu command diff --git a/sickgear/nzbSplitter.py b/sickgear/nzbSplitter.py index d1447e8b..c4334544 100644 --- a/sickgear/nzbSplitter.py +++ b/sickgear/nzbSplitter.py @@ -21,8 +21,6 @@ import re from lxml_etree import etree -# noinspection PyPep8Naming -import encodingKludge as ek from exceptions_helper import ex import sickgear @@ -42,7 +40,7 @@ SUBJECT_FN_MATCHER = re.compile(r'"([^"]*)"') RE_NORMAL_NAME = re.compile(r'\.\w{1,5}$') -def platform_encode(p): +def _platform_encode(p): """ Return Unicode name, if not already Unicode, decode with UTF-8 or latin1 """ try: return 
decode_str(p) @@ -50,17 +48,17 @@ def platform_encode(p): return decode_str(p, sickgear.SYS_ENCODING, errors='replace').replace('?', '!') -def name_extractor(subject): +def _name_extractor(subject): """ Try to extract a file name from a subject line, return `subject` if in doubt """ result = subject for name in re.findall(SUBJECT_FN_MATCHER, subject): name = name.strip(' "') if name and RE_NORMAL_NAME.search(name): result = name - return platform_encode(result) + return _platform_encode(result) -def getSeasonNZBs(name, url_data, season): +def _get_season_nzbs(name, url_data, season): """ :param name: name @@ -73,31 +71,31 @@ def getSeasonNZBs(name, url_data, season): :rtype: Tuple[Dict, AnyStr] """ try: - showXML = etree.ElementTree(etree.XML(url_data)) + show_xml = etree.ElementTree(etree.XML(url_data)) except SyntaxError: - logger.log(u'Unable to parse the XML of %s, not splitting it' % name, logger.ERROR) + logger.error(f'Unable to parse the XML of {name}, not splitting it') return {}, '' filename = name.replace('.nzb', '') - nzbElement = showXML.getroot() + nzb_element = show_xml.getroot() regex = r'([\w\._\ ]+)[\._ ]S%02d[\._ ]([\w\._\-\ ]+)' % season - sceneNameMatch = re.search(regex, filename, re.I) - if sceneNameMatch: - showName, qualitySection = sceneNameMatch.groups() + scene_name_match = re.search(regex, filename, re.I) + if scene_name_match: + show_name, quality_section = scene_name_match.groups() else: - logger.log('%s - Not a valid season pack scene name. If it\'s a valid one, log a bug.' % name, logger.ERROR) + logger.error('%s - Not a valid season pack scene name. If it\'s a valid one, log a bug.' % name) return {}, '' - regex = r'(%s[\._]S%02d(?:[E0-9]+)\.[\w\._]+)' % (re.escape(showName), season) + regex = r'(%s[\._]S%02d(?:[E0-9]+)\.[\w\._]+)' % (re.escape(show_name), season) regex = regex.replace(' ', '.') ep_files = {} xmlns = None - for cur_file in list(nzbElement): + for cur_file in list(nzb_element): if not isinstance(cur_file.tag, string_types): continue xmlns_match = re.match(r'[{](https?://[A-Za-z0-9_./]+/nzb)[}]file', cur_file.tag) @@ -110,15 +108,15 @@ def getSeasonNZBs(name, url_data, season): # print curFile.get("subject"), "doesn't match", regex continue cur_ep = match.group(1) - fn = name_extractor(cur_file.get('subject', '')) + fn = _name_extractor(cur_file.get('subject', '')) if cur_ep == re.sub(r'\+\d+\.par2$', '', fn, flags=re.I): - bn, ext = ek.ek(os.path.splitext, fn) + bn, ext = os.path.splitext(fn) cur_ep = re.sub(r'\.(part\d+|vol\d+(\+\d+)?)$', '', bn, flags=re.I) - bn, ext = ek.ek(os.path.splitext, cur_ep) + bn, ext = os.path.splitext(cur_ep) if isinstance(ext, string_types) \ and re.search(r'^\.(nzb|r\d{2}|rar|7z|zip|par2|vol\d+|nfo|srt|txt|bat|sh|mkv|mp4|avi|wmv)$', ext, flags=re.I): - logger.log('Unable to split %s into episode nzb\'s' % name, logger.WARNING) + logger.warning('Unable to split %s into episode nzb\'s' % name) return {}, '' if cur_ep not in ep_files: ep_files[cur_ep] = [cur_file] @@ -128,7 +126,7 @@ def getSeasonNZBs(name, url_data, season): return ep_files, xmlns -def createNZBString(file_elements, xmlns): +def _create_nzb_string(file_elements, xmlns): """ :param file_elements: first element @@ -136,17 +134,17 @@ def createNZBString(file_elements, xmlns): :return: :rtype: AnyStr """ - rootElement = etree.Element("nzb") + root_element = etree.Element("nzb") if xmlns: - rootElement.set("xmlns", xmlns) + root_element.set("xmlns", xmlns) for curFile in file_elements: - rootElement.append(stripNS(curFile, xmlns)) + 
root_element.append(_strip_ns(curFile, xmlns)) - return etree.tostring(rootElement, encoding='utf-8') + return etree.tostring(root_element, encoding='utf-8') -def saveNZB(nzb_name, nzb_string): +def _save_nzb(nzb_name, nzb_string): """ :param nzb_name: nzb name @@ -155,22 +153,22 @@ def saveNZB(nzb_name, nzb_string): :type nzb_string: AnyStr """ try: - with ek.ek(open, nzb_name + '.nzb', 'w') as nzb_fh: + with open(nzb_name + '.nzb', 'w') as nzb_fh: nzb_fh.write(nzb_string) except EnvironmentError as e: - logger.log(u'Unable to save NZB: ' + ex(e), logger.ERROR) + logger.error(f'Unable to save NZB: {ex(e)}') -def stripNS(element, ns): +def _strip_ns(element, ns): element.tag = element.tag.replace("{" + ns + "}", "") for curChild in list(element): - stripNS(curChild, ns) + _strip_ns(curChild, ns) return element -def splitResult(result): +def split_result(result): """ :param result: search result @@ -180,7 +178,7 @@ def splitResult(result): """ resp = helpers.get_url(result.url, failure_monitor=False) if None is resp: - logger.log(u'Unable to load url %s, can\'t download season NZB' % result.url, logger.ERROR) + logger.error(f'Unable to load url {result.url}, can\'t download season NZB') return False # parse the season ep name @@ -188,50 +186,50 @@ def splitResult(result): np = NameParser(False, show_obj=result.show_obj) parse_result = np.parse(result.name) except InvalidNameException: - logger.log(u'Unable to parse the filename %s into a valid episode' % result.name, logger.DEBUG) + logger.debug(f'Unable to parse the filename {result.name} into a valid episode') return False except InvalidShowException: - logger.log(u'Unable to parse the filename %s into a valid show' % result.name, logger.DEBUG) + logger.debug(f'Unable to parse the filename {result.name} into a valid show') return False # bust it up season = parse_result.season_number if None is not parse_result.season_number else 1 - separate_nzbs, xmlns = getSeasonNZBs(result.name, resp, season) + separate_nzbs, xmlns = _get_season_nzbs(result.name, resp, season) result_list = [] for new_nzb in separate_nzbs: - logger.log(u'Split out %s from %s' % (new_nzb, result.name), logger.DEBUG) + logger.debug(f'Split out {new_nzb} from {result.name}') # parse the name try: np = NameParser(False, show_obj=result.show_obj) parse_result = np.parse(new_nzb) except InvalidNameException: - logger.log(u"Unable to parse the filename %s into a valid episode" % new_nzb, logger.DEBUG) + logger.debug(f'Unable to parse the filename {new_nzb} into a valid episode') return False except InvalidShowException: - logger.log(u"Unable to parse the filename %s into a valid show" % new_nzb, logger.DEBUG) + logger.debug(f'Unable to parse the filename {new_nzb} into a valid show') return False # make sure the result is sane if (None is not parse_result.season_number and season != parse_result.season_number) \ or (None is parse_result.season_number and 1 != season): - logger.log(u'Found %s inside %s but it doesn\'t seem to belong to the same season, ignoring it' - % (new_nzb, result.name), logger.WARNING) + logger.warning(f'Found {new_nzb} inside {result.name} but it doesn\'t seem to belong to the same season,' + f' ignoring it') continue elif 0 == len(parse_result.episode_numbers): - logger.log(u'Found %s inside %s but it doesn\'t seem to be a valid episode NZB, ignoring it' - % (new_nzb, result.name), logger.WARNING) + logger.warning(f'Found {new_nzb} inside {result.name} but it doesn\'t seem to be a valid episode NZB,' + f' ignoring it') continue want_ep = True for 
ep_no in parse_result.episode_numbers: if not result.show_obj.want_episode(season, ep_no, result.quality): - logger.log(u'Ignoring result %s because we don\'t want an episode that is %s' - % (new_nzb, Quality.qualityStrings[result.quality]), logger.DEBUG) + logger.debug(f'Ignoring result {new_nzb} because we don\'t want an episode that is' + f' {Quality.qualityStrings[result.quality]}') want_ep = False break if not want_ep: @@ -248,7 +246,7 @@ def splitResult(result): nzb_result.provider = result.provider nzb_result.quality = result.quality nzb_result.show_obj = result.show_obj - nzb_result.extraInfo = [createNZBString(separate_nzbs[new_nzb], xmlns)] + nzb_result.extraInfo = [_create_nzb_string(separate_nzbs[new_nzb], xmlns)] result_list.append(nzb_result) diff --git a/sickgear/nzbget.py b/sickgear/nzbget.py index 8d54fad2..703b45bf 100644 --- a/sickgear/nzbget.py +++ b/sickgear/nzbget.py @@ -34,7 +34,7 @@ def test_nzbget(host, use_https, username, password, timeout=300): result = False if not host: msg = 'No NZBGet host found. Please configure it' - logger.log(msg, logger.ERROR) + logger.error(msg) return result, msg, None url = 'http%(scheme)s://%(username)s:%(password)s@%(host)s/xmlrpc' % { @@ -44,24 +44,24 @@ def test_nzbget(host, use_https, username, password, timeout=300): try: msg = 'Success. Connected' if rpc_client.writelog('INFO', 'SickGear connected as a test'): - logger.log(msg, logger.DEBUG) + logger.debug(msg) else: msg += ', but unable to send a message' - logger.log(msg, logger.ERROR) + logger.error(msg) result = True - logger.log(u'NZBGet URL: %s' % url, logger.DEBUG) + logger.debug(f'NZBGet URL: {url}') except moves.http_client.socket.error: msg = 'Please check NZBGet host and port (if it is running). NZBGet is not responding to these values' - logger.log(msg, logger.ERROR) + logger.error(msg) except moves.xmlrpc_client.ProtocolError as e: if 'Unauthorized' == e.errmsg: msg = 'NZBGet username or password is incorrect' - logger.log(msg, logger.ERROR) + logger.error(msg) else: msg = 'Protocol Error: %s' % e.errmsg - logger.log(msg, logger.ERROR) + logger.error(msg) return result, msg, rpc_client @@ -114,7 +114,7 @@ def send_nzb(search_result): return result nzbcontent64 = b64encodestring(data, keep_eol=True) - logger.log(u'Sending NZB to NZBGet: %s' % search_result.name) + logger.log(f'Sending NZB to NZBGet: {search_result.name}') try: # Find out if nzbget supports priority (Version 9.0+), old versions beginning with a 0.x will use the old cmd @@ -161,11 +161,11 @@ def send_nzb(search_result): nzbget_prio, False, search_result.url) if nzbget_result: - logger.log(u'NZB sent to NZBGet successfully', logger.DEBUG) + logger.debug('NZB sent to NZBGet successfully') result = True else: - logger.log(u'NZBGet could not add %s.nzb to the queue' % search_result.name, logger.ERROR) + logger.error(f'NZBGet could not add {search_result.name}.nzb to the queue') except (BaseException, Exception): - logger.log(u'Connect Error to NZBGet: could not add %s.nzb to the queue' % search_result.name, logger.ERROR) + logger.error(f'Connect Error to NZBGet: could not add {search_result.name}.nzb to the queue') return result diff --git a/sickgear/people_queue.py b/sickgear/people_queue.py index 77a3716e..0e99721c 100644 --- a/sickgear/people_queue.py +++ b/sickgear/people_queue.py @@ -154,7 +154,7 @@ class PeopleQueueActions(object): class PeopleQueueItem(generic_queue.QueueItem): def __init__(self, action_id, show_obj, uid=None, force=False, **kwargs): - # type: (integer_types, TVShow, AnyStr, bool, 
Dict) -> PeopleQueueItem + # type: (integer_types, TVShow, AnyStr, bool, Dict) -> None """ :param action_id: @@ -172,7 +172,7 @@ class PeopleQueueItem(generic_queue.QueueItem): class CastQueueItem(PeopleQueueItem): def __init__(self, show_obj, show_info_cast=None, uid=None, force=False, scheduled_update=False, switch=False, **kwargs): - # type: (TVShow, CastList, AnyStr, bool, bool, bool, Dict) -> CastQueueItem + # type: (TVShow, CastList, AnyStr, bool, bool, bool, Dict) -> None """ :param show_obj: show obj diff --git a/sickgear/piper.py b/sickgear/piper.py index ac5f4c26..a70eb146 100644 --- a/sickgear/piper.py +++ b/sickgear/piper.py @@ -1,13 +1,5 @@ import sys -# noinspection PyPep8Naming -import encodingKludge as ek - -if ek.EXIT_BAD_ENCODING: - print('Sorry, you MUST add the SickGear folder to the PYTHONPATH environment variable') - print('or find another way to force Python to use %s for string encoding.' % ek.SYS_ENCODING) - sys.exit(1) - # ################################# # Sanity check passed, can continue # ################################# @@ -18,8 +10,7 @@ import re from json_helper import json_loads from sg_helpers import cmdline_runner, is_virtualenv -from _23 import filter_list, ordered_dict -from six import iteritems, PY2 +from six import iteritems # noinspection PyUnreachableCode if False: @@ -32,7 +23,7 @@ def is_pip_ok(): :return: True if pip is ok """ - pip_ok = '/' != ek.ek(os.path.expanduser, '~') + pip_ok = '/' != os.path.expanduser('~') if pip_ok: pip_version, _, _ = _get_pip_version() if not pip_version: @@ -59,10 +50,6 @@ def run_pip(pip_cmd, suppress_stderr=False): pip_cmd += ['--progress-bar', 'off'] new_pip_arg = ['--no-python-version-warning'] - if PY2: - pip_version, _, _ = _get_pip_version() - if pip_version and 20 > int(pip_version.split('.')[0]): - new_pip_arg = [] return cmdline_runner( [sys.executable, '-m', 'pip'] + new_pip_arg + ['--disable-pip-version-check'] + pip_cmd, @@ -80,7 +67,7 @@ def initial_requirements(): from Cheetah import VersionTuple is_cheetah2 = (3, 0, 0) > VersionTuple[0:3] - is_cheetah3py3 = not PY2 and (3, 3, 0) > VersionTuple[0:3] + is_cheetah3py3 = (3, 3, 0) > VersionTuple[0:3] if not (is_cheetah2 or is_cheetah3py3): return @@ -115,7 +102,7 @@ def initial_requirements(): def extras_failed_filepath(data_dir): - return ek.ek(os.path.join, data_dir, '.pip_req_spec_failed.txt') + return os.path.join(data_dir, '.pip_req_spec_failed.txt') def load_ignorables(data_dir): @@ -124,7 +111,7 @@ def load_ignorables(data_dir): data = [] filepath = extras_failed_filepath(data_dir) - if ek.ek(os.path.isfile, filepath): + if os.path.isfile(filepath): try: with io.open(filepath, 'r', encoding='UTF8') as fp: data = fp.readlines() @@ -166,13 +153,10 @@ def check_pip_env(): _, _, installed, failed_names = _check_pip_env() - py2_last = 'final py2 release' boost = 'performance boost' extra_info = dict({'Cheetah3': 'filled requirement', 'CT3': 'filled requirement', 'lxml': boost, 'python-Levenshtein': boost}) - extra_info.update((dict(cryptography=py2_last, pip=py2_last, regex=py2_last, - scandir=boost, setuptools=py2_last), - dict(regex=boost))[not PY2]) + extra_info.update(dict(regex=boost)) return installed, extra_info, failed_names @@ -194,7 +178,7 @@ def _check_pip_env(pip_outdated=False, reset_fails=False): from sickgear import logger, PROG_DIR, DATA_DIR for cur_reco_file in ['requirements.txt', 'recommended.txt']: try: - with io.open(ek.ek(os.path.join, PROG_DIR, cur_reco_file)) as fh: + with io.open(os.path.join(PROG_DIR, cur_reco_file)) as fh: 
input_reco += ['%s\n' % line.strip() for line in fh] # must ensure EOL marker except (BaseException, Exception): pass @@ -267,9 +251,9 @@ def _check_pip_env(pip_outdated=False, reset_fails=False): names_outdated = dict({cur_item.get('name'): {k: cur_item.get(k) for k in ('version', 'latest_version', 'latest_filetype')} for cur_item in json_loads(output)}) - to_update = set(filter_list( + to_update = set(list(filter( lambda name: name in specifiers and names_outdated[name]['latest_version'] in specifiers[name], - set(names_reco).intersection(set(names_outdated)))) + set(names_reco).intersection(set(names_outdated))))) # check whether to ignore direct reference specification updates if not dev mode if not int(os.environ.get('CHK_URL_SPECIFIERS', 0)): @@ -283,7 +267,7 @@ def _check_pip_env(pip_outdated=False, reset_fails=False): except (BaseException, Exception): pass - updates_todo = ordered_dict() + updates_todo = dict() todo = to_install.union(to_update, requirement_update) for cur_name in [cur_n for cur_n in names_reco if cur_n in todo]: updates_todo[cur_name] = dict({ @@ -305,7 +289,7 @@ def pip_update(loading_msg, updates_todo, data_dir): failed_lines = [] input_reco = None - piper_path = ek.ek(os.path.join, data_dir, '.pip_req_spec_temp.txt') + piper_path = os.path.join(data_dir, '.pip_req_spec_temp.txt') for cur_project_name, cur_data in iteritems(updates_todo): msg = 'Installing package "%s"' % cur_project_name if cur_data.get('info'): @@ -343,7 +327,7 @@ def pip_update(loading_msg, updates_todo, data_dir): if not parsed_name: parsed_name = re.findall(r'(?sim)up-to-date\S+\s*(%s).*?\s\(([^)]+)\)$' % find_name, output) parsed_name = ['' if not parsed_name else '-'.join(parsed_name[0])] - pip_version = re.findall(r'%s-([\d.]+).*?' % find_name, ek.ek(os.path.basename, parsed_name[0]), re.I)[0] + pip_version = re.findall(r'%s-([\d.]+).*?' 
% find_name, os.path.basename(parsed_name[0]), re.I)[0] except (BaseException, Exception): pass diff --git a/sickgear/postProcessor.py b/sickgear/postProcessor.py index 5585fd3a..711e0bf6 100644 --- a/sickgear/postProcessor.py +++ b/sickgear/postProcessor.py @@ -22,8 +22,6 @@ import re import stat import threading -# noinspection PyPep8Naming -import encodingKludge as ek import exceptions_helper from exceptions_helper import ex @@ -35,7 +33,7 @@ from .indexers.indexer_config import TVINFO_TVDB from .name_parser.parser import InvalidNameException, InvalidShowException, NameParser from _23 import decode_str -from six import iteritems, PY2, string_types +from six import iteritems, string_types from sg_helpers import long_path, cmdline_runner # noinspection PyUnreachableCode @@ -64,17 +62,16 @@ class PostProcessor(object): nzb_name: The name of the NZB which resulted in this file being downloaded (optional) """ # absolute path to the folder that is being processed - self.folder_path = long_path(ek.ek(os.path.dirname, long_path( - ek.ek(os.path.abspath, long_path(file_path))))) # type: AnyStr + self.folder_path = long_path(os.path.dirname(long_path(os.path.abspath(long_path(file_path))))) # type: AnyStr # full path to file self.file_path = long_path(file_path) # type: AnyStr # file name only - self.file_name = ek.ek(os.path.basename, long_path(file_path)) # type: AnyStr + self.file_name = os.path.basename(long_path(file_path)) # type: AnyStr # the name of the folder only - self.folder_name = ek.ek(os.path.basename, self.folder_path) # type: AnyStr + self.folder_name = os.path.basename(self.folder_path) # type: AnyStr # name of the NZB that resulted in this folder self.nzb_name = nzb_name # type: AnyStr or None @@ -112,9 +109,9 @@ class PostProcessor(object): :param level: The log level to use (optional) :type level: int """ - logger_msg = re.sub(r'(?i)\.*', '', message) - logger_msg = re.sub('(?i)]+>([^<]+)<[/]a>', r'\1', logger_msg) - logger.log(u'%s' % logger_msg, level) + logger_msg = re.sub(r'(?i)\.*', '', message) + logger_msg = re.sub('(?i)]+>([^<]+)', r'\1', logger_msg) + logger.log(f'{logger_msg}', level) self.log += message + '\n' def _check_for_existing_file(self, existing_file): @@ -132,25 +129,24 @@ class PostProcessor(object): """ if not existing_file: - self._log(u'There is no existing file', logger.DEBUG) + self._log('There is no existing file', logger.DEBUG) return PostProcessor.DOESNT_EXIST # if the new file exists, return the appropriate code depending on the size - if ek.ek(os.path.isfile, existing_file): - new_file = u'New file %s
.. is ' % self.file_path
- if ek.ek(os.path.getsize, self.file_path) == ek.ek(os.path.getsize, existing_file):
- self._log(u'%sthe same size as %s' % (new_file, existing_file), logger.DEBUG)
+ if os.path.isfile(existing_file):
+ new_file = f'New file {self.file_path}
.. is ' + if os.path.getsize(self.file_path) == os.path.getsize(existing_file): + self._log(f'{new_file}the same size as {existing_file}', logger.DEBUG) return PostProcessor.EXISTS_SAME - elif ek.ek(os.path.getsize, self.file_path) < ek.ek(os.path.getsize, existing_file): - self._log(u'%ssmaller than %s' % (new_file, existing_file), logger.DEBUG) + elif os.path.getsize(self.file_path) < os.path.getsize(existing_file): + self._log(f'{new_file}smaller than {existing_file}', logger.DEBUG) return PostProcessor.EXISTS_LARGER else: - self._log(u'%slarger than %s' % (new_file, existing_file), logger.DEBUG) + self._log(f'{new_file}larger than {existing_file}', logger.DEBUG) return PostProcessor.EXISTS_SMALLER else: - self._log(u'File doesn\'t exist %s' % existing_file, - logger.DEBUG) + self._log(f'File doesn\'t exist {existing_file}', logger.DEBUG) return PostProcessor.DOESNT_EXIST @staticmethod @@ -188,7 +184,7 @@ class PostProcessor(object): base_name = re.sub(r'[\[\]*?]', r'[\g<0>]', base_name) for meta_ext in ['', '-thumb', '.ext', '.ext.cover', '.metathumb']: - for associated_file_path in ek.ek(glob.glob, '%s%s.*' % (base_name, meta_ext)): + for associated_file_path in glob.glob('%s%s.*' % (base_name, meta_ext)): # only add associated to list if associated_file_path == file_path: continue @@ -201,7 +197,7 @@ class PostProcessor(object): if re.search(r'(^.+\.(rar|r\d+)$)', associated_file_path): continue - if ek.ek(os.path.isfile, associated_file_path): + if os.path.isfile(associated_file_path): file_path_list.append(associated_file_path) return file_path_list @@ -225,28 +221,26 @@ class PostProcessor(object): file_list = file_list + self.list_associated_files(file_path) if not file_list: - self._log(u'Not deleting anything because there are no files associated with %s' % file_path, logger.DEBUG) + self._log(f'Not deleting anything because there are no files associated with {file_path}', logger.DEBUG) return # delete the file and any other files which we want to delete for cur_file in file_list: - if ek.ek(os.path.isfile, cur_file): + if os.path.isfile(cur_file): # check first the read-only attribute - file_attribute = ek.ek(os.stat, cur_file)[0] + file_attribute = os.stat(cur_file)[0] if not file_attribute & stat.S_IWRITE: # File is read-only, so make it writeable try: - ek.ek(os.chmod, cur_file, stat.S_IWRITE) - self._log(u'Changed read only permissions to writeable to delete file %s' - % cur_file, logger.DEBUG) + os.chmod(cur_file, stat.S_IWRITE) + self._log(f'Changed read only permissions to writeable to delete file {cur_file}', logger.DEBUG) except (BaseException, Exception): - self._log(u'Cannot change permissions to writeable to delete file: %s' - % cur_file, logger.WARNING) + self._log(f'Cannot change permissions to writeable to delete file: {cur_file}', logger.WARNING) removal_type = helpers.remove_file(cur_file, log_level=logger.DEBUG) - if True is not ek.ek(os.path.isfile, cur_file): - self._log(u'%s file %s' % (removal_type, cur_file), logger.DEBUG) + if True is not os.path.isfile(cur_file): + self._log(f'{removal_type} file {cur_file}', logger.DEBUG) # do the library update for synoindex notifiers.NotifierFactory().get('SYNOINDEX').deleteFile(cur_file) @@ -274,7 +268,7 @@ class PostProcessor(object): """ if not action: - self._log(u'Must provide an action for the combined file operation', logger.ERROR) + self._log('Must provide an action for the combined file operation', logger.ERROR) return file_list = [file_path] @@ -284,7 +278,7 @@ class PostProcessor(object): file_list = 
file_list + self.list_associated_files(file_path, subtitles_only=True) if not file_list: - self._log(u'Not moving anything because there are no files associated with %s' % file_path, logger.DEBUG) + self._log(f'Not moving anything because there are no files associated with {file_path}', logger.DEBUG) return # create base name with file_path (media_file without .extension) @@ -294,7 +288,7 @@ class PostProcessor(object): # deal with all files for cur_file_path in file_list: - cur_file_name = ek.ek(os.path.basename, cur_file_path) + cur_file_name = os.path.basename(cur_file_path) # get the extension without . cur_extension = cur_file_path[old_base_name_length + 1:] @@ -304,10 +298,10 @@ class PostProcessor(object): cur_extension = 'nfo-orig' # check if file have subtitles language - if ek.ek(os.path.splitext, cur_extension)[1][1:] in common.subtitleExtensions: - cur_lang = ek.ek(os.path.splitext, cur_extension)[0] + if os.path.splitext(cur_extension)[1][1:] in common.subtitleExtensions: + cur_lang = os.path.splitext(cur_extension)[0] if cur_lang in sickgear.SUBTITLES_LANGUAGES: - cur_extension = cur_lang + ek.ek(os.path.splitext, cur_extension)[1] + cur_extension = cur_lang + os.path.splitext(cur_extension)[1] # If new base name then convert name if new_base_name: @@ -317,15 +311,15 @@ class PostProcessor(object): new_file_name = helpers.replace_extension(cur_file_name, cur_extension) if sickgear.SUBTITLES_DIR and cur_extension in common.subtitleExtensions: - subs_new_path = ek.ek(os.path.join, new_path, sickgear.SUBTITLES_DIR) + subs_new_path = os.path.join(new_path, sickgear.SUBTITLES_DIR) dir_exists = helpers.make_dir(subs_new_path) if not dir_exists: - logger.log(u'Unable to create subtitles folder ' + subs_new_path, logger.ERROR) + logger.error(f'Unable to create subtitles folder {subs_new_path}') else: helpers.chmod_as_parent(subs_new_path) - new_file_path = ek.ek(os.path.join, subs_new_path, new_file_name) + new_file_path = os.path.join(subs_new_path, new_file_name) else: - new_file_path = ek.ek(os.path.join, new_path, new_file_name) + new_file_path = os.path.join(new_path, new_file_name) if None is action_tmpl: action(cur_file_path, new_file_path) @@ -348,15 +342,16 @@ class PostProcessor(object): :type action_tmpl: """ - def _int_move(cur_file_path, new_file_path, success_tmpl=u' %s to %s'): + def _int_move(cur_file_path, new_file_path, success_tmpl=' %s to %s'): try: helpers.move_file(cur_file_path, new_file_path, raise_exceptions=True) helpers.chmod_as_parent(new_file_path) - self._log(u'Moved file from' + (success_tmpl % (cur_file_path, new_file_path)), logger.DEBUG) + self._log(f'Moved file from{(success_tmpl % (cur_file_path, new_file_path))}', + logger.DEBUG) except (IOError, OSError) as e: - self._log(u'Unable to move file %s
.. %s'
- % (success_tmpl % (cur_file_path, new_file_path), ex(e)), logger.ERROR)
+ self._log(f'Unable to move file {success_tmpl % (cur_file_path, new_file_path)}
.. {ex(e)}', + logger.ERROR) raise e self._combined_file_operation(file_path, new_path, new_base_name, associated_files, _int_move, @@ -378,15 +373,16 @@ class PostProcessor(object): :type action_tmpl: """ - def _int_copy(cur_file_path, new_file_path, success_tmpl=u' %s to %s'): + def _int_copy(cur_file_path, new_file_path, success_tmpl=' %s to %s'): try: helpers.copy_file(cur_file_path, new_file_path) helpers.chmod_as_parent(new_file_path) - self._log(u'Copied file from' + (success_tmpl % (cur_file_path, new_file_path)), logger.DEBUG) + self._log(f'Copied file from{(success_tmpl % (cur_file_path, new_file_path))}', + logger.DEBUG) except (IOError, OSError) as e: - self._log(u'Unable to copy %s
.. %s'
- % (success_tmpl % (cur_file_path, new_file_path), ex(e)), logger.ERROR)
+ self._log(f'Unable to copy {success_tmpl % (cur_file_path, new_file_path)}
.. {ex(e)}', + logger.ERROR) raise e self._combined_file_operation(file_path, new_path, new_base_name, associated_files, _int_copy, @@ -406,15 +402,16 @@ class PostProcessor(object): :type action_tmpl: """ - def _int_hard_link(cur_file_path, new_file_path, success_tmpl=u' %s to %s'): + def _int_hard_link(cur_file_path, new_file_path, success_tmpl=' %s to %s'): try: helpers.hardlink_file(cur_file_path, new_file_path) helpers.chmod_as_parent(new_file_path) - self._log(u'Hard linked file from' + (success_tmpl % (cur_file_path, new_file_path)), logger.DEBUG) + self._log(f'Hard linked file from{(success_tmpl % (cur_file_path, new_file_path))}', + logger.DEBUG) except (IOError, OSError) as e: - self._log(u'Unable to link file %s
.. %s'
- % (success_tmpl % (cur_file_path, new_file_path), ex(e)), logger.ERROR)
+ self._log(f'Unable to link file {success_tmpl % (cur_file_path, new_file_path)}
.. {ex(e)}', + logger.ERROR) raise e self._combined_file_operation(file_path, new_path, new_base_name, associated_files, _int_hard_link, @@ -434,16 +431,16 @@ class PostProcessor(object): :type action_tmpl: """ - def _int_move_and_sym_link(cur_file_path, new_file_path, success_tmpl=u' %s to %s'): + def _int_move_and_sym_link(cur_file_path, new_file_path, success_tmpl=' %s to %s'): try: helpers.move_and_symlink_file(cur_file_path, new_file_path) helpers.chmod_as_parent(new_file_path) - self._log(u'Moved then symbolic linked file from' + (success_tmpl % (cur_file_path, new_file_path)), + self._log(f'Moved then symbolic linked file from{(success_tmpl % (cur_file_path, new_file_path))}', logger.DEBUG) except (IOError, OSError) as e: - self._log(u'Unable to link file %s
.. %s'
- % (success_tmpl % (cur_file_path, new_file_path), ex(e)), logger.ERROR)
+ self._log(f'Unable to link file {success_tmpl % (cur_file_path, new_file_path)}
.. {ex(e)}', + logger.ERROR) raise e self._combined_file_operation(file_path, new_path, new_base_name, associated_files, _int_move_and_sym_link, @@ -518,9 +515,9 @@ class PostProcessor(object): self.in_history = True to_return = (show_obj, season_number, episode_numbers, quality) if not show_obj: - self._log(u'Unknown show, check availability on ShowList page', logger.DEBUG) + self._log('Unknown show, check availability on ShowList page', logger.DEBUG) break - self._log(u'Found a match in history for %s' % show_obj.name, logger.DEBUG) + self._log(f'Found a match in history for {show_obj.name}', logger.DEBUG) break return to_return @@ -549,7 +546,7 @@ class PostProcessor(object): :rtype: Tuple[None, None, List, None] or Tuple[sickgear.tv.TVShow, int, List[int], int] """ - logger.log(u'Analyzing name ' + repr(name)) + logger.log(f'Analyzing name {repr(name)}') to_return = (None, None, [], None) @@ -559,8 +556,8 @@ class PostProcessor(object): # parse the name to break it into show name, season, and episode np = NameParser(resource, convert=True, show_obj=self.show_obj or show_obj) parse_result = np.parse(name) - self._log(u'Parsed %s
.. from %s'
- % (decode_str(str(parse_result), errors='xmlcharrefreplace'), name), logger.DEBUG)
+ self._log(f'Parsed {decode_str(str(parse_result), errors="xmlcharrefreplace")}
' + f'.. from {name}', logger.DEBUG) if parse_result.is_air_by_date and (None is parse_result.season_number or not parse_result.episode_numbers): season_number = -1 @@ -598,16 +595,19 @@ class PostProcessor(object): and parse_result.release_group: if not self.release_name: - self.release_name = helpers.remove_extension(ek.ek(os.path.basename, parse_result.original_name)) + self.release_name = helpers.remove_extension(os.path.basename(parse_result.original_name)) else: - logger.log(u'Parse result not sufficient (all following have to be set). will not save release name', - logger.DEBUG) - logger.log(u'Parse result(series_name): ' + str(parse_result.series_name), logger.DEBUG) - logger.log(u'Parse result(season_number): ' + str(parse_result.season_number), logger.DEBUG) - logger.log(u'Parse result(episode_numbers): ' + str(parse_result.episode_numbers), logger.DEBUG) - logger.log(u' or Parse result(air_date): ' + str(parse_result.air_date), logger.DEBUG) - logger.log(u'Parse result(release_group): ' + str(parse_result.release_group), logger.DEBUG) + for cur_msg in ( + 'Parse result not sufficient (all following have to be set). will not save release name', + f'Parse result(series_name): {parse_result.series_name}', + f'Parse result(season_number): {parse_result.season_number}', + f'Parse result(episode_numbers): {parse_result.episode_numbers}', + f' or Parse result(air_date): {parse_result.air_date}', + f'Parse result(release_group): {parse_result.release_group}' + ): + logger.debug(cur_msg) + def _find_info(self, history_only=False): """ @@ -635,7 +635,7 @@ class PostProcessor(object): lambda: self._analyze_name(self.file_path), # try to analyze the dir + file name together as one name - lambda: self._analyze_name(self.folder_name + u' ' + self.file_name), + lambda: self._analyze_name(f'{self.folder_name} {self.file_name}'), # try to analyze file name with previously parsed show_obj lambda: self._analyze_name(self.file_name, show_obj=show_obj, rel_grp=rel_grp)], @@ -648,7 +648,7 @@ class PostProcessor(object): try: (try_show_obj, try_season, try_episodes, try_quality) = cur_try() except (InvalidNameException, InvalidShowException) as e: - logger.log(u'Unable to parse, skipping: ' + ex(e), logger.DEBUG) + logger.debug(f'Unable to parse, skipping: {ex(e)}') continue if not try_show_obj: @@ -670,8 +670,8 @@ class PostProcessor(object): # for air-by-date shows we need to look up the season/episode from database if -1 == season_number and show_obj and episode_numbers: - self._log(u'Looks like this is an air-by-date or sports show,' - u' attempting to convert the date to season/episode', logger.DEBUG) + self._log('Looks like this is an air-by-date or sports show,' + ' attempting to convert the date to season/episode', logger.DEBUG) airdate = episode_numbers[0].toordinal() my_db = db.DBConnection() sql_result = my_db.select( @@ -684,8 +684,8 @@ class PostProcessor(object): season_number = int(sql_result[0][0]) episode_numbers = [int(sql_result[0][1])] else: - self._log(u'Unable to find episode with date %s for show %s, skipping' % - (episode_numbers[0], show_obj.tvid_prodid), logger.DEBUG) + self._log(f'Unable to find episode with date {episode_numbers[0]} for show {show_obj.tvid_prodid},' + f' skipping', logger.DEBUG) # don't leave dates in the episode list if we can't convert them to real episode numbers episode_numbers = [] continue @@ -700,8 +700,8 @@ class PostProcessor(object): [show_obj.tvid, show_obj.prodid]) if 1 == int(num_seasons_sql_result[0][0]) and None is season_number: self._log( - 
u'No season number found, but this show appears to only have 1 season,' - u' setting season number to 1...', logger.DEBUG) + 'No season number found, but this show appears to only have 1 season,' + ' setting season number to 1...', logger.DEBUG) season_number = 1 if show_obj and season_number and episode_numbers: @@ -734,13 +734,13 @@ class PostProcessor(object): for cur_episode_number in episode_numbers: cur_episode_number = int(cur_episode_number) - self._log(u'Retrieving episode object for %sx%s' % (season_number, cur_episode_number), logger.DEBUG) + self._log(f'Retrieving episode object for {season_number}x{cur_episode_number}', logger.DEBUG) # now that we've figured out which episode this file is just load it manually try: ep_obj = show_obj.get_episode(season_number, cur_episode_number) except exceptions_helper.EpisodeNotFoundException as e: - self._log(u'Unable to create episode: ' + ex(e), logger.DEBUG) + self._log(f'Unable to create episode: {ex(e)}', logger.DEBUG) raise exceptions_helper.PostProcessingFailed() # associate all the episodes together under a single root episode @@ -765,11 +765,10 @@ class PostProcessor(object): # if there is a quality available in the status then we don't need to bother guessing from the filename if ep_obj.status in common.Quality.SNATCHED_ANY: - old_status, ep_quality = common.Quality.splitCompositeStatus(ep_obj.status) + old_status, ep_quality = common.Quality.split_composite_status(ep_obj.status) if common.Quality.UNKNOWN != ep_quality: - self._log( - u'Using "%s" quality from the old status' % common.Quality.qualityStrings[ep_quality], - logger.DEBUG) + self._log(f'Using "{common.Quality.qualityStrings[ep_quality]}" quality from the old status', + logger.DEBUG) return ep_quality # search all possible names for our new quality, in case the file or dir doesn't have it @@ -782,27 +781,26 @@ class PostProcessor(object): if not cur_name: continue - ep_quality = common.Quality.nameQuality(cur_name, ep_obj.show_obj.is_anime) - quality_log = u' "%s" quality parsed from the %s %s'\ - % (common.Quality.qualityStrings[ep_quality], thing, cur_name) + ep_quality = common.Quality.name_quality(cur_name, ep_obj.show_obj.is_anime) + quality_log = f' "{common.Quality.qualityStrings[ep_quality]}" quality parsed from the {thing} {cur_name}' # if we find a good one then use it if common.Quality.UNKNOWN != ep_quality: - self._log(u'Using' + quality_log, logger.DEBUG) + self._log(f'Using{quality_log}', logger.DEBUG) return ep_quality else: - self._log(u'Found' + quality_log, logger.DEBUG) + self._log(f'Found{quality_log}', logger.DEBUG) - ep_quality = common.Quality.fileQuality(self.file_path) + ep_quality = common.Quality.file_quality(self.file_path) if common.Quality.UNKNOWN != ep_quality: - self._log(u'Using "%s" quality parsed from the metadata file content of %s' - % (common.Quality.qualityStrings[ep_quality], self.file_name), logger.DEBUG) + self._log(f'Using "{common.Quality.qualityStrings[ep_quality]}" quality parsed' + f' from the metadata file content of {self.file_name}', logger.DEBUG) return ep_quality # Try guessing quality from the file name - ep_quality = common.Quality.assumeQuality(self.file_name) - self._log(u'Using guessed "%s" quality from the file name %s' - % (common.Quality.qualityStrings[ep_quality], self.file_name), logger.DEBUG) + ep_quality = common.Quality.assume_quality(self.file_name) + self._log(f'Using guessed "{common.Quality.qualityStrings[ep_quality]}" quality' + f' from the file name {self.file_name}', logger.DEBUG) return ep_quality 
@@ -824,15 +822,10 @@ class PostProcessor(object): try: script_cmd = [piece for piece in re.split("( |\\\".*?\\\"|'.*?')", script_name) if piece.strip()] - script_cmd[0] = ek.ek(os.path.abspath, script_cmd[0]) - self._log(u'Absolute path to script: ' + script_cmd[0], logger.DEBUG) + script_cmd[0] = os.path.abspath(script_cmd[0]) + self._log(f'Absolute path to script: {script_cmd[0]}', logger.DEBUG) - if PY2: - script_cmd += [ep_obj.location.encode(sickgear.SYS_ENCODING), - self.file_path.encode(sickgear.SYS_ENCODING) - ] - else: - script_cmd += [ep_obj.location, self.file_path] + script_cmd += [ep_obj.location, self.file_path] script_cmd += ([], [str(ep_obj.show_obj.tvid)])[new_call] + [ str(ep_obj.show_obj.prodid), @@ -840,7 +833,7 @@ class PostProcessor(object): str(ep_obj.episode), str(ep_obj.airdate)] - self._log(u'Executing command ' + str(script_cmd)) + self._log(f'Executing command {script_cmd}') except (BaseException, Exception) as e: self._log('Error creating extra script command: %s' % ex(e), logger.ERROR) return @@ -851,10 +844,10 @@ class PostProcessor(object): self._log('Script result: %s' % output, logger.DEBUG) except OSError as e: - self._log(u'Unable to run extra_script: ' + ex(e), logger.ERROR) + self._log(f'Unable to run extra_script: {ex(e)}', logger.ERROR) except (BaseException, Exception) as e: - self._log(u'Unable to run extra_script: ' + ex(e), logger.ERROR) + self._log(f'Unable to run extra_script: {ex(e)}', logger.ERROR) def _run_extra_scripts(self, ep_obj): """ @@ -883,54 +876,54 @@ class PostProcessor(object): """ try: - existing_show_path = ek.ek(os.path.isdir, ep_obj.show.location) + existing_show_path = os.path.isdir(ep_obj.show.location) except exceptions_helper.ShowDirNotFoundException: existing_show_path = False if not existing_show_path and not sickgear.CREATE_MISSING_SHOW_DIRS: # Show location does not exist, and cannot be created, marking it unsafe to proceed - self._log(u'.. marking it unsafe to proceed because show location does not exist', logger.DEBUG) + self._log('.. marking it unsafe to proceed because show location does not exist', logger.DEBUG) return False # if SickGear snatched this then assume it's safe if ep_obj.status in common.Quality.SNATCHED_ANY: - self._log(u'SickGear snatched this episode, marking it safe to replace', logger.DEBUG) + self._log('SickGear snatched this episode, marking it safe to replace', logger.DEBUG) return True - old_ep_status, old_ep_quality = common.Quality.splitCompositeStatus(ep_obj.status) + old_ep_status, old_ep_quality = common.Quality.split_composite_status(ep_obj.status) # if old episode is not downloaded/archived then it's safe if common.DOWNLOADED != old_ep_status and common.ARCHIVED != old_ep_status: - self._log(u'Existing episode status is not downloaded/archived, marking it safe to replace', logger.DEBUG) + self._log('Existing episode status is not downloaded/archived, marking it safe to replace', logger.DEBUG) return True if common.ARCHIVED == old_ep_status and common.Quality.NONE == old_ep_quality: - self._log(u'Marking it unsafe to replace because the existing episode status is archived', logger.DEBUG) + self._log('Marking it unsafe to replace because the existing episode status is archived', logger.DEBUG) return False # Status downloaded. 
Quality/ size checks # if manual post process option is set to force_replace then it's safe if self.force_replace: - self._log(u'Force replace existing episode option is enabled, marking it safe to replace', logger.DEBUG) + self._log('Force replace existing episode option is enabled, marking it safe to replace', logger.DEBUG) return True # if the file processed is higher quality than the existing episode then it's safe if new_ep_quality > old_ep_quality: if common.Quality.UNKNOWN != new_ep_quality: - self._log(u'Existing episode status is not snatched but the episode to process appears to be better' - u' quality than existing episode, marking it safe to replace', logger.DEBUG) + self._log('Existing episode status is not snatched but the episode to process appears to be better' + ' quality than existing episode, marking it safe to replace', logger.DEBUG) return True else: - self._log(u'Marking it unsafe to replace because an existing episode exists in the database and' - u' the episode to process has unknown quality', logger.DEBUG) + self._log('Marking it unsafe to replace because an existing episode exists in the database and' + ' the episode to process has unknown quality', logger.DEBUG) return False existing_file_status = self._check_for_existing_file(ep_obj.location) if PostProcessor.DOESNT_EXIST == existing_file_status \ and (existing_show_path or sickgear.CREATE_MISSING_SHOW_DIRS): - self._log(u'.. there is no file to replace, marking it safe to continue', logger.DEBUG) + self._log('.. there is no file to replace, marking it safe to continue', logger.DEBUG) return True # if there's an existing downloaded file with same quality, check filesize to decide @@ -954,48 +947,47 @@ class PostProcessor(object): npr.is_anime, check_is_repack=True) if new_proper_level > cur_proper_level and \ (not is_repack or npr.release_group == ep_obj.release_group): - self._log(u'Proper or repack with same quality, marking it safe to replace', logger.DEBUG) + self._log('Proper or repack with same quality, marking it safe to replace', logger.DEBUG) return True - self._log(u'An episode exists in the database with the same quality as the episode to process', - logger.DEBUG) + self._log('An episode exists in the database with the same quality as the episode to process', logger.DEBUG) - self._log(u'Checking size of existing file ' + ep_obj.location, logger.DEBUG) + self._log(f'Checking size of existing file {ep_obj.location}', logger.DEBUG) if PostProcessor.EXISTS_SMALLER == existing_file_status: # File exists and new file is larger, marking it safe to replace - self._log(u'.. the existing smaller file will be replaced', logger.DEBUG) + self._log('.. the existing smaller file will be replaced', logger.DEBUG) return True elif PostProcessor.EXISTS_LARGER == existing_file_status: # File exists and new file is smaller, marking it unsafe to replace - self._log(u'.. marking it unsafe to replace the existing larger file', logger.DEBUG) + self._log('.. marking it unsafe to replace the existing larger file', logger.DEBUG) return False elif PostProcessor.EXISTS_SAME == existing_file_status: # File exists and new file is same size, marking it unsafe to replace - self._log(u'.. marking it unsafe to replace the existing same size file', logger.DEBUG) + self._log('.. marking it unsafe to replace the existing same size file', logger.DEBUG) return False else: - self._log(u'Unknown file status for: %s This should never happen, please log this as a bug.' 
- % ep_obj.location, logger.ERROR) + self._log(f'Unknown file status for: {ep_obj.location}' + f' This should never happen, please log this as a bug.', logger.ERROR) return False # if there's an existing file with better quality if old_ep_quality > new_ep_quality and old_ep_quality != common.Quality.UNKNOWN: # Episode already exists in database and processed episode has lower quality, marking it unsafe to replace - self._log(u'Marking it unsafe to replace the episode that already exists in database with a file of lower' - u' quality', logger.DEBUG) + self._log('Marking it unsafe to replace the episode that already exists in database with a file of lower' + ' quality', logger.DEBUG) return False if self.in_history: - self._log(u'SickGear snatched this episode, marking it safe to replace', logger.DEBUG) + self._log('SickGear snatched this episode, marking it safe to replace', logger.DEBUG) return True # None of the conditions were met, marking it unsafe to replace - self._log(u'Marking it unsafe to replace because no positive condition is met, you may force replace but it' - u' would be better to examine the files', logger.DEBUG) + self._log('Marking it unsafe to replace because no positive condition is met, you may force replace but it' + ' would be better to examine the files', logger.DEBUG) return False def _change_ep_objs(self, show_obj, season_number, episode_numbers, quality): @@ -1006,14 +998,14 @@ class PostProcessor(object): for cur_ep_obj in [ep_obj] + ep_obj.related_ep_obj: with cur_ep_obj.lock: if self.release_name: - self._log(u'Found release name ' + self.release_name, logger.DEBUG) + self._log(f'Found release name {self.release_name}', logger.DEBUG) cur_ep_obj.release_name = self.release_name or '' - any_qualities, best_qualities = common.Quality.splitQuality(cur_ep_obj.show_obj.quality) - cur_status, cur_quality = common.Quality.splitCompositeStatus(cur_ep_obj.status) + any_qualities, best_qualities = common.Quality.split_quality(cur_ep_obj.show_obj.quality) + cur_status, cur_quality = common.Quality.split_composite_status(cur_ep_obj.status) - cur_ep_obj.status = common.Quality.compositeStatus( + cur_ep_obj.status = common.Quality.composite_status( **({'status': common.DOWNLOADED, 'quality': quality}, {'status': common.ARCHIVED, 'quality': quality}) [cur_ep_obj.status in common.Quality.SNATCHED_BEST or @@ -1052,7 +1044,7 @@ class PostProcessor(object): self._log('Successfully processed.', logger.MESSAGE) else: - self._log('Can\'t figure out what show/episode to process', logger.WARNING) + self._log("Can't figure out what show/episode to process", logger.WARNING) raise exceptions_helper.PostProcessingFailed() def process(self): @@ -1062,16 +1054,16 @@ class PostProcessor(object): :rtype: bool """ - self._log(u'Processing... %s%s' % (ek.ek(os.path.relpath, self.file_path, self.folder_path), - (u'
<br />.. from nzb %s' % self.nzb_name, u'')[None is self.nzb_name]))
+        self._log(f'Processing... {os.path.relpath(self.file_path, self.folder_path)}'
+                  f'{(f"<br />.. from nzb {self.nzb_name}", "")[None is self.nzb_name]}')

-        if ek.ek(os.path.isdir, self.file_path):
-            self._log(u'Expecting file %s<br />.. is actually a directory, skipping' % self.file_path)
+        if os.path.isdir(self.file_path):
+            self._log(f'Expecting file {self.file_path}<br />.. is actually a directory, skipping')
             return False

         for ignore_file in self.IGNORED_FILESTRINGS:
             if ignore_file in self.file_path:
-                self._log(u'File %s<br />.. is ignored type, skipping' % self.file_path)
+                self._log(f'File {self.file_path}<br />
.. is ignored type, skipping') return False # reset per-file stuff @@ -1083,10 +1075,10 @@ class PostProcessor(object): # if we don't have it then give up if not show_obj: - self._log(u'Must add show to SickGear before trying to post process an episode', logger.WARNING) + self._log('Must add show to SickGear before trying to post process an episode', logger.WARNING) raise exceptions_helper.PostProcessingFailed() elif None is season_number or not episode_numbers: - self._log(u'Quitting this post process, could not determine what episode this is', logger.DEBUG) + self._log('Quitting this post process, could not determine what episode this is', logger.DEBUG) return False # retrieve/create the corresponding TVEpisode objects @@ -1097,12 +1089,12 @@ class PostProcessor(object): new_ep_quality = self._get_quality(ep_obj) else: new_ep_quality = quality - self._log(u'Using "%s" quality' % common.Quality.qualityStrings[new_ep_quality], logger.DEBUG) + self._log(f'Using "{common.Quality.qualityStrings[new_ep_quality]}" quality', logger.DEBUG) # see if it's safe to replace existing episode (is download snatched, PROPER, better quality) if not self._safe_replace(ep_obj, new_ep_quality): # if it's not safe to replace, stop here - self._log(u'Quitting this post process', logger.DEBUG) + self._log('Quitting this post process', logger.DEBUG) return False # delete the existing file (and company) @@ -1110,27 +1102,27 @@ class PostProcessor(object): try: self._delete(cur_ep_obj.location, associated_files=True) - # clean up any left over folders + # clean up any leftover folders if cur_ep_obj.location: - helpers.delete_empty_folders(ek.ek(os.path.dirname, cur_ep_obj.location), + helpers.delete_empty_folders(os.path.dirname(cur_ep_obj.location), keep_dir=ep_obj.show_obj.location) except (OSError, IOError): - raise exceptions_helper.PostProcessingFailed(u'Unable to delete existing files') + raise exceptions_helper.PostProcessingFailed('Unable to delete existing files') # set the status of the episodes # for cur_ep_obj in [ep_obj] + ep_obj.related_ep_obj: - # cur_ep_obj.status = common.Quality.compositeStatus(common.SNATCHED, new_ep_quality) + # cur_ep_obj.status = common.Quality.composite_status(common.SNATCHED, new_ep_quality) # if the show directory doesn't exist then make it if allowed - if not ek.ek(os.path.isdir, ep_obj.show_obj.location) and sickgear.CREATE_MISSING_SHOW_DIRS: - self._log(u'Show directory does not exist, creating it', logger.DEBUG) + if not os.path.isdir(ep_obj.show_obj.location) and sickgear.CREATE_MISSING_SHOW_DIRS: + self._log('Show directory does not exist, creating it', logger.DEBUG) try: - ek.ek(os.mkdir, ep_obj.show_obj.location) + os.mkdir(ep_obj.show_obj.location) # do the library update for synoindex notifiers.NotifierFactory().get('SYNOINDEX').addFolder(ep_obj.show_obj.location) except (OSError, IOError): - raise exceptions_helper.PostProcessingFailed(u'Unable to create show directory: ' - + ep_obj.show_obj.location) + raise exceptions_helper.PostProcessingFailed(f'Unable to create show directory:' + f' {ep_obj.show_obj.location}') # get metadata for the show (but not episode because it hasn't been fully processed) ep_obj.show_obj.write_metadata(True) @@ -1138,31 +1130,31 @@ class PostProcessor(object): self._change_ep_objs(show_obj, season_number, episode_numbers, new_ep_quality) # Just want to keep this consistent for failed handling right now - release_name = show_name_helpers.determineReleaseName(self.folder_path, self.nzb_name) + release_name = 
show_name_helpers.determine_release_name(self.folder_path, self.nzb_name) if None is release_name: - self._log(u'No snatched release found in history', logger.WARNING) + self._log('No snatched release found in history', logger.WARNING) elif sickgear.USE_FAILED_DOWNLOADS: failed_history.remove_failed(release_name) # find the destination folder try: proper_path = ep_obj.proper_path() - proper_absolute_path = ek.ek(os.path.join, ep_obj.show_obj.location, proper_path) - dest_path = ek.ek(os.path.dirname, proper_absolute_path) + proper_absolute_path = os.path.join(ep_obj.show_obj.location, proper_path) + dest_path = os.path.dirname(proper_absolute_path) except exceptions_helper.ShowDirNotFoundException: raise exceptions_helper.PostProcessingFailed( - u'Unable to post process an episode because the show dir does not exist, quitting') + 'Unable to post process an episode because the show dir does not exist, quitting') - self._log(u'Destination folder for this episode is ' + dest_path, logger.DEBUG) + self._log(f'Destination folder for this episode is {dest_path}', logger.DEBUG) # create any folders we need if not helpers.make_path(dest_path, syno=True): - raise exceptions_helper.PostProcessingFailed(u'Unable to create destination folder: ' + dest_path) + raise exceptions_helper.PostProcessingFailed(f'Unable to create destination folder: {dest_path}') # figure out the base name of the resulting episode file if sickgear.RENAME_EPISODES: - new_base_name = ek.ek(os.path.basename, proper_path) + new_base_name = os.path.basename(proper_path) new_file_name = new_base_name + '.' + self.file_name.rpartition('.')[-1] else: @@ -1177,13 +1169,12 @@ class PostProcessor(object): keepalive = keepalive_stop = None if self.webhandler: def keep_alive(webh, stop_event): - if not PY2: - import asyncio - asyncio.set_event_loop(asyncio.new_event_loop()) + import asyncio + asyncio.set_event_loop(asyncio.new_event_loop()) while not stop_event.is_set(): stop_event.wait(60) webh('.') - webh(u'\n') + webh('\n') keepalive_stop = threading.Event() keepalive = threading.Thread(target=keep_alive, args=(self.webhandler, keepalive_stop)) @@ -1194,7 +1185,7 @@ class PostProcessor(object): 'new_base_name': new_base_name, 'associated_files': sickgear.MOVE_ASSOCIATED_FILES} args_cpmv = {'subtitles': sickgear.USE_SUBTITLES and ep_obj.show_obj.subtitles, - 'action_tmpl': u' %s
<br />.. to %s'}
+                     'action_tmpl': ' %s<br />
.. to %s'} args_cpmv.update(args_link) if self.webhandler: self.webhandler('Processing method is "%s"' % self.process_method) @@ -1208,10 +1199,10 @@ class PostProcessor(object): elif 'symlink' == self.process_method: self._move_and_symlink(**args_link) else: - logger.log(u'Unknown process method: ' + str(self.process_method), logger.ERROR) - raise exceptions_helper.PostProcessingFailed(u'Unable to move the files to the new location') + logger.error(f'Unknown process method: {self.process_method}') + raise exceptions_helper.PostProcessingFailed('Unable to move the files to the new location') except (OSError, IOError): - raise exceptions_helper.PostProcessingFailed(u'Unable to move the files to the new location') + raise exceptions_helper.PostProcessingFailed('Unable to move the files to the new location') finally: if self.webhandler: # stop the keep_alive @@ -1224,7 +1215,7 @@ class PostProcessor(object): sql_l = [] for cur_ep_obj in [ep_obj] + ep_obj.related_ep_obj: with cur_ep_obj.lock: - cur_ep_obj.location = ek.ek(os.path.join, dest_path, new_file_name) + cur_ep_obj.location = os.path.join(dest_path, new_file_name) if dosubs: cur_ep_obj.download_subtitles(force=True) # set file modify stamp to show airdate diff --git a/sickgear/processTV.py b/sickgear/processTV.py index fa5e14da..16326af2 100644 --- a/sickgear/processTV.py +++ b/sickgear/processTV.py @@ -24,8 +24,6 @@ import shutil import stat import sys -# noinspection PyPep8Naming -import encodingKludge as ek import exceptions_helper from exceptions_helper import ex, MultipleShowObjectsException from json_helper import json_dumps, json_loads @@ -35,10 +33,9 @@ from . import common, db, failedProcessor, helpers, logger, notifiers, postProce from .common import SNATCHED_ANY from .history import reset_status from .name_parser.parser import InvalidNameException, InvalidShowException, NameParser -from .sgdatetime import timestamp_near +from .sgdatetime import SGDatetime -from _23 import filter_list, filter_iter, list_values, map_iter -from six import iteritems, iterkeys, string_types, PY2, text_type +from six import iteritems, iterkeys, string_types, text_type from sg_helpers import long_path, scantree import lib.rarfile.rarfile as rarfile @@ -73,15 +70,15 @@ class ProcessTVShow(object): @property def result(self, pre=True): # type: (bool) -> AnyStr - return (('
<br />', u'\n')[pre]).join(self._output)
+        return (('<br />
', '\n')[pre]).join(self._output) def _buffer(self, text=None): if None is not text: self._output.append(text) if self.webhandler: - logger_msg = re.sub(r'(?i)', '\n', text) - logger_msg = re.sub('(?i)]+>([^<]+)<[/]a>', r'\1', logger_msg) - self.webhandler('%s%s' % (logger_msg, u'\n')) + logger_msg = re.sub(r'(?i)', '\n', text) + logger_msg = re.sub('(?i)]+>([^<]+)', r'\1', logger_msg) + self.webhandler('%s%s' % (logger_msg, '\n')) def _log_helper(self, message, log_level=logger.DEBUG): """ @@ -91,9 +88,9 @@ class ProcessTVShow(object): :param log_level: log level :type log_level: int """ - logger_msg = re.sub(r'(?i)\.*', '', message) - logger_msg = re.sub('(?i)]+>([^<]+)<[/]a>', r'\1', logger_msg) - logger.log(u'%s' % logger_msg, log_level) + logger_msg = re.sub(r'(?i)\.*', '', message) + logger_msg = re.sub('(?i)]+>([^<]+)', r'\1', logger_msg) + logger.log(f'{logger_msg}', log_level) self._buffer(message) return @@ -124,7 +121,7 @@ class ProcessTVShow(object): :rtype: bool """ # check if it's a folder - if not ek.ek(os.path.isdir, folder): + if not os.path.isdir(folder): return False # make sure it isn't TV_DOWNLOAD_DIR @@ -139,14 +136,14 @@ class ProcessTVShow(object): try: shutil.rmtree(folder) except (OSError, IOError) as e: - logger.log(u'Warning: unable to delete folder: %s: %s' % (folder, ex(e)), logger.WARNING) + logger.warning(f'Warning: unable to delete folder: {folder}: {ex(e)}') return False - if ek.ek(os.path.isdir, folder): - logger.log(u'Warning: unable to delete folder: %s' % folder, logger.WARNING) + if os.path.isdir(folder): + logger.warning(f'Warning: unable to delete folder: {folder}') return False - self._log_helper(u'Deleted folder ' + folder, logger.MESSAGE) + self._log_helper(f'Deleted folder {folder}', logger.MESSAGE) return True def _delete_files(self, process_path, notwanted_files, force=False): @@ -164,27 +161,27 @@ class ProcessTVShow(object): # Delete all file not needed for cur_file in notwanted_files: - cur_file_path = ek.ek(os.path.join, process_path, cur_file) + cur_file_path = os.path.join(process_path, cur_file) - if not ek.ek(os.path.isfile, cur_file_path): + if not os.path.isfile(cur_file_path): continue # Prevent error when a notwantedfiles is an associated files # check first the read-only attribute - file_attribute = ek.ek(os.stat, cur_file_path)[0] + file_attribute = os.stat(cur_file_path)[0] if not file_attribute & stat.S_IWRITE: # File is read-only, so make it writeable - self._log_helper(u'Changing ReadOnly flag for file ' + cur_file) + self._log_helper(f'Changing ReadOnly flag for file {cur_file}') try: - ek.ek(os.chmod, cur_file_path, stat.S_IWRITE) + os.chmod(cur_file_path, stat.S_IWRITE) except OSError as e: - self._log_helper(u'Cannot change permissions of %s: %s' % (cur_file_path, ex(e))) + self._log_helper(f'Cannot change permissions of {cur_file_path}: {ex(e)}') removal_type = helpers.remove_file(cur_file_path) - if ek.ek(os.path.isfile, cur_file_path): + if os.path.isfile(cur_file_path): result = False else: - self._log_helper(u'%s file %s' % (removal_type, cur_file)) + self._log_helper(f'{removal_type} file {cur_file}') return result @@ -212,7 +209,7 @@ class ProcessTVShow(object): show_obj = helpers.find_show_by_id({int(sql_result[-1]['indexer']): int(sql_result[-1]['showid'])}, check_multishow=True) if hasattr(show_obj, 'name'): - logger.log('Found Show: %s in snatch history for: %s' % (show_obj.name, name), logger.DEBUG) + logger.debug('Found Show: %s in snatch history for: %s' % (show_obj.name, name)) except 
MultipleShowObjectsException: show_obj = None return show_obj @@ -254,7 +251,7 @@ class ProcessTVShow(object): video_size = 0 for cur_video_file in videofiles: try: - cur_video_size = ek.ek(os.path.getsize, ek.ek(os.path.join, path, cur_video_file)) + cur_video_size = os.path.getsize(os.path.join(path, cur_video_file)) except (BaseException, Exception): continue @@ -263,7 +260,7 @@ class ProcessTVShow(object): video_pick = cur_video_file if video_pick: - vid_filename = ek.ek(os.path.splitext, video_pick)[0] + vid_filename = os.path.splitext(video_pick)[0] # check if filename is garbage, disregard it if re.search(r'^[a-zA-Z0-9]+$', vid_filename): return @@ -283,7 +280,7 @@ class ProcessTVShow(object): build_path = (lambda old_path: '%s%s' % (helpers.real_path(old_path).rstrip(os.path.sep), os.path.sep)) process_path = build_path(path) - for parent in map_iter(lambda p: build_path(p), sickgear.ROOT_DIRS.split('|')[1:]): + for parent in map(lambda p: build_path(p), sickgear.ROOT_DIRS.split('|')[1:]): if process_path.startswith(parent): return parent.rstrip(os.path.sep) @@ -315,27 +312,26 @@ class ProcessTVShow(object): """ # if they passed us a real directory then assume it's the one we want - if dir_name and ek.ek(os.path.isdir, long_path(dir_name)): - dir_name = long_path(ek.ek(os.path.realpath, long_path(dir_name))) + if dir_name and os.path.isdir(long_path(dir_name)): + dir_name = long_path(os.path.realpath(long_path(dir_name))) # if the client and SickGear are not on the same machine translate the directory in a network directory - elif dir_name and sickgear.TV_DOWNLOAD_DIR and ek.ek(os.path.isdir, sickgear.TV_DOWNLOAD_DIR)\ - and ek.ek(os.path.normpath, dir_name) != ek.ek(os.path.normpath, sickgear.TV_DOWNLOAD_DIR): - dir_name = ek.ek(os.path.join, sickgear.TV_DOWNLOAD_DIR, - ek.ek(os.path.abspath, dir_name).split(os.path.sep)[-1]) - self._log_helper(u'SickGear PP Config, completed TV downloads folder: ' + sickgear.TV_DOWNLOAD_DIR) + elif dir_name and sickgear.TV_DOWNLOAD_DIR and os.path.isdir(sickgear.TV_DOWNLOAD_DIR)\ + and os.path.normpath(dir_name) != os.path.normpath(sickgear.TV_DOWNLOAD_DIR): + dir_name = os.path.join(sickgear.TV_DOWNLOAD_DIR, os.path.abspath(dir_name).split(os.path.sep)[-1]) + self._log_helper(f'SickGear PP Config, completed TV downloads folder: {sickgear.TV_DOWNLOAD_DIR}') if dir_name: - self._log_helper(u'Checking folder... ' + dir_name) + self._log_helper(f'Checking folder... {dir_name}') # if we didn't find a real directory then process "failed" or just quit - if not dir_name or not ek.ek(os.path.isdir, dir_name): + if not dir_name or not os.path.isdir(dir_name): if nzb_name and failed: self._process_failed(dir_name, nzb_name, show_obj=show_obj) else: - self._log_helper(u'Unable to figure out what folder to process. ' + - u'If your downloader and SickGear aren\'t on the same PC then make sure ' + - u'you fill out your completed TV download folder in the PP config.') + self._log_helper('Unable to figure out what folder to process. 
' + 'If your downloader and SickGear aren\'t on the same PC then make sure ' + 'you fill out your completed TV download folder in the PP config.') return self.result parent = self.find_parent(dir_name) @@ -351,18 +347,18 @@ class ProcessTVShow(object): show_obj = self.check_name(re.sub(r'\.(nzb|torrent)$', '', nzb_name, flags=re.I)) if None is show_obj and dir_name: - show_obj = self.check_name(ek.ek(os.path.basename, dir_name)) + show_obj = self.check_name(os.path.basename(dir_name)) path, dirs, files = self._get_path_dir_files(dir_name, nzb_name, pp_type) - if sickgear.POSTPONE_IF_SYNC_FILES and any(filter_iter(helpers.is_sync_file, files)): - self._log_helper(u'Found temporary sync files, skipping post process', logger.ERROR) + if sickgear.POSTPONE_IF_SYNC_FILES and any(filter(helpers.is_sync_file, files)): + self._log_helper('Found temporary sync files, skipping post process', logger.ERROR) return self.result if not process_method: process_method = sickgear.PROCESS_METHOD - self._log_helper(u'Processing folder... %s' % path) + self._log_helper(f'Processing folder... {path}') work_files = [] joined = self.join(path) @@ -370,27 +366,27 @@ class ProcessTVShow(object): work_files += [joined] rar_files, rarfile_history = self.unused_archives( - path, filter_list(helpers.is_first_rar_volume, files), pp_type, process_method) + path, list(filter(helpers.is_first_rar_volume, files)), pp_type, process_method) rar_content = self._unrar(path, rar_files, force) if self.fail_detected: self._process_failed(dir_name, nzb_name, show_obj=show_obj) self.update_history_tab() return self.result - rar_content = [x for x in rar_content if not helpers.is_link(ek.ek(os.path.join, path, x))] + rar_content = [x for x in rar_content if not helpers.is_link(os.path.join(path, x))] path, dirs, files = self._get_path_dir_files(dir_name, nzb_name, pp_type) - files = [x for x in files if not helpers.is_link(ek.ek(os.path.join, path, x))] - video_files = filter_list(helpers.has_media_ext, files) - video_in_rar = filter_list(helpers.has_media_ext, rar_content) - work_files += [ek.ek(os.path.join, path, item) for item in rar_content] + files = [x for x in files if not helpers.is_link(os.path.join(path, x))] + video_files = list(filter(helpers.has_media_ext, files)) + video_in_rar = list(filter(helpers.has_media_ext, rar_content)) + work_files += [os.path.join(path, item) for item in rar_content] if 0 < len(files): - self._log_helper(u'Process file%s: %s' % (helpers.maybe_plural(files), str(files))) + self._log_helper(f'Process file{helpers.maybe_plural(files)}: {str(files)}') if 0 < len(video_files): - self._log_helper(u'Process video file%s: %s' % (helpers.maybe_plural(video_files), str(video_files))) + self._log_helper(f'Process video file{helpers.maybe_plural(video_files)}: {str(video_files)}') if 0 < len(rar_content): - self._log_helper(u'Process rar content: ' + str(rar_content)) + self._log_helper(f'Process rar content: {rar_content}') if 0 < len(video_in_rar): - self._log_helper(u'Process video%s in rar: %s' % (helpers.maybe_plural(video_in_rar), str(video_in_rar))) + self._log_helper(f'Process video{helpers.maybe_plural(video_in_rar)} in rar: {str(video_in_rar)}') # If nzb_name is set and there's more than one videofile in the folder, files will be lost (overwritten). 
nzb_name_original = nzb_name @@ -408,7 +404,7 @@ class ProcessTVShow(object): if None is show_obj: soh = self.check_video_filenames(path, video_in_rar) self._process_media(path, video_in_rar, nzb_name, 'move', force, force_replace, show_obj=soh) - self._delete_files(path, [ek.ek(os.path.relpath, item, path) for item in work_files], force=True) + self._delete_files(path, [os.path.relpath(item, path) for item in work_files], force=True) video_batch = set(video_files) - set(video_in_rar) else: video_batch = video_files @@ -418,7 +414,7 @@ class ProcessTVShow(object): video_pick = [''] video_size = 0 for cur_video_file in video_batch: - cur_video_size = ek.ek(os.path.getsize, ek.ek(os.path.join, path, cur_video_file)) + cur_video_size = os.path.getsize(os.path.join(path, cur_video_file)) if 0 == video_size or cur_video_size > video_size: video_size = cur_video_size video_pick = [cur_video_file] @@ -429,8 +425,7 @@ class ProcessTVShow(object): force, force_replace, use_trash=cleanup, show_obj=show_obj) except OSError as e: - logger.log('Batch skipped, %s%s' % - (ex(e), e.filename and (' (file %s)' % e.filename) or ''), logger.WARNING) + logger.warning('Batch skipped, %s%s' % (ex(e), e.filename and (' (file %s)' % e.filename) or '')) # Process video files in TV subdirectories for directory in [x for x in dirs if self._validate_dir( @@ -439,10 +434,10 @@ class ProcessTVShow(object): # self._set_process_success(reset=True) - for walk_path, walk_dir, files in ek.ek(os.walk, ek.ek(os.path.join, path, directory), topdown=False): + for walk_path, walk_dir, files in os.walk(os.path.join(path, directory), topdown=False): - if sickgear.POSTPONE_IF_SYNC_FILES and any(filter_iter(helpers.is_sync_file, files)): - self._log_helper(u'Found temporary sync files, skipping post process', logger.ERROR) + if sickgear.POSTPONE_IF_SYNC_FILES and any(filter(helpers.is_sync_file, files)): + self._log_helper('Found temporary sync files, skipping post process', logger.ERROR) return self.result parent = self.find_parent(walk_path) @@ -452,20 +447,20 @@ class ProcessTVShow(object): continue # Ignore any symlinks at this stage to avoid the potential for unraring a symlinked archive - files = [x for x in files if not helpers.is_link(ek.ek(os.path.join, walk_path, x))] + files = [x for x in files if not helpers.is_link(os.path.join(walk_path, x))] rar_files, rarfile_history = self.unused_archives( - walk_path, filter_list(helpers.is_first_rar_volume, files), pp_type, process_method, + walk_path, list(filter(helpers.is_first_rar_volume, files)), pp_type, process_method, rarfile_history) rar_content = self._unrar(walk_path, rar_files, force) - work_files += [ek.ek(os.path.join, walk_path, item) for item in rar_content] + work_files += [os.path.join(walk_path, item) for item in rar_content] if self.fail_detected: self._process_failed(dir_name, nzb_name, show_obj=self.show_obj_helper(show_obj, directory)) continue - rar_content = [x for x in rar_content if not helpers.is_link(ek.ek(os.path.join, walk_path, x))] + rar_content = [x for x in rar_content if not helpers.is_link(os.path.join(walk_path, x))] files = list(set(files + rar_content)) - video_files = filter_list(helpers.has_media_ext, files) - video_in_rar = filter_list(helpers.has_media_ext, rar_content) + video_files = list(filter(helpers.has_media_ext, files)) + video_in_rar = list(filter(helpers.has_media_ext, rar_content)) notwanted_files = [x for x in files if x not in video_files] # Don't Link media when the media is extracted from a rar in the same path @@ -483,7 
+478,7 @@ class ProcessTVShow(object): video_pick = [''] video_size = 0 for cur_video_file in video_batch: - cur_video_size = ek.ek(os.path.getsize, ek.ek(os.path.join, walk_path, cur_video_file)) + cur_video_size = os.path.getsize(os.path.join(walk_path, cur_video_file)) if 0 == video_size or cur_video_size > video_size: video_size = cur_video_size @@ -497,8 +492,7 @@ class ProcessTVShow(object): self.check_video_filenames(walk_dir, video_pick))) except OSError as e: - logger.log('Batch skipped, %s%s' % - (ex(e), e.filename and (' (file %s)' % e.filename) or ''), logger.WARNING) + logger.warning(f'Batch skipped, {ex(e)}{e.filename and (" (file %s)" % e.filename) or ""}') if process_method in ('hardlink', 'symlink') and video_in_rar: self._delete_files(walk_path, rar_content) @@ -512,14 +506,12 @@ class ProcessTVShow(object): self._delete_files(walk_path, notwanted_files) if 'move' == process_method \ - and ek.ek(os.path.normpath, sickgear.TV_DOWNLOAD_DIR) != ek.ek(os.path.normpath, - walk_path): + and os.path.normpath(sickgear.TV_DOWNLOAD_DIR) != os.path.normpath(walk_path): self._delete_folder(walk_path, check_empty=False) if 'copy' == process_method and work_files: - self._delete_files(path, [ek.ek(os.path.relpath, item, path) for item in work_files], force=True) - for f in sorted(list(set([ek.ek(os.path.dirname, item) for item in work_files]) - {path}), - key=len, reverse=True): + self._delete_files(path, [os.path.relpath(item, path) for item in work_files], force=True) + for f in sorted(list(set([os.path.dirname(item) for item in work_files]) - {path}), key=len, reverse=True): self._delete_folder(f) def _bottom_line(text, log_level=logger.DEBUG): @@ -532,12 +524,13 @@ class ProcessTVShow(object): if self.any_vid_processed: if not self.files_failed: - _bottom_line(u'Successfully processed.', logger.MESSAGE) + _bottom_line('Successfully processed.', logger.MESSAGE) else: - _bottom_line(u'Successfully processed at least one video file%s.' % - (', others were skipped', ' and skipped another')[1 == self.files_failed], logger.MESSAGE) + _bottom_line(f'Successfully processed at least one video file' + f'{(", others were skipped", " and skipped another")[1 == self.files_failed]}.', + logger.MESSAGE) else: - _bottom_line(u'Failed! Did not process any files.', logger.WARNING) + _bottom_line('Failed! 
Did not process any files.', logger.WARNING) return self.result @@ -561,7 +554,7 @@ class ProcessTVShow(object): if ('auto' == pp_type and sickgear.PROCESS_AUTOMATICALLY and 'copy' == process_method and sickgear.UNPACK): - archive_history_file = ek.ek(os.path.join, sickgear.DATA_DIR, 'archive_history.txt') + archive_history_file = os.path.join(sickgear.DATA_DIR, 'archive_history.txt') if not archive_history: try: @@ -572,13 +565,13 @@ class ProcessTVShow(object): init_history_cnt = len(archive_history) - archive_history = {k_arc: v for k_arc, v in iteritems(archive_history) if ek.ek(os.path.isfile, k_arc)} + archive_history = {k_arc: v for k_arc, v in iteritems(archive_history) if os.path.isfile(k_arc)} - unused_files = list(set([ek.ek(os.path.join, path, x) for x in archives]) - set(iterkeys(archive_history))) - archives = [ek.ek(os.path.basename, x) for x in unused_files] + unused_files = list(set([os.path.join(path, x) for x in archives]) - set(iterkeys(archive_history))) + archives = [os.path.basename(x) for x in unused_files] if unused_files: for f in unused_files: - archive_history.setdefault(f, int(timestamp_near(datetime.datetime.utcnow()))) + archive_history.setdefault(f, SGDatetime.timestamp_near(datetime.datetime.utcnow())) if init_history_cnt != len(archive_history): try: @@ -605,24 +598,24 @@ class ProcessTVShow(object): :return: success :rtype: bool """ - self._log_helper(u'Processing sub dir: ' + dir_name) + self._log_helper(f'Processing sub dir: {dir_name}') - if ek.ek(os.path.basename, dir_name).startswith('_FAILED_'): - self._log_helper(u'The directory name indicates it failed to extract.') + if os.path.basename(dir_name).startswith('_FAILED_'): + self._log_helper('The directory name indicates it failed to extract.') failed = True - elif ek.ek(os.path.basename, dir_name).startswith('_UNDERSIZED_'): - self._log_helper(u'The directory name indicates that it was previously rejected for being undersized.') + elif os.path.basename(dir_name).startswith('_UNDERSIZED_'): + self._log_helper('The directory name indicates that it was previously rejected for being undersized.') failed = True - elif ek.ek(os.path.basename, dir_name).upper().startswith('_UNPACK'): - self._log_helper(u'The directory name indicates that this release is in the process of being unpacked.') + elif os.path.basename(dir_name).upper().startswith('_UNPACK'): + self._log_helper('The directory name indicates that this release is in the process of being unpacked.') return False if failed: - self._process_failed(ek.ek(os.path.join, path, dir_name), nzb_name_original, show_obj=show_obj) + self._process_failed(os.path.join(path, dir_name), nzb_name_original, show_obj=show_obj) return False if helpers.is_hidden_folder(dir_name): - self._log_helper(u'Ignoring hidden folder: ' + dir_name) + self._log_helper(f'Ignoring hidden folder: {dir_name}') return False # make sure the directory isn't inside a show directory @@ -630,22 +623,20 @@ class ProcessTVShow(object): sql_result = my_db.select('SELECT * FROM tv_shows') for cur_result in sql_result: - if dir_name.lower().startswith(ek.ek(os.path.realpath, cur_result['location']).lower() + os.sep)\ - or dir_name.lower() == ek.ek(os.path.realpath, cur_result['location']).lower(): - self._log_helper( - u'Found an episode that has already been moved to its show dir, skipping', - logger.ERROR) + if dir_name.lower().startswith(os.path.realpath(cur_result['location']).lower() + os.sep) \ + or dir_name.lower() == os.path.realpath(cur_result['location']).lower(): + 
self._log_helper('Found an episode that has already been moved to its show dir, skipping', logger.ERROR) return False # Get the videofile list for the next checks all_files = [] all_dirs = [] process_path = None - for process_path, process_dir, fileList in ek.ek(os.walk, ek.ek(os.path.join, path, dir_name), topdown=False): + for process_path, process_dir, fileList in os.walk(os.path.join(path, dir_name), topdown=False): all_dirs += process_dir all_files += fileList - video_files = filter_list(helpers.has_media_ext, all_files) + video_files = list(filter(helpers.has_media_ext, all_files)) all_dirs.append(dir_name) # check if the directory have at least one tv video file @@ -665,7 +656,7 @@ class ProcessTVShow(object): if sickgear.UNPACK and process_path and all_files: # Search for packed release - packed_files = filter_list(helpers.is_first_rar_volume, all_files) + packed_files = list(filter(helpers.is_first_rar_volume, all_files)) for packed in packed_files: try: @@ -688,30 +679,29 @@ class ProcessTVShow(object): unpacked_files = [] if 'win32' == sys.platform: - rarfile.UNRAR_TOOL = ek.ek(os.path.join, sickgear.PROG_DIR, 'lib', 'rarfile', 'UnRAR.exe') + rarfile.UNRAR_TOOL = os.path.join(sickgear.PROG_DIR, 'lib', 'rarfile', 'UnRAR.exe') if sickgear.UNPACK and rar_files: - self._log_helper(u'Packed releases detected: ' + str(rar_files)) + self._log_helper(f'Packed releases detected: {rar_files}') for archive in rar_files: - self._log_helper(u'Unpacking archive: ' + archive) + self._log_helper(f'Unpacking archive: {archive}') try: - rar_handle = rarfile.RarFile(ek.ek(os.path.join, path, archive)) + rar_handle = rarfile.RarFile(os.path.join(path, archive)) except (BaseException, Exception): - self._log_helper(u'Failed to open archive: %s' % archive, logger.ERROR) + self._log_helper(f'Failed to open archive: {archive}', logger.ERROR) self._set_process_success(False) continue try: # Skip extraction if any file in archive has previously been extracted skip_file = False - for file_in_archive in [ek.ek(os.path.basename, x.filename) + for file_in_archive in [os.path.basename(x.filename) for x in rar_handle.infolist() if not x.is_dir()]: if self._already_postprocessed(path, file_in_archive, force): - self._log_helper( - u'Archive file already processed, extraction skipped: ' + file_in_archive) + self._log_helper(f'Archive file already processed, extraction skipped: {file_in_archive}') skip_file = True break @@ -721,20 +711,18 @@ class ProcessTVShow(object): raise rarfile.PasswordRequired rar_handle.extractall(path=path) - rar_content = [ek.ek(os.path.normpath, x.filename) - for x in rar_handle.infolist() if not x.is_dir()] + rar_content = [os.path.normpath(x.filename) for x in rar_handle.infolist() if not x.is_dir()] renamed = self.cleanup_names(path, rar_content) cur_unpacked = rar_content if not renamed else \ - (list(set(rar_content) - set(iterkeys(renamed))) + list_values(renamed)) - self._log_helper(u'Unpacked content: [u\'%s\']' % '\', u\''.join(map_iter(text_type, - cur_unpacked))) + (list(set(rar_content) - set(iterkeys(renamed))) + list(renamed.values())) + self._log_helper('Unpacked content: ["%s"]' % '", "'.join(map(text_type, cur_unpacked))) unpacked_files += cur_unpacked except (rarfile.PasswordRequired, rarfile.RarWrongPassword): - self._log_helper(u'Failed to unpack archive PasswordRequired: %s' % archive, logger.ERROR) + self._log_helper(f'Failed to unpack archive PasswordRequired: {archive}', logger.ERROR) self._set_process_success(False) self.fail_detected = True except 
(BaseException, Exception): - self._log_helper(u'Failed to unpack archive: %s' % archive, logger.ERROR) + self._log_helper(f'Failed to unpack archive: {archive}', logger.ERROR) self._set_process_success(False) finally: rar_handle.close() @@ -744,13 +732,13 @@ class ProcessTVShow(object): # check for passworded rar's for archive in rar_files: try: - rar_handle = rarfile.RarFile(ek.ek(os.path.join, path, archive)) + rar_handle = rarfile.RarFile(os.path.join(path, archive)) except (BaseException, Exception): - self._log_helper(u'Failed to open archive: %s' % archive, logger.ERROR) + self._log_helper(f'Failed to open archive: {archive}', logger.ERROR) continue try: if rar_handle.needs_password(): - self._log_helper(u'Failed to unpack archive PasswordRequired: %s' % archive, logger.ERROR) + self._log_helper(f'Failed to unpack archive PasswordRequired: {archive}', logger.ERROR) self._set_process_success(False) self.failure_detected = True rar_handle.close() @@ -773,7 +761,7 @@ class ProcessTVShow(object): old_name = None new_name = None params = { - 'base_name': ek.ek(os.path.basename, directory), + 'base_name': os.path.basename(directory), 'reverse_pattern': re.compile('|'.join([ r'\.\d{2}e\d{2}s\.', r'\.p0(?:63|27|612)\.', r'\.[pi](?:084|675|0801)\.', r'\b[45]62[xh]\.', r'\.yarulb\.', r'\.vtd[hp]\.', r'\.(?:ld[.-]?)?bew\.', r'\.pir.?(?:shv|dov|dvd|bew|db|rb)\.', @@ -797,9 +785,9 @@ class ProcessTVShow(object): for cur_filename in _filenames: - file_name, file_extension = ek.ek(os.path.splitext, cur_filename) - file_path = ek.ek(os.path.join, _dirpath, cur_filename) - dir_name = ek.ek(os.path.dirname, file_path) + file_name, file_extension = os.path.splitext(cur_filename) + file_path = os.path.join(_dirpath, cur_filename) + dir_name = os.path.dirname(file_path) if None is not reverse_pattern.search(file_name): na_parts = season_pattern.search(file_name) @@ -817,34 +805,34 @@ class ProcessTVShow(object): new_filename = file_name[::-1] logger.log('Reversing base filename "%s" to "%s"' % (file_name, new_filename)) try: - ek.ek(os.rename, file_path, ek.ek(os.path.join, _dirpath, new_filename + file_extension)) - is_renamed[ek.ek(os.path.relpath, file_path, directory)] = ek.ek( - os.path.relpath, new_filename + file_extension, directory) + os.rename(file_path, os.path.join(_dirpath, new_filename + file_extension)) + is_renamed[os.path.relpath(file_path, directory)] = \ + os.path.relpath(new_filename + file_extension, directory) except OSError as _e: - logger.log('Error unable to rename file "%s" because %s' % (cur_filename, ex(_e)), logger.ERROR) + logger.error('Error unable to rename file "%s" because %s' % (cur_filename, ex(_e))) elif helpers.has_media_ext(cur_filename) and \ None is not garbage_name.search(file_name) and None is not media_pattern.search(base_name): _num_videos += 1 _old_name = file_path - _new_name = ek.ek(os.path.join, dir_name, '%s%s' % (base_name, file_extension)) + _new_name = os.path.join(dir_name, '%s%s' % (base_name, file_extension)) return is_renamed, _num_videos, _old_name, _new_name if files: is_renamed, num_videos, old_name, new_name = renamer( directory, files, num_videos, old_name, new_name, **params) else: - for cur_dirpath, void, cur_filenames in ek.ek(os.walk, directory): + for cur_dirpath, void, cur_filenames in os.walk(directory): is_renamed, num_videos, old_name, new_name = renamer( cur_dirpath, cur_filenames, num_videos, old_name, new_name, **params) if all([not is_renamed, 1 == num_videos, old_name, new_name]): - try_name = ek.ek(os.path.basename, new_name) - 
logger.log('Renaming file "%s" using dirname as "%s"' % (ek.ek(os.path.basename, old_name), try_name)) + try_name = os.path.basename(new_name) + logger.log('Renaming file "%s" using dirname as "%s"' % (os.path.basename(old_name), try_name)) try: - ek.ek(os.rename, old_name, new_name) - is_renamed[ek.ek(os.path.relpath, old_name, directory)] = ek.ek(os.path.relpath, new_name, directory) + os.rename(old_name, new_name) + is_renamed[os.path.relpath(old_name, directory)] = os.path.relpath(new_name, directory) except OSError as e: - logger.log('Error unable to rename file "%s" because %s' % (old_name, ex(e)), logger.ERROR) + logger.error('Error unable to rename file "%s" because %s' % (old_name, ex(e))) return is_renamed @@ -859,11 +847,11 @@ class ProcessTVShow(object): result = False chunks = {} matcher = re.compile(r'\.[0-9]+$') - for dirpath, void, filenames in ek.ek(os.walk, directory): + for dirpath, void, filenames in os.walk(directory): for filename in filenames: if None is not matcher.search(filename): - maybe_chunk = ek.ek(os.path.join, dirpath, filename) - base_filepath, ext = ek.ek(os.path.splitext, maybe_chunk) + maybe_chunk = os.path.join(dirpath, filename) + base_filepath, ext = os.path.splitext(maybe_chunk) if base_filepath not in chunks: chunks[base_filepath] = [] chunks[base_filepath].append(maybe_chunk) @@ -874,22 +862,22 @@ class ProcessTVShow(object): for base_filepath in chunks: chunks[base_filepath].sort() chunk_set = chunks[base_filepath] - if ek.ek(os.path.isfile, base_filepath): - base_filesize = ek.ek(os.path.getsize, base_filepath) - chunk_sizes = [ek.ek(os.path.getsize, x) for x in chunk_set] + if os.path.isfile(base_filepath): + base_filesize = os.path.getsize(base_filepath) + chunk_sizes = [os.path.getsize(x) for x in chunk_set] largest_chunk = max(chunk_sizes) if largest_chunk >= base_filesize: outfile = '%s.001' % base_filepath if outfile not in chunk_set: try: - ek.ek(os.rename, base_filepath, outfile) + os.rename(base_filepath, outfile) except OSError: - logger.log('Error unable to rename file %s' % base_filepath, logger.ERROR) + logger.error('Error unable to rename file %s' % base_filepath) return result chunk_set.append(outfile) chunk_set.sort() else: - del_dir, del_file = ek.ek(os.path.split, base_filepath) + del_dir, del_file = os.path.split(base_filepath) if not self._delete_files(del_dir, [del_file], force=True): return result else: @@ -934,10 +922,6 @@ class ProcessTVShow(object): if force or not self.any_vid_processed: return False - # Needed for accessing DB with a unicode dir_name - if PY2 and not isinstance(dir_name, text_type): - dir_name = text_type(dir_name, 'utf_8') - parse_result = None try: parse_result = NameParser(convert=True).parse(videofile, cache_result=False) @@ -969,8 +953,8 @@ class ProcessTVShow(object): my_db = db.DBConnection() sql_result = my_db.select('SELECT * FROM tv_episodes WHERE release_name = ?', [dir_name]) if sql_result: - self._log_helper(u'Found a release directory %s that has already been processed,
<br />.. skipping: %s'
-                                 % (showlink, dir_name))
+                self._log_helper(f'Found a release directory {showlink} that has already been processed,<br />
' + f'.. skipping: {dir_name}') if ep_detail_sql: reset_status(parse_result.show_obj.tvid, parse_result.show_obj.prodid, @@ -980,14 +964,12 @@ class ProcessTVShow(object): else: # This is needed for video whose name differ from dir_name - if PY2 and not isinstance(videofile, text_type): - videofile = text_type(videofile, 'utf_8') sql_result = my_db.select( 'SELECT * FROM tv_episodes WHERE release_name = ?', [videofile.rpartition('.')[0]]) if sql_result: - self._log_helper(u'Found a video, but that release %s was already processed,
<br />.. skipping: %s'
-                                 % (showlink, videofile))
+                self._log_helper(f'Found a video, but that release {showlink} was already processed,<br />
' + f'.. skipping: {videofile}') if ep_detail_sql: reset_status(parse_result.show_obj.tvid, parse_result.show_obj.prodid, @@ -1005,10 +987,10 @@ class ProcessTVShow(object): + ' and tv_episodes.status IN (%s)' % ','.join([str(x) for x in common.Quality.DOWNLOADED])\ + ' and history.resource LIKE ?' - sql_result = my_db.select(search_sql, [u'%' + videofile]) + sql_result = my_db.select(search_sql, [f'%{videofile}']) if sql_result: - self._log_helper(u'Found a video, but the episode %s is already processed,
<br />.. skipping: %s'
-                                 % (showlink, videofile))
+                self._log_helper(f'Found a video, but the episode {showlink} is already processed,<br />
' + f'.. skipping: {videofile}') if ep_detail_sql: reset_status(parse_result.show_obj.tvid, parse_result.show_obj.prodid, @@ -1048,7 +1030,7 @@ class ProcessTVShow(object): self._set_process_success(False) continue - cur_video_file_path = ek.ek(os.path.join, process_path, cur_video_file) + cur_video_file_path = os.path.join(process_path, cur_video_file) parent = self.find_parent(cur_video_file_path) if parent: @@ -1065,7 +1047,7 @@ class ProcessTVShow(object): process_fail_message = '' except exceptions_helper.PostProcessingFailed: file_success = False - process_fail_message = '
<br />.. Post Processing Failed'
+                process_fail_message = '<br />
.. Post Processing Failed' self._set_process_success(file_success) @@ -1073,13 +1055,11 @@ class ProcessTVShow(object): self._buffer(processor.log.strip('\n')) if file_success: - self._log_helper(u'Successfully processed ' + cur_video_file, logger.MESSAGE) + self._log_helper(f'Successfully processed {cur_video_file}', logger.MESSAGE) elif self.any_vid_processed: - self._log_helper(u'Warning fail for %s%s' % (cur_video_file_path, process_fail_message), - logger.WARNING) + self._log_helper(f'Warning fail for {cur_video_file_path}{process_fail_message}', logger.WARNING) else: - self._log_helper(u'Did not use file %s%s' % (cur_video_file_path, process_fail_message), - logger.WARNING) + self._log_helper(f'Did not use file {cur_video_file_path}{process_fail_message}', logger.WARNING) @staticmethod def _get_path_dir_files(dir_name, nzb_name, pp_type): @@ -1097,16 +1077,16 @@ class ProcessTVShow(object): if dir_name == sickgear.TV_DOWNLOAD_DIR and not nzb_name or 'manual' == pp_type: # Scheduled Media Process Active # Get at first all the subdir in the dir_name - for path, dirs, files in ek.ek(os.walk, dir_name): - files = [x for x in files if not helpers.is_link(ek.ek(os.path.join, path, x))] + for path, dirs, files in os.walk(dir_name): + files = [x for x in files if not helpers.is_link(os.path.join(path, x))] break else: - path, dirs = ek.ek(os.path.split, dir_name) # Script Media Process + path, dirs = os.path.split(dir_name) # Script Media Process if None is not nzb_name and not nzb_name.endswith('.nzb') and \ - ek.ek(os.path.isfile, ek.ek(os.path.join, dir_name, nzb_name)): + os.path.isfile(os.path.join(dir_name, nzb_name)): # For single torrent file without directory dirs = [] - files = [ek.ek(os.path.join, dir_name, nzb_name)] + files = [os.path.join(dir_name, nzb_name)] else: dirs = [dirs] files = [] @@ -1145,13 +1125,12 @@ class ProcessTVShow(object): if sickgear.DELETE_FAILED and self.any_vid_processed: self._delete_folder(dir_name, check_empty=False) - task = u'Failed download processing' + task = 'Failed download processing' if self.any_vid_processed: - self._log_helper(u'Successful %s: (%s, %s)' - % (task.lower(), str(nzb_name), dir_name), logger.MESSAGE) + self._log_helper(f'Successful {task.lower()}: ({str(nzb_name)}, {dir_name})', logger.MESSAGE) else: - self._log_helper(u'%s failed: (%s, %s): %s' - % (task, str(nzb_name), dir_name, process_fail_message), logger.WARNING) + self._log_helper(f'{task} failed: ({str(nzb_name)}, {dir_name}): {process_fail_message}', + logger.WARNING) def process_minimal(self, nzb_name, show_obj, failed, webhandler): if failed: diff --git a/sickgear/properFinder.py b/sickgear/properFinder.py index b27f9c78..1397e06a 100644 --- a/sickgear/properFinder.py +++ b/sickgear/properFinder.py @@ -21,8 +21,6 @@ import re import threading import traceback -# noinspection PyPep8Naming -import encodingKludge as ek from exceptions_helper import ex, MultipleShowObjectsException, AuthException import sickgear @@ -32,9 +30,9 @@ from .common import ARCHIVED, FAILED, DOWNLOADED, SNATCHED_ANY, SNATCHED_PROPER, NeededQualities, Quality from .history import dateFormat from .name_parser.parser import InvalidNameException, InvalidShowException, NameParser -from .sgdatetime import timestamp_near +from .sgdatetime import SGDatetime -from _23 import filter_iter, filter_list, list_values, map_consume, map_list +from _23 import map_consume from six import string_types # noinspection PyUnreachableCode @@ -75,7 +73,7 @@ def search_propers(provider_proper_obj=None): proper_sch = 
sickgear.proper_finder_scheduler if None is proper_sch.start_time: - run_in = proper_sch.lastRun + proper_sch.cycleTime - datetime.datetime.now() + run_in = proper_sch.last_run + proper_sch.cycle_time - datetime.datetime.now() run_at = ', next check ' if datetime.timedelta() > run_in: run_at += 'imminent' @@ -133,7 +131,7 @@ def get_old_proper_level(show_obj, tvid, prodid, season, episode_numbers, old_st [tvid, prodid, season, episode]) if not result or not isinstance(result[0]['resource'], string_types) or not result[0]['resource']: continue - nq = Quality.sceneQuality(result[0]['resource'], show_obj.is_anime) + nq = Quality.scene_quality(result[0]['resource'], show_obj.is_anime) if nq != new_quality: continue try: @@ -187,7 +185,7 @@ def load_webdl_types(): try: for line in url_data.splitlines(): try: - (key, val) = line.strip().split(u'::', 1) + (key, val) = line.strip().split('::', 1) except (BaseException, Exception): continue if None is key or None is val: @@ -216,14 +214,14 @@ def load_webdl_types(): def _search_provider(cur_provider, provider_propers, aired_since_shows, recent_shows, recent_anime): # type: (GenericProvider, List, datetime.datetime, List[Tuple[int, int]], List[Tuple[int, int]]) -> None try: - # we need to extent the referenced list from parameter to update the original var + # we need to extend the referenced list from parameter to update the original var provider_propers.extend(cur_provider.find_propers(search_date=aired_since_shows, shows=recent_shows, anime=recent_anime)) except AuthException as e: - logger.log('Authentication error: %s' % ex(e), logger.ERROR) + logger.error('Authentication error: %s' % ex(e)) except (BaseException, Exception) as e: - logger.log('Error while searching %s, skipping: %s' % (cur_provider.name, ex(e)), logger.ERROR) - logger.log(traceback.format_exc(), logger.ERROR) + logger.error('Error while searching %s, skipping: %s' % (cur_provider.name, ex(e))) + logger.error(traceback.format_exc()) if not provider_propers: logger.log('No Proper releases found at [%s]' % cur_provider.name) @@ -253,9 +251,9 @@ def _get_proper_list(aired_since_shows, # type: datetime.datetime # filter provider list for: # 1. from recent search: recent search enabled providers # 2. 
native proper search: active search enabled providers - provider_list = filter_list( + provider_list = list(filter( lambda p: p.is_active() and (p.enable_recentsearch, p.enable_backlog)[None is proper_dict], - sickgear.providers.sortedProviderList()) + sickgear.providers.sorted_sources())) search_threads = [] if None is proper_dict: @@ -308,8 +306,8 @@ def _get_proper_list(aired_since_shows, # type: datetime.datetime cur_proper.parsed_show_obj = (cur_proper.parsed_show_obj or helpers.find_show_by_id(parse_result.show_obj.tvid_prodid)) if None is cur_proper.parsed_show_obj: - logger.log('Skip download; cannot find show with ID [%s] at %s' % - (cur_proper.prodid, sickgear.TVInfoAPI(cur_proper.tvid).name), logger.ERROR) + logger.error('Skip download; cannot find show with ID [%s] at %s' % + (cur_proper.prodid, sickgear.TVInfoAPI(cur_proper.tvid).name)) continue cur_proper.tvid = cur_proper.parsed_show_obj.tvid @@ -321,26 +319,25 @@ def _get_proper_list(aired_since_shows, # type: datetime.datetime # only get anime Proper if it has release group and version if parse_result.is_anime and not parse_result.release_group and -1 == parse_result.version: - logger.log('Ignored Proper with no release group and version in name [%s]' % cur_proper.name, - logger.DEBUG) + logger.debug('Ignored Proper with no release group and version in name [%s]' % cur_proper.name) continue if not show_name_helpers.pass_wordlist_checks(cur_proper.name, parse=False, indexer_lookup=False, show_obj=cur_proper.parsed_show_obj): - logger.log('Ignored unwanted Proper [%s]' % cur_proper.name, logger.DEBUG) + logger.debug('Ignored unwanted Proper [%s]' % cur_proper.name) continue re_x = dict(re_prefix='.*', re_suffix='.*') result = show_name_helpers.contains_any(cur_proper.name, cur_proper.parsed_show_obj.rls_ignore_words, rx=cur_proper.parsed_show_obj.rls_ignore_words_regex, **re_x) if None is not result and result: - logger.log('Ignored Proper containing ignore word [%s]' % cur_proper.name, logger.DEBUG) + logger.debug('Ignored Proper containing ignore word [%s]' % cur_proper.name) continue result = show_name_helpers.contains_any(cur_proper.name, cur_proper.parsed_show_obj.rls_require_words, rx=cur_proper.parsed_show_obj.rls_require_words_regex, **re_x) if None is not result and not result: - logger.log('Ignored Proper for not containing any required word [%s]' % cur_proper.name, logger.DEBUG) + logger.debug('Ignored Proper for not containing any required word [%s]' % cur_proper.name) continue cur_size = getattr(cur_proper, 'size', None) @@ -364,8 +361,8 @@ def _get_proper_list(aired_since_shows, # type: datetime.datetime # only keep the Proper if we already retrieved the same quality ep (don't get better/worse ones) # check if we want this release: same quality as current, current has correct status # restrict other release group releases to Proper's - old_status, old_quality = Quality.splitCompositeStatus(int(sql_result[0]['status'])) - cur_proper.quality = Quality.nameQuality(cur_proper.name, parse_result.is_anime) + old_status, old_quality = Quality.split_composite_status(int(sql_result[0]['status'])) + cur_proper.quality = Quality.name_quality(cur_proper.name, parse_result.is_anime) cur_proper.is_repack, cur_proper.properlevel = Quality.get_proper_level( parse_result.extra_info_no_name(), parse_result.version, parse_result.is_anime, check_is_repack=True) cur_proper.proper_level = cur_proper.properlevel # local non global value @@ -421,15 +418,15 @@ def _get_proper_list(aired_since_shows, # type: datetime.datetime 
old_webdl_type = get_webdl_type(old_extra_no_name, old_name) new_webdl_type = get_webdl_type(parse_result.extra_info_no_name(), cur_proper.name) if old_webdl_type != new_webdl_type: - logger.log('Ignored Proper webdl source [%s], does not match existing webdl source [%s] for [%s]' - % (old_webdl_type, new_webdl_type, cur_proper.name), logger.DEBUG) + logger.debug(f'Ignored Proper webdl source [{old_webdl_type}], does not match existing webdl source' + f' [{new_webdl_type}] for [{cur_proper.name}]') continue # for webdls, prevent Propers from different groups log_same_grp = 'Ignored Proper from release group [%s] does not match existing group [%s] for [%s]' \ % (parse_result.release_group, old_release_group, cur_proper.name) if sickgear.PROPERS_WEBDL_ONEGRP and is_web and not same_release_group: - logger.log(log_same_grp, logger.DEBUG) + logger.debug(log_same_grp) continue # check if we actually want this Proper (if it's the right release group and a higher version) @@ -438,7 +435,7 @@ def _get_proper_list(aired_since_shows, # type: datetime.datetime if not (-1 < old_version < parse_result.version): continue if not same_release_group: - logger.log(log_same_grp, logger.DEBUG) + logger.debug(log_same_grp) continue found_msg = 'Found anime Proper v%s to replace v%s' % (parse_result.version, old_version) else: @@ -456,7 +453,7 @@ def _get_proper_list(aired_since_shows, # type: datetime.datetime # skip if the episode has never downloaded, because a previous quality is required to match the Proper if not len(history_results): - logger.log('Ignored Proper cannot find a recent history item for [%s]' % cur_proper.name, logger.DEBUG) + logger.debug('Ignored Proper cannot find a recent history item for [%s]' % cur_proper.name) continue # make sure that none of the existing history downloads are the same Proper as the download candidate @@ -466,14 +463,14 @@ def _get_proper_list(aired_since_shows, # type: datetime.datetime for hitem in history_results: # if the result exists in history already we need to skip it if clean_proper_name == _generic_name(helpers.remove_non_release_groups( - ek.ek(os.path.basename, hitem['resource']))): + os.path.basename(hitem['resource']))): is_same = True break if is_same: logger.log('Ignored Proper already in history [%s]' % cur_proper.name) continue - logger.log(found_msg, logger.DEBUG) + logger.debug(found_msg) # finish populating the Proper instance # cur_proper.show_obj = cur_proper.parsed_show_obj.prodid @@ -489,7 +486,7 @@ def _get_proper_list(aired_since_shows, # type: datetime.datetime cur_provider.log_result('Propers', len(propers), '%s' % cur_provider.name) - return list_values(propers) + return list(propers.values()) def _download_propers(proper_list): @@ -509,24 +506,24 @@ def _download_propers(proper_list): # get verified list; sort the list of unique Propers for highest proper_level, newest first for cur_proper in sorted( - filter_iter(lambda p: p not in consumed_proper, - # allows Proper to fail or be rejected and another to be tried (with a different name) - filter_iter(lambda p: _epid(p) not in downloaded_epid, proper_list)), + filter(lambda p: p not in consumed_proper, + # allows Proper to fail or be rejected and another to be tried (with a different name) + filter(lambda p: _epid(p) not in downloaded_epid, proper_list)), key=operator.attrgetter('properlevel', 'date'), reverse=True): # type: Proper epid = _epid(cur_proper) # if the show is in our list and there hasn't been a Proper already added for that particular episode # then add it to our list of 
Propers - if epid not in map_list(_epid, verified_propers): + if epid not in list(map(_epid, verified_propers)): logger.log('Proper may be useful [%s]' % cur_proper.name) verified_propers.add(cur_proper) else: # use Proper with the highest level remove_propers = set() map_consume(lambda vp: remove_propers.add(vp), - filter_iter(lambda p: (epid == _epid(p) and cur_proper.proper_level > p.proper_level), - verified_propers)) + filter(lambda p: (epid == _epid(p) and cur_proper.proper_level > p.proper_level), + verified_propers)) if remove_propers: verified_propers -= remove_propers @@ -559,16 +556,14 @@ def _download_propers(proper_list): if reject: if isinstance(reject, string_types): if scene_rej_nuked and not scene_nuked_active: - logger.log('Rejecting nuked release. Nuke reason [%s] source [%s]' % (reject, url), - logger.DEBUG) + logger.debug('Rejecting nuked release. Nuke reason [%s] source [%s]' % (reject, url)) else: - logger.log('Considering nuked release. Nuke reason [%s] source [%s]' % (reject, url), - logger.DEBUG) + logger.debug('Considering nuked release. Nuke reason [%s] source [%s]' % (reject, url)) reject = False elif scene_contains or non_scene_fallback: reject = False else: - logger.log('Rejecting as not scene release listed at any [%s]' % url, logger.DEBUG) + logger.debug('Rejecting as not scene release listed at any [%s]' % url) if reject: continue @@ -633,7 +628,7 @@ def get_needed_qualites(needed=None): continue ep_obj = show_obj.get_episode(season=cur_result['season'], episode=cur_result['episode']) if ep_obj: - ep_status, ep_quality = Quality.splitCompositeStatus(ep_obj.status) + ep_status, ep_quality = Quality.split_composite_status(ep_obj.status) if ep_status in SNATCHED_ANY + [DOWNLOADED, ARCHIVED]: needed.check_needed_qualities([ep_quality]) @@ -687,21 +682,21 @@ def _generic_name(name): def _set_last_proper_search(when): - logger.log(u'Setting the last Proper search in the DB to %s' % when, logger.DEBUG) + logger.debug(f'Setting the last Proper search in the DB to {when}') my_db = db.DBConnection() sql_result = my_db.select('SELECT * FROM info') if 0 == len(sql_result): my_db.action('INSERT INTO info (last_backlog, last_indexer, last_proper_search) VALUES (?,?,?)', - [0, 0, int(timestamp_near(when))]) + [0, 0, SGDatetime.timestamp_near(when)]) else: # noinspection SqlConstantCondition - my_db.action('UPDATE info SET last_proper_search=%s WHERE 1=1' % int(timestamp_near(when))) + my_db.action('UPDATE info SET last_proper_search=%s WHERE 1=1' % SGDatetime.timestamp_near(when)) def next_proper_timeleft(): - return sickgear.proper_finder_scheduler.timeLeft() + return sickgear.proper_finder_scheduler.time_left() def get_last_proper_search(): diff --git a/sickgear/providers/__init__.py b/sickgear/providers/__init__.py index 4ba6218d..ecc0b6d4 100644 --- a/sickgear/providers/__init__.py +++ b/sickgear/providers/__init__.py @@ -22,7 +22,6 @@ from .newznab import NewznabConstants from .. import logger import sickgear -from _23 import filter_list, filter_iter from six import iteritems, itervalues # noinspection PyUnreachableCode @@ -30,6 +29,7 @@ if False: from typing import AnyStr, List, Union from .generic import GenericProvider, NZBProvider, TorrentProvider +# noinspection PyUnresolvedReferences __all__ = [ # usenet 'filesharingtalk', @@ -50,47 +50,47 @@ for module in __all__: try: m = importlib.import_module('.' 
+ module, 'sickgear.providers') globals().update({n: getattr(m, n) for n in m.__all__} if hasattr(m, '__all__') - else dict(filter_iter(lambda t: '_' != t[0][0], iteritems(m.__dict__)))) + else dict(filter(lambda t: '_' != t[0][0], iteritems(m.__dict__)))) except ImportError as e: if 'custom' != module[0:6]: raise e -def sortedProviderList(): +def sorted_sources(): # type: (...) -> List[Union[GenericProvider, NZBProvider, TorrentProvider]] """ return sorted provider list :return: sorted list of providers """ - initialList = sickgear.providerList + sickgear.newznabProviderList + sickgear.torrentRssProviderList - providerDict = dict(zip([x.get_id() for x in initialList], initialList)) + initial_list = sickgear.provider_list + sickgear.newznab_providers + sickgear.torrent_rss_providers + provider_dict = dict(zip([x.get_id() for x in initial_list], initial_list)) - newList = [] + new_list = [] # add all modules in the priority list, in order for curModule in sickgear.PROVIDER_ORDER: - if curModule in providerDict: - newList.append(providerDict[curModule]) + if curModule in provider_dict: + new_list.append(provider_dict[curModule]) if not sickgear.PROVIDER_ORDER: - nzb = filter_list(lambda p: p.providerType == generic.GenericProvider.NZB, itervalues(providerDict)) - tor = filter_list(lambda p: p.providerType != generic.GenericProvider.NZB, itervalues(providerDict)) - newList = sorted(filter_iter(lambda p: not p.anime_only, nzb), key=lambda v: v.get_id()) + \ - sorted(filter_iter(lambda p: not p.anime_only, tor), key=lambda v: v.get_id()) + \ - sorted(filter_iter(lambda p: p.anime_only, nzb), key=lambda v: v.get_id()) + \ - sorted(filter_iter(lambda p: p.anime_only, tor), key=lambda v: v.get_id()) + nzb = list(filter(lambda p: p.providerType == generic.GenericProvider.NZB, itervalues(provider_dict))) + tor = list(filter(lambda p: p.providerType != generic.GenericProvider.NZB, itervalues(provider_dict))) + new_list = sorted(filter(lambda p: not p.anime_only, nzb), key=lambda v: v.get_id()) + \ + sorted(filter(lambda p: not p.anime_only, tor), key=lambda v: v.get_id()) + \ + sorted(filter(lambda p: p.anime_only, nzb), key=lambda v: v.get_id()) + \ + sorted(filter(lambda p: p.anime_only, tor), key=lambda v: v.get_id()) # add any modules that are missing from that list - for curModule in providerDict: - if providerDict[curModule] not in newList: - newList.append(providerDict[curModule]) + for curModule in provider_dict: + if provider_dict[curModule] not in new_list: + new_list.append(provider_dict[curModule]) - return newList + return new_list -def makeProviderList(): - return [x.provider for x in [getProviderModule(y) for y in __all__] if x] +def provider_modules(): + return [x.provider for x in [_get_module_by_name(y) for y in __all__] if x] def generic_provider_name(n): @@ -103,7 +103,7 @@ def generic_provider_url(u): return u.strip().strip('/').lower().replace('https', 'http') -def make_unique_list(p_list, d_list=None): +def _make_unique_list(p_list, d_list=None): # type: (List, List) -> List """ remove provider duplicates @@ -119,7 +119,7 @@ def make_unique_list(p_list, d_list=None): default_names = [d.name for d in d_list or []] - p_list = filter_iter(lambda _x: _x.get_id() not in ['sick_beard_index'], p_list) + p_list = filter(lambda _x: _x.get_id() not in ['sick_beard_index'], p_list) for cur_p in p_list: g_name = generic_provider_name(cur_p.name) g_url = generic_provider_url(cur_p.url) @@ -136,32 +136,32 @@ def make_unique_list(p_list, d_list=None): return new_p_list -def 
getNewznabProviderList(data): +def newznab_source_list(data): # type: (AnyStr) -> List - defaultList = [makeNewznabProvider(x) for x in getDefaultNewznabProviders().split('!!!')] - providerList = make_unique_list(filter_list(lambda _x: _x, [makeNewznabProvider(x) for x in data.split('!!!')]), - defaultList) + default_list = [_create_newznab_source(x) for x in _default_newznab_sources().split('!!!')] + provider_list = _make_unique_list(list(filter( + lambda _x: _x, [_create_newznab_source(x) for x in data.split('!!!')])), default_list) - providerDict = dict(zip([x.name for x in providerList], providerList)) + provider_dict = dict(zip([x.name for x in provider_list], provider_list)) - for curDefault in defaultList: + for curDefault in default_list: if not curDefault: continue - if curDefault.name not in providerDict: + if curDefault.name not in provider_dict: curDefault.default = True - providerList.append(curDefault) + provider_list.append(curDefault) else: - providerDict[curDefault.name].default = True + provider_dict[curDefault.name].default = True for k in ('name', 'url', 'needs_auth', 'search_mode', 'search_fallback', 'enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog', 'server_type'): - setattr(providerDict[curDefault.name], k, getattr(curDefault, k)) + setattr(provider_dict[curDefault.name], k, getattr(curDefault, k)) - return filter_list(lambda _x: _x, providerList) + return list(filter(lambda _x: _x, provider_list)) -def makeNewznabProvider(config_string): +def _create_newznab_source(config_string): if not config_string: return None @@ -177,24 +177,24 @@ def makeNewznabProvider(config_string): except IndexError: params.update({k: d}) else: - logger.log(u'Skipping Newznab provider string: \'%s\', incorrect format' % config_string, logger.ERROR) + logger.error(f'Skipping Newznab provider string: \'{config_string}\', incorrect format') return None newznab_module = sys.modules['sickgear.providers.newznab'] - newProvider = newznab_module.NewznabProvider(name, url, **params) - newProvider.enabled = '1' == enabled + new_provider = newznab_module.NewznabProvider(name, url, **params) + new_provider.enabled = '1' == enabled - return newProvider + return new_provider -def getTorrentRssProviderList(data): - providerList = filter_list(lambda _x: _x, [makeTorrentRssProvider(x) for x in data.split('!!!')]) +def torrent_rss_source_list(data): + provider_list = list(filter(lambda _x: _x, [_create_torrent_rss_source(x) for x in data.split('!!!')])) - return filter_list(lambda _x: _x, providerList) + return list(filter(lambda _x: _x, provider_list)) -def makeTorrentRssProvider(config_string): +def _create_torrent_rss_source(config_string): if not config_string: return None @@ -213,30 +213,31 @@ def makeTorrentRssProvider(config_string): url = values[1] enabled = values[3] except ValueError: - logger.log(u"Skipping RSS Torrent provider string: '" + config_string + "', incorrect format", - logger.ERROR) + logger.error(f'Skipping RSS Torrent provider string: \'{config_string}\', incorrect format') return None try: - torrentRss = sys.modules['sickgear.providers.rsstorrent'] + torrent_rss = sys.modules['sickgear.providers.rsstorrent'] except (BaseException, Exception): return - newProvider = torrentRss.TorrentRssProvider(name, url, cookies, search_mode, search_fallback, enable_recentsearch, - enable_backlog) - newProvider.enabled = '1' == enabled + new_provider = torrent_rss.TorrentRssProvider(name, url, cookies, search_mode, search_fallback, enable_recentsearch, + enable_backlog) + 
new_provider.enabled = '1' == enabled - return newProvider + return new_provider -def getDefaultNewznabProviders(): - return '!!!'.join(['NZBgeek|https://api.nzbgeek.info/||5030,5040|0|eponly|0|0|0', - 'DrunkenSlug|https://api.drunkenslug.com/||5030,5040|0|eponly|0|0|0', - 'NinjaCentral|https://ninjacentral.co.za/||5030,5040|0|eponly|0|0|0', - ]) +def _default_newznab_sources(): + return '!!!'.join([ + '|'.join(_src) for _src in + (['NZBgeek', 'https://api.nzbgeek.info/', '', '5030,5040', '0', 'eponly', '0', '0', '0'], + ['DrunkenSlug', 'https://api.drunkenslug.com/', '', '5030,5040', '0', 'eponly', '0', '0', '0'], + ['NinjaCentral', 'https://ninjacentral.co.za/', '', '5030,5040', '0', 'eponly', '0', '0', '0'], + )]) -def getProviderModule(name): +def _get_module_by_name(name): prefix, cprov, name = 'sickgear.providers.', 'motsuc'[::-1], name.lower() if name in __all__ and prefix + name in sys.modules: return sys.modules[prefix + name] @@ -245,11 +246,11 @@ def getProviderModule(name): raise Exception('Can\'t find %s%s in providers' % (prefix, name)) -def getProviderClass(provider_id): - providerMatch = [x for x in - sickgear.providerList + sickgear.newznabProviderList + sickgear.torrentRssProviderList if - provider_id == x.get_id()] +def get_by_id(provider_id): + provider_match = [x for x in + sickgear.provider_list + sickgear.newznab_providers + sickgear.torrent_rss_providers if + provider_id == x.get_id()] - if 1 != len(providerMatch): + if 1 != len(provider_match): return None - return providerMatch[0] + return provider_match[0] diff --git a/sickgear/providers/alpharatio.py b/sickgear/providers/alpharatio.py index 4b4ed911..bbb46c0d 100644 --- a/sickgear/providers/alpharatio.py +++ b/sickgear/providers/alpharatio.py @@ -25,7 +25,6 @@ from .. import logger from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import unidecode from six import iteritems @@ -63,7 +62,6 @@ class AlphaRatioProvider(generic.TorrentProvider): rc = dict([(k, re.compile('(?i)' + v)) for (k, v) in iteritems({'info': 'view', 'get': 'download'})]) for mode in search_params: for search_string in search_params[mode]: - search_string = unidecode(search_string) search_url = self.urls['search'] % (search_string, ('&freetorrent=1', '')[not self.freeleech]) html = self.get_url(search_url) @@ -107,7 +105,7 @@ class AlphaRatioProvider(generic.TorrentProvider): except generic.HaltParseException: pass except (BaseException, Exception): - logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) + logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}') self._log_search(mode, len(items[mode]) - cnt, search_url) results = self._sort_seeding(mode, results + items[mode]) diff --git a/sickgear/providers/bithdtv.py b/sickgear/providers/bithdtv.py index 4e7b4be9..b620519a 100644 --- a/sickgear/providers/bithdtv.py +++ b/sickgear/providers/bithdtv.py @@ -23,7 +23,6 @@ from .. import logger from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import unidecode from six import iteritems @@ -49,7 +48,7 @@ class BitHDTVProvider(generic.TorrentProvider): [(None is y or re.search(r'(?i)rss\slink', y)), self.has_all_cookies(['su', 'sp', 'sl'], 'h_'), 'search' in self.urls] + [(self.session.cookies.get('h_' + x) or 'sg!no!pw') in self.digest for x in ('su', 'sp', 'sl')])), - failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings')) + failed_msg=(lambda y=None: 'Invalid cookie details for %s. 
Check settings')) @staticmethod def _has_signature(data=None): @@ -67,7 +66,6 @@ class BitHDTVProvider(generic.TorrentProvider): for mode in search_params: for search_string in search_params[mode]: - search_string = unidecode(search_string) search_url = self.urls['search'] % (search_string, self._categories_string(mode, '%s', ',')) html = self.get_url(search_url, timeout=90) @@ -112,7 +110,7 @@ class BitHDTVProvider(generic.TorrentProvider): except generic.HaltParseException: pass except (BaseException, Exception): - logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) + logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}') self._log_search(mode, len(items[mode]) - cnt, search_url) diff --git a/sickgear/providers/blutopia.py b/sickgear/providers/blutopia.py index 0ef6bdb2..b69664b1 100644 --- a/sickgear/providers/blutopia.py +++ b/sickgear/providers/blutopia.py @@ -25,7 +25,6 @@ from .. import logger from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import filter_iter, unidecode from six import iteritems @@ -55,7 +54,7 @@ class BlutopiaProvider(generic.TorrentProvider): def _authorised(self, **kwargs): return super(BlutopiaProvider, self)._authorised( - logged_in=self.logged_in, failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings')) + logged_in=self.logged_in, failed_msg=(lambda y=None: 'Invalid cookie details for %s. Check settings')) def logged_in(self, resp=None): @@ -103,11 +102,10 @@ class BlutopiaProvider(generic.TorrentProvider): show_type = self.show_obj.air_by_date and 'Air By Date' \ or self.show_obj.is_sports and 'Sports' or None if show_type: - logger.log(u'Provider does not carry shows of type: [%s], skipping' % show_type, logger.DEBUG) + logger.debug(f'Provider does not carry shows of type: [{show_type}], skipping') return results for search_string in search_params[mode]: - search_string = unidecode(search_string) search_url = self.urls['search'] % ( self._token, search_string.replace('.', ' '), self._categories_string(template=''), '', '', '') @@ -136,7 +134,7 @@ class BlutopiaProvider(generic.TorrentProvider): marked = ','.join([x.attrs.get('data-original-title', '').lower() for x in tr.find_all( 'i', attrs={'class': ['text-gold', 'fa-diamond', 'fa-certificate']})]) # noinspection PyTypeChecker - munged = ''.join(filter_iter(marked.__contains__, ['free', 'double', 'feat'])) + munged = ''.join(filter(marked.__contains__, ['free', 'double', 'feat'])) # noinspection PyUnboundLocalVariable if ((non_marked and rc['filter'].search(munged)) or (not non_marked and not rc['filter'].search(munged))): @@ -161,7 +159,7 @@ class BlutopiaProvider(generic.TorrentProvider): except generic.HaltParseException: pass except (BaseException, Exception): - logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) + logger.error(f'Failed to parse. 
Traceback: {traceback.format_exc()}') self._log_search(mode, len(items[mode]) - cnt, log + search_url) diff --git a/sickgear/providers/btn.py b/sickgear/providers/btn.py index f5373228..6b87bff9 100644 --- a/sickgear/providers/btn.py +++ b/sickgear/providers/btn.py @@ -32,7 +32,6 @@ from bs4_parser import BS4Parser from exceptions_helper import AuthException from json_helper import json_dumps -from _23 import unidecode from six import iteritems @@ -76,8 +75,7 @@ class BTNProvider(generic.TorrentProvider): self.tmr_limit_update('1', 'h', '150/hr %s' % data) self.log_failure_url(url, post_data, post_json) else: - logger.log(u'Action prematurely ended. %(prov)s server error response = %(desc)s' % - {'prov': self.name, 'desc': data}, logger.WARNING) + logger.warning(f'Action prematurely ended. {self.name} server error response = {data}') def _search_provider(self, search_params, age=0, **kwargs): @@ -119,7 +117,7 @@ class BTNProvider(generic.TorrentProvider): self._check_response(error_text, self.url_api, post_data=json_rpc(params)) return results except AuthException: - logger.log('API looks to be down, add un/pw config detail to be used as a fallback', logger.WARNING) + logger.warning('API looks to be down, add un/pw config detail to be used as a fallback') except (KeyError, Exception): pass @@ -201,7 +199,6 @@ class BTNProvider(generic.TorrentProvider): del (self.session.headers['Referer']) self.auth_html = True - search_string = unidecode(search_string) search_url = self.urls['search'] % (search_string, self._categories_string(mode, 'filter_cat[%s]=1')) html = self.get_url(search_url, use_tmr_limit=False) @@ -249,7 +246,7 @@ class BTNProvider(generic.TorrentProvider): except generic.HaltParseException: pass except (BaseException, Exception): - logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) + logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}') self._log_search(mode, len(results) - cnt, search_url) @@ -269,7 +266,7 @@ class BTNProvider(generic.TorrentProvider): else: # If we don't have a release name we need to get creative - title = u'' + title = '' keys = ['Series', 'GroupName', 'Resolution', 'Source', 'Codec'] for key in keys: if key in data_json: @@ -355,8 +352,8 @@ class BTNProvider(generic.TorrentProvider): # Set maximum to 24 hours (24 * 60 * 60 = 86400 seconds) of "RSS" data search, # older items will be done through backlog if 86400 < seconds_since_last_update: - logger.log(u'Only trying to fetch the last 24 hours even though the last known successful update on ' + - '%s was over 24 hours' % self.name, logger.WARNING) + logger.warning(f'Only trying to fetch the last 24 hours even though the last known successful update on' + f' {self.name} was over 24 hours') seconds_since_last_update = 86400 return self._search_provider(dict(Cache=['']), age=seconds_since_last_update) @@ -369,7 +366,7 @@ class BTNCache(tvcache.TVCache): def _cache_data(self, **kwargs): - return self.provider.cache_data(age=self._getLastUpdate().timetuple(), min_time=self.update_iv) + return self.provider.cache_data(age=self._get_last_update().timetuple(), min_time=self.update_iv) provider = BTNProvider() diff --git a/sickgear/providers/eztv.py b/sickgear/providers/eztv.py index 86bad378..780d6ebf 100644 --- a/sickgear/providers/eztv.py +++ b/sickgear/providers/eztv.py @@ -23,7 +23,7 @@ from .. 
import logger from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import b64decodestring, unidecode +from _23 import b64decodestring from six import iteritems @@ -62,7 +62,6 @@ class EztvProvider(generic.TorrentProvider): for mode in search_params: for search_string in search_params[mode]: - search_string = unidecode(search_string) search_url = self.urls['browse'] % search_string if 'Cache' == mode else \ self.urls['search'] % search_string.replace('.', ' ') @@ -107,7 +106,7 @@ class EztvProvider(generic.TorrentProvider): except (generic.HaltParseException, IndexError): pass except (BaseException, Exception): - logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) + logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}') self._log_search(mode, len(items[mode]) - cnt, search_url) diff --git a/sickgear/providers/fano.py b/sickgear/providers/fano.py index 67eb8395..471518f4 100644 --- a/sickgear/providers/fano.py +++ b/sickgear/providers/fano.py @@ -25,7 +25,6 @@ from .. import logger from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import unidecode from six import iteritems FLTAG = r'\s+]+%s[^<]+ datetime.timedelta @@ -503,10 +500,10 @@ class GenericProvider(object): if time_left > datetime.timedelta(seconds=0): if log_warning: # Ensure provider name output (e.g. when displaying config/provs) instead of e.g. thread "Tornado" - prepend = ('[%s] :: ' % self.name, '')[any([x.name in threading.current_thread().name - for x in sickgear.providers.sortedProviderList()])] - logger.log('%sToo many requests reached at %s, waiting for %s' % ( - prepend, self.fmt_delta(self.tmr_limit_time), self.fmt_delta(time_left)), logger.WARNING) + prepend = ('[%s] :: ' % self.name, '')[any(x.name in threading.current_thread().name + for x in sickgear.providers.sorted_sources())] + logger.warning(f'{prepend}Too many requests reached at {self.fmt_delta(self.tmr_limit_time)},' + f' waiting for {self.fmt_delta(time_left)}') return use_tmr_limit else: self.tmr_limit_time = None @@ -517,10 +514,9 @@ class GenericProvider(object): if self.is_waiting(): if log_warning: time_left = self.wait_time() - self.fail_newest_delta() - logger.log('Failed %s times, skipping provider for %s, last failure at %s with fail type: %s' % ( + logger.warning('Failed %s times, skipping provider for %s, last failure at %s with fail type: %s' % ( self.failure_count, self.fmt_delta(time_left), self.fmt_delta(self.failure_time), - ProviderFailTypes.names.get( - self.last_fail, ProviderFailTypes.names[ProviderFailTypes.other])), logger.WARNING) + ProviderFailTypes.names.get(self.last_fail, ProviderFailTypes.names[ProviderFailTypes.other]))) return True return False @@ -535,7 +531,7 @@ class GenericProvider(object): self._last_fail_type = fail_type self.fails.add_fail(*args, **kwargs) else: - logger.log('%s: Not logging same failure within 3 seconds' % self.name, logger.DEBUG) + logger.debug('%s: Not logging same failure within 3 seconds' % self.name) def get_url(self, url, skip_auth=False, use_tmr_limit=True, *args, **kwargs): # type: (AnyStr, bool, bool, Any, Any) -> Optional[AnyStr, Dict] @@ -546,8 +542,8 @@ class GenericProvider(object): :param url: Address where to fetch data from :param skip_auth: Skip authentication check of provider if True :param use_tmr_limit: An API limit can be +ve before a fetch, but unwanted, set False to short should_skip - :param args: params to pass-through to get_url - :param kwargs: keyword params to pass-through to get_url + 
:param args: params to pass through to get_url + :param kwargs: keyword params to pass through to get_url :return: None or data fetched from URL """ data = None @@ -582,7 +578,7 @@ class GenericProvider(object): if data and not isinstance(data, tuple) \ or isinstance(data, tuple) and data[0]: if 0 != self.failure_count: - logger.log('Unblocking provider: %s' % self.get_id(), logger.DEBUG) + logger.debug('Unblocking provider: %s' % self.get_id()) self.failure_count = 0 self.failure_time = None else: @@ -630,7 +626,7 @@ class GenericProvider(object): post += [' .. Post params: [%s]' % '&'.join([post_data])] if post_json: post += [' .. Json params: [%s]' % '&'.join([post_json])] - logger.log('Failure URL: %s%s' % (url, ''.join(post)), logger.WARNING) + logger.warning('Failure URL: %s%s' % (url, ''.join(post))) def get_id(self): # type: (...) -> AnyStr @@ -643,7 +639,7 @@ class GenericProvider(object): :param name: name :return: """ - return re.sub(r'[^\w\d_]', '_', name.strip().lower()) + return re.sub(r'[^\w_]', '_', name.strip().lower()) def image_name(self, *default_name): # type: (...) -> AnyStr @@ -653,8 +649,7 @@ class GenericProvider(object): :return: """ for name in ['%s.%s' % (self.get_id(), image_ext) for image_ext in ['png', 'gif', 'jpg']]: - if ek.ek(os.path.isfile, - ek.ek(os.path.join, sickgear.PROG_DIR, 'gui', sickgear.GUI_NAME, 'images', 'providers', name)): + if os.path.isfile(os.path.join(sickgear.PROG_DIR, 'gui', sickgear.GUI_NAME, 'images', 'providers', name)): return name return '%s.png' % ('newznab', default_name[0])[any(default_name)] @@ -675,7 +670,7 @@ class GenericProvider(object): rxc_delim = re.compile(r'[&;]') rxc_skip_key = re.compile(r'clearance') - for cur_p in sickgear.providers.sortedProviderList(): + for cur_p in sickgear.providers.sorted_sources(): pid = cur_p.get_id() auths = set([]) for cur_kt in ['password', 'passkey', 'api_key', 'key', 'digest', 'cookies', 'hash']: @@ -758,7 +753,7 @@ class GenericProvider(object): def is_enabled(self): # type: (...) -> bool """ - This should be overridden and should return the config setting eg. sickgear.MYPROVIDER + This should be overridden and should return the config setting e.g. 
sickgear.MYPROVIDER """ return self.enabled @@ -807,7 +802,7 @@ class GenericProvider(object): try: btih = None try: - btih = re.findall(r'urn:btih:([\w]{32,40})', result.url)[0] + btih = re.findall(r'urn:btih:(\w{32,40})', result.url)[0] if 32 == len(btih): btih = make_btih(btih) except (BaseException, Exception): @@ -815,7 +810,7 @@ class GenericProvider(object): if not btih or not re.search('(?i)[0-9a-f]{32,40}', btih): assert not result.url.startswith('http') - logger.log('Unable to extract torrent hash from link: ' + ex(result.url), logger.ERROR) + logger.error('Unable to extract torrent hash from link: ' + ex(result.url)) return False urls = ['http%s://%s/torrent/%s.torrent' % (u + (btih.upper(),)) @@ -838,25 +833,25 @@ class GenericProvider(object): cache_dir = sickgear.CACHE_DIR or helpers.get_system_temp_dir() base_name = '%s.%s' % (re.sub('.%s$' % self.providerType, '', helpers.sanitize_filename(result.name)), self.providerType) - final_file = ek.ek(os.path.join, final_dir, base_name) + final_file = os.path.join(final_dir, base_name) cached = result.cache_filepath - if cached and ek.ek(os.path.isfile, cached): - base_name = ek.ek(os.path.basename, cached) - cache_file = ek.ek(os.path.join, cache_dir, base_name) + if cached and os.path.isfile(cached): + base_name = os.path.basename(cached) + cache_file = os.path.join(cache_dir, base_name) self.session.headers['Referer'] = url if cached or helpers.download_file(url, cache_file, session=self.session, allow_redirects='/it' not in url, failure_monitor=False): if self._verify_download(cache_file): - logger.log(u'Downloaded %s result from %s' % (self.name, url)) + logger.log(f'Downloaded {self.name} result from {url}') try: helpers.move_file(cache_file, final_file) msg = 'moved' except (OSError, Exception): msg = 'copied cached file' - logger.log(u'Saved .%s data and %s to %s' % ( - (link_type, 'torrent cache')['magnet' == link_type], msg, final_file)) + logger.log(f'Saved .{(link_type, "torrent cache")["magnet" == link_type]} data' + f' and {msg} to {final_file}') saved = True break @@ -869,20 +864,20 @@ class GenericProvider(object): del(self.session.headers['Referer']) if not saved and 'magnet' == link_type: - logger.log(u'All torrent cache servers failed to return a downloadable result', logger.DEBUG) - final_file = ek.ek(os.path.join, final_dir, '%s.%s' % (helpers.sanitize_filename(result.name), link_type)) + logger.debug('All torrent cache servers failed to return a downloadable result') + final_file = os.path.join(final_dir, '%s.%s' % (helpers.sanitize_filename(result.name), link_type)) try: with open(final_file, 'wb') as fp: fp.write(decode_bytes(result.url)) fp.flush() os.fsync(fp.fileno()) saved = True - logger.log(u'Saved magnet link to file as some clients (or plugins) support this, %s' % final_file) + logger.log(f'Saved magnet link to file as some clients (or plugins) support this, {final_file}') if 'blackhole' == sickgear.TORRENT_METHOD: logger.log('Tip: If your client fails to load magnet in files, ' + 'change blackhole to a client connection method in search settings') except (BaseException, Exception): - logger.log(u'Failed to save magnet link to file, %s' % final_file) + logger.log(f'Failed to save magnet link to file, {final_file}') elif not saved: if 'torrent' == link_type and result.provider.get_id() in sickgear.PROVIDER_HOMES: t_result = result # type: TorrentSearchResult @@ -898,7 +893,7 @@ class GenericProvider(object): t_result.provider._valid_home(url_exclude=urls) setattr(sickgear, 'PROVIDER_EXCLUDE', ([], 
urls)[any([t_result.provider.url])]) - logger.log(u'Server failed to return anything useful', logger.ERROR) + logger.error('Server failed to return anything useful') return saved @@ -930,7 +925,7 @@ class GenericProvider(object): def search_rss(self, ep_obj_list): # type: (List[TVEpisode]) -> Dict[TVEpisode, SearchResult] - return self.cache.findNeededEpisodes(ep_obj_list) + return self.cache.find_needed_episodes(ep_obj_list) def get_quality(self, item, anime=False): # type: (etree.Element, bool) -> int @@ -942,7 +937,7 @@ class GenericProvider(object): :return: a Quality value obtained from the node's data """ (title, url) = self._title_and_url(item) - quality = Quality.sceneQuality(title, anime) + quality = Quality.scene_quality(title, anime) return quality def _search_provider(self, search_params, search_mode='eponly', epcount=0, age=0, **kwargs): @@ -972,7 +967,7 @@ class GenericProvider(object): except (BaseException, Exception): pass - title = title and re.sub(r'\s+', '.', u'%s' % title) + title = title and re.sub(r'\s+', '.', f'{title}') if url and not re.match('(?i)magnet:', url): url = str(url).replace('&', '&') @@ -981,11 +976,6 @@ class GenericProvider(object): def _link(self, url, url_tmpl=None, url_quote=None): url = '%s' % url # ensure string type if url and not re.match('(?i)magnet:', url): - if PY2: - try: - url = url.encode('utf-8') - except (BaseException, Exception): - pass url = url.strip().replace('&', '&') if not url: url = '' @@ -1016,12 +1006,12 @@ class GenericProvider(object): all_cells = all_cells if any(all_cells) else header_row.find_all('td') headers = [re.sub( - r'[\s]+', '', - ((any([cell.get_text()]) and any([rc[x].search(cell.get_text()) for x in iterkeys(rc)]) and cell.get_text()) - or (cell.attrs.get('id') and any([rc[x].search(cell['id']) for x in iterkeys(rc)]) and cell['id']) - or (cell.attrs.get('title') and any([rc[x].search(cell['title']) for x in iterkeys(rc)]) and cell['title']) - or next(iter(set(filter_iter(lambda rz: any([rz]), [ - next(iter(set(filter_iter(lambda ry: any([ry]), [ + r'\s+', '', + ((any([cell.get_text()]) and any(rc[x].search(cell.get_text()) for x in iterkeys(rc)) and cell.get_text()) + or (cell.attrs.get('id') and any(rc[x].search(cell['id']) for x in iterkeys(rc)) and cell['id']) + or (cell.attrs.get('title') and any(rc[x].search(cell['title']) for x in iterkeys(rc)) and cell['title']) + or next(iter(set(filter(lambda rz: any([rz]), [ + next(iter(set(filter(lambda ry: any([ry]), [ cell.find(tag, **p) for p in [{attr: rc[x]} for x in iterkeys(rc)]]))), {}).get(attr) for (tag, attr) in [ ('img', 'title'), ('img', 'src'), ('i', 'title'), ('i', 'class'), @@ -1038,7 +1028,7 @@ class GenericProvider(object): for k, r in iteritems(rc): if k not in results: - for name in filter_iter(lambda v: any([v]) and r.search(v), all_headers[::-1]): + for name in filter(lambda v: any([v]) and r.search(v), all_headers[::-1]): results[k] = all_headers.index(name) - len(all_headers) break @@ -1111,7 +1101,7 @@ class GenericProvider(object): search_list = [] for cur_ep_obj in ep_obj_list: # search cache for episode result - cache_result = self.cache.searchCache(cur_ep_obj, manual_search) # type: List[SearchResult] + cache_result = self.cache.search_cache(cur_ep_obj, manual_search) # type: List[SearchResult] if cache_result: if cur_ep_obj.episode not in results: results[cur_ep_obj.episode] = cache_result @@ -1201,10 +1191,10 @@ class GenericProvider(object): try: parse_result = parser.parse(title, release_group=self.get_id()) except 
InvalidNameException: - logger.log(u'Unable to parse the filename %s into a valid episode' % title, logger.DEBUG) + logger.debug(f'Unable to parse the filename {title} into a valid episode') continue except InvalidShowException: - logger.log(u'No match for search criteria in the parsed filename ' + title, logger.DEBUG) + logger.debug(f'No match for search criteria in the parsed filename {title}') continue if parse_result.show_obj.is_anime: @@ -1216,8 +1206,8 @@ class GenericProvider(object): continue if not (parse_result.show_obj.tvid == show_obj.tvid and parse_result.show_obj.prodid == show_obj.prodid): - logger.debug(u'Parsed show [%s] is not show [%s] we are searching for' % ( - parse_result.show_obj.unique_name, show_obj.unique_name)) + logger.debug(f'Parsed show [{parse_result.show_obj.unique_name}] is not show [{show_obj.unique_name}]' + f' we are searching for') continue parsed_show_obj = parse_result.show_obj @@ -1231,15 +1221,15 @@ class GenericProvider(object): if not (parsed_show_obj.air_by_date or parsed_show_obj.is_sports): if 'sponly' == search_mode: if len(parse_result.episode_numbers): - logger.log(u'This is supposed to be a season pack search but the result ' + title + - u' is not a valid season pack, skipping it', logger.DEBUG) + logger.debug(f'This is supposed to be a season pack search but the result {title}' + f' is not a valid season pack, skipping it') add_cache_entry = True if len(parse_result.episode_numbers) \ and (parse_result.season_number not in set([ep_obj.season for ep_obj in ep_obj_list]) or not [ep_obj for ep_obj in ep_obj_list if ep_obj.scene_episode in parse_result.episode_numbers]): - logger.log(u'The result ' + title + u' doesn\'t seem to be a valid episode that we are trying' + - u' to snatch, ignoring', logger.DEBUG) + logger.debug(f'The result {title} doesn\'t seem to be a valid episode that we are trying' + f' to snatch, ignoring') add_cache_entry = True else: if not len(parse_result.episode_numbers)\ @@ -1247,14 +1237,14 @@ class GenericProvider(object): and not [ep_obj for ep_obj in ep_obj_list if ep_obj.season == parse_result.season_number and ep_obj.episode in parse_result.episode_numbers]: - logger.log(u'The result ' + title + u' doesn\'t seem to be a valid season that we are trying' + - u' to snatch, ignoring', logger.DEBUG) + logger.debug(f'The result {title} doesn\'t seem to be a valid season that we are trying' + f' to snatch, ignoring') add_cache_entry = True elif len(parse_result.episode_numbers) and not [ ep_obj for ep_obj in ep_obj_list if ep_obj.season == parse_result.season_number and ep_obj.episode in parse_result.episode_numbers]: - logger.log(u'The result ' + title + ' doesn\'t seem to be a valid episode that we are trying' + - u' to snatch, ignoring', logger.DEBUG) + logger.debug(f'The result {title} doesn\'t seem to be a valid episode that we are trying' + f' to snatch, ignoring') add_cache_entry = True if not add_cache_entry: @@ -1263,8 +1253,8 @@ class GenericProvider(object): episode_numbers = parse_result.episode_numbers else: if not parse_result.is_air_by_date: - logger.log(u'This is supposed to be a date search but the result ' + title + - u' didn\'t parse as one, skipping it', logger.DEBUG) + logger.debug(f'This is supposed to be a date search but the result {title}' + f' didn\'t parse as one, skipping it') add_cache_entry = True else: season_number = parse_result.season_number @@ -1273,13 +1263,13 @@ class GenericProvider(object): if not episode_numbers or \ not [ep_obj for ep_obj in ep_obj_list if ep_obj.season == 
season_number and ep_obj.episode in episode_numbers]: - logger.log(u'The result ' + title + ' doesn\'t seem to be a valid episode that we are trying' + - u' to snatch, ignoring', logger.DEBUG) + logger.debug(f'The result {title} doesn\'t seem to be a valid episode that we are trying' + f' to snatch, ignoring') add_cache_entry = True # add parsed result to cache for usage later on if add_cache_entry: - logger.log(u'Adding item from search to cache: ' + title, logger.DEBUG) + logger.debug(f'Adding item from search to cache: {title}') ci = self.cache.add_cache_entry(title, url, parse_result=parse_result) if None is not ci: cl.append(ci) @@ -1296,11 +1286,11 @@ class GenericProvider(object): multi_ep = 1 < len(episode_numbers) if not want_ep: - logger.log(u'Ignoring result %s because we don\'t want an episode that is %s' - % (title, Quality.qualityStrings[quality]), logger.DEBUG) + logger.debug(f'Ignoring result {title} because we don\'t want an episode that is' + f' {Quality.qualityStrings[quality]}') continue - logger.log(u'Found result %s at %s' % (title, url), logger.DEBUG) + logger.debug(f'Found result {title} at {url}') # make a result object ep_obj_results = [] # type: List[TVEpisode] @@ -1325,14 +1315,14 @@ class GenericProvider(object): ep_num = None if 1 == len(ep_obj_results): ep_num = ep_obj_results[0].episode - logger.log(u'Single episode result.', logger.DEBUG) + logger.debug('Single episode result.') elif 1 < len(ep_obj_results): ep_num = MULTI_EP_RESULT - logger.log(u'Separating multi-episode result to check for later - result contains episodes: ' + - str(parse_result.episode_numbers), logger.DEBUG) + logger.debug(f'Separating multi-episode result to check for later - result contains episodes:' + f' {parse_result.episode_numbers}') elif 0 == len(ep_obj_results): ep_num = SEASON_RESULT - logger.log(u'Separating full season result to check for later', logger.DEBUG) + logger.debug('Separating full season result to check for later') if ep_num not in results: # noinspection PyTypeChecker @@ -1356,7 +1346,7 @@ class GenericProvider(object): :param kwargs: :return: """ - results = self.cache.listPropers(search_date) + results = self.cache.list_propers(search_date) return [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time']), self.show_obj) for x in results] @@ -1387,18 +1377,18 @@ class GenericProvider(object): :param count: count of successfully processed items :param url: source url of item(s) """ - stats = map_list(lambda arg: ('_reject_%s' % arg[0], arg[1]), - filter_iter(lambda _arg: all([getattr(self, '_reject_%s' % _arg[0], None)]), - (('seed', '%s ]+>)|\W)*(attempts|tries|remain)[\W\w]{,40}?(remain|left|attempt)|last[^<]+?attempt)', y)) logged_in, failed_msg = [None is not a and a or b for (a, b) in ( (logged_in, (lambda y=None: self.has_all_cookies())), - (failed_msg, (lambda y='': maxed_out(y) and u'Urgent abort, running low on login attempts. ' + - u'Password flushed to prevent service disruption to %s.' or + (failed_msg, (lambda y='': maxed_out(y) and 'Urgent abort, running low on login attempts. ' + + 'Password flushed to prevent service disruption to %s.' or (re.search(r'(?i)(username|password)((<[^>]+>)|\W)*' + r'(or|and|/|\s)((<[^>]+>)|\W)*(password|incorrect)', y) and - u'Invalid username or password for %s. Check settings' or - u'Failed to authenticate or parse a response from %s, abort provider'))) + 'Invalid username or password for %s. 
Check settings' or + 'Failed to authenticate or parse a response from %s, abort provider'))) )] if logged_in() and (not hasattr(self, 'urls') or bool(len(getattr(self, 'urls')))): @@ -2026,7 +2015,7 @@ class TorrentProvider(GenericProvider): if not self._check_auth(): return False except AuthException as e: - logger.log('%s' % ex(e), logger.ERROR) + logger.error('%s' % ex(e)) return False if isinstance(url, type([])): @@ -2103,7 +2092,7 @@ class TorrentProvider(GenericProvider): sickgear.save_config() msg = failed_msg(response) if msg: - logger.log(msg % self.name, logger.ERROR) + logger.error(msg % self.name) return False @@ -2169,7 +2158,7 @@ class TorrentProvider(GenericProvider): if self.should_skip(log_warning=False): break - proper_check = re.compile(r'(?i)(?:%s)' % clean_term.sub('', proper_term)) + proper_check = re.compile(r'(?i)%s' % clean_term.sub('', proper_term)) for item in items: if self.should_skip(log_warning=False): break diff --git a/sickgear/providers/hdbits.py b/sickgear/providers/hdbits.py index 11542acd..6c8ed495 100644 --- a/sickgear/providers/hdbits.py +++ b/sickgear/providers/hdbits.py @@ -49,7 +49,7 @@ class HDBitsProvider(generic.TorrentProvider): def _check_auth_from_data(self, parsed_json): if 'status' in parsed_json and 5 == parsed_json.get('status') and 'message' in parsed_json: - logger.log(u'Incorrect username or password for %s: %s' % (self.name, parsed_json['message']), logger.DEBUG) + logger.debug(f'Incorrect username or password for {self.name}: {parsed_json["message"]}') raise AuthException('Your username or password for %s is incorrect, check your config.' % self.name) return True @@ -115,10 +115,10 @@ class HDBitsProvider(generic.TorrentProvider): try: if not (json_resp and self._check_auth_from_data(json_resp) and 'data' in json_resp): - logger.log(u'Response from %s does not contain any json data, abort' % self.name, logger.ERROR) + logger.error(f'Response from {self.name} does not contain any json data, abort') return results except AuthException as e: - logger.log(u'Authentication error: %s' % (ex(e)), logger.ERROR) + logger.error(f'Authentication error: {ex(e)}') return results cnt = len(items[mode]) diff --git a/sickgear/providers/hdspace.py b/sickgear/providers/hdspace.py index b41eac53..d693d7af 100644 --- a/sickgear/providers/hdspace.py +++ b/sickgear/providers/hdspace.py @@ -25,7 +25,6 @@ from .. import logger from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import unidecode from six import iteritems @@ -76,7 +75,6 @@ class HDSpaceProvider(generic.TorrentProvider): log = '%sing (%s) ' % (('keep', 'skipp')[non_marked], ', '.join([self.may_filter[f][0] for f in filters])) for mode in search_params: for search_string in search_params[mode]: - search_string = unidecode(search_string) search_url = self.urls['browse'] + self._categories_string(template='', delimiter=';') if 'Cache' != mode: @@ -130,7 +128,7 @@ class HDSpaceProvider(generic.TorrentProvider): except generic.HaltParseException: pass except (BaseException, Exception): - logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) + logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}') self._log_search(mode, len(items[mode]) - cnt, log + search_url) results = self._sort_seeding(mode, results + items[mode]) diff --git a/sickgear/providers/hdtorrents.py b/sickgear/providers/hdtorrents.py index 311dad04..8fcb5067 100644 --- a/sickgear/providers/hdtorrents.py +++ b/sickgear/providers/hdtorrents.py @@ -25,7 +25,6 @@ from .. 
import logger from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import unidecode from six import iteritems @@ -80,7 +79,6 @@ class HDTorrentsProvider(generic.TorrentProvider): for mode in search_params: rc['cats'] = re.compile('(?i)category=(?:%s)' % self._categories_string(mode, template='', delimiter='|')) for search_string in search_params[mode]: - search_string = unidecode(search_string) search_url = self.urls['search'] % ( search_string, self._categories_string(mode, template='category[]=%s') @@ -133,7 +131,7 @@ class HDTorrentsProvider(generic.TorrentProvider): except generic.HaltParseException: pass except (BaseException, Exception): - logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) + logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}') self._log_search(mode, len(items[mode]) - cnt, log + search_url) diff --git a/sickgear/providers/immortalseed.py b/sickgear/providers/immortalseed.py index e4504706..2dffe029 100644 --- a/sickgear/providers/immortalseed.py +++ b/sickgear/providers/immortalseed.py @@ -27,7 +27,6 @@ from ..helpers import try_int import exceptions_helper import feedparser -from _23 import unidecode from six import iteritems @@ -72,7 +71,6 @@ class ImmortalSeedProvider(generic.TorrentProvider): 'size': r'size[^\d/]+([^/]+)', 'get': '(.*download.*)', 'title': r'NUKED\b\.(.*)$'})]) for mode in search_params: for search_string in search_params[mode]: - search_string = unidecode(search_string) search_string = search_string.replace(' ', '.') search_url = self.urls['search'] % ( diff --git a/sickgear/providers/iptorrents.py b/sickgear/providers/iptorrents.py index 5bca68b8..f399be6c 100644 --- a/sickgear/providers/iptorrents.py +++ b/sickgear/providers/iptorrents.py @@ -24,7 +24,7 @@ from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import b64decodestring, unidecode +from _23 import b64decodestring from six import iteritems @@ -58,7 +58,7 @@ class IPTorrentsProvider(generic.TorrentProvider): ['IPTorrents' in y, 'type="password"' not in y[0:2048], self.has_all_cookies()] + [(self.session.cookies.get(c, domain='') or 'sg!no!pw') in self.digest for c in ('uid', 'pass')])), - failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings')) + failed_msg=(lambda y=None: 'Invalid cookie details for %s. Check settings')) @staticmethod def _has_signature(data=None): @@ -77,7 +77,6 @@ class IPTorrentsProvider(generic.TorrentProvider): urls = [] for search_string in search_params[mode]: urls += [[]] - search_string = unidecode(search_string) or search_string for page in range((3, 5)['Cache' == mode])[1:]: # URL with 50 tv-show results, or max 150 if adjusted in IPTorrents profile urls[-1] += [self.urls['search'] % ( @@ -155,7 +154,7 @@ class IPTorrentsProvider(generic.TorrentProvider): except generic.HaltParseException: pass except (BaseException, Exception): - logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) + logger.error(f'Failed to parse. 
Traceback: {traceback.format_exc()}') self._log_search(mode, len(items[mode]) - cnt, search_url, log_settings_hint) if self.is_search_finished(mode, items, cnt_search, rc['id'], last_recent_search, lrs_new, lrs_found): diff --git a/sickgear/providers/limetorrents.py b/sickgear/providers/limetorrents.py index 2ddf2289..7a429b74 100644 --- a/sickgear/providers/limetorrents.py +++ b/sickgear/providers/limetorrents.py @@ -24,7 +24,7 @@ from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import b64decodestring, quote_plus, unidecode +from _23 import b64decodestring, quote_plus class LimeTorrentsProvider(generic.TorrentProvider): @@ -61,8 +61,6 @@ class LimeTorrentsProvider(generic.TorrentProvider): for mode in search_params: for search_string in search_params[mode]: - search_string = unidecode(search_string) - search_url = self.urls['browse'] if 'Cache' == mode \ else self.urls['search'] % (quote_plus(search_string)) @@ -116,7 +114,7 @@ class LimeTorrentsProvider(generic.TorrentProvider): except generic.HaltParseException: pass except (BaseException, Exception): - logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) + logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}') self._log_search(mode, len(items[mode]) - cnt, search_url) @@ -133,7 +131,7 @@ class LimeTorrentsProvider(generic.TorrentProvider): try: result = re.findall('(?i)"(magnet:[^"]+?)"', html)[0] except IndexError: - logger.log('Failed no magnet in response', logger.DEBUG) + logger.debug('Failed no magnet in response') return result diff --git a/sickgear/providers/magnetdl.py b/sickgear/providers/magnetdl.py index afa1e739..b6ed7559 100644 --- a/sickgear/providers/magnetdl.py +++ b/sickgear/providers/magnetdl.py @@ -24,7 +24,6 @@ from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import unidecode from six import iteritems @@ -53,7 +52,6 @@ class MagnetDLProvider(generic.TorrentProvider): for search_string in search_params[mode]: urls = [self.urls['browse'], self.urls['browse'] + '2'] if 'Cache' != mode: - search_string = unidecode(search_string) urls = [self.urls['search'] % re.sub(r'[.\s]+', ' ', search_string)] html = '' @@ -101,7 +99,7 @@ class MagnetDLProvider(generic.TorrentProvider): except generic.HaltParseException: pass except (BaseException, Exception): - logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) + logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}') self._log_search(mode, len(items[mode]) - cnt, search_url) diff --git a/sickgear/providers/milkie.py b/sickgear/providers/milkie.py index 5e88c007..858d6b4a 100644 --- a/sickgear/providers/milkie.py +++ b/sickgear/providers/milkie.py @@ -23,8 +23,6 @@ from . import generic from .. 
import logger from ..helpers import try_int -from _23 import unidecode - class MilkieProvider(generic.TorrentProvider): @@ -71,7 +69,6 @@ class MilkieProvider(generic.TorrentProvider): for mode in search_params: for search_string in search_params[mode]: - search_string = unidecode(search_string) search_url = '' data_json, sess = None, None diff --git a/sickgear/providers/morethan.py b/sickgear/providers/morethan.py index b2622fb4..bb94c431 100644 --- a/sickgear/providers/morethan.py +++ b/sickgear/providers/morethan.py @@ -26,7 +26,6 @@ from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import unidecode from six import iteritems @@ -67,7 +66,7 @@ class MoreThanProvider(generic.TorrentProvider): for (k, v) in iteritems({'info': r'torrents.php\?id', 'get': 'download', 'nuked': 'nuked'})]) for mode in search_params: for search_string in search_params[mode]: - search_string = unidecode(search_string).replace('.', ' ') + search_string = search_string.replace('.', ' ') search_url = self.urls['search'] % (search_string, self._categories_string(mode, template='filter_cat[%s]=1')) @@ -113,7 +112,7 @@ class MoreThanProvider(generic.TorrentProvider): except generic.HaltParseException: pass except (BaseException, Exception): - logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) + logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}') self._log_search(mode, len(items[mode]) - cnt, search_url) diff --git a/sickgear/providers/ncore.py b/sickgear/providers/ncore.py index f6177865..d99e9512 100644 --- a/sickgear/providers/ncore.py +++ b/sickgear/providers/ncore.py @@ -26,7 +26,6 @@ from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import unidecode from six import iteritems @@ -66,7 +65,6 @@ class NcoreProvider(generic.TorrentProvider): 'list': '.*?torrent_all', 'info': 'details', 'key': 'key=([^"]+)">Torrent let'})]) for mode in search_params: for search_string in search_params[mode]: - search_string = unidecode(search_string) search_url = self.urls['search'] % search_string # fetches 15 results by default, and up to 100 if allowed in user profile @@ -107,7 +105,7 @@ class NcoreProvider(generic.TorrentProvider): except generic.HaltParseException: pass except (BaseException, Exception): - logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) + logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}') self._log_search(mode, len(items[mode]) - cnt, search_url) diff --git a/sickgear/providers/nebulance.py b/sickgear/providers/nebulance.py index 99feacd0..843beb4a 100644 --- a/sickgear/providers/nebulance.py +++ b/sickgear/providers/nebulance.py @@ -25,7 +25,7 @@ from ..helpers import try_int from bs4_parser import BS4Parser from json_helper import json_dumps -from _23 import filter_list, unidecode, unquote_plus +from _23 import unquote_plus from six import iteritems @@ -83,7 +83,6 @@ class NebulanceProvider(generic.TorrentProvider): rc = dict([(k, re.compile('(?i)' + v)) for (k, v) in iteritems({'nodots': r'[\.\s]+'})]) for mode in search_params: for search_string in search_params[mode]: - search_string = unidecode(search_string) search_url = self.urls['browse'] % (self.user_authkey, self.user_passkey) if 'Cache' != mode: @@ -120,7 +119,7 @@ class NebulanceProvider(generic.TorrentProvider): items[mode].append((title, download_url, seeders, self._bytesizer(size))) except (BaseException, Exception): - logger.log(u'Failed to parse. 
Traceback: %s' % traceback.format_exc(), logger.ERROR) + logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}') self._log_search(mode, len(items[mode]) - cnt, search_url) results = self._sort_seeding(mode, results + items[mode]) @@ -164,7 +163,7 @@ class NebulanceProvider(generic.TorrentProvider): ('(?i)%s(Proper)%s' % (bl, br), r'`\1`'), (r'%s\s*%s' % (bl, br), '`')]: title = re.sub(r[0], r[1], title) - grp = filter_list(lambda rn: '.release' in rn.lower(), item['tags']) + grp = list(filter(lambda rn: '.release' in rn.lower(), item['tags'])) title = '%s%s-%s' % (('', t[0])[1 < len(t)], title, (any(grp) and grp[0] or 'nogrp').upper().replace('.RELEASE', '')) @@ -186,7 +185,7 @@ class NebulanceProvider(generic.TorrentProvider): for mode in search_params: for search_string in search_params[mode]: - search_string = unquote_plus(unidecode(search_string)) + search_string = unquote_plus(search_string) params = {'release': search_string} if 'Cache' == mode: diff --git a/sickgear/providers/newznab.py b/sickgear/providers/newznab.py index 69fbfdd4..b9cac5e2 100644 --- a/sickgear/providers/newznab.py +++ b/sickgear/providers/newznab.py @@ -31,7 +31,7 @@ from ..common import NeededQualities, Quality from ..helpers import remove_non_release_groups from ..indexers.indexer_config import * from ..network_timezones import SG_TIMEZONE -from ..sgdatetime import SGDatetime, timestamp_near +from ..sgdatetime import SGDatetime from ..search import get_aired_in_season, get_wanted_qualities from ..show_name_helpers import get_show_names from ..scene_exceptions import has_season_exceptions @@ -217,7 +217,7 @@ class NewznabProvider(generic.NZBProvider): try: my_db = db.DBConnection('cache.db') if isinstance(value, datetime.datetime): - save_value = int(timestamp_near(value)) + save_value = SGDatetime.timestamp_near(value) else: save_value = SGDatetime.timestamp_far(value, default=0) my_db.action('INSERT OR REPLACE INTO "lastrecentsearch" (name, datetime) VALUES (?,?)', @@ -331,7 +331,7 @@ class NewznabProvider(generic.NZBProvider): except (BaseException, Exception): continue except (BaseException, Exception): - logger.log('Error parsing result for [%s]' % self.name, logger.DEBUG) + logger.debug('Error parsing result for [%s]' % self.name) if not caps and self._caps and not all_cats and self._caps_all_cats and not cats and self._caps_cats: self._check_excludes(cats) @@ -347,7 +347,7 @@ class NewznabProvider(generic.NZBProvider): caps[NewznabConstants.SEARCH_SEASON] = 'season' if NewznabConstants.SEARCH_EPISODE not in caps or not caps.get(NewznabConstants.SEARCH_EPISODE): caps[NewznabConstants.SEARCH_TEXT] = 'ep' - if (TVINFO_TVRAGE not in caps or not caps.get(TVINFO_TVRAGE)): + if TVINFO_TVRAGE not in caps or not caps.get(TVINFO_TVRAGE): caps[TVINFO_TVRAGE] = 'rid' if NewznabConstants.SEARCH_TEXT not in caps or not caps.get(NewznabConstants.SEARCH_TEXT): caps[NewznabConstants.SEARCH_TEXT] = 'q' @@ -644,14 +644,14 @@ class NewznabProvider(generic.NZBProvider): if not s.show_obj.is_anime and not s.show_obj.is_sports: if not getattr(s, 'wanted_quality', None): # this should not happen, the creation is missing for the search in this case - logger.log('wanted_quality property was missing for search, creating it', logger.WARNING) - ep_status, ep_quality = Quality.splitCompositeStatus(ep_obj.status) + logger.warning('wanted_quality property was missing for search, creating it') + ep_status, ep_quality = Quality.split_composite_status(ep_obj.status) s.wanted_quality = get_wanted_qualities(ep_obj, ep_status, 
ep_quality, unaired=True) needed.check_needed_qualities(s.wanted_quality) if not hasattr(ep_obj, 'eps_aired_in_season'): # this should not happen, the creation is missing for the search in this case - logger.log('eps_aired_in_season property was missing for search, creating it', logger.WARNING) + logger.warning('eps_aired_in_season property was missing for search, creating it') ep_count, ep_count_scene = get_aired_in_season(ep_obj.show_obj) ep_obj.eps_aired_in_season = ep_count.get(ep_obj.season, 0) ep_obj.eps_aired_in_scene_season = ep_count_scene.get(ep_obj.scene_season, 0) if ep_obj.show_obj.is_scene \ @@ -682,14 +682,14 @@ class NewznabProvider(generic.NZBProvider): needed.check_needed_types(ep_obj.show_obj) if not ep_obj.show_obj.is_anime and not ep_obj.show_obj.is_sports: if not getattr(ep_obj, 'wanted_quality', None): - ep_status, ep_quality = Quality.splitCompositeStatus(ep_obj.status) + ep_status, ep_quality = Quality.split_composite_status(ep_obj.status) ep_obj.wanted_quality = get_wanted_qualities(ep_obj, ep_status, ep_quality, unaired=True) needed.check_needed_qualities(ep_obj.wanted_quality) else: if not ep_obj.show_obj.is_anime and not ep_obj.show_obj.is_sports: for cur_ep_obj in ep_obj_list: if not getattr(cur_ep_obj, 'wanted_quality', None): - ep_status, ep_quality = Quality.splitCompositeStatus(cur_ep_obj.status) + ep_status, ep_quality = Quality.split_composite_status(cur_ep_obj.status) cur_ep_obj.wanted_quality = get_wanted_qualities(cur_ep_obj, ep_status, ep_quality, unaired=True) needed.check_needed_qualities(cur_ep_obj.wanted_quality) @@ -733,7 +733,7 @@ class NewznabProvider(generic.NZBProvider): continue # search cache for episode result - cache_result = self.cache.searchCache(ep_obj, manual_search) + cache_result = self.cache.search_cache(ep_obj, manual_search) if cache_result: if ep_obj.episode not in results: results[ep_obj.episode] = cache_result @@ -911,9 +911,9 @@ class NewznabProvider(generic.NZBProvider): # category ids cat = [] if 'Episode' == mode or 'Season' == mode: - if not (any([x in params for x in - [v for c, v in iteritems(self.caps) - if c not in [NewznabConstants.SEARCH_EPISODE, NewznabConstants.SEARCH_SEASON]]])): + if not (any(x in params for x in + [v for c, v in iteritems(self.caps) + if c not in [NewznabConstants.SEARCH_EPISODE, NewznabConstants.SEARCH_SEASON]])): logger.log('Show is missing either an id or search term for search') continue @@ -938,7 +938,7 @@ class NewznabProvider(generic.NZBProvider): request_params = base_params.copy() # if ('Propers' == mode or 'nzbs_org' == self.get_id()) \ if 'Propers' == mode \ - and 'q' in params and not (any([x in params for x in ['season', 'ep']])): + and 'q' in params and not (any(x in params for x in ['season', 'ep'])): request_params['t'] = 'search' request_params.update(params) @@ -978,14 +978,14 @@ class NewznabProvider(generic.NZBProvider): parsed_xml, n_spaces = self.cache.parse_and_get_ns(data) items = parsed_xml.findall('channel/item') except (BaseException, Exception): - logger.log('Error trying to load %s RSS feed' % self.name, logger.WARNING) + logger.warning('Error trying to load %s RSS feed' % self.name) break if not self._check_auth_from_data(parsed_xml, search_url): break if 'rss' != parsed_xml.tag: - logger.log('Resulting XML from %s isn\'t RSS, not parsing it' % self.name, logger.WARNING) + logger.warning('Resulting XML from %s isn\'t RSS, not parsing it' % self.name) break i and time.sleep(2.1) @@ -996,8 +996,7 @@ class NewznabProvider(generic.NZBProvider): if title and url: 
results.append(item) else: - logger.log('The data returned from %s is incomplete, this result is unusable' % self.name, - logger.DEBUG) + logger.debug('The data returned from %s is incomplete, this result is unusable' % self.name) # get total and offset attributes try: @@ -1036,8 +1035,8 @@ class NewznabProvider(generic.NZBProvider): # there are more items available than the amount given in one call, grab some more items = total - request_params['offset'] - logger.log('%s more item%s to fetch from a batch of up to %s items.' - % (items, helpers.maybe_plural(items), request_params['limit']), logger.DEBUG) + logger.debug(f'{items} more item{helpers.maybe_plural(items)} to fetch from a batch of up to' + f' {request_params["limit"]} items.') batch_count = self._log_result(results, mode, cnt, search_url) exit_log = False @@ -1048,10 +1047,10 @@ class NewznabProvider(generic.NZBProvider): if exit_log: self._log_search(mode, len(results), search_url) - if not try_all_searches and any([x in request_params for x in [ + if not try_all_searches and any(x in request_params for x in [ v for c, v in iteritems(self.caps) if c not in [NewznabConstants.SEARCH_EPISODE, NewznabConstants.SEARCH_SEASON, - NewznabConstants.SEARCH_TEXT]]]) and len(results): + NewznabConstants.SEARCH_TEXT]]) and len(results): break return results, n_spaces @@ -1070,7 +1069,7 @@ class NewznabProvider(generic.NZBProvider): :param kwargs: :return: """ - cache_results = self.cache.listPropers(search_date) + cache_results = self.cache.list_propers(search_date) results = [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time']), self.show_obj) for x in cache_results] @@ -1125,7 +1124,7 @@ class NewznabProvider(generic.NZBProvider): result_date = self._parse_pub_date(item) if not result_date: - logger.log(u'Unable to figure out the date for entry %s, skipping it' % title) + logger.log(f'Unable to figure out the date for entry {title}, skipping it') continue result_size, result_uid = self._parse_size_uid(item, ns=n_space) @@ -1183,10 +1182,10 @@ class NewznabCache(tvcache.TVCache): root = elem return root, ns - def updateCache(self, - needed=NeededQualities(need_all=True), # type: NeededQualities - **kwargs - ): + def update_cache(self, + needed=NeededQualities(need_all=True), # type: NeededQualities + **kwargs + ): """ :param needed: needed qualites class @@ -1195,22 +1194,22 @@ class NewznabCache(tvcache.TVCache): if 4489 != sickgear.RECENTSEARCH_INTERVAL or self.should_update(): n_spaces = {} try: - check = self._checkAuth() + check = self.check_auth() if isinstance(check, bool) and not check: items = None else: (items, n_spaces) = self.provider.cache_data(needed=needed) except (BaseException, Exception) as e: - logger.log('Error updating Cache: %s' % ex(e), logger.ERROR) + logger.error('Error updating Cache: %s' % ex(e)) items = None if items: - self._clearCache() + self.clear_cache() # parse data cl = [] for item in items: - ci = self._parseItem(n_spaces, item) + ci = self.parse_item(n_spaces, item) if None is not ci: cl.append(ci) @@ -1219,7 +1218,7 @@ class NewznabCache(tvcache.TVCache): my_db.mass_action(cl) # set updated as time the attempt to fetch data is - self.setLastUpdate() + self.set_last_update() @staticmethod def parse_ids(item, ns): @@ -1240,7 +1239,7 @@ class NewznabCache(tvcache.TVCache): return ids # overwrite method with that parses the rageid from the newznab feed - def _parseItem(self, + def parse_item(self, ns, # type: Dict item # type: etree.Element ): # type: (...) 
-> Union[List[AnyStr, List[Any]], None]
@@ -1257,5 +1256,4 @@ class NewznabCache(tvcache.TVCache):
         if title and url:
             return self.add_cache_entry(title, url, tvid_prodid=ids)
 
-        logger.log('Data returned from the %s feed is incomplete, this result is unusable' % self.provider.name,
-                   logger.DEBUG)
+        logger.debug('Data returned from the %s feed is incomplete, this result is unusable' % self.provider.name)
diff --git a/sickgear/providers/nyaa.py b/sickgear/providers/nyaa.py
index 8b2bd5a9..4bb3f460 100644
--- a/sickgear/providers/nyaa.py
+++ b/sickgear/providers/nyaa.py
@@ -22,7 +22,6 @@ from .. import logger
 from ..helpers import try_int
 
 from bs4_parser import BS4Parser
-from _23 import unidecode
 from six import iteritems
 
@@ -51,7 +50,6 @@ class NyaaProvider(generic.TorrentProvider):
         rc = dict([(k, re.compile('(?i)' + v)) for (k, v) in iteritems({'info': 'view', 'get': '(?:torrent|magnet:)'})])
         for mode in search_params:
             for search_string in search_params[mode]:
-                search_string = unidecode(search_string)
                 search_url = self.urls['search'] % ((0, 2)[self.confirmed], search_string.replace('.', ' '))
 
                 html = self.get_url(search_url)
@@ -93,7 +91,7 @@ class NyaaProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
             except (BaseException, Exception):
-                logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
+                logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}')
 
             self._log_search(mode, len(items[mode]) - cnt, search_url)
 
diff --git a/sickgear/providers/omgwtfnzbs.py b/sickgear/providers/omgwtfnzbs.py
index ac2bf7e9..1d7e4bc6 100644
--- a/sickgear/providers/omgwtfnzbs.py
+++ b/sickgear/providers/omgwtfnzbs.py
@@ -87,8 +87,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
                 if re.search('(?i)(information is incorrect|in(?:valid|correct).*?(?:username|api))',
                              data_json.get('notice')):
-                    logger.log(u'Incorrect authentication credentials for ' + self.name + ' : ' + str(description_text),
-                               logger.DEBUG)
+                    logger.debug(f'Incorrect authentication credentials for {self.name} : {description_text}')
                     raise AuthException(
                         'Your authentication credentials for ' + self.name + ' are incorrect, check your config.')
@@ -96,7 +95,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
                     return True
                 else:
-                    logger.log(u'Unknown error given from ' + self.name + ' : ' + str(description_text), logger.DEBUG)
+                    logger.debug(f'Unknown error given from {self.name} : {str(description_text)}')
                     return False
 
         return True
@@ -109,7 +108,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
         :return: list of search strings
         :rtype: List[AnyStr]
         """
-        return [x for x in show_name_helpers.makeSceneSeasonSearchString(self.show_obj, ep_obj)]
+        return [x for x in show_name_helpers.make_scene_season_search_string(self.show_obj, ep_obj)]
 
     def _episode_strings(self, ep_obj):
         """
@@ -119,7 +118,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
         :return: list of search strings
         :rtype: List[AnyStr]
         """
-        return [x for x in show_name_helpers.makeSceneSearchString(self.show_obj, ep_obj)]
+        return [x for x in show_name_helpers.make_scene_search_string(self.show_obj, ep_obj)]
 
     def _title_and_url(self, item):
         """
@@ -149,7 +148,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
             self.tmr_limit_update('1', 'h', 'Your 24 hour limit of 10 NZBs has been reached')
             self.log_failure_url(url)
         elif '</nzb>' not in data or 'seem to be logged in' in data:
-            logger.log('Failed nzb data response: %s' % data, logger.DEBUG)
+            logger.debug('Failed nzb data response: %s' % data)
         else:
             result = data
         return result
@@
-345,7 +344,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider): time.sleep(1.1) pass except (BaseException, Exception): - logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) + logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}') mode = (mode, search_mode)['Propers' == search_mode] self._log_search(mode, len(results) - cnt, search_url) @@ -400,7 +399,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider): if success and self.nn: success, msg = None, 'pm dev in irc about this feature' if not success: - logger.log(u'%s: %s' % (msg, self.cookies), logger.WARNING) + logger.warning(f'{msg}: {self.cookies}') self.cookies = None return None return False diff --git a/sickgear/providers/pretome.py b/sickgear/providers/pretome.py index 87acb764..460fd807 100644 --- a/sickgear/providers/pretome.py +++ b/sickgear/providers/pretome.py @@ -23,7 +23,6 @@ from .. import logger from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import unidecode from six import iteritems @@ -58,7 +57,6 @@ class PreToMeProvider(generic.TorrentProvider): rc = dict([(k, re.compile('(?i)' + v)) for (k, v) in iteritems({'info': 'details', 'get': 'download'})]) for mode in search_params: for search_string in search_params[mode]: - search_string = unidecode(search_string) search_url = self.urls['search'] % search_string html = self.get_url(search_url) @@ -102,7 +100,7 @@ class PreToMeProvider(generic.TorrentProvider): except generic.HaltParseException: pass except (BaseException, Exception): - logger.error(u'Failed to parse. Traceback: %s' % traceback.format_exc()) + logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}') self._log_search(mode, len(items[mode]) - cnt, search_url) diff --git a/sickgear/providers/privatehd.py b/sickgear/providers/privatehd.py index 5f8fbdf0..08ae3a7d 100644 --- a/sickgear/providers/privatehd.py +++ b/sickgear/providers/privatehd.py @@ -25,7 +25,6 @@ from .. import logger from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import filter_iter, unidecode from six import iteritems @@ -57,7 +56,7 @@ class PrivateHDProvider(generic.TorrentProvider): return super(PrivateHDProvider, self)._authorised( logged_in=(lambda y='': 'English' in y and 'auth/login' not in y and all( [(self.session.cookies.get('privatehdx_session', domain='') or 'sg!no!pw') in self.digest])), - failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings')) + failed_msg=(lambda y=None: 'Invalid cookie details for %s. 
Check settings')) def _search_provider(self, search_params, **kwargs): @@ -89,11 +88,10 @@ class PrivateHDProvider(generic.TorrentProvider): show_type = self.show_obj.air_by_date and 'Air By Date' \ or self.show_obj.is_sports and 'Sports' or self.show_obj.is_anime and 'Anime' or None if show_type: - logger.log(u'Provider does not carry shows of type: [%s], skipping' % show_type, logger.DEBUG) + logger.debug(f'Provider does not carry shows of type: [{show_type}], skipping') return results for search_string in search_params[mode]: - search_string = unidecode(search_string) search_url = self.urls['search'] % ( '+'.join(search_string.split()), self._categories_string(mode, '')) @@ -120,7 +118,7 @@ class PrivateHDProvider(generic.TorrentProvider): if any(self.filter): marked = ','.join([x.attrs.get('title', '').lower() for x in tr.find_all( 'i', attrs={'class': ['fa-star', 'fa-diamond', 'fa-star-half-o']})]) - munged = ''.join(filter_iter(marked.__contains__, ['free', 'half', 'double'])) + munged = ''.join(filter(marked.__contains__, ['free', 'half', 'double'])) # noinspection PyUnboundLocalVariable if ((non_marked and rc['filter'].search(munged)) or (not non_marked and not rc['filter'].search(munged))): @@ -143,7 +141,7 @@ class PrivateHDProvider(generic.TorrentProvider): except generic.HaltParseException: pass except (BaseException, Exception): - logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) + logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}') self._log_search(mode, len(items[mode]) - cnt, log + search_url) diff --git a/sickgear/providers/ptf.py b/sickgear/providers/ptf.py index 3870b82f..d041d43a 100644 --- a/sickgear/providers/ptf.py +++ b/sickgear/providers/ptf.py @@ -26,7 +26,6 @@ from .. import logger from ..helpers import anon_url, try_int from bs4_parser import BS4Parser -from _23 import unidecode from six import iteritems @@ -57,7 +56,7 @@ class PTFProvider(generic.TorrentProvider): logged_in=(lambda y='': all( ['RSS Feed' in y, self.has_all_cookies('session_key')] + [(self.session.cookies.get(x) or 'sg!no!pw') in self.digest for x in ['session_key']])), - failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings')) + failed_msg=(lambda y=None: 'Invalid cookie details for %s. Check settings')) def _search_provider(self, search_params, **kwargs): @@ -82,7 +81,6 @@ class PTFProvider(generic.TorrentProvider): for mode in search_params: rc['cats'] = re.compile('(?i)cat=(?:%s)' % self._categories_string(mode, template='', delimiter='|')) for search_string in search_params[mode]: - search_string = unidecode(search_string) search_url = self.urls['search'] % ('+'.join(search_string.split()), self._categories_string(mode)) html = self.get_url(search_url) @@ -146,7 +144,7 @@ class PTFProvider(generic.TorrentProvider): except generic.HaltParseException: pass except (BaseException, Exception): - logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) + logger.error(f'Failed to parse. 
Traceback: {traceback.format_exc()}') self._log_search(mode, len(items[mode]) - cnt, log + self.session.response.get('url')) diff --git a/sickgear/providers/rarbg.py b/sickgear/providers/rarbg.py index ab9f2ffe..b5b735b5 100644 --- a/sickgear/providers/rarbg.py +++ b/sickgear/providers/rarbg.py @@ -41,7 +41,7 @@ class RarbgProvider(generic.TorrentProvider): 'api_list': self.url_api + 'mode=list', 'api_search': self.url_api + 'mode=search'} - self.params = {'defaults': '&format=json_extended&category=18;41&limit=100&sort=last&ranked={r}&token={t}', + self.params = {'defaults': '&format=json_extended&category=18;41;49&limit=100&sort=last&ranked={r}&token={t}', 'param_iid': '&search_imdb=%(sid)s', 'param_tid': '&search_tvdb=%(sid)s', 'param_str': '&search_string=%(str)s', @@ -68,7 +68,7 @@ class RarbgProvider(generic.TorrentProvider): return True time.sleep(2) - logger.log(u'No usable API token returned from: %s' % self.urls['api_token'], logger.ERROR) + logger.error(f'No usable API token returned from: {self.urls["api_token"]}') return False @staticmethod diff --git a/sickgear/providers/revtt.py b/sickgear/providers/revtt.py index 0ee68d6e..2e367969 100644 --- a/sickgear/providers/revtt.py +++ b/sickgear/providers/revtt.py @@ -23,7 +23,6 @@ from .. import logger from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import unidecode from six import iteritems @@ -61,7 +60,6 @@ class RevTTProvider(generic.TorrentProvider): for mode in search_params: rc['cats'] = re.compile('(?i)cat=(?:%s)' % self._categories_string(mode, template='', delimiter='|')) for search_string in search_params[mode]: - search_string = unidecode(search_string) html = self.get_url(self.urls['search'] % ('+'.join(search_string.split()), self._categories_string(mode))) @@ -104,7 +102,7 @@ class RevTTProvider(generic.TorrentProvider): except generic.HaltParseException: pass except (BaseException, Exception): - logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) + logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}') self._log_search(mode, len(items[mode]) - cnt, self.session.response.get('url')) diff --git a/sickgear/providers/rsstorrent.py b/sickgear/providers/rsstorrent.py index 802eae4e..31971841 100644 --- a/sickgear/providers/rsstorrent.py +++ b/sickgear/providers/rsstorrent.py @@ -59,7 +59,7 @@ class TorrentRssProvider(generic.TorrentProvider): title, url = None, None if item.title: - title = re.sub(r'\s+', '.', u'' + item.title) + title = re.sub(r'\s+', '.', '' + item.title) attempt_list = [lambda: item.torrent_magneturi, lambda: item.enclosures[0].href, diff --git a/sickgear/providers/scenehd.py b/sickgear/providers/scenehd.py index 4b982fe0..a04aa810 100644 --- a/sickgear/providers/scenehd.py +++ b/sickgear/providers/scenehd.py @@ -23,7 +23,6 @@ from .. import logger from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import unidecode from six import iteritems @@ -48,7 +47,7 @@ class SceneHDProvider(generic.TorrentProvider): return super(SceneHDProvider, self)._authorised( logged_in=(lambda y='': ['RSS links' in y] and all( [(self.session.cookies.get(c, domain='') or 'sg!no!pw') in self.digest for c in ('uid', 'pass')])), - failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings')) + failed_msg=(lambda y=None: 'Invalid cookie details for %s. 
Check settings')) def _search_provider(self, search_params, **kwargs): @@ -62,7 +61,6 @@ class SceneHDProvider(generic.TorrentProvider): 'nuked': 'nuke', 'filter': 'free'})]) for mode in search_params: for search_string in search_params[mode]: - search_string = unidecode(search_string) search_url = self.urls['search'] % (search_string, self._categories_string(mode, '%s', ',')) html = self.get_url(search_url, timeout=90) @@ -111,7 +109,7 @@ class SceneHDProvider(generic.TorrentProvider): except generic.HaltParseException: pass except (BaseException, Exception): - logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) + logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}') self._log_search(mode, len(items[mode]) - cnt, search_url) diff --git a/sickgear/providers/scenetime.py b/sickgear/providers/scenetime.py index 96d14262..edc318f0 100644 --- a/sickgear/providers/scenetime.py +++ b/sickgear/providers/scenetime.py @@ -23,7 +23,6 @@ from .. import logger from ..helpers import anon_url, try_int from bs4_parser import BS4Parser -from _23 import unidecode from six import iteritems @@ -51,7 +50,7 @@ class SceneTimeProvider(generic.TorrentProvider): ['staff-support' in y, self.has_all_cookies()] + [(self.session.cookies.get(x, domain='') or 'sg!no!pw') in self.digest for x in ('uid', 'pass')])), - failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings')) + failed_msg=(lambda y=None: 'Invalid cookie details for %s. Check settings')) @staticmethod def _has_signature(data=None): @@ -70,7 +69,6 @@ class SceneTimeProvider(generic.TorrentProvider): urls = [] for search_string in search_params[mode]: urls += [[]] - search_string = unidecode(search_string) search_url = self.urls['search'] % (self._categories_string(), '+'.join(search_string.replace('.', ' ').split()), ('', '&freeleech=on')[self.freeleech]) @@ -148,7 +146,7 @@ class SceneTimeProvider(generic.TorrentProvider): except generic.HaltParseException: pass except (BaseException, Exception): - logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) + logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}') self._log_search(mode, len(items[mode]) - cnt, search_url, log_settings_hint) diff --git a/sickgear/providers/shazbat.py b/sickgear/providers/shazbat.py index b0187e49..81bf520c 100644 --- a/sickgear/providers/shazbat.py +++ b/sickgear/providers/shazbat.py @@ -26,7 +26,7 @@ from .. import logger from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import unidecode, unquote_plus +from _23 import unquote_plus from six import iteritems, text_type @@ -75,7 +75,6 @@ class ShazbatProvider(generic.TorrentProvider): if self.should_skip(): return results else: - search_string = unidecode(search_string) search_string = search_string.replace(show_detail, '').strip() search_url = self.urls['search'] % search_string html = self.get_url(search_url) @@ -135,7 +134,7 @@ class ShazbatProvider(generic.TorrentProvider): except generic.HaltParseException: pass except (BaseException, Exception): - logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) + logger.error(f'Failed to parse. 
Traceback: {traceback.format_exc()}') self._log_search(mode, len(items[mode]) - cnt, search_url) results = self._sort_seeding(mode, results + items[mode]) diff --git a/sickgear/providers/showrss.py b/sickgear/providers/showrss.py index b630b2fb..392e8e45 100644 --- a/sickgear/providers/showrss.py +++ b/sickgear/providers/showrss.py @@ -25,7 +25,7 @@ from .. import logger from ..helpers import sanitize_scene_name from bs4_parser import BS4Parser -from _23 import decode_str, filter_list, html_unescape, list_keys, list_values, unidecode +from _23 import decode_str, html_unescape from six import iteritems, iterkeys @@ -51,11 +51,11 @@ class ShowRSSProvider(generic.TorrentProvider): def logged_in(self, y): if all([None is y or 'logout' in y, - bool(filter_list(lambda c: 'remember_web_' in c, iterkeys(self.session.cookies)))]): + bool(list(filter(lambda c: 'remember_web_' in c, iterkeys(self.session.cookies))))]): if None is not y: self.shows = dict(re.findall(r'', y)) for k, v in iteritems(self.shows): - self.shows[k] = sanitize_scene_name(html_unescape(unidecode(decode_str(v)))) + self.shows[k] = sanitize_scene_name(html_unescape(decode_str(v))) return True return False @@ -74,13 +74,12 @@ class ShowRSSProvider(generic.TorrentProvider): if 'Cache' == mode: search_url = self.urls['browse'] else: - search_string = unidecode(search_string) - show_name = filter_list(lambda x: x.lower() == re.sub(r'\s.*', '', search_string.lower()), - list_values(self.shows)) + show_name = list(filter(lambda x: x.lower() == re.sub(r'\s.*', '', search_string.lower()), + list(self.shows.values()))) if not show_name: continue - search_url = self.urls['search'] % list_keys(self.shows)[ - list_values(self.shows).index(show_name[0])] + search_url = self.urls['search'] % list(self.shows)[ + list(self.shows.values()).index(show_name[0])] if search_url in urls: continue @@ -115,7 +114,7 @@ class ShowRSSProvider(generic.TorrentProvider): except generic.HaltParseException: pass except (BaseException, Exception): - logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) + logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}') self._log_search(mode, len(items[mode]) - cnt, search_url) results = self._sort_seeding(mode, results + items[mode]) diff --git a/sickgear/providers/snowfl.py b/sickgear/providers/snowfl.py index e78f1f78..b0a252ac 100644 --- a/sickgear/providers/snowfl.py +++ b/sickgear/providers/snowfl.py @@ -25,7 +25,7 @@ from .. 
import logger from ..helpers import try_int from json_helper import json_loads -from _23 import b64encodestring, filter_iter, map_list, quote, unidecode +from _23 import b64encodestring, quote from six import iteritems # noinspection PyUnreachableCode @@ -74,7 +74,7 @@ class SnowflProvider(generic.TorrentProvider): params = dict(token=token[0], ent=token[1]) if 'Cache' != mode: - params.update({'ss': quote_fx(unidecode(search_string))}) + params.update({'ss': quote_fx(search_string)}) data_json = None vals = [i for i in range(3, 8)] @@ -92,13 +92,13 @@ class SnowflProvider(generic.TorrentProvider): if self.should_skip(): return results - for item in filter_iter(lambda di: re.match('(?i).*?(tv|television)', - di.get('type', '') or di.get('category', '')) - and (not self.confirmed or di.get('trusted') or di.get('verified')), - data_json or {}): - seeders, leechers, size = map_list(lambda arg: try_int( + for item in filter(lambda di: re.match('(?i).*?(tv|television)', + di.get('type', '') or di.get('category', '')) + and (not self.confirmed or di.get('trusted') or di.get('verified')), + data_json or {}): + seeders, leechers, size = list(map(lambda arg: try_int( *([item.get(arg[0]) if None is not item.get(arg[0]) else item.get(arg[1])]) * 2), - (('seeder', 'seed'), ('leecher', 'leech'), ('size', 'size'))) + (('seeder', 'seed'), ('leecher', 'leech'), ('size', 'size')))) if self._reject_item(seeders, leechers): continue title = item.get('name') or item.get('title') @@ -117,7 +117,7 @@ class SnowflProvider(generic.TorrentProvider): except generic.HaltParseException: pass except (BaseException, Exception): - logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) + logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}') self._log_search(mode, len(items[mode]) - cnt, search_url) @@ -163,8 +163,8 @@ class SnowflProvider(generic.TorrentProvider): else: from sickgear import providers if 'torlock' in url.lower(): - prov = next(filter_iter(lambda p: 'torlock' == p.name.lower(), (filter_iter( - lambda sp: sp.providerType == self.providerType, providers.sortedProviderList())))) + prov = next(filter(lambda p: 'torlock' == p.name.lower(), (filter( + lambda sp: sp.providerType == self.providerType, providers.sorted_sources())))) state = prov.enabled prov.enabled = True _ = prov.url diff --git a/sickgear/providers/speedapp.py b/sickgear/providers/speedapp.py index e730e193..a354988a 100644 --- a/sickgear/providers/speedapp.py +++ b/sickgear/providers/speedapp.py @@ -21,7 +21,6 @@ from . import generic from ..helpers import try_int from six import string_types -from _23 import filter_list, map_list, unidecode class SpeedAppProvider(generic.TorrentProvider): @@ -47,7 +46,7 @@ class SpeedAppProvider(generic.TorrentProvider): return super(SpeedAppProvider, self)._authorised( logged_in=self.logged_in, parse_json=True, headers=self.auth_header(), - failed_msg=(lambda y=None: u'Invalid token or permissions for %s. Check settings')) + failed_msg=(lambda y=None: 'Invalid token or permissions for %s. 
Check settings')) def logged_in(self, resp=None): @@ -55,14 +54,15 @@ class SpeedAppProvider(generic.TorrentProvider): self.perms_needed = self.perms if isinstance(resp, dict) and isinstance(resp.get('scopes'), list): self._authd = True - self.perms_needed = filter_list(lambda x: True is not x, [p in resp.get('scopes') or p for p in self.perms]) + self.perms_needed = list(filter(lambda x: True is not x, + [p in resp.get('scopes') or p for p in self.perms])) if not self.perms_needed: self.categories = None resp = self.get_url(self.urls['cats'], skip_auth=True, parse_json=True, headers=self.auth_header()) if isinstance(resp, list): - categories = [category['id'] for category in filter_list( + categories = [category['id'] for category in list(filter( lambda c: isinstance(c.get('id'), int) and isinstance(c.get('name'), string_types) - and c.get('name').upper() in ('TV PACKS', 'TV HD', 'TV SD'), resp)] + and c.get('name').upper() in ('TV PACKS', 'TV HD', 'TV SD'), resp))] self.categories = {'Cache': categories, 'Episode': categories, 'Season': categories} return not any(self.perms_needed) @@ -81,7 +81,7 @@ class SpeedAppProvider(generic.TorrentProvider): for mode in search_params: for search_string in search_params[mode]: search_url = self.urls['search'] % ( - unidecode(search_string), self._categories_string(mode, template='categories[]=%s')) + search_string, self._categories_string(mode, template='categories[]=%s')) data_json = self.get_url(search_url, skip_auth=True, parse_json=True, headers=self.auth_header()) if self.should_skip(): @@ -111,10 +111,10 @@ class SpeedAppProvider(generic.TorrentProvider): ('%s_api_key_tip' % self.get_id()) == key and \ ((not_authd or self.perms_needed) and ('create token at %s site
' - 'with perms %s' % (self.url_base, self.name, map_list( + 'with perms %s' % (self.url_base, self.name, list(map( lambda p: 't.read' in p and 'Read torrents' or 't.down' in p and 'Download torrents' - or 'ch.read' in p and 'Read snatches', self.perms_needed))) + or 'ch.read' in p and 'Read snatches', self.perms_needed)))) .replace('[', '').replace(']', '') or 'token is valid and required permissions are enabled') \ or '' diff --git a/sickgear/providers/speedcd.py b/sickgear/providers/speedcd.py index 8f21401f..f1a12083 100644 --- a/sickgear/providers/speedcd.py +++ b/sickgear/providers/speedcd.py @@ -25,7 +25,7 @@ from ..helpers import try_int from bs4_parser import BS4Parser from requests.cookies import cookiejar_from_dict -from _23 import filter_list, quote, unquote +from _23 import quote, unquote from six import string_types, iteritems @@ -63,12 +63,12 @@ class SpeedCDProvider(generic.TorrentProvider): self.session.cookies.clear() json = self.get_url(self.urls['login_1'], skip_auth=True, post_data={'username': self.username}, parse_json=True) - resp = filter_list(lambda l: isinstance(l, list), json.get('Fs', [])) + resp = list(filter(lambda l: isinstance(l, list), json.get('Fs', []))) def get_html(_resp): for cur_item in _resp: if isinstance(cur_item, list): - _html = filter_list(lambda s: isinstance(s, string_types) and 'password' in s, cur_item) + _html = list(filter(lambda s: isinstance(s, string_types) and 'password' in s, cur_item)) if not _html: _html = get_html(cur_item) if _html: @@ -94,9 +94,9 @@ class SpeedCDProvider(generic.TorrentProvider): self.digest = 'inSpeed_speedian=%s' % self.session.cookies.get('inSpeed_speedian') sickgear.save_config() result = True - logger.log('Cookie details for %s updated.' % self.name, logger.DEBUG) + logger.debug('Cookie details for %s updated.' % self.name) elif not self.failure_count: - logger.log('Invalid cookie details for %s and login failed. Check settings' % self.name, logger.ERROR) + logger.error('Invalid cookie details for %s and login failed. Check settings' % self.name) return result @staticmethod @@ -128,13 +128,13 @@ class SpeedCDProvider(generic.TorrentProvider): cnt = len(items[mode]) try: - html = filter_list(lambda l: isinstance(l, list), data_json.get('Fs', [])) + html = list(filter(lambda l: isinstance(l, list), data_json.get('Fs', []))) while html: if html and all(isinstance(x, string_types) for x in html): str_lengths = [len(x) for x in html] html = html[str_lengths.index(max(str_lengths))] break - html = filter_list(lambda l: isinstance(l, list), html) + html = list(filter(lambda l: isinstance(l, list), html)) if html and 0 < len(html): html = html[0] diff --git a/sickgear/providers/thepiratebay.py b/sickgear/providers/thepiratebay.py index 51cbd129..1e390aef 100644 --- a/sickgear/providers/thepiratebay.py +++ b/sickgear/providers/thepiratebay.py @@ -25,7 +25,7 @@ from .. 
import logger from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import b64decodestring, unidecode +from _23 import b64decodestring from six import iteritems @@ -90,7 +90,6 @@ class ThePirateBayProvider(generic.TorrentProvider): for mode in search_params: for search_string in search_params[mode]: - search_string = unidecode(search_string) if 'Cache' != mode: search_url = self.urls['api'] % search_string @@ -114,7 +113,7 @@ class ThePirateBayProvider(generic.TorrentProvider): if not self._reject_item(seeders, leechers): status, info_hash = [cur_item.get(k) for k in ('status', 'info_hash')] if self.confirmed and not rc['verify'].search(status): - logger.log(u'Skipping untrusted non-verified result: ' + title, logger.DEBUG) + logger.debug('Skipping untrusted non-verified result: ' + title) continue download_magnet = info_hash if '&tr=' in info_hash \ else self._dhtless_magnet(info_hash, title) @@ -237,7 +236,7 @@ class ThePirateBayProvider(generic.TorrentProvider): if self.confirmed and not ( tr.find('img', title=rc['verify']) or tr.find('img', alt=rc['verify']) or tr.find('img', src=rc['verify'])): - logger.log(u'Skipping untrusted non-verified result: ' + title, logger.DEBUG) + logger.debug('Skipping untrusted non-verified result: ' + title) continue if title and download_magnet: @@ -246,7 +245,7 @@ class ThePirateBayProvider(generic.TorrentProvider): except generic.HaltParseException: pass except (BaseException, Exception): - logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) + logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}') self._log_search(mode, len(items[mode]) - cnt, search_url) results = self._sort_seeding(mode, results + items[mode]) diff --git a/sickgear/providers/tokyotoshokan.py b/sickgear/providers/tokyotoshokan.py index 8396fd9e..338f38f9 100644 --- a/sickgear/providers/tokyotoshokan.py +++ b/sickgear/providers/tokyotoshokan.py @@ -22,7 +22,7 @@ from .. 
import show_name_helpers, tvcache from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import filter_list, map_list, urlencode +from _23 import urlencode from six import iteritems @@ -78,10 +78,10 @@ class TokyoToshokanProvider(generic.TorrentProvider): info = top.find('td', class_='desc-top') title = info and re.sub(r'[ .]{2,}', '.', info.get_text().strip()) - links = info and map_list(lambda l: l.get('href', ''), info.find_all('a')) or None + links = info and list(map(lambda l: l.get('href', ''), info.find_all('a'))) or None download_url = self._link( - (filter_list(lambda l: 'magnet:' in l, links) - or filter_list(lambda l: not re.search(r'(magnet:|\.se).+', l), links))[0]) + (list(filter(lambda l: 'magnet:' in l, links)) + or list(filter(lambda l: not re.search(r'(magnet:|\.se).+', l), links)))[0]) except (AttributeError, TypeError, ValueError, IndexError): continue @@ -98,10 +98,10 @@ class TokyoToshokanProvider(generic.TorrentProvider): return results def _season_strings(self, *args, **kwargs): - return [{'Season': show_name_helpers.makeSceneSeasonSearchString(self.show_obj, *args)}] + return [{'Season': show_name_helpers.make_scene_season_search_string(self.show_obj, *args)}] def _episode_strings(self, *args, **kwargs): - return [{'Episode': show_name_helpers.makeSceneSearchString(self.show_obj, *args)}] + return [{'Episode': show_name_helpers.make_scene_search_string(self.show_obj, *args)}] class TokyoToshokanCache(tvcache.TVCache): diff --git a/sickgear/providers/torlock.py b/sickgear/providers/torlock.py index 52fa16b8..3ddb1346 100644 --- a/sickgear/providers/torlock.py +++ b/sickgear/providers/torlock.py @@ -23,7 +23,7 @@ from .. import logger from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import b64decodestring, quote_plus, unidecode +from _23 import b64decodestring, quote_plus from six import iteritems @@ -66,8 +66,6 @@ class TorLockProvider(generic.TorrentProvider): for mode in search_params: for search_string in search_params[mode]: - search_string = unidecode(search_string) - search_url = self.urls['browse'] if 'Cache' == mode \ else self.urls['search'] % (quote_plus(search_string).replace('+', '-')) @@ -123,7 +121,7 @@ class TorLockProvider(generic.TorrentProvider): except generic.HaltParseException: pass except (BaseException, Exception): - logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) + logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}') self._log_search(mode, len(items[mode]) - cnt, search_url) diff --git a/sickgear/providers/torrentday.py b/sickgear/providers/torrentday.py index 3badba6f..2e0d751d 100644 --- a/sickgear/providers/torrentday.py +++ b/sickgear/providers/torrentday.py @@ -57,7 +57,7 @@ class TorrentDayProvider(generic.TorrentProvider): ['RSS URL' in y, self.has_all_cookies()] + [(self.session.cookies.get(c, domain='') or 'sg!no!pw') in self.digest for c in ('uid', 'pass')])), - failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings')) + failed_msg=(lambda y=None: 'Invalid cookie details for %s. Check settings')) @staticmethod def _has_signature(data=None): diff --git a/sickgear/providers/torrenting.py b/sickgear/providers/torrenting.py index cf17d82e..c8c5c1b0 100644 --- a/sickgear/providers/torrenting.py +++ b/sickgear/providers/torrenting.py @@ -23,7 +23,6 @@ from .. 
import logger from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import unidecode from six import iteritems @@ -48,7 +47,7 @@ class TorrentingProvider(generic.TorrentProvider): logged_in=(lambda y='': all( ['RSS link' in y, self.has_all_cookies()] + [(self.session.cookies.get(x) or 'sg!no!pw') in self.digest for x in ('uid', 'pass')])), - failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings')) + failed_msg=(lambda y=None: 'Invalid cookie details for %s. Check settings')) @staticmethod def _has_signature(data=None): @@ -67,7 +66,6 @@ class TorrentingProvider(generic.TorrentProvider): 'get': 'download'})]) for mode in search_params: for search_string in search_params[mode]: - search_string = unidecode(search_string) search_url = self.urls['search'] % (self._categories_string(), search_string) html = self.get_url(search_url) @@ -109,7 +107,7 @@ class TorrentingProvider(generic.TorrentProvider): except generic.HaltParseException: pass except (BaseException, Exception): - logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) + logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}') self._log_search(mode, len(items[mode]) - cnt, search_url) diff --git a/sickgear/providers/torrentleech.py b/sickgear/providers/torrentleech.py index f65a3efb..faeecb4d 100644 --- a/sickgear/providers/torrentleech.py +++ b/sickgear/providers/torrentleech.py @@ -21,8 +21,7 @@ import re from . import generic from ..helpers import anon_url, try_int -from _23 import unidecode -from six import iteritems, PY2 +from six import iteritems class TorrentLeechProvider(generic.TorrentProvider): @@ -45,7 +44,7 @@ class TorrentLeechProvider(generic.TorrentProvider): return super(TorrentLeechProvider, self)._authorised( logged_in=(lambda y='': all( ['TorrentLeech' in y, 'type="password"' not in y[0:4096], self.has_all_cookies(pre='tl')])), - failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings')) + failed_msg=(lambda y=None: 'Invalid cookie details for %s. 
Check settings')) @staticmethod def _has_signature(data=None): @@ -66,7 +65,7 @@ class TorrentLeechProvider(generic.TorrentProvider): for page in range((3, 5)['Cache' == mode])[1:]: urls[-1] += [self.urls[('search', 'browse')['Cache' == mode]] % { 'cats': self._categories_string(mode, '', ','), - 'query': unidecode(search_string) or search_string, + 'query': search_string, 'x': '%spage/%s' % (('facets/tags:FREELEECH/', '')[not self.freeleech], page) }] results += self._search_urls(mode, last_recent_search, urls) @@ -125,8 +124,7 @@ class TorrentLeechProvider(generic.TorrentProvider): download_url = None if dl and dl_id: # noinspection PyUnresolvedReferences - download_url = self._link('download/%s/%s' % (dl_id, dl), - url_quote=PY2 and isinstance(dl, unicode) or None) + download_url = self._link('download/%s/%s' % (dl_id, dl)) except (BaseException, Exception): continue diff --git a/sickgear/providers/tvchaosuk.py b/sickgear/providers/tvchaosuk.py index 244759cb..c7576fd1 100644 --- a/sickgear/providers/tvchaosuk.py +++ b/sickgear/providers/tvchaosuk.py @@ -27,7 +27,7 @@ from ..helpers import try_int from bs4_parser import BS4Parser from dateutil.parser import parse -from _23 import unidecode, unquote_plus +from _23 import unquote_plus from six import iteritems @@ -80,7 +80,7 @@ class TVChaosUKProvider(generic.TorrentProvider): 'info': r'/torrents?/(?P(?P\d{2,})[^"]*)', 'get': 'download'})]) for mode in search_params: for search_string in search_params[mode]: - search_string = unidecode(unquote_plus(search_string)) + search_string = unquote_plus(search_string) vals = [i for i in range(5, 16)] random.SystemRandom().shuffle(vals) @@ -142,7 +142,7 @@ class TVChaosUKProvider(generic.TorrentProvider): except generic.HaltParseException: pass except (BaseException, Exception): - logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) + logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}') if soup: soup.clear(True) diff --git a/sickgear/providers/xspeeds.py b/sickgear/providers/xspeeds.py index 4b11a356..8b3a7487 100644 --- a/sickgear/providers/xspeeds.py +++ b/sickgear/providers/xspeeds.py @@ -25,7 +25,6 @@ from .. import logger from ..helpers import has_anime, try_int from bs4_parser import BS4Parser -from _23 import unidecode from six import iteritems @@ -68,9 +67,8 @@ class XspeedsProvider(generic.TorrentProvider): if self.should_skip(): return results for search_string in search_params[mode]: - search_string = search_string.replace(u'£', '%') + search_string = search_string.replace('£', '%') search_string = re.sub(r'[\s.]+', '%', search_string) - search_string = unidecode(search_string) kwargs = dict(post_data={'keywords': search_string, 'do': 'quick_sort', 'page': '0', 'category': '0', 'search_type': 't_name', 'sort': 'added', @@ -133,7 +131,7 @@ class XspeedsProvider(generic.TorrentProvider): except generic.HaltParseException: pass except (BaseException, Exception): - logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) + logger.error(f'Failed to parse. 
Traceback: {traceback.format_exc()}') self._log_search(mode, len(items[mode]) - cnt, ('search string: ' + search_string.replace('%', '%%'), self.name)['Cache' == mode]) diff --git a/sickgear/rssfeeds.py b/sickgear/rssfeeds.py index dd24463c..f91b67fe 100644 --- a/sickgear/rssfeeds.py +++ b/sickgear/rssfeeds.py @@ -30,9 +30,9 @@ class RSSFeeds(object): if data and 'error' in data.feed: err_code = data.feed['error']['code'] err_desc = data.feed['error']['description'] - logger.log(u'RSS error:[%s] code:[%s]' % (err_desc, err_code), logger.DEBUG) + logger.debug(f'RSS error:[{err_desc}] code:[{err_code}]') else: - logger.log(u'RSS error loading url: ' + url, logger.DEBUG) + logger.debug(f'RSS error loading url: {url}') except (BaseException, Exception) as e: - logger.log(u'RSS error: ' + ex(e), logger.DEBUG) + logger.debug(f'RSS error: {ex(e)}') diff --git a/sickgear/sab.py b/sickgear/sab.py index f6d5496a..e98b69f4 100644 --- a/sickgear/sab.py +++ b/sickgear/sab.py @@ -67,10 +67,10 @@ def send_nzb(search_result): return False kwargs['files'] = {'nzbfile': ('%s.nzb' % search_result.name, nzb_data)} - logger.log(u'Sending %s to SABnzbd: %s' % (nzb_type, search_result.name)) + logger.log(f'Sending {nzb_type} to SABnzbd: {search_result.name}') url = '%sapi' % sickgear.SAB_HOST - logger.log(u'SABnzbd at %s sent params: %s' % (url, params), logger.DEBUG) + logger.debug(f'SABnzbd at {url} sent params: {params}') success, result = _get_url(url, **kwargs) if not success: @@ -78,23 +78,23 @@ def send_nzb(search_result): # do some crude parsing of the result text to determine what SAB said if result.get('status'): - logger.log(u'Success from SABnzbd using %s' % nzb_type, logger.DEBUG) + logger.debug(f'Success from SABnzbd using {nzb_type}') return True elif 'error' in result: - logger.log(u'Failed using %s with SABnzbd, response: %s' % (nzb_type, result.get('error', 'und')), logger.ERROR) + logger.error(f'Failed using {nzb_type} with SABnzbd, response: {result.get("error", "und")}') else: - logger.log(u'Failure unknown using %s with SABnzbd, response: %s' % (nzb_type, result), logger.ERROR) + logger.error(f'Failure unknown using {nzb_type} with SABnzbd, response: {result}') return False def _check_sab_response(result): if 0 == len(result): - logger.log('No data returned from SABnzbd, nzb not used', logger.ERROR) + logger.error('No data returned from SABnzbd, nzb not used') return False, 'No data from SABnzbd' if 'error' in result: - logger.log(result['error'], logger.ERROR) + logger.error(result['error']) return False, result['error'] return True, result @@ -103,7 +103,7 @@ def _get_url(url, params=None, **kwargs): result = sickgear.helpers.get_url(url, params=params, parse_json=True, **kwargs) if None is result: - logger.log('Error, no response from SABnzbd', logger.ERROR) + logger.error('Error, no response from SABnzbd') return False, 'Error, no response from SABnzbd' return True, result @@ -118,7 +118,7 @@ def access_method(host): def test_authentication(host=None, username=None, password=None, apikey=None): """ - Sends a simple API request to SAB to determine if the given connection information is connect + Sends a simple API request to SAB to determine if the given connection information is correct Returns: A tuple containing the success boolean and a message :param host: The host where SAB is running (incl port) @@ -132,7 +132,7 @@ def test_authentication(host=None, username=None, password=None, apikey=None): url = '%sapi' % host # send the test request - logger.log(u'SABnzbd test URL: %s with 
parameters: %s' % (url, params), logger.DEBUG) + logger.debug(f'SABnzbd test URL: {url} with parameters: {params}') success, result = _get_url(url, params=params) if not success: return False, result diff --git a/sickgear/scene_exceptions.py b/sickgear/scene_exceptions.py index cb8b47eb..148285cc 100644 --- a/sickgear/scene_exceptions.py +++ b/sickgear/scene_exceptions.py @@ -25,20 +25,18 @@ import threading import traceback import sickgear -# noinspection PyPep8Naming -import encodingKludge as ek from exceptions_helper import ex from json_helper import json_load from . import db, helpers, logger, name_cache from .anime import create_anidb_obj from .classes import OrderedDefaultdict from .indexers.indexer_config import TVINFO_TVDB -from .sgdatetime import timestamp_near +from .sgdatetime import SGDatetime import lib.rarfile.rarfile as rarfile -from _23 import filter_iter, list_range, map_iter -from six import iteritems, PY2, text_type +from _23 import list_range +from six import iteritems # noinspection PyUnreachableCode if False: @@ -70,9 +68,9 @@ def should_refresh(name, max_refresh_age_secs=86400, remaining=False): if rows: last_refresh = int(rows[0]['last_refreshed']) if remaining: - time_left = (last_refresh + max_refresh_age_secs - int(timestamp_near(datetime.datetime.now()))) + time_left = (last_refresh + max_refresh_age_secs - SGDatetime.timestamp_near()) return (0, time_left)[time_left > 0] - return int(timestamp_near(datetime.datetime.now())) > last_refresh + max_refresh_age_secs + return SGDatetime.timestamp_near() > last_refresh + max_refresh_age_secs return True @@ -84,7 +82,7 @@ def set_last_refresh(name): """ my_db = db.DBConnection() my_db.upsert('scene_exceptions_refresh', - {'last_refreshed': int(timestamp_near(datetime.datetime.now()))}, + {'last_refreshed': SGDatetime.timestamp_near()}, {'list': name}) @@ -231,10 +229,10 @@ def retrieve_exceptions(): """ global exception_dict, anidb_exception_dict, xem_exception_dict - # exceptions are stored on github pages + # exceptions are stored on GitHub pages for tvid in sickgear.TVInfoAPI().sources: if should_refresh(sickgear.TVInfoAPI(tvid).name): - logger.log(u'Checking for scene exception updates for %s' % sickgear.TVInfoAPI(tvid).name) + logger.log(f'Checking for scene exception updates for {sickgear.TVInfoAPI(tvid).name}') url = sickgear.TVInfoAPI(tvid).config.get('scene_url') if not url: @@ -243,7 +241,7 @@ def retrieve_exceptions(): url_data = helpers.get_url(url) if None is url_data: # When None is urlData, trouble connecting to github - logger.log(u'Check scene exceptions update failed. Unable to get URL: %s' % url, logger.ERROR) + logger.error(f'Check scene exceptions update failed. 
Unable to get URL: {url}') continue else: @@ -305,17 +303,14 @@ def retrieve_exceptions(): list(cur_tvid_prodid))] # if this exception isn't already in the DB then add it - for cur_exception_dict in filter_iter(lambda e: e not in existing_exceptions, exception_dict[cur_tvid_prodid]): + for cur_exception_dict in filter(lambda e: e not in existing_exceptions, exception_dict[cur_tvid_prodid]): try: cur_exception, cur_season = next(iteritems(cur_exception_dict)) except (BaseException, Exception): - logger.log('scene exception error', logger.ERROR) - logger.log(traceback.format_exc(), logger.ERROR) + logger.error('scene exception error') + logger.error(traceback.format_exc()) continue - if PY2 and not isinstance(cur_exception, text_type): - cur_exception = text_type(cur_exception, 'utf-8', 'replace') - cl.append(['INSERT INTO scene_exceptions' ' (indexer, indexer_id, show_name, season) VALUES (?,?,?,?)', list(cur_tvid_prodid) + [cur_exception, cur_season]]) @@ -323,13 +318,13 @@ def retrieve_exceptions(): if cl: my_db.mass_action(cl) - name_cache.buildNameCache(update_only_scene=True) + name_cache.build_name_cache(update_only_scene=True) # since this could invalidate the results of the cache we clear it out after updating if changed_exceptions: - logger.log(u'Updated scene exceptions') + logger.log('Updated scene exceptions') else: - logger.log(u'No scene exceptions update needed') + logger.log('No scene exceptions update needed') # cleanup exception_dict.clear() @@ -358,26 +353,22 @@ def update_scene_exceptions(tvid, prodid, scene_exceptions): # A change has been made to the scene exception list. Let's clear the cache, to make this visible exceptionsCache[(tvid, prodid)] = defaultdict(list) - logger.log(u'Updating scene exceptions', logger.MESSAGE) + logger.log('Updating scene exceptions', logger.MESSAGE) for exception in scene_exceptions: cur_season, cur_exception = exception.split('|', 1) try: cur_season = int(cur_season) except (BaseException, Exception): - logger.log('invalid scene exception: %s - %s:%s' % ('%s:%s' % (tvid, prodid), cur_season, cur_exception), - logger.ERROR) + logger.error('invalid scene exception: %s - %s:%s' % ('%s:%s' % (tvid, prodid), cur_season, cur_exception)) continue exceptionsCache[(tvid, prodid)][cur_season].append(cur_exception) - if PY2 and not isinstance(cur_exception, text_type): - cur_exception = text_type(cur_exception, 'utf-8', 'replace') - my_db.action('INSERT INTO scene_exceptions' ' (indexer, indexer_id, show_name, season) VALUES (?,?,?,?)', [tvid, prodid, cur_exception, cur_season]) - sickgear.name_cache.buildNameCache(update_only_scene=True) + sickgear.name_cache.build_name_cache(update_only_scene=True) def _custom_exceptions_fetcher(): @@ -385,29 +376,29 @@ def _custom_exceptions_fetcher(): cnt_updated_numbers = 0 src_id = 'GHSG' - logger.log(u'Checking to update custom alternatives from %s' % src_id) + logger.log(f'Checking to update custom alternatives from {src_id}') - dirpath = ek.ek(os.path.join, sickgear.CACHE_DIR, 'alts') - tmppath = ek.ek(os.path.join, dirpath, 'tmp') - file_rar = ek.ek(os.path.join, tmppath, 'alt.rar') - file_cache = ek.ek(os.path.join, dirpath, 'alt.json') + dirpath = os.path.join(sickgear.CACHE_DIR, 'alts') + tmppath = os.path.join(dirpath, 'tmp') + file_rar = os.path.join(tmppath, 'alt.rar') + file_cache = os.path.join(dirpath, 'alt.json') iv = 30 * 60 # min interval to fetch updates refresh = should_refresh(src_id, iv) - fetch_data = not ek.ek(os.path.isfile, file_cache) or (not int(os.environ.get('NO_ALT_GET', 0)) and 
refresh) + fetch_data = not os.path.isfile(file_cache) or (not int(os.environ.get('NO_ALT_GET', 0)) and refresh) if fetch_data: - if ek.ek(os.path.exists, tmppath): + if os.path.exists(tmppath): helpers.remove_file(tmppath, tree=True) helpers.make_path(tmppath) helpers.download_file(r'https://github.com/SickGear/sickgear.altdata/raw/main/alt.rar', file_rar) rar_handle = None if 'win32' == sys.platform: - rarfile.UNRAR_TOOL = ek.ek(os.path.join, sickgear.PROG_DIR, 'lib', 'rarfile', 'UnRAR.exe') + rarfile.UNRAR_TOOL = os.path.join(sickgear.PROG_DIR, 'lib', 'rarfile', 'UnRAR.exe') try: rar_handle = rarfile.RarFile(file_rar) rar_handle.extractall(path=dirpath, pwd='sickgear_alt') except(BaseException, Exception) as e: - logger.log(u'Failed to unpack archive: %s with error: %s' % (file_rar, ex(e)), logger.ERROR) + logger.error(f'Failed to unpack archive: {file_rar} with error: {ex(e)}') if rar_handle: rar_handle.close() @@ -418,8 +409,8 @@ def _custom_exceptions_fetcher(): if refresh: set_last_refresh(src_id) - if not fetch_data and not ek.ek(os.path.isfile, file_cache): - logger.debug(u'Unable to fetch custom exceptions, skipped: %s' % file_rar) + if not fetch_data and not os.path.isfile(file_cache): + logger.debug(f'Unable to fetch custom exceptions, skipped: {file_rar}') return custom_exception_dict, cnt_updated_numbers, should_refresh(src_id, iv, remaining=True) data = {} @@ -427,7 +418,7 @@ def _custom_exceptions_fetcher(): with io.open(file_cache) as fh: data = json_load(fh) except(BaseException, Exception) as e: - logger.log(u'Failed to unpack json data: %s with error: %s' % (file_rar, ex(e)), logger.ERROR) + logger.error(f'Failed to unpack json data: {file_rar} with error: {ex(e)}') # handle data from .scene_numbering import find_scene_numbering, set_scene_numbering_helper @@ -467,11 +458,9 @@ def _custom_exceptions_fetcher(): used.add((for_season, for_episode, target_season, target_episode)) if sn and ((for_season, for_episode) + sn) not in used \ and (for_season, for_episode) not in used: - logger.log( - u'Skipped setting "%s" episode %sx%s to target a release %sx%s because set to %sx%s' - % (show_obj.unique_name, for_season, for_episode, - target_season, target_episode, sn[0], sn[1]), - logger.DEBUG) + logger.debug(f'Skipped setting "{show_obj.unique_name}" episode {for_season}x{for_episode}' + f' to target a release {target_season}x{target_episode}' + f' because set to {sn[0]}x{sn[1]}') else: used.add((for_season, for_episode)) if not sn or sn != (target_season, target_episode): # not already set @@ -490,8 +479,8 @@ def _anidb_exceptions_fetcher(): global anidb_exception_dict if should_refresh('anidb'): - logger.log(u'Checking for AniDB scene exception updates') - for cur_show_obj in filter_iter(lambda _s: _s.is_anime and TVINFO_TVDB == _s.tvid, sickgear.showList): + logger.log('Checking for AniDB scene exception updates') + for cur_show_obj in filter(lambda _s: _s.is_anime and TVINFO_TVDB == _s.tvid, sickgear.showList): try: anime = create_anidb_obj(name=cur_show_obj.name, tvdbid=cur_show_obj.prodid, autoCorrectName=True) except (BaseException, Exception): @@ -514,15 +503,15 @@ def _xem_exceptions_fetcher(): if should_refresh(xem_list): for tvid in [i for i in sickgear.TVInfoAPI().sources if 'xem_origin' in sickgear.TVInfoAPI(i).config]: - logger.log(u'Checking for XEM scene exception updates for %s' % sickgear.TVInfoAPI(tvid).name) + logger.log(f'Checking for XEM scene exception updates for {sickgear.TVInfoAPI(tvid).name}') - url = 
'http://thexem.info/map/allNames?origin=%s%s&seasonNumbers=1'\ + url = 'https://thexem.info/map/allNames?origin=%s%s&seasonNumbers=1'\ % (sickgear.TVInfoAPI(tvid).config['xem_origin'], ('&language=us', '')['xem' == xem_list]) parsed_json = helpers.get_url(url, parse_json=True, timeout=90) if not parsed_json: - logger.log(u'Check scene exceptions update failed for %s, Unable to get URL: %s' - % (sickgear.TVInfoAPI(tvid).name, url), logger.ERROR) + logger.error(f'Check scene exceptions update failed for {sickgear.TVInfoAPI(tvid).name},' + f' Unable to get URL: {url}') continue if 'failure' == parsed_json['result']: @@ -551,24 +540,23 @@ def _xem_get_ids(infosrc_name, xem_origin): """ xem_ids = [] - url = 'http://thexem.info/map/havemap?origin=%s' % xem_origin + url = 'https://thexem.info/map/havemap?origin=%s' % xem_origin task = 'Fetching show ids with%s xem scene mapping%s for origin' - logger.log(u'%s %s' % (task % ('', 's'), infosrc_name)) + logger.log(f'{task % ("", "s")} {infosrc_name}') parsed_json = helpers.get_url(url, parse_json=True, timeout=90) if not isinstance(parsed_json, dict) or not parsed_json: - logger.log(u'Failed %s %s, Unable to get URL: %s' - % (task.lower() % ('', 's'), infosrc_name, url), logger.ERROR) + logger.error(f'Failed {task.lower() % ("", "s")} {infosrc_name},' + f' Unable to get URL: {url}') else: if 'success' == parsed_json.get('result', '') and 'data' in parsed_json: - xem_ids = list(set(filter_iter(lambda prodid: 0 < prodid, - map_iter(lambda pid: helpers.try_int(pid), parsed_json['data'])))) + xem_ids = list(set(filter(lambda prodid: 0 < prodid, + map(lambda pid: helpers.try_int(pid), parsed_json['data'])))) if 0 == len(xem_ids): - logger.log(u'Failed %s %s, no data items parsed from URL: %s' - % (task.lower() % ('', 's'), infosrc_name, url), logger.WARNING) + logger.warning(f'Failed {task.lower() % ("", "s")} {infosrc_name},' + f' no data items parsed from URL: {url}') - logger.log(u'Finished %s %s' % (task.lower() % (' %s' % len(xem_ids), helpers.maybe_plural(xem_ids)), - infosrc_name)) + logger.log(f'Finished {task.lower() % (f" {len(xem_ids)}", helpers.maybe_plural(xem_ids))} {infosrc_name}') return xem_ids @@ -591,6 +579,6 @@ def has_abs_episodes(ep_obj=None, name=None): :return: :rtype: bool """ - return any([(name or ep_obj.show_obj.name or '').lower().startswith(x.lower()) for x in [ + return any((name or ep_obj.show_obj.name or '').lower().startswith(x.lower()) for x in [ 'The Eighties', 'The Making of the Mob', 'The Night Of', 'Roots 2016', 'Trepalium' - ]]) + ]) diff --git a/sickgear/scene_numbering.py b/sickgear/scene_numbering.py index 8bfa2cb7..2afc2914 100644 --- a/sickgear/scene_numbering.py +++ b/sickgear/scene_numbering.py @@ -30,9 +30,7 @@ import sickgear from . import db, logger from .helpers import try_int from .scene_exceptions import xem_ids_list -from .sgdatetime import timestamp_near - -from _23 import filter_iter, map_list +from .sgdatetime import SGDatetime # noinspection PyUnreachableCode if False: @@ -47,8 +45,8 @@ def get_scene_numbering(tvid, prodid, season, episode, fallback_to_xem=True, sho returns the TVDB numbering. 
(so the return values will always be set) - kwargs['scene_result']: type: Optional[List[Row]] passed thru - kwargs['show_result']: type: Optional[List[Row]] passed thru + kwargs['scene_result']: type: Optional[List[Row]] passed through + kwargs['show_result']: type: Optional[List[Row]] passed through :param tvid: tvid :type tvid: int @@ -136,8 +134,8 @@ def get_scene_absolute_numbering(tvid, prodid, absolute_number, season, episode, returns the TVDB numbering. (so the return values will always be set) - kwargs['scene_result']: type: Optional[List[Row]] passed thru - kwargs['show_result']: type: Optional[List[Row]] passed thru + kwargs['scene_result']: type: Optional[List[Row]] passed through + kwargs['show_result']: type: Optional[List[Row]] passed through :param tvid: tvid :type tvid: int @@ -718,8 +716,8 @@ def _get_absolute_numbering_for_show(tbl, tvid, prodid): """ % (tbl, ('indexer_id', 'showid')['tv_episodes' == tbl]), [int(tvid), int(prodid)]) for cur_row in sql_result: - season, episode, abs_num = map_list(lambda x: try_int(cur_row[x], None), - ('season', 'episode', 'absolute_number')) + season, episode, abs_num = list(map(lambda x: try_int(cur_row[x], None), + ('season', 'episode', 'absolute_number'))) if None is season and None is episode and None is not abs_num: season, episode, _ = _get_sea(tvid, prodid, absolute_number=abs_num) @@ -796,26 +794,26 @@ def xem_refresh(tvid, prodid, force=False): """, [tvid, prodid]) if sql_result: last_refresh = int(sql_result[0]['last_refreshed']) - refresh = int(timestamp_near(datetime.datetime.now())) > last_refresh + max_refresh_age_secs + refresh = SGDatetime.timestamp_near() > last_refresh + max_refresh_age_secs else: refresh = True if refresh or force: - logger.log(u'Looking up XEM scene mapping for show %s on %s' % (prodid, tvinfo.name), logger.DEBUG) + logger.debug(f'Looking up XEM scene mapping for show {prodid} on {tvinfo.name}') # mark refreshed my_db.upsert('xem_refresh', - dict(last_refreshed=int(timestamp_near(datetime.datetime.now()))), + dict(last_refreshed=SGDatetime.timestamp_near()), dict(indexer=tvid, indexer_id=prodid)) try: parsed_json = sickgear.helpers.get_url(url, parse_json=True, timeout=90) if not parsed_json or '' == parsed_json: - logger.log(u'No XEM data for show %s on %s' % (prodid, tvinfo.name), logger.MESSAGE) + logger.log(f'No XEM data for show {prodid} on {tvinfo.name}', logger.MESSAGE) return if 'success' in parsed_json['result']: - cl = map_list(lambda entry: [ + cl = list(map(lambda entry: [ """ UPDATE tv_episodes SET scene_season = ?, scene_episode = ?, scene_absolute_number = ? 
@@ -824,17 +822,16 @@ def xem_refresh(tvid, prodid, force=False): for v in ('season', 'episode', 'absolute')] + [tvid, prodid] + [entry.get(xem_origin).get(v) for v in ('season', 'episode')] - ], filter_iter(lambda x: 'scene' in x, parsed_json['data'])) + ], filter(lambda x: 'scene' in x, parsed_json['data']))) if 0 < len(cl): my_db = db.DBConnection() my_db.mass_action(cl) else: - logger.log(u'Empty lookup result - no XEM data for show %s on %s' % (prodid, tvinfo.name), logger.DEBUG) + logger.debug(f'Empty lookup result - no XEM data for show {prodid} on {tvinfo.name}') except (BaseException, Exception) as e: - logger.log(u'Exception refreshing XEM data for show ' + str(prodid) + ' on ' + tvinfo.name + ': ' + ex(e), - logger.WARNING) - logger.log(traceback.format_exc(), logger.ERROR) + logger.warning(f'Exception refreshing XEM data for show {str(prodid)} on {tvinfo.name}: {ex(e)}') + logger.error(traceback.format_exc()) def fix_xem_numbering(tvid, prodid): @@ -868,9 +865,7 @@ def fix_xem_numbering(tvid, prodid): update_scene_episode = False update_scene_absolute_number = False - logger.log( - u'Fixing any XEM scene mapping issues for show %s on %s' % (prodid, sickgear.TVInfoAPI(tvid).name), - logger.DEBUG) + logger.debug(f'Fixing any XEM scene mapping issues for show {prodid} on {sickgear.TVInfoAPI(tvid).name}') cl = [] for cur_row in sql_result: @@ -1003,15 +998,15 @@ def set_scene_numbering_helper(tvid, prodid, for_season=None, for_episode=None, if not show_obj.is_anime: scene_season = None if scene_season in [None, 'null', ''] else int(scene_season) scene_episode = None if scene_episode in [None, 'null', ''] else int(scene_episode) - action_log = u'Set episode scene numbering to %sx%s for episode %sx%s of "%s"' \ - % (scene_season, scene_episode, for_season, for_episode, show_obj.unique_name) + action_log = f'Set episode scene numbering to {scene_season}x{scene_episode}' \ + f' for episode {for_season}x{for_episode} of "{show_obj.unique_name}"' scene_args.update({'scene_season': scene_season, 'scene_episode': scene_episode}) result = {'forSeason': for_season, 'forEpisode': for_episode, 'sceneSeason': None, 'sceneEpisode': None} else: for_absolute = None if for_absolute in [None, 'null', ''] else int(for_absolute) scene_absolute = None if scene_absolute in [None, 'null', ''] else int(scene_absolute) - action_log = u'Set absolute scene numbering to %s for episode %sx%s of "%s"' \ - % (scene_absolute, for_season, for_episode, show_obj.unique_name) + action_log = f'Set absolute scene numbering to {scene_absolute}' \ + f' for episode {for_season}x{for_episode} of "{show_obj.unique_name}"' ep_args.update({'absolute': for_absolute}) scene_args.update({'absolute_number': for_absolute, 'scene_absolute': scene_absolute, 'anime': True}) result = {'forAbsolute': for_absolute, 'sceneAbsolute': None} @@ -1025,7 +1020,7 @@ def set_scene_numbering_helper(tvid, prodid, for_season=None, for_episode=None, result['success'] = None is not ep_obj and not isinstance(ep_obj, str) if result['success']: - logger.log(action_log, logger.DEBUG) + logger.debug(action_log) set_scene_numbering(**scene_args) show_obj.flush_episodes() if not show_obj.is_anime: diff --git a/sickgear/scheduler.py b/sickgear/scheduler.py index b310ef81..88bc2976 100644 --- a/sickgear/scheduler.py +++ b/sickgear/scheduler.py @@ -26,17 +26,17 @@ from exceptions_helper import ex class Scheduler(threading.Thread): - def __init__(self, action, cycleTime=datetime.timedelta(minutes=10), run_delay=datetime.timedelta(minutes=0), - start_time=None, 
threadName="ScheduledThread", silent=True, prevent_cycle_run=None, paused=False): + def __init__(self, action, cycle_time=datetime.timedelta(minutes=10), run_delay=datetime.timedelta(minutes=0), + start_time=None, thread_name="ScheduledThread", silent=True, prevent_cycle_run=None, paused=False): super(Scheduler, self).__init__() - self.lastRun = datetime.datetime.now() + run_delay - cycleTime + self.last_run = datetime.datetime.now() + run_delay - cycle_time self.action = action - self.cycleTime = cycleTime + self.cycle_time = cycle_time self.start_time = start_time self.prevent_cycle_run = prevent_cycle_run - self.name = threadName + self.name = thread_name self.silent = silent self._stopper = threading.Event() self._unpause = threading.Event() @@ -65,10 +65,10 @@ class Scheduler(threading.Thread): else: self.unpause() - def timeLeft(self): - return self.cycleTime - (datetime.datetime.now() - self.lastRun) + def time_left(self): + return self.cycle_time - (datetime.datetime.now() - self.last_run) - def forceRun(self): + def force_run(self): if not self.action.amActive: self.force = True return True @@ -93,15 +93,15 @@ class Scheduler(threading.Thread): should_run = False # check if interval has passed - if current_time - self.lastRun >= self.cycleTime: + if current_time - self.last_run >= self.cycle_time: # check if wanting to start around certain time taking interval into account if self.start_time: hour_diff = current_time.time().hour - self.start_time.hour - if not hour_diff < 0 and hour_diff < self.cycleTime.seconds // 3600: + if not hour_diff < 0 and hour_diff < self.cycle_time.seconds // 3600: should_run = True else: - # set lastRun to only check start_time after another cycleTime - self.lastRun = current_time + # set last_run to only check start_time after another cycle_time + self.last_run = current_time else: should_run = True @@ -110,22 +110,22 @@ class Scheduler(threading.Thread): if should_run and ((self.prevent_cycle_run is not None and self.prevent_cycle_run()) or getattr(self.action, 'prevent_run', False)): - logger.log(u'%s skipping this cycleTime' % self.name, logger.WARNING) - # set lastRun to only check start_time after another cycleTime - self.lastRun = current_time + logger.warning(f'{self.name} skipping this cycle_time') + # set last_run to only check start_time after another cycle_time + self.last_run = current_time should_run = False if should_run: - self.lastRun = current_time + self.last_run = current_time try: if not self.silent: - logger.log(u"Starting new thread: " + self.name, logger.DEBUG) + logger.debug(f'Starting new thread: {self.name}') self.action.run() except (BaseException, Exception) as e: - logger.log(u"Exception generated in thread " + self.name + ": " + ex(e), logger.ERROR) - logger.log(repr(traceback.format_exc()), logger.ERROR) + logger.error(f'Exception generated in thread {self.name}: {ex(e)}') + logger.error(repr(traceback.format_exc())) finally: if self.force: diff --git a/sickgear/search.py b/sickgear/search.py index fdaf3d37..67f3b22f 100644 --- a/sickgear/search.py +++ b/sickgear/search.py @@ -22,8 +22,6 @@ import re import threading import traceback -# noinspection PyPep8Naming -import encodingKludge as ek import exceptions_helper from exceptions_helper import ex from sg_helpers import write_file @@ -36,7 +34,6 @@ from .common import DOWNLOADED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER, MULTI_ from .providers.generic import GenericProvider from .tv import TVEpisode, TVShow -from _23 import filter_list, filter_iter, list_values from six 
import iteritems, itervalues, string_types # noinspection PyUnreachableCode @@ -55,7 +52,7 @@ def _download_result(result): res_provider = result.provider if None is res_provider: - logger.log(u'Invalid provider name - this is a coding error, report it please', logger.ERROR) + logger.error('Invalid provider name - this is a coding error, report it please') return False # NZB files with a URL can just be downloaded from the provider @@ -65,9 +62,9 @@ def _download_result(result): elif 'nzbdata' == result.resultType: # get the final file path to the nzb - file_name = ek.ek(os.path.join, sickgear.NZB_DIR, u'%s.nzb' % result.name) + file_name = os.path.join(sickgear.NZB_DIR, f'{result.name}.nzb') - logger.log(u'Saving NZB to %s' % file_name) + logger.log(f'Saving NZB to {file_name}') new_result = True @@ -80,12 +77,12 @@ def _download_result(result): write_file(file_name, data, raise_exceptions=True) except (EnvironmentError, IOError) as e: - logger.log(u'Error trying to save NZB to black hole: %s' % ex(e), logger.ERROR) + logger.error(f'Error trying to save NZB to black hole: {ex(e)}') new_result = False elif 'torrent' == res_provider.providerType: new_result = res_provider.download_result(result) else: - logger.log(u'Invalid provider type - this is a coding error, report it please', logger.ERROR) + logger.error('Invalid provider type - this is a coding error, report it please') new_result = False return new_result @@ -123,7 +120,7 @@ def snatch_episode(result, end_status=SNATCHED): elif 'nzbget' == sickgear.NZB_METHOD: dl_result = nzbget.send_nzb(result) else: - logger.log(u'Unknown NZB action specified in config: %s' % sickgear.NZB_METHOD, logger.ERROR) + logger.error(f'Unknown NZB action specified in config: {sickgear.NZB_METHOD}') dl_result = False # TORRENT files can be sent to clients or saved to disk @@ -141,7 +138,7 @@ def snatch_episode(result, end_status=SNATCHED): if not result.content and not result.url.startswith('magnet'): result.content = result.provider.get_url(result.url, as_binary=True) if result.provider.should_skip() or not result.content: - logger.log(u'Torrent content failed to download from %s' % result.url, logger.ERROR) + logger.error(f'Torrent content failed to download from {result.url}') return False # Snatches torrent with client dl_result = clients.get_client_instance(sickgear.TORRENT_METHOD)().send_torrent(result) @@ -149,7 +146,7 @@ def snatch_episode(result, end_status=SNATCHED): if result.cache_filepath: helpers.remove_file_perm(result.cache_filepath) else: - logger.log(u'Unknown result type, unable to download it', logger.ERROR) + logger.error('Unknown result type, unable to download it') dl_result = False if not dl_result: @@ -158,7 +155,7 @@ def snatch_episode(result, end_status=SNATCHED): if sickgear.USE_FAILED_DOWNLOADS: failed_history.add_snatched(result) - ui.notifications.message(u'Episode snatched', result.name) + ui.notifications.message('Episode snatched', result.name) history.log_snatch(result) @@ -168,9 +165,9 @@ def snatch_episode(result, end_status=SNATCHED): for cur_ep_obj in result.ep_obj_list: with cur_ep_obj.lock: if is_first_best_match(cur_ep_obj.status, result): - cur_ep_obj.status = Quality.compositeStatus(SNATCHED_BEST, result.quality) + cur_ep_obj.status = Quality.composite_status(SNATCHED_BEST, result.quality) else: - cur_ep_obj.status = Quality.compositeStatus(end_status, result.quality) + cur_ep_obj.status = Quality.composite_status(end_status, result.quality) item = cur_ep_obj.get_sql() if None is not item: @@ -201,13 +198,13 @@ 
def pass_show_wordlist_checks(name, show_obj): result = show_name_helpers.contains_any(name, show_obj.rls_ignore_words, rx=show_obj.rls_ignore_words_regex, **re_extras) if None is not result and result: - logger.log(u'Ignored: %s for containing ignore word' % name) + logger.log(f'Ignored: {name} for containing ignore word') return False result = show_name_helpers.contains_any(name, show_obj.rls_require_words, rx=show_obj.rls_require_words_regex, **re_extras) if None is not result and not result: - logger.log(u'Ignored: %s for not containing any required word match' % name) + logger.log(f'Ignored: {name} for not containing any required word match') return False return True @@ -228,8 +225,8 @@ def pick_best_result( :param filter_rls: optional thread name :return: best search result """ - msg = (u'Picking the best result out of %s', u'Checking the best result %s')[1 == len(results)] - logger.log(msg % [x.name for x in results], logger.DEBUG) + msg = ('Picking the best result out of %s', 'Checking the best result %s')[1 == len(results)] + logger.debug(msg % [x.name for x in results]) # find the best result for the current episode best_result = None @@ -255,8 +252,8 @@ def pick_best_result( continue if quality_list and cur_result.quality not in quality_list: - logger.log(u'Rejecting unwanted quality %s for [%s]' % ( - Quality.qualityStrings[cur_result.quality], cur_result.name), logger.DEBUG) + logger.debug(f'Rejecting unwanted quality {Quality.qualityStrings[cur_result.quality]}' + f' for [{cur_result.name}]') continue if not pass_show_wordlist_checks(cur_result.name, show_obj): @@ -265,12 +262,12 @@ def pick_best_result( cur_size = getattr(cur_result, 'size', None) if sickgear.USE_FAILED_DOWNLOADS and None is not cur_size and failed_history.has_failed( cur_result.name, cur_size, cur_result.provider.name): - logger.log(u'Rejecting previously failed [%s]' % cur_result.name) + logger.log(f'Rejecting previously failed [{cur_result.name}]') continue if filter_rls and any([scene_only, non_scene_fallback, scene_rej_nuked, scene_nuked_active]): if show_obj.is_anime: - addendum = u'anime (skipping scene/nuke filter) ' + addendum = 'anime (skipping scene/nuke filter) ' else: scene_contains = False if scene_only and scene_or_contain: @@ -280,25 +277,23 @@ def pick_best_result( scene_contains = True if scene_contains and not scene_rej_nuked: - logger.log(u'Considering title match to \'or contain\' [%s]' % cur_result.name, logger.DEBUG) + logger.debug(f'Considering title match to \'or contain\' [{cur_result.name}]') reject = False else: reject, url = can_reject(cur_result.name) if reject: if isinstance(reject, string_types): if scene_rej_nuked and not scene_nuked_active: - logger.log(u'Rejecting nuked release. Nuke reason [%s] source [%s]' % (reject, url), - logger.DEBUG) + logger.debug(f'Rejecting nuked release. Nuke reason [{reject}] source [{url}]') elif scene_nuked_active: best_fallback_result = best_candidate(best_fallback_result, cur_result) else: - logger.log(u'Considering nuked release. Nuke reason [%s] source [%s]' % (reject, url), - logger.DEBUG) + logger.debug(f'Considering nuked release. 
Nuke reason [{reject}] source [{url}]') reject = False elif scene_contains or non_scene_fallback: best_fallback_result = best_candidate(best_fallback_result, cur_result) else: - logger.log(u'Rejecting as not scene release listed at any [%s]' % url, logger.DEBUG) + logger.debug(f'Rejecting as not scene release listed at any [{url}]') if reject: continue @@ -306,16 +301,16 @@ def pick_best_result( best_result = best_candidate(best_result, cur_result) if best_result and scene_only and not show_obj.is_anime: - addendum = u'scene release filtered ' + addendum = 'scene release filtered ' elif not best_result and best_fallback_result: - addendum = u'non scene release filtered ' + addendum = 'non scene release filtered ' best_result = best_fallback_result if best_result: - msg = (u'Picked as the best %s[%s]', u'Confirmed as the best %s[%s]')[1 == len(results)] - logger.log(msg % (addendum, best_result.name), logger.DEBUG) + msg = ('Picked as the best %s[%s]', 'Confirmed as the best %s[%s]')[1 == len(results)] + logger.debug(msg % (addendum, best_result.name)) else: - logger.log(u'No result picked.', logger.DEBUG) + logger.debug('No result picked.') return best_result @@ -329,7 +324,7 @@ def best_candidate(best_result, cur_result): :param cur_result: current best search result :return: new best search result """ - logger.log(u'Quality is %s for [%s]' % (Quality.qualityStrings[cur_result.quality], cur_result.name)) + logger.log(f'Quality is {Quality.qualityStrings[cur_result.quality]} for [{cur_result.name}]') if not best_result or best_result.quality < cur_result.quality != Quality.UNKNOWN: best_result = cur_result @@ -340,10 +335,10 @@ def best_candidate(best_result, cur_result): best_result = cur_result elif cur_result.properlevel == best_result.properlevel: if 'xvid' in best_result.name.lower() and 'x264' in cur_result.name.lower(): - logger.log(u'Preferring (x264 over xvid) [%s]' % cur_result.name) + logger.log(f'Preferring (x264 over xvid) [{cur_result.name}]') best_result = cur_result elif re.search('(?i)(h.?|x)264', best_result.name) and re.search('(?i)((h.?|x)265|hevc)', cur_result.name): - logger.log(u'Preferring (x265 over x264) [%s]' % cur_result.name) + logger.log(f'Preferring (x265 over x264) [{cur_result.name}]') best_result = cur_result elif 'internal' in best_result.name.lower() and 'internal' not in cur_result.name.lower(): best_result = cur_result @@ -357,15 +352,15 @@ def is_final_result(result): Checks if the given result is good enough quality that we can stop searching for other ones. 
:param result: search result to check - :return: If the result is the highest quality in both the any/best quality lists then this function + :return: If the result is the highest quality in both any and best quality lists then this function returns True, if not then it's False """ - logger.log(u'Checking if searching should continue after finding %s' % result.name, logger.DEBUG) + logger.debug(f'Checking if searching should continue after finding {result.name}') show_obj = result.ep_obj_list[0].show_obj - any_qualities, best_qualities = Quality.splitQuality(show_obj.quality) + any_qualities, best_qualities = Quality.split_quality(show_obj.quality) # if there is a download that's higher than this then we definitely need to keep looking if best_qualities and max(best_qualities) > result.quality: @@ -381,11 +376,11 @@ def is_final_result(result): elif best_qualities and max(best_qualities) == result.quality: - # if this is the best download but we have a higher initial download then keep looking + # if this is the best download, but we have a higher initial download then keep looking if any_qualities and max(any_qualities) > result.quality: return False - # if this is the best download and we don't have a higher initial download then we're done + # if this is the best download, and we don't have a higher initial download then we're done return True # if we got here than it's either not on the lists, they're empty, or it's lower than the highest required @@ -395,22 +390,21 @@ def is_final_result(result): def is_first_best_match(ep_status, result): # type: (int, sickgear.classes.SearchResult) -> bool """ - Checks if the given result is a best quality match and if we want to archive the episode on first match. + Checks if the given result is the best quality match and if we want to archive the episode on first match. 
:param ep_status: current episode object status :param result: search result to check :return: """ - logger.log(u'Checking if the first best quality match should be archived for episode %s' % - result.name, logger.DEBUG) + logger.debug(f'Checking if the first best quality match should be archived for episode {result.name}') show_obj = result.ep_obj_list[0].show_obj - cur_status, cur_quality = Quality.splitCompositeStatus(ep_status) + cur_status, cur_quality = Quality.split_composite_status(ep_status) - any_qualities, best_qualities = Quality.splitQuality(show_obj.quality) + any_qualities, best_qualities = Quality.split_quality(show_obj.quality) - # if there is a download that's a match to one of our best qualities and + # if there is a download that's a match to one of our best qualities, and # we want to archive the episode then we are done if best_qualities and show_obj.upgrade_once and \ (result.quality in best_qualities and @@ -436,7 +430,7 @@ def set_wanted_aired(ep_obj, # type: TVEpisode :param ep_count_scene: count of episodes in scene seasons :param manual: manual search """ - ep_status, ep_quality = common.Quality.splitCompositeStatus(ep_obj.status) + ep_status, ep_quality = common.Quality.split_composite_status(ep_obj.status) ep_obj.wanted_quality = get_wanted_qualities(ep_obj, ep_status, ep_quality, unaired=unaired, manual=manual) ep_obj.eps_aired_in_season = ep_count.get(ep_obj.season, 0) ep_obj.eps_aired_in_scene_season = ep_count_scene.get( @@ -461,7 +455,7 @@ def get_wanted_qualities(ep_obj, # type: TVEpisode """ if isinstance(ep_obj, TVEpisode): return sickgear.WANTEDLIST_CACHE.get_wantedlist(ep_obj.show_obj.quality, ep_obj.show_obj.upgrade_once, - cur_quality, cur_status, unaired, manual) + cur_quality, cur_status, unaired, manual) return [] @@ -546,7 +540,7 @@ def wanted_episodes(show_obj, # type: TVShow for result in sql_result: ep_obj = show_obj.get_episode(int(result['season']), int(result['episode']), ep_result=ep_sql_result) - cur_status, cur_quality = common.Quality.splitCompositeStatus(ep_obj.status) + cur_status, cur_quality = common.Quality.split_composite_status(ep_obj.status) ep_obj.wanted_quality = get_wanted_qualities(ep_obj, cur_status, cur_quality, unaired=unaired) if not ep_obj.wanted_quality: continue @@ -573,7 +567,7 @@ def wanted_episodes(show_obj, # type: TVShow ['%d unaired episode%s', total_unaired]: if 0 < total: actions.append(msg % (total, helpers.maybe_plural(total))) - logger.log(u'We want %s for %s' % (' and '.join(actions), show_obj.unique_name)) + logger.log(f'We want {" and ".join(actions)} for {show_obj.unique_name}') return wanted @@ -592,7 +586,7 @@ def search_for_needed_episodes(ep_obj_list): orig_thread_name = threading.current_thread().name - providers = filter_list(lambda x: x.is_active() and x.enable_recentsearch, sickgear.providers.sortedProviderList()) + providers = list(filter(lambda x: x.is_active() and x.enable_recentsearch, sickgear.providers.sorted_sources())) for cur_provider in providers: threading.current_thread().name = '%s :: [%s]' % (orig_thread_name, cur_provider.name) @@ -605,8 +599,8 @@ def search_for_needed_episodes(ep_obj_list): for cur_ep_obj in ep_obj_search_result_list: if cur_ep_obj.show_obj.paused: - logger.debug(u'Show %s is paused, ignoring all RSS items for %s' % - (cur_ep_obj.show_obj.unique_name, cur_ep_obj.pretty_name())) + logger.debug(f'Show {cur_ep_obj.show_obj.unique_name} is paused,' + f' ignoring all RSS items for {cur_ep_obj.pretty_name()}') continue # find the best result for the current episode 
@@ -615,10 +609,10 @@ def search_for_needed_episodes(ep_obj_list): # if all results were rejected move on to the next episode if not best_result: - logger.log(u'All found results for %s were rejected.' % cur_ep_obj.pretty_name(), logger.DEBUG) + logger.debug(f'All found results for {cur_ep_obj.pretty_name()} were rejected.') continue - # if it's already in the list (from another provider) and the newly found quality is no better then skip it + # if it's already in the list (from another provider) and the newly found quality is no better, then skip it if cur_ep_obj in found_results and best_result.quality <= found_results[cur_ep_obj].quality: continue @@ -635,20 +629,19 @@ def search_for_needed_episodes(ep_obj_list): found_results[cur_ep_obj] = best_result try: - cur_provider.save_list() + cur_provider.fails.save_list() except (BaseException, Exception): pass threading.current_thread().name = orig_thread_name if not len(providers): - logger.log('No NZB/Torrent providers in Media Providers/Options are enabled to match recent episodes', - logger.WARNING) + logger.warning('No NZB/Torrent providers in Media Providers/Options are enabled to match recent episodes') elif not search_done: - logger.log('Failed recent search of %s enabled provider%s. More info in debug log.' % ( - len(providers), helpers.maybe_plural(providers)), logger.ERROR) + logger.error(f'Failed recent search of {len(providers)} enabled provider{helpers.maybe_plural(providers)}.' + f' More info in debug log.') - return list_values(found_results) + return list(found_results.values()) def can_reject(release_name): @@ -716,12 +709,12 @@ def _search_provider_thread(provider, provider_results, show_obj, ep_obj_list, m search_count += 1 if 'eponly' == search_mode: - logger.log(u'Performing episode search for %s' % show_obj.unique_name) + logger.log(f'Performing episode search for {show_obj.unique_name}') else: - logger.log(u'Performing season pack search for %s' % show_obj.unique_name) + logger.log(f'Performing season pack search for {show_obj.unique_name}') try: - provider.cache._clearCache() + provider.cache.clear_cache() search_result_list = provider.find_search_results(show_obj, ep_obj_list, search_mode, manual_search, try_other_searches=try_other_searches) if any(search_result_list): @@ -729,10 +722,10 @@ def _search_provider_thread(provider, provider_results, show_obj, ep_obj_list, m len(v), (('multiep', 'season')[SEASON_RESULT == k], 'episode')['ep' in search_mode], helpers.maybe_plural(v)) for (k, v) in iteritems(search_result_list)])) except exceptions_helper.AuthException as e: - logger.error(u'Authentication error: %s' % ex(e)) + logger.error(f'Authentication error: {ex(e)}') break except (BaseException, Exception) as e: - logger.error(u'Error while searching %s, skipping: %s' % (provider.name, ex(e))) + logger.error(f'Error while searching {provider.name}, skipping: {ex(e)}') logger.error(traceback.format_exc()) break @@ -740,10 +733,10 @@ def _search_provider_thread(provider, provider_results, show_obj, ep_obj_list, m # make a list of all the results for this provider for cur_search_result in search_result_list: # skip non-tv crap - search_result_list[cur_search_result] = filter_list( + search_result_list[cur_search_result] = list(filter( lambda ep_item: ep_item.show_obj == show_obj and show_name_helpers.pass_wordlist_checks( ep_item.name, parse=False, indexer_lookup=False, show_obj=ep_item.show_obj), - search_result_list[cur_search_result]) + search_result_list[cur_search_result])) if cur_search_result in 
provider_results: provider_results[cur_search_result] += search_result_list[cur_search_result] @@ -755,7 +748,7 @@ def _search_provider_thread(provider, provider_results, show_obj, ep_obj_list, m break search_mode = '%sonly' % ('ep', 'sp')['ep' in search_mode] - logger.log(u'Falling back to %s search ...' % ('season pack', 'episode')['ep' in search_mode]) + logger.log(f'Falling back to {("season pack", "episode")["ep" in search_mode]} search ...') if not provider_results: logger.log('No suitable result at [%s]' % provider.name) @@ -768,8 +761,8 @@ def cache_torrent_file( ): # type: (...) -> Optional[TorrentSearchResult] - cache_file = ek.ek(os.path.join, sickgear.CACHE_DIR or helpers.get_system_temp_dir(), - '%s.torrent' % (helpers.sanitize_filename(search_result.name))) + cache_file = os.path.join(sickgear.CACHE_DIR or helpers.get_system_temp_dir(), + '%s.torrent' % (helpers.sanitize_filename(search_result.name))) if not helpers.download_file( search_result.url, cache_file, session=search_result.provider.session, failure_monitor=False): @@ -807,7 +800,7 @@ def cache_torrent_file( torrent_name = search_result.provider.regulate_cache_torrent_file(torrent_name) if not pick_best_result([search_result], show_obj, **kwargs) or \ not show_name_helpers.pass_wordlist_checks(torrent_name, indexer_lookup=False, show_obj=show_obj): - logger.log(u'Ignored %s that contains %s (debug log has detail)' % (result_name, torrent_name)) + logger.log(f'Ignored {result_name} that contains {torrent_name} (debug log has detail)') return return search_result @@ -843,7 +836,7 @@ def search_providers( orig_thread_name = threading.current_thread().name - provider_list = [x for x in sickgear.providers.sortedProviderList() if x.is_active() and + provider_list = [x for x in sickgear.providers.sorted_sources() if x.is_active() and getattr(x, 'enable_backlog', None) and (not torrent_only or GenericProvider.TORRENT == x.providerType) and (not scheduled or getattr(x, 'enable_scheduled_backlog', None))] @@ -851,7 +844,7 @@ def search_providers( # create a thread for each provider to search for cur_provider in provider_list: if cur_provider.anime_only and not show_obj.is_anime: - logger.debug(u'%s is not an anime, skipping' % show_obj.unique_name) + logger.debug(f'{show_obj.unique_name} is not an anime, skipping') continue provider_id = cur_provider.get_id() @@ -881,7 +874,7 @@ def search_providers( if provider_id not in found_results or not len(found_results[provider_id]): continue - any_qualities, best_qualities = Quality.splitQuality(show_obj.quality) + any_qualities, best_qualities = Quality.split_quality(show_obj.quality) # pick the best season NZB best_season_result = None @@ -894,14 +887,14 @@ def search_providers( for cur_result in found_results[provider_id][cur_episode]: if Quality.UNKNOWN != cur_result.quality and highest_quality_overall < cur_result.quality: highest_quality_overall = cur_result.quality - logger.debug(u'%s is the highest quality of any match' % Quality.qualityStrings[highest_quality_overall]) + logger.debug(f'{Quality.qualityStrings[highest_quality_overall]} is the highest quality of any match') # see if every episode is wanted if best_season_result: # get the quality of the season nzb season_qual = best_season_result.quality - logger.log(u'%s is the quality of the season %s' % (Quality.qualityStrings[season_qual], - best_season_result.provider.providerType), logger.DEBUG) + logger.debug(f'{Quality.qualityStrings[season_qual]} is the quality of the season' + f' 
{best_season_result.provider.providerType}') my_db = db.DBConnection() sql = 'SELECT season, episode' \ @@ -910,8 +903,8 @@ def search_providers( (show_obj.tvid, show_obj.prodid, ','.join([str(x.season) for x in ep_obj_list])) ep_nums = [(int(x['season']), int(x['episode'])) for x in my_db.select(sql)] - logger.log(u'Executed query: [%s]' % sql) - logger.log(u'Episode list: %s' % ep_nums, logger.DEBUG) + logger.log(f'Executed query: [{sql}]') + logger.debug(f'Episode list: {ep_nums}') all_wanted = True any_wanted = False @@ -921,11 +914,11 @@ def search_providers( else: any_wanted = True - # if we need every ep in the season and there's nothing better then just download this and - # be done with it (unless single episodes are preferred) + # if we need every ep in the season and there's nothing better, + # then download this and be done with it (unless single episodes are preferred) if all_wanted and highest_quality_overall == best_season_result.quality: - logger.log(u'Every episode in this season is needed, downloading the whole %s %s' % - (best_season_result.provider.providerType, best_season_result.name)) + logger.log(f'Every episode in this season is needed, downloading the whole' + f' {best_season_result.provider.providerType} {best_season_result.name}') ep_obj_list = [] for ep_num in ep_nums: ep_obj_list.append(show_obj.get_episode(ep_num[0], ep_num[1])) @@ -934,16 +927,16 @@ def search_providers( return [best_season_result] elif not any_wanted: - logger.log(u'No episodes from this season are wanted at this quality, ignoring the result of ' + - best_season_result.name, logger.DEBUG) + logger.debug(f'No episodes from this season are wanted at this quality,' + f' ignoring the result of {best_season_result.name}') else: if GenericProvider.NZB == best_season_result.provider.providerType: - logger.log(u'Breaking apart the NZB and adding the individual ones to our results', logger.DEBUG) + logger.debug('Breaking apart the NZB and adding the individual ones to our results') # if not, break it apart and add them as the lowest priority results - individual_results = nzbSplitter.splitResult(best_season_result) + individual_results = nzbSplitter.split_result(best_season_result) - for cur_result in filter_iter( + for cur_result in filter( lambda r: r.show_obj == show_obj and show_name_helpers.pass_wordlist_checks( r.name, parse=False, indexer_lookup=False, show_obj=r.show_obj), individual_results): ep_num = None @@ -962,8 +955,8 @@ def search_providers( else: # Season result from Torrent Provider must be a full-season torrent, creating multi-ep result for it - logger.log(u'Adding multi episode result for full season torrent. In your torrent client, set ' + - u'the episodes that you do not want to "don\'t download"') + logger.log('Adding multi episode result for full season torrent. 
In your torrent client,' + ' set the episodes that you do not want to "don\'t download"') ep_obj_list = [] for ep_num in ep_nums: ep_obj_list.append(show_obj.get_episode(ep_num[0], ep_num[1])) @@ -985,11 +978,11 @@ def search_providers( if MULTI_EP_RESULT in found_results[provider_id]: for multi_result in found_results[provider_id][MULTI_EP_RESULT]: - logger.log(u'Checking usefulness of multi episode result [%s]' % multi_result.name, logger.DEBUG) + logger.debug(f'Checking usefulness of multi episode result [{multi_result.name}]') if sickgear.USE_FAILED_DOWNLOADS and failed_history.has_failed(multi_result.name, multi_result.size, - multi_result.provider.name): - logger.log(u'Rejecting previously failed multi episode result [%s]' % multi_result.name) + multi_result.provider.name): + logger.log(f'Rejecting previously failed multi episode result [{multi_result.name}]') continue # see how many of the eps that this result covers aren't covered by single results @@ -1003,12 +996,12 @@ def search_providers( else: not_needed_eps.append(ep_num) - logger.log(u'Single episode check result is... needed episodes: %s, not needed episodes: %s' % - (needed_eps, not_needed_eps), logger.DEBUG) + logger.debug(f'Single episode check result is... needed episodes: {needed_eps},' + f' not needed episodes: {not_needed_eps}') if not not_needed_eps: - logger.log(u'All of these episodes were covered by single episode results, ' + - 'ignoring this multi episode result', logger.DEBUG) + logger.debug('All of these episodes were covered by single episode results,' + ' ignoring this multi episode result') continue # check if these eps are already covered by another multi-result @@ -1021,13 +1014,13 @@ def search_providers( else: multi_needed_eps.append(ep_num) - logger.log(u'Multi episode check result is... multi needed episodes: ' + - '%s, multi not needed episodes: %s' % (multi_needed_eps, multi_not_needed_eps), logger.DEBUG) + logger.debug(f'Multi episode check result is...' 
+ f' multi needed episodes: {multi_needed_eps},' + f' multi not needed episodes: {multi_not_needed_eps}') if not multi_needed_eps: - logger.log(u'All of these episodes were covered by another multi episode nzb, ' + - 'ignoring this multi episode result', - logger.DEBUG) + logger.debug('All of these episodes were covered by another multi episode nzb,' + ' ignoring this multi episode result') continue # if we're keeping this multi-result then remember it @@ -1038,8 +1031,8 @@ def search_providers( for ep_obj in multi_result.ep_obj_list: ep_num = ep_obj.episode if ep_num in found_results[provider_id]: - logger.log(u'A needed multi episode result overlaps with a single episode result for episode ' + - '#%s, removing the single episode results from the list' % ep_num, logger.DEBUG) + logger.debug(f'A needed multi episode result overlaps with a single episode result' + f' for episode #{ep_num}, removing the single episode results from the list') del found_results[provider_id][ep_num] # of all the single ep results narrow it down to the best one for each episode @@ -1060,7 +1053,7 @@ def search_providers( found_results[provider_id][cur_search_result][0].ep_obj_list[0]) or \ found_results[provider_id][cur_search_result][0].ep_obj_list[0].status if old_status: - status, quality = Quality.splitCompositeStatus(old_status) + status, quality = Quality.split_composite_status(old_status) use_quality_list = (status not in ( common.WANTED, common.FAILED, common.UNAIRED, common.SKIPPED, common.IGNORED, common.UNKNOWN)) @@ -1096,7 +1089,7 @@ def search_providers( best_result.after_get_data_func(best_result) best_result.after_get_data_func = None # consume only once - # add result if its not a duplicate + # add result if it's not a duplicate found = False for i, result in enumerate(final_results): for best_result_ep in best_result.ep_obj_list: @@ -1122,8 +1115,8 @@ def search_providers( if not len(provider_list): logger.warning('No NZB/Torrent providers in Media Providers/Options are allowed for active searching') elif not search_done: - logger.log('Failed active search of %s enabled provider%s. More info in debug log.' % ( - len(provider_list), helpers.maybe_plural(provider_list)), logger.ERROR) + logger.error(f'Failed active search of {len(provider_list)}' + f' enabled provider{helpers.maybe_plural(provider_list)}. More info in debug log.') elif not any(final_results): logger.log('No suitable candidates') diff --git a/sickgear/search_backlog.py b/sickgear/search_backlog.py index 904d54a9..ecd9b369 100644 --- a/sickgear/search_backlog.py +++ b/sickgear/search_backlog.py @@ -25,10 +25,9 @@ from . 
import db, logger, scheduler, search_queue, ui from .helpers import find_show_by_id from .providers.generic import GenericProvider from .search import wanted_episodes -from .sgdatetime import SGDatetime, timestamp_near +from .sgdatetime import SGDatetime from .tv import TVidProdid, TVEpisode, TVShow -from _23 import filter_list, map_iter, map_list from six import iteritems, itervalues, moves # noinspection PyUnreachableCode @@ -48,29 +47,29 @@ class BacklogSearchScheduler(scheduler.Scheduler): self.force = True def next_run(self): - if 1 >= self.action._lastBacklog: + if 1 >= self.action.last_backlog: return datetime.date.today() - elif (self.action._lastBacklog + self.action.cycleTime) < datetime.date.today().toordinal(): + elif (self.action.last_backlog + self.action.cycle_time) < datetime.date.today().toordinal(): return datetime.date.today() - return datetime.date.fromordinal(self.action._lastBacklog + self.action.cycleTime) + return datetime.date.fromordinal(self.action.last_backlog + self.action.cycle_time) def next_backlog_timeleft(self): now = datetime.datetime.now() - torrent_enabled = 0 < len([x for x in sickgear.providers.sortedProviderList() if x.is_active() and + torrent_enabled = 0 < len([x for x in sickgear.providers.sorted_sources() if x.is_active() and getattr(x, 'enable_backlog', None) and GenericProvider.TORRENT == x.providerType]) - if now > self.action.nextBacklog or self.action.nextCyleTime != self.cycleTime: - nextruntime = now + self.timeLeft() + if now > self.action.nextBacklog or self.action.nextCyleTime != self.cycle_time: + nextruntime = now + self.time_left() if not torrent_enabled: nextpossibleruntime = (datetime.datetime.fromtimestamp(self.action.last_runtime) + datetime.timedelta(hours=23)) for _ in moves.xrange(5): if nextruntime > nextpossibleruntime: self.action.nextBacklog = nextruntime - self.action.nextCyleTime = self.cycleTime + self.action.nextCyleTime = self.cycle_time break - nextruntime += self.cycleTime + nextruntime += self.cycle_time else: - self.action.nextCyleTime = self.cycleTime + self.action.nextCyleTime = self.cycle_time self.action.nextBacklog = nextruntime return self.action.nextBacklog - now if self.action.nextBacklog > now else datetime.timedelta(seconds=0) @@ -78,8 +77,8 @@ class BacklogSearchScheduler(scheduler.Scheduler): class BacklogSearcher(object): def __init__(self): - self._lastBacklog = self._get_last_backlog() - self.cycleTime = sickgear.BACKLOG_PERIOD + self.last_backlog = self._get_last_backlog() + self.cycle_time = sickgear.BACKLOG_PERIOD self.lock = threading.Lock() self.amActive = False # type: bool self.amPaused = False # type: bool @@ -106,7 +105,7 @@ class BacklogSearcher(object): def am_running(self): # type: (...) 
-> bool - logger.log(u'amWaiting: ' + str(self.amWaiting) + ', amActive: ' + str(self.amActive), logger.DEBUG) + logger.debug(f'amWaiting: {self.amWaiting}, amActive: {self.amActive}') return (not self.amWaiting) and self.amActive def add_backlog_item(self, @@ -176,7 +175,7 @@ class BacklogSearcher(object): :param scheduled: scheduled backlog search (can be from webif or scheduler) :return: any provider is active for given backlog """ - return 0 < len([x for x in sickgear.providers.sortedProviderList() if x.is_active() and + return 0 < len([x for x in sickgear.providers.sorted_sources() if x.is_active() and getattr(x, 'enable_backlog', None) and (not torrent_only or GenericProvider.TORRENT == x.providerType) and (not scheduled or getattr(x, 'enable_scheduled_backlog', None))]) @@ -198,7 +197,7 @@ class BacklogSearcher(object): :rtype: None """ if self.amActive and not which_shows: - logger.log(u'Backlog is still running, not starting it again', logger.DEBUG) + logger.debug('Backlog is still running, not starting it again') return if which_shows: @@ -212,17 +211,17 @@ class BacklogSearcher(object): any_torrent_enabled = continued_backlog = False if not force and standard_backlog and (datetime.datetime.now() - datetime.datetime.fromtimestamp( self._get_last_runtime())) < datetime.timedelta(hours=23): - any_torrent_enabled = any(map_iter( + any_torrent_enabled = any(map( lambda x: x.is_active() and getattr(x, 'enable_backlog', None) and GenericProvider.TORRENT == x.providerType, - sickgear.providers.sortedProviderList())) + sickgear.providers.sorted_sources())) if not any_torrent_enabled: - logger.log('Last scheduled backlog run was within the last day, skipping this run.', logger.DEBUG) + logger.debug('Last scheduled backlog run was within the last day, skipping this run.') return if not self.providers_active(any_torrent_enabled, standard_backlog): - logger.log('No NZB/Torrent provider has active searching enabled in config/Media Providers,' - ' cannot start backlog.', logger.WARNING) + logger.warning('No NZB/Torrent provider has active searching enabled in config/Media Providers,' + ' cannot start backlog.') return self._get_last_backlog() @@ -235,14 +234,14 @@ class BacklogSearcher(object): limited_backlog = False if standard_backlog and (any_torrent_enabled or sickgear.BACKLOG_NOFULL): - logger.log(u'Running limited backlog for episodes missed during the last %s day(s)' % - str(sickgear.BACKLOG_LIMITED_PERIOD)) + logger.log(f'Running limited backlog for episodes missed during the last' + f' {sickgear.BACKLOG_LIMITED_PERIOD} day(s)') from_date = limited_from_date limited_backlog = True runparts = [] if standard_backlog and not any_torrent_enabled and sickgear.BACKLOG_NOFULL: - logger.log(u'Skipping automated full backlog search because it is disabled in search settings') + logger.log('Skipping automated full backlog search because it is disabled in search settings') my_db = db.DBConnection('cache.db') if standard_backlog and not any_torrent_enabled and not sickgear.BACKLOG_NOFULL: @@ -291,8 +290,8 @@ class BacklogSearcher(object): if not runparts and parts: runparts = parts[0] - wanted_list = filter_list( - lambda wi: wi and next(itervalues(wi))[0].show_obj.tvid_prodid in runparts, wanted_list) + wanted_list = list(filter( + lambda wi: wi and next(itervalues(wi))[0].show_obj.tvid_prodid in runparts, wanted_list)) limited_wanted_list = [] if standard_backlog and not any_torrent_enabled and runparts: @@ -314,8 +313,8 @@ class BacklogSearcher(object): for i, l in enumerate(parts): if 0 == i: 
continue - cl += map_list(lambda m: ['INSERT INTO backlogparts (part, indexer, indexerid) VALUES (?,?,?)', - [i + 1] + TVidProdid(m).list], l) + cl += list(map(lambda m: ['INSERT INTO backlogparts (part, indexer, indexerid) VALUES (?,?,?)', + [i + 1] + TVidProdid(m).list], l)) if 0 < len(cl): my_db.mass_action(cl) @@ -334,7 +333,7 @@ class BacklogSearcher(object): @staticmethod def _get_last_runtime(): - logger.log('Retrieving the last runtime of Backlog from the DB', logger.DEBUG) + logger.debug('Retrieving the last runtime of Backlog from the DB') my_db = db.DBConnection() sql_result = my_db.select('SELECT * FROM info') @@ -345,19 +344,19 @@ class BacklogSearcher(object): last_run_time = 1 else: last_run_time = int(sql_result[0]['last_run_backlog']) - if last_run_time > int(timestamp_near(datetime.datetime.now())): + if last_run_time > SGDatetime.timestamp_near(): last_run_time = 1 return last_run_time def _set_last_runtime(self, when): - logger.log('Setting the last backlog runtime in the DB to %s' % when, logger.DEBUG) + logger.debug('Setting the last backlog runtime in the DB to %s' % when) my_db = db.DBConnection() sql_result = my_db.select('SELECT * FROM info') if isinstance(when, datetime.datetime): - when = int(timestamp_near(when)) + when = SGDatetime.timestamp_near(when) else: when = SGDatetime.timestamp_far(when, default=0) if 0 == len(sql_result): @@ -370,7 +369,7 @@ class BacklogSearcher(object): def _get_last_backlog(self): - logger.log('Retrieving the last check time from the DB', logger.DEBUG) + logger.debug('Retrieving the last check time from the DB') my_db = db.DBConnection() sql_result = my_db.select('SELECT * FROM info') @@ -384,13 +383,13 @@ class BacklogSearcher(object): if last_backlog > datetime.date.today().toordinal(): last_backlog = 1 - self._lastBacklog = last_backlog - return self._lastBacklog + self.last_backlog = last_backlog + return self.last_backlog @staticmethod def _set_last_backlog(when): - logger.log('Setting the last backlog in the DB to %s' % when, logger.DEBUG) + logger.debug('Setting the last backlog in the DB to %s' % when) my_db = db.DBConnection() sql_result = my_db.select('SELECT * FROM info') diff --git a/sickgear/search_queue.py b/sickgear/search_queue.py index 36f804dd..200910b9 100644 --- a/sickgear/search_queue.py +++ b/sickgear/search_queue.py @@ -22,20 +22,16 @@ import re import threading import traceback -import exceptions_helper # noinspection PyPep8Naming from exceptions_helper import ex import sickgear -from lib.dateutil import tz from . 
import common, db, failed_history, generic_queue, helpers, \ history, logger, network_timezones, properFinder, search, ui from .classes import Proper, SimpleNamespace from .search import wanted_episodes, get_aired_in_season, set_wanted_aired from .tv import TVEpisode -from _23 import filter_list - # noinspection PyUnreachableCode if False: from typing import Any, AnyStr, Dict, List, Optional, Union @@ -87,7 +83,7 @@ class SearchQueue(generic_queue.GenericQueue): continue self.add_item(item, add_to_db=False) except (BaseException, Exception) as e: - logger.log('Exception loading queue %s: %s' % (self.__class__.__name__, ex(e)), logger.ERROR) + logger.error('Exception loading queue %s: %s' % (self.__class__.__name__, ex(e))) def _clear_sql(self): return [ @@ -326,7 +322,7 @@ class SearchQueue(generic_queue.GenericQueue): # manual and failed searches generic_queue.GenericQueue.add_item(self, item, add_to_db=add_to_db) else: - logger.log(u'Not adding item, it\'s already in the queue', logger.DEBUG) + logger.debug("Not adding item, it's already in the queue") class RecentSearchQueueItem(generic_queue.QueueItem): @@ -371,24 +367,23 @@ class RecentSearchQueueItem(generic_queue.QueueItem): self._check_for_propers(needed) if not self.ep_obj_list: - logger.log(u'No search of cache for episodes required') + logger.log('No search of cache for episodes required') self.success = True else: num_shows = len(set([ep_obj.show_obj.name for ep_obj in self.ep_obj_list])) - logger.log(u'Found %d needed episode%s spanning %d show%s' - % (len(self.ep_obj_list), helpers.maybe_plural(self.ep_obj_list), - num_shows, helpers.maybe_plural(num_shows))) + logger.log(f'Found {len(self.ep_obj_list):d} needed episode{helpers.maybe_plural(self.ep_obj_list)}' + f' spanning {num_shows:d} show{helpers.maybe_plural(num_shows)}') try: - logger.log(u'Beginning recent search for episodes') + logger.log('Beginning recent search for episodes') # noinspection PyTypeChecker search_results = search.search_for_needed_episodes(self.ep_obj_list) if not len(search_results): - logger.log(u'No needed episodes found') + logger.log('No needed episodes found') else: for result in search_results: - logger.log(u'Downloading %s from %s' % (result.name, result.provider.name)) + logger.log(f'Downloading {result.name} from {result.provider.name}') self.success = search.snatch_episode(result) if self.success: for ep_obj in result.ep_obj_list: @@ -403,7 +398,7 @@ class RecentSearchQueueItem(generic_queue.QueueItem): helpers.cpu_sleep() except (BaseException, Exception): - logger.log(traceback.format_exc(), logger.ERROR) + logger.error(traceback.format_exc()) if None is self.success: self.success = False @@ -501,13 +496,13 @@ class RecentSearchQueueItem(generic_queue.QueueItem): wanted |= (False, True)[common.WANTED == ep_obj.status] if not wanted: - logger.log(u'No unaired episodes marked wanted') + logger.log('No unaired episodes marked wanted') if 0 < len(sql_l): my_db = db.DBConnection() my_db.mass_action(sql_l) if wanted: - logger.log(u'Found new episodes marked wanted') + logger.log('Found new episodes marked wanted') @staticmethod def update_providers(needed=common.NeededQualities(need_all=True)): @@ -520,8 +515,8 @@ class RecentSearchQueueItem(generic_queue.QueueItem): orig_thread_name = threading.current_thread().name threads = [] - providers = filter_list(lambda x: x.is_active() and x.enable_recentsearch, - sickgear.providers.sortedProviderList()) + providers = list(filter(lambda x: x.is_active() and x.enable_recentsearch, + 
sickgear.providers.sorted_sources())) for cur_provider in providers: if not cur_provider.cache.should_update(): continue @@ -530,15 +525,14 @@ class RecentSearchQueueItem(generic_queue.QueueItem): logger.log('Updating provider caches with recent upload data') # spawn a thread for each provider to save time waiting for slow response providers - threads.append(threading.Thread(target=cur_provider.cache.updateCache, + threads.append(threading.Thread(target=cur_provider.cache.update_cache, kwargs={'needed': needed}, name='%s :: [%s]' % (orig_thread_name, cur_provider.name))) # start the thread we just created threads[-1].start() if not len(providers): - logger.log('No NZB/Torrent providers in Media Providers/Options are enabled to match recent episodes', - logger.WARNING) + logger.warning('No NZB/Torrent providers in Media Providers/Options are enabled to match recent episodes') if threads: # wait for all threads to finish @@ -641,13 +635,13 @@ class ManualSearchQueueItem(BaseSearchQueueItem): generic_queue.QueueItem.run(self) try: - logger.log(u'Beginning manual search for: [%s]' % self.segment.pretty_name()) + logger.log(f'Beginning manual search for: [{self.segment.pretty_name()}]') self.started = True ep_count, ep_count_scene = get_aired_in_season(self.show_obj) set_wanted_aired(self.segment, True, ep_count, ep_count_scene, manual=True) if not getattr(self.segment, 'wanted_quality', None): - ep_status, ep_quality = common.Quality.splitCompositeStatus(self.segment.status) + ep_status, ep_quality = common.Quality.split_composite_status(self.segment.status) self.segment.wanted_quality = search.get_wanted_qualities(self.segment, ep_status, ep_quality, unaired=True, manual=True) if not self.segment.wanted_quality: @@ -660,7 +654,7 @@ class ManualSearchQueueItem(BaseSearchQueueItem): if search_result: for result in search_result: # type: sickgear.classes.NZBSearchResult - logger.log(u'Downloading %s from %s' % (result.name, result.provider.name)) + logger.log(f'Downloading {result.name} from {result.provider.name}') self.success = search.snatch_episode(result) for ep_obj in result.ep_obj_list: # type: sickgear.tv.TVEpisode self.snatched_eps.add(SimpleNamespace(tvid_prodid=ep_obj.show_obj.tvid_prodid, @@ -677,12 +671,12 @@ class ManualSearchQueueItem(BaseSearchQueueItem): break else: ui.notifications.message('No downloads found', - u'Could not find a download for %s' % self.segment.pretty_name()) + f'Could not find a download for {self.segment.pretty_name()}') - logger.log(u'Unable to find a download for: [%s]' % self.segment.pretty_name()) + logger.log(f'Unable to find a download for: [{self.segment.pretty_name()}]') except (BaseException, Exception): - logger.log(traceback.format_exc(), logger.ERROR) + logger.error(traceback.format_exc()) finally: # Keep a list with the last executed searches @@ -733,7 +727,7 @@ class BacklogQueueItem(BaseSearchQueueItem): for ep_obj in self.segment: # type: sickgear.tv.TVEpisode set_wanted_aired(ep_obj, True, ep_count, ep_count_scene) - logger.log(u'Beginning backlog search for: [%s]' % self.show_obj.unique_name) + logger.log(f'Beginning backlog search for: [{self.show_obj.unique_name}]') search_result = search.search_providers( self.show_obj, self.segment, False, try_other_searches=(not self.standard_backlog or not self.limited_backlog), @@ -741,7 +735,7 @@ class BacklogQueueItem(BaseSearchQueueItem): if search_result: for result in search_result: # type: sickgear.classes.NZBSearchResult - logger.log(u'Downloading %s from %s' % (result.name, 
result.provider.name)) + logger.log(f'Downloading {result.name} from {result.provider.name}') if search.snatch_episode(result): for ep_obj in result.ep_obj_list: # type: sickgear.tv.TVEpisode self.snatched_eps.add(SimpleNamespace(tvid_prodid=ep_obj.show_obj.tvid_prodid, @@ -754,10 +748,10 @@ class BacklogQueueItem(BaseSearchQueueItem): helpers.cpu_sleep() else: - logger.log(u'No needed episodes found during backlog search for: [%s]' % self.show_obj.unique_name) + logger.log(f'No needed episodes found during backlog search for: [{self.show_obj.unique_name}]') except (BaseException, Exception): is_error = True - logger.log(traceback.format_exc(), logger.ERROR) + logger.error(traceback.format_exc()) finally: logger.log('Completed backlog search %sfor: [%s]' @@ -787,7 +781,7 @@ class FailedQueueItem(BaseSearchQueueItem): ep_count, ep_count_scene = get_aired_in_season(self.show_obj) for ep_obj in self.segment: # type: sickgear.tv.TVEpisode - logger.log(u'Marking episode as bad: [%s]' % ep_obj.pretty_name()) + logger.log(f'Marking episode as bad: [{ep_obj.pretty_name()}]') failed_history.set_episode_failed(ep_obj) (release, provider) = failed_history.find_release(ep_obj) @@ -796,14 +790,14 @@ class FailedQueueItem(BaseSearchQueueItem): failed_history.add_failed(release) history.log_failed(ep_obj, release, provider) - logger.log(u'Beginning failed download search for: [%s]' % ep_obj.pretty_name()) + logger.log(f'Beginning failed download search for: [{ep_obj.pretty_name()}]') set_wanted_aired(ep_obj, True, ep_count, ep_count_scene, manual=True) search_result = search.search_providers(self.show_obj, self.segment, True, try_other_searches=True) or [] for result in search_result: # type: sickgear.classes.NZBSearchResult - logger.log(u'Downloading %s from %s' % (result.name, result.provider.name)) + logger.log(f'Downloading {result.name} from {result.provider.name}') if search.snatch_episode(result): for ep_obj in result.ep_obj_list: # type: sickgear.tv.TVEpisode self.snatched_eps.add(SimpleNamespace(tvid_prodid=ep_obj.show_obj.tvid_prodid, @@ -817,9 +811,9 @@ class FailedQueueItem(BaseSearchQueueItem): helpers.cpu_sleep() else: pass - # logger.log(u'No valid episode found to retry for: [%s]' % self.segment.pretty_name()) + # logger.log(f'No valid episode found to retry for: [{self.segment.pretty_name()}]') except (BaseException, Exception): - logger.log(traceback.format_exc(), logger.ERROR) + logger.error(traceback.format_exc()) finally: # Keep a list with the last executed searches diff --git a/sickgear/sgdatetime.py b/sickgear/sgdatetime.py index 86bb84b5..5beada3a 100644 --- a/sickgear/sgdatetime.py +++ b/sickgear/sgdatetime.py @@ -23,11 +23,11 @@ import sys import sickgear from dateutil import tz -from six import integer_types, PY2, string_types +from six import integer_types, string_types # noinspection PyUnreachableCode if False: - from typing import Callable, Optional, Union + from typing import Optional, Union date_presets = ('%Y-%m-%d', '%a, %Y-%m-%d', @@ -211,7 +211,7 @@ class SGDatetime(datetime.datetime): obj = (dt, self)[self is not None] # type: datetime.datetime try: if None is not obj: - strd = u'%s, %s' % ( + strd = '%s, %s' % ( SGDatetime.sbstrftime(obj, (sickgear.DATE_PRESET, d_preset)[None is not d_preset]), SGDatetime.sbftime(dt, show_seconds, t_preset, False, markup)) @@ -234,7 +234,7 @@ class SGDatetime(datetime.datetime): """ convert datetime to filetime special handling for windows filetime issues - for pre Windows 7 this can result in an exception for pre 1970 dates + for 
pre Windows 7 this can result in an exception for pre-1970 dates """ obj = (dt, self)[self is not None] # type: datetime.datetime if is_win: @@ -282,22 +282,25 @@ class SGDatetime(datetime.datetime): finally: return (default, timestamp)[isinstance(timestamp, (float, integer_types))] + @static_or_instance + def timestamp_near(self, + dt=None, # type: Optional[SGDatetime, datetime.datetime] + td=None, # type: Optional[datetime.timedelta] + return_int=True # type: bool + ): + # type: (...) -> Union[float, integer_types] + """ + Use `timestamp_near` for a timestamp in the near future or near past -if PY2: - """ - Use `timestamp_near` for a timezone aware UTC timestamp in the near future or recent past. + Raises exception if dt cannot be converted to int - Under py3, using the faster variable assigned cpython callable, so py2 is set up to mimic the signature types. - Note: the py3 callable is limited to datetime.datetime and does not work with datetime.date. - """ - def _py2timestamp(dt=None): - # type: (datetime.datetime) -> float - try: - import time - return int(time.mktime(dt.timetuple())) - except (BaseException, Exception): - return 0 - timestamp_near = _py2timestamp # type: Callable[[datetime.datetime], float] -else: - # py3 native timestamp uses milliseconds - timestamp_near = datetime.datetime.timestamp # type: Callable[[datetime.datetime], float] + td is timedelta to subtract from datetime + """ + obj = (dt, self)[self is not None] # type: datetime.datetime + if None is obj: + obj = datetime.datetime.now() + if isinstance(td, datetime.timedelta): + obj -= td + if not return_int: + return datetime.datetime.timestamp(obj) + return int(datetime.datetime.timestamp(obj)) diff --git a/sickgear/show_name_helpers.py b/sickgear/show_name_helpers.py index b1397df1..f688c1d5 100644 --- a/sickgear/show_name_helpers.py +++ b/sickgear/show_name_helpers.py @@ -19,8 +19,6 @@ import os import copy import re -# noinspection PyPep8Naming -import encodingKludge as ek from exceptions_helper import ex import sickgear @@ -30,7 +28,7 @@ from .name_parser.parser import InvalidNameException, InvalidShowException, Name from .scene_exceptions import get_scene_exceptions from sg_helpers import scantree -from _23 import map_list, quote_plus +from _23 import quote_plus from six import iterkeys, itervalues # noinspection PyUnreachableCode @@ -64,14 +62,14 @@ def pass_wordlist_checks(name, # type: AnyStr """ if parse: - err_msg = u'Unable to parse the filename %s into a valid ' % name + err_msg = f'Unable to parse the filename {name} into a valid ' try: NameParser(indexer_lookup=indexer_lookup).parse(name) except InvalidNameException: - logger.log(err_msg + 'episode', logger.DEBUG) + logger.debug(err_msg + 'episode') return False except InvalidShowException: - logger.log(err_msg + 'show', logger.DEBUG) + logger.debug(err_msg + 'show') return False word_list = {'sub(bed|ed|pack|s)', '(dk|fin|heb|kor|nor|nordic|pl|swe)sub(bed|ed|s)?', @@ -96,7 +94,7 @@ def pass_wordlist_checks(name, # type: AnyStr result = result or contains_any(name, word_list, rx=sickgear.IGNORE_WORDS_REGEX) if None is not result and result: - logger.log(u'Ignored: %s for containing ignore word' % name, logger.DEBUG) + logger.debug(f'Ignored: {name} for containing ignore word') return False result = None @@ -110,7 +108,7 @@ def pass_wordlist_checks(name, # type: AnyStr # if any of the good strings aren't in the name then say no result = result or not_contains_any(name, req_word_list, rx=sickgear.REQUIRE_WORDS_REGEX) if None is not result and result: 
- logger.log(u'Ignored: %s for not containing required word match' % name, logger.DEBUG) + logger.debug(f'Ignored: {name} for not containing required word match') return False return True @@ -162,7 +160,7 @@ def contains_any(subject, # type: AnyStr if (match and not invert) or (not match and invert): msg = match and not invert and 'Found match' or '' msg = not match and invert and 'No match found' or msg - logger.log(u'%s from pattern: %s in text: %s ' % (msg, rc_filter.pattern, subject), logger.DEBUG) + logger.debug(f'{msg} from pattern: {rc_filter.pattern} in text: {subject} ') return True return False return None @@ -192,13 +190,11 @@ def compile_word_list(lookup_words, # type: Union[AnyStr, Set[AnyStr]] subject = search_raw and re.escape(word) or re.sub(r'([\" \'])', r'\\\1', word) result.append(re.compile('(?i)%s%s%s' % (re_prefix, subject, re_suffix))) except re.error as e: - logger.log(u'Failure to compile filter expression: %s ... Reason: %s' % (word, ex(e)), - logger.DEBUG) + logger.debug(f'Failure to compile filter expression: {word} ... Reason: {ex(e)}') diff = len(lookup_words) - len(result) if diff: - logger.log(u'From %s expressions, %s was discarded during compilation' % (len(lookup_words), diff), - logger.DEBUG) + logger.debug(f'From {len(lookup_words)} expressions, {diff} were discarded during compilation') return result @@ -233,19 +229,21 @@ def get_show_names_all_possible(show_obj, season=-1, scenify=True, spacer='.', f :param season: season :param scenify: :param spacer: spacer + :param force_anime: :return: """ - show_names = list(set(allPossibleShowNames(show_obj, season=season, force_anime=force_anime))) # type: List[AnyStr] + show_names = list(set( + all_possible_show_names(show_obj, season=season, force_anime=force_anime))) # type: List[AnyStr] if scenify: - show_names = map_list(sanitize_scene_name, show_names) + show_names = list(map(sanitize_scene_name, show_names)) return url_encode(show_names, spacer) -def makeSceneSeasonSearchString(show_obj, # type: sickgear.tv.TVShow - ep_obj, # type: sickgear.tv.TVEpisode - ignore_allowlist=False, # type: bool - extra_search_type=None - ): # type: (...) -> List[AnyStr] +def make_scene_season_search_string(show_obj, # type: sickgear.tv.TVShow + ep_obj, # type: sickgear.tv.TVEpisode + ignore_allowlist=False, # type: bool + extra_search_type=None + ): # type: (...)
-> List[AnyStr] """ :param show_obj: show object @@ -258,34 +256,34 @@ def makeSceneSeasonSearchString(show_obj, # type: sickgear.tv.TVShow numseasons = 0 # the search string for air by date shows is just - seasonStrings = [str(ep_obj.airdate).split('-')[0]] + season_strings = [str(ep_obj.airdate).split('-')[0]] elif show_obj.is_anime: numseasons = 0 ep_obj_list = show_obj.get_all_episodes(ep_obj.season) # get show qualities - anyQualities, bestQualities = common.Quality.splitQuality(show_obj.quality) + any_qualities, best_qualities = common.Quality.split_quality(show_obj.quality) # compile a list of all the episode numbers we need in this 'season' - seasonStrings = [] + season_strings = [] for episode in ep_obj_list: # get quality of the episode - curCompositeStatus = episode.status - curStatus, curQuality = common.Quality.splitCompositeStatus(curCompositeStatus) + cur_composite_status = episode.status + cur_status, cur_quality = common.Quality.split_composite_status(cur_composite_status) - if bestQualities: - highestBestQuality = max(bestQualities) + if best_qualities: + highest_best_quality = max(best_qualities) else: - highestBestQuality = 0 + highest_best_quality = 0 # if we need a better one then add it to the list of episodes to fetch - if (curStatus in ( + if (cur_status in ( common.DOWNLOADED, - common.SNATCHED) and curQuality < highestBestQuality) or curStatus == common.WANTED: + common.SNATCHED) and cur_quality < highest_best_quality) or cur_status == common.WANTED: ab_number = episode.scene_absolute_number if 0 < ab_number: - seasonStrings.append("%02d" % ab_number) + season_strings.append("%02d" % ab_number) else: my_db = db.DBConnection() @@ -297,7 +295,7 @@ def makeSceneSeasonSearchString(show_obj, # type: sickgear.tv.TVShow [show_obj.tvid, show_obj.prodid]) numseasons = int(sql_result[0][0]) - seasonStrings = ["S%02d" % int(ep_obj.scene_season)] + season_strings = ["S%02d" % int(ep_obj.scene_season)] show_names = get_show_names_all_possible(show_obj, ep_obj.scene_season) @@ -312,7 +310,7 @@ def makeSceneSeasonSearchString(show_obj, # type: sickgear.tv.TVShow to_return.append(cur_name) # for providers that don't allow multiple searches in one request we only search for Sxx style stuff else: - for cur_season in seasonStrings: + for cur_season in season_strings: if not ignore_allowlist and show_obj.is_anime \ and None is not show_obj.release_groups and show_obj.release_groups.allowlist: for keyword in show_obj.release_groups.allowlist: @@ -324,10 +322,10 @@ def makeSceneSeasonSearchString(show_obj, # type: sickgear.tv.TVShow return to_return -def makeSceneSearchString(show_obj, # type: sickgear.tv.TVShow - ep_obj, # type: sickgear.tv.TVEpisode - ignore_allowlist=False # type: bool - ): # type: (...) -> List[AnyStr] +def make_scene_search_string(show_obj, # type: sickgear.tv.TVShow + ep_obj, # type: sickgear.tv.TVEpisode + ignore_allowlist=False # type: bool + ): # type: (...) -> List[AnyStr] """ :param show_obj: show object @@ -374,63 +372,66 @@ def makeSceneSearchString(show_obj, # type: sickgear.tv.TVShow return to_return -def allPossibleShowNames(show_obj, season=-1, force_anime=False): +def all_possible_show_names(show_obj, season=-1, force_anime=False): # type: (sickgear.tv.TVShow, int, bool) -> List[AnyStr] """ Figures out every possible variation of the name for a particular show. Includes TVDB name, TVRage name, - country codes on the end, eg. "Show Name (AU)", and any scene exception names. + country codes on the end, e.g. 
"Show Name (AU)", and any scene exception names. :param show_obj: a TVShow object that we should get the names of :param season: season + :param force_anime: :return: a list of all the possible show names """ - showNames = get_scene_exceptions(show_obj.tvid, show_obj.prodid, season=season)[:] - if not showNames: # if we dont have any season specific exceptions fallback to generic exceptions + show_names = get_scene_exceptions(show_obj.tvid, show_obj.prodid, season=season)[:] + if not show_names: # if we don't have any season specific exceptions fallback to generic exceptions season = -1 - showNames = get_scene_exceptions(show_obj.tvid, show_obj.prodid, season=season)[:] + show_names = get_scene_exceptions(show_obj.tvid, show_obj.prodid, season=season)[:] if -1 == season: - showNames.append(show_obj.name) + show_names.append(show_obj.name) if not show_obj.is_anime and not force_anime: - newShowNames = [] + new_show_names = [] country_list = common.countryList country_list.update(dict(zip(itervalues(common.countryList), iterkeys(common.countryList)))) - for curName in set(showNames): - if not curName: + for cur_name in set(show_names): + if not cur_name: continue # if we have "Show Name Australia" or "Show Name (Australia)" this will add "Show Name (AU)" for # any countries defined in common.countryList # (and vice versa) - for curCountry in country_list: - if curName.endswith(' ' + curCountry): - newShowNames.append(curName.replace(' ' + curCountry, ' (' + country_list[curCountry] + ')')) - elif curName.endswith(' (' + curCountry + ')'): - newShowNames.append(curName.replace(' (' + curCountry + ')', ' (' + country_list[curCountry] + ')')) + for cur_country in country_list: + if cur_name.endswith(' ' + cur_country): + new_show_names.append(cur_name.replace(' ' + cur_country, + ' (' + country_list[cur_country] + ')')) + elif cur_name.endswith(' (' + cur_country + ')'): + new_show_names.append(cur_name.replace(' (' + cur_country + ')', + ' (' + country_list[cur_country] + ')')) # if we have "Show Name (2013)" this will strip the (2013) show year from the show name # newShowNames.append(re.sub('\(\d{4}\)','',curName)) - showNames += newShowNames + show_names += new_show_names - return showNames + return show_names -def determineReleaseName(dir_name=None, nzb_name=None): +def determine_release_name(dir_name=None, nzb_name=None): # type: (AnyStr, AnyStr) -> Union[AnyStr, None] - """Determine a release name from an nzb and/or folder name + """Determine a release name from a nzb and/or folder name :param dir_name: dir name :param nzb_name: nzb name :return: None or release name """ if None is not nzb_name: - logger.log(u'Using nzb name for release name.') + logger.log('Using nzb name for release name.') return nzb_name.rpartition('.')[0] - if not dir_name or not ek.ek(os.path.isdir, dir_name): + if not dir_name or not os.path.isdir(dir_name): return None # try to get the release name from nzb/nfo @@ -443,16 +444,16 @@ def determineReleaseName(dir_name=None, nzb_name=None): if 1 == len(results): found_file = results[0].rpartition('.')[0] if pass_wordlist_checks(found_file): - logger.log(u'Release name (%s) found from file (%s)' % (found_file, results[0])) + logger.log(f'Release name ({found_file}) found from file ({results[0]})') return found_file.rpartition('.')[0] # If that fails, we try the folder - folder = ek.ek(os.path.basename, dir_name) + folder = os.path.basename(dir_name) if pass_wordlist_checks(folder): # NOTE: Multiple failed downloads will change the folder name. 
# (e.g., appending #s) # Should we handle that? - logger.log(u'Folder name (%s) appears to be a valid release name. Using it.' % folder) + logger.log(f'Folder name ({folder}) appears to be a valid release name. Using it.') return folder return None diff --git a/sickgear/show_queue.py b/sickgear/show_queue.py index 03fbec9a..e083bb40 100644 --- a/sickgear/show_queue.py +++ b/sickgear/show_queue.py @@ -23,8 +23,6 @@ import traceback from lib.dateutil.parser import parser from lib.tvinfo_base.exceptions import * -# noinspection PyPep8Naming -import encodingKludge as ek import exceptions_helper from exceptions_helper import ex @@ -49,7 +47,7 @@ if False: from lib.tvinfo_base import TVInfoShow from .tv import TVEpisode -# Define special priority of tv source switch tasks, higher then anything else except newly added shows +# Define special priority of tv source switch tasks, higher than anything else except newly added shows SWITCH_PRIO = generic_queue.QueuePriorities.HIGH + 5 DAILY_SHOW_UPDATE_FINISHED_EVENT = 1 @@ -72,7 +70,7 @@ class ShowQueue(generic_queue.GenericQueue): def check_events(self): if self.daily_update_running and \ - not (self.isShowUpdateRunning() or sickgear.show_update_scheduler.action.amActive): + not (self.is_show_update_running() or sickgear.show_update_scheduler.action.amActive): self.execute_events(DAILY_SHOW_UPDATE_FINISHED_EVENT) self.daily_update_running = False @@ -91,24 +89,24 @@ class ShowQueue(generic_queue.GenericQueue): continue if cur_row['action_id'] in (ShowQueueActions.UPDATE, ShowQueueActions.FORCEUPDATE, - ShowQueueActions.WEBFORCEUPDATE): - self.updateShow(add_to_db=False, force=bool(cur_row['force']), - pausestatus_after=bool_none(cur_row['pausestatus_after']), - scheduled_update=bool(cur_row['scheduled_update']), - show_obj=show_obj, skip_refresh=bool(cur_row['skip_refresh']), - uid=cur_row['uid'], - web=ShowQueueActions.WEBFORCEUPDATE == cur_row['action_id']) + ShowQueueActions.WEBFORCEUPDATE): + self.update_show(add_to_db=False, force=bool(cur_row['force']), + pausestatus_after=bool_none(cur_row['pausestatus_after']), + scheduled_update=bool(cur_row['scheduled_update']), + show_obj=show_obj, skip_refresh=bool(cur_row['skip_refresh']), + uid=cur_row['uid'], + web=ShowQueueActions.WEBFORCEUPDATE == cur_row['action_id']) elif ShowQueueActions.REFRESH == cur_row['action_id']: - self.refreshShow(add_to_db=False, force=bool(cur_row['force']), - force_image_cache=bool(cur_row['force_image_cache']), - priority=cur_row['priority'], - scheduled_update=bool(cur_row['scheduled_update']), - show_obj=show_obj, - uid=cur_row['uid']) + self.refresh_show(add_to_db=False, force=bool(cur_row['force']), + force_image_cache=bool(cur_row['force_image_cache']), + priority=cur_row['priority'], + scheduled_update=bool(cur_row['scheduled_update']), + show_obj=show_obj, + uid=cur_row['uid']) elif ShowQueueActions.RENAME == cur_row['action_id']: - self.renameShowEpisodes(add_to_db=False, show_obj=show_obj, uid=cur_row['uid']) + self.rename_show_episodes(add_to_db=False, show_obj=show_obj, uid=cur_row['uid']) elif ShowQueueActions.SUBTITLE == cur_row['action_id']: self.download_subtitles(add_to_db=False, show_obj=show_obj, uid=cur_row['uid']) @@ -128,7 +126,7 @@ class ShowQueue(generic_queue.GenericQueue): lang=cur_row['lang'], uid=cur_row['uid'], add_to_db=False) except (BaseException, Exception) as e: - logger.log('Exception loading queue %s: %s' % (self.__class__.__name__, ex(e)), logger.ERROR) + logger.error('Exception loading queue %s: %s' % (self.__class__.__name__, 
ex(e))) def save_item(self, item): # type: (ShowQueueItem) -> None @@ -225,7 +223,7 @@ class ShowQueue(generic_queue.GenericQueue): else: my_db.action('DELETE FROM tv_src_switch WHERE uid = ?', [item.uid]) except (BaseException, Exception) as e: - logger.log('Exception deleting item %s from db: %s' % (item, ex(e)), logger.ERROR) + logger.error('Exception deleting item %s from db: %s' % (item, ex(e))) else: generic_queue.GenericQueue.delete_item(self, item) @@ -243,7 +241,7 @@ class ShowQueue(generic_queue.GenericQueue): # type: (List[integer_types], bool) -> None generic_queue.GenericQueue._remove_from_queue(self, to_remove=to_remove, force=force) - def _isInQueue(self, show_obj, actions): + def _is_in_queue(self, show_obj, actions): # type: (TVShow, Tuple[integer_types, ...]) -> bool """ @@ -254,7 +252,7 @@ class ShowQueue(generic_queue.GenericQueue): with self.lock: return any(1 for x in self.queue if x.action_id in actions and show_obj == x.show_obj) - def _isBeingSomethinged(self, show_obj, actions): + def _is_being_somethinged(self, show_obj, actions): # type: (TVShow, Tuple[integer_types, ...]) -> bool """ @@ -269,7 +267,7 @@ class ShowQueue(generic_queue.GenericQueue): and show_obj == self.currentItem.show_obj \ and self.currentItem.action_id in actions - def isInUpdateQueue(self, show_obj): + def is_in_update_queue(self, show_obj): # type: (TVShow) -> bool """ @@ -278,10 +276,10 @@ class ShowQueue(generic_queue.GenericQueue): :return: :rtype: bool """ - return self._isInQueue(show_obj, (ShowQueueActions.UPDATE, ShowQueueActions.FORCEUPDATE, - ShowQueueActions.WEBFORCEUPDATE)) + return self._is_in_queue(show_obj, (ShowQueueActions.UPDATE, ShowQueueActions.FORCEUPDATE, + ShowQueueActions.WEBFORCEUPDATE)) - def isInRefreshQueue(self, show_obj): + def is_in_refresh_queue(self, show_obj): # type: (TVShow) -> bool """ @@ -290,9 +288,9 @@ class ShowQueue(generic_queue.GenericQueue): :return: :rtype: bool """ - return self._isInQueue(show_obj, (ShowQueueActions.REFRESH,)) + return self._is_in_queue(show_obj, (ShowQueueActions.REFRESH,)) - def isInRenameQueue(self, show_obj): + def is_in_rename_queue(self, show_obj): # type: (TVShow) -> bool """ @@ -301,9 +299,9 @@ class ShowQueue(generic_queue.GenericQueue): :return: :rtype: bool """ - return self._isInQueue(show_obj, (ShowQueueActions.RENAME,)) + return self._is_in_queue(show_obj, (ShowQueueActions.RENAME,)) - def isInSubtitleQueue(self, show_obj): + def is_in_subtitle_queue(self, show_obj): # type: (TVShow) -> bool """ @@ -312,9 +310,9 @@ class ShowQueue(generic_queue.GenericQueue): :return: :rtype: bool """ - return self._isInQueue(show_obj, (ShowQueueActions.SUBTITLE,)) + return self._is_in_queue(show_obj, (ShowQueueActions.SUBTITLE,)) - def isBeingAdded(self, show_obj): + def is_being_added(self, show_obj): # type: (TVShow) -> bool """ @@ -323,9 +321,9 @@ class ShowQueue(generic_queue.GenericQueue): :return: :rtype: bool """ - return self._isBeingSomethinged(show_obj, (ShowQueueActions.ADD,)) + return self._is_being_somethinged(show_obj, (ShowQueueActions.ADD,)) - def isBeingUpdated(self, show_obj): + def is_being_updated(self, show_obj): # type: (TVShow) -> bool """ @@ -334,10 +332,10 @@ class ShowQueue(generic_queue.GenericQueue): :return: :rtype: bool """ - return self._isBeingSomethinged(show_obj, (ShowQueueActions.UPDATE, ShowQueueActions.FORCEUPDATE, - ShowQueueActions.WEBFORCEUPDATE)) + return self._is_being_somethinged(show_obj, (ShowQueueActions.UPDATE, ShowQueueActions.FORCEUPDATE, + ShowQueueActions.WEBFORCEUPDATE)) - def 
isBeingRefreshed(self, show_obj): + def is_being_refreshed(self, show_obj): # type: (TVShow) -> bool """ @@ -346,9 +344,9 @@ class ShowQueue(generic_queue.GenericQueue): :return: :rtype: bool """ - return self._isBeingSomethinged(show_obj, (ShowQueueActions.REFRESH,)) + return self._is_being_somethinged(show_obj, (ShowQueueActions.REFRESH,)) - def isBeingRenamed(self, show_obj): + def is_being_renamed(self, show_obj): # type: (TVShow) -> bool """ @@ -357,9 +355,9 @@ class ShowQueue(generic_queue.GenericQueue): :return: :rtype: bool """ - return self._isBeingSomethinged(show_obj, (ShowQueueActions.RENAME,)) + return self._is_being_somethinged(show_obj, (ShowQueueActions.RENAME,)) - def isBeingSubtitled(self, show_obj): + def is_being_subtitled(self, show_obj): # type: (TVShow) -> bool """ @@ -368,9 +366,9 @@ class ShowQueue(generic_queue.GenericQueue): :return: :rtype: bool """ - return self._isBeingSomethinged(show_obj, (ShowQueueActions.SUBTITLE,)) + return self._is_being_somethinged(show_obj, (ShowQueueActions.SUBTITLE,)) - def isShowUpdateRunning(self): + def is_show_update_running(self): """ :return: @@ -387,7 +385,7 @@ class ShowQueue(generic_queue.GenericQueue): :param show_obj: show object """ - return self._isBeingSomethinged(show_obj, (ShowQueueActions.SWITCH,)) + return self._is_being_somethinged(show_obj, (ShowQueueActions.SWITCH,)) def is_show_switch_queued(self, show_obj): # type: (TVShow) -> bool @@ -396,21 +394,21 @@ class ShowQueue(generic_queue.GenericQueue): :param show_obj: show object """ - return self._isInQueue(show_obj, (ShowQueueActions.SWITCH,)) + return self._is_in_queue(show_obj, (ShowQueueActions.SWITCH,)) def is_switch_running(self): # type: (...) -> bool with self.lock: return any(1 for x in self.queue + [self.currentItem] if isinstance(x, QueueItemSwitchSource)) - def _getLoadingShowList(self): + def _get_loading_showlist(self): """ :return: :rtype: List """ with self.lock: - return [x for x in self.queue + [self.currentItem] if None is not x and x.isLoading] + return [x for x in self.queue + [self.currentItem] if None is not x and x.is_loading] def queue_length(self): # type: (...) -> Dict[AnyStr, List[AnyStr, Dict]] @@ -454,18 +452,18 @@ class ShowQueue(generic_queue.GenericQueue): length['switch'].append(result_item) return length - loadingShowList = property(_getLoadingShowList) + loading_showlist = property(_get_loading_showlist) - def updateShow(self, - show_obj, # type: TVShow - force=False, # type: bool - web=False, # type: bool - scheduled_update=False, # type: bool - priority=generic_queue.QueuePriorities.NORMAL, # type: integer_types - uid=None, # type: integer_types - add_to_db=True, # type: bool - **kwargs # type: Any - ): # type: (...) -> Union[QueueItemUpdate, QueueItemForceUpdate, QueueItemForceUpdateWeb] + def update_show(self, + show_obj, # type: TVShow + force=False, # type: bool + web=False, # type: bool + scheduled_update=False, # type: bool + priority=generic_queue.QueuePriorities.NORMAL, # type: integer_types + uid=None, # type: integer_types + add_to_db=True, # type: bool + **kwargs # type: Any + ): # type: (...) 
-> Union[QueueItemUpdate, QueueItemForceUpdate, QueueItemForceUpdateWeb] """ :param show_obj: show object @@ -485,15 +483,15 @@ class ShowQueue(generic_queue.GenericQueue): :rtype: QueueItemUpdate or QueueItemForceUpdateWeb or QueueItemForceUpdate """ with self.lock: - if self.isBeingAdded(show_obj): + if self.is_being_added(show_obj): raise exceptions_helper.CantUpdateException( 'Show is still being added, wait until it is finished before you update.') - if self.isBeingUpdated(show_obj): + if self.is_being_updated(show_obj): raise exceptions_helper.CantUpdateException( 'This show is already being updated, can\'t update again until it\'s done.') - if self.isInUpdateQueue(show_obj): + if self.is_in_update_queue(show_obj): raise exceptions_helper.CantUpdateException( 'This show is already being updated, can\'t update again until it\'s done.') @@ -516,9 +514,9 @@ class ShowQueue(generic_queue.GenericQueue): return queue_item_obj - def refreshShow(self, show_obj, force=False, scheduled_update=False, after_update=False, - priority=generic_queue.QueuePriorities.HIGH, force_image_cache=False, uid=None, add_to_db=True, - **kwargs): + def refresh_show(self, show_obj, force=False, scheduled_update=False, after_update=False, + priority=generic_queue.QueuePriorities.HIGH, force_image_cache=False, uid=None, add_to_db=True, + **kwargs): # type: (TVShow, bool, bool, bool, integer_types, bool, integer_types, bool, Any) -> Optional[QueueItemRefresh] """ @@ -541,12 +539,13 @@ class ShowQueue(generic_queue.GenericQueue): :rtype: QueueItemRefresh """ with self.lock: - if (self.isBeingRefreshed(show_obj) or self.isInRefreshQueue(show_obj)) and not force: + if (self.is_being_refreshed(show_obj) or self.is_in_refresh_queue(show_obj)) and not force: raise exceptions_helper.CantRefreshException('This show is being refreshed, not refreshing again.') - if ((not after_update and self.isBeingUpdated(show_obj)) or self.isInUpdateQueue(show_obj)) and not force: - logger.log('Skipping this refresh as there is already an update queued or' - ' in progress and a refresh is done at the end of an update anyway.', logger.DEBUG) + if ((not after_update and self.is_being_updated(show_obj)) + or self.is_in_update_queue(show_obj)) and not force: + logger.debug('Skipping this refresh as there is already an update queued or' + ' in progress and a refresh is done at the end of an update anyway.') return if self.is_show_being_switched(show_obj): @@ -561,7 +560,7 @@ class ShowQueue(generic_queue.GenericQueue): return queue_item_obj - def renameShowEpisodes(self, show_obj, uid=None, add_to_db=True): + def rename_show_episodes(self, show_obj, uid=None, add_to_db=True): # type: (TVShow, integer_types, bool) -> QueueItemRename """ @@ -758,14 +757,14 @@ class ShowQueueItem(generic_queue.QueueItem): self.show_obj = show_obj # type: sickgear.tv.TVShow self.scheduled_update = scheduled_update # type: bool - def isInQueue(self): + def is_in_queue(self): """ :rtype: bool """ return self in sickgear.show_queue_scheduler.action.queue + [ sickgear.show_queue_scheduler.action.currentItem] - def _getName(self): + def _get_name(self): """ :rtype: AnyStr """ @@ -773,7 +772,7 @@ class ShowQueueItem(generic_queue.QueueItem): return self.show_obj.name return '' - def _isLoading(self): + def _is_loading(self): return False def __str__(self): @@ -782,9 +781,9 @@ class ShowQueueItem(generic_queue.QueueItem): def __repr__(self): return self.__str__() - show_name = property(_getName) + show_name = property(_get_name) - isLoading = property(_isLoading) + 
is_loading = property(_is_loading) class QueueItemAdd(ShowQueueItem): @@ -864,7 +863,7 @@ class QueueItemAdd(ShowQueueItem): self.priority = generic_queue.QueuePriorities.VERYHIGH - def _getName(self): + def _get_name(self): """ :return: the show name if there is a show object created, if not returns the dir that the show is being added to. @@ -876,9 +875,9 @@ class QueueItemAdd(ShowQueueItem): return self.showDir return self.show_obj.name - show_name = property(_getName) + show_name = property(_get_name) - def _isLoading(self): + def _is_loading(self): """ :return: True if we've gotten far enough to have a show object, or False if we still only know the folder name. @@ -886,7 +885,7 @@ class QueueItemAdd(ShowQueueItem): """ return None is self.show_obj - isLoading = property(_isLoading) + is_loading = property(_is_loading) # if they gave a number to start or number to end as wanted, then change those eps to it def _get_wanted(self, db_obj, wanted_max, latest): @@ -932,7 +931,7 @@ class QueueItemAdd(ShowQueueItem): wanted_updates.append({'season': sr['season'], 'episode': sr['episode'], 'status': sr['status']}) elif sr['status'] not in [WANTED]: - cur_status, cur_quality = Quality.splitCompositeStatus(int(sr['status'])) + cur_status, cur_quality = Quality.split_composite_status(int(sr['status'])) if sickgear.WANTEDLIST_CACHE.get_wantedlist( self.quality, self.upgrade_once, cur_quality, cur_status, unaired=(sickgear.SEARCH_UNAIRED and not sickgear.UNAIRED_RECENT_SEARCH_ONLY)): @@ -977,36 +976,35 @@ class QueueItemAdd(ShowQueueItem): if self.lang: tvinfo_config['language'] = self.lang - logger.log(u'' + str(sickgear.TVInfoAPI(self.tvid).name) + ': ' + repr(tvinfo_config)) + logger.log(f'{sickgear.TVInfoAPI(self.tvid).name}: {repr(tvinfo_config)}') t = sickgear.TVInfoAPI(self.tvid).setup(**tvinfo_config) s = t.get_show(self.prodid, load_episodes=False, language=self.lang) if getattr(t, 'show_not_found', False): - logger.log('Show %s was not found on %s, maybe show was deleted' % - (self.show_name, sickgear.TVInfoAPI(self.tvid).name), logger.ERROR) - self._finishEarly() + logger.error(f'Show {self.show_name} was not found on {sickgear.TVInfoAPI(self.tvid).name},' + f' maybe show was deleted') + self._finish_early() return # this usually only happens if they have an NFO in their show dir # which gave us a TV info source ID that has no proper english version of the show if None is getattr(s, 'seriesname', None): - logger.log('Show in %s has no name on %s, probably the wrong language used to search with.' % - (self.showDir, sickgear.TVInfoAPI(self.tvid).name), logger.ERROR) + logger.error(f'Show in {self.showDir} has no name on {sickgear.TVInfoAPI(self.tvid).name},' + f' probably the wrong language used to search with.') ui.notifications.error('Unable to add show', 'Show in %s has no name on %s, probably the wrong language.' ' Delete .nfo and add manually in the correct language.' % (self.showDir, sickgear.TVInfoAPI(self.tvid).name)) - self._finishEarly() + self._finish_early() return except (BaseException, Exception): - logger.log('Unable to find show ID:%s on TV info: %s' % (self.prodid, sickgear.TVInfoAPI(self.tvid).name), - logger.ERROR) + logger.error('Unable to find show ID:%s on TV info: %s' % (self.prodid, sickgear.TVInfoAPI(self.tvid).name)) ui.notifications.error('Unable to add show', 'Unable to look up the show in %s on %s using ID %s, not using the NFO.' ' Delete .nfo and try adding manually again.' 
% (self.showDir, sickgear.TVInfoAPI(self.tvid).name, self.prodid)) - self._finishEarly() + self._finish_early() return try: @@ -1047,28 +1045,26 @@ class QueueItemAdd(ShowQueueItem): self.show_obj.sports = 1 except BaseTVinfoException as e: - logger.log( - 'Unable to add show due to an error with %s: %s' % (sickgear.TVInfoAPI(self.tvid).name, ex(e)), - logger.ERROR) + logger.error(f'Unable to add show due to an error with {sickgear.TVInfoAPI(self.tvid).name}: {ex(e)}') if self.show_obj: ui.notifications.error('Unable to add %s due to an error with %s' % (self.show_obj.unique_name, sickgear.TVInfoAPI(self.tvid).name)) else: ui.notifications.error( 'Unable to add show due to an error with %s' % sickgear.TVInfoAPI(self.tvid).name) - self._finishEarly() + self._finish_early() return except exceptions_helper.MultipleShowObjectsException: - logger.log('The show in %s is already in your show list, skipping' % self.showDir, logger.ERROR) + logger.error('The show in %s is already in your show list, skipping' % self.showDir) ui.notifications.error('Show skipped', 'The show in %s is already in your show list' % self.showDir) - self._finishEarly() + self._finish_early() return except (BaseException, Exception) as e: - logger.log('Error trying to add show: %s' % ex(e), logger.ERROR) - logger.log(traceback.format_exc(), logger.ERROR) - self._finishEarly() + logger.error('Error trying to add show: %s' % ex(e)) + logger.error(traceback.format_exc()) + self._finish_early() raise self.show_obj.load_imdb_info() @@ -1076,9 +1072,9 @@ class QueueItemAdd(ShowQueueItem): try: self.show_obj.save_to_db() except (BaseException, Exception) as e: - logger.log('Error saving the show to the database: %s' % ex(e), logger.ERROR) - logger.log(traceback.format_exc(), logger.ERROR) - self._finishEarly() + logger.error('Error saving the show to the database: %s' % ex(e)) + logger.error(traceback.format_exc()) + self._finish_early() raise if not show_exists: @@ -1093,16 +1089,15 @@ class QueueItemAdd(ShowQueueItem): self.show_obj.load_episodes_from_tvinfo(tvinfo_data=(None, result)[ self.show_obj.prodid == getattr(result, 'id', None)]) except (BaseException, Exception) as e: - logger.log( - 'Error with %s, not creating episode list: %s' % (sickgear.TVInfoAPI(self.show_obj.tvid).name, ex(e)), - logger.ERROR) - logger.log(traceback.format_exc(), logger.ERROR) + logger.error(f'Error with {sickgear.TVInfoAPI(self.show_obj.tvid).name},' + f' not creating episode list: {ex(e)}') + logger.error(traceback.format_exc()) try: self.show_obj.load_episodes_from_dir() except (BaseException, Exception) as e: - logger.log('Error searching directory for episodes: %s' % ex(e), logger.ERROR) - logger.log(traceback.format_exc(), logger.ERROR) + logger.error('Error searching directory for episodes: %s' % ex(e)) + logger.error(traceback.format_exc()) # if they gave a custom status then change all the eps to it my_db = db.DBConnection() @@ -1150,13 +1145,13 @@ class QueueItemAdd(ShowQueueItem): try: self.show_obj.save_to_db() except (BaseException, Exception) as e: - logger.log('Error saving the show to the database: %s' % ex(e), logger.ERROR) - logger.log(traceback.format_exc(), logger.ERROR) - self._finishEarly() + logger.error('Error saving the show to the database: %s' % ex(e)) + logger.error(traceback.format_exc()) + self._finish_early() raise # update internal name cache - name_cache.buildNameCache(self.show_obj) + name_cache.build_name_cache(self.show_obj) self.show_obj.load_episodes_from_db() @@ -1191,14 +1186,14 @@ class 
QueueItemAdd(ShowQueueItem): self.finish() - def _finishEarly(self): + def _finish_early(self): if None is not self.show_obj: self.show_obj.delete_show() if self.new_show: - # if we adding a new show, delete the empty folder that was already created + # if adding a new show, delete the empty folder that was already created try: - ek.ek(os.rmdir, self.showDir) + os.rmdir(self.showDir) except (BaseException, Exception): pass @@ -1289,8 +1284,7 @@ class QueueItemRename(ShowQueueItem): try: _ = self.show_obj.location except exceptions_helper.ShowDirNotFoundException: - logger.log('Can\'t perform rename on %s when the show directory is missing.' - % self.show_obj.unique_name, logger.WARNING) + logger.warning(f'Can\'t perform rename on {self.show_obj.unique_name} when the show directory is missing.') return ep_obj_rename_list = [] @@ -1381,13 +1375,13 @@ class QueueItemUpdate(ShowQueueItem): if not sickgear.TVInfoAPI(self.show_obj.tvid).config['active']: logger.log('TV info source %s is marked inactive, aborting update for show %s and continue with refresh.' % (sickgear.TVInfoAPI(self.show_obj.tvid).config['name'], self.show_obj.name)) - sickgear.show_queue_scheduler.action.refreshShow(self.show_obj, self.force, self.scheduled_update, + sickgear.show_queue_scheduler.action.refresh_show(self.show_obj, self.force, self.scheduled_update, after_update=True) return logger.log('Beginning update of %s' % self.show_obj.unique_name) - logger.log('Retrieving show info from %s' % sickgear.TVInfoAPI(self.show_obj.tvid).name, logger.DEBUG) + logger.debug('Retrieving show info from %s' % sickgear.TVInfoAPI(self.show_obj.tvid).name) try: result = self.show_obj.load_from_tvinfo(cache=not self.force, tvinfo_data=self.tvinfo_data, scheduled_update=self.scheduled_update, switch=self.switch) @@ -1396,12 +1390,11 @@ class QueueItemUpdate(ShowQueueItem): elif not self.show_obj.prodid == getattr(self.tvinfo_data, 'id', None): self.tvinfo_data = result except BaseTVinfoAttributenotfound as e: - logger.log('Data retrieved from %s was incomplete, aborting: %s' % - (sickgear.TVInfoAPI(self.show_obj.tvid).name, ex(e)), logger.ERROR) + logger.error(f'Data retrieved from {sickgear.TVInfoAPI(self.show_obj.tvid).name} was incomplete,' + f' aborting: {ex(e)}') return except BaseTVinfoError as e: - logger.log('Unable to contact %s, aborting: %s' % (sickgear.TVInfoAPI(self.show_obj.tvid).name, ex(e)), - logger.WARNING) + logger.warning('Unable to contact %s, aborting: %s' % (sickgear.TVInfoAPI(self.show_obj.tvid).name, ex(e))) return if self.force_web: @@ -1410,22 +1403,22 @@ class QueueItemUpdate(ShowQueueItem): try: self.show_obj.save_to_db() except (BaseException, Exception) as e: - logger.log('Error saving the show to the database: %s' % ex(e), logger.ERROR) - logger.log(traceback.format_exc(), logger.ERROR) + logger.error('Error saving the show to the database: %s' % ex(e)) + logger.error(traceback.format_exc()) # get episode list from DB - logger.log('Loading all episodes from the database', logger.DEBUG) + logger.debug('Loading all episodes from the database') db_ep_obj_list = self.show_obj.load_episodes_from_db(update=True) # get episode list from TVDB - logger.log('Loading all episodes from %s' % sickgear.TVInfoAPI(self.show_obj.tvid).name, logger.DEBUG) + logger.debug('Loading all episodes from %s' % sickgear.TVInfoAPI(self.show_obj.tvid).name) try: tvinfo_ep_list = self.show_obj.load_episodes_from_tvinfo(cache=not self.force, update=True, tvinfo_data=self.tvinfo_data, switch=self.switch, old_tvid=self.old_tvid, 
old_prodid=self.old_prodid) except BaseTVinfoException as e: - logger.log('Unable to get info from %s, the show info will not be refreshed: %s' % - (sickgear.TVInfoAPI(self.show_obj.tvid).name, ex(e)), logger.ERROR) + logger.error(f'Unable to get info from {sickgear.TVInfoAPI(self.show_obj.tvid).name},' + f' the show info will not be refreshed: {ex(e)}') tvinfo_ep_list = None if None is tvinfo_ep_list: @@ -1438,7 +1431,7 @@ class QueueItemUpdate(ShowQueueItem): # for each ep we found on TVDB delete it from the DB list for cur_season in tvinfo_ep_list: for cur_episode in tvinfo_ep_list[cur_season]: - logger.log('Removing %sx%s from the DB list' % (cur_season, cur_episode), logger.DEBUG) + logger.debug('Removing %sx%s from the DB list' % (cur_season, cur_episode)) if cur_season in db_ep_obj_list and cur_episode in db_ep_obj_list[cur_season]: del db_ep_obj_list[cur_season][cur_episode] @@ -1447,20 +1440,19 @@ class QueueItemUpdate(ShowQueueItem): for cur_season in db_ep_obj_list: for cur_episode in db_ep_obj_list[cur_season]: ep_obj = self.show_obj.get_episode(cur_season, cur_episode) # type: Optional[TVEpisode] - status = sickgear.common.Quality.splitCompositeStatus(ep_obj.status)[0] + status = sickgear.common.Quality.split_composite_status(ep_obj.status)[0] if self.switch or should_delete_episode(status): if self.switch: cl.append(self.show_obj.switch_ep_change_sql( self.old_tvid, self.old_prodid, cur_season, cur_episode, TVSWITCH_EP_DELETED)) - logger.log('Permanently deleting episode %sx%s from the database' % - (cur_season, cur_episode), logger.MESSAGE) + logger.log(f'Permanently deleting episode {cur_season}x{cur_episode} from the database') try: cl.extend(ep_obj.delete_episode(return_sql=True)) except exceptions_helper.EpisodeDeletedException: pass else: - logger.log('Not deleting episode %sx%s from the database because status is: %s' % - (cur_season, cur_episode, statusStrings[status]), logger.MESSAGE) + logger.log(f'Not deleting episode {cur_season}x{cur_episode} from the database' + f' because status is: {statusStrings[status]}') if cl: my_db = db.DBConnection() @@ -1485,7 +1477,7 @@ class QueueItemUpdate(ShowQueueItem): sickgear.MEMCACHE['history_tab'] = sickgear.webserve.History.menu_tab( sickgear.MEMCACHE['history_tab_limit']) if not getattr(self, 'skip_refresh', False): - sickgear.show_queue_scheduler.action.refreshShow(self.show_obj, self.force, self.scheduled_update, + sickgear.show_queue_scheduler.action.refresh_show(self.show_obj, self.force, self.scheduled_update, after_update=True, force_image_cache=self.force_web, **self.kwargs) @@ -1607,7 +1599,7 @@ class QueueItemSwitchSource(ShowQueueItem): else: which_show = '%s:%s' % (self.old_tvid, self.old_prodid) self._set_switch_tbl_status(TVSWITCH_SAME_ID) - logger.log('Unchanged ids given, nothing to do for %s' % which_show, logger.ERROR) + logger.error('Unchanged ids given, nothing to do for %s' % which_show) return True return False @@ -1648,7 +1640,7 @@ class QueueItemSwitchSource(ShowQueueItem): which_show = '%s:%s' % (self.old_tvid, self.old_prodid) ui.notifications.message('TV info source switch: %s' % which_show, 'Error: could not find a id for show on new tv info source') - logger.log('Error: could not find a id for show on new tv info source: %s' % which_show, logger.WARNING) + logger.warning('Error: could not find a id for show on new tv info source: %s' % which_show) self._set_switch_tbl_status(TVSWITCH_NO_NEW_ID) return @@ -1663,7 +1655,7 @@ class QueueItemSwitchSource(ShowQueueItem): which_show = 
self.show_obj.unique_name else: which_show = '%s:%s' % (self.old_tvid, self.old_prodid) - logger.log('Duplicate shows in DB for show: %s' % which_show, logger.WARNING) + logger.warning('Duplicate shows in DB for show: %s' % which_show) ui.notifications.message('TV info source switch: %s' % which_show, 'Error: %s' % msg) self._set_switch_tbl_status(TVSWITCH_DUPLICATE_SHOW) @@ -1677,7 +1669,7 @@ class QueueItemSwitchSource(ShowQueueItem): ui.notifications.message('TV info source switch: %s' % which_show, 'Error: %s' % msg) self._set_switch_tbl_status(TVSWITCH_SOURCE_NOT_FOUND_ERROR) - logger.log('Unable to find the specified show: %s' % which_show, logger.WARNING) + logger.warning('Unable to find the specified show: %s' % which_show) return tvinfo_config = sickgear.TVInfoAPI(self.new_tvid).api_params.copy() @@ -1697,8 +1689,8 @@ class QueueItemSwitchSource(ShowQueueItem): td = t.get_show(show_id=new_prodid, actors=True, language=self.show_obj.lang) except (BaseException, Exception): td = None - logger.log('Failed to get new tv show id (%s) from source %s' % - (new_prodid, sickgear.TVInfoAPI(self.new_tvid).name), logger.WARNING) + logger.warning(f'Failed to get new tv show id ({new_prodid})' + f' from source {sickgear.TVInfoAPI(self.new_tvid).name}') if None is td: self._set_switch_tbl_status(TVSWITCH_NOT_FOUND_ERROR) msg = 'Show not found on new tv source' @@ -1707,7 +1699,7 @@ class QueueItemSwitchSource(ShowQueueItem): else: which_show = '%s:%s' % (self.old_tvid, self.old_prodid) ui.notifications.message('TV info source switch: %s' % which_show, 'Error: %s' % msg) - logger.log('show: %s not found on new tv source' % self.show_obj.tvid_prodid, logger.WARNING) + logger.warning('show: %s not found on new tv source' % self.show_obj.tvid_prodid) return try: @@ -1757,7 +1749,7 @@ class QueueItemSwitchSource(ShowQueueItem): msg = 'Show %s new id conflicts with existing show: %s' % \ ('[%s (%s)]' % (self.show_obj.unique_name, self.show_obj.tvid_prodid), '[%s (%s)]' % (new_show_obj.unique_name, new_show_obj.tvid_prodid)) - logger.log(msg, logger.WARNING) + logger.warning(msg) return self.progress = 'Switching to new source' self._set_switch_id(new_prodid) diff --git a/sickgear/show_updater.py b/sickgear/show_updater.py index 7bd46c31..144398a7 100644 --- a/sickgear/show_updater.py +++ b/sickgear/show_updater.py @@ -20,8 +20,6 @@ import traceback import exceptions_helper from exceptions_helper import ex -# noinspection PyPep8Naming -import encodingKludge as ek import sickgear from . 
import db, logger, network_timezones, properFinder, ui @@ -72,101 +70,100 @@ class ShowUpdater(object): if sickgear.db.db_supports_backup and 0 < sickgear.BACKUP_DB_MAX_COUNT: logger.log('backing up all db\'s') try: - sickgear.db.backup_all_dbs(sickgear.BACKUP_DB_PATH or - ek.ek(os.path.join, sickgear.DATA_DIR, 'backup')) + sickgear.db.backup_all_dbs(sickgear.BACKUP_DB_PATH or os.path.join(sickgear.DATA_DIR, 'backup')) except (BaseException, Exception): - logger.log('backup db error', logger.ERROR) + logger.error('backup db error') # refresh network timezones try: network_timezones.update_network_dict() except (BaseException, Exception): - logger.log('network timezone update error', logger.ERROR) - logger.log(traceback.format_exc(), logger.ERROR) + logger.error('network timezone update error') + logger.error(traceback.format_exc()) # refresh webdl types try: properFinder.load_webdl_types() except (BaseException, Exception): - logger.log('error loading webdl_types', logger.DEBUG) + logger.debug('error loading webdl_types') # update xem id lists try: sickgear.scene_exceptions.get_xem_ids() except (BaseException, Exception): - logger.log('xem id list update error', logger.ERROR) - logger.log(traceback.format_exc(), logger.ERROR) + logger.error('xem id list update error') + logger.error(traceback.format_exc()) # update scene exceptions try: sickgear.scene_exceptions.retrieve_exceptions() except (BaseException, Exception): - logger.log('scene exceptions update error', logger.ERROR) - logger.log(traceback.format_exc(), logger.ERROR) + logger.error('scene exceptions update error') + logger.error(traceback.format_exc()) # clear the data of unused providers try: sickgear.helpers.clear_unused_providers() except (BaseException, Exception): - logger.log('unused provider cleanup error', logger.ERROR) - logger.log(traceback.format_exc(), logger.ERROR) + logger.error('unused provider cleanup error') + logger.error(traceback.format_exc()) # cleanup image cache try: sickgear.helpers.cleanup_cache() except (BaseException, Exception): - logger.log('image cache cleanup error', logger.ERROR) - logger.log(traceback.format_exc(), logger.ERROR) + logger.error('image cache cleanup error') + logger.error(traceback.format_exc()) # check tvinfo cache try: for i in sickgear.TVInfoAPI().all_sources: sickgear.TVInfoAPI(i).setup().check_cache() except (BaseException, Exception): - logger.log('tvinfo cache check error', logger.ERROR) - logger.log(traceback.format_exc(), logger.ERROR) + logger.error('tvinfo cache check error') + logger.error(traceback.format_exc()) # cleanup tvinfo cache try: for i in sickgear.TVInfoAPI().all_sources: sickgear.TVInfoAPI(i).setup().clean_cache() except (BaseException, Exception): - logger.log('tvinfo cache cleanup error', logger.ERROR) - logger.log(traceback.format_exc(), logger.ERROR) + logger.error('tvinfo cache cleanup error') + logger.error(traceback.format_exc()) # cleanup ignore and require lists try: clean_ignore_require_words() - except Exception: - logger.log('ignore, require words cleanup error', logger.ERROR) - logger.log(traceback.format_exc(), logger.ERROR) + except (BaseException, Exception): + logger.error('ignore, require words cleanup error') + logger.error(traceback.format_exc()) # cleanup manual search history sickgear.search_queue.remove_old_fifo(sickgear.search_queue.MANUAL_SEARCH_HISTORY) # add missing mapped ids if not sickgear.background_mapping_task.is_alive(): - logger.log(u'Updating the TV info mappings') + logger.log('Updating the TV info mappings') import 
threading try: sickgear.background_mapping_task = threading.Thread( name='MAPPINGSUPDATER', target=sickgear.indexermapper.load_mapped_ids, kwargs={'update': True}) sickgear.background_mapping_task.start() except (BaseException, Exception): - logger.log('missing mapped ids update error', logger.ERROR) - logger.log(traceback.format_exc(), logger.ERROR) + logger.error('missing mapped ids update error') + logger.error(traceback.format_exc()) - logger.log(u'Doing full update on all shows') + logger.log('Doing full update on all shows') # clean out cache directory, remove everything > 12 hours old try: sickgear.helpers.clear_cache() except (BaseException, Exception): - logger.log('cache dir cleanup error', logger.ERROR) - logger.log(traceback.format_exc(), logger.ERROR) + logger.error('cache dir cleanup error') + logger.error(traceback.format_exc()) # select 10 'Ended' tv_shows updated more than 90 days ago - # and all shows not updated more then 180 days ago to include in this update + # and all shows not updated more than 180 days ago to include in this update stale_should_update = [] stale_update_date = (update_date - datetime.timedelta(days=90)).toordinal() stale_update_date_max = (update_date - datetime.timedelta(days=180)).toordinal() @@ -204,28 +201,28 @@ class ShowUpdater(object): try: # if should_update returns True (not 'Ended') or show is selected stale 'Ended' then update, # otherwise just refresh - if cur_show_obj.should_update(update_date=update_date, - last_indexer_change=show_updates.get(cur_show_obj.tvid, {}). - get(cur_show_obj.prodid)) \ + if cur_show_obj.should_update( + update_date=update_date, + last_indexer_change=show_updates.get(cur_show_obj.tvid, {}).get(cur_show_obj.prodid)) \ or cur_show_obj.tvid_prodid in stale_should_update: - cur_queue_item = sickgear.show_queue_scheduler.action.updateShow(cur_show_obj, - scheduled_update=True) + cur_queue_item = sickgear.show_queue_scheduler.action.update_show( + cur_show_obj, scheduled_update=True) else: - logger.debug(u'Not updating episodes for show %s because it\'s marked as ended and last/next' - u' episode is not within the grace period.' % cur_show_obj.unique_name) - cur_queue_item = sickgear.show_queue_scheduler.action.refreshShow(cur_show_obj, True, True) + logger.debug(f'Not updating episodes for show {cur_show_obj.unique_name} because it\'s' + f' marked as ended and last/next episode is not within the grace period.') + cur_queue_item = sickgear.show_queue_scheduler.action.refresh_show(cur_show_obj, True, True) pi_list.append(cur_queue_item) except (exceptions_helper.CantUpdateException, exceptions_helper.CantRefreshException) as e: - logger.log(u'Automatic update failed: ' + ex(e), logger.ERROR) + logger.error(f'Automatic update failed: {ex(e)}') if len(pi_list): sickgear.show_queue_scheduler.action.daily_update_running = True - ui.ProgressIndicators.setIndicator('dailyUpdate', ui.QueueProgressIndicator('Daily Update', pi_list)) + ui.ProgressIndicators.set_indicator('dailyUpdate', ui.QueueProgressIndicator('Daily Update', pi_list)) - logger.log(u'Added all shows to show queue for full update') + logger.log('Added all shows to show queue for full update') finally: self.amActive = False diff --git a/sickgear/subtitles.py b/sickgear/subtitles.py index d3a7dbbd..c8cda3a0 100644 --- a/sickgear/subtitles.py +++ b/sickgear/subtitles.py @@ -17,9 +17,6 @@ import datetime -# noinspection PyPep8Naming -import encodingKludge as ek - from . 
import db, helpers, logger from .common import * @@ -31,41 +28,41 @@ SINGLE = 'und' def sorted_service_list(): - servicesMapping = dict([(x.lower(), x) for x in subliminal.core.SERVICES]) + services_mapping = dict([(x.lower(), x) for x in subliminal.core.SERVICES]) - newList = [] + new_list = [] # add all services in the priority list, in order - curIndex = 0 - for curService in sickgear.SUBTITLES_SERVICES_LIST: - if curService in servicesMapping: - curServiceDict = dict( - id=curService, - image=curService + '.png', - name=servicesMapping[curService], - enabled=1 == sickgear.SUBTITLES_SERVICES_ENABLED[curIndex], - api_based=__import__('lib.subliminal.services.' + curService, globals=globals(), + cur_index = 0 + for cur_service in sickgear.SUBTITLES_SERVICES_LIST: + if cur_service in services_mapping: + cur_service_dict = dict( + id=cur_service, + image=cur_service + '.png', + name=services_mapping[cur_service], + enabled=1 == sickgear.SUBTITLES_SERVICES_ENABLED[cur_index], + api_based=__import__('lib.subliminal.services.' + cur_service, globals=globals(), locals=locals(), fromlist=['Service']).Service.api_based, - url=__import__('lib.subliminal.services.' + curService, globals=globals(), + url=__import__('lib.subliminal.services.' + cur_service, globals=globals(), locals=locals(), fromlist=['Service']).Service.site_url) - newList.append(curServiceDict) - curIndex += 1 + new_list.append(cur_service_dict) + cur_index += 1 # add any services that are missing from that list - for curService in servicesMapping: - if curService not in [x['id'] for x in newList]: - curServiceDict = dict( - id=curService, - image=curService + '.png', - name=servicesMapping[curService], + for cur_service in services_mapping: + if cur_service not in [x['id'] for x in new_list]: + cur_service_dict = dict( + id=cur_service, + image=cur_service + '.png', + name=services_mapping[cur_service], enabled=False, - api_based=__import__('lib.subliminal.services.' + curService, globals=globals(), + api_based=__import__('lib.subliminal.services.' + cur_service, globals=globals(), locals=locals(), fromlist=['Service']).Service.api_based, - url=__import__('lib.subliminal.services.' + curService, globals=globals(), + url=__import__('lib.subliminal.services.' + cur_service, globals=globals(), locals=locals(), fromlist=['Service']).Service.site_url) - newList.append(curServiceDict) + new_list.append(cur_service_dict) - return newList + return new_list def get_enabled_service_list(): @@ -81,10 +78,10 @@ def get_language_name(select_lang): def wanted_languages(sql_like=False): - wantedLanguages = sorted(sickgear.SUBTITLES_LANGUAGES) + wanted_langs = sorted(sickgear.SUBTITLES_LANGUAGES) if sql_like: - return '%' + ','.join(wantedLanguages) + '%' - return wantedLanguages + return '%' + ','.join(wanted_langs) + '%' + return wanted_langs def subtitles_languages(video_path): @@ -127,11 +124,11 @@ class SubtitlesFinder(object): def _main(self): if 1 > len(sickgear.subtitles.get_enabled_service_list()): - logger.log(u'Not enough services selected. At least 1 service is required to' - u' search subtitles in the background', logger.ERROR) + logger.error('Not enough services selected. 
At least 1 service is required to' + ' search subtitles in the background') return - logger.log(u'Checking for subtitles', logger.MESSAGE) + logger.log('Checking for subtitles') # get episodes on which we want subtitles # criteria is: @@ -166,9 +163,9 @@ class SubtitlesFinder(object): now = datetime.datetime.now() for cur_result in sql_result: - if not ek.ek(os.path.isfile, cur_result['location']): - logger.log('Episode file does not exist, cannot download subtitles for episode %dx%d of show %s' - % (cur_result['season'], cur_result['episode'], cur_result['show_name']), logger.DEBUG) + if not os.path.isfile(cur_result['location']): + logger.debug(f'Episode file does not exist, cannot download subtitles for episode' + f' {cur_result["season"]:d}x{cur_result["episode"]:d} of show {cur_result["show_name"]}') continue # Old shows rule @@ -180,17 +177,17 @@ class SubtitlesFinder(object): (cur_result['airdate_daydiff'] <= 7 and cur_result['searchcount'] < 7 and now - datetime.datetime.strptime(cur_result['lastsearch'], '%Y-%m-%d %H:%M:%S') > datetime.timedelta(hours=rules['new'][cur_result['searchcount']]))): - logger.log('Downloading subtitles for episode %dx%d of show %s' - % (cur_result['season'], cur_result['episode'], cur_result['show_name']), logger.DEBUG) + logger.debug(f'Downloading subtitles for episode {cur_result["season"]:d}x{cur_result["episode"]:d}' + f' of show {cur_result["show_name"]}') show_obj = helpers.find_show_by_id({int(cur_result['tv_id']): int(cur_result['prod_id'])}) if not show_obj: - logger.log(u'Show not found', logger.DEBUG) + logger.debug('Show not found') return ep_obj = show_obj.get_episode(int(cur_result['season']), int(cur_result['episode'])) if isinstance(ep_obj, str): - logger.log(u'Episode not found', logger.DEBUG) + logger.debug('Episode not found') return # noinspection PyUnusedLocal @@ -200,7 +197,7 @@ class SubtitlesFinder(object): # noinspection PyUnusedLocal subtitles = ep_obj.download_subtitles() except (BaseException, Exception): - logger.log(u'Unable to find subtitles', logger.DEBUG) + logger.debug('Unable to find subtitles') return @staticmethod diff --git a/sickgear/traktChecker.py b/sickgear/traktChecker.py deleted file mode 100644 index 092a0471..00000000 --- a/sickgear/traktChecker.py +++ /dev/null @@ -1,225 +0,0 @@ -# Author: Frank Fenton -# -# This file is part of SickGear. -# -# SickGear is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# SickGear is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with SickGear. If not, see . - -import datetime -import os -import traceback - -# noinspection PyPep8Naming -import encodingKludge as ek - -import sickgear -from . 
import helpers, logger, search_queue -from .common import SKIPPED, WANTED -from .indexers.indexer_config import TVINFO_TVRAGE - - -class TraktChecker(object): - def __init__(self): - self.todoWanted = [] - - def run(self, force=False): - try: - # add shows from trakt.tv watchlist - if sickgear.TRAKT_USE_WATCHLIST: - self.todoWanted = [] # its about to all get re-added - if len(sickgear.ROOT_DIRS.split('|')) < 2: - logger.log(u"No default root directory", logger.ERROR) - return - self.updateShows() - self.updateEpisodes() - - # sync trakt.tv library with SickGear library - if sickgear.TRAKT_SYNC: - self.syncLibrary() - except Exception: - logger.log(traceback.format_exc(), logger.DEBUG) - - def findShow(self, tvid, prodid): - library = TraktCall("user/library/shows/all.json/%API%/" + sickgear.TRAKT_USERNAME, sickgear.TRAKT_API, sickgear.TRAKT_USERNAME, sickgear.TRAKT_PASSWORD) - - if library == 'NULL': - logger.log(u"No shows found in your library, aborting library update", logger.DEBUG) - return - - if not library: - logger.log(u"Could not connect to trakt service, aborting library check", logger.ERROR) - return - - return filter(lambda x: int(prodid) in [int(x['tvdb_id']) or 0, int(x['tvrage_id'])] or 0, library) - - def syncLibrary(self): - logger.log(u"Syncing Trakt.tv show library", logger.DEBUG) - - for cur_show_obj in sickgear.showList: - self.addShowToTraktLibrary(cur_show_obj) - - def removeShowFromTraktLibrary(self, show_obj): - data = {} - if self.findShow(show_obj.tvid, show_obj.prodid): - # URL parameters - data['tvdb_id'] = helpers.mapIndexersToShow(show_obj)[1] - data['title'] = show_obj.name - data['year'] = show_obj.startyear - - if len(data): - logger.log(u"Removing " + show_obj.name + " from trakt.tv library", logger.DEBUG) - TraktCall("show/unlibrary/%API%", sickgear.TRAKT_API, sickgear.TRAKT_USERNAME, sickgear.TRAKT_PASSWORD, - data) - - def addShowToTraktLibrary(self, show_obj): - """ - Sends a request to trakt indicating that the given show and all its episodes is part of our library. 
- - show_obj: The TVShow object to add to trakt - """ - - data = {} - - if not self.findShow(show_obj.tvid, show_obj.prodid): - # URL parameters - data['tvdb_id'] = helpers.mapIndexersToShow(show_obj)[1] - data['title'] = show_obj.name - data['year'] = show_obj.startyear - - if len(data): - logger.log(u"Adding " + show_obj.name + " to trakt.tv library", logger.DEBUG) - TraktCall("show/library/%API%", sickgear.TRAKT_API, sickgear.TRAKT_USERNAME, sickgear.TRAKT_PASSWORD, - data) - - def updateShows(self): - logger.log(u"Starting trakt show watchlist check", logger.DEBUG) - watchlist = TraktCall("user/watchlist/shows.json/%API%/" + sickgear.TRAKT_USERNAME, sickgear.TRAKT_API, sickgear.TRAKT_USERNAME, sickgear.TRAKT_PASSWORD) - - if watchlist == 'NULL': - logger.log(u"No shows found in your watchlist, aborting watchlist update", logger.DEBUG) - return - - if not watchlist: - logger.log(u"Could not connect to trakt service, aborting watchlist update", logger.ERROR) - return - - for show in watchlist: - tvid = int(sickgear.TRAKT_DEFAULT_INDEXER) - prodid = int(show[('tvdb_id', 'tvrage_id')[TVINFO_TVRAGE == tvid]]) - - if int(sickgear.TRAKT_METHOD_ADD) != 2: - self.addDefaultShow(tvid, prodid, show["title"], SKIPPED) - else: - self.addDefaultShow(tvid, prodid, show["title"], WANTED) - - if int(sickgear.TRAKT_METHOD_ADD) == 1: - show_obj = helpers.find_show_by_id({tvid: prodid}) - if None is not show_obj: - self.setEpisodeToWanted(show_obj, 1, 1) - else: - self.todoWanted.append((prodid, 1, 1)) - - def updateEpisodes(self): - """ - Sets episodes to wanted that are in trakt watchlist - """ - logger.log(u"Starting trakt episode watchlist check", logger.DEBUG) - watchlist = TraktCall("user/watchlist/episodes.json/%API%/" + sickgear.TRAKT_USERNAME, sickgear.TRAKT_API, sickgear.TRAKT_USERNAME, sickgear.TRAKT_PASSWORD) - - if watchlist == 'NULL': - logger.log(u"No episodes found in your watchlist, aborting watchlist update", logger.DEBUG) - return - - if not watchlist: - logger.log(u"Could not connect to trakt service, aborting watchlist update", logger.ERROR) - return - - for show in watchlist: - tvid = int(sickgear.TRAKT_DEFAULT_INDEXER) - prodid = int(show[('tvdb_id', 'tvrage_id')[TVINFO_TVRAGE == tvid]]) - - self.addDefaultShow(tvid, prodid, show['title'], SKIPPED) - show_obj = helpers.find_show_by_id({tvid: prodid}) - - try: - if show_obj and show_obj.tvid == tvid: - for episode in show["episodes"]: - if None is not show_obj: - self.setEpisodeToWanted(show_obj, episode["season"], episode["number"]) - else: - self.todoWanted.append((prodid, episode["season"], episode["number"])) - except TypeError: - logger.log(u"Could not parse the output from trakt for " + show["title"], logger.DEBUG) - - def addDefaultShow(self, tvid, prod_id, name, status): - """ - Adds a new show with the default settings - """ - if not helpers.find_show_by_id({int(tvid): int(prodid)}): - logger.log(u"Adding show " + str(prod_id)) - root_dirs = sickgear.ROOT_DIRS.split('|') - - try: - location = root_dirs[int(root_dirs[0]) + 1] - except: - location = None - - if location: - showPath = ek.ek(os.path.join, location, helpers.sanitize_filename(name)) - dir_exists = helpers.make_dir(showPath) - if not dir_exists: - logger.log(u"Unable to create the folder " + showPath + ", can't add the show", logger.ERROR) - return - else: - helpers.chmod_as_parent(showPath) - - sickgear.show_queue_scheduler.action.add_show( - int(tvid), int(prod_id), showPath, - quality=int(sickgear.QUALITY_DEFAULT), - paused=sickgear.TRAKT_START_PAUSED, 
default_status=status, - flatten_folders=int(sickgear.FLATTEN_FOLDERS_DEFAULT) - ) - else: - logger.log(u"There was an error creating the show, no root directory setting found", logger.ERROR) - return - - def setEpisodeToWanted(self, show_obj, s, e): - """ - Sets an episode to wanted, only is it is currently skipped - """ - ep_obj = show_obj.get_episode(int(s), int(e)) - if ep_obj: - - with ep_obj.lock: - if ep_obj.status != SKIPPED or ep_obj.airdate == datetime.date.fromordinal(1): - return - - logger.log(u"Setting episode s" + str(s) + "e" + str(e) + " of show " + show_obj.name + " to wanted") - # figure out what segment the episode is in and remember it so we can backlog it - - ep_obj.status = WANTED - ep_obj.save_to_db() - - backlog_queue_item = search_queue.BacklogQueueItem(show_obj, [ep_obj]) - sickgear.search_queue_scheduler.action.add_item(backlog_queue_item) - - logger.log(u"Starting backlog for " + show_obj.name + " season " + str( - s) + " episode " + str(e) + " because some eps were set to wanted") - - def manageNewShow(self, show_obj): - logger.log(u"Checking if trakt watch list wants to search for episodes from new show " + show_obj.name, - logger.DEBUG) - episodes = [i for i in self.todoWanted if i[0] == show_obj.prodid] - for episode in episodes: - self.todoWanted.remove(episode) - self.setEpisodeToWanted(show_obj, episode[1], episode[2]) diff --git a/sickgear/trakt_helpers.py b/sickgear/trakt_helpers.py index acbbb398..b1a8314f 100644 --- a/sickgear/trakt_helpers.py +++ b/sickgear/trakt_helpers.py @@ -5,7 +5,7 @@ import re import sickgear from .helpers import try_int -from _23 import decode_bytes, decode_str, list_items +from _23 import decode_bytes, decode_str from six import iteritems, text_type @@ -51,7 +51,7 @@ def build_config_string(config): :param config: dicts of Trakt account id, parent location :return: string csv of parsed config kwargs for config file """ - return text_type(list_items(config)) + return text_type(list(config.items())) def trakt_collection_remove_account(account_id): diff --git a/sickgear/tv.py b/sickgear/tv.py index 94cbf772..0587137d 100644 --- a/sickgear/tv.py +++ b/sickgear/tv.py @@ -36,8 +36,6 @@ import traceback from imdbpie import ImdbAPIError from lxml_etree import etree -# noinspection PyPep8Naming -import encodingKludge as ek import exceptions_helper from exceptions_helper import ex @@ -45,7 +43,7 @@ import sickgear from . 
import db, helpers, history, image_cache, indexermapper, logger, \ name_cache, network_timezones, notifiers, postProcessor, subtitles from .anime import AniGroupList -from .classes import weakList +from .classes import WeakList from .common import Quality, statusStrings, \ ARCHIVED, DOWNLOADED, FAILED, IGNORED, SKIPPED, SNATCHED, SNATCHED_ANY, SNATCHED_PROPER, UNAIRED, UNKNOWN, WANTED, \ NAMING_DUPLICATE, NAMING_EXTEND, NAMING_LIMITED_EXTEND, NAMING_LIMITED_EXTEND_E_PREFIXED, NAMING_SEPARATED_REPEAT @@ -54,7 +52,7 @@ from .helpers import try_float, try_int from .indexermapper import del_mapping, MapStatus, save_mapping from .indexers.indexer_config import TVINFO_IMDB, TVINFO_TMDB, TVINFO_TRAKT, TVINFO_TVDB, TVINFO_TVMAZE, TVINFO_TVRAGE from .name_parser.parser import InvalidNameException, InvalidShowException, NameParser -from .sgdatetime import SGDatetime, timestamp_near +from .sgdatetime import SGDatetime from .tv_base import TVEpisodeBase, TVShowBase from lib import imdbpie, subliminal @@ -65,14 +63,13 @@ from lib.tvinfo_base import RoleTypes, TVINFO_FACEBOOK, TVINFO_INSTAGRAM, TVINFO from lib.tvinfo_base.exceptions import * from sg_helpers import calc_age, int_to_time, remove_file_perm, time_to_int -from _23 import filter_iter, filter_list, list_keys -from six import integer_types, iteritems, itervalues, moves, PY2, string_types +from six import integer_types, iteritems, itervalues, moves, string_types # noinspection PyUnreachableCode if False: from typing import Any, AnyStr, Dict, List, Optional, Set, Text, Tuple, Union from sqlite3 import Row - from lib.tvinfo_base import CastList, Character as TVINFO_Character, Person as TVINFO_Person, \ + from lib.tvinfo_base import CastList, TVInfoCharacter, TVInfoPerson, \ TVInfoEpisode, TVInfoShow coreid_warnings = False @@ -174,9 +171,9 @@ class TVidProdid(object): if coreid_warnings: logger.log('%s\n' % pre_msg + '|>%s^-- Note: Bootstrap & Tornado startup functions stripped from traceback log.' % - '|>'.join(filter_iter(lambda text: not re.search(r'(?i)bootstrap|traceback\.' - r'format_stack|pydevd|tornado' - r'|webserveinit', text), + '|>'.join(filter(lambda text: not re.search(r'(?i)bootstrap|traceback\.' + r'format_stack|pydevd|tornado' + r'|webserveinit', text), traceback.format_stack(inspect.currentframe())))) except IndexError: pass @@ -283,7 +280,7 @@ def usable_id(value): def usable_rid(value): # type: (Union[AnyStr]) -> Optional[AnyStr] """ - return value if is a id:format is valid + return value if is an id:format is valid otherwise None if value fails basic id format validation """ if isinstance(value, string_types) and ':' in value: @@ -381,7 +378,7 @@ class Person(Referential): akas=None, # type: Set[AnyStr] character_obj=None, # type: Character tmp_character_obj=None # type: Character - ): # type: (...) 
-> Person + ): super(Person, self).__init__(sid) @@ -453,9 +450,9 @@ class Person(Referential): 'homepage', 'ids', 'image_url', 'name', 'nicknames', 'real_name', 'thumb_url']} def reset(self, person_obj=None): - # type: (TVINFO_Person) -> None + # type: (TVInfoPerson) -> None """ - reset all properties with the exception of: name, id, ids + reset all properties except; name, id, ids :param person_obj: TVInfo Person object to reset to """ @@ -646,7 +643,7 @@ class Person(Referential): break def update_prop_from_tvinfo_person(self, person_obj): - # type: (TVINFO_Person) -> None + # type: (TVInfoPerson) -> None """ update person with tvinfo person object info Note: doesn't change: name, id, image_url, thumb_url @@ -747,7 +744,7 @@ class Person(Referential): continue if tvsrc_result: # verify we have the correct person - for cur_person in tvsrc_result: # type: TVINFO_Person + for cur_person in tvsrc_result: # type: TVInfoPerson if None is not rp: break if not (imdb_confirmed and TVINFO_IMDB == tv_src) \ @@ -767,7 +764,7 @@ class Person(Referential): # noinspection PyUnresolvedReferences if show_obj and None is not pd and pd.characters: clean_show_name = indexermapper.clean_show_name(show_obj.name.lower()) - for ch in pd.characters or []: # type: TVINFO_Character + for ch in pd.characters or []: # type: TVInfoCharacter if clean_show_name == indexermapper.clean_show_name(ch.show.seriesname.lower()): rp = pd confirmed_on_src = True @@ -791,7 +788,9 @@ class Person(Referential): if None is not rp: if confirmed_on_src: for i in (TVINFO_TRAKT, TVINFO_IMDB, TVINFO_TMDB, TVINFO_TVMAZE, TVINFO_TVDB): - # in case it's the current source use it's id and lock if from being changed + if not rp.ids.get(i): + continue + # in case it's the current source use its id and lock if from being changed if cur_tv_info_src == i and rp.ids.get(i): source_confirmed[i] = True if rp.ids.get(i) != self.ids.get(i): @@ -805,6 +804,8 @@ class Person(Referential): self.dirty_ids = True for i in (TVINFO_INSTAGRAM, TVINFO_TWITTER, TVINFO_FACEBOOK, TVINFO_WIKIPEDIA): + if not rp.social_ids.get(i): + continue if rp.social_ids.get(i) and not self.ids.get(i) or \ (rp.social_ids.get(i) and rp.social_ids.get(i) != self.ids.get(i)): self.ids[i] = rp.social_ids[i] @@ -894,11 +895,12 @@ class Person(Referential): ] if force or self.dirty_ids: for s, v in iteritems(self.ids): - cl.extend([ - ['UPDATE person_ids SET src_id = ? WHERE person_id = ? AND src = ?', [v, self.id, s]], - ["INSERT INTO person_ids (src, src_id, person_id) SELECT %s, '%s', %s WHERE changes() == 0" - % (s, v, self.id)] - ]) + if v: + cl.extend([ + ['UPDATE person_ids SET src_id = ? WHERE person_id = ? 
AND src = ?', [v, self.id, s]], + ["INSERT INTO person_ids (src, src_id, person_id) SELECT %s, '%s', %s WHERE changes() == 0" + % (s, v, self.id)] + ]) if cl: r_id = my_db.mass_action(cl) if r_id and r_id[-1:][0]: @@ -1401,8 +1403,8 @@ class TVShow(TVShowBase): @cast_list.setter def cast_list(self, value): - # type: (weakList[Character]) -> None - self._cast_list = None if not isinstance(value, weakList) else weakref.ref(value) + # type: (WeakList[Character]) -> None + self._cast_list = None if not isinstance(value, WeakList) else weakref.ref(value) @property def network_id(self): @@ -1528,7 +1530,7 @@ class TVShow(TVShowBase): self._last_found_on_indexer = self.last_found_on_indexer my_db = db.DBConnection() # noinspection PyUnresolvedReferences - last_check = int(timestamp_near(datetime.datetime.now())) + last_check = SGDatetime.timestamp_near() # in case of flag change (+/-) don't change last_check date if abs(v) == abs(self._not_found_count): sql_result = my_db.select( @@ -1587,8 +1589,7 @@ class TVShow(TVShowBase): self._paused = int(value) self.dirty = True else: - logger.log('tried to set paused property to invalid value: %s of type: %s' % (value, type(value)), - logger.ERROR) + logger.error('tried to set paused property to invalid value: %s of type: %s' % (value, type(value))) @property def ids(self): @@ -1635,16 +1636,16 @@ class TVShow(TVShowBase): if sickgear.CREATE_MISSING_SHOW_DIRS: return self._location - if ek.ek(os.path.isdir, self._location): + if os.path.isdir(self._location): return self._location raise exceptions_helper.ShowDirNotFoundException('Show folder does not exist: \'%s\'' % self._location) def _set_location(self, new_location): # type: (AnyStr) -> None - logger.log('Setter sets location to %s' % new_location, logger.DEBUG) + logger.debug('Setter sets location to %s' % new_location) # Don't validate dir if user wants to add shows without creating a dir - if sickgear.ADD_SHOWS_WO_DIR or ek.ek(os.path.isdir, new_location): + if sickgear.ADD_SHOWS_WO_DIR or os.path.isdir(new_location): self.dirty_setter('_location')(self, new_location) self.path = new_location # self._is_location_good = True @@ -1779,8 +1780,8 @@ class TVShow(TVShowBase): if no_create: return - # logger.log('%s: An object for episode %sx%s did not exist in the cache, trying to create it' % - # (self.tvid_prodid, season, episode), logger.DEBUG) + # logger.debug('%s: An object for episode %sx%s did not exist in the cache, trying to create it' % + # (self.tvid_prodid, season, episode)) if path and not existing_only: ep_obj = TVEpisode(self, season, episode, path, show_result=ep_result) @@ -1898,7 +1899,7 @@ class TVShow(TVShowBase): bio=cur_row['c_bio'], ids=c_ids, image_url=cur_row['image_url'], person=[person], persons_years=p_years, show_obj=self, sid=cur_row['c_id'], thumb_url=cur_row['thumb_url'], updated=cur_row['cast_updated'])) - cast_list = weakList(c for c in old_cast or [] if c.id not in old_list) + cast_list = WeakList(c for c in old_cast or [] if c.id not in old_list) self.cast_list = cast_list return cast_list @@ -1988,11 +1989,10 @@ class TVShow(TVShowBase): return True return False - # In some situations self.status = None.. need to figure out where that is! + # In some situations self.status = None, need to figure out where that is! 
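Not part of the patch: a minimal stand-alone sketch, assuming the stdlib logging module as a stand-in for SickGear's own logger, of the call-style migration applied throughout these hunks, where `logger.log(msg, logger.DEBUG)` becomes `logger.debug(msg)`, usually together with an f-string rewrite.

```python
import logging

logging.basicConfig(level=logging.DEBUG, format='%(levelname)s %(message)s')
_log = logging.getLogger('sickgear-example')


def log(msg, level=logging.INFO):
    """Old style removed in this diff: one entry point with an explicit level argument."""
    _log.log(level, msg)


def debug(msg):
    """New style added in this diff: per-level helpers such as debug/warning/error."""
    _log.debug(msg)


if __name__ == '__main__':
    tvid_prodid, season, episode = '1:123', 2, 5
    # before: level passed as a trailing argument, message built with %-formatting
    log('%s: Retrieving/creating episode %sx%s' % (tvid_prodid, season, episode), logging.DEBUG)
    # after: level encoded in the helper name, message built with an f-string
    debug(f'{tvid_prodid}: Retrieving/creating episode {season}x{episode}')
```

The per-level helpers keep call sites shorter and let long messages be split across adjacent f-string literals, which is the pattern used for the reworked log lines in tv.py.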
if not self._status: self.status = '' - logger.log('Status missing for show: [%s] with status: [%s]' % - (self.tvid_prodid, self._status), logger.DEBUG) + logger.debug(f'Status missing for show: [{self.tvid_prodid}] with status: [{self._status}]') last_update_indexer = datetime.date.fromordinal(self._last_update_indexer) @@ -2024,7 +2024,7 @@ class TVShow(TVShowBase): last_airdate = datetime.date.fromordinal(sql_result[1][0]['airdate']) \ if sql_result and sql_result[1] else datetime.date.fromordinal(1) - # if show is not 'Ended' and last episode aired less then 460 days ago + # if show is not 'Ended' and last episode aired less than 460 days ago # or don't have an airdate for the last episode always update (status 'Continuing' or '') update_days_limit = 2013 ended_limit = datetime.timedelta(days=update_days_limit) @@ -2052,7 +2052,7 @@ class TVShow(TVShowBase): result = False - if not ek.ek(os.path.isdir, self._location): + if not os.path.isdir(self._location): logger.log('%s: Show directory doesn\'t exist, skipping NFO generation' % self.tvid_prodid) return False @@ -2068,7 +2068,7 @@ class TVShow(TVShowBase): :param show_only: only for show :param force: """ - if not ek.ek(os.path.isdir, self._location): + if not os.path.isdir(self._location): logger.log('%s: Show directory doesn\'t exist, skipping NFO generation' % self.tvid_prodid) return @@ -2084,7 +2084,7 @@ class TVShow(TVShowBase): def write_episode_nfo(self, force=False): # type: (bool) -> None - if not ek.ek(os.path.isdir, self._location): + if not os.path.isdir(self._location): logger.log('%s: Show directory doesn\'t exist, skipping NFO generation' % self.tvid_prodid) return @@ -2103,23 +2103,20 @@ class TVShow(TVShowBase): for cur_row in sql_result: if (cur_row['season'], cur_row['episode']) in processed: continue - logger.log('%s: Retrieving/creating episode %sx%s' - % (self.tvid_prodid, cur_row['season'], cur_row['episode']), logger.DEBUG) + logger.debug(f'{self.tvid_prodid}: Retrieving/creating episode {cur_row["season"]}x{cur_row["episode"]}') ep_obj = self.get_episode(cur_row['season'], cur_row['episode'], ep_result=[cur_row]) if not ep_obj.related_ep_obj: processed += [(cur_row['season'], cur_row['episode'])] else: - logger.log('%s: Found related to %sx%s episode(s)... %s' - % (self.tvid_prodid, cur_row['season'], cur_row['episode'], - ', '.join(['%sx%s' % (x.season, x.episode) for x in ep_obj.related_ep_obj])), - logger.DEBUG) + logger.debug(f'{self.tvid_prodid}: Found related to {cur_row["season"]}x{cur_row["episode"]} episode(s)' + f'... 
{", ".join(["%sx%s" % (x.season, x.episode) for x in ep_obj.related_ep_obj])}') processed += list(set([(cur_row['season'], cur_row['episode'])] + [(x.season, x.episode) for x in ep_obj.related_ep_obj])) ep_obj.create_meta_files(force) def update_metadata(self): - if not ek.ek(os.path.isdir, self._location): + if not os.path.isdir(self._location): logger.log('%s: Show directory doesn\'t exist, skipping NFO generation' % self.tvid_prodid) return @@ -2129,7 +2126,7 @@ class TVShow(TVShowBase): result = False - if not ek.ek(os.path.isdir, self._location): + if not os.path.isdir(self._location): logger.log('%s: Show directory doesn\'t exist, skipping NFO generation' % self.tvid_prodid) return False @@ -2142,7 +2139,7 @@ class TVShow(TVShowBase): # find all media files in the show folder and create episodes for as many as possible def load_episodes_from_dir(self): - if not ek.ek(os.path.isdir, self._location): + if not os.path.isdir(self._location): logger.log('%s: Show directory doesn\'t exist, not loading episodes from disk' % self.tvid_prodid) return @@ -2157,21 +2154,21 @@ class TVShow(TVShowBase): parse_result = None ep_obj = None - logger.log('%s: Creating episode from %s' % (self.tvid_prodid, cur_media_file), logger.DEBUG) + logger.debug('%s: Creating episode from %s' % (self.tvid_prodid, cur_media_file)) try: - ep_obj = self.ep_obj_from_file(ek.ek(os.path.join, self._location, cur_media_file)) + ep_obj = self.ep_obj_from_file(os.path.join(self._location, cur_media_file)) except (exceptions_helper.ShowNotFoundException, exceptions_helper.EpisodeNotFoundException) as e: - logger.log('Episode %s returned an exception: %s' % (cur_media_file, ex(e)), logger.ERROR) + logger.error('Episode %s returned an exception: %s' % (cur_media_file, ex(e))) continue except exceptions_helper.EpisodeDeletedException: - logger.log('The episode deleted itself when I tried making an object for it', logger.DEBUG) + logger.debug('The episode deleted itself when I tried making an object for it') if None is ep_obj: continue # see if we should save the release name in the db - ep_file_name = ek.ek(os.path.basename, ep_obj.location) - ep_file_name = ek.ek(os.path.splitext, ep_file_name)[0] + ep_file_name = os.path.basename(ep_obj.location) + ep_file_name = os.path.splitext(ep_file_name)[0] try: parse_result = None @@ -2181,9 +2178,7 @@ class TVShow(TVShowBase): pass if ep_file_name and parse_result and None is not parse_result.release_group and not ep_obj.release_name: - logger.log( - 'Name %s gave release group of %s, seems valid' % (ep_file_name, parse_result.release_group), - logger.DEBUG) + logger.debug(f'Name {ep_file_name} gave release group of {parse_result.release_group}, seems valid') ep_obj.release_name = ep_file_name # store the reference in the show @@ -2192,8 +2187,8 @@ class TVShow(TVShowBase): try: ep_obj.refresh_subtitles() except (BaseException, Exception): - logger.log('%s: Could not refresh subtitles' % self.tvid_prodid, logger.ERROR) - logger.log(traceback.format_exc(), logger.ERROR) + logger.error('%s: Could not refresh subtitles' % self.tvid_prodid) + logger.error(traceback.format_exc()) result = ep_obj.get_sql() if None is not result: @@ -2236,8 +2231,7 @@ class TVShow(TVShowBase): try: cached_show = t.get_show(self.prodid, language=self._lang) except BaseTVinfoError as e: - logger.log('Unable to find cached seasons from %s: %s' % ( - sickgear.TVInfoAPI(self.tvid).name, ex(e)), logger.WARNING) + logger.warning(f'Unable to find cached seasons from {sickgear.TVInfoAPI(self.tvid).name}: {ex(e)}') 
if None is cached_show: return scanned_eps @@ -2262,14 +2256,14 @@ class TVShow(TVShowBase): try: cached_seasons[season] = cached_show[season] except BaseTVinfoSeasonnotfound as e: - logger.log('Error when trying to load the episode for [%s] from %s: %s' % - (self._name, sickgear.TVInfoAPI(self.tvid).name, ex(e)), logger.WARNING) + logger.warning(f'Error when trying to load the episode for [{self._name}]' + f' from {sickgear.TVInfoAPI(self.tvid).name}: {ex(e)}') delete_ep = True if season not in scanned_eps: scanned_eps[season] = {} - logger.log('Loading episode %sx%s for [%s] from the DB' % (season, episode, self.name), logger.DEBUG) + logger.debug('Loading episode %sx%s for [%s] from the DB' % (season, episode, self.name)) try: ep_obj = self.get_episode(season, episode, ep_result=[cur_row]) # type: TVEpisode @@ -2283,8 +2277,8 @@ class TVShow(TVShowBase): ep_obj.load_from_tvinfo(tvapi=t, update=update, cached_show=cached_show) scanned_eps[season][episode] = True except exceptions_helper.EpisodeDeletedException: - logger.log('Tried loading an episode that should have been deleted from the DB [%s], skipping it' - % self._name, logger.DEBUG) + logger.debug(f'Tried loading an episode that should have been deleted from the DB [{self._name}],' + f' skipping it') continue if cl: @@ -2336,9 +2330,8 @@ class TVShow(TVShowBase): t = sickgear.TVInfoAPI(self.tvid).setup(**tvinfo_config) show_obj = t.get_show(self.prodid, language=self._lang) except BaseTVinfoError: - logger.log('%s timed out, unable to update episodes for [%s] from %s' % - (sickgear.TVInfoAPI(self.tvid).name, self._name, sickgear.TVInfoAPI(self.tvid).name), - logger.ERROR) + logger.error(f'{sickgear.TVInfoAPI(self.tvid).name} timed out,' + f' unable to update episodes for [{self._name}] from {sickgear.TVInfoAPI(self.tvid).name}') return None scanned_eps = {} @@ -2372,9 +2365,8 @@ class TVShow(TVShowBase): continue with ep_obj.lock: - logger.log('%s: Loading info from %s for episode %sx%s from [%s]' % - (self.tvid_prodid, sickgear.TVInfoAPI(self.tvid).name, season, episode, self._name), - logger.DEBUG) + logger.debug(f'{self.tvid_prodid}: Loading info from {sickgear.TVInfoAPI(self.tvid).name}' + f' for episode {season}x{episode} from [{self._name}]') ep_obj.load_from_tvinfo(season, episode, tvapi=t, update=update, cached_show=show_obj, switch=switch, old_tvid=old_tvid, old_prodid=old_prodid, switch_list=sql_l) @@ -2401,7 +2393,7 @@ class TVShow(TVShowBase): for cur_provider in itervalues(sickgear.metadata_provider_dict): # FIXME: Needs to not show this message if the option is not enabled? - logger.log('Running metadata routines for %s' % cur_provider.name, logger.DEBUG) + logger.debug('Running metadata routines for %s' % cur_provider.name) fanart_result = cur_provider.create_fanart(self) or fanart_result poster_result = cur_provider.create_poster(self) or poster_result @@ -2423,28 +2415,28 @@ class TVShow(TVShowBase): :param path: :return: """ - if not ek.ek(os.path.isfile, path): + if not os.path.isfile(path): logger.log('%s: Not a real file... 
%s' % (self.tvid_prodid, path)) return None - logger.log('%s: Creating episode object from %s' % (self.tvid_prodid, path), logger.DEBUG) + logger.debug('%s: Creating episode object from %s' % (self.tvid_prodid, path)) try: my_parser = NameParser(show_obj=self) parse_result = my_parser.parse(path) except InvalidNameException: - logger.log('Unable to parse the filename %s into a valid episode' % path, logger.DEBUG) + logger.debug('Unable to parse the filename %s into a valid episode' % path) return None except InvalidShowException: - logger.log('Unable to parse the filename %s into a valid show' % path, logger.DEBUG) + logger.debug('Unable to parse the filename %s into a valid show' % path) return None if not len(parse_result.episode_numbers): logger.log('parse_result: %s' % parse_result) - logger.log('No episode number found in %s, ignoring it' % path, logger.ERROR) + logger.error('No episode number found in %s, ignoring it' % path) return None - # for now lets assume that any episode in the show dir belongs to that show + # for now let's assume that any episode in the show dir belongs to that show season_number = parse_result.season_number if None is not parse_result.season_number else 1 episode_numbers = parse_result.episode_numbers root_ep_obj = None @@ -2453,8 +2445,7 @@ class TVShow(TVShowBase): for cur_ep_num in episode_numbers: cur_ep_num = int(cur_ep_num) - logger.log('%s: %s parsed to %s %sx%s' % (self.tvid_prodid, path, self._name, season_number, cur_ep_num), - logger.DEBUG) + logger.debug('%s: %s parsed to %s %sx%s' % (self.tvid_prodid, path, self._name, season_number, cur_ep_num)) check_quality_again = False same_file = False @@ -2464,21 +2455,21 @@ class TVShow(TVShowBase): try: ep_obj = self.get_episode(season_number, cur_ep_num, path) except exceptions_helper.EpisodeNotFoundException: - logger.log('%s: Unable to figure out what this file is, skipping' % self.tvid_prodid, logger.ERROR) + logger.error('%s: Unable to figure out what this file is, skipping' % self.tvid_prodid) continue else: # if there is a new file associated with this ep then re-check the quality - status, quality = sickgear.common.Quality.splitCompositeStatus(ep_obj.status) + status, quality = sickgear.common.Quality.split_composite_status(ep_obj.status) if IGNORED == status: continue - if (ep_obj.location and ek.ek(os.path.normpath, ep_obj.location) != ek.ek(os.path.normpath, path)) or \ + if (ep_obj.location and os.path.normpath(ep_obj.location) != os.path.normpath(path)) or \ (not ep_obj.location and path) or \ (SKIPPED == status): - logger.log('The old episode had a different file associated with it, re-checking the quality ' + - 'based on the new filename %s' % path, logger.DEBUG) + logger.debug('The old episode had a different file associated with it, re-checking the quality ' + 'based on the new filename %s' % path) check_quality_again = True with ep_obj.lock: @@ -2504,42 +2495,40 @@ class TVShow(TVShowBase): # if user replaces a file, attempt to recheck the quality unless it's know to be the same file if check_quality_again and not same_file: - new_quality = Quality.nameQuality(path, self.is_anime) + new_quality = Quality.name_quality(path, self.is_anime) if Quality.UNKNOWN == new_quality: - new_quality = Quality.fileQuality(path) - logger.log('Since this file was renamed, file %s was checked and quality "%s" found' - % (path, Quality.qualityStrings[new_quality]), logger.DEBUG) - status, quality = sickgear.common.Quality.splitCompositeStatus(ep_obj.status) + new_quality = Quality.file_quality(path) + 
logger.debug(f'Since this file was renamed, file {path}' + f' was checked and quality "{Quality.qualityStrings[new_quality]}" found') + status, quality = sickgear.common.Quality.split_composite_status(ep_obj.status) if Quality.UNKNOWN != new_quality or status in (SKIPPED, UNAIRED): - ep_obj.status = Quality.compositeStatus(DOWNLOADED, new_quality) + ep_obj.status = Quality.composite_status(DOWNLOADED, new_quality) # check for status/quality changes as long as it's a new file elif not same_file and sickgear.helpers.has_media_ext(path)\ and ep_obj.status not in Quality.DOWNLOADED + Quality.ARCHIVED + [IGNORED]: - old_status, old_quality = Quality.splitCompositeStatus(ep_obj.status) - new_quality = Quality.nameQuality(path, self.is_anime) + old_status, old_quality = Quality.split_composite_status(ep_obj.status) + new_quality = Quality.name_quality(path, self.is_anime) if Quality.UNKNOWN == new_quality: - new_quality = Quality.fileQuality(path) + new_quality = Quality.file_quality(path) if Quality.UNKNOWN == new_quality: - new_quality = Quality.assumeQuality(path) + new_quality = Quality.assume_quality(path) new_status = None # if it was snatched and now exists then set the status correctly if SNATCHED == old_status and old_quality <= new_quality: - logger.log('STATUS: this episode used to be snatched with quality %s but' - ' a file exists with quality %s so setting the status to DOWNLOADED' - % (Quality.qualityStrings[old_quality], Quality.qualityStrings[new_quality]), - logger.DEBUG) + logger.debug(f'STATUS: this episode used to be snatched with quality' + f' {Quality.qualityStrings[old_quality]} but a file exists with quality' + f' {Quality.qualityStrings[new_quality]} so setting the status to DOWNLOADED') new_status = DOWNLOADED - # if it was snatched proper and we found a higher quality one then allow the status change + # if it was snatched proper, and we found a higher quality one then allow the status change elif SNATCHED_PROPER == old_status and old_quality < new_quality: - logger.log('STATUS: this episode used to be snatched proper with quality %s but' - ' a file exists with quality %s so setting the status to DOWNLOADED' - % (Quality.qualityStrings[old_quality], Quality.qualityStrings[new_quality]), - logger.DEBUG) + logger.debug(f'STATUS: this episode used to be snatched proper with quality' + f' {Quality.qualityStrings[old_quality]} but a file exists with quality' + f' {Quality.qualityStrings[new_quality]} so setting the status to DOWNLOADED') new_status = DOWNLOADED elif old_status not in SNATCHED_ANY: @@ -2547,19 +2536,19 @@ class TVShow(TVShowBase): if None is not new_status: with ep_obj.lock: - logger.log('STATUS: we have an associated file, so setting the status from %s to DOWNLOADED/%s' - % (ep_obj.status, Quality.compositeStatus(new_status, new_quality)), logger.DEBUG) - ep_obj.status = Quality.compositeStatus(new_status, new_quality) + logger.debug(f'STATUS: we have an associated file, so setting the status from {ep_obj.status}' + f' to DOWNLOADED/{Quality.composite_status(new_status, new_quality)}') + ep_obj.status = Quality.composite_status(new_status, new_quality) elif same_file: - status, quality = Quality.splitCompositeStatus(ep_obj.status) + status, quality = Quality.split_composite_status(ep_obj.status) if status in (SKIPPED, UNAIRED): - new_quality = Quality.nameQuality(path, self.is_anime) + new_quality = Quality.name_quality(path, self.is_anime) if Quality.UNKNOWN == new_quality: - new_quality = Quality.fileQuality(path) - logger.log('Since this file has status: 
"%s", file %s was checked and quality "%s" found' - % (statusStrings[status], path, Quality.qualityStrings[new_quality]), logger.DEBUG) - ep_obj.status = Quality.compositeStatus(DOWNLOADED, new_quality) + new_quality = Quality.file_quality(path) + logger.debug(f'Since this file has status: "{statusStrings[status]}", file {path}' + f' was checked and quality "{Quality.qualityStrings[new_quality]}" found') + ep_obj.status = Quality.composite_status(DOWNLOADED, new_quality) with ep_obj.lock: result = ep_obj.get_sql() @@ -2670,7 +2659,7 @@ class TVShow(TVShowBase): self.release_groups = self._anime and AniGroupList(self.tvid, self.prodid, self.tvid_prodid) or None - logger.log(u'Loaded.. {: <9} {: <8} {}'.format( + logger.log('Loaded.. {: <9} {: <8} {}'.format( sickgear.TVInfoAPI(self.tvid).config.get('name') + ',', '%s,' % self.prodid, self.name)) # Get IMDb_info from database @@ -2695,8 +2684,7 @@ class TVShow(TVShowBase): if 'is_mini_series' in self._imdb_info: self._imdb_info['is_mini_series'] = bool(self._imdb_info['is_mini_series']) elif sickgear.USE_IMDB_INFO: - logger.log('%s: The next show update will attempt to find IMDb info for [%s]' % - (self.tvid_prodid, self.name), logger.DEBUG) + logger.debug(f'{self.tvid_prodid}: The next show update will attempt to find IMDb info for [{self.name}]') return self.dirty = False @@ -2771,7 +2759,7 @@ class TVShow(TVShowBase): :param scheduled_update: :param switch: """ - # There's gotta be a better way of doing this but we don't wanna + # There's gotta be a better way of doing this, but we don't want to # change the cache value elsewhere if None is tvapi: tvinfo_config = sickgear.TVInfoAPI(self.tvid).api_params.copy() @@ -2797,9 +2785,9 @@ class TVShow(TVShowBase): if None is show_info or getattr(t, 'show_not_found', False): if getattr(t, 'show_not_found', False): self.inc_not_found_count() - logger.log('Show [%s] not found (maybe even removed?)' % self._name, logger.WARNING) + logger.warning('Show [%s] not found (maybe even removed?)' % self._name) else: - logger.log('Show data [%s] not found' % self._name, logger.WARNING) + logger.warning('Show data [%s] not found' % self._name) return False self.reset_not_found_count() @@ -2856,14 +2844,14 @@ class TVShow(TVShowBase): if show_info.cast and self._should_cast_update(show_info.cast): sickgear.people_queue_scheduler.action.add_cast_update(show_obj=self, show_info_cast=show_info.cast, - scheduled_update=scheduled_update, switch=switch) + scheduled_update=scheduled_update, switch=switch) else: logger.log('Not updating cast for show because data is unchanged.') return show_info @staticmethod def _update_person_properties_helper(person_obj, src_person, p_ids): - # type: (Person, TVINFO_Person, Dict) -> None + # type: (Person, TVInfoPerson, Dict) -> None person_obj.update_properties( name=src_person.name, gender=src_person.gender, birthday=src_person.birthdate, deathday=src_person.deathdate, @@ -2898,8 +2886,8 @@ class TVShow(TVShowBase): cast_list = self._load_cast_from_db() remove_char_ids = {c.id for c in cast_list or []} - cast_ordered = weakList() - for ct, c_l in iteritems(show_info_cast): # type: (integer_types, List[TVINFO_Character]) + cast_ordered = WeakList() + for ct, c_l in iteritems(show_info_cast): # type: (integer_types, List[TVInfoCharacter]) if ct not in (RoleTypes.ActorMain, RoleTypes.Host, RoleTypes.Interviewer, RoleTypes.Presenter): continue for c in c_l: @@ -2959,8 +2947,8 @@ class TVShow(TVShowBase): try: old_person_ids.remove(existing_person.id) except KeyError: - logger.log('%s 
- Person error: %s (%s)' % - (self.name, existing_person.name, existing_person.id), logger.ERROR) + logger.error(f'{self.name} -' + f' Person error: {existing_person.name} ({existing_person.id})') pass if force: existing_person.reset(src_person) @@ -3023,12 +3011,12 @@ class TVShow(TVShowBase): if not sickgear.USE_IMDB_INFO: return - logger.log('Retrieving show info [%s] from IMDb' % self._name, logger.DEBUG) + logger.debug('Retrieving show info [%s] from IMDb' % self._name) try: self._get_imdb_info() except (BaseException, Exception) as e: - logger.log('Error loading IMDb info: %s' % ex(e), logger.ERROR) - logger.log('%s' % traceback.format_exc(), logger.ERROR) + logger.error('Error loading IMDb info: %s' % ex(e)) + logger.error('%s' % traceback.format_exc()) @staticmethod def check_imdb_redirect(imdb_id): @@ -3040,7 +3028,7 @@ class TVShow(TVShowBase): page_url = 'https://www.imdb.com/title/{0}/'.format(imdb_id) try: response = requests.head(page_url, allow_redirects=True) - if response.history and any([h for h in response.history if 301 == h.status_code]): + if response.history and any(h for h in response.history if 301 == h.status_code): return helpers.parse_imdb_id(response.url) except (BaseException, Exception): pass @@ -3075,9 +3063,9 @@ class TVShow(TVShowBase): self._imdbid = redirect_check imdb_id = redirect_check imdb_info['imdb_id'] = self.imdbid - i = imdbpie.Imdb(exclude_episodes=True, cachedir=ek.ek(os.path.join, sickgear.CACHE_DIR, 'imdb-pie')) + i = imdbpie.Imdb(exclude_episodes=True, cachedir=os.path.join(sickgear.CACHE_DIR, 'imdb-pie')) if not helpers.parse_imdb_id(imdb_id): - logger.log('Not a valid imdbid: %s for show: %s' % (imdb_id, self._name), logger.WARNING) + logger.warning('Not a valid imdbid: %s for show: %s' % (imdb_id, self._name)) return imdb_ratings = i.get_title_ratings(imdb_id=imdb_id) imdb_akas = i.get_title_versions(imdb_id=imdb_id) @@ -3085,8 +3073,8 @@ class TVShow(TVShowBase): ipie = getattr(imdbpie.__dict__.get('imdbpie'), '_SIMPLE_GET_ENDPOINTS', None) if ipie: ipie.update({ - u'get_title_certificates': u'/title/{imdb_id}/certificates', - u'get_title_parentalguide': u'/title/{imdb_id}/parentalguide', + 'get_title_certificates': '/title/{imdb_id}/certificates', + 'get_title_parentalguide': '/title/{imdb_id}/parentalguide', }) imdb_certificates = i.get_title_certificates(imdb_id=imdb_id) except LookupError as e: @@ -3097,17 +3085,17 @@ class TVShow(TVShowBase): indexermapper.map_indexers_to_show(self, force=True) if not retry and imdb_id != 'tt%07d' % self.ids[indexermapper.TVINFO_IMDB]['id']: # add retry arg to prevent endless loops - logger.log('imdbid: %s not found. retrying with newly found id: %s' % - (imdb_id, 'tt%07d' % self.ids[indexermapper.TVINFO_IMDB]['id']), logger.DEBUG) + logger.debug(f'imdbid: {imdb_id} not found. retrying with newly found id:' + f' {"tt%07d" % self.ids[indexermapper.TVINFO_IMDB]["id"]}') self._get_imdb_info(retry=True) return - logger.log('imdbid: %s not found. Error: %s' % (imdb_id, ex(e)), logger.WARNING) + logger.warning('imdbid: %s not found. 
Error: %s' % (imdb_id, ex(e))) return except ImdbAPIError as e: - logger.log('Imdb API Error: %s' % ex(e), logger.WARNING) + logger.warning('Imdb API Error: %s' % ex(e)) return except (BaseException, Exception) as e: - logger.log('Error: %s retrieving imdb id: %s' % (ex(e), imdb_id), logger.WARNING) + logger.warning('Error: %s retrieving imdb id: %s' % (ex(e), imdb_id)) return # ratings @@ -3154,9 +3142,9 @@ class TVShow(TVShowBase): if isinstance(imdb_tv.get('numberOfEpisodes'), (int, string_types)): imdb_info['episode_count'] = try_int(imdb_tv.get('numberOfEpisodes'), 1) if isinstance(imdb_tv.get('genres'), (list, tuple)): - imdb_info['genres'] = '|'.join(filter_iter(lambda _v: _v, imdb_tv.get('genres'))) + imdb_info['genres'] = '|'.join(filter(lambda _v: _v, imdb_tv.get('genres'))) if isinstance(imdb_tv.get('origins'), list): - imdb_info['country_codes'] = '|'.join(filter_iter(lambda _v: _v, imdb_tv.get('origins'))) + imdb_info['country_codes'] = '|'.join(filter(lambda _v: _v, imdb_tv.get('origins'))) # certificate if isinstance(imdb_certificates.get('certificates'), dict): @@ -3178,19 +3166,19 @@ class TVShow(TVShowBase): imdb_info['certificates'] = '|'.join([cert for cert in itervalues(certs_head) if cert] + sorted(certs_tail)) if (not imdb_info['certificates'] and isinstance(imdb_tv.get('certificate'), dict) and isinstance(imdb_tv.get('certificate').get('certificate'), string_types)): - imdb_info['certificates'] = '%s:%s' % (u'US', imdb_tv.get('certificate').get('certificate')) + imdb_info['certificates'] = f'US:{imdb_tv.get("certificate").get("certificate")}' imdb_info['last_update'] = datetime.date.today().toordinal() # Rename dict keys without spaces for DB upsert self.imdb_info = dict( [(k.replace(' ', '_'), k(v) if hasattr(v, 'keys') else v) for k, v in iteritems(imdb_info)]) - logger.log('%s: Obtained info from IMDb -> %s' % (self.tvid_prodid, self._imdb_info), logger.DEBUG) + logger.debug('%s: Obtained info from IMDb -> %s' % (self.tvid_prodid, self._imdb_info)) logger.log('%s: Parsed latest IMDb show info for [%s]' % (self.tvid_prodid, self._name)) def next_episode(self): - logger.log('%s: Finding the episode which airs next for: %s' % (self.tvid_prodid, self._name), logger.DEBUG) + logger.debug('%s: Finding the episode which airs next for: %s' % (self.tvid_prodid, self._name)) cur_date = datetime.date.today().toordinal() if not self.nextaired or self.nextaired and cur_date > self.nextaired: @@ -3206,11 +3194,10 @@ class TVShow(TVShowBase): """, [self.tvid, self.prodid, datetime.date.today().toordinal(), UNAIRED, WANTED, FAILED]) if None is sql_result or 0 == len(sql_result): - logger.log('%s: No episode found... need to implement a show status' % self.tvid_prodid, logger.DEBUG) + logger.debug('%s: No episode found... 
need to implement a show status' % self.tvid_prodid) self.nextaired = '' else: - logger.log('%s: Found episode %sx%s' % ( - self.tvid_prodid, sql_result[0]['season'], sql_result[0]['episode']), logger.DEBUG) + logger.debug(f'{self.tvid_prodid}: Found episode {sql_result[0]["season"]}x{sql_result[0]["episode"]}') self.nextaired = sql_result[0]['airdate'] return self.nextaired @@ -3258,7 +3245,7 @@ class TVShow(TVShowBase): action = ('delete', 'trash')[sickgear.TRASH_REMOVE_SHOW] # remove self from show list - sickgear.showList = filter_list(lambda so: so.tvid_prodid != self.tvid_prodid, sickgear.showList) + sickgear.showList = list(filter(lambda so: so.tvid_prodid != self.tvid_prodid, sickgear.showList)) try: del sickgear.showDict[self.sid_int] except (BaseException, Exception): @@ -3277,10 +3264,10 @@ class TVShow(TVShowBase): # clear the cache ic = image_cache.ImageCache() - for cache_obj in ek.ek(glob.glob, ic.fanart_path(self.tvid, self.prodid).replace('fanart.jpg', '*')) \ - + ek.ek(glob.glob, ic.poster_thumb_path(self.tvid, self.prodid).replace('poster.jpg', '*')) \ - + ek.ek(glob.glob, ic.poster_path(self.tvid, self.prodid).replace('poster.jpg', '*')): - cache_dir = ek.ek(os.path.isdir, cache_obj) + for cache_obj in glob.glob(ic.fanart_path(self.tvid, self.prodid).replace('fanart.jpg', '*')) \ + + glob.glob(ic.poster_thumb_path(self.tvid, self.prodid).replace('poster.jpg', '*')) \ + + glob.glob(ic.poster_path(self.tvid, self.prodid).replace('poster.jpg', '*')): + cache_dir = os.path.isdir(cache_obj) result = helpers.remove_file(cache_obj, tree=cache_dir, log_level=logger.WARNING) if result: logger.log('%s cache %s %s' % (result, cache_dir and 'dir' or 'file', cache_obj)) @@ -3293,23 +3280,23 @@ class TVShow(TVShowBase): try: logger.log('Attempt to %s show folder %s' % (action, self._location)) # check first the read-only attribute - file_attribute = ek.ek(os.stat, self.location)[0] + file_attribute = os.stat(self.location)[0] if not file_attribute & stat.S_IWRITE: # File is read-only, so make it writeable - logger.log('Attempting to make writeable the read only folder %s' % self._location, logger.DEBUG) + logger.debug('Attempting to make writeable the read only folder %s' % self._location) try: - ek.ek(os.chmod, self.location, stat.S_IWRITE) + os.chmod(self.location, stat.S_IWRITE) except (BaseException, Exception): - logger.log('Unable to change permissions of %s' % self._location, logger.WARNING) + logger.warning('Unable to change permissions of %s' % self._location) result = helpers.remove_file(self.location, tree=True) if result: logger.log('%s show folder %s' % (result, self._location)) except exceptions_helper.ShowDirNotFoundException: - logger.log('Show folder does not exist, no need to %s %s' % (action, self._location), logger.WARNING) + logger.warning('Show folder does not exist, no need to %s %s' % (action, self._location)) except OSError as e: - logger.log('Unable to %s %s: %s / %s' % (action, self._location, repr(e), ex(e)), logger.WARNING) + logger.warning('Unable to %s %s: %s / %s' % (action, self._location, repr(e), ex(e))) def populate_cache(self, force=False): # type: (bool) -> None @@ -3325,7 +3312,7 @@ class TVShow(TVShowBase): def refresh_dir(self): # make sure the show dir is where we think it is unless dirs are created on the fly - if not ek.ek(os.path.isdir, self._location) and not sickgear.CREATE_MISSING_SHOW_DIRS: + if not os.path.isdir(self._location) and not sickgear.CREATE_MISSING_SHOW_DIRS: return False # load from dir @@ -3352,50 +3339,48 @@ class 
TVShow(TVShowBase): for cur_row in sql_result: season = int(cur_row['season']) episode = int(cur_row['episode']) - location = ek.ek(os.path.normpath, cur_row['location']) + location = os.path.normpath(cur_row['location']) try: ep_obj = self.get_episode(season, episode, ep_result=[cur_row]) except exceptions_helper.EpisodeDeletedException: - logger.log('The episode from [%s] was deleted while we were refreshing it, moving on to the next one' - % self._name, logger.DEBUG) + logger.debug(f'The episode from [{self._name}] was deleted while we were refreshing it,' + f' moving on to the next one') continue # if the path exist and if it's in our show dir if (self.prune and season and ep_obj.location not in attempted and 0 < helpers.get_size(ep_obj.location) and - ek.ek(os.path.normpath, location).startswith(ek.ek(os.path.normpath, self.location))): + os.path.normpath(location).startswith(os.path.normpath(self.location))): with ep_obj.lock: if ep_obj.status in Quality.DOWNLOADED: # locations repeat but attempt to delete once attempted += ep_obj.location if kept >= self.prune: - result = helpers.remove_file(ep_obj.location, prefix_failure=u'%s: ' % self.tvid_prodid) + result = helpers.remove_file(ep_obj.location, prefix_failure=f'{self.tvid_prodid}: ') if result: - logger.log(u'%s: %s file %s' % (self.tvid_prodid, result, ep_obj.location), - logger.DEBUG) + logger.debug(f'{self.tvid_prodid}: {result} file {ep_obj.location}') deleted += 1 else: kept += 1 # if the path doesn't exist or if it's not in our show dir - if not ek.ek(os.path.isfile, location) or not ek.ek(os.path.normpath, location).startswith( - ek.ek(os.path.normpath, self.location)): + if not os.path.isfile(location) or not os.path.normpath(location).startswith( + os.path.normpath(self.location)): # check if downloaded files still exist, update our data if this has changed if 1 != sickgear.SKIP_REMOVED_FILES: with ep_obj.lock: - # if it used to have a file associated with it and it doesn't anymore then set it to IGNORED + # if it used to have a file associated with it, and it doesn't anymore then set it to IGNORED if ep_obj.location and ep_obj.status in Quality.DOWNLOADED: if ARCHIVED == sickgear.SKIP_REMOVED_FILES: - ep_obj.status = Quality.compositeStatus( - ARCHIVED, Quality.qualityDownloaded(ep_obj.status)) + ep_obj.status = Quality.composite_status( + ARCHIVED, Quality.quality_downloaded(ep_obj.status)) else: ep_obj.status = (sickgear.SKIP_REMOVED_FILES, IGNORED)[ not sickgear.SKIP_REMOVED_FILES] - logger.log( - '%s: File no longer at location for s%02de%02d,' % (self.tvid_prodid, season, episode) - + ' episode removed and status changed to %s' % statusStrings[ep_obj.status], - logger.DEBUG) + logger.debug(f'{self.tvid_prodid}: File no longer at location for' + f' s{season:02d}e{episode:02d}, episode removed' + f' and status changed to {statusStrings[ep_obj.status]}') ep_obj.subtitles = list() ep_obj.subtitles_searchcount = 0 ep_obj.subtitles_lastsearch = str(datetime.datetime.min) @@ -3428,10 +3413,10 @@ class TVShow(TVShowBase): :param force: """ # TODO: Add support for force option - if not ek.ek(os.path.isdir, self._location): - logger.log('%s: Show directory doesn\'t exist, can\'t download subtitles' % self.tvid_prodid, logger.DEBUG) + if not os.path.isdir(self._location): + logger.debug('%s: Show directory doesn\'t exist, can\'t download subtitles' % self.tvid_prodid) return - logger.log('%s: Downloading subtitles' % self.tvid_prodid, logger.DEBUG) + logger.debug('%s: Downloading subtitles' % self.tvid_prodid) try: my_db = 
db.DBConnection() @@ -3447,7 +3432,7 @@ class TVShow(TVShowBase): ep_obj = self.ep_obj_from_file(cur_row['location']) _ = ep_obj.download_subtitles(force=force) except (BaseException, Exception): - logger.log('Error occurred when downloading subtitles: %s' % traceback.format_exc(), logger.ERROR) + logger.error('Error occurred when downloading subtitles: %s' % traceback.format_exc()) return def remove_character_images(self): @@ -3527,14 +3512,13 @@ class TVShow(TVShowBase): save_mapping(self) name_cache.remove_from_namecache(old_tvid, old_prodid) - image_cache_dir = ek.ek(os.path.join, sickgear.CACHE_DIR, 'images', 'shows') - old_dir = ek.ek(os.path.join, image_cache_dir, '%s-%s' % (old_tvid, old_prodid)) - new_dir = ek.ek(os.path.join, image_cache_dir, '%s-%s' % (self.tvid, self.prodid)) + image_cache_dir = os.path.join(sickgear.CACHE_DIR, 'images', 'shows') + old_dir = os.path.join(image_cache_dir, '%s-%s' % (old_tvid, old_prodid)) + new_dir = os.path.join(image_cache_dir, '%s-%s' % (self.tvid, self.prodid)) try: - ek.ek(os.rename, old_dir, new_dir) + os.rename(old_dir, new_dir) except (BaseException, Exception) as e: - logger.log('Unable to rename %s to %s: %s / %s' % (old_dir, new_dir, repr(e), ex(e)), - logger.WARNING) + logger.warning('Unable to rename %s to %s: %s / %s' % (old_dir, new_dir, repr(e), ex(e))) old_id = TVidProdid({old_tvid: old_prodid})() rating = sickgear.FANART_RATINGS.get(old_id) @@ -3543,7 +3527,7 @@ class TVShow(TVShowBase): sickgear.FANART_RATINGS[self.tvid_prodid] = rating sickgear.save_config() - name_cache.buildNameCache(self) + name_cache.build_name_cache(self) self.reset_not_found_count() old_sid_int = self.create_sid(old_tvid, old_prodid) if old_sid_int != self.sid_int: @@ -3557,11 +3541,11 @@ class TVShow(TVShowBase): if update_show: # force the update try: - sickgear.show_queue_scheduler.action.updateShow( + sickgear.show_queue_scheduler.action.update_show( self, force=True, web=True, priority=QueuePriorities.VERYHIGH, pausestatus_after=pausestatus_after, switch_src=True) except exceptions_helper.CantUpdateException as e: - logger.log('Unable to update this show. %s' % ex(e), logger.ERROR) + logger.error('Unable to update this show. 
%s' % ex(e)) def save_to_db(self, force_save=False): # type: (bool) -> None @@ -3570,10 +3554,10 @@ class TVShow(TVShowBase): :param force_save: """ if not self.dirty and not force_save: - logger.log('%s: Not saving show to db - record is not dirty' % self.tvid_prodid, logger.DEBUG) + logger.debug('%s: Not saving show to db - record is not dirty' % self.tvid_prodid) return - logger.log('%s: Saving show info to database' % self.tvid_prodid, logger.DEBUG) + logger.debug('%s: Saving show info to database' % self.tvid_prodid) new_value_dict = dict( air_by_date=self._air_by_date, @@ -3670,45 +3654,46 @@ class TVShow(TVShowBase): :param multi_ep: multiple episodes :return: """ - logger.log('Checking if found %sepisode %sx%s is wanted at quality %s' % - (('', 'multi-part ')[multi_ep], season, episode, Quality.qualityStrings[quality]), logger.DEBUG) + logger.debug(f'Checking if found {("", "multi-part ")[multi_ep]}episode {season}x{episode}' + f' is wanted at quality {Quality.qualityStrings[quality]}') if not multi_ep: try: wq = getattr(self.sxe_ep_obj.get(season, {}).get(episode, {}), 'wanted_quality', None) if None is not wq: if quality in wq: - cur_status, cur_quality = Quality.splitCompositeStatus(self.sxe_ep_obj[season][episode].status) + cur_status, cur_quality = Quality.split_composite_status(self.sxe_ep_obj[season][episode].status) if cur_status in (WANTED, UNAIRED, SKIPPED, FAILED): - logger.log('Existing episode status is wanted/unaired/skipped/failed,' - ' getting found episode', logger.DEBUG) + logger.debug('Existing episode status is wanted/unaired/skipped/failed,' + ' getting found episode') return True elif manual_search: - logger.log('Usually ignoring found episode, but forced search allows the quality,' - ' getting found episode', logger.DEBUG) + logger.debug('Usually ignoring found episode, but forced search allows the quality,' + ' getting found episode') return True elif quality > cur_quality: - logger.log( - 'Episode already exists but the found episode has better quality,' - ' getting found episode', logger.DEBUG) + logger.debug('Episode already exists but the found episode has better quality,' + ' getting found episode') return True - logger.log('None of the conditions were met, ignoring found episode', logger.DEBUG) + logger.debug('None of the conditions were met,' + ' ignoring found episode') return False except (BaseException, Exception): pass # if the quality isn't one we want under any circumstances then just say no - initial_qualities, archive_qualities = Quality.splitQuality(self._quality) + initial_qualities, archive_qualities = Quality.split_quality(self._quality) all_qualities = list(set(initial_qualities + archive_qualities)) initial = '= (%s)' % ','.join([Quality.qualityStrings[qual] for qual in initial_qualities]) if 0 < len(archive_qualities): initial = '+ upgrade to %s + (%s)'\ % (initial, ','.join([Quality.qualityStrings[qual] for qual in archive_qualities])) - logger.log('Want initial %s and found %s' % (initial, Quality.qualityStrings[quality]), logger.DEBUG) + logger.debug('Want initial %s and found %s' % (initial, Quality.qualityStrings[quality])) if quality not in all_qualities: - logger.log('Don\'t want this quality, ignoring found episode', logger.DEBUG) + logger.debug('Don\'t want this quality,' + ' ignoring found episode') return False my_db = db.DBConnection() @@ -3720,34 +3705,33 @@ class TVShow(TVShowBase): """, [self.tvid, self.prodid, season, episode]) if not sql_result or not len(sql_result): - logger.log('Unable to find a matching episode in 
database, ignoring found episode', logger.DEBUG) + logger.debug('Unable to find a matching episode in database,' + ' ignoring found episode') return False - cur_status, cur_quality = Quality.splitCompositeStatus(int(sql_result[0]['status'])) + cur_status, cur_quality = Quality.split_composite_status(int(sql_result[0]['status'])) ep_status_text = statusStrings[cur_status] - logger.log('Existing episode status: %s (%s)' % (statusStrings[cur_status], ep_status_text), logger.DEBUG) + logger.debug('Existing episode status: %s (%s)' % (statusStrings[cur_status], ep_status_text)) # if we know we don't want it then just say no if cur_status in [IGNORED, ARCHIVED] + ([SKIPPED], [])[multi_ep] and not manual_search: - logger.log('Existing episode status is %signored/archived, ignoring found episode' % - ('skipped/', '')[multi_ep], logger.DEBUG) + logger.debug(f'Existing episode status is {("skipped/", "")[multi_ep]}ignored/archived,' + f' ignoring found episode') return False # if it's one of these then we want it as long as it's in our allowed initial qualities if quality in all_qualities: if cur_status in [WANTED, UNAIRED, SKIPPED, FAILED] + ([], SNATCHED_ANY)[multi_ep]: - logger.log('Existing episode status is wanted/unaired/skipped/failed, getting found episode', - logger.DEBUG) + logger.debug('Existing episode status is wanted/unaired/skipped/failed,' + ' getting found episode') return True elif manual_search: - logger.log( - 'Usually ignoring found episode, but forced search allows the quality, getting found episode', - logger.DEBUG) + logger.debug('Usually ignoring found episode, but forced search allows the quality,' + ' getting found episode') return True else: - logger.log('Quality is on wanted list, need to check if it\'s better than existing quality', - logger.DEBUG) + logger.debug('Quality is on wanted list, need to check if it\'s better than existing quality') downloaded_status_list = SNATCHED_ANY + [DOWNLOADED] # special case: already downloaded quality is not in any of the wanted Qualities @@ -3758,14 +3742,14 @@ class TVShow(TVShowBase): # if re-downloading then only keep items in the archiveQualities list and better than what we have if cur_status in downloaded_status_list and quality in wanted_qualities and quality > cur_quality: - logger.log('Episode already exists but the found episode has better quality, getting found episode', - logger.DEBUG) + logger.debug('Episode already exists but the found episode has better quality,' + ' getting found episode') return True else: - logger.log('Episode already exists and the found episode has same/lower quality, ignoring found episode', - logger.DEBUG) + logger.debug('Episode already exists and the found episode has same/lower quality,' + ' ignoring found episode') - logger.log('None of the conditions were met, ignoring found episode', logger.DEBUG) + logger.debug('None of the conditions were met, ignoring found episode') return False def get_overview(self, ep_status, split_snatch=False): @@ -3940,13 +3924,13 @@ class TVEpisode(TVEpisodeBase): def _set_location(self, val): log_vals = (('clears', ''), ('sets', ' to ' + val))[any(val)] # noinspection PyStringFormat - logger.log(u'Setter %s location%s' % log_vals, logger.DEBUG) + logger.debug('Setter %s location%s' % log_vals) # self._location = newLocation self.dirty_setter('_location')(self, val) - if val and ek.ek(os.path.isfile, val): - self.file_size = ek.ek(os.path.getsize, val) + if val and os.path.isfile(val): + self.file_size = os.path.getsize(val) else: self.file_size = 0 @@ -3969,12 
+3953,11 @@ class TVEpisode(TVEpisodeBase): return # TODO: Add support for force option - if not ek.ek(os.path.isfile, self.location): - logger.log('%s: Episode file doesn\'t exist, can\'t download subtitles for episode %sx%s' % - (self.show_obj.tvid_prodid, self.season, self.episode), logger.DEBUG) + if not os.path.isfile(self.location): + logger.debug(f'{self.show_obj.tvid_prodid}: Episode file doesn\'t exist,' + f' can\'t download subtitles for episode {self.season}x{self.episode}') return - logger.log('%s: Downloading subtitles for episode %sx%s' - % (self.show_obj.tvid_prodid, self.season, self.episode), logger.DEBUG) + logger.debug(f'{self.show_obj.tvid_prodid}: Downloading subtitles for episode {self.season}x{self.episode}') previous_subtitles = self.subtitles @@ -3988,15 +3971,15 @@ class TVEpisode(TVEpisodeBase): if sickgear.SUBTITLES_DIR: for video in subs: - subs_new_path = ek.ek(os.path.join, ek.ek(os.path.dirname, video.path), sickgear.SUBTITLES_DIR) + subs_new_path = os.path.join(os.path.dirname(video.path), sickgear.SUBTITLES_DIR) dir_exists = helpers.make_dir(subs_new_path) if not dir_exists: - logger.log('Unable to create subtitles folder %s' % subs_new_path, logger.ERROR) + logger.error('Unable to create subtitles folder %s' % subs_new_path) else: helpers.chmod_as_parent(subs_new_path) for subtitle in subs.get(video): - new_file_path = ek.ek(os.path.join, subs_new_path, ek.ek(os.path.basename, subtitle.path)) + new_file_path = os.path.join(subs_new_path, os.path.basename(subtitle.path)) helpers.move_file(subtitle.path, new_file_path) helpers.chmod_as_parent(new_file_path) else: @@ -4005,11 +3988,11 @@ class TVEpisode(TVEpisodeBase): helpers.chmod_as_parent(subtitle.path) except (BaseException, Exception): - logger.log('Error occurred when downloading subtitles: %s' % traceback.format_exc(), logger.ERROR) + logger.error('Error occurred when downloading subtitles: %s' % traceback.format_exc()) return self.refresh_subtitles() - # added the if because sometime it raises an error + # added the if because sometimes it raises an error self.subtitles_searchcount = self.subtitles_searchcount + 1 if self.subtitles_searchcount else 1 self.subtitles_lastsearch = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') self.save_to_db() @@ -4020,17 +4003,17 @@ class TVEpisode(TVEpisodeBase): try: subtitle_list = ", ".join([subliminal.language.Language(x).name for x in newsubtitles]) except (BaseException, Exception): - logger.log('Could not parse a language to use to fetch subtitles for episode %sx%s' % - (self.season, self.episode), logger.DEBUG) + logger.debug(f'Could not parse a language to use to fetch subtitles' + f' for episode {self.season}x{self.episode}') return - logger.log('%s: Downloaded %s subtitles for episode %sx%s' % - (self.show_obj.tvid_prodid, subtitle_list, self.season, self.episode), logger.DEBUG) + logger.debug(f'{self.show_obj.tvid_prodid}: Downloaded {subtitle_list} subtitles' + f' for episode {self.season}x{self.episode}') notifiers.notify_subtitle_download(self, subtitle_list) else: - logger.log('%s: No subtitles downloaded for episode %sx%s' - % (self.show_obj.tvid_prodid, self.season, self.episode), logger.DEBUG) + logger.debug(f'{self.show_obj.tvid_prodid}: No subtitles downloaded' + f' for episode {self.season}x{self.episode}') if sickgear.SUBTITLES_HISTORY: for video in subs: @@ -4053,7 +4036,7 @@ class TVEpisode(TVEpisodeBase): hastbn = False # check for nfo and tbn - if ek.ek(os.path.isfile, self.location): + if os.path.isfile(self.location): for 
cur_provider in itervalues(sickgear.metadata_provider_dict): if cur_provider.episode_metadata: new_result = cur_provider.has_episode_metadata(self) @@ -4086,12 +4069,12 @@ class TVEpisode(TVEpisodeBase): """ if not self.load_from_db(season, episode, **kwargs): # only load from NFO if we didn't load from DB - if ek.ek(os.path.isfile, self.location): + if os.path.isfile(self.location): try: self.load_from_nfo(self.location) except exceptions_helper.NoNFOException: - logger.log('%s: There was an error loading the NFO for episode %sx%s' % - (self.show_obj.tvid_prodid, season, episode), logger.ERROR) + logger.error(f'{self.show_obj.tvid_prodid}: There was an error loading the NFO' + f' for episode {season}x{episode}') pass # if we tried loading it from NFO and didn't find the NFO, try the Indexers @@ -4116,8 +4099,7 @@ class TVEpisode(TVEpisodeBase): :param episode: episode number :param show_result: """ - logger.log('%s: Loading episode details from DB for episode %sx%s' - % (self._show_obj.tvid_prodid, season, episode), logger.DEBUG) + logger.debug(f'{self._show_obj.tvid_prodid}: Loading episode details from DB for episode {season}x{episode}') show_result = show_result and next(iter(show_result), None) if not show_result or episode != show_result['episode'] or season != show_result['season']: @@ -4134,8 +4116,8 @@ class TVEpisode(TVEpisodeBase): if len(sql_result): raise exceptions_helper.MultipleDBEpisodesException('DB has multiple records for the same show') - logger.log('%s: Episode %sx%s not found in the database' - % (self._show_obj.tvid_prodid, self._season, self._episode), logger.DEBUG) + logger.debug(f'{self._show_obj.tvid_prodid}: Episode {self._season}x{self._episode}' + f' not found in the database') return False show_result = next(iter(sql_result)) @@ -4169,7 +4151,7 @@ class TVEpisode(TVEpisodeBase): self._subtitles_searchcount = show_result['subtitles_searchcount'] self._timestamp = show_result['timestamp'] or self._make_timestamp() self._version = self._version if not show_result['version'] else int(show_result['version']) - self.location = show_result['location'] and ek.ek(os.path.normpath, show_result['location']) or self.location + self.location = show_result['location'] and os.path.normpath(show_result['location']) or self.location if None is not show_result['release_group']: self._release_group = show_result['release_group'] @@ -4222,8 +4204,6 @@ class TVEpisode(TVEpisodeBase): tzinfo = self._show_obj.timezone elif isinstance(self._show_obj.network, string_types) and self._show_obj.network: tzinfo = network_timezones.get_network_timezone(self._show_obj.network) - if PY2: - return SGDatetime.combine(self.airdate, ep_time).replace(tzinfo=tzinfo).timestamp_far() return SGDatetime.combine(self.airdate, ep_time, tzinfo=tzinfo).timestamp_far() return None @@ -4259,9 +4239,8 @@ class TVEpisode(TVEpisodeBase): if None is episode: episode = self._episode - logger.log('%s: Loading episode details from %s for episode %sx%s' % - (self._show_obj.tvid_prodid, sickgear.TVInfoAPI(self._show_obj.tvid).name, season, episode), - logger.DEBUG) + logger.debug(f'{self._show_obj.tvid_prodid}: Loading episode details from' + f' {sickgear.TVInfoAPI(self._show_obj.tvid).name} for episode {season}x{episode}') try: if cached_show: @@ -4290,35 +4269,34 @@ class TVEpisode(TVEpisodeBase): ep_info = cached_season[episode] # type: TVInfoEpisode except (BaseTVinfoEpisodenotfound, BaseTVinfoSeasonnotfound): - logger.log('Unable to find the episode on %s... has it been removed? Should I delete from db?' 
% - sickgear.TVInfoAPI(self.tvid).name, logger.DEBUG) - # if I'm no longer on the Indexers but I once was then delete myself from the DB + logger.debug(f'Unable to find the episode on {sickgear.TVInfoAPI(self.tvid).name}...' + f' has it been removed? Should it be deleted from the db?') + # if no longer on the Indexers, but once was, then delete it from the DB if -1 != self._epid and helpers.should_delete_episode(self._status): self.delete_episode() elif UNKNOWN == self._status: self.status = SKIPPED return except (BaseTVinfoError, IOError) as e: - logger.log('%s threw up an error: %s' % (sickgear.TVInfoAPI(self.tvid).name, ex(e)), logger.DEBUG) + logger.debug('%s threw up an error: %s' % (sickgear.TVInfoAPI(self.tvid).name, ex(e))) # if the episode is already valid just log it, if not throw it up if UNKNOWN == self._status: self.status = SKIPPED if self._name: - logger.log('%s timed out but there is enough info from other sources, allowing the error' % - sickgear.TVInfoAPI(self.tvid).name, logger.DEBUG) + logger.debug(f'{sickgear.TVInfoAPI(self.tvid).name}' + f' timed out but there is enough info from other sources, allowing the error') return - logger.log('%s timed out, unable to create the episode' % sickgear.TVInfoAPI(self.tvid).name, - logger.ERROR) + logger.error('%s timed out, unable to create the episode' % sickgear.TVInfoAPI(self.tvid).name) return False if getattr(ep_info, 'absolute_number', None) in (None, ''): logger.debug('This episode (%s - %sx%s) has no absolute number on %s' % (self.show_obj.unique_name, season, episode, sickgear.TVInfoAPI(self.tvid).name)) else: - logger.log('%s: The absolute_number for %sx%s is : %s' % - (self._show_obj.tvid_prodid, season, episode, ep_info['absolute_number']), logger.DEBUG) + logger.debug(f'{self._show_obj.tvid_prodid}:' + f' The absolute_number for {season}x{episode} is : {ep_info["absolute_number"]}') self.absolute_number = int(ep_info['absolute_number']) if switch and None is not switch_list: @@ -4352,7 +4330,7 @@ class TVEpisode(TVEpisodeBase): except (ValueError, IndexError): logger.error('Malformed air date retrieved from %s (%s - %sx%s)' % (sickgear.TVInfoAPI(self.tvid).name, self.show_obj.unique_name, season, episode)) - # if I'm incomplete on TVDB but I once was complete then just delete myself from the DB for now + # if I'm incomplete on TVDB, but I once was complete then just delete myself from the DB for now if -1 != self._epid and helpers.should_delete_episode(self._status): self.delete_episode() elif UNKNOWN == self._status: @@ -4407,7 +4385,7 @@ class TVEpisode(TVEpisodeBase): # early conversion to int so that episode doesn't get marked dirty self.epid = getattr(ep_info, 'id', None) if None is self._epid: - logger.log('Failed to retrieve ID from %s' % sickgear.TVInfoAPI(self.tvid).name, logger.ERROR) + logger.error('Failed to retrieve ID from %s' % sickgear.TVInfoAPI(self.tvid).name) if helpers.should_delete_episode(self._status): self.delete_episode() elif UNKNOWN == self._status: @@ -4416,7 +4394,7 @@ class TVEpisode(TVEpisodeBase): # don't update show status if show dir is missing, unless it's missing on purpose # noinspection PyProtectedMember - if not ek.ek(os.path.isdir, self._show_obj._location) \ + if not os.path.isdir(self._show_obj._location) \ and not sickgear.CREATE_MISSING_SHOW_DIRS and not sickgear.ADD_SHOWS_WO_DIR: if UNKNOWN == self._status: self.status = (SKIPPED, UNAIRED)[future_airtime] @@ -4427,12 +4405,11 @@ class TVEpisode(TVEpisodeBase): return if self._location: - logger.log('%s: Setting status for 
%sx%s based on status %s and existence of %s' % - (self._show_obj.tvid_prodid, season, episode, statusStrings[self._status], self._location), - logger.DEBUG) + logger.debug(f'{self._show_obj.tvid_prodid}: Setting status for {season}x{episode}' + f' based on status {statusStrings[self._status]} and existence of {self._location}') # if we don't have the file - if not ek.ek(os.path.isfile, self._location): + if not os.path.isfile(self._location): if self._status in [SKIPPED, UNAIRED, UNKNOWN, WANTED]: very_old_delta = datetime.timedelta(days=90) @@ -4474,24 +4451,24 @@ class TVEpisode(TVEpisodeBase): else: msg = 'Not touching episode status %s, because there is no file' - logger.log(msg % statusStrings[self._status], logger.DEBUG) + logger.debug(msg % statusStrings[self._status]) # if we have a media file then it's downloaded elif sickgear.helpers.has_media_ext(self._location): if IGNORED == self._status: - logger.log('File exists for %sx%s, ignoring because of status %s' % - (self._season, self._episode, statusStrings[self._status]), logger.DEBUG) + logger.debug(f'File exists for {self._season}x{self._episode},' + f' ignoring because of status {statusStrings[self._status]}') # leave propers alone, you have to either post-process them or manually change them back elif self._status not in Quality.SNATCHED_ANY + Quality.DOWNLOADED + Quality.ARCHIVED: msg = '(1) Status changes from %s to ' % statusStrings[self._status] - self.status = Quality.statusFromNameOrFile(self._location, anime=self._show_obj.is_anime) - logger.log('%s%s' % (msg, statusStrings[self._status]), logger.DEBUG) + self.status = Quality.status_from_name_or_file(self._location, anime=self._show_obj.is_anime) + logger.debug('%s%s' % (msg, statusStrings[self._status])) # shouldn't get here probably else: msg = '(2) Status changes from %s to ' % statusStrings[self._status] self.status = UNKNOWN - logger.log('%s%s' % (msg, statusStrings[self._status]), logger.DEBUG) + logger.debug('%s%s' % (msg, statusStrings[self._status])) def load_from_nfo(self, location): """ @@ -4500,38 +4477,36 @@ class TVEpisode(TVEpisodeBase): :type location: AnyStr """ # noinspection PyProtectedMember - if not ek.ek(os.path.isdir, self._show_obj._location): + if not os.path.isdir(self._show_obj._location): logger.log('%s: The show directory is missing, not bothering to try loading the episode NFO' % self._show_obj.tvid_prodid) return - logger.log('%s: Loading episode details from the NFO file associated with %s' - % (self.show_obj.tvid_prodid, location), logger.DEBUG) + logger.debug(f'{self.show_obj.tvid_prodid}' + f': Loading episode details from the NFO file associated with {location}') self.location = location if '' != self.location: if UNKNOWN == self._status and sickgear.helpers.has_media_ext(self.location): - status_quality = Quality.statusFromNameOrFile(self.location, anime=self._show_obj.is_anime) - logger.log('(3) Status changes from %s to %s' % (self._status, status_quality), logger.DEBUG) + status_quality = Quality.status_from_name_or_file(self.location, anime=self._show_obj.is_anime) + logger.debug('(3) Status changes from %s to %s' % (self._status, status_quality)) self.status = status_quality nfo_file = sickgear.helpers.replace_extension(self.location, 'nfo') - logger.log('%s: Using NFO name %s' % (self._show_obj.tvid_prodid, nfo_file), logger.DEBUG) + logger.debug('%s: Using NFO name %s' % (self._show_obj.tvid_prodid, nfo_file)) - if ek.ek(os.path.isfile, nfo_file): + if os.path.isfile(nfo_file): try: show_xml = 
etree.ElementTree(file=nfo_file) except (SyntaxError, ValueError) as e: - logger.log('Error loading the NFO, backing up the NFO and skipping for now: %s' % ex(e), - logger.ERROR) # TODO: figure out what's wrong and fix it + # TODO: figure out what's wrong and fix it + logger.error('Error loading the NFO, backing up the NFO and skipping for now: %s' % ex(e)) try: - ek.ek(os.rename, nfo_file, '%s.old' % nfo_file) + os.rename(nfo_file, '%s.old' % nfo_file) except (BaseException, Exception) as e: - logger.log( - 'Failed to rename your episode\'s NFO file - you need to delete it or fix it: %s' % ex(e), - logger.ERROR) + logger.error(f'Failed to rename episode\'s NFO file - you need to delete it or fix it: {ex(e)}') raise exceptions_helper.NoNFOException('Error in NFO format') # TODO: deprecated function getiterator needs to be replaced @@ -4540,10 +4515,10 @@ class TVEpisode(TVEpisodeBase): if None is epDetails.findtext('season') or int(epDetails.findtext('season')) != self._season or \ None is epDetails.findtext('episode') or int( epDetails.findtext('episode')) != self._episode: - logger.log('%s: NFO has an block for a different episode - wanted %sx%s' - ' but got %sx%s' % - (self._show_obj.tvid_prodid, self._season, self._episode, - epDetails.findtext('season'), epDetails.findtext('episode')), logger.DEBUG) + logger.debug(f'{self._show_obj.tvid_prodid}' + f': NFO has an block for a different episode - wanted' + f' {self._season}x{self._episode}' + f' but got {epDetails.findtext("season")}x{epDetails.findtext("episode")}') continue if None is epDetails.findtext('title') or None is epDetails.findtext('aired'): @@ -4578,7 +4553,7 @@ class TVEpisode(TVEpisodeBase): else: self.hasnfo = False - if ek.ek(os.path.isfile, sickgear.helpers.replace_extension(nfo_file, 'tbn')): + if os.path.isfile(sickgear.helpers.replace_extension(nfo_file, 'tbn')): self.hastbn = True else: self.hastbn = False @@ -4615,7 +4590,7 @@ class TVEpisode(TVEpisodeBase): def create_meta_files(self, force=False): # noinspection PyProtectedMember - if not ek.ek(os.path.isdir, self.show_obj._location): + if not os.path.isdir(self.show_obj._location): logger.log('%s: The show directory is missing, not bothering to try to create metadata' % self.show_obj.tvid_prodid) return @@ -4664,11 +4639,11 @@ class TVEpisode(TVEpisodeBase): # remove myself from the show dictionary if self.show_obj.get_episode(self._season, self._episode, no_create=True) == self: - logger.log('Removing myself from my show\'s list', logger.DEBUG) + logger.debug('Removing myself from my show\'s list') del self.show_obj.sxe_ep_obj[self._season][self._episode] # delete myself from the DB - logger.log('Deleting myself from the database', logger.DEBUG) + logger.debug('Deleting myself from the database') sql = [['DELETE FROM tv_episodes WHERE indexer = ? AND showid = ? AND season = ? 
AND episode = ?', [self._show_obj.tvid, self._show_obj.prodid, self._season, self._episode]]] @@ -4690,7 +4665,7 @@ class TVEpisode(TVEpisodeBase): """ if not self.dirty and not force_save: - logger.log('%s: Not creating SQL queue - record is not dirty' % self._show_obj.tvid_prodid, logger.DEBUG) + logger.debug('%s: Not creating SQL queue - record is not dirty' % self._show_obj.tvid_prodid) return self.dirty = False @@ -4750,12 +4725,12 @@ class TVEpisode(TVEpisodeBase): """ if not self.dirty and not force_save: - logger.log('%s: Not saving episode to db - record is not dirty' % self._show_obj.tvid_prodid, logger.DEBUG) + logger.debug('%s: Not saving episode to db - record is not dirty' % self._show_obj.tvid_prodid) return - logger.log('%s: Saving episode details to database' % self._show_obj.tvid_prodid, logger.DEBUG) + logger.debug('%s: Saving episode details to database' % self._show_obj.tvid_prodid) - logger.log('STATUS IS %s' % statusStrings[self._status], logger.DEBUG) + logger.debug('STATUS IS %s' % statusStrings[self._status]) new_value_dict = dict( absolute_number=self._absolute_number, @@ -4799,7 +4774,7 @@ class TVEpisode(TVEpisodeBase): # def full_location(self): # if self.location in (None, ''): # return None - # return ek.ek(os.path.join, self.show_obj.location, self.location) + # return os.path.join(self.show_obj.location, self.location) # # # TODO: remove if unused # def create_strings(self, pattern=None): @@ -4841,8 +4816,8 @@ class TVEpisode(TVEpisodeBase): def _ep_name(self): """ :return: the name of the episode to use during renaming. Combines the names of related episodes. - Eg. "Ep Name (1)" and "Ep Name (2)" becomes "Ep Name" - "Ep Name" and "Other Ep Name" becomes "Ep Name & Other Ep Name" + E.g. "Ep Name (1)" and "Ep Name (2)" becomes "Ep Name" + "Ep Name" and "Other Ep Name" becomes "Ep Name & Other Ep Name" :rtype: AnyStr """ @@ -4908,14 +4883,14 @@ class TVEpisode(TVEpisodeBase): np = NameParser(name, show_obj=show_obj, naming_pattern=True) parse_result = np.parse(name) except (InvalidNameException, InvalidShowException) as e: - logger.log('Unable to get parse release_group: %s' % ex(e), logger.DEBUG) + logger.debug('Unable to get parse release_group: %s' % ex(e)) return '' if not parse_result.release_group: return '' return parse_result.release_group - ep_status, ep_qual = Quality.splitCompositeStatus(self._status) + ep_status, ep_qual = Quality.split_composite_status(self._status) if sickgear.NAMING_STRIP_YEAR: show_name = re.sub(r'\(\d+\)$', '', self._show_obj.name).rstrip() @@ -4966,7 +4941,7 @@ class TVEpisode(TVEpisodeBase): result_name = pattern # do the replacements - for cur_replacement in sorted(list_keys(replace_map), reverse=True): + for cur_replacement in sorted(list(replace_map), reverse=True): result_name = result_name.replace(cur_replacement, helpers.sanitize_filename(replace_map[cur_replacement])) result_name = result_name.replace(cur_replacement.lower(), helpers.sanitize_filename(replace_map[cur_replacement].lower())) @@ -5005,7 +4980,7 @@ class TVEpisode(TVEpisodeBase): result_name = result_name.replace('%RG', 'SickGear') result_name = result_name.replace('%rg', 'SickGear') - logger.log('Episode has no release name, replacing it with a generic one: %s' % result_name, logger.DEBUG) + logger.debug('Episode has no release name, replacing it with a generic one: %s' % result_name) if not replace_map['%RT']: result_name = re.sub('([ _.-]*)%RT([ _.-]*)', r'\2', result_name) @@ -5061,7 +5036,7 @@ class TVEpisode(TVEpisodeBase): if not ep_sep or not 
ep_format: continue - # start with the ep string, eg. E03 + # start with the ep string, e.g. E03 ep_string = self._format_string(ep_format.upper(), replace_map) for cur_ep_obj in self.related_ep_obj: @@ -5089,7 +5064,7 @@ class TVEpisode(TVEpisodeBase): if 3 != anime_type: absolute_number = (self._absolute_number, self._episode)[0 == self._absolute_number] - if 0 != self._season: # dont set absolute numbers if we are on specials ! + if 0 != self._season: # don't set absolute numbers if we are on specials ! if 1 == anime_type: # this crazy person wants both ! (note: +=) ep_string += sep + '%(#)03d' % {'#': absolute_number} elif 2 == anime_type: # total anime freak only need the absolute number ! (note: =) @@ -5113,14 +5088,14 @@ class TVEpisode(TVEpisodeBase): # fill out the template for this piece and then insert this piece into the actual pattern cur_name_group_result = re.sub('(?i)(?x)' + regex_used, regex_replacement, cur_name_group) # cur_name_group_result = cur_name_group.replace(ep_format, ep_string) - # logger.log(u"found "+ep_format+" as the ep pattern using "+regex_used+" + # logger.debug("found "+ep_format+" as the ep pattern using "+regex_used+" # and replaced it with "+regex_replacement+" to result in "+cur_name_group_result+" - # from "+cur_name_group, logger.DEBUG) + # from "+cur_name_group) result_name = result_name.replace(cur_name_group, cur_name_group_result) result_name = self._format_string(result_name, replace_map) - logger.log('formatting pattern: %s -> %s' % (pattern, result_name), logger.DEBUG) + logger.debug('formatting pattern: %s -> %s' % (pattern, result_name)) return result_name @@ -5142,7 +5117,7 @@ class TVEpisode(TVEpisodeBase): return result # if not we append the folder on and use that - return ek.ek(os.path.join, self.formatted_dir(), result) + return os.path.join(self.formatted_dir(), result) def formatted_dir(self, pattern=None, multi=None): """ @@ -5166,7 +5141,7 @@ class TVEpisode(TVEpisodeBase): if 1 == len(name_groups): logger.debug('No Season Folder set in Naming pattern: %s' % pattern) return '' - return self._format_pattern(ek.ek(os.sep.join, name_groups[:-1]), multi) + return self._format_pattern(os.sep.join(name_groups[:-1]), multi) def formatted_filename(self, pattern=None, multi=None, anime_type=None): """ @@ -5195,13 +5170,13 @@ class TVEpisode(TVEpisodeBase): in the naming settings. 
""" - if not ek.ek(os.path.isfile, self.location): - logger.log('Can\'t perform rename on %s when it doesn\'t exist, skipping' % self.location, logger.WARNING) + if not os.path.isfile(self.location): + logger.warning('Can\'t perform rename on %s when it doesn\'t exist, skipping' % self.location) return proper_path = self.proper_path() - absolute_proper_path = ek.ek(os.path.join, self._show_obj.location, proper_path) - absolute_current_path_no_ext, file_ext = ek.ek(os.path.splitext, self.location) + absolute_proper_path = os.path.join(self._show_obj.location, proper_path) + absolute_current_path_no_ext, file_ext = os.path.splitext(self.location) absolute_current_path_no_ext_length = len(absolute_current_path_no_ext) related_subs = [] @@ -5211,13 +5186,11 @@ class TVEpisode(TVEpisodeBase): if absolute_current_path_no_ext.startswith(self._show_obj.location): current_path = absolute_current_path_no_ext[len(self._show_obj.location):] - logger.log('Renaming/moving episode from the base path %s to %s' % (self.location, absolute_proper_path), - logger.DEBUG) + logger.debug('Renaming/moving episode from the base path %s to %s' % (self.location, absolute_proper_path)) # if it's already named correctly then don't do anything if proper_path == current_path: - logger.log('%s: File %s is already named correctly, skipping' % (self._epid, self.location), - logger.DEBUG) + logger.debug('%s: File %s is already named correctly, skipping' % (self._epid, self.location)) return related_files = postProcessor.PostProcessor(self.location).list_associated_files( @@ -5226,9 +5199,9 @@ class TVEpisode(TVEpisodeBase): if self.show_obj.subtitles and '' != sickgear.SUBTITLES_DIR: related_subs = postProcessor.PostProcessor(self.location).list_associated_files(sickgear.SUBTITLES_DIR, subtitles_only=True) - # absolute_proper_subs_path = ek.ek(os.path.join, sickgear.SUBTITLES_DIR, self.formatted_filename()) + # absolute_proper_subs_path = os.path.join(sickgear.SUBTITLES_DIR, self.formatted_filename()) - logger.log('Files associated to %s: %s' % (self.location, related_files), logger.DEBUG) + logger.debug('Files associated to %s: %s' % (self.location, related_files)) # move the ep file result = helpers.rename_ep_file(self.location, absolute_proper_path, absolute_current_path_no_ext_length) @@ -5238,14 +5211,14 @@ class TVEpisode(TVEpisodeBase): renamed = helpers.rename_ep_file(cur_related_file, absolute_proper_path, absolute_current_path_no_ext_length) if not renamed: - logger.log('%s: Unable to rename file %s' % (self._epid, cur_related_file), logger.ERROR) + logger.error('%s: Unable to rename file %s' % (self._epid, cur_related_file)) for cur_related_sub in related_subs: - absolute_proper_subs_path = ek.ek(os.path.join, sickgear.SUBTITLES_DIR, self.formatted_filename()) + absolute_proper_subs_path = os.path.join(sickgear.SUBTITLES_DIR, self.formatted_filename()) renamed = helpers.rename_ep_file(cur_related_sub, absolute_proper_subs_path, absolute_current_path_no_ext_length) if not renamed: - logger.log('%s: Unable to rename file %s' % (self._epid, cur_related_sub), logger.ERROR) + logger.error('%s: Unable to rename file %s' % (self._epid, cur_related_sub)) # save the ep with self.lock: @@ -5272,14 +5245,15 @@ class TVEpisode(TVEpisodeBase): def airdate_modify_stamp(self): """ - Make the modify date and time of a file reflect the show air date and time. + Make modify date and time of a file reflect the show air date and time. 
Note: Also called from postProcessor """ has_timestamp = isinstance(self._timestamp, int) and 0 != self._timestamp if not has_timestamp and (not isinstance(self._airdate, datetime.date) or 1 == self._airdate.year): - logger.log('%s: Did not change modify date of %s because episode date is never aired or invalid' - % (self._show_obj.tvid_prodid, ek.ek(os.path.basename, self.location)), logger.DEBUG) + logger.debug(f'{self._show_obj.tvid_prodid}' + f': Did not change modify date of {os.path.basename(self.location)}' + f' because episode date is never aired or invalid') return aired_dt = None @@ -5300,7 +5274,7 @@ class TVEpisode(TVEpisodeBase): try: aired_epoch = SGDatetime.to_file_timestamp(aired_dt) - filemtime = int(ek.ek(os.path.getmtime, self.location)) + filemtime = int(os.path.getmtime(self.location)) except (BaseException, Exception): return @@ -5311,7 +5285,7 @@ class TVEpisode(TVEpisodeBase): result, loglevel = 'Error changing', logger.WARNING logger.log('%s: %s modify date of %s to show air date %s' - % (self._show_obj.tvid_prodid, result, ek.ek(os.path.basename, self.location), + % (self._show_obj.tvid_prodid, result, os.path.basename(self.location), 'n/a' if not aired_dt else aired_dt.strftime('%b %d,%Y (%H:%M)')), loglevel) def __getstate__(self): diff --git a/sickgear/tv_base.py b/sickgear/tv_base.py index b8a72466..0d4c8f45 100644 --- a/sickgear/tv_base.py +++ b/sickgear/tv_base.py @@ -20,7 +20,7 @@ import sickgear from . import logger from ._legacy_classes import LegacyTVShow, LegacyTVEpisode from .common import UNKNOWN -from .name_cache import buildNameCache +from .name_cache import build_name_cache from six import string_types @@ -42,8 +42,8 @@ class TVBase(object): setattr(self, attr_name, val) self.dirty = True else: - logger.log('Didn\'t change property "%s" because expected: %s, but got: %s with value: %s' % - (attr_name, types, type(val), val), logger.WARNING) + logger.warning(f'Didn\'t change property "{attr_name}" because expected: {types},' + f' but got: {type(val)} with value: {val}') return wrapper @@ -132,7 +132,7 @@ class TVShowBase(LegacyTVShow, TVBase): _current_name = self._name self.dirty_setter('_name')(self, *arg) if _current_name != self._name: - buildNameCache(self) + build_name_cache(self) # imdbid = property(lambda self: self._imdbid, dirty_setter('_imdbid')) @property diff --git a/sickgear/tvcache.py b/sickgear/tvcache.py index 6450b15d..d7fbd365 100644 --- a/sickgear/tvcache.py +++ b/sickgear/tvcache.py @@ -27,15 +27,13 @@ from .classes import SearchResult from .common import Quality from .name_parser.parser import InvalidNameException, InvalidShowException, NameParser, ParseResult from .rssfeeds import RSSFeeds -from .sgdatetime import timestamp_near +from .sgdatetime import SGDatetime from .tv import TVEpisode -from _23 import filter_list, map_iter -from six import PY2, text_type - # noinspection PyUnreachableCode if False: from typing import Any, AnyStr, Dict, List, Tuple, Union + from providers.generic import GenericProvider, NZBProvider, TorrentProvider class CacheDBConnection(db.DBConnection): @@ -44,7 +42,7 @@ class CacheDBConnection(db.DBConnection): # Create the table if it's not already there try: - if not self.hasTable('lastUpdate'): + if not self.has_table('lastUpdate'): self.action('CREATE TABLE lastUpdate (provider TEXT, time NUMERIC)') except (BaseException, Exception) as e: if ex(e) != 'table lastUpdate already exists': @@ -53,7 +51,7 @@ class CacheDBConnection(db.DBConnection): class TVCache(object): def __init__(self, provider, 
interval=10): - # type: (AnyStr, int) -> None + # type: (Union[GenericProvider, NZBProvider, TorrentProvider], int) -> None self.provider = provider self.providerID = self.provider.get_id() self.providerDB = None @@ -63,7 +61,7 @@ class TVCache(object): def get_db(): return CacheDBConnection() - def _clearCache(self): + def clear_cache(self): if self.should_clear_cache(): my_db = self.get_db() my_db.action('DELETE FROM provider_cache WHERE provider = ?', [self.providerID]) @@ -84,28 +82,15 @@ class TVCache(object): data = None return data - def _checkAuth(self): + def check_auth(self): # noinspection PyProtectedMember return self.provider._check_auth() - @staticmethod - def _checkItemAuth(title, url): - """ - - :param title: title - :type title: AnyStr - :param url: url - :type url: AnyStr - :return: - :rtype: bool - """ - return True - - def updateCache(self, **kwargs): + def update_cache(self, **kwargs): try: - self._checkAuth() + self.check_auth() except AuthException as e: - logger.log(u'Authentication error: ' + ex(e), logger.ERROR) + logger.error(f'Authentication error: {ex(e)}') return [] if self.should_update(): @@ -113,13 +98,13 @@ class TVCache(object): # clear cache if data: - self._clearCache() + self.clear_cache() # parse data cl = [] for item in data or []: title, url = self._title_and_url(item) - ci = self._parseItem(title, url) + ci = self.parse_item(title, url) if None is not ci: cl.append(ci) @@ -131,13 +116,13 @@ class TVCache(object): logger.log('Warning could not save cache value [%s], caught err: %s' % (cl, ex(e))) # set updated as time the attempt to fetch data is - self.setLastUpdate() + self.set_last_update() def get_rss(self, url, **kwargs): return RSSFeeds(self.provider).get_feed(url, **kwargs) @staticmethod - def _translateTitle(title): + def _translate_title(title): """ :param title: title @@ -145,10 +130,10 @@ class TVCache(object): :return: :rtype: AnyStr """ - return u'' + title.replace(' ', '.') + return f'{title.replace(" ", ".")}' @staticmethod - def _translateLinkURL(url): + def _translate_link_url(url): """ :param url: url @@ -158,7 +143,7 @@ class TVCache(object): """ return url.replace('&', '&') - def _parseItem(self, title, url): + def parse_item(self, title, url): """ :param title: title @@ -168,18 +153,15 @@ class TVCache(object): :return: :rtype: None or List[AnyStr, List[Any]] """ - self._checkItemAuth(title, url) - if title and url: - title = self._translateTitle(title) - url = self._translateLinkURL(url) + title = self._translate_title(title) + url = self._translate_link_url(url) return self.add_cache_entry(title, url) - logger.log('Data returned from the %s feed is incomplete, this result is unusable' % self.provider.name, - logger.DEBUG) + logger.debug('Data returned from the %s feed is incomplete, this result is unusable' % self.provider.name) - def _getLastUpdate(self): + def _get_last_update(self): """ :return: @@ -189,15 +171,15 @@ class TVCache(object): sql_result = my_db.select('SELECT time FROM lastUpdate WHERE provider = ?', [self.providerID]) if sql_result: - lastTime = int(sql_result[0]['time']) - if lastTime > int(timestamp_near(datetime.datetime.now())): - lastTime = 0 + last_time = int(sql_result[0]['time']) + if last_time > SGDatetime.timestamp_near(): + last_time = 0 else: - lastTime = 0 + last_time = 0 - return datetime.datetime.fromtimestamp(lastTime) + return datetime.datetime.fromtimestamp(last_time) - def _getLastSearch(self): + def _get_last_search(self): """ :return: @@ -207,15 +189,15 @@ class TVCache(object): 
sql_result = my_db.select('SELECT time FROM lastSearch WHERE provider = ?', [self.providerID]) if sql_result: - lastTime = int(sql_result[0]['time']) - if lastTime > int(timestamp_near(datetime.datetime.now())): - lastTime = 0 + last_time = int(sql_result[0]['time']) + if last_time > SGDatetime.timestamp_near(): + last_time = 0 else: - lastTime = 0 + last_time = 0 - return datetime.datetime.fromtimestamp(lastTime) + return datetime.datetime.fromtimestamp(last_time) - def setLastUpdate(self, to_date=None): + def set_last_update(self, to_date=None): """ :param to_date: date time @@ -229,7 +211,7 @@ class TVCache(object): {'time': int(time.mktime(to_date.timetuple()))}, {'provider': self.providerID}) - def setLastSearch(self, to_date=None): + def _set_last_search(self, to_date=None): """ :param to_date: date time @@ -243,8 +225,8 @@ class TVCache(object): {'time': int(time.mktime(to_date.timetuple()))}, {'provider': self.providerID}) - lastUpdate = property(_getLastUpdate) - lastSearch = property(_getLastSearch) + last_update = property(_get_last_update) + last_search = property(_get_last_search) def should_update(self): """ @@ -253,7 +235,7 @@ class TVCache(object): :rtype: bool """ # if we've updated recently then skip the update - return datetime.datetime.now() - self.lastUpdate >= datetime.timedelta(minutes=self.update_iv) + return datetime.datetime.now() - self.last_update >= datetime.timedelta(minutes=self.update_iv) def should_clear_cache(self): """ @@ -262,7 +244,7 @@ class TVCache(object): :rtype: bool """ # if recent search hasn't used our previous results yet then don't clear the cache - return self.lastSearch >= self.lastUpdate + return self.last_search >= self.last_update def add_cache_entry(self, name, # type: AnyStr @@ -293,7 +275,7 @@ class TVCache(object): parser = NameParser(show_obj=show_obj, convert=True, indexer_lookup=False) parse_result = parser.parse(name) except InvalidNameException: - logger.log('Unable to parse the filename %s into a valid episode' % name, logger.DEBUG) + logger.debug('Unable to parse the filename %s into a valid episode' % name) return except InvalidShowException: return @@ -315,24 +297,21 @@ class TVCache(object): if season_number and episode_numbers: # store episodes as a separated string - episode_text = '|%s|' % '|'.join(map_iter(str, episode_numbers)) + episode_text = '|%s|' % '|'.join(map(str, episode_numbers)) # get the current timestamp - cur_timestamp = int(timestamp_near(datetime.datetime.now())) + cur_timestamp = SGDatetime.timestamp_near() # get quality of release quality = parse_result.quality - if PY2 and not isinstance(name, text_type): - name = text_type(name, 'utf-8', 'replace') - # get release group release_group = parse_result.release_group # get version version = parse_result.version - logger.log('Add to cache: [%s]' % name, logger.DEBUG) + logger.debug('Add to cache: [%s]' % name) return [ 'INSERT OR IGNORE INTO provider_cache' @@ -346,22 +325,22 @@ class TVCache(object): url, cur_timestamp, quality, release_group, version, parse_result.show_obj.tvid]] - def searchCache(self, - episode, # type: TVEpisode - manual_search=False # type: bool - ): # type: (...) -> List[SearchResult] + def search_cache(self, + episode, # type: TVEpisode + manual_search=False # type: bool + ): # type: (...) 
-> List[SearchResult] """ :param episode: episode object :param manual_search: manual search :return: found results or empty List """ - neededEps = self.findNeededEpisodes(episode, manual_search) - if 0 != len(neededEps): - return neededEps[episode] + needed_eps = self.find_needed_episodes(episode, manual_search) + if 0 != len(needed_eps): + return needed_eps[episode] return [] - def listPropers(self, date=None): + def list_propers(self, date=None): """ :param date: date @@ -376,16 +355,16 @@ class TVCache(object): if date: sql += ' AND time >= ' + str(int(time.mktime(date.timetuple()))) - return filter_list(lambda x: x['indexerid'] != 0, my_db.select(sql, [self.providerID])) + return list(filter(lambda x: x['indexerid'] != 0, my_db.select(sql, [self.providerID]))) - def findNeededEpisodes(self, ep_obj_list, manual_search=False): + def find_needed_episodes(self, ep_obj_list, manual_search=False): # type: (Union[TVEpisode, List[TVEpisode]], bool) -> Dict[TVEpisode, SearchResult] """ :param ep_obj_list: episode object or list of episode objects :param manual_search: manual search """ - neededEps = {} + needed_eps = {} cl = [] my_db = self.get_db() @@ -408,8 +387,8 @@ class TVCache(object): sql_result = list(itertools.chain(*sql_result)) if not sql_result: - self.setLastSearch() - return neededEps + self._set_last_search() + return needed_eps # for each cache entry for cur_result in sql_result: @@ -426,7 +405,7 @@ class TVCache(object): # skip if provider is anime only and show is not anime if self.provider.anime_only and not show_obj.is_anime: - logger.debug(u'%s is not an anime, skipping' % show_obj.unique_name) + logger.debug(f'{show_obj.unique_name} is not an anime, skipping') continue # get season and ep data (ignoring multi-eps for now) @@ -444,8 +423,8 @@ class TVCache(object): # if the show says we want that episode then add it to the list if not show_obj.want_episode(season, ep_obj_list, quality, manual_search): - logger.log(u'Skipping ' + cur_result['name'] + ' because we don\'t want an episode that\'s ' + - Quality.qualityStrings[quality], logger.DEBUG) + logger.debug(f"Skipping {cur_result['name']}" + f" because we don't want an episode that's {Quality.qualityStrings[quality]}") continue ep_obj = show_obj.get_episode(season, ep_obj_list) @@ -454,7 +433,7 @@ class TVCache(object): title = cur_result['name'] url = cur_result['url'] - logger.log(u'Found result ' + title + ' at ' + url) + logger.log(f'Found result {title} at {url}') result = self.provider.get_result([ep_obj], url) if None is result: @@ -479,12 +458,12 @@ class TVCache(object): check_is_repack=True) # add it to the list - if ep_obj not in neededEps: - neededEps[ep_obj] = [result] + if ep_obj not in needed_eps: + needed_eps[ep_obj] = [result] else: - neededEps[ep_obj].append(result) + needed_eps[ep_obj].append(result) # datetime stamp this search so cache gets cleared - self.setLastSearch() + self._set_last_search() - return neededEps + return needed_eps diff --git a/sickgear/ui.py b/sickgear/ui.py index a15743dc..522a9093 100644 --- a/sickgear/ui.py +++ b/sickgear/ui.py @@ -117,7 +117,7 @@ class Notification(object): class ProgressIndicator(object): def __init__(self, percent_complete=0, current_status=None): - self.percentComplete = percent_complete + self.percent_complete = percent_complete self.currentStatus = {'title': ''} if None is current_status else current_status @@ -128,20 +128,20 @@ class ProgressIndicators(object): } @staticmethod - def getIndicator(name): + def get_indicator(name): if name not in 
ProgressIndicators._pi: return [] # if any of the progress indicators are done take them off the list for curPI in ProgressIndicators._pi[name]: - if None is not curPI and 100 == curPI.percentComplete(): + if None is not curPI and 100 == curPI.percent_complete(): ProgressIndicators._pi[name].remove(curPI) # return the list of progress indicators associated with this name return ProgressIndicators._pi[name] @staticmethod - def setIndicator(name, indicator): + def set_indicator(name, indicator): ProgressIndicators._pi[name].append(indicator) @@ -154,16 +154,16 @@ class QueueProgressIndicator(object): self.queueItemList = queue_item_list self.name = name - def numTotal(self): + def num_total(self): return len(self.queueItemList) - def numFinished(self): - return len([x for x in self.queueItemList if not x.isInQueue()]) + def num_finished(self): + return len([x for x in self.queueItemList if not x.is_in_queue()]) - def numRemaining(self): - return len([x for x in self.queueItemList if x.isInQueue()]) + def num_remaining(self): + return len([x for x in self.queueItemList if x.is_in_queue()]) - def nextName(self): + def next_name(self): for curItem in [ sickgear.show_queue_scheduler.action.currentItem] + sickgear.show_queue_scheduler.action.queue: if curItem in self.queueItemList: @@ -171,13 +171,13 @@ class QueueProgressIndicator(object): return "Unknown" - def percentComplete(self): - numFinished = self.numFinished() - numTotal = self.numTotal() + def percent_complete(self): + num_finished = self.num_finished() + num_total = self.num_total() - if 0 == numTotal: + if 0 == num_total: return 0 - return int(float(numFinished) / float(numTotal) * 100) + return int(float(num_finished) / float(num_total) * 100) class LoadingTVShow(object): diff --git a/sickgear/version_checker.py b/sickgear/version_checker.py index a2c892a2..aee6ccd1 100644 --- a/sickgear/version_checker.py +++ b/sickgear/version_checker.py @@ -25,8 +25,6 @@ import time import traceback from . 
import gh_api as github -# noinspection PyPep8Naming -import encodingKludge as ek from exceptions_helper import ex import sickgear @@ -37,7 +35,6 @@ from sg_helpers import cmdline_runner, get_url # noinspection PyUnresolvedReferences from six.moves import urllib from six import string_types -from _23 import list_keys # noinspection PyUnreachableCode if False: @@ -85,7 +82,7 @@ class PackagesUpdater(object): ui.notifications.message(msg) return False - logger.log('Update(s) for %s found %s' % (self.install_type, list_keys(sickgear.UPDATES_TODO))) + logger.log('Update(s) for %s found %s' % (self.install_type, list(sickgear.UPDATES_TODO))) # save updates_todo to config to be loaded after restart sickgear.save_config() @@ -176,7 +173,7 @@ class SoftwareUpdater(object): 'git': running from source using git 'source': running from source without git """ - return ('source', 'git')[os.path.isdir(ek.ek(os.path.join, sickgear.PROG_DIR, '.git'))] + return ('source', 'git')[os.path.isdir(os.path.join(sickgear.PROG_DIR, '.git'))] def check_for_new_version(self, force=False): """ @@ -280,17 +277,17 @@ class GitUpdateManager(UpdateManager): def _find_working_git(self): - logger.debug(u'Checking if git commands are available') + logger.debug('Checking if git commands are available') main_git = (sickgear.GIT_PATH, 'git')[not sickgear.GIT_PATH] _, _, exit_status = self._git_version(main_git) if 0 == exit_status: - logger.debug(u'Using: %s' % main_git) + logger.debug(f'Using: {main_git}') return main_git - logger.debug(u'Git not found: %s' % main_git) + logger.debug(f'Git not found: {main_git}') # trying alternatives @@ -304,12 +301,12 @@ class GitUpdateManager(UpdateManager): if main_git != main_git.lower(): alt_git_paths.append(main_git.lower()) if sickgear.GIT_PATH: - logger.debug(u'git.exe is missing, remove `git_path` from config.ini: %s' % main_git) + logger.debug(f'git.exe is missing, remove `git_path` from config.ini: {main_git}') if re.search(r' \(x86\)', main_git): alt_git_paths.append(re.sub(r' \(x86\)', '', main_git)) else: alt_git_paths.append(re.sub('Program Files', 'Program Files (x86)', main_git)) - logger.debug(u'Until `git_path` is removed by a config.ini edit, trying: %s' % alt_git_paths[-1]) + logger.debug(f'Until `git_path` is removed by a config.ini edit, trying: {alt_git_paths[-1]}') if alt_git_paths: logger.debug('Trying known alternative git locations') @@ -318,9 +315,9 @@ class GitUpdateManager(UpdateManager): _, _, exit_status = self._git_version(cur_git_path) if 0 == exit_status: - logger.debug(u'Using: %s' % cur_git_path) + logger.debug(f'Using: {cur_git_path}') return cur_git_path - logger.debug(u'Not using: %s' % cur_git_path) + logger.debug(f'Not using: {cur_git_path}') # Still haven't found a working git error_message = 'Unable to find your git executable - Shutdown SickGear and EITHER set git_path' \ @@ -340,15 +337,15 @@ class GitUpdateManager(UpdateManager): git_path = self._git_path if not git_path: - logger.error(u'No git specified, cannot use git commands') + logger.error('No git specified, cannot use git commands') return output, err, exit_status cmd = ' '.join([git_path] + arg_list) try: - logger.debug(u'Executing %s with your shell in %s' % (cmd, sickgear.PROG_DIR)) + logger.debug(f'Executing {cmd} with your shell in {sickgear.PROG_DIR}') output, err, exit_status = cmdline_runner([git_path] + arg_list, env={'LANG': 'en_US.UTF-8'}) - logger.debug(u'git output: %s' % output) + logger.debug(f'git output: {output}') except OSError: logger.log('Failed command: %s' % cmd) 
@@ -357,12 +354,12 @@ class GitUpdateManager(UpdateManager): logger.log('Failed command: %s, %s' % (cmd, ex(e))) if 0 == exit_status: - logger.debug(u'Successful return: %s' % cmd) + logger.debug(f'Successful return: {cmd}') exit_status = 0 self.unsafe = False elif 1 == exit_status: - logger.error(u'Failed: %s returned: %s' % (cmd, output)) + logger.error(f'Failed: {cmd} returned: {output}') elif 128 == exit_status or 'fatal:' in output or err: if 'unsafe repository' not in output and 'fatal:' in output: @@ -385,14 +382,14 @@ class GitUpdateManager(UpdateManager): except (BaseException, Exception): pass exit_status = 128 - msg = u'Fatal: %s returned: %s' % (cmd, output) + msg = f'Fatal: {cmd} returned: {output}' if 'develop' in output.lower() or 'main' in output.lower(): logger.error(msg) else: logger.debug(msg) else: - logger.error(u'Treat as error for now, command: %s returned: %s' % (cmd, output)) + logger.error(f'Treat as error for now, command: {cmd} returned: {output}') return output, err, exit_status @@ -408,7 +405,7 @@ class GitUpdateManager(UpdateManager): if 0 == exit_status and output: cur_commit_hash = output.strip() if not re.match(r'^[a-z0-9]+$', cur_commit_hash): - logger.error(u'Output doesn\'t look like a hash, not using it') + logger.error("Output doesn't look like a hash, not using it") return False self._cur_commit_hash = cur_commit_hash sickgear.CUR_COMMIT_HASH = str(cur_commit_hash) @@ -437,7 +434,7 @@ class GitUpdateManager(UpdateManager): _, _, exit_status = self._run_git(['fetch', '%s' % sickgear.GIT_REMOTE]) if 0 != exit_status: - logger.error(u'Unable to contact github, can\'t check for update') + logger.error("Unable to contact github, can't check for update") return if not self._cur_pr_number: @@ -449,14 +446,14 @@ class GitUpdateManager(UpdateManager): cur_commit_hash = output.strip() if not re.match('^[a-z0-9]+$', cur_commit_hash): - logger.debug(u'Output doesn\'t look like a hash, not using it') + logger.debug("Output doesn't look like a hash, not using it") return self._newest_commit_hash = cur_commit_hash self._old_commit_hash = cur_commit_hash self._old_branch = self._find_installed_branch() else: - logger.debug(u'git didn\'t return newest commit hash') + logger.debug("git didn't return newest commit hash") return # get number of commits behind and ahead (option --count not supported git < 1.7.2) @@ -469,11 +466,13 @@ class GitUpdateManager(UpdateManager): self._num_commits_ahead = int(output.count('>')) except (BaseException, Exception): - logger.debug(u'git didn\'t return numbers for behind and ahead, not using it') + logger.debug("git didn't return numbers for behind and ahead, not using it") return - logger.debug(u'cur_commit = %s, newest_commit = %s, num_commits_behind = %s, num_commits_ahead = %s' % ( - self._cur_commit_hash, self._newest_commit_hash, self._num_commits_behind, self._num_commits_ahead)) + logger.debug(f'cur_commit = {self._cur_commit_hash}' + f', newest_commit = {self._newest_commit_hash}' + f', num_commits_behind = {self._num_commits_behind}' + f', num_commits_ahead = {self._num_commits_ahead}') else: # we need to treat pull requests specially as it doesn't seem possible to set their "@{upstream}" tag output, _, _ = self._run_git(['ls-remote', '%s' % sickgear.GIT_REMOTE, @@ -515,7 +514,7 @@ class GitUpdateManager(UpdateManager): installed_branch = self._find_installed_branch() if self.branch != installed_branch: - logger.debug(u'Branch checkout: %s->%s' % (installed_branch, self.branch)) + logger.debug(f'Branch checkout: 
{installed_branch}->{self.branch}')
             return True
 
         self._find_installed_version()
@@ -527,7 +526,7 @@ class GitUpdateManager(UpdateManager):
         try:
             self._check_github_for_update()
         except (BaseException, Exception) as e:
-            logger.error(u'Unable to contact github, can\'t check for update: %r' % e)
+            logger.error(f"Unable to contact github, can't check for update: {e!r}")
             return False
 
         if 0 < self._num_commits_behind:
@@ -664,12 +663,12 @@ class SourceUpdateManager(UpdateManager):
         try:
             self._check_github_for_update()
         except (BaseException, Exception) as e:
-            logger.error(u'Unable to contact github, can\'t check for update: %r' % e)
+            logger.error(f"Unable to contact github, can't check for update: {e!r}")
             return False
 
         installed_branch = self._find_installed_branch()
         if self.branch != installed_branch:
-            logger.debug(u'Branch checkout: %s->%s' % (installed_branch, self.branch))
+            logger.debug(f'Branch checkout: {installed_branch}->{self.branch}')
             return True
 
         if not self._cur_commit_hash or 0 < self._num_commits_behind:
@@ -715,8 +714,9 @@ class SourceUpdateManager(UpdateManager):
                     # when _cur_commit_hash doesn't match anything _num_commits_behind == 100
                     self._num_commits_behind += 1
 
-        logger.debug(u'cur_commit = %s, newest_commit = %s, num_commits_behind = %s'
-                     % (self._cur_commit_hash, self._newest_commit_hash, self._num_commits_behind))
+        logger.debug(f'cur_commit = {self._cur_commit_hash}'
+                     f', newest_commit = {self._newest_commit_hash}'
+                     f', num_commits_behind = {self._num_commits_behind}')
 
     def set_newest_text(self):
 
@@ -724,7 +724,7 @@ class SourceUpdateManager(UpdateManager):
         newest_text = None
 
         if not self._cur_commit_hash:
-            logger.debug(u'Unknown current version number, don\'t know if we should update or not')
+            logger.debug("Unknown current version number, don't know if we should update or not")
 
            newest_text = 'Unknown current version number: If you\'ve never used the SickGear upgrade system' \
                          ' before then current version is not set.
— Update Now' \ @@ -754,48 +754,48 @@ class SourceUpdateManager(UpdateManager): try: # prepare the update dir - sg_update_dir = ek.ek(os.path.join, sickgear.PROG_DIR, u'sg-update') + sg_update_dir = os.path.join(sickgear.PROG_DIR, 'sg-update') if os.path.isdir(sg_update_dir): - logger.log(u'Clearing out update folder %s before extracting' % sg_update_dir) + logger.log(f'Clearing out update folder {sg_update_dir} before extracting') shutil.rmtree(sg_update_dir) - logger.log(u'Creating update folder %s before extracting' % sg_update_dir) + logger.log(f'Creating update folder {sg_update_dir} before extracting') os.makedirs(sg_update_dir) # retrieve file - logger.log(u'Downloading update from %r' % tar_download_url) - tar_download_path = os.path.join(sg_update_dir, u'sg-update.tar') + logger.log(f'Downloading update from {tar_download_url!r}') + tar_download_path = os.path.join(sg_update_dir, 'sg-update.tar') urllib.request.urlretrieve(tar_download_url, tar_download_path) - if not ek.ek(os.path.isfile, tar_download_path): - logger.error(u'Unable to retrieve new version from %s, can\'t update' % tar_download_url) + if not os.path.isfile(tar_download_path): + logger.error(f"Unable to retrieve new version from {tar_download_url}, can't update") return False - if not ek.ek(tarfile.is_tarfile, tar_download_path): - logger.error(u'Retrieved version from %s is corrupt, can\'t update' % tar_download_url) + if not tarfile.is_tarfile(tar_download_path): + logger.error(f"Retrieved version from {tar_download_url} is corrupt, can't update") return False # extract to sg-update dir - logger.log(u'Extracting file %s' % tar_download_path) + logger.log(f'Extracting file {tar_download_path}') tar = tarfile.open(tar_download_path) tar.extractall(sg_update_dir) tar.close() # delete .tar.gz - logger.log(u'Deleting file %s' % tar_download_path) + logger.log(f'Deleting file {tar_download_path}') os.remove(tar_download_path) # find update dir name update_dir_contents = [x for x in os.listdir(sg_update_dir) if os.path.isdir(os.path.join(sg_update_dir, x))] if 1 != len(update_dir_contents): - logger.error(u'Invalid update data, update failed: %s' % update_dir_contents) + logger.error(f'Invalid update data, update failed: {update_dir_contents}') return False content_dir = os.path.join(sg_update_dir, update_dir_contents[0]) # walk temp folder and move files to main folder - logger.log(u'Moving files from %s to %s' % (content_dir, sickgear.PROG_DIR)) + logger.log(f'Moving files from {content_dir} to {sickgear.PROG_DIR}') for dirname, dirnames, filenames in os.walk(content_dir): dirname = dirname[len(content_dir) + 1:] for curfile in filenames: @@ -811,7 +811,7 @@ class SourceUpdateManager(UpdateManager): os.remove(new_path) os.renames(old_path, new_path) except (BaseException, Exception) as e: - logger.debug(u'Unable to update %s: %s' % (new_path, ex(e))) + logger.debug(f'Unable to update {new_path}: {ex(e)}') os.remove(old_path) # Trash the updated file without moving in new path continue @@ -823,8 +823,8 @@ class SourceUpdateManager(UpdateManager): sickgear.CUR_COMMIT_BRANCH = self.branch except (BaseException, Exception) as e: - logger.error(u'Error while trying to update: %s' % ex(e)) - logger.debug(u'Traceback: %s' % traceback.format_exc()) + logger.error(f'Error while trying to update: {ex(e)}') + logger.debug(f'Traceback: {traceback.format_exc()}') return False # Notify update successful diff --git a/sickgear/watchedstate.py b/sickgear/watchedstate.py index 0ffbf41e..14454cbc 100644 --- a/sickgear/watchedstate.py 
+++ b/sickgear/watchedstate.py @@ -33,6 +33,7 @@ class WatchedStateUpdater(object): return sickgear.watched_state_queue_scheduler.action.is_in_queue(self.queue_item) def run(self): + # noinspection PyUnresolvedReferences if self.is_enabled(): self.amActive = True new_item = self.queue_item() diff --git a/sickgear/watchedstate_queue.py b/sickgear/watchedstate_queue.py index 7c5d83f5..c7449506 100644 --- a/sickgear/watchedstate_queue.py +++ b/sickgear/watchedstate_queue.py @@ -48,7 +48,7 @@ class WatchedStateQueue(generic_queue.GenericQueue): return length - def add_item(self, item): + def add_item(self, item, **kwargs): if isinstance(item, EmbyWatchedStateQueueItem) and not self.is_in_queue(EmbyWatchedStateQueueItem): # emby watched state item generic_queue.GenericQueue.add_item(self, item) @@ -56,7 +56,7 @@ class WatchedStateQueue(generic_queue.GenericQueue): # plex watched state item generic_queue.GenericQueue.add_item(self, item) else: - logger.log(u'Not adding item, it\'s already in the queue', logger.DEBUG) + logger.debug("Not adding item, it's already in the queue") class EmbyWatchedStateQueueItem(generic_queue.QueueItem): diff --git a/sickgear/webapi.py b/sickgear/webapi.py index b13bea0d..908a32eb 100644 --- a/sickgear/webapi.py +++ b/sickgear/webapi.py @@ -31,8 +31,6 @@ import time import traceback from . import webserve -# noinspection PyPep8Naming -import encodingKludge as ek import exceptions_helper from exceptions_helper import ex from json_helper import is_orjson, json_dumps, JSON_INDENT, json_loads, JSONEncoder, ORJSON_OPTIONS @@ -45,7 +43,7 @@ from . import classes, db, helpers, history, image_cache, logger, network_timezo from .common import ARCHIVED, DOWNLOADED, FAILED, IGNORED, SKIPPED, SNATCHED, SNATCHED_ANY, SNATCHED_BEST, \ SNATCHED_PROPER, UNAIRED, UNKNOWN, WANTED, Quality, qualityPresetStrings, statusStrings from .name_parser.parser import NameParser -from .helpers import starify +from .helpers import df, find_mount_point, starify from .indexers import indexer_api, indexer_config from .indexers.indexer_config import * from lib.tvinfo_base.exceptions import * @@ -57,8 +55,8 @@ from .tv import TVEpisode, TVShow, TVidProdid from .webserve import AddShows import dateutil.parser -from _23 import decode_str, list_keys, unquote_plus -from six import integer_types, iteritems, iterkeys, PY2, string_types, text_type +from _23 import decode_str, unquote_plus +from six import integer_types, iteritems, iterkeys, string_types, text_type # noinspection PyUnreachableCode if False: @@ -152,7 +150,7 @@ else: class Api(webserve.BaseHandler): """ api class that returns json results """ - version = 14 # use an int since float-point is unpredictable + version = 15 # use an int since float-point is unpredictable def check_xsrf_cookie(self): pass @@ -255,9 +253,7 @@ class Api(webserve.BaseHandler): result = function(*ag) return result except Exception as e: - if PY2: - logger.log('traceback: %s' % traceback.format_exc(), logger.ERROR) - logger.log(ex(e), logger.ERROR) + logger.error(ex(e)) raise e def _out_as_json(self, dict): @@ -281,17 +277,17 @@ class Api(webserve.BaseHandler): self.apikey_name = '' if not sickgear.USE_API: - msg = u'%s - SB API Disabled. ACCESS DENIED' % remoteIp + msg = f'{remoteIp} - SB API Disabled. ACCESS DENIED' return False, msg, args, kwargs if not apiKey: - msg = u'%s - gave NO API KEY. ACCESS DENIED' % remoteIp + msg = f'{remoteIp} - gave NO API KEY. 
ACCESS DENIED'
            return False, msg, args, kwargs
 
        for realKey in realKeys:
            if apiKey == realKey[1]:
                self.apikey_name = realKey[0]
-                msg = u'%s - gave correct API KEY: %s. ACCESS GRANTED' % (remoteIp, realKey[0])
+                msg = f'{remoteIp} - gave correct API KEY: {realKey[0]}. ACCESS GRANTED'
                return True, msg, args, kwargs
 
-        msg = u'%s - gave WRONG API KEY %s. ACCESS DENIED' % (remoteIp, apiKey)
+        msg = f'{remoteIp} - gave WRONG API KEY {apiKey}. ACCESS DENIED'
        return False, msg, args, kwargs
 
 
@@ -310,10 +306,10 @@ def call_dispatcher(handler, args, kwargs):
        cmds = kwargs["cmd"]
        del kwargs["cmd"]
 
-    api_log(handler, u"cmd: '" + str(cmds) + "'", logger.DEBUG)
-    api_log(handler, u"all args: '" + str(args) + "'", logger.DEBUG)
-    api_log(handler, u"all kwargs: '" + str(kwargs) + "'", logger.DEBUG)
-    # logger.log(u"dateFormat: '" + str(dateFormat) + "'", logger.DEBUG)
+    api_log(handler, f'cmd: "{cmds}"', logger.DEBUG)
+    api_log(handler, f'all args: "{args}"', logger.DEBUG)
+    api_log(handler, f'all kwargs: "{kwargs}"', logger.DEBUG)
+    # logger.debug(f'dateFormat: "{dateFormat}"')
 
    outDict = {}
 
@@ -604,7 +600,7 @@ class ApiCall(object):
            elif isinstance(value, string_types):
                if '|' in value:
                    li = [int(v) for v in value.split('|')]
-                    if any([not isinstance(v, integer_types) for v in li]):
+                    if any(not isinstance(v, integer_types) for v in li):
                        error = True
                    else:
                        value = li
@@ -614,7 +610,7 @@ class ApiCall(object):
                    error = True
                else:
                    li = value.split('|')
-                    if any([sub_type is not type(v) for v in li]):
+                    if any(sub_type is not type(v) for v in li):
                        error = True
                    else:
                        value = li
@@ -630,14 +626,11 @@ class ApiCall(object):
        elif "ignore" == type:
            pass
        else:
-            self.log(u"Invalid param type set " + str(type) + " can not check or convert ignoring it",
-                     logger.ERROR)
+            self.log(f"Invalid param type set {type} can not check or convert ignoring it", logger.ERROR)
 
        if error:
            # this is a real ApiError !!
- raise ApiError( - u"param: '" + str(name) + "' with given value: '" + str(value) + "' could not be parsed into '" + str( - type) + "'") + raise ApiError(f'param: "{name}" with given value: "{value}" could not be parsed into "{type}"') return value @@ -658,8 +651,7 @@ class ApiCall(object): if error: # this is kinda a ApiError but raising an error is the only way of quitting here - raise ApiError(u"param: '" + str(name) + "' with given value: '" + str( - value) + "' is out of allowed range '" + str(allowedValues) + "'") + raise ApiError(f'param: "{name}" with given value: "{value}" is out of allowed range "{allowedValues}"') class TVDBShorthandWrapper(ApiCall): @@ -795,7 +787,7 @@ def _mapQuality(show_obj): anyQualities = [] bestQualities = [] - iqualityID, aqualityID = Quality.splitQuality(int(show_obj)) + iqualityID, aqualityID = Quality.split_quality(int(show_obj)) if iqualityID: for quality in iqualityID: anyQualities.append(quality_map[quality]) @@ -809,38 +801,46 @@ def _getQualityMap(): return quality_map_inversed -def _getRootDirs(): - if "" == sickgear.ROOT_DIRS: - return {} +def _get_root_dirs(get_freespace=False): + # type: (bool) -> List[Dict] + """ + + :param get_freespace: include disk free space info in response + """ + dir_list = [] + if not sickgear.ROOT_DIRS: + return dir_list - rootDir = {} root_dirs = sickgear.ROOT_DIRS.split('|') - default_index = int(sickgear.ROOT_DIRS.split('|')[0]) - - rootDir["default_index"] = int(sickgear.ROOT_DIRS.split('|')[0]) - # remove default_index value from list (this fixes the offset) - root_dirs.pop(0) + default_index = int(root_dirs.pop(0)) if len(root_dirs) < default_index: - return {} + return dir_list # clean up the list - replace %xx escapes by their single-character equivalent root_dirs = [unquote_plus(x) for x in root_dirs] default_dir = root_dirs[default_index] - dir_list = [] - for root_dir in root_dirs: - valid = 1 + if root_dirs and get_freespace and sickgear.DISPLAY_FREESPACE: + diskfree, _ = df() + + for cur_root_dir in root_dirs: try: - ek.ek(os.listdir, root_dir) + os.listdir(cur_root_dir) + valid = 1 except (BaseException, Exception): valid = 0 - default = 0 - if root_dir is default_dir: - default = 1 - dir_list.append({'valid': valid, 'location': root_dir, 'default': default}) + new_entry = {'valid': valid, 'location': cur_root_dir, 'default': int(cur_root_dir is default_dir)} + + if get_freespace: + # noinspection PyUnboundLocalVariable + new_entry['free_space'] = 'Required setting "Display freespace" is not enabled' \ + if not sickgear.DISPLAY_FREESPACE \ + else next((_space for _disk, _space in diskfree or [] if _disk == find_mount_point(cur_root_dir)), '') + + dir_list.append(new_entry) return dir_list @@ -1045,7 +1045,7 @@ class CMD_SickGearComingEpisodes(ApiCall): ep['network'] and network_timezones.get_network_timezone(ep['network'], return_name=True)[1]) # remove all field we don't want for api response - for cur_f in list_keys(ep): + for cur_f in list(ep): if cur_f not in [ # fields to preserve 'absolute_number', 'air_by_date', 'airdate', 'airs', 'archive_firstmatch', 'classification', 'data_network', 'data_show_name', @@ -1159,7 +1159,7 @@ class CMD_SickGearEpisode(ApiCall): timezone, episode['timezone'] = network_timezones.get_network_timezone(show_obj.network, return_name=True) episode['airdate'] = SGDatetime.sbfdate(SGDatetime.convert_to_setting( network_timezones.parse_date_time(int(episode['airdate']), show_obj.airs, timezone)), d_preset=dateFormat) - status, quality = 
Quality.splitCompositeStatus(int(episode["status"])) + status, quality = Quality.split_composite_status(int(episode["status"])) episode["status"] = _get_status_Strings(status) episode["quality"] = _get_quality_string(quality) episode["file_size_human"] = _sizeof_fmt(episode["file_size"]) @@ -1228,7 +1228,7 @@ class CMD_SickGearEpisodeSearch(ApiCall): # return the correct json value if ep_queue_item.success: - status, quality = Quality.splitCompositeStatus(ep_obj.status) + status, quality = Quality.split_composite_status(ep_obj.status) # TODO: split quality and status? return _responds(RESULT_SUCCESS, {"quality": _get_quality_string(quality)}, "Snatched (" + _get_quality_string(quality) + ")") @@ -1352,7 +1352,7 @@ class CMD_SickGearEpisodeSetStatus(ApiCall): continue if None is not self.quality: - ep_obj.status = Quality.compositeStatus(self.status, self.quality) + ep_obj.status = Quality.composite_status(self.status, self.quality) else: ep_obj.status = self.status result = ep_obj.get_sql() @@ -1373,8 +1373,8 @@ class CMD_SickGearEpisodeSetStatus(ApiCall): backlog_queue_item = search_queue.BacklogQueueItem(show_obj, segment) sickgear.search_queue_scheduler.action.add_item(backlog_queue_item) - self.log(u'Starting backlog for %s season %s because some episodes were set to WANTED' % - (show_obj.unique_name, season)) + self.log(f'Starting backlog for {show_obj.unique_name} season {season}' + f' because some episodes were set to WANTED') extra_msg = " Backlog started" @@ -1671,7 +1671,7 @@ class CMD_SickGearHistory(ApiCall): results = [] np = NameParser(True, testing=True, indexer_lookup=False, try_scene_exceptions=False) for cur_result in sql_result: - status, quality = Quality.splitCompositeStatus(int(cur_result["action"])) + status, quality = Quality.split_composite_status(int(cur_result["action"])) if type_filter and status not in type_filter: continue status = _get_status_Strings(status) @@ -1983,7 +1983,8 @@ class CMD_SickGearAddRootDir(ApiCall): _help = {"desc": "add a user configured parent directory", "requiredParameters": {"location": {"desc": "the full path to root (parent) directory"} }, - "optionalParameters": {"default": {"desc": "make the location passed the default root (parent) directory"} + "optionalParameters": {"default": {"desc": "make the location passed the default root (parent) directory"}, + "freespace": {"desc": "include free space of paths in response"} } } @@ -1992,6 +1993,7 @@ class CMD_SickGearAddRootDir(ApiCall): self.location, args = self.check_params(args, kwargs, "location", None, True, "string", []) # optional self.default, args = self.check_params(args, kwargs, "default", 0, False, "bool", []) + self.freespace, args = self.check_params(args, kwargs, "freespace", 0, False, "bool", []) # super, missing, help ApiCall.__init__(self, handler, args, kwargs) @@ -2003,7 +2005,7 @@ class CMD_SickGearAddRootDir(ApiCall): index = 0 # disallow adding/setting an invalid dir - if not ek.ek(os.path.isdir, self.location): + if not os.path.isdir(self.location): return _responds(RESULT_FAILURE, msg="Location is invalid") root_dirs = [] @@ -2034,7 +2036,9 @@ class CMD_SickGearAddRootDir(ApiCall): root_dirs_new = '|'.join([text_type(x) for x in root_dirs_new]) sickgear.ROOT_DIRS = root_dirs_new - return _responds(RESULT_SUCCESS, _getRootDirs(), msg="Root directories updated") + sickgear.save_config() + return _responds(RESULT_SUCCESS, _get_root_dirs(not self.sickbeard_call and self.freespace), + msg="Root directories updated") class 
CMD_SickBeardAddRootDir(CMD_SickGearAddRootDir): @@ -2092,20 +2096,24 @@ class CMD_SickBeardCheckScheduler(CMD_SickGearCheckScheduler): class CMD_SickGearDeleteRootDir(ApiCall): _help = {"desc": "delete a user configured parent directory", - "requiredParameters": {"location": {"desc": "the full path to root (parent) directory"}} + "requiredParameters": {"location": {"desc": "the full path to root (parent) directory"}}, + "optionalParameters": {"freespace": {"desc": "include free space of paths in response"} + } } def __init__(self, handler, args, kwargs): # required self.location, args = self.check_params(args, kwargs, "location", None, True, "string", []) # optional + self.freespace, args = self.check_params(args, kwargs, "freespace", 0, False, "bool", []) # super, missing, help ApiCall.__init__(self, handler, args, kwargs) def run(self): """ delete a user configured parent directory """ if sickgear.ROOT_DIRS == "": - return _responds(RESULT_FAILURE, _getRootDirs(), msg="No root directories detected") + return _responds(RESULT_FAILURE, _get_root_dirs(not self.sickbeard_call and self.freespace), + msg="No root directories detected") newIndex = 0 root_dirs_new = [] @@ -2132,8 +2140,10 @@ class CMD_SickGearDeleteRootDir(ApiCall): root_dirs_new = "|".join([text_type(x) for x in root_dirs_new]) sickgear.ROOT_DIRS = root_dirs_new + sickgear.save_config() # what if the root dir was not found? - return _responds(RESULT_SUCCESS, _getRootDirs(), msg="Root directory deleted") + return _responds(RESULT_SUCCESS, _get_root_dirs(not self.sickbeard_call and self.freespace), + msg="Root directory deleted") class CMD_SickBeardDeleteRootDir(CMD_SickGearDeleteRootDir): @@ -2168,14 +2178,14 @@ class CMD_SickGearForceSearch(ApiCall): result = None if 'recent' == self.searchtype and not sickgear.search_queue_scheduler.action.is_recentsearch_in_progress() \ and not sickgear.recent_search_scheduler.action.amActive: - result = sickgear.recent_search_scheduler.forceRun() + result = sickgear.recent_search_scheduler.force_run() elif 'backlog' == self.searchtype and not sickgear.search_queue_scheduler.action.is_backlog_in_progress() \ and not sickgear.backlog_search_scheduler.action.amActive: sickgear.backlog_search_scheduler.force_search(force_type=FORCED_BACKLOG) result = True elif 'proper' == self.searchtype and not sickgear.search_queue_scheduler.action.is_propersearch_in_progress() \ and not sickgear.proper_finder_scheduler.action.amActive: - result = sickgear.proper_finder_scheduler.forceRun() + result = sickgear.proper_finder_scheduler.force_run() if result: return _responds(RESULT_SUCCESS, msg='%s search successfully forced' % self.searchtype) return _responds(RESULT_FAILURE, @@ -2340,8 +2350,8 @@ class CMD_SickGearGetIndexerIcon(ApiCall): self.handler.set_status(404) return _responds(RESULT_FAILURE, 'Icon not found') img = i['icon'] - image = ek.ek(os.path.join, sickgear.PROG_DIR, 'gui', 'slick', 'images', img) - if not ek.ek(os.path.isfile, image): + image = os.path.join(sickgear.PROG_DIR, 'gui', 'slick', 'images', img) + if not os.path.isfile(image): self.handler.set_status(404) return _responds(RESULT_FAILURE, 'Icon not found') return {'outputType': 'image', 'image': self.handler.get_image(image)} @@ -2361,9 +2371,8 @@ class CMD_SickGearGetNetworkIcon(ApiCall): ApiCall.__init__(self, handler, args, kwargs) def run(self): - image = ek.ek(os.path.join, sickgear.PROG_DIR, 'gui', 'slick', 'images', 'network', - '%s.png' % self.network.lower()) - if not ek.ek(os.path.isfile, image): + image = 
os.path.join(sickgear.PROG_DIR, 'gui', 'slick', 'images', 'network', '%s.png' % self.network.lower()) + if not os.path.isfile(image): self.handler.set_status(404) return _responds(RESULT_FAILURE, 'Icon not found') return {'outputType': 'image', 'image': self.handler.get_image(image)} @@ -2383,18 +2392,22 @@ class CMD_SickGearGetqualityStrings(ApiCall): class CMD_SickGearGetRootDirs(ApiCall): - _help = {"desc": "get list of user configured parent directories"} + _help = {"desc": "get list of user configured parent directories", + "optionalParameters": {"freespace": {"desc": "include free space of paths in response"} + } + } def __init__(self, handler, args, kwargs): # required # optional + self.freespace, args = self.check_params(args, kwargs, "freespace", 0, False, "bool", []) # super, missing, help ApiCall.__init__(self, handler, args, kwargs) def run(self): """ get list of user configured parent directories """ - return _responds(RESULT_SUCCESS, _getRootDirs()) + return _responds(RESULT_SUCCESS, _get_root_dirs(not self.sickbeard_call and self.freespace)) class CMD_SickBeardGetRootDirs(CMD_SickGearGetRootDirs): @@ -2671,7 +2684,7 @@ class CMD_SickGearSetDefaults(ApiCall): aqualityID.append(quality_map[quality]) if iqualityID or aqualityID: - sickgear.QUALITY_DEFAULT = Quality.combineQualities(iqualityID, aqualityID) + sickgear.QUALITY_DEFAULT = Quality.combine_qualities(iqualityID, aqualityID) if self.status: # convert the string status to a int @@ -3328,7 +3341,7 @@ class CMD_SickGearShowAddExisting(ApiCall): if show_obj: return _responds(RESULT_FAILURE, msg="An existing indexerid already exists in the database") - if not ek.ek(os.path.isdir, self.location): + if not os.path.isdir(self.location): return _responds(RESULT_FAILURE, msg='Not a valid location') lINDEXER_API_PARMS = sickgear.TVInfoAPI(self.tvid).api_params.copy() @@ -3341,7 +3354,7 @@ class CMD_SickGearShowAddExisting(ApiCall): try: myShow = t[int(self.prodid), False] except BaseTVinfoError as e: - self.log(u"Unable to find show with id " + str(self.tvid), logger.WARNING) + self.log(f'Unable to find show with id {self.tvid}', logger.WARNING) return _responds(RESULT_FAILURE, msg="Unable to retrieve information from indexer") indexerName = None @@ -3370,7 +3383,7 @@ class CMD_SickGearShowAddExisting(ApiCall): aqualityID.append(quality_map[quality]) if iqualityID or aqualityID: - newQuality = Quality.combineQualities(iqualityID, aqualityID) + newQuality = Quality.combine_qualities(iqualityID, aqualityID) sickgear.show_queue_scheduler.action.add_show( int(self.tvid), int(self.prodid), self.location, @@ -3460,7 +3473,7 @@ class CMD_SickGearShowAddNew(ApiCall): else: return _responds(RESULT_FAILURE, msg="Root directory is not set, please provide a location") - if not ek.ek(os.path.isdir, self.location): + if not os.path.isdir(self.location): return _responds(RESULT_FAILURE, msg="'" + self.location + "' is not a valid location") # use default quality as a failsafe @@ -3476,7 +3489,7 @@ class CMD_SickGearShowAddNew(ApiCall): aqualityID.append(quality_map[quality]) if iqualityID or aqualityID: - newQuality = Quality.combineQualities(iqualityID, aqualityID) + newQuality = Quality.combine_qualities(iqualityID, aqualityID) # use default status as a failsafe newStatus = sickgear.STATUS_DEFAULT @@ -3504,7 +3517,7 @@ class CMD_SickGearShowAddNew(ApiCall): try: myShow = t[int(self.prodid), False] except BaseTVinfoError as e: - self.log(u"Unable to find show with id " + str(self.tvid), logger.WARNING) + self.log(f'Unable to find show with id 
{self.tvid}', logger.WARNING) return _responds(RESULT_FAILURE, msg="Unable to retrieve information from indexer") indexerName = None @@ -3525,11 +3538,11 @@ class CMD_SickGearShowAddNew(ApiCall): # don't create show dir if config says not to if sickgear.ADD_SHOWS_WO_DIR: - self.log(u"Skipping initial creation of " + showPath + " due to config.ini setting") + self.log(f'Skipping initial creation of {showPath} due to config.ini setting') else: dir_exists = helpers.make_dir(showPath) if not dir_exists: - self.log(u"Unable to create the folder " + showPath + ", can't add the show", logger.ERROR) + self.log(f"Unable to create the folder {showPath}, can't add the show", logger.ERROR) return _responds(RESULT_FAILURE, {"path": showPath}, "Unable to create the folder " + showPath + ", can't add the show") else: @@ -3611,9 +3624,9 @@ class CMD_SickGearShowCache(ApiCall): has_poster = 0 has_banner = 0 - if ek.ek(os.path.isfile, cache_obj.poster_path(show_obj.tvid, show_obj.prodid)): + if os.path.isfile(cache_obj.poster_path(show_obj.tvid, show_obj.prodid)): has_poster = 1 - if ek.ek(os.path.isfile, cache_obj.banner_path(show_obj.tvid, show_obj.prodid)): + if os.path.isfile(cache_obj.banner_path(show_obj.tvid, show_obj.prodid)): has_banner = 1 return _responds(RESULT_SUCCESS, {"poster": has_poster, "banner": has_banner}) @@ -3663,8 +3676,8 @@ class CMD_SickGearShowDelete(ApiCall): if not show_obj: return _responds(RESULT_FAILURE, msg="Show not found") - if sickgear.show_queue_scheduler.action.isBeingAdded( - show_obj) or sickgear.show_queue_scheduler.action.isBeingUpdated(show_obj): + if sickgear.show_queue_scheduler.action.is_being_added( + show_obj) or sickgear.show_queue_scheduler.action.is_being_updated(show_obj): return _responds(RESULT_FAILURE, msg="Show can not be deleted while being added or updated") show_obj.delete_show(full=self.full_delete) @@ -3834,8 +3847,7 @@ class CMD_SickGearShowListFanart(ApiCall): fanart = [] rating_names = {10: 'group', 20: 'favorite', 30: 'avoid'} cache_obj = image_cache.ImageCache() - for img in ek.ek(glob.glob, cache_obj.fanart_path( - show_obj.tvid, show_obj.prodid).replace('fanart.jpg', '*')) or []: + for img in glob.glob(cache_obj.fanart_path(show_obj.tvid, show_obj.prodid).replace('fanart.jpg', '*')) or []: match = re.search(r'(\d+(?:\.(\w*?(\d*)))?\.(?:\w{5,8}))\.fanart\.', img, re.I) if match and match.group(1): fanart += [(match.group(1), rating_names.get(sickgear.FANART_RATINGS.get( @@ -3870,7 +3882,7 @@ class CMD_SickGearShowRateFanart(ApiCall): cache_obj = image_cache.ImageCache() fanartfile = cache_obj.fanart_path(self.tvid, self.prodid).replace('fanart.jpg', '%s.fanart.jpg' % self.fanartname) - if not ek.ek(os.path.isfile, fanartfile): + if not os.path.isfile(fanartfile): return _responds(RESULT_FAILURE, msg='Unknown Fanart') fan_ratings = {'unrate': 0, 'group': 10, 'favorite': 20, 'avoid': 30} show_id = TVidProdid({self.tvid: self.prodid})() @@ -3906,19 +3918,19 @@ class CMD_SickGearShowGetFanart(ApiCall): def run(self): """ get the fanart stored for a show """ cache_obj = image_cache.ImageCache() - default_fanartfile = ek.ek(os.path.join, sickgear.PROG_DIR, 'gui', 'slick', 'images', 'trans.png') + default_fanartfile = os.path.join(sickgear.PROG_DIR, 'gui', 'slick', 'images', 'trans.png') fanartfile = default_fanartfile used_fanart = 'default' if self.fanartname: fanartfile = cache_obj.fanart_path(self.tvid, self.prodid).replace('fanart.jpg', '%s.fanart.jpg' % self.fanartname) - if not ek.ek(os.path.isfile, fanartfile): + if not 
os.path.isfile(fanartfile): fanartfile = default_fanartfile used_fanart = self.fanartname else: fanart = [] - for img in ek.ek(glob.glob, cache_obj.fanart_path(self.tvid, self.prodid).replace('fanart.jpg', '*')) or []: - if not ek.ek(os.path.isfile, img): + for img in glob.glob(cache_obj.fanart_path(self.tvid, self.prodid).replace('fanart.jpg', '*')) or []: + if not os.path.isfile(img): continue match = re.search(r'(\d+(?:\.(\w*?(\d*)))?\.(?:\w{5,8}))\.fanart\.', img, re.I) if match and match.group(1): @@ -3933,8 +3945,8 @@ class CMD_SickGearShowGetFanart(ApiCall): fanartfile = fanartsorted[random_fanart][0] used_fanart = fanartsorted[random_fanart][1] - if fanartfile and ek.ek(os.path.isfile, fanartfile): - with ek.ek(open, fanartfile, 'rb') as f: + if fanartfile and os.path.isfile(fanartfile): + with open(fanartfile, 'rb') as f: mime_type, encoding = MimeTypes().guess_type(fanartfile) self.handler.set_header('X-Fanartname', used_fanart) self.handler.set_header('Content-Type', mime_type) @@ -4021,7 +4033,7 @@ class CMD_SickGearShowRefresh(ApiCall): return _responds(RESULT_FAILURE, msg="Show not found") try: - sickgear.show_queue_scheduler.action.refreshShow(show_obj) + sickgear.show_queue_scheduler.action.refresh_show(show_obj) return _responds(RESULT_SUCCESS, msg='%s has queued to be refreshed' % show_obj.unique_name) except exceptions_helper.CantRefreshException as e: # TODO: log the exception @@ -4150,7 +4162,7 @@ class CMD_SickGearShowSeasons(ApiCall): [self.tvid, self.prodid]) seasons = {} # type: Dict[int, Dict] for cur_result in sql_result: - status, quality = Quality.splitCompositeStatus(int(cur_result["status"])) + status, quality = Quality.split_composite_status(int(cur_result["status"])) cur_result["status"] = _get_status_Strings(status) cur_result["quality"] = _get_quality_string(quality) timezone, cur_result['timezone'] = network_timezones.get_network_timezone(show_obj.network, @@ -4183,7 +4195,7 @@ class CMD_SickGearShowSeasons(ApiCall): for cur_result in sql_result: curEpisode = int(cur_result["episode"]) del cur_result["episode"] - status, quality = Quality.splitCompositeStatus(int(cur_result["status"])) + status, quality = Quality.split_composite_status(int(cur_result["status"])) cur_result["status"] = _get_status_Strings(status) cur_result["quality"] = _get_quality_string(quality) timezone, cur_result['timezone'] = network_timezones.get_network_timezone(show_obj.network, @@ -4268,7 +4280,7 @@ class CMD_SickGearShowSetQuality(ApiCall): aqualityID.append(quality_map[quality]) if iqualityID or aqualityID: - newQuality = Quality.combineQualities(iqualityID, aqualityID) + newQuality = Quality.combine_qualities(iqualityID, aqualityID) show_obj.quality = newQuality show_obj.upgrade_once = self.upgradeonce @@ -4332,7 +4344,7 @@ class CMD_SickGearShowStats(ApiCall): # add all the downloaded qualities episode_qualities_counts_download = {"total": 0} for statusCode in Quality.DOWNLOADED: - status, quality = Quality.splitCompositeStatus(statusCode) + status, quality = Quality.split_composite_status(statusCode) if quality in [Quality.NONE]: continue episode_qualities_counts_download[statusCode] = 0 @@ -4340,7 +4352,7 @@ class CMD_SickGearShowStats(ApiCall): # add all snatched qualities episode_qualities_counts_snatch = {"total": 0} for statusCode in Quality.SNATCHED_ANY: - status, quality = Quality.splitCompositeStatus(statusCode) + status, quality = Quality.split_composite_status(statusCode) if quality in [Quality.NONE]: continue episode_qualities_counts_snatch[statusCode] = 0 @@ 
-4351,7 +4363,7 @@ class CMD_SickGearShowStats(ApiCall): [self.prodid, self.tvid]) # the main loop that goes through all episodes for cur_result in sql_result: - status, quality = Quality.splitCompositeStatus(int(cur_result["status"])) + status, quality = Quality.split_composite_status(int(cur_result["status"])) episode_status_counts_total["total"] += 1 @@ -4373,7 +4385,7 @@ class CMD_SickGearShowStats(ApiCall): if "total" == statusCode: episodes_stats["downloaded"]["total"] = episode_qualities_counts_download[statusCode] continue - status, quality = Quality.splitCompositeStatus(int(statusCode)) + status, quality = Quality.split_composite_status(int(statusCode)) statusString = Quality.qualityStrings[quality].lower().replace(" ", "_").replace("(", "").replace(")", "") episodes_stats["downloaded"][statusString] = episode_qualities_counts_download[statusCode] @@ -4384,7 +4396,7 @@ class CMD_SickGearShowStats(ApiCall): if "total" == statusCode: episodes_stats["snatched"]["total"] = episode_qualities_counts_snatch[statusCode] continue - status, quality = Quality.splitCompositeStatus(int(statusCode)) + status, quality = Quality.split_composite_status(int(statusCode)) statusString = Quality.qualityStrings[quality].lower().replace(" ", "_").replace("(", "").replace(")", "") if Quality.qualityStrings[quality] in episodes_stats["snatched"]: episodes_stats["snatched"][statusString] += episode_qualities_counts_snatch[statusCode] @@ -4396,7 +4408,7 @@ class CMD_SickGearShowStats(ApiCall): if "total" == statusCode: episodes_stats["total"] = episode_status_counts_total[statusCode] continue - status, quality = Quality.splitCompositeStatus(int(statusCode)) + status, quality = Quality.split_composite_status(int(statusCode)) statusString = statusStrings.statusStrings[statusCode].lower().replace(" ", "_").replace("(", "").replace( ")", "") episodes_stats[statusString] = episode_status_counts_total[statusCode] @@ -4443,10 +4455,10 @@ class CMD_SickGearShowUpdate(ApiCall): return _responds(RESULT_FAILURE, msg="Show not found") try: - sickgear.show_queue_scheduler.action.updateShow(show_obj, True) + sickgear.show_queue_scheduler.action.update_show(show_obj, True) return _responds(RESULT_SUCCESS, msg='%s has queued to be updated' % show_obj.unique_name) except exceptions_helper.CantUpdateException as e: - self.log(u'Unable to update %s. %s' % (show_obj.unique_name, ex(e)), logger.ERROR) + self.log(f'Unable to update {show_obj.unique_name}. {ex(e)}', logger.ERROR) return _responds(RESULT_FAILURE, msg='Unable to update %s. 
%s' % (show_obj.unique_name, ex(e))) @@ -4655,11 +4667,11 @@ class CMD_SickGearShowsForceUpdate(ApiCall): def run(self): """ force the daily show update now """ - if sickgear.show_queue_scheduler.action.isShowUpdateRunning() \ + if sickgear.show_queue_scheduler.action.is_show_update_running() \ or sickgear.show_update_scheduler.action.amActive: return _responds(RESULT_FAILURE, msg="show update already running.") - result = sickgear.show_update_scheduler.forceRun() + result = sickgear.show_update_scheduler.force_run() if result: return _responds(RESULT_SUCCESS, msg="daily show update started") return _responds(RESULT_FAILURE, msg="can't start show update currently") diff --git a/sickgear/webserve.py b/sickgear/webserve.py index 1bb406be..9ccafc64 100644 --- a/sickgear/webserve.py +++ b/sickgear/webserve.py @@ -19,6 +19,7 @@ from __future__ import with_statement, division # noinspection PyProtectedMember from mimetypes import MimeTypes +from urllib.parse import urljoin import base64 import copy @@ -37,19 +38,25 @@ import zipfile from exceptions_helper import ex, MultipleShowObjectsException import exceptions_helper -# noinspection PyPep8Naming -import encodingKludge as ek from json_helper import json_dumps, json_loads import sg_helpers from sg_helpers import remove_file, scantree, is_virtualenv +from sg_futures import SgThreadPoolExecutor +try: + from multiprocessing import cpu_count +except ImportError: + # some platforms don't have multiprocessing + def cpu_count(): + return None + import sickgear from . import classes, clients, config, db, helpers, history, image_cache, logger, name_cache, naming, \ network_timezones, notifiers, nzbget, processTV, sab, scene_exceptions, search_queue, subtitles, ui from .anime import AniGroupList, pull_anidb_groups, short_group_names from .browser import folders_at_path from .common import ARCHIVED, DOWNLOADED, FAILED, IGNORED, SKIPPED, SNATCHED, SNATCHED_ANY, UNAIRED, UNKNOWN, WANTED, \ - SD, HD720p, HD1080p, UHD2160p, Overview, Quality, qualityPresetStrings, statusStrings + SD, HD720p, HD1080p, UHD2160p, Overview, Quality, qualityPresetStrings, statusStrings from .helpers import get_media_stats, has_image_ext, real_path, remove_article, remove_file_perm, starify from .indexermapper import MapStatus, map_indexers_to_show, save_mapping from .indexers.indexer_config import TVINFO_IMDB, TVINFO_TMDB, TVINFO_TRAKT, TVINFO_TVDB, TVINFO_TVMAZE, \ @@ -59,7 +66,7 @@ from .providers import newznab, rsstorrent from .scene_numbering import get_scene_absolute_numbering_for_show, get_scene_numbering_for_show, \ get_xem_absolute_numbering_for_show, get_xem_numbering_for_show, set_scene_numbering_helper from .search_backlog import FORCED_BACKLOG -from .sgdatetime import SGDatetime, timestamp_near +from .sgdatetime import SGDatetime from .show_name_helpers import abbr_showname from .show_updater import clean_ignore_require_words @@ -74,13 +81,9 @@ from unidecode import unidecode import dateutil.parser from tornado import gen, iostream -# noinspection PyUnresolvedReferences +from tornado.escape import utf8 from tornado.web import RequestHandler, StaticFileHandler, authenticated from tornado.concurrent import run_on_executor -# tornado.web.RequestHandler above is unresolved until... 
-# 1) RouteHandler derives from RequestHandler instead of LegacyBaseHandler -# 2) the following line is removed (plus the noinspection deleted) -from ._legacy import LegacyBaseHandler from lib import subliminal from lib.cfscrape import CloudflareScraper @@ -92,14 +95,15 @@ from lib.api_trakt.exceptions import TraktException, TraktAuthException import lib.rarfile.rarfile as rarfile -from _23 import decode_bytes, decode_str, filter_list, filter_iter, getargspec, list_keys, list_values, \ - map_consume, map_iter, map_list, map_none, ordered_dict, quote_plus, unquote_plus, urlparse -from six import binary_type, integer_types, iteritems, iterkeys, itervalues, moves, PY2, string_types +from _23 import decode_bytes, decode_str, getargspec, \ + map_consume, map_none, quote_plus, unquote_plus, urlparse +from six import binary_type, integer_types, iteritems, iterkeys, itervalues, moves, string_types # noinspection PyUnreachableCode if False: from typing import Any, AnyStr, Dict, List, Optional, Set, Tuple from sickgear.providers.generic import TorrentProvider + from tv import TVInfoShow # noinspection PyAbstractClass @@ -163,17 +167,17 @@ class BaseStaticFileHandler(StaticFileHandler): body = '\nRequest body: %s' % decode_str(self.request.body) except (BaseException, Exception): pass - logger.log('Sent %s error response to a `%s` request for `%s` with headers:\n%s%s' % - (status_code, self.request.method, self.request.path, self.request.headers, body), logger.WARNING) + logger.warning(f'Sent {status_code} error response to a `{self.request.method}`' + f' request for `{self.request.path}` with headers:\n' + f'{self.request.headers}{body}') # suppress traceback by removing 'exc_info' kwarg if 'exc_info' in kwargs: - logger.log('Gracefully handled exception text:\n%s' % traceback.format_exception(*kwargs["exc_info"]), - logger.DEBUG) + logger.debug('Gracefully handled exception text:\n%s' % traceback.format_exception(*kwargs["exc_info"])) del kwargs['exc_info'] return super(BaseStaticFileHandler, self).write_error(status_code, **kwargs) def validate_absolute_path(self, root, absolute_path): - if '\\images\\flags\\' in absolute_path and not ek.ek(os.path.isfile, absolute_path): + if '\\images\\flags\\' in absolute_path and not os.path.isfile(absolute_path): absolute_path = re.sub(r'\\[^\\]+\.png$', '\\\\unknown.png', absolute_path) return super(BaseStaticFileHandler, self).validate_absolute_path(root, absolute_path) @@ -189,7 +193,49 @@ class BaseStaticFileHandler(StaticFileHandler): self.set_header('X-Frame-Options', 'SAMEORIGIN') -class RouteHandler(LegacyBaseHandler): +class RouteHandler(RequestHandler): + + executor = SgThreadPoolExecutor(thread_name_prefix='WEBSERVER', max_workers=min(32, (cpu_count() or 1) + 4)) + + def redirect(self, url, permanent=False, status=None): + """Send a redirect to the given (optionally relative) URL. + + ----->>>>> NOTE: Removed self.finish <<<<<----- + + If the ``status`` argument is specified, that value is used as the + HTTP status code; otherwise either 301 (permanent) or 302 + (temporary) is chosen based on the ``permanent`` argument. + The default is 302 (temporary). 
+ """ + if not url.startswith(sickgear.WEB_ROOT): + url = sickgear.WEB_ROOT + url + + # noinspection PyUnresolvedReferences + if self._headers_written: + raise Exception('Cannot redirect after headers have been written') + if status is None: + status = 301 if permanent else 302 + else: + assert isinstance(status, int) + assert 300 <= status <= 399 + self.set_status(status) + self.set_header('Location', urljoin(utf8(self.request.uri), utf8(url))) + + def write_error(self, status_code, **kwargs): + body = '' + try: + if self.request.body: + body = '\nRequest body: %s' % decode_str(self.request.body) + except (BaseException, Exception): + pass + logger.warning(f'Sent {status_code} error response to a `{self.request.method}`' + f' request for `{self.request.path}` with headers:\n{self.request.headers}{body}') + # suppress traceback by removing 'exc_info' kwarg + if 'exc_info' in kwargs: + logger.debug('Gracefully handled exception text:\n%s' % traceback.format_exception(*kwargs["exc_info"])) + del kwargs['exc_info'] + return super(RouteHandler, self).write_error(status_code, **kwargs) + def data_received(self, *args): pass @@ -200,9 +246,7 @@ class RouteHandler(LegacyBaseHandler): return [self.decode_data(d) for d in data] if not isinstance(data, string_types): return data - if not PY2: - return data.encode('latin1').decode('utf-8') - return data.decode('utf-8') + return data.encode('latin1').decode('utf-8') @gen.coroutine def route_method(self, route, use_404=False, limit_route=None, xsrf_filter=True): @@ -242,7 +286,7 @@ class RouteHandler(LegacyBaseHandler): # no filtering for legacy and routes that depend on *args and **kwargs result = yield self.async_call(method, request_kwargs) # method(**request_kwargs) else: - filter_kwargs = dict(filter_iter(lambda kv: kv[0] in method_args, iteritems(request_kwargs))) + filter_kwargs = dict(filter(lambda kv: kv[0] in method_args, iteritems(request_kwargs))) result = yield self.async_call(method, filter_kwargs) # method(**filter_kwargs) self.finish(result) @@ -251,8 +295,6 @@ class RouteHandler(LegacyBaseHandler): try: return function(**kw) except (BaseException, Exception) as e: - if PY2: - raise Exception(traceback.format_exc().replace('\n', '
')) raise e def page_not_found(self): @@ -277,14 +319,14 @@ class BaseHandler(RouteHandler): def get_current_user(self): if sickgear.WEB_USERNAME or sickgear.WEB_PASSWORD: - return self.get_secure_cookie('sickgear-session-%s' % helpers.md5_for_text(sickgear.WEB_PORT)) + return self.get_signed_cookie('sickgear-session-%s' % helpers.md5_for_text(sickgear.WEB_PORT)) return True def get_image(self, image): - if ek.ek(os.path.isfile, image): + if os.path.isfile(image): mime_type, encoding = MimeTypes().guess_type(image) self.set_header('Content-Type', mime_type) - with ek.ek(open, image, 'rb') as img: + with open(image, 'rb') as img: return img.read() def show_poster(self, tvid_prodid=None, which=None, api=None): @@ -313,22 +355,22 @@ class BaseHandler(RouteHandler): elif 'fanart' == which[0:6]: image_file_name = [cache_obj.fanart_path( *tvid_prodid_obj.tuple + - ('%s' % (re.sub(r'.*?fanart_(\d+(?:\.\w{1,20})?\.\w{5,8}).*', r'\1.', which, 0, re.I)),))] + ('%s' % (re.sub(r'.*?fanart_(\d+(?:\.\w{1,20})?\.\w{5,8}).*', r'\1.', which, 0, re.I)),))] for cur_name in image_file_name: - if ek.ek(os.path.isfile, cur_name): + if os.path.isfile(cur_name): static_image_path = cur_name break if api: - used_file = ek.ek(os.path.basename, static_image_path) + used_file = os.path.basename(static_image_path) if static_image_path.startswith('/images'): used_file = 'default' - static_image_path = ek.ek(os.path.join, sickgear.PROG_DIR, 'gui', 'slick', static_image_path[1:]) + static_image_path = os.path.join(sickgear.PROG_DIR, 'gui', 'slick', static_image_path[1:]) mime_type, encoding = MimeTypes().guess_type(static_image_path) self.set_header('Content-Type', mime_type) self.set_header('X-Filename', used_file) - with ek.ek(open, static_image_path, 'rb') as img: + with open(static_image_path, 'rb') as img: return img.read() else: static_image_path = os.path.normpath(static_image_path.replace(sickgear.CACHE_DIR, '/cache')) @@ -358,7 +400,7 @@ class LoginHandler(BaseHandler): httponly=True) if sickgear.ENABLE_HTTPS: params.update(dict(secure=True)) - self.set_secure_cookie('sickgear-session-%s' % helpers.md5_for_text(sickgear.WEB_PORT), + self.set_signed_cookie('sickgear-session-%s' % helpers.md5_for_text(sickgear.WEB_PORT), sickgear.COOKIE_SECRET, **params) self.redirect(self.get_argument('next', '/home/')) else: @@ -389,7 +431,7 @@ class CalendarHandler(BaseHandler): Works with iCloud, Google Calendar and Outlook. 
Provides a subscribeable URL for iCal subscriptions """ - logger.log(u'Receiving iCal request from %s' % self.request.remote_ip) + logger.log(f'Receiving iCal request from {self.request.remote_ip}') # Limit dates past_date = (datetime.date.today() + datetime.timedelta(weeks=-52)).toordinal() @@ -429,21 +471,17 @@ class CalendarHandler(BaseHandler): minutes=helpers.try_int(show['runtime'], 60)) # Create event for episode - ical += 'BEGIN:VEVENT%s' % crlf \ - + 'DTSTART:%sT%sZ%s' % (air_date_time.strftime('%Y%m%d'), - air_date_time.strftime('%H%M%S'), crlf) \ - + 'DTEND:%sT%sZ%s' % (air_date_time_end.strftime('%Y%m%d'), - air_date_time_end.strftime('%H%M%S'), crlf) \ - + u'SUMMARY:%s - %sx%s - %s%s' % (show['show_name'], episode['season'], episode['episode'], - episode['name'], crlf) \ - + u'UID:%s-%s-%s-E%sS%s%s' % (appname, datetime.date.today().isoformat(), - show['show_name'].replace(' ', '-'), - episode['episode'], episode['season'], crlf) \ - + u'DESCRIPTION:%s on %s' % ((show['airs'] or '(Unknown airs)'), - (show['network'] or 'Unknown network')) \ - + ('' if not episode['description'] - else u'%s%s' % (nl, episode['description'].splitlines()[0])) \ - + '%sEND:VEVENT%s' % (crlf, crlf) + desc = '' if not episode['description'] else f'{nl}{episode["description"].splitlines()[0]}' + ical += (f'BEGIN:VEVENT{crlf}' + f'DTSTART:{air_date_time.strftime("%Y%m%d")}T{air_date_time.strftime("%H%M%S")}Z{crlf}' + f'DTEND:{air_date_time_end.strftime("%Y%m%d")}T{air_date_time_end.strftime("%H%M%S")}Z{crlf}' + f'SUMMARY:{show["show_name"]} - {episode["season"]}x{episode["episode"]}' + f' - {episode["name"]}{crlf}' + f'UID:{appname}-{datetime.date.today().isoformat()}-{show["show_name"].replace(" ", "-")}' + f'-E{episode["episode"]}S{episode["season"]}{crlf}' + f'DESCRIPTION:{(show["airs"] or "(Unknown airs)")} on {(show["network"] or "Unknown network")}' + f'{desc}{crlf}' + f'END:VEVENT{crlf}') # Ending the iCal return ical + 'END:VCALENDAR' @@ -456,7 +494,7 @@ class RepoHandler(BaseStaticFileHandler): kodi_is_legacy = None def parse_url_path(self, url_path): - logger.log('Kodi req... get(path): %s' % url_path, logger.DEBUG) + logger.debug('Kodi req... get(path): %s' % url_path) return super(RepoHandler, self).parse_url_path(url_path) def set_extra_headers(self, *args, **kwargs): @@ -471,38 +509,38 @@ class RepoHandler(BaseStaticFileHandler): super(RepoHandler, self).initialize(*args, **kwargs) - logger.log('Kodi req... initialize(path): %s' % kwargs['path'], logger.DEBUG) - cache_client = ek.ek(os.path.join, sickgear.CACHE_DIR, 'clients') - cache_client_kodi = ek.ek(os.path.join, cache_client, 'kodi') - cache_client_kodi_watchedstate = ek.ek(os.path.join, cache_client_kodi, 'service.sickgear.watchedstate.updater') + logger.debug('Kodi req... 
initialize(path): %s' % kwargs['path']) + cache_client = os.path.join(sickgear.CACHE_DIR, 'clients') + cache_client_kodi = os.path.join(cache_client, 'kodi') + cache_client_kodi_watchedstate = os.path.join(cache_client_kodi, 'service.sickgear.watchedstate.updater') - cache_resources = ek.ek(os.path.join, cache_client_kodi_watchedstate, 'resources') - cache_lang = ek.ek(os.path.join, cache_resources, 'language') - cache_other_lang = ek.ek(os.path.join, cache_lang, ('English', 'resource.language.en_gb')[self.kodi_is_legacy]) - ek.ek(os.path.exists, cache_other_lang) and remove_file(cache_other_lang, tree=True) + cache_resources = os.path.join(cache_client_kodi_watchedstate, 'resources') + cache_lang = os.path.join(cache_resources, 'language') + cache_other_lang = os.path.join(cache_lang, ('English', 'resource.language.en_gb')[self.kodi_is_legacy]) + os.path.exists(cache_other_lang) and remove_file(cache_other_lang, tree=True) - cache_lang_sub = ek.ek(os.path.join, cache_lang, ('resource.language.en_gb', 'English')[self.kodi_is_legacy]) + cache_lang_sub = os.path.join(cache_lang, ('resource.language.en_gb', 'English')[self.kodi_is_legacy]) for folder in (cache_client, cache_client_kodi, - ek.ek(os.path.join, cache_client_kodi, 'repository.sickgear'), + os.path.join(cache_client_kodi, 'repository.sickgear'), cache_client_kodi_watchedstate, - ek.ek(os.path.join, cache_resources), + os.path.join(cache_resources), cache_lang, cache_lang_sub, ): - if not ek.ek(os.path.exists, folder): - ek.ek(os.mkdir, folder) + if not os.path.exists(folder): + os.mkdir(folder) - with io.open(ek.ek(os.path.join, cache_client_kodi, 'index.html'), 'w') as fh: + with io.open(os.path.join(cache_client_kodi, 'index.html'), 'w') as fh: fh.write(self.render_kodi_index()) - with io.open(ek.ek(os.path.join, cache_client_kodi, 'repository.sickgear', 'index.html'), 'w') as fh: + with io.open(os.path.join(cache_client_kodi, 'repository.sickgear', 'index.html'), 'w') as fh: fh.write(self.render_kodi_repository_sickgear_index()) - with io.open(ek.ek(os.path.join, cache_client_kodi_watchedstate, 'index.html'), 'w') as fh: + with io.open(os.path.join(cache_client_kodi_watchedstate, 'index.html'), 'w') as fh: fh.write(self.render_kodi_service_sickgear_watchedstate_updater_index()) - with io.open(ek.ek(os.path.join, cache_resources, 'index.html'), 'w') as fh: + with io.open(os.path.join(cache_resources, 'index.html'), 'w') as fh: fh.write(self.render_kodi_service_sickgear_watchedstate_updater_resources_index()) - with io.open(ek.ek(os.path.join, cache_lang, 'index.html'), 'w') as fh: + with io.open(os.path.join(cache_lang, 'index.html'), 'w') as fh: fh.write(self.render_kodi_service_sickgear_watchedstate_updater_resources_language_index()) - with io.open(ek.ek(os.path.join, cache_lang_sub, 'index.html'), 'w') as fh: + with io.open(os.path.join(cache_lang_sub, 'index.html'), 'w') as fh: fh.write(self.render_kodi_service_sickgear_watchedstate_updater_resources_language_english_index()) ''' @@ -511,7 +549,7 @@ class RepoHandler(BaseStaticFileHandler): if repo rendered md5 changes or flag is true, update the repo addon, where repo version *must* be increased ''' - repo_md5_file = ek.ek(os.path.join, cache_client_kodi, 'addons.xml.md5') + repo_md5_file = os.path.join(cache_client_kodi, 'addons.xml.md5') saved_md5 = None try: with io.open(repo_md5_file, 'r', encoding='utf8') as fh: @@ -520,18 +558,18 @@ class RepoHandler(BaseStaticFileHandler): pass rendered_md5 = self.render_kodi_repo_addons_xml_md5() if saved_md5 != rendered_md5: - 
with io.open(ek.ek(os.path.join, cache_client_kodi, 'repository.sickgear', 'addon.xml'), 'w') as fh: + with io.open(os.path.join(cache_client_kodi, 'repository.sickgear', 'addon.xml'), 'w') as fh: fh.write(self.render_kodi_repo_addon_xml()) - with io.open(ek.ek(os.path.join, cache_client_kodi_watchedstate, 'addon.xml'), 'w') as fh: + with io.open(os.path.join(cache_client_kodi_watchedstate, 'addon.xml'), 'w') as fh: fh.write(self.get_watchedstate_updater_addon_xml()) - with io.open(ek.ek(os.path.join, cache_client_kodi, 'addons.xml'), 'w') as fh: + with io.open(os.path.join(cache_client_kodi, 'addons.xml'), 'w') as fh: fh.write(self.render_kodi_repo_addons_xml()) - with io.open(ek.ek(os.path.join, cache_client_kodi, 'addons.xml.md5'), 'w') as fh: + with io.open(os.path.join(cache_client_kodi, 'addons.xml.md5'), 'w') as fh: fh.write(rendered_md5) def save_zip(name, version, zip_path, zip_method): zip_name = '%s-%s.zip' % (name, version) - zip_file = ek.ek(os.path.join, zip_path, zip_name) + zip_file = os.path.join(zip_path, zip_name) for direntry in helpers.scantree(zip_path, ['resources'], [r'\.(?:md5|zip)$'], filter_kind=False): remove_file_perm(direntry.path) zip_data = zip_method() @@ -539,11 +577,11 @@ class RepoHandler(BaseStaticFileHandler): zh.write(zip_data) # Force a UNIX line ending, like the md5sum utility. - with io.open(ek.ek(os.path.join, zip_path, '%s.md5' % zip_name), 'w', newline='\n') as zh: - zh.write(u'%s *%s\n' % (self.md5ify(zip_data), zip_name)) + with io.open(os.path.join(zip_path, '%s.md5' % zip_name), 'w', newline='\n') as zh: + zh.write(f'{self.md5ify(zip_data)} *{zip_name}\n') aid, ver = self.repo_sickgear_details() - save_zip(aid, ver, ek.ek(os.path.join, cache_client_kodi, 'repository.sickgear'), + save_zip(aid, ver, os.path.join(cache_client_kodi, 'repository.sickgear'), self.kodi_repository_sickgear_zip) aid, ver = self.addon_watchedstate_details() @@ -566,8 +604,8 @@ class RepoHandler(BaseStaticFileHandler): (cache_lang_sub, 'strings.xml') ))[self.kodi_is_legacy], ): - helpers.copy_file(ek.ek( - os.path.join, *(sickgear.PROG_DIR, 'sickgear', 'clients', 'kodi') + src), ek.ek(os.path.join, *dst)) + helpers.copy_file( + os.path.join(*(sickgear.PROG_DIR, 'sickgear', 'clients', 'kodi') + src), os.path.join(*dst)) def get_content_type(self): if '.md5' == self.absolute_path[-4:] or '.po' == self.absolute_path[-3:]: @@ -583,7 +621,7 @@ class RepoHandler(BaseStaticFileHandler): t.addon = '%s-%s.zip' % self.addon_watchedstate_details() try: - with open(ek.ek(os.path.join, sickgear.PROG_DIR, 'CHANGES.md')) as fh: + with open(os.path.join(sickgear.PROG_DIR, 'CHANGES.md')) as fh: t.version = re.findall(r'###[^0-9x]+([0-9]+\.[0-9]+\.[0-9x]+)', fh.readline())[0] except (BaseException, Exception): t.version = '' @@ -624,7 +662,7 @@ class RepoHandler(BaseStaticFileHandler): return self.index([('resource.language.en_gb/', 'English/')[self.kodi_is_legacy]]) def render_kodi_service_sickgear_watchedstate_updater_resources_language_english_index(self): - return self.index([('strings.po', 'strings.xml')[self.kodi_is_legacy]]) + return self.index([('strings.po', 'strings.xml')[self.kodi_is_legacy]]) def repo_sickgear_details(self): return re.findall(r'(?si)addon\sid="(repository\.[^"]+)[^>]+version="([^"]+)', @@ -636,15 +674,15 @@ class RepoHandler(BaseStaticFileHandler): def get_watchedstate_updater_addon_xml(self): mem_key = 'kodi_xml' - if int(timestamp_near(datetime.datetime.now())) < sickgear.MEMCACHE.get(mem_key, {}).get('last_update', 0): + if SGDatetime.timestamp_near() < 
sickgear.MEMCACHE.get(mem_key, {}).get('last_update', 0): return sickgear.MEMCACHE.get(mem_key).get('data') filename = 'addon%s.xml' % self.kodi_include - with io.open(ek.ek(os.path.join, sickgear.PROG_DIR, 'sickgear', 'clients', - 'kodi', 'service.sickgear.watchedstate.updater', filename), 'r', encoding='utf8') as fh: + with io.open(os.path.join(sickgear.PROG_DIR, 'sickgear', 'clients', 'kodi', + 'service.sickgear.watchedstate.updater', filename), 'r', encoding='utf8') as fh: xml = fh.read().strip() % dict(ADDON_VERSION=self.get_addon_version(self.kodi_include)) - sickgear.MEMCACHE[mem_key] = dict(last_update=30 + int(timestamp_near(datetime.datetime.now())), data=xml) + sickgear.MEMCACHE[mem_key] = dict(last_update=30 + SGDatetime.timestamp_near(), data=xml) return xml @staticmethod @@ -658,15 +696,15 @@ class RepoHandler(BaseStaticFileHandler): Must use an arg here instead of `self` due to static call use case from external class """ mem_key = 'kodi_ver' - if int(timestamp_near(datetime.datetime.now())) < sickgear.MEMCACHE.get(mem_key, {}).get('last_update', 0): + if SGDatetime.timestamp_near() < sickgear.MEMCACHE.get(mem_key, {}).get('last_update', 0): return sickgear.MEMCACHE.get(mem_key).get('data') filename = 'service%s.py' % kodi_include - with io.open(ek.ek(os.path.join, sickgear.PROG_DIR, 'sickgear', 'clients', - 'kodi', 'service.sickgear.watchedstate.updater', filename), 'r', encoding='utf8') as fh: + with io.open(os.path.join(sickgear.PROG_DIR, 'sickgear', 'clients', 'kodi', + 'service.sickgear.watchedstate.updater', filename), 'r', encoding='utf8') as fh: version = re.findall(r'ADDON_VERSION\s*?=\s*?\'([^\']+)', fh.read())[0] - sickgear.MEMCACHE[mem_key] = dict(last_update=30 + int(timestamp_near(datetime.datetime.now())), data=version) + sickgear.MEMCACHE[mem_key] = dict(last_update=30 + SGDatetime.timestamp_near(), data=version) return version def render_kodi_repo_addon_xml(self): @@ -696,7 +734,7 @@ class RepoHandler(BaseStaticFileHandler): def md5ify(string): if not isinstance(string, binary_type): string = string.encode('utf-8') - return u'%s' % hashlib.new('md5', string).hexdigest() + return f'{hashlib.new("md5", string).hexdigest()}' def kodi_repository_sickgear_zip(self): bfr = io.BytesIO() @@ -705,12 +743,12 @@ class RepoHandler(BaseStaticFileHandler): with zipfile.ZipFile(bfr, 'w') as zh: zh.writestr('repository.sickgear/addon.xml', self.render_kodi_repo_addon_xml(), zipfile.ZIP_DEFLATED) - with io.open(ek.ek(os.path.join, sickgear.PROG_DIR, - 'sickgear', 'clients', 'kodi', 'repository.sickgear', 'icon.png'), 'rb') as fh: + with io.open(os.path.join(sickgear.PROG_DIR, 'sickgear', 'clients', 'kodi', + 'repository.sickgear', 'icon.png'), 'rb') as fh: infile = fh.read() zh.writestr('repository.sickgear/icon.png', infile, zipfile.ZIP_DEFLATED) except OSError as e: - logger.log('Unable to zip: %r / %s' % (e, ex(e)), logger.WARNING) + logger.warning('Unable to zip: %r / %s' % (e, ex(e))) zip_data = bfr.getvalue() bfr.close() @@ -719,12 +757,12 @@ class RepoHandler(BaseStaticFileHandler): def kodi_service_sickgear_watchedstate_updater_zip(self): bfr = io.BytesIO() - basepath = ek.ek(os.path.join, sickgear.PROG_DIR, 'sickgear', 'clients', 'kodi') + basepath = os.path.join(sickgear.PROG_DIR, 'sickgear', 'clients', 'kodi') - zip_path = ek.ek(os.path.join, basepath, 'service.sickgear.watchedstate.updater') - devenv_src = ek.ek(os.path.join, sickgear.PROG_DIR, 'tests', '_devenv.py') - devenv_dst = ek.ek(os.path.join, zip_path, '_devenv.py') - if sickgear.ENV.get('DEVENV') and 
ek.ek(os.path.exists, devenv_src): + zip_path = os.path.join(basepath, 'service.sickgear.watchedstate.updater') + devenv_src = os.path.join(sickgear.PROG_DIR, 'tests', '_devenv.py') + devenv_dst = os.path.join(zip_path, '_devenv.py') + if sickgear.ENV.get('DEVENV') and os.path.exists(devenv_src): helpers.copy_file(devenv_src, devenv_dst) else: helpers.remove_file_perm(devenv_dst) @@ -746,10 +784,10 @@ class RepoHandler(BaseStaticFileHandler): infile = fh.read() with zipfile.ZipFile(bfr, 'a') as zh: - zh.writestr(ek.ek(os.path.relpath, direntry.path.replace(self.kodi_legacy, ''), basepath), + zh.writestr(os.path.relpath(direntry.path.replace(self.kodi_legacy, ''), basepath), infile, zipfile.ZIP_DEFLATED) except OSError as e: - logger.log('Unable to zip %s: %r / %s' % (direntry.path, e, ex(e)), logger.WARNING) + logger.warning('Unable to zip %s: %r / %s' % (direntry.path, e, ex(e))) zip_data = bfr.getvalue() bfr.close() @@ -881,16 +919,17 @@ class LogfileHandler(BaseHandler): super(LogfileHandler, self).__init__(application, request, **kwargs) self.lock = threading.Lock() + # noinspection PyUnusedLocal @authenticated @gen.coroutine - def get(self, path, *args, **kwargs): + def get(self, *args, **kwargs): logfile_name = logger.current_log_file() try: self.set_header('Content-Type', 'text/html; charset=utf-8') self.set_header('Content-Description', 'Logfile Download') self.set_header('Content-Disposition', 'attachment; filename=sickgear.log') - # self.set_header('Content-Length', ek.ek(os.path.getsize, logfile_name)) + # self.set_header('Content-Length', os.path.getsize(logfile_name)) auths = sickgear.GenericProvider.dedupe_auths(True) rxc_auths = re.compile('(?i)%s' % '|'.join([(re.escape(_a)) for _a in auths])) replacements = dict([(_a, starify(_a)) for _a in auths]) @@ -1133,7 +1172,7 @@ class MainHandler(WebHandler): # make a dict out of the sql results sql_result = [dict(row) for row in sql_result - if Quality.splitCompositeStatus(helpers.try_int(row['status']))[0] not in + if Quality.split_composite_status(helpers.try_int(row['status']))[0] not in SNATCHED_ANY + [DOWNLOADED, ARCHIVED, IGNORED, SKIPPED]] # multi dimension sort @@ -1184,15 +1223,15 @@ class MainHandler(WebHandler): pass if imdb_id: sql_result[index]['imdb_url'] = sickgear.indexers.indexer_config.tvinfo_config[ - sickgear.indexers.indexer_config.TVINFO_IMDB][ - 'show_url'] % imdb_id + sickgear.indexers.indexer_config.TVINFO_IMDB][ + 'show_url'] % imdb_id else: sql_result[index]['imdb_url'] = '' if tvid_prodid in fanarts: continue - for img in ek.ek(glob.glob, cache_obj.fanart_path(*tvid_prodid_obj.tuple).replace('fanart.jpg', '*')) or []: + for img in glob.glob(cache_obj.fanart_path(*tvid_prodid_obj.tuple).replace('fanart.jpg', '*')) or []: match = re.search(r'(\d+(?:\.\w*)?\.\w{5,8})\.fanart\.', img, re.I) if not match: continue @@ -1276,8 +1315,8 @@ class MainHandler(WebHandler): elif 'backart' in kwargs: sickgear.EPISODE_VIEW_BACKGROUND = backart sickgear.FANART_PANEL = 'highlight-off' == sickgear.FANART_PANEL and 'highlight-off' or \ - 'highlight2' == sickgear.FANART_PANEL and 'highlight1' or \ - 'highlight1' == sickgear.FANART_PANEL and 'highlight' or 'highlight-off' + 'highlight2' == sickgear.FANART_PANEL and 'highlight1' or \ + 'highlight1' == sickgear.FANART_PANEL and 'highlight' or 'highlight-off' elif 'viewmode' in kwargs: sickgear.EPISODE_VIEW_VIEWMODE = viewmode @@ -1288,7 +1327,7 @@ class MainHandler(WebHandler): now = datetime.datetime.now() events = [ - ('recent', sickgear.recent_search_scheduler.timeLeft), + 
('recent', sickgear.recent_search_scheduler.time_left), ('backlog', sickgear.backlog_search_scheduler.next_backlog_timeleft), ] @@ -1395,7 +1434,7 @@ r.close() if data: my_db = db.DBConnection(row_type='dict') - media_paths = map_list(lambda arg: ek.ek(os.path.basename, arg[1]['path_file']), iteritems(data)) + media_paths = list(map(lambda arg: os.path.basename(arg[1]['path_file']), iteritems(data))) def chunks(lines, n): for c in range(0, len(lines), n): @@ -1412,21 +1451,21 @@ r.close() cl = [] ep_results = {} - map_consume(lambda r: ep_results.update({'%s' % ek.ek(os.path.basename, r['location']).lower(): dict( + map_consume(lambda r: ep_results.update({'%s' % os.path.basename(r['location']).lower(): dict( episode_id=r['episode_id'], status=r['status'], location=r['location'], file_size=r['file_size'])}), sql_result) for (k, v) in iteritems(data): - bname = (ek.ek(os.path.basename, v.get('path_file')) or '').lower() + bname = (os.path.basename(v.get('path_file')) or '').lower() if not bname: msg = 'Missing media file name provided' data[k] = msg - logger.log('Update watched state skipped an item: %s' % msg, logger.WARNING) + logger.warning('Update watched state skipped an item: %s' % msg) continue if bname in ep_results: - date_watched = now = int(timestamp_near(datetime.datetime.now())) + date_watched = now = SGDatetime.timestamp_near() if 1500000000 < date_watched: date_watched = helpers.try_int(float(v.get('date_watched'))) @@ -1450,7 +1489,7 @@ r.close() if as_json: if not data: data = dict(error='Request made to SickGear with invalid payload') - logger.log('Update watched state failed: %s' % data['error'], logger.WARNING) + logger.warning('Update watched state failed: %s' % data['error']) return json_dumps(data) @@ -1555,13 +1594,13 @@ class Home(MainHandler): index = 0 if 'custom' == sickgear.SHOWLIST_TAGVIEW: for name in sickgear.SHOW_TAGS: - results = filter_list(lambda so: so.tag == name, sickgear.showList) + results = list(filter(lambda so: so.tag == name, sickgear.showList)) if results: t.showlists.append(['container%s' % index, name, results]) index += 1 elif 'anime' == sickgear.SHOWLIST_TAGVIEW: - show_results = filter_list(lambda so: not so.anime, sickgear.showList) - anime_results = filter_list(lambda so: so.anime, sickgear.showList) + show_results = list(filter(lambda so: not so.anime, sickgear.showList)) + anime_results = list(filter(lambda so: so.anime, sickgear.showList)) if show_results: t.showlists.append(['container%s' % index, 'Show List', show_results]) index += 1 @@ -1581,16 +1620,16 @@ class Home(MainHandler): if 'simple' != sickgear.HOME_LAYOUT: t.network_images = {} networks = {} - images_path = ek.ek(os.path.join, sickgear.PROG_DIR, 'gui', 'slick', 'images', 'network') + images_path = os.path.join(sickgear.PROG_DIR, 'gui', 'slick', 'images', 'network') for cur_show_obj in sickgear.showList: network_name = 'nonetwork' if None is cur_show_obj.network \ - else cur_show_obj.network.replace(u'\u00C9', 'e').lower() + else cur_show_obj.network.replace('\u00C9', 'e').lower() if network_name not in networks: - filename = u'%s.png' % network_name - if not ek.ek(os.path.isfile, ek.ek(os.path.join, images_path, filename)): - filename = u'%s.png' % re.sub(r'(?m)(.*)\s+\(\w{2}\)$', r'\1', network_name) - if not ek.ek(os.path.isfile, ek.ek(os.path.join, images_path, filename)): - filename = u'nonetwork.png' + filename = f'{network_name}.png' + if not os.path.isfile(os.path.join(images_path, filename)): + filename = '%s.png' % re.sub(r'(?m)(.*)\s+\(\w{2}\)$', r'\1', 
network_name) + if not os.path.isfile(os.path.join(images_path, filename)): + filename = 'nonetwork.png' networks.setdefault(network_name, filename) t.network_images.setdefault(cur_show_obj.tvid_prodid, networks[network_name]) @@ -1646,10 +1685,10 @@ class Home(MainHandler): authed, auth_msg = sab.test_authentication(host, username, password, apikey) if authed: - return u'Success. Connected %s authentication' % \ - ('using %s' % access_msg, 'with no')['None' == auth_msg.lower()] - return u'Authentication failed. %s' % auth_msg - return u'Unable to connect to host' + return f'Success. Connected' \ + f' {(f"using {access_msg}", "with no")["None" == auth_msg.lower()]} authentication' + return f'Authentication failed. {auth_msg}' + return 'Unable to connect to host' def test_nzbget(self, host=None, use_https=None, username=None, password=None): self.set_header('Cache-Control', 'max-age=0,no-cache,no-store') @@ -1906,7 +1945,7 @@ class Home(MainHandler): ' AND notify_list != ""', [TVidProdid.glue]) notify_lists = {} - for r in filter_iter(lambda x: x['notify_list'].strip(), rows): + for r in filter(lambda x: x['notify_list'].strip(), rows): # noinspection PyTypeChecker notify_lists[r['tvid_prodid']] = r['notify_list'] @@ -1978,10 +2017,10 @@ class Home(MainHandler): def check_update(self): # force a check to see if there is a new version if sickgear.update_software_scheduler.action.check_for_new_version(force=True): - logger.log(u'Forced version check found results') + logger.log('Forced version check found results') if sickgear.update_packages_scheduler.action.check_for_new_version(force=True): - logger.log(u'Forced package version check found results') + logger.log('Forced package version check found results') self.redirect('/home/') @@ -2002,7 +2041,7 @@ class Home(MainHandler): if not line.strip(): continue if line.startswith(' '): - change_parts = re.findall(r'^[\W]+(.*)$', line) + change_parts = re.findall(r'^\W+(.*)$', line) change['text'] += change_parts and (' %s' % change_parts[0].strip()) or '' else: if change: @@ -2014,11 +2053,11 @@ class Home(MainHandler): elif not max_rel: break elif line.startswith('### '): - rel_data = re.findall(r'(?im)^###\W*([^\s]+)\W\(([^)]+)\)', line) + rel_data = re.findall(r'(?im)^###\W*(\S+)\W\(([^)]+)\)', line) rel_data and output.append({'type': 'rel', 'ver': rel_data[0][0], 'date': rel_data[0][1]}) max_rel -= 1 elif line.startswith('# '): - max_data = re.findall(r'^#\W*([\d]+)\W*$', line) + max_data = re.findall(r'^#\W*(\d+)\W*$', line) max_rel = max_data and helpers.try_int(max_data[0], None) or 5 if change: output.append(change) @@ -2077,6 +2116,7 @@ class Home(MainHandler): else: self.redirect('/home/') + # noinspection PyUnusedLocal def season_render(self, tvid_prodid=None, season=None, **kwargs): response = {'success': False} @@ -2141,25 +2181,25 @@ class Home(MainHandler): show_message = [] - if sickgear.show_queue_scheduler.action.isBeingAdded(show_obj): + if sickgear.show_queue_scheduler.action.is_being_added(show_obj): show_message = ['Downloading this show, the information below is incomplete'] - elif sickgear.show_queue_scheduler.action.isBeingUpdated(show_obj): + elif sickgear.show_queue_scheduler.action.is_being_updated(show_obj): show_message = ['Updating information for this show'] - elif sickgear.show_queue_scheduler.action.isBeingRefreshed(show_obj): + elif sickgear.show_queue_scheduler.action.is_being_refreshed(show_obj): show_message = ['Refreshing episodes from disk for this show'] - elif 
sickgear.show_queue_scheduler.action.isBeingSubtitled(show_obj): + elif sickgear.show_queue_scheduler.action.is_being_subtitled(show_obj): show_message = ['Downloading subtitles for this show'] - elif sickgear.show_queue_scheduler.action.isInRefreshQueue(show_obj): + elif sickgear.show_queue_scheduler.action.is_in_refresh_queue(show_obj): show_message = ['Refresh queued for this show'] - elif sickgear.show_queue_scheduler.action.isInUpdateQueue(show_obj): + elif sickgear.show_queue_scheduler.action.is_in_update_queue(show_obj): show_message = ['Update queued for this show'] - elif sickgear.show_queue_scheduler.action.isInSubtitleQueue(show_obj): + elif sickgear.show_queue_scheduler.action.is_in_subtitle_queue(show_obj): show_message = ['Subtitle download queued for this show'] if sickgear.show_queue_scheduler.action.is_show_being_switched(show_obj): @@ -2185,8 +2225,8 @@ class Home(MainHandler): show_message = '.
'.join(show_message) t.force_update = 'home/update-show?tvid_prodid=%s&force=1&web=1' % tvid_prodid - if not sickgear.show_queue_scheduler.action.isBeingAdded(show_obj): - if not sickgear.show_queue_scheduler.action.isBeingUpdated(show_obj): + if not sickgear.show_queue_scheduler.action.is_being_added(show_obj): + if not sickgear.show_queue_scheduler.action.is_being_updated(show_obj): t.submenu.append( {'title': 'Remove', 'path': 'home/delete-show?tvid_prodid=%s' % tvid_prodid, 'confirm': True}) @@ -2211,7 +2251,7 @@ class Home(MainHandler): t.submenu.append( {'title': 'Media Rename', 'path': 'home/rename-media?tvid_prodid=%s' % tvid_prodid}) - if sickgear.USE_SUBTITLES and not sickgear.show_queue_scheduler.action.isBeingSubtitled( + if sickgear.USE_SUBTITLES and not sickgear.show_queue_scheduler.action.is_being_subtitled( show_obj) and show_obj.subtitles: t.submenu.append( {'title': 'Download Subtitles', @@ -2267,7 +2307,7 @@ class Home(MainHandler): del (ep_counts['totals'][0]) ep_counts['eps_all'] = sum(itervalues(ep_counts['totals'])) - ep_counts['eps_most'] = max(list_values(ep_counts['totals']) + [0]) + ep_counts['eps_most'] = max(list(ep_counts['totals'].values()) + [0]) all_seasons = sorted(iterkeys(ep_counts['totals']), reverse=True) t.lowest_season, t.highest_season = all_seasons and (all_seasons[-1], all_seasons[0]) or (0, 0) @@ -2315,7 +2355,7 @@ class Home(MainHandler): status_overview = show_obj.get_overview(row['status']) if status_overview: ep_counts[status_overview] += row['cnt'] - if ARCHIVED == Quality.splitCompositeStatus(row['status'])[0]: + if ARCHIVED == Quality.split_composite_status(row['status'])[0]: ep_counts['archived'].setdefault(row['season'], 0) ep_counts['archived'][row['season']] = row['cnt'] + ep_counts['archived'].get(row['season'], 0) else: @@ -2355,8 +2395,7 @@ class Home(MainHandler): t.fanart = [] cache_obj = image_cache.ImageCache() - for img in ek.ek(glob.glob, - cache_obj.fanart_path(show_obj.tvid, show_obj.prodid).replace('fanart.jpg', '*')) or []: + for img in glob.glob(cache_obj.fanart_path(show_obj.tvid, show_obj.prodid).replace('fanart.jpg', '*')) or []: match = re.search(r'(\d+(?:\.(\w*?(\d*)))?\.\w{5,8})\.fanart\.', img, re.I) if match and match.group(1): t.fanart += [(match.group(1), @@ -2383,7 +2422,7 @@ class Home(MainHandler): t.clean_show_name = quote_plus(sickgear.indexermapper.clean_show_name(show_obj.name)) - t.min_initial = Quality.get_quality_ui(min(Quality.splitQuality(show_obj.quality)[0])) + t.min_initial = Quality.get_quality_ui(min(Quality.split_quality(show_obj.quality)[0])) t.show_obj.exceptions = scene_exceptions.get_scene_exceptions(show_obj.tvid, show_obj.prodid) # noinspection PyUnresolvedReferences t.all_scene_exceptions = show_obj.exceptions # normally Unresolved as not a class attribute, force set above @@ -2429,7 +2468,7 @@ class Home(MainHandler): sorted_show_list[i].unique_name = '%s (%s)' % (sorted_show_list[i].name, start_year) dups[sorted_show_list[i].unique_name] = i - name_cache.buildNameCache() + name_cache.build_name_cache() @staticmethod def sorted_show_lists(): @@ -2439,7 +2478,7 @@ class Home(MainHandler): if 'custom' == sickgear.SHOWLIST_TAGVIEW: sorted_show_lists = [] for tag in sickgear.SHOW_TAGS: - results = filter_list(lambda _so: _so.tag == tag, sickgear.showList) + results = list(filter(lambda _so: _so.tag == tag, sickgear.showList)) if results: sorted_show_lists.append([tag, sorted(results, key=lambda x: titler(x.unique_name))]) # handle orphaned shows @@ -2544,10 +2583,10 @@ class 
Home(MainHandler): show_obj = helpers.find_show_by_id({tvid: prodid}, no_mapped_ids=True) try: sickgear.show_queue_scheduler.action.switch_show(show_obj=show_obj, new_tvid=m_tvid, - new_prodid=m_prodid, force_id=True, - set_pause=set_pause, mark_wanted=mark_wanted) + new_prodid=m_prodid, force_id=True, + set_pause=set_pause, mark_wanted=mark_wanted) except (BaseException, Exception) as e: - logger.log('Could not add show %s to switch queue: %s' % (show_obj.tvid_prodid, ex(e)), logger.WARNING) + logger.warning('Could not add show %s to switch queue: %s' % (show_obj.tvid_prodid, ex(e))) ui.notifications.message('TV info source switch', 'Queued switch of tv info source') return {'Success': 'Switched to new TV info source'} @@ -2584,12 +2623,12 @@ class Home(MainHandler): for k, v in iteritems(new_ids): if None is v.get('id') or None is v.get('status'): continue - if (show_obj.ids.get(k, {'id': 0}).get('id') != v.get('id') or - (MapStatus.NO_AUTOMATIC_CHANGE == v.get('status') and - MapStatus.NO_AUTOMATIC_CHANGE != show_obj.ids.get( - k, {'status': MapStatus.NONE}).get('status')) or - (MapStatus.NO_AUTOMATIC_CHANGE != v.get('status') and - MapStatus.NO_AUTOMATIC_CHANGE == show_obj.ids.get( + if (show_obj.ids.get(k, {'id': 0}).get('id') != v.get('id') + or (MapStatus.NO_AUTOMATIC_CHANGE == v.get('status') + and MapStatus.NO_AUTOMATIC_CHANGE != show_obj.ids.get( + k, {'status': MapStatus.NONE}).get('status')) + or (MapStatus.NO_AUTOMATIC_CHANGE != v.get('status') + and MapStatus.NO_AUTOMATIC_CHANGE == show_obj.ids.get( k, {'status': MapStatus.NONE}).get('status'))): show_obj.ids[k]['id'] = (0, v['id'])[v['id'] >= 0] show_obj.ids[k]['status'] = (MapStatus.NOT_FOUND, v['status'])[v['id'] != 0] @@ -2614,12 +2653,12 @@ class Home(MainHandler): else: msg = 'Main ID unchanged, because show from %s with ID: %s exists in DB.' % \ (sickgear.TVInfoAPI(m_tvid).name, mtvid_prodid) - logger.log(msg, logger.WARNING) + logger.warning(msg) ui.notifications.message(*[s.strip() for s in msg.split(',')]) except MultipleShowObjectsException: msg = 'Main ID unchanged, because show from %s with ID: %s exists in DB.' 
% \ (sickgear.TVInfoAPI(m_tvid).name, m_prodid) - logger.log(msg, logger.WARNING) + logger.warning(msg) ui.notifications.message(*[s.strip() for s in msg.split(',')]) response.update({ @@ -2666,7 +2705,7 @@ class Home(MainHandler): t.fanart = [] cache_obj = image_cache.ImageCache() show_obj = getattr(t, 'show_obj', None) or getattr(t, 'show', None) - for img in ek.ek(glob.glob, cache_obj.fanart_path( + for img in glob.glob(cache_obj.fanart_path( show_obj.tvid, show_obj.prodid).replace('fanart.jpg', '*')) or []: match = re.search(r'(\d+(?:\.(\w*?(\d*)))?\.\w{5,8})\.fanart\.', img, re.I) if match and match.group(1): @@ -2844,14 +2883,14 @@ class Home(MainHandler): errors = [] with show_obj.lock: - show_obj.quality = Quality.combineQualities(map_list(int, any_qualities), map_list(int, best_qualities)) + show_obj.quality = Quality.combine_qualities(list(map(int, any_qualities)), list(map(int, best_qualities))) show_obj.upgrade_once = upgrade_once # reversed for now if bool(show_obj.flatten_folders) != bool(flatten_folders): show_obj.flatten_folders = flatten_folders try: - sickgear.show_queue_scheduler.action.refreshShow(show_obj) + sickgear.show_queue_scheduler.action.refresh_show(show_obj) except exceptions_helper.CantRefreshException as e: errors.append('Unable to refresh this show: ' + ex(e)) @@ -2896,12 +2935,12 @@ class Home(MainHandler): # if we change location clear the db of episodes, change it, write to db, and rescan # noinspection PyProtectedMember - old_path = ek.ek(os.path.normpath, show_obj._location) - new_path = ek.ek(os.path.normpath, location) + old_path = os.path.normpath(show_obj._location) + new_path = os.path.normpath(location) if old_path != new_path: - logger.log(u'%s != %s' % (old_path, new_path), logger.DEBUG) - if not ek.ek(os.path.isdir, new_path) and not sickgear.CREATE_MISSING_SHOW_DIRS: - errors.append(u'New location %s does not exist' % new_path) + logger.debug(f'{old_path} != {new_path}') + if not os.path.isdir(new_path) and not sickgear.CREATE_MISSING_SHOW_DIRS: + errors.append(f'New location {new_path} does not exist') # don't bother if we're going to update anyway elif not do_update: @@ -2909,16 +2948,15 @@ class Home(MainHandler): try: show_obj.location = new_path try: - sickgear.show_queue_scheduler.action.refreshShow(show_obj) + sickgear.show_queue_scheduler.action.refresh_show(show_obj) except exceptions_helper.CantRefreshException as e: errors.append('Unable to refresh this show:' + ex(e)) # grab updated info from TVDB # show_obj.load_episodes_from_tvinfo() # rescan the episodes in the new folder except exceptions_helper.NoNFOException: - errors.append( - u"The folder at %s doesn't contain a tvshow.nfo - " - u"copy your files to that folder before you change the directory in SickGear." 
% new_path) + errors.append(f'The folder at {new_path} doesn"t contain a tvshow.nfo -' + f' copy your files to that folder before you change the directory in SickGear.') # save it to the DB show_obj.save_to_db() @@ -2926,7 +2964,7 @@ class Home(MainHandler): # force the update if do_update: try: - sickgear.show_queue_scheduler.action.updateShow(show_obj, True) + sickgear.show_queue_scheduler.action.update_show(show_obj, True) helpers.cpu_sleep() except exceptions_helper.CantUpdateException: errors.append('Unable to force an update on the show.') @@ -2964,8 +3002,8 @@ class Home(MainHandler): if None is show_obj: return self._generic_message('Error', 'Unable to find the specified show') - if sickgear.show_queue_scheduler.action.isBeingAdded( - show_obj) or sickgear.show_queue_scheduler.action.isBeingUpdated(show_obj): + if sickgear.show_queue_scheduler.action.is_being_added( + show_obj) or sickgear.show_queue_scheduler.action.is_being_updated(show_obj): return self._generic_message("Error", "Shows can't be deleted while they're being added or updated.") # if sickgear.USE_TRAKT and sickgear.TRAKT_SYNC: @@ -3010,7 +3048,7 @@ class Home(MainHandler): # force the update from the DB try: - sickgear.show_queue_scheduler.action.refreshShow(show_obj) + sickgear.show_queue_scheduler.action.refresh_show(show_obj) except exceptions_helper.CantRefreshException as e: ui.notifications.error('Unable to refresh this show.', ex(e)) @@ -3030,7 +3068,7 @@ class Home(MainHandler): # force the update try: - sickgear.show_queue_scheduler.action.updateShow(show_obj, bool(force), bool(web)) + sickgear.show_queue_scheduler.action.update_show(show_obj, bool(force), bool(web)) except exceptions_helper.CantUpdateException as e: ui.notifications.error('Unable to update this show.', ex(e)) @@ -3039,6 +3077,7 @@ class Home(MainHandler): self.redirect('/home/view-show?tvid_prodid=%s' % show_obj.tvid_prodid) + # noinspection PyUnusedLocal def subtitle_show(self, tvid_prodid=None, force=0): if None is tvid_prodid: @@ -3057,6 +3096,7 @@ class Home(MainHandler): self.redirect('/home/view-show?tvid_prodid=%s' % show_obj.tvid_prodid) + # noinspection PyUnusedLocal def update_mb(self, tvid_prodid=None, **kwargs): if notifiers.NotifierFactory().get('EMBY').update_library( @@ -3122,14 +3162,14 @@ class Home(MainHandler): return json_dumps({'result': 'error'}) return self._generic_message('Error', err_msg) - min_initial = min(Quality.splitQuality(show_obj.quality)[0]) + min_initial = min(Quality.split_quality(show_obj.quality)[0]) segments = {} if None is not eps: sql_l = [] for cur_ep in eps.split('|'): - logger.log(u'Attempting to set status on episode %s to %s' % (cur_ep, status), logger.DEBUG) + logger.debug(f'Attempting to set status on episode {cur_ep} to {status}') ep_obj = show_obj.get_episode(*tuple([int(x) for x in cur_ep.split('x')])) @@ -3155,21 +3195,21 @@ class Home(MainHandler): elif status in Quality.DOWNLOADED \ and ep_obj.status not in required + Quality.ARCHIVED + [IGNORED, SKIPPED] \ - and not ek.ek(os.path.isfile, ep_obj.location): + and not os.path.isfile(ep_obj.location): err_msg = 'to downloaded because it\'s not snatched/downloaded/archived' if err_msg: - logger.log('Refusing to change status of %s %s' % (cur_ep, err_msg), logger.ERROR) + logger.error('Refusing to change status of %s %s' % (cur_ep, err_msg)) continue if ARCHIVED == status: if ep_obj.status in Quality.DOWNLOADED or direct: - ep_obj.status = Quality.compositeStatus( - ARCHIVED, (Quality.splitCompositeStatus(ep_obj.status)[1], 
min_initial)[use_default]) + ep_obj.status = Quality.composite_status( + ARCHIVED, (Quality.split_composite_status(ep_obj.status)[1], min_initial)[use_default]) elif DOWNLOADED == status: if ep_obj.status in Quality.ARCHIVED: - ep_obj.status = Quality.compositeStatus( - DOWNLOADED, Quality.splitCompositeStatus(ep_obj.status)[1]) + ep_obj.status = Quality.composite_status( + DOWNLOADED, Quality.split_composite_status(ep_obj.status)[1]) else: ep_obj.status = status @@ -3193,31 +3233,31 @@ class Home(MainHandler): if season not in season_wanted: season_wanted += [season] - season_list += u'
  • Season %s
  • ' % season - logger.log((u'Not adding wanted eps to backlog search for %s season %s because show is paused', - u'Starting backlog search for %s season %s because eps were set to wanted')[ + season_list += f'
  • Season {season}
  • ' + logger.log(('Not adding wanted eps to backlog search for %s season %s because show is paused', + 'Starting backlog search for %s season %s because eps were set to wanted')[ not show_obj.paused] % (show_obj.unique_name, season)) - (title, msg) = (('Not starting backlog', u'Paused show prevented backlog search'), - ('Backlog started', u'Backlog search started'))[not show_obj.paused] + (title, msg) = (('Not starting backlog', 'Paused show prevented backlog search'), + ('Backlog started', 'Backlog search started'))[not show_obj.paused] if segments: ui.notifications.message(title, - u'%s for the following seasons of %s:
      %s
    ' - % (msg, show_obj.unique_name, season_list)) + f'{msg} for the following seasons of {show_obj.unique_name}:
    ' + f'
      {season_list}
    ') else: ui.notifications.message('Not starting backlog', 'No provider has active searching enabled') elif FAILED == status: - msg = u'Retrying search automatically for the following season of %s:
      ' % show_obj.unique_name + msg = f'Retrying search automatically for the following season of {show_obj.unique_name}:
        ' for season, segment in iteritems(segments): # type: int, List[sickgear.tv.TVEpisode] cur_failed_queue_item = search_queue.FailedQueueItem(show_obj, segment) sickgear.search_queue_scheduler.action.add_item(cur_failed_queue_item) msg += '
      • Season %s
      • ' % season - logger.log(u'Retrying search for %s season %s because some eps were set to failed' % - (show_obj.unique_name, season)) + logger.log(f'Retrying search for {show_obj.unique_name} season {season}' + f' because some eps were set to failed') msg += '
      ' @@ -3255,12 +3295,12 @@ class Home(MainHandler): for _cur_ep_obj in cur_ep_obj.related_ep_obj + [cur_ep_obj]: if _cur_ep_obj in ep_obj_rename_list: break - ep_status, ep_qual = Quality.splitCompositeStatus(_cur_ep_obj.status) + ep_status, ep_qual = Quality.split_composite_status(_cur_ep_obj.status) if not ep_qual: continue ep_obj_rename_list.append(cur_ep_obj) else: - ep_status, ep_qual = Quality.splitCompositeStatus(cur_ep_obj.status) + ep_status, ep_qual = Quality.split_composite_status(cur_ep_obj.status) if not ep_qual: continue ep_obj_rename_list.append(cur_ep_obj) @@ -3313,7 +3353,7 @@ class Home(MainHandler): tvid_prodid_obj.list + [ep_info[0], ep_info[1]]) if not sql_result: - logger.log(u'Unable to find an episode for ' + cur_ep + ', skipping', logger.WARNING) + logger.warning(f'Unable to find an episode for {cur_ep}, skipping') continue related_ep_result = my_db.select('SELECT * FROM tv_episodes WHERE location = ? AND episode != ?', [sql_result[0]['location'], ep_info[1]]) @@ -3337,7 +3377,7 @@ class Home(MainHandler): # retrieve the episode object and fail if we can't get one ep_obj = self._get_episode(tvid_prodid, season, episode) if not isinstance(ep_obj, str): - if UNKNOWN == Quality.splitCompositeStatus(ep_obj.status)[0]: + if UNKNOWN == Quality.split_composite_status(ep_obj.status)[0]: ep_obj.status = SKIPPED # make a queue item for the TVEpisode and put it on the queue @@ -3374,7 +3414,7 @@ class Home(MainHandler): sickgear.search_queue.remove_old_fifo(sickgear.search_queue.MANUAL_SEARCH_HISTORY) results = sickgear.search_queue.MANUAL_SEARCH_HISTORY - for item in filter_iter(lambda q: hasattr(q, 'segment_ns'), queued): + for item in filter(lambda q: hasattr(q, 'segment_ns'), queued): for ep_ns in item.segment_ns: ep_data, uniq_sxe = self.prepare_episode(ep_ns, 'queued') ep_data_list.append(ep_data) @@ -3390,9 +3430,9 @@ class Home(MainHandler): seen_eps.add(uniq_sxe) episode_params = dict(searchstate='finished', retrystate=True, statusoverview=True) - for item in filter_iter(lambda r: hasattr(r, 'segment_ns') and ( + for item in filter(lambda r: hasattr(r, 'segment_ns') and ( not tvid_prodid or tvid_prodid == str(r.show_ns.tvid_prodid)), results): - for ep_ns in filter_iter( + for ep_ns in filter( lambda e: (e.show_ns.tvid, e.show_ns.prodid, e.season, e.episode) not in seen_eps, item.segment_ns): ep_obj = getattr(ep_ns, 'ep_obj', None) if not ep_obj: @@ -3406,8 +3446,8 @@ class Home(MainHandler): ep_data_list.append(ep_data) seen_eps.add(uniq_sxe) - for snatched in filter_iter(lambda s: ((s.tvid, s.prodid, s.season, s.episode) not in seen_eps), - item.snatched_eps): + for snatched in filter(lambda s: ((s.tvid, s.prodid, s.season, s.episode) not in seen_eps), + item.snatched_eps): ep_obj = getattr(snatched, 'ep_obj', None) if not ep_obj: continue @@ -3442,9 +3482,9 @@ class Home(MainHandler): """ # Find the quality class for the episode quality_class = Quality.qualityStrings[Quality.UNKNOWN] - ep_status, ep_quality = Quality.splitCompositeStatus(ep_type.status) + ep_status, ep_quality = Quality.split_composite_status(ep_type.status) for x in (SD, HD720p, HD1080p, UHD2160p): - if ep_quality in Quality.splitQuality(x)[0]: + if ep_quality in Quality.split_quality(x)[0]: quality_class = qualityPresetStrings[x] break @@ -3473,7 +3513,7 @@ class Home(MainHandler): if isinstance(ep_obj, str): return json_dumps({'result': 'failure'}) - # try do download subtitles for that episode + # try to download subtitles for that episode try: previous_subtitles = 
set([subliminal.language.Language(x) for x in ep_obj.subtitles]) ep_obj.subtitles = set([x.language for x in next(itervalues(ep_obj.download_subtitles()))]) @@ -3849,8 +3889,8 @@ class HomeProcessMedia(Home): skip_failure_processing = nzbget_call and not nzbget_dupekey if nzbget_call and sickgear.NZBGET_SCRIPT_VERSION != kwargs.get('pp_version', '0'): - logger.log('Calling SickGear-NG.py script %s is not current version %s, please update.' % - (kwargs.get('pp_version', '0'), sickgear.NZBGET_SCRIPT_VERSION), logger.ERROR) + logger.error(f'Calling SickGear-NG.py script {kwargs.get("pp_version", "0")} is not current version' + f' {sickgear.NZBGET_SCRIPT_VERSION}, please update.') if sickgear.NZBGET_SKIP_PM and nzbget_call and nzbget_dupekey and nzb_name and show_obj: processTV.process_minimal(nzb_name, show_obj, @@ -3887,21 +3927,18 @@ class HomeProcessMedia(Home): regexp = re.compile(r'(?i)', flags=re.UNICODE) result = regexp.sub('\n', result) if None is not quiet and 1 == int(quiet): - regexp = re.compile(u'(?i)<a[^>]+>([^<]+)<[/]a>', flags=re.UNICODE) - return u'%s' % regexp.sub(r'\1', result) - return self._generic_message('Postprocessing results', u'
      %s
      ' % result) + regexp = re.compile('(?i)<a[^>]+>([^<]+)</a>', flags=re.UNICODE) + return regexp.sub(r'\1', result) + return self._generic_message('Postprocessing results', f'
      {result}
      ') # noinspection PyPep8Naming - def processEpisode(self, dir_name=None, nzb_name=None, process_type=None, **kwargs): - """ legacy function name, stubbed but can _not_ be removed as this - is potentially used in pp scripts located outside of SG path (need to verify this) + @staticmethod + def processEpisode(**kwargs): + """ legacy function name, stubbed and will be removed """ - kwargs['dir_name'] = dir_name or kwargs.pop('dir', None) - kwargs['nzb_name'] = nzb_name or kwargs.pop('nzbName', None) - kwargs['process_type'] = process_type or kwargs.pop('type', 'auto') - kwargs['pp_version'] = kwargs.pop('ppVersion', '0') - return self.process_files(**kwargs) - + logger.error('This endpoint is no longer to be used,' + ' nzbToMedia users please follow: https://github.com/SickGear/SickGear/wiki/FAQ-nzbToMedia') + sickgear.MEMCACHE['DEPRECATE_PP_LEGACY'] = True class AddShows(Home): @@ -3944,12 +3981,12 @@ class AddShows(Home): b_term = decode_str(used_search_term).strip() terms = [] try: - for cur_term in ([], [b_term.encode('utf-8')])[PY2] + [unidecode(b_term), b_term]: + for cur_term in [unidecode(b_term), b_term]: if cur_term not in terms: terms += [cur_term] except (BaseException, Exception): text = used_search_term.strip() - terms = [text if not PY2 else text.encode('utf-8')] + terms = text return set(s for s in set([used_search_term] + terms) if s) @@ -3992,7 +4029,7 @@ class AddShows(Home): r'(?P[^ ]+themoviedb\.org/tv/(?P\d+)[^ ]*)|' r'(?P[^ ]+trakt\.tv/shows/(?P[^ /]+)[^ ]*)|' r'(?P[^ ]+thetvdb\.com/series/(?P[^ /]+)[^ ]*)|' - r'(?P[^ ]+thetvdb\.com/[^\d]+(?P[^ /]+)[^ ]*)|' + r'(?P[^ ]+thetvdb\.com/\D+(?P[^ /]+)[^ ]*)|' r'(?P[^ ]+tvmaze\.com/shows/(?P\d+)/?[^ ]*)', search_term) if id_check: for cur_match in id_check: @@ -4042,7 +4079,7 @@ class AddShows(Home): t = sickgear.TVInfoAPI(cur_tvid).setup(**tvinfo_config) results.setdefault(cur_tvid, {}) try: - for cur_result in t.search_show(list(used_search_term), ids=ids_search_used): + for cur_result in t.search_show(list(used_search_term), ids=ids_search_used): # type: TVInfoShow if TVINFO_TRAKT == cur_tvid and not cur_result['ids'].tvdb: continue tv_src_id = int(cur_result['id']) @@ -4063,7 +4100,7 @@ class AddShows(Home): any(ids_to_search[si] == results[cur_tvid][tv_src_id].get('ids', {})[si] for si in ids_to_search): ids_search_used.update({k: v for k, v in iteritems( - results[cur_tvid][tv_src_id].get('ids',{})) + results[cur_tvid][tv_src_id].get('ids', {})) if v and k not in iterkeys(ids_to_search)}) results[cur_tvid][tv_src_id]['rename_suggest'] = '' \ if not results[cur_tvid][tv_src_id]['firstaired'] \ @@ -4085,7 +4122,7 @@ class AddShows(Home): for tvid, name in iteritems(sickgear.TVInfoAPI().all_sources)} if TVINFO_TRAKT in results and TVINFO_TVDB in results: - tvdb_ids = list_keys(results[TVINFO_TVDB]) + tvdb_ids = list(results[TVINFO_TVDB]) results[TVINFO_TRAKT] = {k: v for k, v in iteritems(results[TVINFO_TRAKT]) if v['ids'].tvdb not in tvdb_ids} def in_db(tvid, prod_id): @@ -4112,7 +4149,8 @@ class AddShows(Home): show['seriesname'], helpers.xhtml_escape(show['seriesname']), show['firstaired'], (isinstance(show['firstaired'], string_types) and SGDatetime.sbfdate(_parse_date(show['firstaired'])) or ''), - show.get('network', '') or '', (show.get('genres', '') or show.get('genre', '') or '').replace('|', ', '), # 11 - 12 + show.get('network', '') or '', # 11 + (show.get('genres', '') or show.get('genre', '') or '').replace('|', ', '), # 12 show.get('language', ''), show.get('language_country_code') or '', # 13 - 14 
re.sub(r'([,.!][^,.!]*?)$', '...', re.sub(r'([.!?])(?=\w)', r'\1 ', @@ -4277,7 +4315,7 @@ class AddShows(Home): try: for cur_dir in scantree(cur_root_dir, filter_kind=True, recurse=False): - normpath = ek.ek(os.path.normpath, cur_dir.path) + normpath = os.path.normpath(cur_dir.path) highlight = hash_dir == re.sub('[^a-z]', '', sg_helpers.md5_for_text(normpath)) if hash_dir: display_one_dir = highlight @@ -4320,7 +4358,7 @@ class AddShows(Home): if display_one_dir and not cur_data['highlight'][cur_enum]: continue - dir_item = dict(normpath=cur_normpath, rootpath='%s%s' % (ek.ek(os.path.dirname, cur_normpath), os.sep), + dir_item = dict(normpath=cur_normpath, rootpath='%s%s' % (os.path.dirname(cur_normpath), os.sep), name=cur_data['name'][cur_enum], added_already=any(cur_data['exists'][cur_enum]), highlight=cur_data['highlight'][cur_enum]) @@ -4332,7 +4370,7 @@ class AddShows(Home): if prodid and show_name: break - (tvid, prodid, show_name) = cur_provider.retrieveShowMetadata(cur_normpath) + (tvid, prodid, show_name) = cur_provider.retrieve_show_metadata(cur_normpath) # default to TVDB if TV info src was not detected if show_name and (not tvid or not prodid): @@ -4378,7 +4416,7 @@ class AddShows(Home): elif not show_dir: t.default_show_name = '' elif not show_name: - t.default_show_name = ek.ek(os.path.basename, ek.ek(os.path.normpath, show_dir)).replace('.', ' ') + t.default_show_name = os.path.basename(os.path.normpath(show_dir)).replace('.', ' ') else: t.default_show_name = show_name @@ -4399,9 +4437,9 @@ class AddShows(Home): t.infosrc = sickgear.TVInfoAPI().search_sources search_tvid = None if use_show_name and 1 == show_name.count(':'): # if colon is found once - search_tvid = filter_list(lambda x: bool(x), + search_tvid = list(filter(lambda x: bool(x), [('%s:' % sickgear.TVInfoAPI(_tvid).config['slug']) in show_name and _tvid - for _tvid, _ in iteritems(t.infosrc)]) + for _tvid, _ in iteritems(t.infosrc)])) search_tvid = 1 == len(search_tvid) and search_tvid[0] t.provided_tvid = search_tvid or int(tvid or sickgear.TVINFO_DEFAULT) t.infosrc_icons = [sickgear.TVInfoAPI(cur_tvid).config.get('icon') for cur_tvid in t.infosrc] @@ -4532,7 +4570,7 @@ class AddShows(Home): def info_anidb(self, ids, show_name): - if not filter_list(lambda tvid_prodid: helpers.find_show_by_id(tvid_prodid), ids.split(' ')): + if not list(filter(lambda tvid_prodid: helpers.find_show_by_id(tvid_prodid), ids.split(' '))): return self.new_show('|'.join(['', '', '', ' '.join([ids, show_name])]), use_show_name=True, is_anime=True) @staticmethod @@ -4619,8 +4657,8 @@ class AddShows(Home): oldest, newest, oldest_dt, newest_dt = None, None, 9999999, 0 show_list = (data or {}).get('list', {}).get('items', {}) - idx_ids = dict(map_iter(lambda so: (so.imdbid, (so.tvid, so.prodid)), - filter_iter(lambda _so: getattr(_so, 'imdbid', None), sickgear.showList))) + idx_ids = dict(map(lambda so: (so.imdbid, (so.tvid, so.prodid)), + filter(lambda _so: getattr(_so, 'imdbid', None), sickgear.showList))) # list_id = (data or {}).get('list', {}).get('id', {}) for row in show_list: @@ -4685,7 +4723,7 @@ class AddShows(Home): def parse_imdb_html(self, html, filtered, kwargs): - img_size = re.compile(r'(?im)(V1[^XY]+([XY]))(\d+)([^\d]+)(\d+)([^\d]+)(\d+)([^\d]+)(\d+)([^\d]+)(\d+)(.*?)$') + img_size = re.compile(r'(?im)(V1[^XY]+([XY]))(\d+)(\D+)(\d+)(\D+)(\d+)(\D+)(\d+)(\D+)(\d+)(.*?)$') with BS4Parser(html, features=['html5lib', 'permissive']) as soup: show_list = soup.select('.lister-list') @@ -4755,7 +4793,7 @@ class AddShows(Home): 
show_obj = helpers.find_show_by_id({TVINFO_IMDB: int(ids['imdb'].replace('tt', ''))}, no_mapped_ids=False) - for tvid in filter_iter(lambda _tvid: _tvid == show_obj.tvid, sickgear.TVInfoAPI().search_sources): + for tvid in filter(lambda _tvid: _tvid == show_obj.tvid, sickgear.TVInfoAPI().search_sources): infosrc_slug, infosrc_url = (sickgear.TVInfoAPI(tvid).config[x] for x in ('slug', 'show_url')) filtered[-1]['ids'][infosrc_slug] = show_obj.prodid @@ -4980,13 +5018,13 @@ class AddShows(Home): normalised = resp else: for item in resp: - normalised.append({u'show': item}) + normalised.append({'show': item}) del resp except TraktAuthException as e: - logger.log(u'Pin authorisation needed to connect to Trakt service: %s' % ex(e), logger.WARNING) + logger.warning(f'Pin authorisation needed to connect to Trakt service: {ex(e)}') error_msg = 'Unauthorized: Get another pin in the Notifications Trakt settings' except TraktException as e: - logger.log(u'Could not connect to Trakt service: %s' % ex(e), logger.WARNING) + logger.warning(f'Could not connect to Trakt service: {ex(e)}') except exceptions_helper.ConnectionSkipException as e: logger.log('Skipping Trakt because of previous failure: %s' % ex(e)) except (IndexError, KeyError): @@ -5116,7 +5154,7 @@ class AddShows(Home): def info_trakt(self, ids, show_name): - if not filter_list(lambda tvid_prodid: helpers.find_show_by_id(tvid_prodid), ids.split(' ')): + if not list(filter(lambda tvid_prodid: helpers.find_show_by_id(tvid_prodid), ids.split(' '))): return self.new_show('|'.join(['', '', '', ' '.join([ids, show_name])]), use_show_name=True) def ne_default(self): @@ -5206,7 +5244,7 @@ class AddShows(Home): channel_tag_copy = copy.copy(channel_tag) if channel_tag_copy: network = channel_tag_copy.a.extract().get_text(strip=True) - date_info = re.sub(r'^[^\d]+', '', channel_tag_copy.get_text(strip=True)) + date_info = re.sub(r'^\D+', '', channel_tag_copy.get_text(strip=True)) if date_info: dt = dateutil.parser.parse((date_info, '%s.01.01' % date_info)[4 == len(date_info)]) @@ -5215,7 +5253,7 @@ class AddShows(Home): and 'printed' in ' '.join(t.get('class', ''))] if len(tag): age_args = {} - future = re.sub(r'[^\d]+(.*)', r'\1', tag[0].get_text(strip=True)) + future = re.sub(r'\D+(.*)', r'\1', tag[0].get_text(strip=True)) for (dim, rcx) in rc: value = helpers.try_int(rcx.sub(r'\1', future), None) if value: @@ -5243,7 +5281,7 @@ class AddShows(Home): genres = row.find(class_='genre') if genres: - genres = re.sub(r',([^\s])', r', \1', genres.get_text(strip=True)) + genres = re.sub(r',(\S)', r', \1', genres.get_text(strip=True)) overview = row.find(class_='summary') if overview: overview = overview.get_text(strip=True) @@ -5430,7 +5468,7 @@ class AddShows(Home): # noinspection PyUnusedLocal def info_tvmaze(self, ids, show_name): - if not filter_list(lambda tvid_prodid: helpers.find_show_by_id(tvid_prodid), ids.split(' ')): + if not list(filter(lambda tvid_prodid: helpers.find_show_by_id(tvid_prodid), ids.split(' '))): return self.new_show('|'.join(['', '', '', ' '.join([ids, show_name])]), use_show_name=True) def tvc_default(self): @@ -5729,7 +5767,7 @@ class AddShows(Home): dt_ordinal = 0 dt_string = '' - date_tags = filter_list(lambda t: t.find('span'), row.find_all('div', class_='clamp-details')) + date_tags = list(filter(lambda t: t.find('span'), row.find_all('div', class_='clamp-details'))) if date_tags: dt = dateutil.parser.parse(date_tags[0].get_text().strip()) dt_ordinal = dt.toordinal() @@ -5842,11 +5880,11 @@ class AddShows(Home): 
tvid_prodid_list = [] # first, process known ids - for tvid, infosrc_slug in filter_iter( + for tvid, infosrc_slug in filter( lambda tvid_slug: item['ids'].get(tvid_slug[1]) and not sickgear.TVInfoAPI(tvid_slug[0]).config.get('defunct'), - map_iter(lambda _tvid: (_tvid, sickgear.TVInfoAPI(_tvid).config['slug']), - iterkeys(sickgear.TVInfoAPI().all_sources))): + map(lambda _tvid: (_tvid, sickgear.TVInfoAPI(_tvid).config['slug']), + iterkeys(sickgear.TVInfoAPI().all_sources))): try: src_id = item['ids'][infosrc_slug] tvid_prodid_list += ['%s:%s' % (infosrc_slug, src_id)] @@ -5901,7 +5939,7 @@ class AddShows(Home): known.append(item['show_id']) t.all_shows.append(item) - if any(filter_iter(lambda tp: tp in sickgear.BROWSELIST_HIDDEN, tvid_prodid_list)): + if any(filter(lambda tp: tp in sickgear.BROWSELIST_HIDDEN, tvid_prodid_list)): item['hide'] = True t.num_hidden += 1 @@ -5950,7 +5988,7 @@ class AddShows(Home): tvid, void, prodid, show_name = self.split_extra_show(which_series) if bool(helpers.try_int(cancel_form)): tvid = tvid or provided_tvid or '0' - prodid = re.findall(r'tvid_prodid=[^%s]+%s([\d]+)' % tuple(2 * [TVidProdid.glue]), return_to)[0] + prodid = re.findall(r'tvid_prodid=[^%s]+%s(\d+)' % tuple(2 * [TVidProdid.glue]), return_to)[0] return self.redirect(return_to % (tvid, prodid)) # grab our list of other dirs if given @@ -5984,8 +6022,7 @@ class AddShows(Home): series_pieces = which_series.split('|') if (which_series and root_dir) or (which_series and full_show_path and 1 < len(series_pieces)): if 4 > len(series_pieces): - logger.log('Unable to add show due to show selection. Not enough arguments: %s' % (repr(series_pieces)), - logger.ERROR) + logger.error(f'Unable to add show due to show selection. Not enough arguments: {repr(series_pieces)}') ui.notifications.error('Unknown error. 
Unable to add show due to problem with show selection.') return self.redirect('/add-shows/import/') @@ -6003,15 +6040,15 @@ class AddShows(Home): # use the whole path if it's given, or else append the show name to the root dir to get the full show path if full_show_path: - show_dir = ek.ek(os.path.normpath, full_show_path) + show_dir = os.path.normpath(full_show_path) new_show = False else: show_dir = helpers.generate_show_dir_name(root_dir, show_name) new_show = True # if the dir exists, do 'add existing show' - if ek.ek(os.path.isdir, show_dir) and not full_show_path: - ui.notifications.error('Unable to add show', u'Found existing folder: ' + show_dir) + if os.path.isdir(show_dir) and not full_show_path: + ui.notifications.error('Unable to add show', f'Found existing folder: {show_dir}') return self.redirect( '/add-shows/import?tvid_prodid=%s%s%s&hash_dir=%s%s' % (tvid, TVidProdid.glue, prodid, re.sub('[^a-z]', '', sg_helpers.md5_for_text(show_dir)), @@ -6019,11 +6056,11 @@ class AddShows(Home): # don't create show dir if config says not to if sickgear.ADD_SHOWS_WO_DIR: - logger.log(u'Skipping initial creation due to config.ini setting (add_shows_wo_dir)') + logger.log('Skipping initial creation due to config.ini setting (add_shows_wo_dir)') else: if not helpers.make_dir(show_dir): - logger.log(u'Unable to add show because can\'t create folder: ' + show_dir, logger.ERROR) - ui.notifications.error('Unable to add show', u'Can\'t create folder: ' + show_dir) + logger.error(f"Unable to add show because can't create folder: {show_dir}") + ui.notifications.error('Unable to add show', f"Can't create folder: {show_dir}") return self.redirect('/home/') helpers.chmod_as_parent(show_dir) @@ -6037,7 +6074,7 @@ class AddShows(Home): any_qualities = [any_qualities] if type(best_qualities) != list: best_qualities = [best_qualities] - new_quality = Quality.combineQualities(map_list(int, any_qualities), map_list(int, best_qualities)) + new_quality = Quality.combine_qualities(list(map(int, any_qualities)), list(map(int, best_qualities))) upgrade_once = config.checkbox_to_value(upgrade_once) wanted_begin = config.minimax(wanted_begin, 0, -1, 10) @@ -6232,7 +6269,7 @@ class Manage(MainHandler): if cur_season not in result: result[cur_season] = {} - cur_quality = Quality.splitCompositeStatus(int(cur_result['status']))[1] + cur_quality = Quality.split_composite_status(int(cur_result['status']))[1] result[cur_season][cur_episode] = {'name': cur_result['name'], 'airdateNever': 1000 > int(cur_result['airdate']), 'qualityCss': Quality.get_quality_css(cur_quality), @@ -6252,9 +6289,9 @@ class Manage(MainHandler): if event_sql_result: for cur_result_event in event_sql_result: if None is d_status and cur_result_event['action'] in Quality.DOWNLOADED: - d_status, d_qual = Quality.splitCompositeStatus(cur_result_event['action']) + d_status, d_qual = Quality.split_composite_status(cur_result_event['action']) if None is s_status and cur_result_event['action'] in Quality.SNATCHED_ANY: - s_status, s_quality = Quality.splitCompositeStatus(cur_result_event['action']) + s_status, s_quality = Quality.split_composite_status(cur_result_event['action']) aged = ((datetime.datetime.now() - datetime.datetime.strptime(str(cur_result_event['date']), sickgear.history.dateFormat)) @@ -6295,11 +6332,11 @@ class Manage(MainHandler): if Quality.NONE == cur_quality: return undo_from_history, change_to, status - cur_status = Quality.splitCompositeStatus(int(cur_status))[0] + cur_status = Quality.split_composite_status(int(cur_status))[0] if 
any([location]): undo_from_history = True change_to = statusStrings[DOWNLOADED] - status = [Quality.compositeStatus(DOWNLOADED, d_qual or cur_quality)] + status = [Quality.composite_status(DOWNLOADED, d_qual or cur_quality)] elif cur_status in Quality.SNATCHED_ANY + [IGNORED, SKIPPED, WANTED]: if None is d_qual: if cur_status not in [IGNORED, SKIPPED]: @@ -6311,7 +6348,7 @@ class Manage(MainHandler): or sickgear.SKIP_REMOVED_FILES in [ARCHIVED, IGNORED, SKIPPED]: undo_from_history = True change_to = '%s %s' % (statusStrings[ARCHIVED], Quality.qualityStrings[d_qual]) - status = [Quality.compositeStatus(ARCHIVED, d_qual)] + status = [Quality.composite_status(ARCHIVED, d_qual)] elif sickgear.SKIP_REMOVED_FILES in [IGNORED, SKIPPED] \ and cur_status not in [IGNORED, SKIPPED]: change_to = statusStrings[statusStrings[sickgear.SKIP_REMOVED_FILES]] @@ -6405,8 +6442,7 @@ class Manage(MainHandler): ' AND season != 0' ' AND indexer = ? AND showid = ?', status_list + tvid_prodid_list) - what = (sql_result and '|'.join(map_iter(lambda r: '%sx%s' % (r['season'], r['episode']), - sql_result)) + what = (sql_result and '|'.join(map(lambda r: '%sx%s' % (r['season'], r['episode']), sql_result)) or None) to = new_status @@ -6564,7 +6600,8 @@ class Manage(MainHandler): ' WHERE indexer = ? AND showid = ?' ' AND season != 0 AND status LIKE \'%4\'', TVidProdid(cur_tvid_prodid).list) - to_download[cur_tvid_prodid] = map_list(lambda x: '%sx%s' % (x['season'], x['episode']), sql_result) + to_download[cur_tvid_prodid] = list(map(lambda x: '%sx%s' % (x['season'], x['episode']), + sql_result)) for epResult in to_download[cur_tvid_prodid]: season, episode = epResult.split('x') @@ -6693,7 +6730,7 @@ class Manage(MainHandler): for cur_show_obj in show_list: # noinspection PyProtectedMember - cur_root_dir = ek.ek(os.path.dirname, cur_show_obj._location) + cur_root_dir = os.path.dirname(cur_show_obj._location) if cur_root_dir not in root_dir_list: root_dir_list.append(cur_root_dir) @@ -6819,11 +6856,11 @@ class Manage(MainHandler): continue # noinspection PyProtectedMember - cur_root_dir = ek.ek(os.path.dirname, show_obj._location) + cur_root_dir = os.path.dirname(show_obj._location) # noinspection PyProtectedMember - cur_show_dir = ek.ek(os.path.basename, show_obj._location) + cur_show_dir = os.path.basename(show_obj._location) if cur_root_dir in dir_map and cur_root_dir != dir_map[cur_root_dir]: - new_show_dir = ek.ek(os.path.join, dir_map[cur_root_dir], cur_show_dir) + new_show_dir = os.path.join(dir_map[cur_root_dir], cur_show_dir) if 'nt' != os.name and ':\\' in cur_show_dir: # noinspection PyProtectedMember cur_show_dir = show_obj._location.split('\\')[-1] @@ -6831,10 +6868,9 @@ class Manage(MainHandler): base_dir = dir_map[cur_root_dir].rsplit(cur_show_dir)[0].rstrip('/') except IndexError: base_dir = dir_map[cur_root_dir] - new_show_dir = ek.ek(os.path.join, base_dir, cur_show_dir) + new_show_dir = os.path.join(base_dir, cur_show_dir) # noinspection PyProtectedMember - logger.log(u'For show %s changing dir from %s to %s' % - (show_obj.unique_name, show_obj._location, new_show_dir)) + logger.log(f'For show {show_obj.unique_name} changing dir from {show_obj._location} to {new_show_dir}') else: # noinspection PyProtectedMember new_show_dir = show_obj._location @@ -6899,7 +6935,7 @@ class Manage(MainHandler): new_subtitles = 'on' if new_subtitles else 'off' if 'keep' == quality_preset: - any_qualities, best_qualities = Quality.splitQuality(show_obj.quality) + any_qualities, best_qualities = 
Quality.split_quality(show_obj.quality) elif int(quality_preset): best_qualities = [] @@ -6913,7 +6949,7 @@ class Manage(MainHandler): prune=new_prune, tag=new_tag, direct_call=True) if cur_errors: - logger.log(u'Errors: ' + str(cur_errors), logger.ERROR) + logger.error(f'Errors: {cur_errors}') errors.append('%s:\n
        ' % show_obj.unique_name + ' '.join( ['
      • %s
      • ' % error for error in cur_errors]) + '
      ') @@ -6947,20 +6983,20 @@ class Manage(MainHandler): else: if cur_tvid_prodid in to_update: try: - sickgear.show_queue_scheduler.action.updateShow(cur_show_obj, True, True) + sickgear.show_queue_scheduler.action.update_show(cur_show_obj, True, True) update.append(cur_show_obj.name) except exceptions_helper.CantUpdateException as e: errors.append('Unable to update show %s: %s' % (cur_show_obj.unique_name, ex(e))) elif cur_tvid_prodid in to_refresh: try: - sickgear.show_queue_scheduler.action.refreshShow(cur_show_obj) + sickgear.show_queue_scheduler.action.refresh_show(cur_show_obj) refresh.append(cur_show_obj.name) except exceptions_helper.CantRefreshException as e: errors.append('Unable to refresh show %s: %s' % (cur_show_obj.unique_name, ex(e))) if cur_tvid_prodid in to_rename: - sickgear.show_queue_scheduler.action.renameShowEpisodes(cur_show_obj) + sickgear.show_queue_scheduler.action.rename_show_episodes(cur_show_obj) rename.append(cur_show_obj.name) if sickgear.USE_SUBTITLES and cur_tvid_prodid in to_subtitle: @@ -7039,7 +7075,7 @@ class Manage(MainHandler): new_show_id = new_show.split(':') new_tvid = int(new_show_id[0]) if new_tvid not in tv_sources: - logger.log('Skipping %s because target is not a valid source' % show, logger.WARNING) + logger.warning('Skipping %s because target is not a valid source' % show) errors.append('Skipping %s because target is not a valid source' % show) continue try: @@ -7047,7 +7083,7 @@ class Manage(MainHandler): except (BaseException, Exception): show_obj = None if not show_obj: - logger.log('Skipping %s because source is not a valid show' % show, logger.WARNING) + logger.warning('Skipping %s because source is not a valid show' % show) errors.append('Skipping %s because source is not a valid show' % show) continue if 2 == len(new_show_id): @@ -7057,21 +7093,20 @@ class Manage(MainHandler): except (BaseException, Exception): new_show_obj = None if new_show_obj: - logger.log('Skipping %s because target show with that id already exists in db' % show, - logger.WARNING) + logger.warning('Skipping %s because target show with that id already exists in db' % show) errors.append('Skipping %s because target show with that id already exists in db' % show) continue else: new_prodid = None if show_obj.tvid == new_tvid and (not new_prodid or new_prodid == show_obj.prodid): - logger.log('Skipping %s because target same as source' % show, logger.WARNING) + logger.warning('Skipping %s because target same as source' % show) errors.append('Skipping %s because target same as source' % show) continue try: sickgear.show_queue_scheduler.action.switch_show(show_obj=show_obj, new_tvid=new_tvid, - new_prodid=new_prodid, force_id=force_id) + new_prodid=new_prodid, force_id=force_id) except (BaseException, Exception) as e: - logger.log('Could not add show %s to switch queue: %s' % (show_obj.tvid_prodid, ex(e)), logger.WARNING) + logger.warning('Could not add show %s to switch queue: %s' % (show_obj.tvid_prodid, ex(e))) errors.append('Could not add show %s to switch queue: %s' % (show_obj.tvid_prodid, ex(e))) return json_dumps(({'result': 'success'}, {'errors': ', '.join(errors)})[0 < len(errors)]) @@ -7116,7 +7151,7 @@ class ManageSearch(Manage): def retry_provider(provider=None): if not provider: return - prov = [p for p in sickgear.providerList + sickgear.newznabProviderList if p.get_id() == provider] + prov = [p for p in sickgear.provider_list + sickgear.newznab_providers if p.get_id() == provider] if not prov: return prov[0].retry_next() @@ -7127,7 +7162,7 @@ class 
ManageSearch(Manage): # force it to run the next time it looks if not sickgear.search_queue_scheduler.action.is_standard_backlog_in_progress(): sickgear.backlog_search_scheduler.force_search(force_type=FORCED_BACKLOG) - logger.log(u'Backlog search forced') + logger.log('Backlog search forced') ui.notifications.message('Backlog search started') time.sleep(5) @@ -7137,9 +7172,9 @@ class ManageSearch(Manage): # force it to run the next time it looks if not sickgear.search_queue_scheduler.action.is_recentsearch_in_progress(): - result = sickgear.recent_search_scheduler.forceRun() + result = sickgear.recent_search_scheduler.force_run() if result: - logger.log(u'Recent search forced') + logger.log('Recent search forced') ui.notifications.message('Recent search started') time.sleep(5) @@ -7148,9 +7183,9 @@ class ManageSearch(Manage): def force_find_propers(self): # force it to run the next time it looks - result = sickgear.proper_finder_scheduler.forceRun() + result = sickgear.proper_finder_scheduler.force_run() if result: - logger.log(u'Find propers search forced') + logger.log('Find propers search forced') ui.notifications.message('Find propers search started') time.sleep(5) @@ -7172,9 +7207,9 @@ class ShowTasks(Manage): t = PageTemplate(web_handler=self, file='manage_showProcesses.tmpl') t.queue_length = sickgear.show_queue_scheduler.action.queue_length() t.people_queue = sickgear.people_queue_scheduler.action.queue_data() - t.next_run = sickgear.show_update_scheduler.lastRun.replace( + t.next_run = sickgear.show_update_scheduler.last_run.replace( hour=sickgear.show_update_scheduler.start_time.hour) - t.show_update_running = sickgear.show_queue_scheduler.action.isShowUpdateRunning() \ + t.show_update_running = sickgear.show_queue_scheduler.action.is_show_update_running() \ or sickgear.show_update_scheduler.action.amActive my_db = db.DBConnection(row_type='dict') @@ -7258,9 +7293,9 @@ class ShowTasks(Manage): def force_show_update(self): - result = sickgear.show_update_scheduler.forceRun() + result = sickgear.show_update_scheduler.force_run() if result: - logger.log(u'Show Update forced') + logger.log('Show Update forced') ui.notifications.message('Forced Show Update started') time.sleep(5) @@ -7418,7 +7453,7 @@ class History(MainHandler): r['status'] = r['status_w'] r['file_size'] = r['file_size_w'] - r['status'], r['quality'] = Quality.splitCompositeStatus(helpers.try_int(r['status'])) + r['status'], r['quality'] = Quality.split_composite_status(helpers.try_int(r['status'])) r['season'], r['episode'] = '%02i' % r['season'], '%02i' % r['episode'] if r['tvep_id'] not in mru_count: # depends on SELECT ORDER BY date_watched DESC to determine mru_count @@ -7434,9 +7469,9 @@ class History(MainHandler): elif 'stats' in sickgear.HISTORY_LAYOUT: - prov_list = [p.name for p in (sickgear.providerList - + sickgear.newznabProviderList - + sickgear.torrentRssProviderList)] + prov_list = [p.name for p in (sickgear.provider_list + + sickgear.newznab_providers + + sickgear.torrent_rss_providers)] # noinspection SqlResolve sql = 'SELECT COUNT(1) AS count,' \ ' MIN(DISTINCT date) AS earliest,' \ @@ -7463,12 +7498,12 @@ class History(MainHandler): elif 'failures' in sickgear.HISTORY_LAYOUT: - t.provider_fail_stats = filter_list(lambda stat: len(stat['fails']), [ + t.provider_fail_stats = list(filter(lambda stat: len(stat['fails']), [ dict(name=p.name, id=p.get_id(), active=p.is_active(), prov_img=p.image_name(), prov_id=p.get_id(), # 2020.03.17 legacy var, remove at future date fails=p.fails.fails_sorted, 
next_try=p.get_next_try_time, has_limit=getattr(p, 'has_limit', False), tmr_limit_time=p.tmr_limit_time) - for p in sickgear.providerList + sickgear.newznabProviderList]) + for p in sickgear.provider_list + sickgear.newznab_providers])) t.provider_fail_cnt = len([p for p in t.provider_fail_stats if len(p['fails'])]) t.provider_fails = t.provider_fail_cnt # 2020.03.17 legacy var, remove at future date @@ -7502,11 +7537,11 @@ class History(MainHandler): return result with sg_helpers.DOMAIN_FAILURES.lock: - t.domain_fail_stats = filter_list(lambda stat: len(stat['fails']), [ + t.domain_fail_stats = list(filter(lambda stat: len(stat['fails']), [ dict(name=k, id=sickgear.GenericProvider.make_id(k), img=img(k), cls=img(k, True), fails=v.fails_sorted, next_try=v.get_next_try_time, has_limit=getattr(v, 'has_limit', False), tmr_limit_time=v.tmr_limit_time) - for k, v in iteritems(sg_helpers.DOMAIN_FAILURES.domain_list)]) + for k, v in iteritems(sg_helpers.DOMAIN_FAILURES.domain_list)])) t.domain_fail_cnt = len([d for d in t.domain_fail_stats if len(d['fails'])]) @@ -7611,12 +7646,11 @@ class History(MainHandler): hosts, keys, message = client.check_config(sickgear.EMBY_HOST, sickgear.EMBY_APIKEY) if sickgear.USE_EMBY and hosts: - logger.log('Updating Emby watched episode states', logger.DEBUG) + logger.debug('Updating Emby watched episode states') rd = sickgear.ROOT_DIRS.split('|')[1:] \ + [x.split('=')[0] for x in sickgear.EMBY_PARENT_MAPS.split(',') if any(x)] - rootpaths = sorted( - ['%s%s' % (ek.ek(os.path.splitdrive, x)[1], os.path.sep) for x in rd], key=len, reverse=True) + rootpaths = sorted(['%s%s' % (os.path.splitdrive(x)[1], os.path.sep) for x in rd], key=len, reverse=True) rootdirs = sorted([x for x in rd], key=len, reverse=True) headers = {'Content-type': 'application/json'} states = {} @@ -7661,7 +7695,7 @@ class History(MainHandler): ParentId=folder_id, Filters='IsPlayed', format='json'), timeout=10, parse_json=True) or {} - for d in filter_iter(lambda item: 'Episode' == item.get('Type', ''), items.get('Items')): + for d in filter(lambda item: 'Episode' == item.get('Type', ''), items.get('Items')): try: root_dir_found = False path_file = d.get('Path') @@ -7669,8 +7703,8 @@ class History(MainHandler): continue for index, p in enumerate(rootpaths): if p in path_file: - path_file = ek.ek(os.path.join, rootdirs[index], - re.sub('.*?%s' % re.escape(p), '', path_file)) + path_file = os.path.join( + rootdirs[index], re.sub('.*?%s' % re.escape(p), '', path_file)) root_dir_found = True break if not root_dir_found: @@ -7698,16 +7732,16 @@ class History(MainHandler): except (BaseException, Exception): continue if mapping: - logger.log('Folder mappings used, the first of %s is [%s] in Emby is [%s] in SickGear' % - (mapped, mapping[0], mapping[1]), logger.DEBUG) + logger.debug(f'Folder mappings used, the first of {mapped} is [{mapping[0]}] in Emby is' + f' [{mapping[1]}] in SickGear') if states: # Prune user removed items that are no longer being returned by API - media_paths = map_list(lambda arg: ek.ek(os.path.basename, arg[1]['path_file']), iteritems(states)) + media_paths = list(map(lambda arg: os.path.basename(arg[1]['path_file']), iteritems(states))) sql = 'FROM tv_episodes_watched WHERE hide=1 AND label LIKE "%%{Emby}"' my_db = db.DBConnection(row_type='dict') files = my_db.select('SELECT location %s' % sql) - for i in filter_iter(lambda f: ek.ek(os.path.basename, f['location']) not in media_paths, files): + for i in filter(lambda f: os.path.basename(f['location']) not in media_paths, 
files): loc = i.get('location') if loc: my_db.select('DELETE %s AND location="%s"' % (sql, loc)) @@ -7721,7 +7755,7 @@ class History(MainHandler): hosts = [x.strip().lower() for x in sickgear.PLEX_SERVER_HOST.split(',')] if sickgear.USE_PLEX and hosts: - logger.log('Updating Plex watched episode states', logger.DEBUG) + logger.debug('Updating Plex watched episode states') from lib.plex import Plex @@ -7739,7 +7773,7 @@ class History(MainHandler): # noinspection HttpUrlsUsage parts = re.search(r'(.*):(\d+)$', urlparse('http://' + re.sub(r'^\w+://', '', cur_host)).netloc) if not parts: - logger.log('Skipping host not in min. host:port format : %s' % cur_host, logger.WARNING) + logger.warning('Skipping host not in min. host:port format : %s' % cur_host) elif parts.group(1): plex.plex_host = parts.group(1) if None is not parts.group(2): @@ -7764,19 +7798,18 @@ class History(MainHandler): idx += 1 - logger.log('Fetched %s of %s played for host : %s' % (len(plex.show_states), played, cur_host), - logger.DEBUG) + logger.debug('Fetched %s of %s played for host : %s' % (len(plex.show_states), played, cur_host)) if mapping: - logger.log('Folder mappings used, the first of %s is [%s] in Plex is [%s] in SickGear' % - (mapped, mapping[0], mapping[1]), logger.DEBUG) + logger.debug(f'Folder mappings used, the first of {mapped} is [{mapping[0]}] in Plex is' + f' [{mapping[1]}] in SickGear') if states: # Prune user removed items that are no longer being returned by API - media_paths = map_list(lambda arg: ek.ek(os.path.basename, arg[1]['path_file']), iteritems(states)) + media_paths = list(map(lambda arg: os.path.basename(arg[1]['path_file']), iteritems(states))) sql = 'FROM tv_episodes_watched WHERE hide=1 AND label LIKE "%%{Plex}"' my_db = db.DBConnection(row_type='dict') files = my_db.select('SELECT location %s' % sql) - for i in filter_iter(lambda f: ek.ek(os.path.basename, f['location']) not in media_paths, files): + for i in filter(lambda f: os.path.basename(f['location']) not in media_paths, files): loc = i.get('location') if loc: my_db.select('DELETE %s AND location="%s"' % (sql, loc)) @@ -7814,13 +7847,13 @@ class History(MainHandler): refresh = [] for cur_result in sql_result: if files and cur_result['location'] not in attempted and 0 < helpers.get_size(cur_result['location']) \ - and ek.ek(os.path.isfile, cur_result['location']): + and os.path.isfile(cur_result['location']): # locations repeat with watch events but attempt to delete once attempted += [cur_result['location']] result = helpers.remove_file(cur_result['location']) if result: - logger.log(u'%s file %s' % (result, cur_result['location'])) + logger.log(f'{result} file {cur_result["location"]}') deleted.update({cur_result['tvep_id']: row_show_ids[cur_result['rowid']]}) if row_show_ids[cur_result['rowid']] not in refresh: @@ -7847,8 +7880,8 @@ class History(MainHandler): for cur_result in sql_result: show_obj = helpers.find_show_by_id(tvid_prodid_dict) ep_obj = show_obj.get_episode(cur_result['season'], cur_result['episode']) - for n in filter_iter(lambda x: x.name.lower() in ('emby', 'kodi', 'plex'), - notifiers.NotifierFactory().get_enabled()): + for n in filter(lambda x: x.name.lower() in ('emby', 'kodi', 'plex'), + notifiers.NotifierFactory().get_enabled()): if 'PLEX' == n.name: if updating: continue @@ -7857,7 +7890,7 @@ class History(MainHandler): for tvid_prodid_dict in refresh: try: - sickgear.show_queue_scheduler.action.refreshShow( + sickgear.show_queue_scheduler.action.refresh_show( helpers.find_show_by_id(tvid_prodid_dict)) 
except (BaseException, Exception): pass @@ -7901,7 +7934,7 @@ class Config(MainHandler): t.submenu = self.config_menu() try: - with open(ek.ek(os.path.join, sickgear.PROG_DIR, 'CHANGES.md')) as fh: + with open(os.path.join(sickgear.PROG_DIR, 'CHANGES.md')) as fh: t.version = re.findall(r'###[^0-9]+([0-9]+\.[0-9]+\.[0-9x]+)', fh.readline())[0] except (BaseException, Exception): t.version = '' @@ -7911,18 +7944,18 @@ class Config(MainHandler): t.tz_version = None try: if None is not current_file: - current_file = ek.ek(os.path.basename, current_file) - zonefile = real_path(ek.ek(os.path.join, sickgear.ZONEINFO_DIR, current_file)) - if not ek.ek(os.path.isfile, zonefile): + current_file = os.path.basename(current_file) + zonefile = real_path(os.path.join(sickgear.ZONEINFO_DIR, current_file)) + if not os.path.isfile(zonefile): t.tz_fallback = True - zonefile = ek.ek(os.path.join, ek.ek(os.path.dirname, zoneinfo.__file__), current_file) - if ek.ek(os.path.isfile, zonefile): + zonefile = os.path.join(os.path.dirname(zoneinfo.__file__), current_file) + if os.path.isfile(zonefile): t.tz_version = zoneinfo.ZoneInfoFile(zoneinfo.getzoneinfofile_stream()).metadata['tzversion'] except (BaseException, Exception): pass t.backup_db_path = sickgear.BACKUP_DB_MAX_COUNT and \ - (sickgear.BACKUP_DB_PATH or ek.ek(os.path.join, sickgear.DATA_DIR, 'backup')) or 'Disabled' + (sickgear.BACKUP_DB_PATH or os.path.join(sickgear.DATA_DIR, 'backup')) or 'Disabled' return t.respond() @@ -7969,7 +8002,7 @@ class ConfigGeneral(Config): seasons = [-1] + seasons[0:-1] # bubble -1 # prepare a seasonal ordered dict for output - alts = ordered_dict([(season, {}) for season in seasons]) + alts = dict([(season, {}) for season in seasons]) # add original show name show_obj = sickgear.helpers.find_show_by_id(tvid_prodid, no_mapped_ids=True) @@ -8018,7 +8051,7 @@ class ConfigGeneral(Config): return json_dumps(dict(text='%s\n\n' % ui_output)) @staticmethod - def generate_key(): + def generate_key(*args, **kwargs): """ Return a new randomized API_KEY """ # Create some values to seed md5 @@ -8026,8 +8059,10 @@ class ConfigGeneral(Config): result = hashlib.new('md5', decode_bytes(seed)).hexdigest() - # Return a hex digest of the md5, eg 49f68a5c8493ec2c0bf489821c21fc3b - logger.log(u'New API generated') + # Return a hex digest of the md5, e.g. 
49f68a5c8493ec2c0bf489821c21fc3b + app_name = kwargs.get('app_name') + app_name = '' if not app_name else ' for [%s]' % app_name + logger.log(f'New API generated{app_name}') return result @@ -8060,8 +8095,8 @@ class ConfigGeneral(Config): any_qualities = ([], any_qualities.split(','))[any(any_qualities)] best_qualities = ([], best_qualities.split(','))[any(best_qualities)] - sickgear.QUALITY_DEFAULT = int(Quality.combineQualities(map_list(int, any_qualities), - map_list(int, best_qualities))) + sickgear.QUALITY_DEFAULT = int(Quality.combine_qualities(list(map(int, any_qualities)), + list(map(int, best_qualities)))) sickgear.WANTED_BEGIN_DEFAULT = config.minimax(default_wanted_begin, 0, -1, 10) sickgear.WANTED_LATEST_DEFAULT = config.minimax(default_wanted_latest, 0, -1, 10) sickgear.SHOW_TAG_DEFAULT = default_tag @@ -8074,33 +8109,6 @@ class ConfigGeneral(Config): sickgear.save_config() - @staticmethod - def generateKey(*args, **kwargs): - """ Return a new randomized API_KEY - """ - - try: - from hashlib import md5 - except ImportError: - # noinspection PyUnresolvedReferences,PyCompatibility - from md5 import md5 - - # Create some values to seed md5 - t = str(time.time()) - r = str(random.random()) - - # Create the md5 instance and give it the current time - m = md5(decode_bytes(t)) - - # Update the md5 instance with the random variable - m.update(decode_bytes(r)) - - # Return a hex digest of the md5, eg 49f68a5c8493ec2c0bf489821c21fc3b - app_name = kwargs.get('app_name') - app_name = '' if not app_name else ' for [%s]' % app_name - logger.log(u'New apikey generated%s' % app_name) - return m.hexdigest() - def create_apikey(self, app_name): result = dict() if not app_name: @@ -8108,16 +8116,16 @@ class ConfigGeneral(Config): elif app_name in [k[0] for k in sickgear.API_KEYS if k[0]]: result['result'] = 'Failed: name is not unique' else: - api_key = self.generateKey(app_name=app_name) + api_key = self.generate_key(app_name=app_name) if api_key in [k[1] for k in sickgear.API_KEYS if k[0]]: result['result'] = 'Failed: apikey already exists, try again' else: sickgear.API_KEYS.append([app_name, api_key]) - logger.log('Created apikey for [%s]' % app_name, logger.DEBUG) + logger.debug('Created apikey for [%s]' % app_name) result.update(dict(result='Success: apikey added', added=api_key)) sickgear.USE_API = 1 sickgear.save_config() - ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickgear.CONFIG_FILE)) + ui.notifications.message('Configuration Saved', os.path.join(sickgear.CONFIG_FILE)) return json_dumps(result) @@ -8132,10 +8140,10 @@ class ConfigGeneral(Config): result['result'] = 'Failed: key doesn\'t exist' else: sickgear.API_KEYS = [ak for ak in sickgear.API_KEYS if ak[0] and api_key != ak[1]] - logger.log('Revoked [%s] apikey [%s]' % (app_name, api_key), logger.DEBUG) + logger.debug('Revoked [%s] apikey [%s]' % (app_name, api_key)) result.update(dict(result='Success: apikey removed', removed=True)) sickgear.save_config() - ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickgear.CONFIG_FILE)) + ui.notifications.message('Configuration Saved', os.path.join(sickgear.CONFIG_FILE)) return json_dumps(result) @@ -8175,7 +8183,7 @@ class ConfigGeneral(Config): with sickgear.show_update_scheduler.lock: sickgear.show_update_scheduler.start_time = datetime.time(hour=sickgear.SHOW_UPDATE_HOUR) except (BaseException, Exception) as e: - logger.log('Could not change Show Update Scheduler time: %s' % ex(e), logger.ERROR) + logger.error('Could not change Show Update 
Scheduler time: %s' % ex(e)) sickgear.TRASH_REMOVE_SHOW = config.checkbox_to_value(trash_remove_show) sg_helpers.TRASH_REMOVE_SHOW = sickgear.TRASH_REMOVE_SHOW sickgear.TRASH_ROTATE_LOGS = config.checkbox_to_value(trash_rotate_logs) @@ -8206,19 +8214,19 @@ class ConfigGeneral(Config): sickgear.FANART_LIMIT = config.minimax(fanart_limit, 3, 0, 500) sickgear.SHOWLIST_TAGVIEW = showlist_tagview - # 'Show List' is the must have default fallback. Tags in use that are removed from config ui are restored, + # 'Show List' is the must-have default fallback. Tags in use that are removed from config ui are restored, # not deleted. Deduped list order preservation is key to feature function. my_db = db.DBConnection() sql_result = my_db.select('SELECT DISTINCT tag FROM tv_shows') - new_names = [u'' + v.strip() for v in (show_tags.split(u','), [])[None is show_tags] if v.strip()] + new_names = [v.strip() for v in (show_tags.split(','), [])[None is show_tags] if v.strip()] orphans = [item for item in [v['tag'] for v in sql_result or []] if item not in new_names] cleanser = [] if 0 < len(orphans): cleanser = [item for item in sickgear.SHOW_TAGS if item in orphans or item in new_names] - results += [u'An attempt was prevented to remove a show list group name still in use'] + results += ['An attempt was prevented to remove a show list group name still in use'] dedupe = {} - sickgear.SHOW_TAGS = [dedupe.setdefault(item, item) for item in (cleanser + new_names + [u'Show List']) - if item not in dedupe] + sickgear.SHOW_TAGS = [dedupe.setdefault(item, item) for item in (cleanser + new_names + ['Show List']) + if item not in dedupe] sickgear.HOME_SEARCH_FOCUS = config.checkbox_to_value(home_search_focus) sickgear.USE_IMDB_INFO = config.checkbox_to_value(use_imdb_info) @@ -8230,7 +8238,7 @@ class ConfigGeneral(Config): sickgear.DATE_PRESET = date_preset if time_preset: sickgear.TIME_PRESET_W_SECONDS = time_preset - sickgear.TIME_PRESET = sickgear.TIME_PRESET_W_SECONDS.replace(u':%S', u'') + sickgear.TIME_PRESET = sickgear.TIME_PRESET_W_SECONDS.replace(':%S', '') sickgear.TIMEZONE_DISPLAY = timezone_display # Web interface @@ -8261,8 +8269,8 @@ class ConfigGeneral(Config): sickgear.WEB_IPV64 = config.checkbox_to_value(web_ipv64) sickgear.HANDLE_REVERSE_PROXY = config.checkbox_to_value(handle_reverse_proxy) sickgear.SEND_SECURITY_HEADERS = config.checkbox_to_value(send_security_headers) - hosts = ','.join(filter_iter(lambda name: not helpers.re_valid_hostname(with_allowed=False).match(name), - config.clean_hosts(allowed_hosts).split(','))) + hosts = ','.join(filter(lambda name: not helpers.re_valid_hostname(with_allowed=False).match(name), + config.clean_hosts(allowed_hosts).split(','))) if not hosts or self.request.host_name in hosts: sickgear.ALLOWED_HOSTS = hosts sickgear.ALLOW_ANYIP = config.checkbox_to_value(allow_anyip) @@ -8286,11 +8294,11 @@ class ConfigGeneral(Config): if 0 < len(results): for v in results: - logger.log(v, logger.ERROR) + logger.error(v) ui.notifications.error('Error(s) Saving Configuration', '
      \n'.join(results)) else: - ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickgear.CONFIG_FILE)) + ui.notifications.message('Configuration Saved', os.path.join(sickgear.CONFIG_FILE)) if restart: self.clear_cookie('sickgear-session-%s' % helpers.md5_for_text(sickgear.WEB_PORT)) @@ -8310,7 +8318,7 @@ class ConfigGeneral(Config): pulls = sickgear.update_software_scheduler.action.list_remote_pulls() return json_dumps({'result': 'success', 'pulls': pulls}) except (BaseException, Exception) as e: - logger.log(u'exception msg: ' + ex(e), logger.DEBUG) + logger.debug(f'exception msg: {ex(e)}') return json_dumps({'result': 'fail'}) @staticmethod @@ -8319,7 +8327,7 @@ class ConfigGeneral(Config): branches = sickgear.update_software_scheduler.action.list_remote_branches() return json_dumps({'result': 'success', 'branches': branches, 'current': sickgear.BRANCH or 'main'}) except (BaseException, Exception) as e: - logger.log(u'exception msg: ' + ex(e), logger.DEBUG) + logger.debug(f'exception msg: {ex(e)}') return json_dumps({'result': 'fail'}) @@ -8402,9 +8410,9 @@ class ConfigSearch(Config): sickgear.USENET_RETENTION = config.to_int(usenet_retention, default=500) sickgear.IGNORE_WORDS, sickgear.IGNORE_WORDS_REGEX = helpers.split_word_str(ignore_words - if ignore_words else '') + if ignore_words else '') sickgear.REQUIRE_WORDS, sickgear.REQUIRE_WORDS_REGEX = helpers.split_word_str(require_words - if require_words else '') + if require_words else '') clean_ignore_require_words() @@ -8413,7 +8421,7 @@ class ConfigSearch(Config): sickgear.SEARCH_UNAIRED = bool(config.checkbox_to_value(search_unaired)) sickgear.UNAIRED_RECENT_SEARCH_ONLY = bool(config.checkbox_to_value(unaired_recent_search_only, - value_off=1, value_on=0)) + value_off=1, value_on=0)) sickgear.FLARESOLVERR_HOST = config.clean_url(flaresolverr_host) sg_helpers.FLARESOLVERR_HOST = sickgear.FLARESOLVERR_HOST @@ -8444,7 +8452,7 @@ class ConfigSearch(Config): sickgear.TORRENT_LABEL = torrent_label sickgear.TORRENT_LABEL_VAR = config.to_int((0, torrent_label_var)['rtorrent' == torrent_method], 1) if not (0 <= sickgear.TORRENT_LABEL_VAR <= 5): - logger.log('Setting rTorrent custom%s is not 0-5, defaulting to custom1' % torrent_label_var, logger.DEBUG) + logger.debug('Setting rTorrent custom%s is not 0-5, defaulting to custom1' % torrent_label_var) sickgear.TORRENT_LABEL_VAR = 1 sickgear.TORRENT_VERIFY_CERT = config.checkbox_to_value(torrent_verify_cert) sickgear.TORRENT_PATH = torrent_path @@ -8457,11 +8465,11 @@ class ConfigSearch(Config): if 0 < len(results): for x in results: - logger.log(x, logger.ERROR) + logger.error(x) ui.notifications.error('Error(s) Saving Configuration', '
      \n'.join(results)) else: - ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickgear.CONFIG_FILE)) + ui.notifications.message('Configuration Saved', os.path.join(sickgear.CONFIG_FILE)) self.redirect('/config/search/') @@ -8580,11 +8588,11 @@ class ConfigMediaProcess(Config): if 0 < len(results): for x in results: - logger.log(x, logger.ERROR) + logger.error(x) ui.notifications.error('Error(s) Saving Configuration', '
      \n'.join(results)) else: - ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickgear.CONFIG_FILE)) + ui.notifications.message('Configuration Saved', os.path.join(sickgear.CONFIG_FILE)) self.redirect('/config/media-process/') @@ -8599,7 +8607,7 @@ class ConfigMediaProcess(Config): result = naming.test_name(pattern, multi, abd, sports, anime, anime_type) - result = ek.ek(os.path.join, result['dir'], result['name']) + result = os.path.join(result['dir'], result['name']) return result @@ -8646,15 +8654,15 @@ class ConfigMediaProcess(Config): try: if 'win32' == sys.platform: - rarfile.UNRAR_TOOL = ek.ek(os.path.join, sickgear.PROG_DIR, 'lib', 'rarfile', 'UnRAR.exe') - rar_path = ek.ek(os.path.join, sickgear.PROG_DIR, 'lib', 'rarfile', 'test.rar') + rarfile.UNRAR_TOOL = os.path.join(sickgear.PROG_DIR, 'lib', 'rarfile', 'UnRAR.exe') + rar_path = os.path.join(sickgear.PROG_DIR, 'lib', 'rarfile', 'test.rar') if 'This is only a test.' == decode_str(rarfile.RarFile(rar_path).read(r'test/test.txt')): return 'supported' msg = 'Could not read test file content' except (BaseException, Exception) as e: msg = ex(e) - logger.log(u'Rar Not Supported: %s' % msg, logger.ERROR) + logger.error(f'Rar Not Supported: {msg}') return 'not supported' @@ -8671,9 +8679,9 @@ class ConfigProviders(Config): return json_dumps({'error': 'No Provider Name or url specified'}) provider_dict = dict(zip([sickgear.providers.generic_provider_name(x.get_id()) - for x in sickgear.newznabProviderList], sickgear.newznabProviderList)) + for x in sickgear.newznab_providers], sickgear.newznab_providers)) provider_url_dict = dict(zip([sickgear.providers.generic_provider_url(x.url) - for x in sickgear.newznabProviderList], sickgear.newznabProviderList)) + for x in sickgear.newznab_providers], sickgear.newznab_providers)) temp_provider = newznab.NewznabProvider(name, config.clean_url(url)) @@ -8697,12 +8705,12 @@ class ConfigProviders(Config): error = '\nNo provider %s specified' % error return json_dumps({'success': False, 'error': error}) - if name in [n.name for n in sickgear.newznabProviderList if n.url == url]: - provider = [n for n in sickgear.newznabProviderList if n.name == name][0] + if name in [n.name for n in sickgear.newznab_providers if n.url == url]: + provider = [n for n in sickgear.newznab_providers if n.name == name][0] tv_categories = provider.clean_newznab_categories(provider.all_cats) state = provider.is_enabled() else: - providers = dict(zip([x.get_id() for x in sickgear.newznabProviderList], sickgear.newznabProviderList)) + providers = dict(zip([x.get_id() for x in sickgear.newznab_providers], sickgear.newznab_providers)) temp_provider = newznab.NewznabProvider(name, url, key) if None is not key and starify(key, True): temp_provider.key = providers[temp_provider.get_id()].key @@ -8718,7 +8726,7 @@ class ConfigProviders(Config): return json_dumps({'error': 'Invalid name specified'}) provider_dict = dict( - zip([x.get_id() for x in sickgear.torrentRssProviderList], sickgear.torrentRssProviderList)) + zip([x.get_id() for x in sickgear.torrent_rss_providers], sickgear.torrent_rss_providers)) temp_provider = rsstorrent.TorrentRssProvider(name, url, cookies) @@ -8733,7 +8741,7 @@ class ConfigProviders(Config): @staticmethod def check_providers_ping(): - for p in sickgear.providers.sortedProviderList(): + for p in sickgear.providers.sorted_sources(): if getattr(p, 'ping_iv', None): if p.is_active() and (p.get_id() not in sickgear.provider_ping_thread_pool or not 
sickgear.provider_ping_thread_pool[p.get_id()].is_alive()): @@ -8751,7 +8759,7 @@ class ConfigProviders(Config): pass # stop removed providers - prov = [n.get_id() for n in sickgear.providers.sortedProviderList()] + prov = [n.get_id() for n in sickgear.providers.sorted_sources()] for p in [x for x in sickgear.provider_ping_thread_pool if x not in prov]: sickgear.provider_ping_thread_pool[p].stop = True try: @@ -8767,7 +8775,7 @@ class ConfigProviders(Config): provider_list = [] # add all the newznab info we have into our list - newznab_sources = dict(zip([x.get_id() for x in sickgear.newznabProviderList], sickgear.newznabProviderList)) + newznab_sources = dict(zip([x.get_id() for x in sickgear.newznab_providers], sickgear.newznab_providers)) active_ids = [] reload_page = False if newznab_string: @@ -8810,7 +8818,7 @@ class ConfigProviders(Config): [k for k in nzb_src.may_filter if config.checkbox_to_value(kwargs.get('%s_filter_%s' % (cur_id, k)))]) - for attr in filter_iter(lambda a: hasattr(nzb_src, a), [ + for attr in filter(lambda a: hasattr(nzb_src, a), [ 'search_fallback', 'enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog', 'scene_only', 'scene_loose', 'scene_loose_active', 'scene_rej_nuked', 'scene_nuked_active' ]): @@ -8824,18 +8832,18 @@ class ConfigProviders(Config): new_provider.enabled = True _ = new_provider.caps # when adding a custom, trigger server_type update new_provider.enabled = False - sickgear.newznabProviderList.append(new_provider) + sickgear.newznab_providers.append(new_provider) active_ids.append(cur_id) # delete anything that is missing if sickgear.USE_NZBS: - for source in [x for x in sickgear.newznabProviderList if x.get_id() not in active_ids]: - sickgear.newznabProviderList.remove(source) + for source in [x for x in sickgear.newznab_providers if x.get_id() not in active_ids]: + sickgear.newznab_providers.remove(source) # add all the torrent RSS info we have into our list - torrent_rss_sources = dict(zip([x.get_id() for x in sickgear.torrentRssProviderList], - sickgear.torrentRssProviderList)) + torrent_rss_sources = dict(zip([x.get_id() for x in sickgear.torrent_rss_providers], + sickgear.torrent_rss_providers)) active_ids = [] if torrentrss_string: for curTorrentRssProviderStr in torrentrss_string.split('!!!'): @@ -8871,19 +8879,19 @@ class ConfigProviders(Config): if attr_check in kwargs: setattr(torrss_src, attr, str(kwargs.get(attr_check) or '').strip()) else: - sickgear.torrentRssProviderList.append(new_provider) + sickgear.torrent_rss_providers.append(new_provider) active_ids.append(cur_id) # delete anything that is missing if sickgear.USE_TORRENTS: - for source in [x for x in sickgear.torrentRssProviderList if x.get_id() not in active_ids]: - sickgear.torrentRssProviderList.remove(source) + for source in [x for x in sickgear.torrent_rss_providers if x.get_id() not in active_ids]: + sickgear.torrent_rss_providers.remove(source) # enable/disable states of source providers provider_str_list = provider_order.split() - sources = dict(zip([x.get_id() for x in sickgear.providers.sortedProviderList()], - sickgear.providers.sortedProviderList())) + sources = dict(zip([x.get_id() for x in sickgear.providers.sorted_sources()], + sickgear.providers.sorted_sources())) for cur_src_str in provider_str_list: src_name, src_enabled = cur_src_str.split(':') @@ -8907,7 +8915,7 @@ class ConfigProviders(Config): torrent_rss_sources[src_name].enabled = src_enabled # update torrent source settings - for torrent_src in [src for src in 
sickgear.providers.sortedProviderList() + for torrent_src in [src for src in sickgear.providers.sorted_sources() if sickgear.GenericProvider.TORRENT == src.providerType]: # type: TorrentProvider src_id_prefix = torrent_src.get_id() + '_' @@ -8924,12 +8932,12 @@ class ConfigProviders(Config): elif not starify(key, True): setattr(torrent_src, attr, key) - for attr in filter_iter(lambda a: hasattr(torrent_src, a), [ + for attr in filter(lambda a: hasattr(torrent_src, a), [ 'username', 'uid', '_seed_ratio', 'scene_or_contain' ]): setattr(torrent_src, attr, str(kwargs.get(src_id_prefix + attr.replace('_seed_', ''), '')).strip()) - for attr in filter_iter(lambda a: hasattr(torrent_src, a), [ + for attr in filter(lambda a: hasattr(torrent_src, a), [ 'minseed', 'minleech', 'seed_time' ]): setattr(torrent_src, attr, config.to_int(str(kwargs.get(src_id_prefix + attr, '')).strip())) @@ -8940,7 +8948,7 @@ class ConfigProviders(Config): [k for k in getattr(torrent_src, 'may_filter', 'nop') if config.checkbox_to_value(kwargs.get('%sfilter_%s' % (src_id_prefix, k)))]) - for attr in filter_iter(lambda a: hasattr(torrent_src, a), [ + for attr in filter(lambda a: hasattr(torrent_src, a), [ 'confirmed', 'freeleech', 'reject_m2ts', 'use_after_get_data', 'enable_recentsearch', 'enable_backlog', 'search_fallback', 'enable_scheduled_backlog', 'scene_only', 'scene_loose', 'scene_loose_active', @@ -8948,13 +8956,13 @@ class ConfigProviders(Config): ]): setattr(torrent_src, attr, config.checkbox_to_value(kwargs.get(src_id_prefix + attr))) - for attr, default in filter_iter(lambda arg: hasattr(torrent_src, arg[0]), [ + for attr, default in filter(lambda arg: hasattr(torrent_src, arg[0]), [ ('search_mode', 'eponly'), ]): setattr(torrent_src, attr, str(kwargs.get(src_id_prefix + attr) or default).strip()) # update nzb source settings - for nzb_src in [src for src in sickgear.providers.sortedProviderList() if + for nzb_src in [src for src in sickgear.providers.sorted_sources() if sickgear.GenericProvider.NZB == src.providerType]: src_id_prefix = nzb_src.get_id() + '_' @@ -8972,17 +8980,17 @@ class ConfigProviders(Config): setattr(nzb_src, attr, config.checkbox_to_value(kwargs.get(src_id_prefix + attr)) or not getattr(nzb_src, 'supports_backlog', True)) - for attr in filter_iter(lambda a: hasattr(nzb_src, a), - ['search_fallback', 'enable_backlog', 'enable_scheduled_backlog', - 'scene_only', 'scene_loose', 'scene_loose_active', - 'scene_rej_nuked', 'scene_nuked_active']): + for attr in filter(lambda a: hasattr(nzb_src, a), + ['search_fallback', 'enable_backlog', 'enable_scheduled_backlog', + 'scene_only', 'scene_loose', 'scene_loose_active', + 'scene_rej_nuked', 'scene_nuked_active']): setattr(nzb_src, attr, config.checkbox_to_value(kwargs.get(src_id_prefix + attr))) for (attr, default) in [('scene_or_contain', ''), ('search_mode', 'eponly')]: if hasattr(nzb_src, attr): setattr(nzb_src, attr, str(kwargs.get(src_id_prefix + attr) or default).strip()) - sickgear.NEWZNAB_DATA = '!!!'.join([x.config_str() for x in sickgear.newznabProviderList]) + sickgear.NEWZNAB_DATA = '!!!'.join([x.config_str() for x in sickgear.newznab_providers]) sickgear.PROVIDER_ORDER = provider_list helpers.clear_unused_providers() @@ -8994,10 +9002,10 @@ class ConfigProviders(Config): if 0 < len(results): for x in results: - logger.log(x, logger.ERROR) + logger.error(x) ui.notifications.error('Error(s) Saving Configuration', '
      \n'.join(results)) else: - ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickgear.CONFIG_FILE)) + ui.notifications.message('Configuration Saved', os.path.join(sickgear.CONFIG_FILE)) if reload_page: self.write('reload') @@ -9261,11 +9269,11 @@ class ConfigNotifications(Config): if 0 < len(results): for x in results: - logger.log(x, logger.ERROR) + logger.error(x) ui.notifications.error('Error(s) Saving Configuration', '
      \n'.join(results)) else: - ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickgear.CONFIG_FILE)) + ui.notifications.message('Configuration Saved', os.path.join(sickgear.CONFIG_FILE)) self.redirect('/config/notifications/') @@ -9316,11 +9324,11 @@ class ConfigSubtitles(Config): if 0 < len(results): for x in results: - logger.log(x, logger.ERROR) + logger.error(x) ui.notifications.error('Error(s) Saving Configuration', '
      \n'.join(results)) else: - ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickgear.CONFIG_FILE)) + ui.notifications.message('Configuration Saved', os.path.join(sickgear.CONFIG_FILE)) self.redirect('/config/subtitles/') @@ -9349,11 +9357,11 @@ class ConfigAnime(Config): if 0 < len(results): for x in results: - logger.log(x, logger.ERROR) + logger.error(x) ui.notifications.error('Error(s) Saving Configuration', '
      \n'.join(results)) else: - ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickgear.CONFIG_FILE)) + ui.notifications.message('Configuration Saved', os.path.join(sickgear.CONFIG_FILE)) self.redirect('/config/anime/') @@ -9409,7 +9417,7 @@ class EventLogs(MainHandler): min_level = int(min_level) - regex = re.compile(r'^\d{4}-\d{2}-\d{2}\s*\d{2}:\d{2}:\d{2}\s*([A-Z]+)\s*([^\s]+)\s+:{2}\s*(.*\r?\n)$') + regex = re.compile(r'^\d{4}-\d{2}-\d{2}\s*\d{2}:\d{2}:\d{2}\s*([A-Z]+)\s*(\S+)\s+:{2}\s*(.*\r?\n)$') final_data = [] normal_data = [] @@ -9490,17 +9498,11 @@ class EventLogs(MainHandler): class WebFileBrowser(MainHandler): def index(self, path='', include_files=False, **kwargs): - """ prevent issues with requests using legacy params """ - include_files = include_files or kwargs.get('includeFiles') or False - """ /legacy """ self.set_header('Content-Type', 'application/json') return json_dumps(folders_at_path(path, True, bool(int(include_files)))) def complete(self, term, include_files=0, **kwargs): - """ prevent issues with requests using legacy params """ - include_files = include_files or kwargs.get('includeFiles') or False - """ /legacy """ self.set_header('Content-Type', 'application/json') return json_dumps([entry['path'] for entry in folders_at_path( @@ -9577,11 +9579,11 @@ class CachedImages(MainHandler): def should_try_image(filename, source, days=1, minutes=0): result = True try: - dummy_file = '%s.%s.dummy' % (ek.ek(os.path.splitext, filename)[0], source) - if ek.ek(os.path.isfile, dummy_file): - if ek.ek(os.stat, dummy_file).st_mtime \ - < (int(timestamp_near((datetime.datetime.now() - - datetime.timedelta(days=days, minutes=minutes))))): + dummy_file = '%s.%s.dummy' % (os.path.splitext(filename)[0], source) + if os.path.isfile(dummy_file): + if os.stat(dummy_file).st_mtime \ + < (SGDatetime.timestamp_near(datetime.datetime.now() + - datetime.timedelta(days=days, minutes=minutes))): CachedImages.delete_dummy_image(dummy_file) else: result = False @@ -9591,7 +9593,7 @@ class CachedImages(MainHandler): @staticmethod def create_dummy_image(filename, source): - dummy_file = '%s.%s.dummy' % (ek.ek(os.path.splitext, filename)[0], source) + dummy_file = '%s.%s.dummy' % (os.path.splitext(filename)[0], source) CachedImages.delete_dummy_image(dummy_file) try: with open(dummy_file, 'w'): @@ -9602,28 +9604,28 @@ class CachedImages(MainHandler): @staticmethod def delete_dummy_image(dummy_file): try: - if ek.ek(os.path.isfile, dummy_file): - ek.ek(os.remove, dummy_file) + if os.path.isfile(dummy_file): + os.remove(dummy_file) except (BaseException, Exception): pass @staticmethod def delete_all_dummy_images(filename): for f in ['tmdb', 'tvdb', 'tvmaze']: - CachedImages.delete_dummy_image('%s.%s.dummy' % (ek.ek(os.path.splitext, filename)[0], f)) + CachedImages.delete_dummy_image('%s.%s.dummy' % (os.path.splitext(filename)[0], f)) def index(self, path='', source=None, filename=None, tmdbid=None, tvdbid=None, trans=True): path = path.strip('/') file_name = '' if None is not source: - file_name = ek.ek(os.path.basename, source) + file_name = os.path.basename(source) elif filename not in [None, 0, '0']: file_name = filename - image_file = ek.ek(os.path.join, sickgear.CACHE_DIR, 'images', path, file_name) - image_file = ek.ek(os.path.abspath, image_file.replace('\\', '/')) - if not ek.ek(os.path.isfile, image_file) and has_image_ext(file_name): - basepath = ek.ek(os.path.dirname, image_file) + image_file = os.path.join(sickgear.CACHE_DIR, 'images', path, file_name) + image_file 
= os.path.abspath(image_file.replace('\\', '/')) + if not os.path.isfile(image_file) and has_image_ext(file_name): + basepath = os.path.dirname(image_file) helpers.make_path(basepath) poster_url = '' tmdb_image = False @@ -9640,13 +9642,15 @@ class CachedImages(MainHandler): poster_url = show_obj.poster except (BaseException, Exception): poster_url = '' - if poster_url and not sg_helpers.download_file(poster_url, image_file, nocache=True) and poster_url.find('trakt.us'): + if poster_url \ + and not sg_helpers.download_file(poster_url, image_file, nocache=True) \ + and poster_url.find('trakt.us'): sg_helpers.download_file(poster_url.replace('trakt.us', 'trakt.tv'), image_file, nocache=True) - if tmdb_image and not ek.ek(os.path.isfile, image_file): + if tmdb_image and not os.path.isfile(image_file): self.create_dummy_image(image_file, 'tmdb') if None is source and tvdbid not in [None, 'None', 0, '0'] \ - and not ek.ek(os.path.isfile, image_file) \ + and not os.path.isfile(image_file) \ and self.should_try_image(image_file, 'tvdb'): try: tvinfo_config = sickgear.TVInfoAPI(TVINFO_TVDB).api_params.copy() @@ -9659,15 +9663,15 @@ class CachedImages(MainHandler): poster_url = '' if poster_url: sg_helpers.download_file(poster_url, image_file, nocache=True) - if not ek.ek(os.path.isfile, image_file): + if not os.path.isfile(image_file): self.create_dummy_image(image_file, 'tvdb') - if ek.ek(os.path.isfile, image_file): + if os.path.isfile(image_file): self.delete_all_dummy_images(image_file) - if not ek.ek(os.path.isfile, image_file): - image_file = ek.ek(os.path.join, sickgear.PROG_DIR, 'gui', 'slick', - 'images', ('image-light.png', 'trans.png')[bool(int(trans))]) + if not os.path.isfile(image_file): + image_file = os.path.join(sickgear.PROG_DIR, 'gui', 'slick', 'images', + ('image-light.png', 'trans.png')[bool(int(trans))]) else: helpers.set_file_timestamp(image_file, min_age=3, new_time=None) @@ -9682,9 +9686,9 @@ class CachedImages(MainHandler): :param filename: image file name with path :param days: max age to trigger reload of image """ - if not ek.ek(os.path.isfile, filename) or \ - ek.ek(os.stat, filename).st_mtime < \ - (int(timestamp_near((datetime.datetime.now() - datetime.timedelta(days=days))))): + if not os.path.isfile(filename) or \ + os.stat(filename).st_mtime < \ + SGDatetime.timestamp_near(td=datetime.timedelta(days=days)): return True return False @@ -9701,7 +9705,7 @@ class CachedImages(MainHandler): :param tvid_prodid: :param thumb: return thumb or normal as fallback :param pid: optional person_id - :param prefer_person: prefer person image if person_id is set and character has more then 1 person assigned + :param prefer_person: prefer person image if person_id is set and character has more than 1 person assigned """ _ = kwargs.get('oid') # suppress pyc non used var highlight, oid (original id) is a visual ui key show_obj = tvid_prodid and helpers.find_show_by_id(tvid_prodid) @@ -9734,9 +9738,9 @@ class CachedImages(MainHandler): sg_helpers.download_file(char_obj.thumb_url, image_thumb, nocache=True) primary, fallback = ((image_normal, image_thumb), (image_thumb, image_normal))[thumb] - if ek.ek(os.path.isfile, primary): + if os.path.isfile(primary): image_file = primary - elif ek.ek(os.path.isfile, fallback): + elif os.path.isfile(fallback): image_file = fallback elif person_id: @@ -9772,9 +9776,9 @@ class CachedImages(MainHandler): sg_helpers.download_file(person_obj.thumb_url, image_thumb, nocache=True) primary, fallback = ((image_normal, image_thumb), (image_thumb, 
image_normal))[thumb] - if ek.ek(os.path.isfile, primary): + if os.path.isfile(primary): image_file = primary - elif ek.ek(os.path.isfile, fallback): + elif os.path.isfile(fallback): image_file = fallback return self.image_data(image_file, cast_default=True) @@ -9789,7 +9793,7 @@ class CachedImages(MainHandler): :return: binary image data or None """ if cast_default and None is image_file: - image_file = ek.ek(os.path.join, sickgear.PROG_DIR, 'gui', 'slick', 'images', 'poster-person.jpg') + image_file = os.path.join(sickgear.PROG_DIR, 'gui', 'slick', 'images', 'poster-person.jpg') mime_type, encoding = MimeTypes().guess_type(image_file) self.set_header('Content-Type', mime_type) diff --git a/sickgear/webserveInit.py b/sickgear/webserveInit.py index 8fd7b086..c03d6258 100644 --- a/sickgear/webserveInit.py +++ b/sickgear/webserveInit.py @@ -1,5 +1,5 @@ import os -from sys import exc_info, platform +from sys import exc_info import threading from tornado.ioloop import IOLoop @@ -8,14 +8,9 @@ from tornado.routing import AnyMatches, Rule from tornado.web import Application, _ApplicationRouter from . import logger, webapi, webserve -from ._legacy import LegacyConfigPostProcessing, LegacyHomeAddShows, \ - LegacyManageManageSearches, LegacyManageShowProcesses, LegacyErrorLogs from .helpers import create_https_certificates, re_valid_hostname import sickgear -from _23 import PY38 -from six import PY2 - # noinspection PyUnreachableCode if False: # noinspection PyUnresolvedReferences @@ -79,7 +74,7 @@ class WebServer(threading.Thread): # If either the HTTPS certificate or key do not exist, make some self-signed ones. if make_cert: if not create_https_certificates(self.https_cert, self.https_key): - logger.log(u'Unable to create CERT/KEY files, disabling HTTPS') + logger.log('Unable to create CERT/KEY files, disabling HTTPS') update_cfg |= False is not sickgear.ENABLE_HTTPS sickgear.ENABLE_HTTPS = False self.enable_https = False @@ -87,7 +82,7 @@ class WebServer(threading.Thread): update_cfg = True if not (os.path.isfile(self.https_cert) and os.path.isfile(self.https_key)): - logger.log(u'Disabled HTTPS because of missing CERT and KEY files', logger.WARNING) + logger.warning('Disabled HTTPS because of missing CERT and KEY files') update_cfg |= False is not sickgear.ENABLE_HTTPS sickgear.ENABLE_HTTPS = False self.enable_https = False @@ -219,22 +214,8 @@ class WebServer(threading.Thread): (r'%s/api/builder(/?)(.*)' % self.options['web_root'], webserve.ApiBuilder), (r'%s/api(/?.*)' % self.options['web_root'], webapi.Api), # ---------------------------------------------------------------------------------------------------------- - # legacy deprecated Aug 2019 - (r'%s/home/addShows/?$' % self.options['web_root'], LegacyHomeAddShows), - (r'%s/manage/manageSearches/?$' % self.options['web_root'], LegacyManageManageSearches), - (r'%s/manage/showProcesses/?$' % self.options['web_root'], LegacyManageShowProcesses), - (r'%s/config/postProcessing/?$' % self.options['web_root'], LegacyConfigPostProcessing), - (r'%s/errorlogs/?$' % self.options['web_root'], LegacyErrorLogs), - (r'%s/home/is_alive(/?.*)' % self.options['web_root'], webserve.IsAliveHandler), - (r'%s/home/addShows(/?.*)' % self.options['web_root'], webserve.AddShows), - (r'%s/manage/manageSearches(/?.*)' % self.options['web_root'], webserve.ManageSearch), - (r'%s/manage/showProcesses(/?.*)' % self.options['web_root'], webserve.ShowTasks), - (r'%s/config/postProcessing(/?.*)' % self.options['web_root'], webserve.ConfigMediaProcess), - 
(r'%s/errorlogs(/?.*)' % self.options['web_root'], webserve.EventLogs),
-            # ----------------------------------------------------------------------------------------------------------
-            # legacy deprecated Aug 2019 - never remove as used in external scripts
+            # legacy deprecated Aug 2019 - NEVER remove as used in external scripts
             (r'%s/home/postprocess(/?.*)' % self.options['web_root'], webserve.HomeProcessMedia),
-            (r'%s(/?update_watched_state_kodi/?)' % self.options['web_root'], webserve.NoXSRFHandler),
             # regular catchall routes - keep here at the bottom
             (r'%s/home(/?.*)' % self.options['web_root'], webserve.Home),
             (r'%s/manage/(/?.*)' % self.options['web_root'], webserve.Manage),
@@ -252,25 +233,20 @@ class WebServer(threading.Thread):
         protocol, ssl_options = (('http', None), ('https', {'certfile': self.https_cert, 'keyfile': self.https_key}))[self.enable_https]

-        logger.log(u'Starting SickGear on %s://%s:%s/' % (protocol, self.options['host'], self.options['port']))
+        logger.log(f'Starting SickGear on {protocol}://{self.options["host"]}:{self.options["port"]}/')

         # python 3 needs to start event loop first
-        if not PY2:
-            import asyncio
-            if 'win32' == platform and PY38:
-                # noinspection PyUnresolvedReferences
-                asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
-            asyncio.set_event_loop(asyncio.new_event_loop())
-            from tornado.platform.asyncio import AnyThreadEventLoopPolicy
-            asyncio.set_event_loop_policy(AnyThreadEventLoopPolicy())
+        import asyncio
+        asyncio.set_event_loop(asyncio.new_event_loop())
+        from tornado.platform.asyncio import AnyThreadEventLoopPolicy
+        asyncio.set_event_loop_policy(AnyThreadEventLoopPolicy())

         try:
             self.server = self.app.listen(self.options['port'], self.options['host'], ssl_options=ssl_options, xheaders=sickgear.HANDLE_REVERSE_PROXY, protocol=protocol)
         except (BaseException, Exception):
             etype, evalue, etb = exc_info()
-            logger.log('Could not start webserver on %s. Exception: %s, Error: %s' % (
-                self.options['port'], etype, evalue), logger.ERROR)
+            logger.error(f'Could not start webserver on {self.options["port"]}. 
Exception: {etype}, Error: {evalue}') return self.io_loop = IOLoop.current() @@ -296,7 +272,7 @@ class WebServer(threading.Thread): getattr(s, nh)() sickgear.classes.loading_msg.reset() self.io_loop.add_callback(d_f, self, new_handler) - logger.log('Switching HTTP Server handlers to %s' % new_handler, logger.DEBUG) + logger.debug('Switching HTTP Server handlers to %s' % new_handler) def shut_down(self): self.alive = False diff --git a/tests/common_tests.py b/tests/common_tests.py index 2f77839b..b255b592 100644 --- a/tests/common_tests.py +++ b/tests/common_tests.py @@ -144,7 +144,7 @@ class QualityTests(unittest.TestCase): def check_quality_names(self, quality, cases): for fn in cases: - second = common.Quality.nameQuality(fn) + second = common.Quality.name_quality(fn) self.assertEqual(quality, second, msg='fail [%s] != [%s] for case: %s' % (Quality.qualityStrings[quality], Quality.qualityStrings[second], fn)) @@ -157,7 +157,7 @@ class QualityTests(unittest.TestCase): def check_wantedquality_list(self, cases): for show_quality, result in cases: - sq = common.Quality.combineQualities(*show_quality) + sq = common.Quality.combine_qualities(*show_quality) wd = common.WantedQualities() _ = wd.get_wantedlist(sq, False, common.Quality.NONE, common.UNAIRED, manual=True) for w, v in iteritems(wd): @@ -167,7 +167,7 @@ class QualityTests(unittest.TestCase): def check_wantedquality_get_wantedlist(self, cases): for show_quality, result in cases: - sq = common.Quality.combineQualities(*show_quality) + sq = common.Quality.combine_qualities(*show_quality) wd = common.WantedQualities() for case, wlist in result: ka = {'qualities': sq} @@ -178,7 +178,7 @@ class QualityTests(unittest.TestCase): def check_sceneQuality(self, cases): msg = 'Test case: "%s", actual: [%s] != expected: [%s]' for show_name, result in cases: - sq = common.Quality.sceneQuality(show_name[0], show_name[1]) + sq = common.Quality.scene_quality(show_name[0], show_name[1]) self.assertEqual(result, sq, msg=msg % (show_name[0], Quality.qualityStrings[sq], Quality.qualityStrings[result])) @@ -186,8 +186,8 @@ class QualityTests(unittest.TestCase): def test_SDTV(self): - self.assertEqual(common.Quality.compositeStatus(common.DOWNLOADED, common.Quality.SDTV), - common.Quality.statusFromName('Test.Show.S01E02-GROUP.mkv')) + self.assertEqual(common.Quality.composite_status(common.DOWNLOADED, common.Quality.SDTV), + common.Quality.status_from_name('Test.Show.S01E02-GROUP.mkv')) def test_qualites(self): self.longMessage = True diff --git a/tests/helpers_tests.py b/tests/helpers_tests.py index e78da024..e80e1827 100644 --- a/tests/helpers_tests.py +++ b/tests/helpers_tests.py @@ -65,7 +65,7 @@ class HelpersTests(unittest.TestCase): ((WANTED, Quality.NONE), True), ] for c, b in test_cases: - self.assertEqual(helpers.should_delete_episode(Quality.compositeStatus(*c)), b) + self.assertEqual(helpers.should_delete_episode(Quality.composite_status(*c)), b) def test_encrypt(self): helpers.unique_key1 = '0x12d48f154876c16164a1646' diff --git a/tests/migration_tests.py b/tests/migration_tests.py index 1682afdb..a03cd27c 100644 --- a/tests/migration_tests.py +++ b/tests/migration_tests.py @@ -48,7 +48,7 @@ class MigrationBasicTests(test.SickbeardTestDBCase): update.execute() sleep(0.1) - db.MigrationCode(my_db) + db.migration_code(my_db) my_db.close() # force python to garbage collect all db connections, so that the file can be deleted @@ -67,9 +67,9 @@ class MigrationBasicTests(test.SickbeardTestDBCase): # 0 -> 31 class OldInitialSchema(db.SchemaUpgrade): 
def execute(self): - db.backup_database(self.connection, 'sickbeard.db', self.checkDBVersion()) + db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) - if not self.hasTable('tv_shows') and not self.hasTable('db_version'): + if not self.has_table('tv_shows') and not self.has_table('db_version'): queries = [ 'CREATE TABLE db_version (db_version INTEGER);', 'CREATE TABLE history (' @@ -105,35 +105,31 @@ class OldInitialSchema(db.SchemaUpgrade): self.connection.action(query) else: - cur_db_version = self.checkDBVersion() + cur_db_version = self.call_check_db_version() if cur_db_version < MIN_DB_VERSION: logger.log_error_and_exit( - u'Your database version (' - + str(cur_db_version) - + ') is too old to migrate from what this version of SickGear supports (' - + str(MIN_DB_VERSION) + ').' + '\n' - + 'Upgrade using a previous version (tag) build 496 to build 501 of SickGear first or' - ' remove database file to begin fresh.' + f'Your database version ({str(cur_db_version)}) is too old to migrate from what' + f' this version of SickGear supports ({str(MIN_DB_VERSION)}).\n' + f'Upgrade using a previous version (tag) build 496 to build 501 of SickGear first' + f' or remove database file to begin fresh.' ) if cur_db_version > MAX_DB_VERSION: logger.log_error_and_exit( - u'Your database version (' - + str(cur_db_version) - + ') has been incremented past what this version of SickGear supports (' - + str(MAX_DB_VERSION) + ').' + '\n' - + 'If you have used other forks of SickGear,' - ' your database may be unusable due to their modifications.' + f'Your database version ({str(cur_db_version)}) has been incremented past what' + f' this version of SickGear supports ({str(MAX_DB_VERSION)}).\n' + f'If you have used other forks of SickGear,' + f' your database may be unusable due to their modifications.' 
) - return self.checkDBVersion() + return self.call_check_db_version() class AddDefaultEpStatusToTvShows(db.SchemaUpgrade): def execute(self): - self.addColumn('tv_shows', 'default_ep_status', 'TEXT', '') - self.setDBVersion(41, check_db_version=False) + self.add_column('tv_shows', 'default_ep_status', 'TEXT', '') + self.set_db_version(41, check_db_version=False) if '__main__' == __name__: diff --git a/tests/name_parser_tests.py b/tests/name_parser_tests.py index daa49edc..05cefe99 100644 --- a/tests/name_parser_tests.py +++ b/tests/name_parser_tests.py @@ -367,16 +367,16 @@ combination_test_cases = [ ] unicode_test_cases = [ - (u'The.Big.Bang.Theory.2x07.The.Panty.Pi\xf1ata.Polarization.720p.HDTV.x264.AC3-SHELDON.mkv', + ('The.Big.Bang.Theory.2x07.The.Panty.Pi\xf1ata.Polarization.720p.HDTV.x264.AC3-SHELDON.mkv', parser.ParseResult( - u'The.Big.Bang.Theory.2x07.The.Panty.Pi\xf1ata.Polarization.720p.HDTV.x264.AC3-SHELDON.mkv', - u'The Big Bang Theory', 2, [7], u'The.Panty.Pi\xf1ata.Polarization.720p.HDTV.x264.AC3', u'SHELDON', + 'The.Big.Bang.Theory.2x07.The.Panty.Pi\xf1ata.Polarization.720p.HDTV.x264.AC3-SHELDON.mkv', + 'The Big Bang Theory', 2, [7], 'The.Panty.Pi\xf1ata.Polarization.720p.HDTV.x264.AC3', 'SHELDON', version=-1) ), ('The.Big.Bang.Theory.2x07.The.Panty.Pi\xc3\xb1ata.Polarization.720p.HDTV.x264.AC3-SHELDON.mkv', parser.ParseResult( - u'The.Big.Bang.Theory.2x07.The.Panty.Pi\xf1ata.Polarization.720p.HDTV.x264.AC3-SHELDON.mkv', - u'The Big Bang Theory', 2, [7], u'The.Panty.Pi\xf1ata.Polarization.720p.HDTV.x264.AC3', u'SHELDON', + 'The.Big.Bang.Theory.2x07.The.Panty.Pi\xf1ata.Polarization.720p.HDTV.x264.AC3-SHELDON.mkv', + 'The Big Bang Theory', 2, [7], 'The.Panty.Pi\xf1ata.Polarization.720p.HDTV.x264.AC3', 'SHELDON', version=-1) ), ] @@ -508,8 +508,8 @@ class MultiSceneNumbering(test.SickbeardTestDBCase): ) my_db = db.DBConnection() my_db.mass_action(c_l) - name_cache.addNameToCache(e_t['show_obj']['name'], tvid=e_t['show_obj']['tvid'], - prodid=e_t['show_obj']['prodid']) + name_cache.add_name_to_cache(e_t['show_obj']['name'], tvid=e_t['show_obj']['tvid'], + prodid=e_t['show_obj']['prodid']) for _t in e_t['tests']: try: res = parser.NameParser(True, convert=True).parse(_t['parse_name']) @@ -533,8 +533,8 @@ class EpisodeNameCases(unittest.TestCase): e_obj.season = e_o['season'] e_obj.episode = e_o['number'] s.sxe_ep_obj.setdefault(e_obj.season, {})[e_obj.episode] = e_obj - name_cache.addNameToCache(e_t['show_obj']['name'], tvid=e_t['show_obj']['tvid'], - prodid=e_t['show_obj']['prodid']) + name_cache.add_name_to_cache(e_t['show_obj']['name'], tvid=e_t['show_obj']['tvid'], + prodid=e_t['show_obj']['prodid']) try: res = parser.NameParser(True).parse(e_t['parse_name']) except (BaseException, Exception): @@ -550,7 +550,7 @@ class InvalidCases(unittest.TestCase): for s in [TVShowTest(name=rls_name, prodid=prodid, tvid=tvid, is_anime=is_anime)]: sickgear.showList.append(s) sickgear.showDict[s.sid_int] = s - name_cache.addNameToCache(show_name, tvid=tvid, prodid=prodid) + name_cache.add_name_to_cache(show_name, tvid=tvid, prodid=prodid) invalidexception = False try: _ = parser.NameParser(True).parse(rls_name) @@ -939,7 +939,7 @@ class ExtraInfoNoNameTests(test.SickbeardTestDBCase): sickgear.showList = [tvs] sickgear.showDict = {tvs.sid_int: tvs} name_cache.nameCache = {} - name_cache.buildNameCache() + name_cache.build_name_cache() np = parser.NameParser() r = np.parse(case[2], cache_result=False) diff --git a/tests/network_timezone_tests.py b/tests/network_timezone_tests.py index 
dc757af8..886f5f1e 100644 --- a/tests/network_timezone_tests.py +++ b/tests/network_timezone_tests.py @@ -10,8 +10,6 @@ import datetime from lib.dateutil import tz import sickgear from sickgear import network_timezones, helpers -# noinspection PyPep8Naming -import encodingKludge as ek class NetworkTimezoneTests(test.SickbeardTestDBCase): @@ -33,12 +31,12 @@ class NetworkTimezoneTests(test.SickbeardTestDBCase): @classmethod def remove_zoneinfo(cls): # delete all existing zoneinfo files - for (path, dirs, files) in ek.ek(os.walk, helpers.real_path(sickgear.ZONEINFO_DIR)): + for (path, dirs, files) in os.walk(helpers.real_path(sickgear.ZONEINFO_DIR)): for filename in files: if filename.endswith('.tar.gz'): - file_w_path = ek.ek(os.path.join, path, filename) + file_w_path = os.path.join(path, filename) try: - ek.ek(os.remove, file_w_path) + os.remove(file_w_path) except (BaseException, Exception): pass diff --git a/tests/newznab_tests.py b/tests/newznab_tests.py index 25bacb50..a5afea27 100644 --- a/tests/newznab_tests.py +++ b/tests/newznab_tests.py @@ -39,12 +39,12 @@ item_parse_test_cases = [ ('Show.Name.S02E04.720p.HDTV.x264-GROUP', 'https://test.h')), (('Show.Name.S02E05.720p.HDTV.x264-GROUP-JUNK[JUNK]', 'https://test.h'), ('Show.Name.S02E05.720p.HDTV.x264-GROUP', 'https://test.h')), - ((u'Show.Name.S02E06.720p.HDTV.x264-GROUP-JUNK[JUNK帝]', 'https://test.h'), - (u'Show.Name.S02E06.720p.HDTV.x264-GROUP', 'https://test.h')), - ((u'Show.Name.S02E07-EpName帝.720p.HDTV.x264-GROUP帝-JUNK[JUNK帝]', 'https://test.h'), - (u'Show.Name.S02E07-EpName帝.720p.HDTV.x264-GROUP帝', 'https://test.h')), - ((u'[grp 帝] Show Name - 11 [1024x576 h264 AAC ger-sub][123456].mp4', 'https://test.h'), - (u'[grp.帝].Show.Name.-.11.[1024x576.h264.AAC.ger-sub][123456]', 'https://test.h')), + (('Show.Name.S02E06.720p.HDTV.x264-GROUP-JUNK[JUNK帝]', 'https://test.h'), + ('Show.Name.S02E06.720p.HDTV.x264-GROUP', 'https://test.h')), + (('Show.Name.S02E07-EpName帝.720p.HDTV.x264-GROUP帝-JUNK[JUNK帝]', 'https://test.h'), + ('Show.Name.S02E07-EpName帝.720p.HDTV.x264-GROUP帝', 'https://test.h')), + (('[grp 帝] Show Name - 11 [1024x576 h264 AAC ger-sub][123456].mp4', 'https://test.h'), + ('[grp.帝].Show.Name.-.11.[1024x576.h264.AAC.ger-sub][123456]', 'https://test.h')), ] size_test_cases = [ diff --git a/tests/pp_tests.py b/tests/pp_tests.py index f6fd97a4..58e05bf5 100644 --- a/tests/pp_tests.py +++ b/tests/pp_tests.py @@ -27,7 +27,7 @@ import unittest import sickgear from sickgear.helpers import real_path -from sickgear.name_cache import addNameToCache +from sickgear.name_cache import add_name_to_cache from sickgear.postProcessor import PostProcessor from sickgear.processTV import ProcessTVShow from sickgear.tv import TVEpisode, TVShow, logger @@ -94,7 +94,7 @@ class PPBasicTests(test.SickbeardTestDBCase): ep_obj.release_name = 'test setter' ep_obj.save_to_db() - addNameToCache('show name', tvid=TVINFO_TVDB, prodid=3) + add_name_to_cache('show name', tvid=TVINFO_TVDB, prodid=3) sickgear.PROCESS_METHOD = 'move' pp = PostProcessor(test.FILEPATH) diff --git a/tests/scene_helpers_tests.py b/tests/scene_helpers_tests.py index 7cbe6257..29f80deb 100644 --- a/tests/scene_helpers_tests.py +++ b/tests/scene_helpers_tests.py @@ -26,7 +26,7 @@ class SceneTests(test.SickbeardTestDBCase): s.tvid = TVINFO_TVDB s.name = name - result = show_name_helpers.allPossibleShowNames(s, season=season) + result = show_name_helpers.all_possible_show_names(s, season=season) self.assertTrue(len(set(expected).intersection(set(result))) == len(expected)) def 
_test_pass_wordlist_checks(self, name, expected): @@ -75,7 +75,7 @@ class SceneExceptionTestCase(test.SickbeardTestDBCase): sickgear.showDict[s.sid_int] = s sickgear.webserve.Home.make_showlist_unique_names() scene_exceptions.retrieve_exceptions() - name_cache.buildNameCache() + name_cache.build_name_cache() def test_sceneExceptionsEmpty(self): self.assertEqual(scene_exceptions.get_scene_exceptions(0, 0), []) @@ -99,9 +99,9 @@ class SceneExceptionTestCase(test.SickbeardTestDBCase): sickgear.showList.append(s) sickgear.showDict[s.sid_int] = s scene_exceptions.retrieve_exceptions() - name_cache.buildNameCache() - self.assertEqual(scene_exceptions.get_scene_exception_by_name(u'ブラック・ラグーン'), [1, 79604, -1]) - self.assertEqual(scene_exceptions.get_scene_exception_by_name(u'Burakku Ragūn'), [1, 79604, -1]) + name_cache.build_name_cache() + self.assertEqual(scene_exceptions.get_scene_exception_by_name('ブラック・ラグーン'), [1, 79604, -1]) + self.assertEqual(scene_exceptions.get_scene_exception_by_name('Burakku Ragūn'), [1, 79604, -1]) self.assertEqual(scene_exceptions.get_scene_exception_by_name('Rokka no Yuusha'), [1, 295243, -1]) def test_sceneExceptionByNameEmpty(self): @@ -114,11 +114,11 @@ class SceneExceptionTestCase(test.SickbeardTestDBCase): my_db.action('DELETE FROM scene_exceptions WHERE 1=1') # put something in the cache - name_cache.addNameToCache('Cached Name', prodid=0) + name_cache.add_name_to_cache('Cached Name', prodid=0) # updating should not clear the cache this time since our exceptions didn't change scene_exceptions.retrieve_exceptions() - self.assertEqual(name_cache.retrieveNameFromCache('Cached Name'), (0, 0)) + self.assertEqual(name_cache.retrieve_name_from_cache('Cached Name'), (0, 0)) if '__main__' == __name__: diff --git a/tests/show_tests.py b/tests/show_tests.py index 06471da0..82bac9b0 100644 --- a/tests/show_tests.py +++ b/tests/show_tests.py @@ -31,7 +31,7 @@ from sickgear.tv import TVEpisode, TVShow wanted_tests = [ dict( name='Start and End', - show=dict(indexer=1, indexerid=1, quality=Quality.combineQualities([Quality.SDTV], [])), + show=dict(indexer=1, indexerid=1, quality=Quality.combine_qualities([Quality.SDTV], [])), episodes=[ dict(season=1, episode=1, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 1)), dict(season=1, episode=2, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 1)), @@ -64,7 +64,7 @@ wanted_tests = [ dict( name='Start and End, entire season', - show=dict(indexer=1, indexerid=10, quality=Quality.combineQualities([Quality.SDTV], [])), + show=dict(indexer=1, indexerid=10, quality=Quality.combine_qualities([Quality.SDTV], [])), episodes=[ dict(season=1, episode=1, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 2)), dict(season=1, episode=2, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 1)), @@ -97,7 +97,7 @@ wanted_tests = [ dict( name='Start, entire season', - show=dict(indexer=1, indexerid=210, quality=Quality.combineQualities([Quality.SDTV], [])), + show=dict(indexer=1, indexerid=210, quality=Quality.combine_qualities([Quality.SDTV], [])), episodes=[ dict(season=1, episode=1, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 2)), dict(season=1, episode=2, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 1)), @@ -130,7 +130,7 @@ wanted_tests = [ dict( name='End only', - show=dict(indexer=1, indexerid=2, quality=Quality.combineQualities([Quality.SDTV], [])), + show=dict(indexer=1, indexerid=2, 
quality=Quality.combine_qualities([Quality.SDTV], [])), episodes=[ dict(season=1, episode=1, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 3)), dict(season=1, episode=2, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 1)), @@ -163,7 +163,7 @@ wanted_tests = [ dict( name='End only, entire season', - show=dict(indexer=1, indexerid=20, quality=Quality.combineQualities([Quality.SDTV], [])), + show=dict(indexer=1, indexerid=20, quality=Quality.combine_qualities([Quality.SDTV], [])), episodes=[ dict(season=1, episode=1, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 4)), dict(season=1, episode=2, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 1)), @@ -196,7 +196,7 @@ wanted_tests = [ dict( name='End only, multi season', - show=dict(indexer=1, indexerid=3, quality=Quality.combineQualities([Quality.SDTV], [])), + show=dict(indexer=1, indexerid=3, quality=Quality.combine_qualities([Quality.SDTV], [])), episodes=[ dict(season=1, episode=1, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 5)), dict(season=1, episode=2, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 1)), @@ -229,7 +229,7 @@ wanted_tests = [ dict( name='End only, multi season, entire season', - show=dict(indexer=1, indexerid=30, quality=Quality.combineQualities([Quality.SDTV], [])), + show=dict(indexer=1, indexerid=30, quality=Quality.combine_qualities([Quality.SDTV], [])), episodes=[ dict(season=1, episode=1, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 6)), dict(season=1, episode=2, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 1)), @@ -262,7 +262,7 @@ wanted_tests = [ dict( name='End only, multi season, cross season', - show=dict(indexer=1, indexerid=33, quality=Quality.combineQualities([Quality.SDTV], [])), + show=dict(indexer=1, indexerid=33, quality=Quality.combine_qualities([Quality.SDTV], [])), episodes=[ dict(season=1, episode=1, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 7)), dict(season=1, episode=2, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 1)), @@ -295,7 +295,7 @@ wanted_tests = [ dict( name='all episodes unaired', - show=dict(indexer=1, indexerid=35, quality=Quality.combineQualities([Quality.SDTV], [])), + show=dict(indexer=1, indexerid=35, quality=Quality.combine_qualities([Quality.SDTV], [])), episodes=[ dict(season=1, episode=1, status=UNAIRED, quality=Quality.NONE, airdate=datetime.date.fromordinal(1)), dict(season=1, episode=2, status=UNAIRED, quality=Quality.NONE, airdate=datetime.date.fromordinal(1)), @@ -317,7 +317,7 @@ wanted_tests = [ dict( name='no episodes', - show=dict(indexer=1, indexerid=36, quality=Quality.combineQualities([Quality.SDTV], [])), + show=dict(indexer=1, indexerid=36, quality=Quality.combine_qualities([Quality.SDTV], [])), episodes=[ ], start_wanted=7, end_wanted=3, @@ -332,7 +332,7 @@ wanted_tests = [ dict( name='no episodes, whole first season', - show=dict(indexer=1, indexerid=37, quality=Quality.combineQualities([Quality.SDTV], [])), + show=dict(indexer=1, indexerid=37, quality=Quality.combine_qualities([Quality.SDTV], [])), episodes=[ ], start_wanted=-1, end_wanted=0, @@ -347,7 +347,7 @@ wanted_tests = [ dict( name='no episodes, whole last season', - show=dict(indexer=1, indexerid=38, quality=Quality.combineQualities([Quality.SDTV], [])), + show=dict(indexer=1, indexerid=38, quality=Quality.combine_qualities([Quality.SDTV], [])), episodes=[ ], start_wanted=0, 
end_wanted=-1, @@ -362,7 +362,7 @@ wanted_tests = [ dict( name='no episodes, whole first and last season', - show=dict(indexer=1, indexerid=39, quality=Quality.combineQualities([Quality.SDTV], [])), + show=dict(indexer=1, indexerid=39, quality=Quality.combine_qualities([Quality.SDTV], [])), episodes=[ ], start_wanted=-1, end_wanted=-1, @@ -408,7 +408,7 @@ class ShowAddTests(test.SickbeardTestDBCase): show_obj.sxe_ep_obj[ep['season']] = {} show_obj.sxe_ep_obj[ep['season']][ep['episode']] = TVEpisode(show_obj, ep['season'], ep['episode']) episode = show_obj.sxe_ep_obj[ep['season']][ep['episode']] - episode.status = Quality.compositeStatus(ep['status'], ep['quality']) + episode.status = Quality.composite_status(ep['status'], ep['quality']) episode.airdate = ep['airdate'] episode.name = 'nothing' episode.epid = ep_id diff --git a/tests/snatch_tests.py b/tests/snatch_tests.py index 6e75c8f6..5240becd 100644 --- a/tests/snatch_tests.py +++ b/tests/snatch_tests.py @@ -57,7 +57,7 @@ class SearchTest(test.SickbeardTestDBCase): return True def __init__(self, something): - for provider in sickgear.providers.sortedProviderList(): + for provider in sickgear.providers.sorted_sources(): provider.get_url = self._fake_getURL #provider.isActive = self._fake_isActive diff --git a/tests/test_lib.py b/tests/test_lib.py index 59b255f4..58b07ac8 100644 --- a/tests/test_lib.py +++ b/tests/test_lib.py @@ -42,10 +42,10 @@ TESTDBNAME = 'sickbeard.db' TESTCACHEDBNAME = 'cache.db' TESTFAILEDDBNAME = 'failed.db' -SHOWNAME = u'show name' +SHOWNAME = 'show name' SEASON = 4 EPISODE = 2 -FILENAME = u'show name - s0' + str(SEASON) + 'e0' + str(EPISODE) + '.mkv' +FILENAME = f'show name - s0{SEASON}e0{EPISODE}.mkv' FILEDIR = os.path.join(TESTDIR, SHOWNAME) FILEPATH = os.path.join(FILEDIR, FILENAME) @@ -91,8 +91,8 @@ sickgear.NAMING_SPORTS_PATTERN = '' sickgear.NAMING_MULTI_EP = 1 sickgear.PROVIDER_ORDER = [] -sickgear.newznabProviderList = providers.getNewznabProviderList('') -sickgear.providerList = providers.makeProviderList() +sickgear.newznab_providers = providers.newznab_source_list('') +sickgear.provider_list = providers.provider_modules() sickgear.PROG_DIR = os.path.abspath('..') # sickgear.DATA_DIR = os.path.join(sickgear.PROG_DIR, 'tests') @@ -195,16 +195,16 @@ def setup_test_db(): """upgrades the db to the latest version """ # upgrading the db - db.MigrationCode(db.DBConnection()) + db.migration_code(db.DBConnection()) # fix up any db problems - db.sanityCheckDatabase(db.DBConnection(), mainDB.MainSanityCheck) + db.sanity_check_db(db.DBConnection(), mainDB.MainSanityCheck) # and for cachedb too - db.upgradeDatabase(db.DBConnection('cache.db'), cache_db.InitialSchema) + db.upgrade_database(db.DBConnection('cache.db'), cache_db.InitialSchema) # and for faileddb too - db.upgradeDatabase(db.DBConnection('failed.db'), failed_db.InitialSchema) + db.upgrade_database(db.DBConnection('failed.db'), failed_db.InitialSchema) def teardown_test_db(): diff --git a/tests/webapi_tests.py b/tests/webapi_tests.py index 7b5d410b..a7b1c9a9 100644 --- a/tests/webapi_tests.py +++ b/tests/webapi_tests.py @@ -75,7 +75,7 @@ test_shows = [ 'quality_init': [], 'quality_upgrade': [], 'episodes': { 1: { - 1: {'name': 'ep1', 'status': Quality.compositeStatus(DOWNLOADED, Quality.HDWEBDL), + 1: {'name': 'ep1', 'status': Quality.composite_status(DOWNLOADED, Quality.HDWEBDL), 'airdate': old_date, 'description': 'ep1 description'}, 2: {'name': 'ep2', 'status': WANTED, 'airdate': last_week, 'description': 'ep2 description'}, 3: {'name': 'ep3', 
'status': WANTED, 'airdate': today, 'description': 'ep3 description'}, @@ -174,17 +174,17 @@ class WebAPICase(test.SickbeardTestDBCase): sickgear.events = Events(None) sickgear.show_queue_scheduler = scheduler.Scheduler( show_queue.ShowQueue(), - cycleTime=datetime.timedelta(seconds=3), - threadName='SHOWQUEUE') + cycle_time=datetime.timedelta(seconds=3), + thread_name='SHOWQUEUE') sickgear.search_queue_scheduler = scheduler.Scheduler( search_queue.SearchQueue(), - cycleTime=datetime.timedelta(seconds=3), - threadName='SEARCHQUEUE') + cycle_time=datetime.timedelta(seconds=3), + thread_name='SEARCHQUEUE') sickgear.backlog_search_scheduler = search_backlog.BacklogSearchScheduler( search_backlog.BacklogSearcher(), - cycleTime=datetime.timedelta(minutes=60), + cycle_time=datetime.timedelta(minutes=60), run_delay=datetime.timedelta(minutes=60), - threadName='BACKLOG') + thread_name='BACKLOG') sickgear.indexermapper.indexer_list = [i for i in sickgear.indexers.indexer_api.TVInfoAPI().all_sources] for root_dirs, path, expected in root_folder_tests: sickgear.ROOT_DIRS = root_dirs @@ -198,8 +198,8 @@ class WebAPICase(test.SickbeardTestDBCase): elif k in show_obj.__dict__: show_obj.__dict__[k] = v if 'quality_init' in cur_show and cur_show['quality_init']: - show_obj.quality = Quality.combineQualities(cur_show['quality_init'], - cur_show.get('quality_upgrade', [])) + show_obj.quality = Quality.combine_qualities(cur_show['quality_init'], + cur_show.get('quality_upgrade', [])) show_obj.dirty = True show_obj.save_to_db(True) @@ -216,7 +216,7 @@ class WebAPICase(test.SickbeardTestDBCase): ep_obj.__dict__[k] = v show_obj.sxe_ep_obj.setdefault(season, {})[ep] = ep_obj ep_obj.save_to_db(True) - status, quality = Quality.splitCompositeStatus(ep_obj.status) + status, quality = Quality.split_composite_status(ep_obj.status) if status in (DOWNLOADED, SNATCHED): s_r = SearchResult([ep_obj]) s_r.show_obj, s_r.quality, s_r.provider, s_r.name = \ @@ -240,8 +240,8 @@ class WebAPICase(test.SickbeardTestDBCase): for cur_show in test_shows: show_obj = sickgear.helpers.find_show_by_id({cur_show['tvid']: cur_show['prodid']}) if 'quality_init' in cur_show and cur_show['quality_init']: - show_obj.quality = Quality.combineQualities(cur_show['quality_init'], - cur_show.get('quality_upgrade', [])) + show_obj.quality = Quality.combine_qualities(cur_show['quality_init'], + cur_show.get('quality_upgrade', [])) else: show_obj.quality = int(sickgear.QUALITY_DEFAULT) show_obj.upgrade_once = int(cur_show.get('upgrade_once', 0)) @@ -821,7 +821,7 @@ class WebAPICase(test.SickbeardTestDBCase): if cur_quality: params.update({'quality': cur_quality_str}) old_status = ep_obj.status - status, quality = Quality.splitCompositeStatus(ep_obj.status) + status, quality = Quality.split_composite_status(ep_obj.status) expect_fail = UNAIRED == status or (DOWNLOADED == status and not cur_quality) expected_msg = (success_msg, failed_msg)[expect_fail] data = self._request_from_api(webapi.CMD_SickGearEpisodeSetStatus, params=params) diff --git a/tests/xem_tests.py b/tests/xem_tests.py index 81fa0cfb..1d3cb6d6 100644 --- a/tests/xem_tests.py +++ b/tests/xem_tests.py @@ -61,12 +61,12 @@ class XEMBasicTests(test.SickbeardTestDBCase): curRegex = '^' + escaped_name + r'\W+(?:(?:S\d[\dE._ -])|(?:\d\d?x)|(?:\d{4}\W\d\d\W\d\d)|(?:(?:part|pt)' \ r'[\._ -]?(\d|[ivx]))|Season\W+\d+\W+|E\d+\W+|(?:\d{1,3}.+\d{1,}[a-zA-Z]{2}' \ r'\W+[a-zA-Z]{3,}\W+\d{4}.+))' - # print(u"Checking if show " + name + " matches " + curRegex) + # print("Checking if show " + name + " 
matches " + curRegex)
         # noinspection PyUnusedLocal
         match = re.search(curRegex, name, re.I)
         # if match:
-        # print(u"Matched " + curRegex + " to " + name)
+        # print("Matched " + curRegex + " to " + name)

 if '__main__' == __name__: