Merge branch 'feature/ChangeUnicodePy3' into dev

JackDandy 2023-03-08 14:56:11 +00:00
commit 59de9f8bda
133 changed files with 1799 additions and 1930 deletions

View file

@@ -125,7 +125,7 @@ for cleaned_path, test_path, dir_list in cleanups:
             pass
     with io.open(cleaned_file, 'w+', encoding='utf-8') as fp:
-        fp.write(u'This file exists to prevent a rerun delete of *.pyc, *.pyo files')
+        fp.write('This file exists to prevent a rerun delete of *.pyc, *.pyo files')
         fp.flush()
         os.fsync(fp.fileno())

@@ -166,10 +166,10 @@ if not os.path.isfile(cleaned_file) or os.path.exists(test):
         swap_name = cleaned_file
         cleaned_file = danger_output
         danger_output = swap_name
-    msg = u'Failed (permissions?) to delete file(s). You must manually delete:\r\n%s' % '\r\n'.join(bad_files)
+    msg = 'Failed (permissions?) to delete file(s). You must manually delete:\r\n%s' % '\r\n'.join(bad_files)
     print(msg)
 else:
-    msg = u'This file exists to prevent a rerun delete of dead lib/html5lib files'
+    msg = 'This file exists to prevent a rerun delete of dead lib/html5lib files'
     with io.open(cleaned_file, 'w+', encoding='utf-8') as fp:
         fp.write(msg)
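
An editor's aside on the pattern in this file (a sketch, not part of the commit): in Python 3 every str literal is already unicode, so dropping the u'' prefix is purely cosmetic and both spellings build the same object.

    # Py3: the u'' prefix survives only for Py2 compatibility and changes nothing
    plain = 'This file exists to prevent a rerun delete of *.pyc, *.pyo files'
    prefixed = u'This file exists to prevent a rerun delete of *.pyc, *.pyo files'
    assert plain == prefixed
    assert type(plain) is type(prefixed) is str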

View file

@@ -277,7 +277,7 @@ class TraktAPI(object):
             code = getattr(e.response, 'status_code', None)
             if not code:
                 if 'timed out' in ex(e):
-                    log.warning(u'Timeout connecting to Trakt')
+                    log.warning('Timeout connecting to Trakt')
                     if count >= self.max_retrys:
                         raise TraktTimeout()
                     return self.trakt_request(path, data, headers, url, count=count, sleep_retry=sleep_retry,

@@ -285,12 +285,12 @@ class TraktAPI(object):
                 # This is pretty much a fatal error if there is no status_code
                 # It means there basically was no response at all
                 else:
-                    log.warning(u'Could not connect to Trakt. Error: %s' % ex(e))
+                    log.warning('Could not connect to Trakt. Error: %s' % ex(e))
                     raise TraktException('Could not connect to Trakt. Error: %s' % ex(e))
             elif 502 == code:
                 # Retry the request, Cloudflare had a proxying issue
-                log.warning(u'Retrying Trakt api request: %s' % path)
+                log.warning(f'Retrying Trakt api request: {path}')
                 if count >= self.max_retrys:
                     raise TraktCloudFlareException()
                 return self.trakt_request(path, data, headers, url, count=count, sleep_retry=sleep_retry,

@@ -303,7 +303,7 @@ class TraktAPI(object):
                     return self.trakt_request(path, data, headers, url, count=count, sleep_retry=sleep_retry,
                                               send_oauth=send_oauth, method=method)
-                log.warning(u'Unauthorized. Please check your Trakt settings')
+                log.warning('Unauthorized. Please check your Trakt settings')
                 sickgear.TRAKT_ACCOUNTS[send_oauth].auth_failure()
                 raise TraktAuthException()

@@ -318,18 +318,18 @@ class TraktAPI(object):
                     raise TraktAuthException()
             elif code in (500, 501, 503, 504, 520, 521, 522):
                 if count >= self.max_retrys:
-                    log.warning(u'Trakt may have some issues and it\'s unavailable. Code: %s' % code)
+                    log.warning(f'Trakt may have some issues and it\'s unavailable. Code: {code}')
                     raise TraktServerError(error_code=code)
                 # http://docs.trakt.apiary.io/#introduction/status-codes
-                log.warning(u'Trakt may have some issues and it\'s unavailable. Trying again')
+                log.warning('Trakt may have some issues and it\'s unavailable. Trying again')
                 return self.trakt_request(path, data, headers, url, count=count, sleep_retry=sleep_retry,
                                           send_oauth=send_oauth, method=method)
             elif 404 == code:
-                log.warning(u'Trakt error (404) the resource does not exist: %s%s' % (url, path))
+                log.warning(f'Trakt error (404) the resource does not exist: {url}{path}')
                 raise TraktMethodNotExisting('Trakt error (404) the resource does not exist: %s%s' % (url, path))
             elif 429 == code:
                 if count >= self.max_retrys:
-                    log.warning(u'Trakt replied with Rate-Limiting, maximum retries exceeded.')
+                    log.warning('Trakt replied with Rate-Limiting, maximum retries exceeded.')
                     raise TraktServerError(error_code=code)
                 r_headers = getattr(e.response, 'headers', None)
                 if None is not r_headers:

@@ -356,14 +356,14 @@ class TraktAPI(object):
                     'revoked, does not match the redirection URI used in the authorization request,'
                     ' or was issued to another client.')
             else:
-                log.error(u'Could not connect to Trakt. Code error: {0}'.format(code))
+                log.error('Could not connect to Trakt. Code error: {0}'.format(code))
                 raise TraktException('Could not connect to Trakt. Code error: %s' % code)
         except ConnectionSkipException as e:
             log.warning('Connection is skipped')
             raise e
         except ValueError as e:
-            log.error(u'Value Error: %s' % ex(e))
-            raise TraktValueError(u'Value Error: %s' % ex(e))
+            log.error(f'Value Error: {ex(e)}')
+            raise TraktValueError(f'Value Error: {ex(e)}')
         except (BaseException, Exception) as e:
             log.error('Exception: %s' % ex(e))
             raise TraktException('Could not connect to Trakt. Code error: %s' % ex(e))
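
For context, a minimal sketch (not from the commit) of the % to f-string conversions above; ex() here stands in for SickGear's exception-to-text helper, an assumption for the sake of a runnable snippet. Output is identical, and f-strings may embed full expressions such as the ex(e) calls.

    def ex(e):  # hypothetical stand-in for SickGear's exception formatter
        return str(e)

    e = ValueError('timed out')
    path = '/shows/trending'
    assert ('Retrying Trakt api request: %s' % path) == f'Retrying Trakt api request: {path}'
    assert ('Value Error: %s' % ex(e)) == f'Value Error: {ex(e)}'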

View file

@@ -138,7 +138,7 @@ class Tvdb(TVInfoBase):
     """Create easy-to-use interface to name of season/episode name
     >> t = Tvdb()
     >> t['Scrubs'][1][24]['episodename']
-    u'My Last Day'
+    'My Last Day'
     """
     map_languages = {}
     reverse_map_languages = {v: k for k, v in iteritems(map_languages)}

@@ -201,7 +201,7 @@ class Tvdb(TVInfoBase):
         >> t = Tvdb(actors=True)
         >> t['scrubs']['actors'][0]['name']
-        u'Zach Braff'
+        'Zach Braff'
         custom_ui (tvdb_ui.BaseUI subclass):
             A callable subclass of tvdb_ui.BaseUI (overrides interactive option)

@@ -580,7 +580,7 @@ class Tvdb(TVInfoBase):
                 data_list.append(cr)
             resp['data'] = data_list
             return resp
-        return dict([(u'data', (None, resp)[isinstance(resp, string_types)])])
+        return dict([('data', (None, resp)[isinstance(resp, string_types)])])
     def _getetsrc(self, url, params=None, language=None, parse_json=False):
         """Loads a URL using caching

@@ -1015,14 +1015,14 @@ class Tvdb(TVInfoBase):
                 url_image = self._make_image(self.config['url_artworks'], image_data['data'][0]['filename'])
                 url_thumb = self._make_image(self.config['url_artworks'], image_data['data'][0]['thumbnail'])
                 self._set_show_data(sid, image_type, url_image)
-                self._set_show_data(sid, u'%s_thumb' % image_type, url_thumb)
+                self._set_show_data(sid, f'{image_type}_thumb', url_thumb)
                 excluded_main_data = True  # artwork found so prevent fallback
             self._parse_banners(sid, image_data['data'])
             self.shows[sid].__dict__[loaded_name] = True
         # fallback image thumbnail for none excluded_main_data if artwork is not found
         if not excluded_main_data and show_data['data'].get(image_type):
-            self._set_show_data(sid, u'%s_thumb' % image_type,
+            self._set_show_data(sid, f'{image_type}_thumb',
                                 re.sub(r'\.jpg$', '_t.jpg', show_data['data'][image_type], flags=re.I))
     def _get_show_data(self,

@@ -1067,11 +1067,11 @@ class Tvdb(TVInfoBase):
             else:
                 show_data = {'data': {}}
-            for img_type, en_type, p_type in [(u'poster', 'posters_enabled', posters),
-                                              (u'banner', 'banners_enabled', banners),
-                                              (u'fanart', 'fanart_enabled', fanart),
-                                              (u'season', 'seasons_enabled', seasons),
-                                              (u'seasonwide', 'seasonwides_enabled', seasonwides)]:
+            for img_type, en_type, p_type in [('poster', 'posters_enabled', posters),
+                                              ('banner', 'banners_enabled', banners),
+                                              ('fanart', 'fanart_enabled', fanart),
+                                              ('season', 'seasons_enabled', seasons),
+                                              ('seasonwide', 'seasonwides_enabled', seasonwides)]:
                 self._parse_images(sid, language, show_data, img_type, en_type, p_type)
         if (actors or self.config['actors_enabled']) and not getattr(self.shows.get(sid), 'actors_loaded', False):

@@ -1175,9 +1175,9 @@ class Tvdb(TVInfoBase):
                 else:
                     page += 1
-        ep_map_keys = {'absolutenumber': u'absolute_number', 'airedepisodenumber': u'episodenumber',
-                       'airedseason': u'seasonnumber', 'airedseasonid': u'seasonid',
-                       'dvdepisodenumber': u'dvd_episodenumber', 'dvdseason': u'dvd_season'}
+        ep_map_keys = {'absolutenumber': 'absolute_number', 'airedepisodenumber': 'episodenumber',
+                       'airedseason': 'seasonnumber', 'airedseasonid': 'seasonid',
+                       'dvdepisodenumber': 'dvd_episodenumber', 'dvdseason': 'dvd_season'}
         for cur_ep in episodes:
             if self.config['dvdorder']:

View file

@@ -17,8 +17,8 @@ It must have a method "select_series", this is passed a list of dicts, each dict
 contains the the keys "name" (human readable show name), and "sid" (the shows
 ID as on thetvdb.com). For example:
-[{'name': u'Lost', 'sid': u'73739'},
- {'name': u'Lost Universe', 'sid': u'73181'}]
+[{'name': 'Lost', 'sid': '73739'},
+ {'name': 'Lost Universe', 'sid': '73181'}]
 The "select_series" method must return the appropriate dict, or it can raise
 tvdb_userabort (if the selection is aborted), tvdb_shownotfound (if the show

View file

@@ -77,7 +77,7 @@ def generate_key(key_size=4096, output_file='server.key'):
 # Ported from cryptography docs/x509/tutorial.rst
 def generate_local_cert(private_key, days_valid=3650, output_file='server.crt', loc_name=None, org_name=None):
-    def_name = u'SickGear'
+    def_name = 'SickGear'
     # Various details about who we are. For a self-signed certificate the
     # subject and issuer are always the same.

@@ -88,7 +88,7 @@ def generate_local_cert(private_key, days_valid=3650, output_file='server.crt',
     # build Subject Alternate Names (aka SAN) list
     # First the host names, add with x509.DNSName():
-    san_list = [x509.DNSName(u'localhost')]
+    san_list = [x509.DNSName('localhost')]
     try:
         thishostname = text_type(socket.gethostname())
         san_list.append(x509.DNSName(thishostname))

@@ -100,13 +100,13 @@ def generate_local_cert(private_key, days_valid=3650, output_file='server.crt',
     try:
         # noinspection PyCompatibility
         from ipaddress import IPv4Address, IPv6Address
-        san_list.append(x509.IPAddress(IPv4Address(u'127.0.0.1')))
-        san_list.append(x509.IPAddress(IPv6Address(u'::1')))
+        san_list.append(x509.IPAddress(IPv4Address('127.0.0.1')))
+        san_list.append(x509.IPAddress(IPv6Address('::1')))
         # append local v4 ip
         mylocalipv4 = localipv4()
         if mylocalipv4:
-            san_list.append(x509.IPAddress(IPv4Address(u'' + mylocalipv4)))
+            san_list.append(x509.IPAddress(IPv4Address('' + mylocalipv4)))
     except (ImportError, Exception):
         pass
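
A hedged sketch of why the u'' prefixes could be dropped here (assumes the cryptography package is installed): under Py2 the x509 name types required unicode text, hence the u-prefixed literals; in Py3 a bare str is already text and the same SAN list builds unchanged.

    from ipaddress import IPv4Address, IPv6Address
    from cryptography import x509

    san_list = [
        x509.DNSName('localhost'),                 # plain str is unicode in Py3
        x509.IPAddress(IPv4Address('127.0.0.1')),  # ipaddress types want text too
        x509.IPAddress(IPv6Address('::1')),
    ]
    san = x509.SubjectAlternativeName(san_list)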

View file

@@ -96,7 +96,7 @@ class Plex(object):
         if self.use_logger:
             msg = 'Plex:: ' + msg
             if debug:
-                logger.log(msg, logger.DEBUG)
+                logger.debug(msg)
             else:
                 logger.log(msg)
         # else:
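
Many hunks in this commit collapse logger.log(msg, logger.DEBUG) into logger.debug(msg). A minimal sketch of how such level-named helpers can wrap a single log() entry point; the names are illustrative, not SickGear's actual logger module.

    import logging

    DEBUG, MESSAGE, ERROR = logging.DEBUG, logging.INFO, logging.ERROR
    _log = logging.getLogger('sketch')

    def log(msg, level=MESSAGE):
        _log.log(level, msg)

    def debug(msg):  # convenience wrapper, equivalent to log(msg, DEBUG)
        log(msg, DEBUG)

    def error(msg):  # convenience wrapper, equivalent to log(msg, ERROR)
        log(msg, ERROR)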

View file

@@ -660,7 +660,7 @@ def clean_data(data):
     if isinstance(data, dict):
         return {k: clean_data(v) for k, v in iteritems(data)}
     if isinstance(data, string_types):
-        return unicodedata.normalize('NFKD', html_unescape(data).strip().replace(u'&amp;', u'&'))
+        return unicodedata.normalize('NFKD', html_unescape(data).strip().replace('&amp;', '&'))
     return data

@@ -938,8 +938,8 @@ def get_url(url,  # type: AnyStr
                 else:
                     http_err_text = 'Custom HTTP error code'
                 if 'mute_http_error' not in mute:
-                    logger.debug(u'Response not ok. %s: %s from requested url %s'
-                                 % (response.status_code, http_err_text, url))
+                    logger.debug(f'Response not ok. {response.status_code}: {http_err_text} from requested url'
+                                 f' {url}')
     except requests.exceptions.HTTPError as e:
         raised = e

@@ -948,29 +948,29 @@ def get_url(url,  # type: AnyStr
                     not (exclude_client_http_codes and is_client_error):
                 connection_fail_params = dict(fail_type=ConnectionFailTypes.http, code=e.response.status_code)
         if not raise_status_code:
-            logger.warning(u'HTTP error %s while loading URL%s' % (e.errno, _maybe_request_url(e)))
+            logger.warning(f'HTTP error {e.errno} while loading URL{_maybe_request_url(e)}')
     except requests.exceptions.ConnectionError as e:
         raised = e
         if 'mute_connect_err' not in mute:
-            logger.warning(u'Connection error msg:%s while loading URL%s' % (ex(e), _maybe_request_url(e)))
+            logger.warning(f"Connection error msg:{ex(e)} while loading URL{_maybe_request_url(e)}")
         if failure_monitor:
             connection_fail_params = dict(fail_type=ConnectionFailTypes.connection)
     except requests.exceptions.ReadTimeout as e:
         raised = e
         if 'mute_read_timeout' not in mute:
-            logger.warning(u'Read timed out msg:%s while loading URL%s' % (ex(e), _maybe_request_url(e)))
+            logger.warning(f'Read timed out msg:{ex(e)} while loading URL{_maybe_request_url(e)}')
         if failure_monitor:
             connection_fail_params = dict(fail_type=ConnectionFailTypes.timeout)
     except (requests.exceptions.Timeout, socket.timeout) as e:
         raised = e
         if 'mute_connect_timeout' not in mute:
-            logger.warning(u'Connection timed out msg:%s while loading URL %s' % (ex(e), _maybe_request_url(e, url)))
+            logger.warning(f'Connection timed out msg:{ex(e)} while loading URL {_maybe_request_url(e, url)}')
         if failure_monitor:
             connection_fail_params = dict(fail_type=ConnectionFailTypes.connection_timeout)
     except (BaseException, Exception) as e:
         raised = e
-        logger.warning((u'Exception caught while loading URL {0}\r\nDetail... %s\r\n{1}' % ex(e),
-                        u'Unknown exception while loading URL {0}\r\nDetail... {1}')[not ex(e)]
+        logger.warning(('Exception caught while loading URL {0}\r\nDetail... %s\r\n{1}' % ex(e),
+                        'Unknown exception while loading URL {0}\r\nDetail... {1}')[not ex(e)]
                        .format(url, traceback.format_exc()))
         if failure_monitor:
             connection_fail_params = dict(fail_type=ConnectionFailTypes.other)

@@ -1009,8 +1009,8 @@ def get_url(url,  # type: AnyStr
                 result = result, session
         except (TypeError, Exception) as e:
             raised = e
-            logger.warning(u'%s data issue from URL %s\r\nDetail... %s' % (
-                ('Proxy browser', 'JSON')[parse_json], url, ex(e)))
+            logger.warning(f'{("Proxy browser", "JSON")[parse_json]} data issue from URL {url}\r\n'
+                           f'Detail... {ex(e)}')
     elif savename:
         try:

@@ -1135,15 +1135,15 @@ def fix_set_group_id(child_path):
     user_id = os.geteuid()  # only available on UNIX
     if 0 != user_id and user_id != child_path_owner:
-        logger.debug(u'Not running as root or owner of %s, not trying to set the set-group-id' % child_path)
+        logger.debug(f'Not running as root or owner of {child_path}, not trying to set the set-group-id')
         return
     try:
         os.chown(child_path, -1, parent_gid)  # only available on UNIX
-        logger.debug(u'Respecting the set-group-ID bit on the parent directory for %s' % child_path)
+        logger.debug(f'Respecting the set-group-ID bit on the parent directory for {child_path}')
     except OSError:
-        logger.error(u'Failed to respect the set-group-id bit on the parent directory for %s (setting group id %i)'
-                     % (child_path, parent_gid))
+        logger.error(f'Failed to respect the set-group-id bit on the parent directory for {child_path}'
+                     f' (setting group id {parent_gid:d})')
 def remove_file_perm(filepath, log_err=True):

@@ -1203,9 +1203,9 @@ def remove_file(filepath, tree=False, prefix_failure='', log_level=logging.INFO)
                 os.remove(filepath)
         except OSError as e:
             if getattr(e, 'winerror', 0) not in (5, 32):  # 5=access denied (e.g. av), 32=another process has lock
-                logger.log(level=log_level, msg=u'%sUnable to %s %s %s: %s' %
-                           (prefix_failure, ('delete', 'trash')[TRASH_REMOVE_SHOW],
-                            ('file', 'dir')[tree], filepath, ex(e)))
+                logger.log(level=log_level,
+                           msg=f'{prefix_failure}Unable to {("delete", "trash")[TRASH_REMOVE_SHOW]}'
+                               f' {("file", "dir")[tree]} {filepath}: {ex(e)}')
                 break
             time.sleep(t)
             if not os.path.exists(filepath):

@@ -1258,10 +1258,10 @@ def make_path(name, syno=False):
     # Windows, create all missing folders
     if os.name in ('nt', 'ce'):
         try:
-            logger.debug(u'Path %s doesn\'t exist, creating it' % name)
+            logger.debug(f"Path {name} doesn't exist, creating it")
             os.makedirs(name)
         except (OSError, IOError) as e:
-            logger.error(u'Failed creating %s : %s' % (name, ex(e)))
+            logger.error(f'Failed creating {name} : {ex(e)}')
             return False
     # not Windows, create all missing folders and set permissions

@@ -1278,7 +1278,7 @@ def make_path(name, syno=False):
                 continue
             try:
-                logger.debug(u'Path %s doesn\'t exist, creating it' % sofar)
+                logger.debug(f"Path {sofar} doesn't exist, creating it")
                 os.mkdir(sofar)
                 # use normpath to remove end separator, otherwise checks permissions against itself
                 chmod_as_parent(os.path.normpath(sofar))

@@ -1286,7 +1286,7 @@ def make_path(name, syno=False):
                     # do the library update for synoindex
                     NOTIFIERS.NotifierFactory().get('SYNOINDEX').addFolder(sofar)
             except (OSError, IOError) as e:
-                logger.error(u'Failed creating %s : %s' % (sofar, ex(e)))
+                logger.error(f'Failed creating {sofar} : {ex(e)}')
                 return False
     return True

@@ -1306,7 +1306,7 @@ def chmod_as_parent(child_path):
     parent_path = os.path.dirname(child_path)
     if not parent_path:
-        logger.debug(u'No parent path provided in %s, unable to get permissions from it' % child_path)
+        logger.debug(f'No parent path provided in {child_path}, unable to get permissions from it')
         return
     parent_path_stat = os.stat(parent_path)

@@ -1327,15 +1327,14 @@ def chmod_as_parent(child_path):
     user_id = os.geteuid()  # only available on UNIX
     if 0 != user_id and user_id != child_path_owner:
-        logger.debug(u'Not running as root or owner of %s, not trying to set permissions' % child_path)
+        logger.debug(f'Not running as root or owner of {child_path}, not trying to set permissions')
         return
     try:
         os.chmod(child_path, child_mode)
-        logger.debug(u'Setting permissions for %s to %o as parent directory has %o'
-                     % (child_path, child_mode, parent_mode))
+        logger.debug(f'Setting permissions for {child_path} to {child_mode:o} as parent directory has {parent_mode:o}')
     except OSError:
-        logger.error(u'Failed to set permission for %s to %o' % (child_path, child_mode))
+        logger.error(f'Failed to set permission for {child_path} to {child_mode:o}')
 def file_bit_filter(mode):
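
Worth noting for the chmod_as_parent changes above (an editor's check, not from the diff): printf conversions like %o and %d carry over to f-strings as format specs after the colon, so the octal and integer renderings are unchanged.

    child_mode, parent_mode, port = 0o644, 0o755, 8081
    assert ('%o' % child_mode) == f'{child_mode:o}' == '644'
    assert ('%d' % port) == f'{port:d}' == '8081'
    print(f'Setting permissions to {child_mode:o} as parent directory has {parent_mode:o}')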

View file

@@ -190,7 +190,7 @@ class SickGear(object):
                     rc.load_msg = load_msg
                     rc.run(max_v)
                 else:
-                    print(u'ERROR: Could not download Rollback Module.')
+                    print('ERROR: Could not download Rollback Module.')
             except (BaseException, Exception):
                 pass

@@ -290,13 +290,13 @@ class SickGear(object):
         if self.run_as_daemon:
             pid_dir = os.path.dirname(self.pid_file)
             if not os.access(pid_dir, os.F_OK):
-                sys.exit(u"PID dir: %s doesn't exist. Exiting." % pid_dir)
+                sys.exit(f"PID dir: {pid_dir} doesn't exist. Exiting.")
             if not os.access(pid_dir, os.W_OK):
-                sys.exit(u'PID dir: %s must be writable (write permissions). Exiting.' % pid_dir)
+                sys.exit(f'PID dir: {pid_dir} must be writable (write permissions). Exiting.')
         else:
             if self.console_logging:
-                print(u'Not running in daemon mode. PID file creation disabled')
+                print('Not running in daemon mode. PID file creation disabled')
             self.create_pid = False

@@ -309,27 +309,27 @@ class SickGear(object):
             try:
                 os.makedirs(sickgear.DATA_DIR, 0o744)
             except os.error:
-                sys.exit(u'Unable to create data directory: %s Exiting.' % sickgear.DATA_DIR)
+                sys.exit(f'Unable to create data directory: {sickgear.DATA_DIR} Exiting.')
         # Make sure we can write to the data dir
         if not os.access(sickgear.DATA_DIR, os.W_OK):
-            sys.exit(u'Data directory: %s must be writable (write permissions). Exiting.' % sickgear.DATA_DIR)
+            sys.exit(f'Data directory: {sickgear.DATA_DIR} must be writable (write permissions). Exiting.')
         # Make sure we can write to the config file
         if not os.access(sickgear.CONFIG_FILE, os.W_OK):
             if os.path.isfile(sickgear.CONFIG_FILE):
-                sys.exit(u'Config file: %s must be writeable (write permissions). Exiting.' % sickgear.CONFIG_FILE)
+                sys.exit(f'Config file: {sickgear.CONFIG_FILE} must be writeable (write permissions). Exiting.')
             elif not os.access(os.path.dirname(sickgear.CONFIG_FILE), os.W_OK):
-                sys.exit(u'Config file directory: %s must be writeable (write permissions). Exiting'
-                         % os.path.dirname(sickgear.CONFIG_FILE))
+                sys.exit(f'Config file directory: {os.path.dirname(sickgear.CONFIG_FILE)}'
+                         f' must be writeable (write permissions). Exiting')
         os.chdir(sickgear.DATA_DIR)
         if self.console_logging:
-            print(u'Starting up SickGear from %s' % sickgear.CONFIG_FILE)
+            print(f'Starting up SickGear from {sickgear.CONFIG_FILE}')
         # Load the config and publish it to the sickgear package
         if not os.path.isfile(sickgear.CONFIG_FILE):
-            print(u'Unable to find "%s", all settings will be default!' % sickgear.CONFIG_FILE)
+            print(f'Unable to find "{sickgear.CONFIG_FILE}", all settings will be default!')
         sickgear.CFG = ConfigObj(sickgear.CONFIG_FILE)
         try:

@@ -353,7 +353,7 @@ class SickGear(object):
         sickgear.initialize(console_logging=self.console_logging)
         if self.forced_port:
-            logger.log(u'Forcing web server to port %s' % self.forced_port)
+            logger.log(f'Forcing web server to port {self.forced_port}')
             self.start_port = self.forced_port
         else:
             self.start_port = sickgear.WEB_PORT

@@ -403,12 +403,11 @@ class SickGear(object):
             self.webserver.wait_server_start()
             sickgear.started = True
         except (BaseException, Exception):
-            logger.log(u'Unable to start web server, is something else running on port %d?' % self.start_port,
-                       logger.ERROR)
+            logger.error(f'Unable to start web server, is something else running on port {self.start_port:d}?')
             if self.run_as_systemd:
                 self.exit(0)
             if sickgear.LAUNCH_BROWSER and not self.no_launch:
-                logger.log(u'Launching browser and exiting', logger.ERROR)
+                logger.error('Launching browser and exiting')
                 sickgear.launch_browser(self.start_port)
             self.exit(1)

@@ -439,11 +438,11 @@ class SickGear(object):
                     self.execute_rollback(mo, max_v, load_msg)
                     cur_db_version = db.DBConnection(d).check_db_version()
                     if 100000 <= cur_db_version:
-                        print(u'Rollback to production failed.')
-                        sys.exit(u'If you have used other forks, your database may be unusable due to their changes')
+                        print('Rollback to production failed.')
+                        sys.exit('If you have used other forks, your database may be unusable due to their changes')
                     if 100000 <= max_v and None is not base_v:
                         max_v = base_v  # set max_v to the needed base production db for test_db
-                    print(u'Rollback to production of [%s] successful.' % d)
+                    print(f'Rollback to production of [{d}] successful.')
                     sickgear.classes.loading_msg.set_msg_progress(load_msg, 'Finished')
                 # handling of production version higher than current base of test db

@@ -454,30 +453,29 @@ class SickGear(object):
                     self.execute_rollback(mo, base_v, load_msg)
                     cur_db_version = db.DBConnection(d).check_db_version()
                     if 100000 <= cur_db_version:
-                        print(u'Rollback to production base failed.')
-                        sys.exit(u'If you have used other forks, your database may be unusable due to their changes')
+                        print('Rollback to production base failed.')
+                        sys.exit('If you have used other forks, your database may be unusable due to their changes')
                     if 100000 <= max_v and None is not base_v:
                         max_v = base_v  # set max_v to the needed base production db for test_db
-                    print(u'Rollback to production base of [%s] successful.' % d)
+                    print(f'Rollback to production base of [{d}] successful.')
                     sickgear.classes.loading_msg.set_msg_progress(load_msg, 'Finished')
             # handling of production db versions
             if 0 < cur_db_version < 100000:
                 if cur_db_version < min_v:
-                    print(u'Your [%s] database version (%s) is too old to migrate from with this version of SickGear'
-                          % (d, cur_db_version))
-                    sys.exit(u'Upgrade using a previous version of SG first,'
-                             + u' or start with no database file to begin fresh')
+                    print(f'Your [{d}] database version ({cur_db_version})'
+                          f' is too old to migrate from with this version of SickGear')
+                    sys.exit('Upgrade using a previous version of SG first,'
+                             ' or start with no database file to begin fresh')
                 if cur_db_version > max_v:
                     sickgear.classes.loading_msg.set_msg_progress(load_msg, 'Rollback')
-                    print(u'Your [%s] database version (%s) has been incremented past'
-                          u' what this version of SickGear supports. Trying to rollback now. Please wait...' %
-                          (d, cur_db_version))
+                    print(f'Your [{d}] database version ({cur_db_version}) has been incremented past what this'
+                          f' version of SickGear supports. Trying to rollback now. Please wait...')
                     self.execute_rollback(mo, max_v, load_msg)
                     if db.DBConnection(d).check_db_version() > max_v:
-                        print(u'Rollback failed.')
-                        sys.exit(u'If you have used other forks, your database may be unusable due to their changes')
-                    print(u'Rollback of [%s] successful.' % d)
+                        print('Rollback failed.')
+                        sys.exit('If you have used other forks, your database may be unusable due to their changes')
+                    print(f'Rollback of [{d}] successful.')
                     sickgear.classes.loading_msg.set_msg_progress(load_msg, 'Finished')
         # migrate the config if it needs it

@@ -501,9 +499,9 @@ class SickGear(object):
         if os.path.exists(restore_dir):
             sickgear.classes.loading_msg.message = 'Restoring files'
             if self.restore(restore_dir, sickgear.DATA_DIR):
-                logger.log(u'Restore successful...')
+                logger.log('Restore successful...')
             else:
-                logger.log_error_and_exit(u'Restore FAILED!')
+                logger.log_error_and_exit('Restore FAILED!')
         # refresh network timezones
         sickgear.classes.loading_msg.message = 'Checking network timezones'

@@ -669,7 +667,7 @@ class SickGear(object):
         # Write pid
         if self.create_pid:
             pid = str(os.getpid())
-            logger.log(u'Writing PID: %s to %s' % (pid, self.pid_file))
+            logger.log(f'Writing PID: {pid} to {self.pid_file}')
             try:
                 os.fdopen(os.open(self.pid_file, os.O_CREAT | os.O_WRONLY, 0o644), 'w').write('%s\n' % pid)
             except (BaseException, Exception) as er:

@@ -705,7 +703,7 @@ class SickGear(object):
        Populates the showList with shows from the database
        """
-        logger.log(u'Loading initial show list')
+        logger.log('Loading initial show list')
         my_db = db.DBConnection(row_type='dict')
         sql_result = my_db.select(

@@ -749,8 +747,7 @@ class SickGear(object):
                 sickgear.showDict[show_obj.sid_int] = show_obj
                 _ = show_obj.ids
             except (BaseException, Exception) as err:
-                logger.log('There was an error creating the show in %s: %s' % (
-                    cur_result['location'], ex(err)), logger.ERROR)
+                logger.error('There was an error creating the show in %s: %s' % (cur_result['location'], ex(err)))
         sickgear.webserve.Home.make_showlist_unique_names()
     @staticmethod

@@ -801,13 +798,13 @@ class SickGear(object):
         popen_list += sickgear.MY_ARGS
         if self.run_as_systemd:
-            logger.log(u'Restarting SickGear with exit(1) handler and %s' % popen_list)
+            logger.log(f'Restarting SickGear with exit(1) handler and {popen_list}')
             logger.close()
             self.exit(1)
         if '--nolaunch' not in popen_list:
             popen_list += ['--nolaunch']
-        logger.log(u'Restarting SickGear with %s' % popen_list)
+        logger.log(f'Restarting SickGear with {popen_list}')
         logger.close()
         from _23 import Popen
         with Popen(popen_list, cwd=os.getcwd()):
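
Several long messages above are split across two adjacent f-string literals. A small sketch (not from the commit): Python joins adjacent string literals at compile time, and each piece that interpolates needs its own f prefix.

    d, cur_db_version = 'sickbeard.db', 20009  # hypothetical values
    msg = (f'Your [{d}] database version ({cur_db_version})'
           f' is too old to migrate from with this version of SickGear')
    assert msg == ('Your [%s] database version (%s)'
                   ' is too old to migrate from with this version of SickGear' % (d, cur_db_version))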

View file

@@ -803,7 +803,7 @@ def init_stage_1(console_logging):
             CACHE_DIR = ACTUAL_CACHE_DIR
         if not helpers.make_dir(CACHE_DIR):
-            logger.log(u'!!! Creating local cache dir failed, using system default', logger.ERROR)
+            logger.error('!!! creating local cache dir failed, using system default')
             CACHE_DIR = None
         # clean cache folders

@@ -811,7 +811,7 @@ def init_stage_1(console_logging):
             helpers.clear_cache()
             ZONEINFO_DIR = os.path.join(CACHE_DIR, 'zoneinfo')
             if not os.path.isdir(ZONEINFO_DIR) and not helpers.make_path(ZONEINFO_DIR):
-                logger.log(u'!!! Creating local zoneinfo dir failed', logger.ERROR)
+                logger.error('!!! creating local zoneinfo dir failed')
         sg_helpers.CACHE_DIR = CACHE_DIR
         sg_helpers.DATA_DIR = DATA_DIR

@@ -830,7 +830,7 @@ def init_stage_1(console_logging):
         TRIM_ZERO = bool(check_setting_int(CFG, 'GUI', 'trim_zero', 0))
         DATE_PRESET = check_setting_str(CFG, 'GUI', 'date_preset', '%x')
         TIME_PRESET_W_SECONDS = check_setting_str(CFG, 'GUI', 'time_preset', '%I:%M:%S %p')
-        TIME_PRESET = TIME_PRESET_W_SECONDS.replace(u':%S', u'')
+        TIME_PRESET = TIME_PRESET_W_SECONDS.replace(':%S', '')
         TIMEZONE_DISPLAY = check_setting_str(CFG, 'GUI', 'timezone_display', 'network')
         SHOW_TAGS = check_setting_str(CFG, 'GUI', 'show_tags', 'Show List').split(',')
         SHOW_TAG_DEFAULT = check_setting_str(CFG, 'GUI', 'show_tag_default',

@@ -842,7 +842,7 @@ def init_stage_1(console_logging):
         LOG_DIR = os.path.normpath(os.path.join(DATA_DIR, ACTUAL_LOG_DIR))
         if not helpers.make_dir(LOG_DIR):
-            logger.log(u'!!! No log folder, logging to screen only!', logger.ERROR)
+            logger.error('!!! no log folder, logging to screen only!')
         FILE_LOGGING_PRESET = check_setting_str(CFG, 'General', 'file_logging_preset', 'DEBUG')
         if bool(check_setting_int(CFG, 'General', 'file_logging_db', 0)):

@@ -1488,7 +1488,7 @@ def init_stage_1(console_logging):
             ('docker/other', 'snap')['snap' in CUR_COMMIT_HASH]
         if not os.path.isfile(CONFIG_FILE):
-            logger.log(u'Unable to find \'%s\', all settings will be default!' % CONFIG_FILE, logger.DEBUG)
+            logger.debug(f'Unable to find \'{CONFIG_FILE}\', all settings will be default!')
             update_config = True
         # Get expected config version

@@ -1747,20 +1747,20 @@ def restart(soft=True, update_pkg=None):
         if update_pkg:
             MY_ARGS.append('--update-pkg')
-        logger.log(u'Trigger event restart')
+        logger.log('Trigger event restart')
         events.put(events.SystemEvent.RESTART)
     else:
         halt()
         save_all()
-        logger.log(u'Re-initializing all data')
+        logger.log('Re-initializing all data')
         initialize()
 def sig_handler(signum=None, _=None):
     is_ctrlbreak = 'win32' == sys.platform and signal.SIGBREAK == signum
-    msg = u'Signal "%s" found' % (signal.SIGINT == signum and 'CTRL-C' or is_ctrlbreak and 'CTRL+BREAK' or
-                                  signal.SIGTERM == signum and 'Termination' or signum)
+    msg = 'Signal "%s" found' % (signal.SIGINT == signum and 'CTRL-C' or is_ctrlbreak and 'CTRL+BREAK' or
+                                 signal.SIGTERM == signum and 'Termination' or signum)
     if None is signum or signum in (signal.SIGINT, signal.SIGTERM) or is_ctrlbreak:
         logger.log('%s, saving and exiting...' % msg)
         events.put(events.SystemEvent.SHUTDOWN)

@@ -1831,12 +1831,12 @@ def save_all():
     global showList
     # write all shows
-    logger.log(u'Saving all shows to the database')
+    logger.log('Saving all shows to the database')
     for show_obj in showList:  # type: tv.TVShow
         show_obj.save_to_db()
     # save config
-    logger.log(u'Saving config file to disk')
+    logger.log('Saving config file to disk')
     save_config()

@@ -2400,4 +2400,4 @@ def launch_browser(start_port=None):
     try:
         webbrowser.open(browser_url, 1, True)
     except (BaseException, Exception):
-        logger.log('Unable to launch a browser', logger.ERROR)
+        logger.error('Unable to launch a browser')

View file

@@ -52,7 +52,7 @@ class AniGroupList(object):
         self.load()
     def load(self):
-        logger.log(u'Building allow amd block list for %s' % self.tvid_prodid, logger.DEBUG)
+        logger.debug(f'Building allow amd block list for {self.tvid_prodid}')
         self.allowlist = self._load_list('allowlist')
         self.blocklist = self._load_list('blocklist')

@@ -74,8 +74,7 @@ class AniGroupList(object):
         for cur_result in sql_result:
             groups.append(cur_result['keyword'])
-        logger.log('AniPermsList: %s loaded keywords from %s: %s' % (self.tvid_prodid, table, groups),
-                   logger.DEBUG)
+        logger.debug('AniPermsList: %s loaded keywords from %s: %s' % (self.tvid_prodid, table, groups))
         return groups

@@ -88,7 +87,7 @@ class AniGroupList(object):
         self._del_all_keywords('allowlist')
         self._add_keywords('allowlist', values)
         self.allowlist = values
-        logger.log('Allowlist set to: %s' % self.allowlist, logger.DEBUG)
+        logger.debug('Allowlist set to: %s' % self.allowlist)
     def set_block_keywords(self, values):
         # type: (List[AnyStr]) -> None

@@ -99,7 +98,7 @@ class AniGroupList(object):
         self._del_all_keywords('blocklist')
         self._add_keywords('blocklist', values)
         self.blocklist = values
-        logger.log('Blocklist set to: %s' % self.blocklist, logger.DEBUG)
+        logger.debug('Blocklist set to: %s' % self.blocklist)
     def _del_all_keywords(self, table):
         # type: (AnyStr) -> None

@@ -133,15 +132,14 @@ class AniGroupList(object):
         :return: True or False
         """
         if not result.release_group:
-            logger.log('Failed to detect release group, invalid result', logger.DEBUG)
+            logger.debug('Failed to detect release group, invalid result')
             return False
         allowed = result.release_group.lower() in [x.lower() for x in self.allowlist] or not self.allowlist
         blocked = result.release_group.lower() in [x.lower() for x in self.blocklist]
-        logger.log('Result %sallowed%s in block list. Parsed group name: "%s" from result "%s"' %
-                   (('not ', '')[allowed], (', but', ' and not')[not blocked], result.release_group, result.name),
-                   logger.DEBUG)
+        logger.debug(f'Result {("not ", "")[allowed]}allowed{(", but", " and not")[not blocked]} in block list.'
+                     f' Parsed group name: "{result.release_group}" from result "{result.name}"')
         return allowed and not blocked

@@ -193,29 +191,29 @@ def create_anidb_obj(**kwargs):
 def set_up_anidb_connection():
     if not sickgear.USE_ANIDB:
-        logger.log(u'Usage of anidb disabled. Skipping', logger.DEBUG)
+        logger.debug('Usage of anidb disabled. Skipping')
         return False
     if not sickgear.ANIDB_USERNAME and not sickgear.ANIDB_PASSWORD:
-        logger.log(u'anidb username and/or password are not set. Aborting anidb lookup.', logger.DEBUG)
+        logger.debug('anidb username and/or password are not set. Aborting anidb lookup.')
         return False
     if not sickgear.ADBA_CONNECTION:
-        # anidb_logger = (lambda x: logger.log('ANIDB: ' + str(x)), logger.DEBUG)
+        # anidb_logger = (lambda x: logger.debug('ANIDB: ' + str(x)))
         sickgear.ADBA_CONNECTION = adba.Connection(keepAlive=True)  # , log=anidb_logger)
     auth = False
     try:
         auth = sickgear.ADBA_CONNECTION.authed()
     except (BaseException, Exception) as e:
-        logger.log(u'exception msg: ' + ex(e))
+        logger.log(f'exception msg: {ex(e)}')
         pass
     if not auth:
         try:
             sickgear.ADBA_CONNECTION.auth(sickgear.ANIDB_USERNAME, sickgear.ANIDB_PASSWORD)
         except (BaseException, Exception) as e:
-            logger.log(u'exception msg: ' + ex(e))
+            logger.log(f'exception msg: {ex(e)}')
             return False
     else:
         return True

@@ -230,7 +228,7 @@ def pull_anidb_groups(show_name):
         anime = create_anidb_obj(name=show_name)
         return anime.get_groups()
     except (BaseException, Exception) as e:
-        logger.log(u'Anidb exception: %s' % ex(e), logger.DEBUG)
+        logger.debug(f'Anidb exception: {ex(e)}')
         return False

@@ -258,7 +256,7 @@ def push_anidb_mylist(filepath, anidb_episode):
         log = ('Adding the file to the anidb mylist', logger.DEBUG)
         result = True
     except (BaseException, Exception) as e:
-        log = (u'exception msg: %s' % ex(e), logger.MESSAGE)
+        log = (f'exception msg: {ex(e)}', logger.MESSAGE)
         result = False
     return result, log
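
The rewritten block-list message keeps SickGear's tuple-indexed-by-bool idiom inside an f-string. A standalone sketch (values are hypothetical): bool subclasses int, so False selects element 0 and True selects element 1.

    allowed, blocked = False, True
    msg = (f'Result {("not ", "")[allowed]}allowed{(", but", " and not")[not blocked]} in block list.'
           f' Parsed group name: "GRP" from result "Show.S01E01"')
    assert msg.startswith('Result not allowed, but in block list.')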

View file

@@ -38,13 +38,12 @@ class PostProcesser(object):
     def _main():
         if not os.path.isdir(sickgear.TV_DOWNLOAD_DIR):
-            logger.log(u"Automatic post-processing attempted but dir %s doesn't exist" % sickgear.TV_DOWNLOAD_DIR,
-                       logger.ERROR)
+            logger.error('Automatic post-processing attempted but dir %s doesn\'t exist' % sickgear.TV_DOWNLOAD_DIR)
             return
         if not os.path.isabs(sickgear.TV_DOWNLOAD_DIR):
-            logger.log(u'Automatic post-processing attempted but dir %s is relative '
-                       '(and probably not what you really want to process)' % sickgear.TV_DOWNLOAD_DIR, logger.ERROR)
+            logger.error('Automatic post-processing attempted but dir %s is relative '
+                         '(and probably not what you really want to process)' % sickgear.TV_DOWNLOAD_DIR)
             return
         processTV.processDir(sickgear.TV_DOWNLOAD_DIR, is_basedir=True)

View file

@@ -78,7 +78,7 @@ def folders_at_path(path, include_parent=False, include_files=False):
     try:
         file_list = get_file_list(path, include_files)
     except OSError as e:
-        logger.log('Unable to open %s: %r / %s' % (path, e, ex(e)), logger.WARNING)
+        logger.warning('Unable to open %s: %r / %s' % (path, e, ex(e)))
         file_list = get_file_list(parent_path, include_files)
     file_list = sorted(file_list, key=lambda x: os.path.basename(x['name']).lower())

View file

@@ -52,7 +52,7 @@ class DelugeAPI(GenericClient):
             if not connected:
                 hosts = self._post_json({'method': 'web.get_hosts', 'params': [], 'id': 11})
                 if 0 == len(hosts):
-                    logger.log('%s: WebUI does not contain daemons' % self.name, logger.ERROR)
+                    logger.error('%s: WebUI does not contain daemons' % self.name)
                     return None
                 self._post_json({'method': 'web.connect', 'params': [hosts[0][0]], 'id': 11}, False)

@@ -60,7 +60,7 @@ class DelugeAPI(GenericClient):
                 connected = self._post_json({'method': 'web.connected', 'params': [], 'id': 10})
                 if not connected:
-                    logger.log('%s: WebUI could not connect to daemon' % self.name, logger.ERROR)
+                    logger.error('%s: WebUI could not connect to daemon' % self.name)
                     return None
         except RequestException:
             return None

@@ -94,7 +94,7 @@ class DelugeAPI(GenericClient):
         label = sickgear.TORRENT_LABEL
         if ' ' in label:
-            logger.log('%s: Invalid label. Label must not contain a space' % self.name, logger.ERROR)
+            logger.error('%s: Invalid label. Label must not contain a space' % self.name)
             return False
         if label:

@@ -106,22 +106,21 @@ class DelugeAPI(GenericClient):
             if None is not labels:
                 if label not in labels:
-                    logger.log('%s: %s label does not exist in Deluge we must add it' % (self.name, label),
-                               logger.DEBUG)
+                    logger.debug('%s: %s label does not exist in Deluge we must add it' % (self.name, label))
                     self._request_json({
                         'method': 'label.add',
                         'params': [label],
                         'id': 4})
-                    logger.log('%s: %s label added to Deluge' % (self.name, label), logger.DEBUG)
+                    logger.debug('%s: %s label added to Deluge' % (self.name, label))
                 # add label to torrent
                 self._request_json({
                     'method': 'label.set_torrent',
                     'params': [result.hash, label],
                     'id': 5})
-                logger.log('%s: %s label added to torrent' % (self.name, label), logger.DEBUG)
+                logger.debug('%s: %s label added to torrent' % (self.name, label))
             else:
-                logger.log('%s: label plugin not detected' % self.name, logger.DEBUG)
+                logger.debug('%s: label plugin not detected' % self.name)
                 return False
         return True

View file

@@ -71,7 +71,7 @@ class DownloadStationAPI(GenericClient):
         # type: (AnyStr) -> None
         out = '%s%s: %s' % (self.name, (' replied with', '')['Could not' in msg], msg)
         self._errmsg = '<br>%s.' % out
-        logger.log(out, logger.ERROR)
+        logger.error(out)
     def _error_task(self, response):

@@ -234,7 +234,7 @@ class DownloadStationAPI(GenericClient):
             i = 0
             while retry_ids:
                 for i in tries:
-                    logger.log('%s: retry %s %s item(s) in %ss' % (self.name, act, len(item['fail']), i), logger.DEBUG)
+                    logger.debug('%s: retry %s %s item(s) in %ss' % (self.name, act, len(item['fail']), i))
                     time.sleep(i)
                     item['fail'] = []
                     for task in filter(filter_func, self._tinf(retry_ids, err=True)):

@@ -246,8 +246,8 @@ class DownloadStationAPI(GenericClient):
                         retry_ids = item['fail']
                     else:
                         if max(tries) == i:
-                            logger.log('%s: failed to %s %s item(s) after %s tries over %s mins, aborted' %
-                                       (self.name, act, len(item['fail']), len(tries), sum(tries) / 60), logger.DEBUG)
+                            logger.debug('%s: failed to %s %s item(s) after %s tries over %s mins, aborted' %
+                                         (self.name, act, len(item['fail']), len(tries), sum(tries) / 60))
         return (item['fail'] + item['ignore']) or True

@@ -261,8 +261,8 @@ class DownloadStationAPI(GenericClient):
         if 3 <= self._task_version:
             return self._add_torrent(uri={'uri': search_result.url})
-        logger.log('%s: the API at %s doesn\'t support torrent magnet, download skipped' %
-                   (self.name, self.host), logger.WARNING)
+        logger.warning('%s: the API at %s doesn\'t support torrent magnet, download skipped' %
+                       (self.name, self.host))
     def _add_torrent_file(self, search_result):
         # type: (TorrentSearchResult) -> Union[AnyStr, bool]

View file

@@ -51,7 +51,7 @@ class GenericClient(object):
                     seg = seg[0:c - (len(sample) - 2)] + sample
                 output += ['%s: request %s= %s%s%s' % (self.name, arg, ('', '..')[bool(i)], seg, ('', '..')[i != nch])]
-        logger.log(output, logger.DEBUG)
+        logger.debug(output)
     def _request(self, method='get', params=None, data=None, files=None, **kwargs):

@@ -61,7 +61,7 @@ class GenericClient(object):
             self.last_time = time.time()
         if not self._get_auth():
-            logger.log('%s: Authentication failed' % self.name, logger.ERROR)
+            logger.error('%s: Authentication failed' % self.name)
             return False
         # self._log_request_details(method, params, data, files, **kwargs)

@@ -70,31 +70,30 @@ class GenericClient(object):
             response = self.session.__getattribute__(method)(self.url, params=params, data=data, files=files,
                                                              timeout=kwargs.pop('timeout', 120), verify=False, **kwargs)
         except requests.exceptions.ConnectionError as e:
-            logger.log('%s: Unable to connect %s' % (self.name, ex(e)), logger.ERROR)
+            logger.error('%s: Unable to connect %s' % (self.name, ex(e)))
             return False
         except (requests.exceptions.MissingSchema, requests.exceptions.InvalidURL):
-            logger.log('%s: Invalid host' % self.name, logger.ERROR)
+            logger.error('%s: Invalid host' % self.name)
             return False
         except requests.exceptions.HTTPError as e:
-            logger.log('%s: Invalid HTTP request %s' % (self.name, ex(e)), logger.ERROR)
+            logger.error('%s: Invalid HTTP request %s' % (self.name, ex(e)))
             return False
         except requests.exceptions.Timeout as e:
-            logger.log('%s: Connection timeout %s' % (self.name, ex(e)), logger.ERROR)
+            logger.error('%s: Connection timeout %s' % (self.name, ex(e)))
             return False
         except (BaseException, Exception) as e:
-            logger.log('%s: Unknown exception raised when sending torrent to %s: %s' % (self.name, self.name, ex(e)),
-                       logger.ERROR)
+            logger.error('%s: Unknown exception raised when sending torrent to %s: %s' % (self.name, self.name, ex(e)))
             return False
         if 401 == response.status_code:
-            logger.log('%s: Invalid username or password, check your config' % self.name, logger.ERROR)
+            logger.error('%s: Invalid username or password, check your config' % self.name)
             return False
         if response.status_code in http_error_code:
-            logger.log('%s: %s' % (self.name, http_error_code[response.status_code]), logger.DEBUG)
+            logger.debug('%s: %s' % (self.name, http_error_code[response.status_code]))
             return False
-        logger.log('%s: Response to %s request is %s' % (self.name, method.upper(), response.text), logger.DEBUG)
+        logger.debug('%s: Response to %s request is %s' % (self.name, method.upper(), response.text))
         return response

@@ -213,10 +212,10 @@ class GenericClient(object):
         r_code = False
-        logger.log('Calling %s client' % self.name, logger.DEBUG)
+        logger.debug('Calling %s client' % self.name)
         if not self._get_auth():
-            logger.log('%s: Authentication failed' % self.name, logger.ERROR)
+            logger.error('%s: Authentication failed' % self.name)
             return r_code
         try:

@@ -225,8 +224,8 @@ class GenericClient(object):
                 result = self._get_torrent_hash(result)
         except (BaseException, Exception) as e:
-            logger.log('Bad torrent data: hash is %s for [%s]' % (result.hash, result.name), logger.ERROR)
-            logger.log('Exception raised when checking torrent data: %s' % (ex(e)), logger.DEBUG)
+            logger.error('Bad torrent data: hash is %s for [%s]' % (result.hash, result.name))
+            logger.debug('Exception raised when checking torrent data: %s' % (ex(e)))
             return r_code
         try:

@@ -237,30 +236,30 @@ class GenericClient(object):
             self.created_id = isinstance(r_code, string_types) and r_code or None
             if not r_code:
-                logger.log('%s: Unable to send torrent to client' % self.name, logger.ERROR)
+                logger.error('%s: Unable to send torrent to client' % self.name)
                 return False
             if not self._set_torrent_pause(result):
-                logger.log('%s: Unable to set the pause for torrent' % self.name, logger.ERROR)
+                logger.error('%s: Unable to set the pause for torrent' % self.name)
             if not self._set_torrent_label(result):
-                logger.log('%s: Unable to set the label for torrent' % self.name, logger.ERROR)
+                logger.error('%s: Unable to set the label for torrent' % self.name)
             if not self._set_torrent_ratio(result):
-                logger.log('%s: Unable to set the ratio for torrent' % self.name, logger.ERROR)
+                logger.error('%s: Unable to set the ratio for torrent' % self.name)
             if not self._set_torrent_seed_time(result):
-                logger.log('%s: Unable to set the seed time for torrent' % self.name, logger.ERROR)
+                logger.error('%s: Unable to set the seed time for torrent' % self.name)
             if not self._set_torrent_path(result):
-                logger.log('%s: Unable to set the path for torrent' % self.name, logger.ERROR)
+                logger.error('%s: Unable to set the path for torrent' % self.name)
             if 0 != result.priority and not self._set_torrent_priority(result):
-                logger.log('%s: Unable to set priority for torrent' % self.name, logger.ERROR)
+                logger.error('%s: Unable to set priority for torrent' % self.name)
         except (BaseException, Exception) as e:
-            logger.log('%s: Failed sending torrent: %s - %s' % (self.name, result.name, result.hash), logger.ERROR)
-            logger.log('%s: Exception raised when sending torrent: %s' % (self.name, ex(e)), logger.DEBUG)
+            logger.error('%s: Failed sending torrent: %s - %s' % (self.name, result.name, result.hash))
+            logger.debug('%s: Exception raised when sending torrent: %s' % (self.name, ex(e)))
         return r_code
View file
@ -168,7 +168,7 @@ class QbittorrentAPI(GenericClient):
task = self._tinf(t.get('hash'), use_props=False, err=True)[0] task = self._tinf(t.get('hash'), use_props=False, err=True)[0]
return 1 < task.get('priority') or self._ignore_state(task) # then mark fail return 1 < task.get('priority') or self._ignore_state(task) # then mark fail
elif isinstance(response, string_types) and 'queueing' in response.lower(): elif isinstance(response, string_types) and 'queueing' in response.lower():
logger.log('%s: %s' % (self.name, response), logger.ERROR) logger.error('%s: %s' % (self.name, response))
return not mark_fail return not mark_fail
return mark_fail return mark_fail
@ -195,7 +195,7 @@ class QbittorrentAPI(GenericClient):
task = self._tinf(t.get('hash'), use_props=False, err=True)[0] task = self._tinf(t.get('hash'), use_props=False, err=True)[0]
return label not in task.get('category') or self._ignore_state(task) # then mark fail return label not in task.get('category') or self._ignore_state(task) # then mark fail
elif isinstance(response, string_types) and 'incorrect' in response.lower(): elif isinstance(response, string_types) and 'incorrect' in response.lower():
logger.log('%s: %s. "%s" isn\'t known to qB' % (self.name, response, label), logger.ERROR) logger.error('%s: %s. "%s" isn\'t known to qB' % (self.name, response, label))
return not mark_fail return not mark_fail
return mark_fail return mark_fail
@ -312,7 +312,7 @@ class QbittorrentAPI(GenericClient):
i = 0 i = 0
while retry_ids: while retry_ids:
for i in tries: for i in tries:
logger.log('%s: retry %s %s item(s) in %ss' % (self.name, act, len(item['fail']), i), logger.DEBUG) logger.debug('%s: retry %s %s item(s) in %ss' % (self.name, act, len(item['fail']), i))
time.sleep(i) time.sleep(i)
item['fail'] = [] item['fail'] = []
for task in filter(filter_func, self._tinf(retry_ids, use_props=False, err=True)): for task in filter(filter_func, self._tinf(retry_ids, use_props=False, err=True)):
@ -324,8 +324,8 @@ class QbittorrentAPI(GenericClient):
retry_ids = item['fail'] retry_ids = item['fail']
else: else:
if max(tries) == i: if max(tries) == i:
logger.log('%s: failed to %s %s item(s) after %s tries over %s mins, aborted' % logger.debug('%s: failed to %s %s item(s) after %s tries over %s mins, aborted' %
(self.name, act, len(item['fail']), len(tries), sum(tries) / 60), logger.DEBUG) (self.name, act, len(item['fail']), len(tries), sum(tries) / 60))
return (item['fail'] + item['ignore']) or True return (item['fail'] + item['ignore']) or True
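The retry loop above steps through a tuple of waits, retrying only the items that still fail and giving up after the final step. A self-contained sketch of that shape (do_action and the tries values are illustrative, not the client API):

    import time

    def retry_batch(items, do_action, tries=(3, 6, 9)):
        fail = list(items)
        for i in tries:
            if not fail:
                break
            time.sleep(i)  # back off before retrying what remains
            fail = [item for item in fail if not do_action(item)]
        return fail  # items that failed every attempt

    print(retry_batch(['hash1', 'hash2'], lambda item: True))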
@ -356,7 +356,7 @@ class QbittorrentAPI(GenericClient):
:return: True if created, else Falsy if nothing created :return: True if created, else Falsy if nothing created
""" """
if self._tinf(data.hash): if self._tinf(data.hash):
logger.log('Could not create task, the hash is already in use', logger.ERROR) logger.error('Could not create task, the hash is already in use')
return return
label = sickgear.TORRENT_LABEL.replace(' ', '_') label = sickgear.TORRENT_LABEL.replace(' ', '_')
@ -401,7 +401,7 @@ class QbittorrentAPI(GenericClient):
authless = bool(re.search('(?i)login|version', cmd)) authless = bool(re.search('(?i)login|version', cmd))
if authless or self.auth: if authless or self.auth:
if not authless and not self._get_auth(): if not authless and not self._get_auth():
logger.log('%s: Authentication failed' % self.name, logger.ERROR) logger.error('%s: Authentication failed' % self.name)
return return
# self._log_request_details('%s%s' % (self.api_ns, cmd.strip('/')), **kwargs) # self._log_request_details('%s%s' % (self.api_ns, cmd.strip('/')), **kwargs)
@ -431,7 +431,7 @@ class QbittorrentAPI(GenericClient):
self.api_ns = 'api/v2/' self.api_ns = 'api/v2/'
response = self._client_request('auth/login', post_data=post_data, raise_status_code=True) response = self._client_request('auth/login', post_data=post_data, raise_status_code=True)
if isinstance(response, string_types) and 'banned' in response.lower(): if isinstance(response, string_types) and 'banned' in response.lower():
logger.log('%s: %s' % (self.name, response), logger.ERROR) logger.error('%s: %s' % (self.name, response))
response = False response = False
elif not response: elif not response:
self.api_ns = '' self.api_ns = ''
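For context, the v2 login endpoint that hunk targets takes form-encoded credentials and answers with a session cookie. A hedged sketch with requests (host and credentials are placeholders):

    import requests

    def qbt_login(host='http://localhost:8080', username='admin', password='adminadmin'):
        s = requests.Session()
        r = s.post('%s/api/v2/auth/login' % host,
                   data={'username': username, 'password': password})
        # success is a 200 whose body reads 'Ok.'; repeated failures come
        # back with a ban message, which the code above scans for
        if r.ok and 'Ok' in r.text:
            return s
        return None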
View file
@ -43,7 +43,7 @@ class RtorrentAPI(GenericClient):
if self.auth: if self.auth:
try: try:
if self.auth.has_local_id(data.hash): if self.auth.has_local_id(data.hash):
logger.log('%s: Item already exists %s' % (self.name, data.name), logger.WARNING) logger.warning('%s: Item already exists %s' % (self.name, data.name))
raise raise
custom_var = (1, sickgear.TORRENT_LABEL_VAR or '')[0 <= sickgear.TORRENT_LABEL_VAR <= 5] custom_var = (1, sickgear.TORRENT_LABEL_VAR or '')[0 <= sickgear.TORRENT_LABEL_VAR <= 5]
@ -62,8 +62,8 @@ class RtorrentAPI(GenericClient):
if torrent and sickgear.TORRENT_LABEL: if torrent and sickgear.TORRENT_LABEL:
label = torrent.get_custom(custom_var) label = torrent.get_custom(custom_var)
if sickgear.TORRENT_LABEL != label: if sickgear.TORRENT_LABEL != label:
logger.log('%s: could not change custom%s label value \'%s\' to \'%s\' for %s' % ( logger.warning('%s: could not change custom%s label value \'%s\' to \'%s\' for %s' % (
self.name, custom_var, label, sickgear.TORRENT_LABEL, torrent.name), logger.WARNING) self.name, custom_var, label, sickgear.TORRENT_LABEL, torrent.name))
except (BaseException, Exception): except (BaseException, Exception):
pass pass
View file
@ -86,7 +86,7 @@ class TransmissionAPI(GenericClient):
# populate blanked and download_dir # populate blanked and download_dir
if not self._get_auth(): if not self._get_auth():
logger.log('%s: Authentication failed' % self.name, logger.ERROR) logger.error('%s: Authentication failed' % self.name)
return False return False
download_dir = None download_dir = None
@ -95,7 +95,7 @@ class TransmissionAPI(GenericClient):
elif self.download_dir: elif self.download_dir:
download_dir = self.download_dir download_dir = self.download_dir
else: else:
logger.log('Path required for Transmission Downloaded files location', logger.ERROR) logger.error('Path required for Transmission Downloaded files location')
if not download_dir and not self.blankable: if not download_dir and not self.blankable:
return False return False
View file
@ -300,7 +300,7 @@ class Quality(object):
if not hd_options and full_hd: if not hd_options and full_hd:
return Quality.FULLHDBLURAY return Quality.FULLHDBLURAY
if sickgear.ANIME_TREAT_AS_HDTV: if sickgear.ANIME_TREAT_AS_HDTV:
logger.log(u'Treating file: %s with "unknown" quality as HDTV per user settings' % name, logger.DEBUG) logger.debug(f'Treating file: {name} with "unknown" quality as HDTV per user settings')
return Quality.HDTV return Quality.HDTV
return Quality.UNKNOWN return Quality.UNKNOWN
@ -371,10 +371,10 @@ class Quality(object):
try: try:
parser = createParser(filename) parser = createParser(filename)
except InputStreamError as e: except InputStreamError as e:
logger.log(msg % (filename, ex(e)), logger.WARNING) logger.warning(msg % (filename, ex(e)))
except (BaseException, Exception) as e: except (BaseException, Exception) as e:
logger.log(msg % (filename, ex(e)), logger.ERROR) logger.error(msg % (filename, ex(e)))
logger.log(traceback.format_exc(), logger.ERROR) logger.error(traceback.format_exc())
if parser: if parser:
extract = None extract = None
@ -385,7 +385,7 @@ class Quality(object):
parser.parse_comments = False parser.parse_comments = False
extract = extractMetadata(parser, **args) extract = extractMetadata(parser, **args)
except (BaseException, Exception) as e: except (BaseException, Exception) as e:
logger.log(msg % (filename, ex(e)), logger.WARNING) logger.warning(msg % (filename, ex(e)))
if extract: if extract:
try: try:
height = extract.get('height') height = extract.get('height')
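A standalone sketch of the hachoir flow in this hunk, parsing a media file and reading its height (the path is a placeholder and error handling is trimmed to the essentials):

    from hachoir.metadata import extractMetadata
    from hachoir.parser import createParser

    def video_height(filename):
        parser = createParser(filename)
        if not parser:
            return None
        parser.parse_comments = False  # skip comment streams, as above
        extract = extractMetadata(parser)
        try:
            return extract and extract.get('height')
        except (BaseException, Exception):
            return None

    print(video_height('/path/to/sample.mkv'))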
View file
@ -56,7 +56,7 @@ def change_https_cert(https_cert):
if os.path.normpath(sickgear.HTTPS_CERT) != os.path.normpath(https_cert): if os.path.normpath(sickgear.HTTPS_CERT) != os.path.normpath(https_cert):
if helpers.make_dir(os.path.dirname(os.path.abspath(https_cert))): if helpers.make_dir(os.path.dirname(os.path.abspath(https_cert))):
sickgear.HTTPS_CERT = os.path.normpath(https_cert) sickgear.HTTPS_CERT = os.path.normpath(https_cert)
logger.log(u'Changed https cert path to %s' % https_cert) logger.log(f'Changed https cert path to {https_cert}')
else: else:
return False return False
@ -71,7 +71,7 @@ def change_https_key(https_key):
if os.path.normpath(sickgear.HTTPS_KEY) != os.path.normpath(https_key): if os.path.normpath(sickgear.HTTPS_KEY) != os.path.normpath(https_key):
if helpers.make_dir(os.path.dirname(os.path.abspath(https_key))): if helpers.make_dir(os.path.dirname(os.path.abspath(https_key))):
sickgear.HTTPS_KEY = os.path.normpath(https_key) sickgear.HTTPS_KEY = os.path.normpath(https_key)
logger.log(u'Changed https key path to %s' % https_key) logger.log(f'Changed https key path to {https_key}')
else: else:
return False return False
@ -89,7 +89,7 @@ def change_log_dir(log_dir, web_log):
sickgear.LOG_DIR = abs_log_dir sickgear.LOG_DIR = abs_log_dir
logger.sb_log_instance.init_logging() logger.sb_log_instance.init_logging()
logger.log(u'Initialized new log file in %s' % sickgear.LOG_DIR) logger.log(f'Initialized new log file in {sickgear.LOG_DIR}')
log_dir_changed = True log_dir_changed = True
else: else:
@ -109,7 +109,7 @@ def change_nzb_dir(nzb_dir):
if os.path.normpath(sickgear.NZB_DIR) != os.path.normpath(nzb_dir): if os.path.normpath(sickgear.NZB_DIR) != os.path.normpath(nzb_dir):
if helpers.make_dir(nzb_dir): if helpers.make_dir(nzb_dir):
sickgear.NZB_DIR = os.path.normpath(nzb_dir) sickgear.NZB_DIR = os.path.normpath(nzb_dir)
logger.log(u'Changed NZB folder to %s' % nzb_dir) logger.log(f'Changed NZB folder to {nzb_dir}')
else: else:
return False return False
@ -124,7 +124,7 @@ def change_torrent_dir(torrent_dir):
if os.path.normpath(sickgear.TORRENT_DIR) != os.path.normpath(torrent_dir): if os.path.normpath(sickgear.TORRENT_DIR) != os.path.normpath(torrent_dir):
if helpers.make_dir(torrent_dir): if helpers.make_dir(torrent_dir):
sickgear.TORRENT_DIR = os.path.normpath(torrent_dir) sickgear.TORRENT_DIR = os.path.normpath(torrent_dir)
logger.log(u'Changed torrent folder to %s' % torrent_dir) logger.log(f'Changed torrent folder to {torrent_dir}')
else: else:
return False return False
@ -139,7 +139,7 @@ def change_tv_download_dir(tv_download_dir):
if os.path.normpath(sickgear.TV_DOWNLOAD_DIR) != os.path.normpath(tv_download_dir): if os.path.normpath(sickgear.TV_DOWNLOAD_DIR) != os.path.normpath(tv_download_dir):
if helpers.make_dir(tv_download_dir): if helpers.make_dir(tv_download_dir):
sickgear.TV_DOWNLOAD_DIR = os.path.normpath(tv_download_dir) sickgear.TV_DOWNLOAD_DIR = os.path.normpath(tv_download_dir)
logger.log(u'Changed TV download folder to %s' % tv_download_dir) logger.log(f'Changed TV download folder to {tv_download_dir}')
else: else:
return False return False
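The string change running through these hunks is mechanical: the Py2-era u prefix and %-interpolation give way to an f-string, and both forms produce identical text on Python 3:

    nzb_dir = '/downloads/nzb'  # placeholder value
    old = u'Changed NZB folder to %s' % nzb_dir
    new = f'Changed NZB folder to {nzb_dir}'
    assert old == new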
@ -407,7 +407,7 @@ def check_setting_int(config, cfg_name, item_name, def_val):
except (BaseException, Exception): except (BaseException, Exception):
config[cfg_name] = {} config[cfg_name] = {}
config[cfg_name][item_name] = my_val config[cfg_name][item_name] = my_val
logger.log('%s -> %s' % (item_name, my_val), logger.DEBUG) logger.debug('%s -> %s' % (item_name, my_val))
return my_val return my_val
@ -422,7 +422,7 @@ def check_setting_float(config, cfg_name, item_name, def_val):
config[cfg_name] = {} config[cfg_name] = {}
config[cfg_name][item_name] = my_val config[cfg_name][item_name] = my_val
logger.log('%s -> %s' % (item_name, my_val), logger.DEBUG) logger.debug('%s -> %s' % (item_name, my_val))
return my_val return my_val
@ -449,9 +449,9 @@ def check_setting_str(config, cfg_name, item_name, def_val, log=True):
config[cfg_name][item_name] = helpers.encrypt(my_val, encryption_version) config[cfg_name][item_name] = helpers.encrypt(my_val, encryption_version)
if log: if log:
logger.log('%s -> %s' % (item_name, my_val), logger.DEBUG) logger.debug('%s -> %s' % (item_name, my_val))
else: else:
logger.log('%s -> ******' % item_name, logger.DEBUG) logger.debug('%s -> ******' % item_name)
return (my_val, def_val)['None' == my_val] return (my_val, def_val)['None' == my_val]
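The return line above uses the tuple-index conditional found throughout this codebase: (on_false, on_true)[condition] selects by boolean, since False indexes as 0 and True as 1. A tiny sketch (note both branches are built eagerly, unlike a real conditional expression):

    my_val, def_val = 'None', 'fallback'  # illustrative values
    assert (my_val, def_val)['None' == my_val] == 'fallback'

    # same idiom as the '..' truncation markers earlier in this diff
    assert ('', '..')[bool(1)] == '..'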
@ -497,9 +497,10 @@ class ConfigMigrator(object):
if self.config_version > self.expected_config_version: if self.config_version > self.expected_config_version:
logger.log_error_and_exit( logger.log_error_and_exit(
u'Your config version (%s) has been incremented past what this version of SickGear supports (%s).\n' f'Your config version ({self.config_version})'
'If you have used other forks or a newer version of SickGear, your config file may be unusable due to ' f' has been incremented past what this version of SickGear supports ({self.expected_config_version}).\n'
'their modifications.' % (self.config_version, self.expected_config_version)) f'If you have used other forks or a newer version of SickGear,'
f' your config file may be unusable due to their modifications.')
sickgear.CONFIG_VERSION = self.config_version sickgear.CONFIG_VERSION = self.config_version
@ -511,20 +512,20 @@ class ConfigMigrator(object):
else: else:
migration_name = '' migration_name = ''
logger.log(u'Backing up config before upgrade') logger.log('Backing up config before upgrade')
if not helpers.backup_versioned_file(sickgear.CONFIG_FILE, self.config_version): if not helpers.backup_versioned_file(sickgear.CONFIG_FILE, self.config_version):
logger.log_error_and_exit(u'Config backup failed, abort upgrading config') logger.log_error_and_exit('Config backup failed, abort upgrading config')
else: else:
logger.log(u'Proceeding with upgrade') logger.log('Proceeding with upgrade')
# do the migration, expect a method named _migrate_v<num> # do the migration, expect a method named _migrate_v<num>
logger.log(u'Migrating config up to version %s %s' % (next_version, migration_name)) logger.log(f'Migrating config up to version {next_version} {migration_name}')
getattr(self, '_migrate_v%s' % next_version)() getattr(self, '_migrate_v%s' % next_version)()
self.config_version = next_version self.config_version = next_version
# save new config after migration # save new config after migration
sickgear.CONFIG_VERSION = self.config_version sickgear.CONFIG_VERSION = self.config_version
logger.log(u'Saving config file to disk') logger.log('Saving config file to disk')
sickgear.save_config() sickgear.save_config()
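A minimal sketch of the stepped dispatch above: versions advance one at a time and each step's method is resolved by name. The class and the version numbers here are illustrative:

    class Migrator(object):
        def __init__(self, version):
            self.config_version = version

        def migrate(self, target):
            while self.config_version < target:
                next_version = self.config_version + 1
                # expect a method named _migrate_v<num>, as the code above does
                getattr(self, '_migrate_v%s' % next_version)()
                self.config_version = next_version

        def _migrate_v2(self):
            print('migrating config up to version 2')

    Migrator(1).migrate(2)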
@staticmethod @staticmethod
@ -569,17 +570,17 @@ class ConfigMigrator(object):
new_season_format = str(new_season_format).replace('09', '%0S') new_season_format = str(new_season_format).replace('09', '%0S')
new_season_format = new_season_format.replace('9', '%S') new_season_format = new_season_format.replace('9', '%S')
logger.log(u'Changed season folder format from %s to %s, prepending it to your naming config' % logger.log(f'Changed season folder format from {old_season_format} to {new_season_format},'
(old_season_format, new_season_format)) f' prepending it to your naming config')
sickgear.NAMING_PATTERN = new_season_format + os.sep + sickgear.NAMING_PATTERN sickgear.NAMING_PATTERN = new_season_format + os.sep + sickgear.NAMING_PATTERN
except (TypeError, ValueError): except (TypeError, ValueError):
logger.log(u'Can not change %s to new season format' % old_season_format, logger.ERROR) logger.error(f'Can not change {old_season_format} to new season format')
# if no shows had it on then don't flatten any shows and don't put season folders in the config # if no shows had it on then don't flatten any shows and don't put season folders in the config
else: else:
logger.log(u'No shows were using season folders before so I am disabling flattening on all shows') logger.log('No shows were using season folders before so I am disabling flattening on all shows')
# don't flatten any shows at all # don't flatten any shows at all
my_db.action('UPDATE tv_shows SET flatten_folders = 0 WHERE 1=1') my_db.action('UPDATE tv_shows SET flatten_folders = 0 WHERE 1=1')
@ -672,8 +673,7 @@ class ConfigMigrator(object):
try: try:
name, url, key, enabled = cur_provider_data.split('|') name, url, key, enabled = cur_provider_data.split('|')
except ValueError: except ValueError:
logger.log(u'Skipping Newznab provider string: "%s", incorrect format' % cur_provider_data, logger.error(f'Skipping Newznab provider string: "{cur_provider_data}", incorrect format')
logger.ERROR)
continue continue
cat_ids = '5030,5040,5060' cat_ids = '5030,5040,5060'
@ -727,7 +727,7 @@ class ConfigMigrator(object):
cur_metadata = metadata.split('|') cur_metadata = metadata.split('|')
# if target has the old number of values, do upgrade # if target has the old number of values, do upgrade
if 6 == len(cur_metadata): if 6 == len(cur_metadata):
logger.log(u'Upgrading ' + metadata_name + ' metadata, old value: ' + metadata) logger.log('Upgrading ' + metadata_name + ' metadata, old value: ' + metadata)
cur_metadata.insert(4, '0') cur_metadata.insert(4, '0')
cur_metadata.append('0') cur_metadata.append('0')
cur_metadata.append('0') cur_metadata.append('0')
@ -740,15 +740,15 @@ class ConfigMigrator(object):
cur_metadata[4], cur_metadata[3] = cur_metadata[3], '0' cur_metadata[4], cur_metadata[3] = cur_metadata[3], '0'
# write new format # write new format
metadata = '|'.join(cur_metadata) metadata = '|'.join(cur_metadata)
logger.log(u'Upgrading %s metadata, new value: %s' % (metadata_name, metadata)) logger.log(f'Upgrading {metadata_name} metadata, new value: {metadata}')
elif 10 == len(cur_metadata): elif 10 == len(cur_metadata):
metadata = '|'.join(cur_metadata) metadata = '|'.join(cur_metadata)
logger.log(u'Keeping %s metadata, value: %s' % (metadata_name, metadata)) logger.log(f'Keeping {metadata_name} metadata, value: {metadata}')
else: else:
logger.log(u'Skipping %s: "%s", incorrect format' % (metadata_name, metadata), logger.ERROR) logger.error(f'Skipping {metadata_name}: "{metadata}", incorrect format')
metadata = '0|0|0|0|0|0|0|0|0|0' metadata = '0|0|0|0|0|0|0|0|0|0'
logger.log(u'Setting %s metadata, new value: %s' % (metadata_name, metadata)) logger.log(f'Setting {metadata_name} metadata, new value: {metadata}')
return metadata return metadata
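Condensing the branchwork above: the upgrade pads a 6-value legacy string out to the 10-value layout and resets anything unrecognised. A sketch under the assumption that every added flag defaults to '0':

    def upgrade_metadata(metadata):
        cur = metadata.split('|')
        if 6 == len(cur):
            cur.insert(4, '0')          # new flag lands at position 4
            while 10 > len(cur):
                cur.append('0')         # pad the remaining flags
            return '|'.join(cur)
        if 10 == len(cur):
            return metadata             # already in the current format
        return '0|0|0|0|0|0|0|0|0|0'    # incorrect format: reset to defaults

    assert 10 == len(upgrade_metadata('1|1|1|1|1|1').split('|'))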
View file
@ -86,7 +86,7 @@ class MainSanityCheck(db.DBSanityCheck):
if 0 < len(cl): if 0 < len(cl):
self.connection.mass_action(cl) self.connection.mass_action(cl)
logger.log(u'Performing a vacuum on the database.', logger.DEBUG) logger.debug('Performing a vacuum on the database.')
self.connection.upgrade_log(fix_msg % 'VACUUM') self.connection.upgrade_log(fix_msg % 'VACUUM')
self.connection.action('VACUUM') self.connection.action('VACUUM')
self.connection.upgrade_log(fix_msg % 'finished') self.connection.upgrade_log(fix_msg % 'finished')
@ -111,8 +111,7 @@ class MainSanityCheck(db.DBSanityCheck):
for cur_result in sql_result: for cur_result in sql_result:
logger.log(u'Duplicate show detected! %s: %s count: %s' % ( logger.debug(f'Duplicate show detected! {column}: {cur_result[column]} count: {cur_result["count"]}')
column, cur_result[column], cur_result['count']), logger.DEBUG)
cur_dupe_results = self.connection.select( cur_dupe_results = self.connection.select(
'SELECT show_id, ' + column + ' FROM tv_shows WHERE ' + column + ' = ? LIMIT ?', 'SELECT show_id, ' + column + ' FROM tv_shows WHERE ' + column + ' = ? LIMIT ?',
@ -121,15 +120,15 @@ class MainSanityCheck(db.DBSanityCheck):
cl = [] cl = []
for cur_dupe_id in cur_dupe_results: for cur_dupe_id in cur_dupe_results:
logger.log(u'Deleting duplicate show with %s: %s show_id: %s' % ( logger.log(f'Deleting duplicate show with {column}: {cur_dupe_id[column]}'
column, cur_dupe_id[column], cur_dupe_id['show_id'])) f' show_id: {cur_dupe_id["show_id"]}')
cl.append(['DELETE FROM tv_shows WHERE show_id = ?', [cur_dupe_id['show_id']]]) cl.append(['DELETE FROM tv_shows WHERE show_id = ?', [cur_dupe_id['show_id']]])
if 0 < len(cl): if 0 < len(cl):
self.connection.mass_action(cl) self.connection.mass_action(cl)
else: else:
logger.log(u'No duplicate show, check passed') logger.log('No duplicate show, check passed')
def fix_duplicate_episodes(self): def fix_duplicate_episodes(self):
@ -146,9 +145,9 @@ class MainSanityCheck(db.DBSanityCheck):
for cur_result in sql_result: for cur_result in sql_result:
logger.log(u'Duplicate episode detected! prod_id: %s season: %s episode: %s count: %s' % logger.debug(f'Duplicate episode detected! prod_id: {cur_result["prod_id"]}'
(cur_result['prod_id'], cur_result['season'], cur_result['episode'], f' season: {cur_result["season"]} episode: {cur_result["episode"]}'
cur_result['count']), logger.DEBUG) f' count: {cur_result["count"]}')
cur_dupe_results = self.connection.select( cur_dupe_results = self.connection.select(
'SELECT episode_id' 'SELECT episode_id'
@ -163,14 +162,14 @@ class MainSanityCheck(db.DBSanityCheck):
cl = [] cl = []
for cur_dupe_id in cur_dupe_results: for cur_dupe_id in cur_dupe_results:
logger.log(u'Deleting duplicate episode with episode_id: %s' % cur_dupe_id['episode_id']) logger.log(f'Deleting duplicate episode with episode_id: {cur_dupe_id["episode_id"]}')
cl.append(['DELETE FROM tv_episodes WHERE episode_id = ?', [cur_dupe_id['episode_id']]]) cl.append(['DELETE FROM tv_episodes WHERE episode_id = ?', [cur_dupe_id["episode_id"]]])
if 0 < len(cl): if 0 < len(cl):
self.connection.mass_action(cl) self.connection.mass_action(cl)
else: else:
logger.log(u'No duplicate episode, check passed') logger.log('No duplicate episode, check passed')
def fix_orphan_episodes(self): def fix_orphan_episodes(self):
@ -182,16 +181,16 @@ class MainSanityCheck(db.DBSanityCheck):
cl = [] cl = []
for cur_result in sql_result: for cur_result in sql_result:
logger.log(u'Orphan episode detected! episode_id: %s showid: %s' % ( logger.debug(f'Orphan episode detected! episode_id: {cur_result["episode_id"]}'
cur_result['episode_id'], cur_result['showid']), logger.DEBUG) f' showid: {cur_result["showid"]}')
logger.log(u'Deleting orphan episode with episode_id: %s' % cur_result['episode_id']) logger.log(f'Deleting orphan episode with episode_id: {cur_result["episode_id"]}')
cl.append(['DELETE FROM tv_episodes WHERE episode_id = ?', [cur_result['episode_id']]]) cl.append(['DELETE FROM tv_episodes WHERE episode_id = ?', [cur_result['episode_id']]])
if 0 < len(cl): if 0 < len(cl):
self.connection.mass_action(cl) self.connection.mass_action(cl)
else: else:
logger.log(u'No orphan episodes, check passed') logger.log('No orphan episodes, check passed')
def fix_missing_table_indexes(self): def fix_missing_table_indexes(self):
if not self.connection.select('PRAGMA index_info("idx_indexer_id")'): if not self.connection.select('PRAGMA index_info("idx_indexer_id")'):
@ -240,9 +239,9 @@ class MainSanityCheck(db.DBSanityCheck):
cl = [] cl = []
for cur_result in sql_result: for cur_result in sql_result:
logger.log(u'UNAIRED episode detected! episode_id: %s showid: %s' % ( logger.debug(f'UNAIRED episode detected! episode_id: {cur_result["episode_id"]}'
cur_result['episode_id'], cur_result['showid']), logger.DEBUG) f' showid: {cur_result["showid"]}')
logger.log(u'Fixing unaired episode status with episode_id: %s' % cur_result['episode_id']) logger.log(f'Fixing unaired episode status with episode_id: {cur_result["episode_id"]}')
cl.append(['UPDATE tv_episodes SET status = ? WHERE episode_id = ?', cl.append(['UPDATE tv_episodes SET status = ? WHERE episode_id = ?',
[common.UNAIRED, cur_result['episode_id']]]) [common.UNAIRED, cur_result['episode_id']]])
@ -250,7 +249,7 @@ class MainSanityCheck(db.DBSanityCheck):
self.connection.mass_action(cl) self.connection.mass_action(cl)
else: else:
logger.log(u'No UNAIRED episodes, check passed') logger.log('No UNAIRED episodes, check passed')
def fix_scene_exceptions(self): def fix_scene_exceptions(self):
@ -387,21 +386,17 @@ class InitialSchema(db.SchemaUpgrade):
if cur_db_version < MIN_DB_VERSION: if cur_db_version < MIN_DB_VERSION:
logger.log_error_and_exit( logger.log_error_and_exit(
u'Your database version (' + str(cur_db_version) f'Your database version ({cur_db_version}) is too old to migrate from'
+ ') is too old to migrate from what this version of SickGear supports (' f' what this version of SickGear supports ({MIN_DB_VERSION}).\n'
+ str(MIN_DB_VERSION) + ').' + "\n"
+ 'Upgrade using a previous version (tag) build 496 to build 501 of SickGear' + 'Upgrade using a previous version (tag) build 496 to build 501 of SickGear'
' first or remove database file to begin fresh.' ' first or remove database file to begin fresh.')
)
if cur_db_version > MAX_DB_VERSION: if cur_db_version > MAX_DB_VERSION:
logger.log_error_and_exit( logger.log_error_and_exit(
u'Your database version (' + str(cur_db_version) f'Your database version ({cur_db_version}) has been incremented past'
+ ') has been incremented past what this version of SickGear supports (' f' what this version of SickGear supports ({MAX_DB_VERSION}).\n'
+ str(MAX_DB_VERSION) + ').\n'
+ 'If you have used other forks of SickGear,' + 'If you have used other forks of SickGear,'
' your database may be unusable due to their modifications.' ' your database may be unusable due to their modifications.')
)
return self.call_check_db_version() return self.call_check_db_version()
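The two guards above reduce to a range check on the stored schema version. A sketch with stand-in bounds (both constants are illustrative, not this module's real values):

    MIN_DB_VERSION, MAX_DB_VERSION = 9, 20014  # stand-in bounds

    def check_db_range(cur_db_version):
        if cur_db_version < MIN_DB_VERSION:
            raise SystemExit(f'Your database version ({cur_db_version}) is too old to migrate from'
                             f' what this version of SickGear supports ({MIN_DB_VERSION}).')
        if cur_db_version > MAX_DB_VERSION:
            raise SystemExit(f'Your database version ({cur_db_version}) has been incremented past'
                             f' what this version of SickGear supports ({MAX_DB_VERSION}).')

    check_db_range(10000)  # inside the range: no exit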
@ -423,7 +418,7 @@ class AddSizeAndSceneNameFields(db.SchemaUpgrade):
sql_result = self.connection.select('SELECT episode_id, location, file_size FROM tv_episodes') sql_result = self.connection.select('SELECT episode_id, location, file_size FROM tv_episodes')
self.upgrade_log(u'Adding file size to all episodes in DB, please be patient') self.upgrade_log('Adding file size to all episodes in DB, please be patient')
for cur_result in sql_result: for cur_result in sql_result:
if not cur_result['location']: if not cur_result['location']:
continue continue
@ -439,7 +434,7 @@ class AddSizeAndSceneNameFields(db.SchemaUpgrade):
# noinspection SqlRedundantOrderingDirection # noinspection SqlRedundantOrderingDirection
history_sql_result = self.connection.select('SELECT * FROM history WHERE provider != -1 ORDER BY date ASC') history_sql_result = self.connection.select('SELECT * FROM history WHERE provider != -1 ORDER BY date ASC')
self.upgrade_log(u'Adding release name to all episodes still in history') self.upgrade_log('Adding release name to all episodes still in history')
for cur_result in history_sql_result: for cur_result in history_sql_result:
# find the associated download, if there isn't one then ignore it # find the associated download, if there isn't one then ignore it
# noinspection SqlResolve # noinspection SqlResolve
@ -449,8 +444,8 @@ class AddSizeAndSceneNameFields(db.SchemaUpgrade):
' WHERE provider = -1 AND showid = ? AND season = ? AND episode = ? AND date > ?', ' WHERE provider = -1 AND showid = ? AND season = ? AND episode = ? AND date > ?',
[cur_result['showid'], cur_result['season'], cur_result['episode'], cur_result['date']]) [cur_result['showid'], cur_result['season'], cur_result['episode'], cur_result['date']])
if not download_sql_result: if not download_sql_result:
self.upgrade_log(u'Found a snatch in the history for ' + cur_result['resource'] self.upgrade_log(f'Found a snatch in the history for {cur_result["resource"]}'
+ ' but couldn\'t find the associated download, skipping it', logger.DEBUG) f' but couldn\'t find the associated download, skipping it', logger.DEBUG)
continue continue
nzb_name = cur_result['resource'] nzb_name = cur_result['resource']
@ -468,9 +463,8 @@ class AddSizeAndSceneNameFields(db.SchemaUpgrade):
' WHERE showid = ? AND season = ? AND episode = ? AND location != ""', ' WHERE showid = ? AND season = ? AND episode = ? AND location != ""',
[cur_result['showid'], cur_result['season'], cur_result['episode']]) [cur_result['showid'], cur_result['season'], cur_result['episode']])
if not sql_result: if not sql_result:
logger.log( logger.debug(f'The episode {nzb_name} was found in history but doesn\'t exist on disk anymore,'
u'The episode ' + nzb_name + ' was found in history but doesn\'t exist on disk anymore, skipping', f' skipping')
logger.DEBUG)
continue continue
# get the status/quality of the existing ep and make sure it's what we expect # get the status/quality of the existing ep and make sure it's what we expect
@ -483,7 +477,7 @@ class AddSizeAndSceneNameFields(db.SchemaUpgrade):
# make sure this is actually a real release name and not a season pack or something # make sure this is actually a real release name and not a season pack or something
for cur_name in (nzb_name, file_name): for cur_name in (nzb_name, file_name):
logger.log(u'Checking if ' + cur_name + ' is actually a good release name', logger.DEBUG) logger.debug(f'Checking if {cur_name} is actually a good release name')
try: try:
np = NameParser(False) np = NameParser(False)
parse_result = np.parse(cur_name) parse_result = np.parse(cur_name)
@ -503,7 +497,7 @@ class AddSizeAndSceneNameFields(db.SchemaUpgrade):
' FROM tv_episodes' ' FROM tv_episodes'
' WHERE release_name = ""') ' WHERE release_name = ""')
self.upgrade_log(u'Adding release name to all episodes with obvious scene filenames') self.upgrade_log('Adding release name to all episodes with obvious scene filenames')
for cur_result in empty_sql_result: for cur_result in empty_sql_result:
ep_file_name = os.path.basename(cur_result['location']) ep_file_name = os.path.basename(cur_result['location'])
@ -522,9 +516,7 @@ class AddSizeAndSceneNameFields(db.SchemaUpgrade):
if not parse_result.release_group: if not parse_result.release_group:
continue continue
logger.log( logger.debug(f'Name {ep_file_name} gave release group of {parse_result.release_group}, seems valid')
u'Name ' + ep_file_name + ' gave release group of ' + parse_result.release_group + ', seems valid',
logger.DEBUG)
self.connection.action('UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?', self.connection.action('UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?',
[ep_file_name, cur_result['episode_id']]) [ep_file_name, cur_result['episode_id']])
@ -651,7 +643,7 @@ class Add1080pAndRawHDQualities(db.SchemaUpgrade):
common.Quality.UNKNOWN], []) common.Quality.UNKNOWN], [])
# update qualities (including templates) # update qualities (including templates)
self.upgrade_log(u'[1/4] Updating pre-defined templates and the quality for each show...') self.upgrade_log('[1/4] Updating pre-defined templates and the quality for each show...')
cl = [] cl = []
shows = self.connection.select('SELECT * FROM tv_shows') shows = self.connection.select('SELECT * FROM tv_shows')
for cur_show in shows: for cur_show in shows:
@ -666,7 +658,7 @@ class Add1080pAndRawHDQualities(db.SchemaUpgrade):
# update status that are within the old hdwebdl # update status that are within the old hdwebdl
# (1<<3 which is 8) and better -- exclude unknown (1<<15 which is 32768) # (1<<3 which is 8) and better -- exclude unknown (1<<15 which is 32768)
self.upgrade_log(u'[2/4] Updating the status for the episodes within each show...') self.upgrade_log('[2/4] Updating the status for the episodes within each show...')
cl = [] cl = []
sql_result = self.connection.select('SELECT * FROM tv_episodes WHERE status < 3276800 AND status >= 800') sql_result = self.connection.select('SELECT * FROM tv_episodes WHERE status < 3276800 AND status >= 800')
for cur_result in sql_result: for cur_result in sql_result:
@ -678,7 +670,7 @@ class Add1080pAndRawHDQualities(db.SchemaUpgrade):
# may not always coordinate together # may not always coordinate together
# update previous history so it shows the correct action # update previous history so it shows the correct action
self.upgrade_log(u'[3/4] Updating history to reflect the correct action...') self.upgrade_log('[3/4] Updating history to reflect the correct action...')
cl = [] cl = []
# noinspection SqlResolve # noinspection SqlResolve
history_action = self.connection.select('SELECT * FROM history WHERE action < 3276800 AND action >= 800') history_action = self.connection.select('SELECT * FROM history WHERE action < 3276800 AND action >= 800')
@ -688,7 +680,7 @@ class Add1080pAndRawHDQualities(db.SchemaUpgrade):
self.connection.mass_action(cl) self.connection.mass_action(cl)
# update previous history so it shows the correct quality # update previous history so it shows the correct quality
self.upgrade_log(u'[4/4] Updating history to reflect the correct quality...') self.upgrade_log('[4/4] Updating history to reflect the correct quality...')
cl = [] cl = []
# noinspection SqlResolve # noinspection SqlResolve
history_quality = self.connection.select('SELECT * FROM history WHERE quality < 32768 AND quality >= 8') history_quality = self.connection.select('SELECT * FROM history WHERE quality < 32768 AND quality >= 8')
@ -700,7 +692,7 @@ class Add1080pAndRawHDQualities(db.SchemaUpgrade):
self.inc_db_version() self.inc_db_version()
# cleanup and reduce db if any previous data was removed # cleanup and reduce db if any previous data was removed
self.upgrade_log(u'Performing a vacuum on the database.', logger.DEBUG) self.upgrade_log('Performing a vacuum on the database.', logger.DEBUG)
self.connection.action('VACUUM') self.connection.action('VACUUM')
return self.call_check_db_version() return self.call_check_db_version()
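The VACUUM that closes this upgrade rebuilds the database file to reclaim the space freed by the updates above. In plain sqlite3 terms (the file name is a placeholder):

    import sqlite3

    con = sqlite3.connect('sickbeard.db')
    con.isolation_level = None  # VACUUM cannot run inside a transaction
    con.execute('VACUUM')
    con.close()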
@ -712,10 +704,10 @@ class AddShowidTvdbidIndex(db.SchemaUpgrade):
def execute(self): def execute(self):
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Checking for duplicate shows before adding unique index.') self.upgrade_log('Checking for duplicate shows before adding unique index.')
MainSanityCheck(self.connection).fix_duplicate_shows('tvdb_id') MainSanityCheck(self.connection).fix_duplicate_shows('tvdb_id')
self.upgrade_log(u'Adding index on tvdb_id (tv_shows) and showid (tv_episodes) to speed up searches/queries.') self.upgrade_log('Adding index on tvdb_id (tv_shows) and showid (tv_episodes) to speed up searches/queries.')
if not self.has_table('idx_showid'): if not self.has_table('idx_showid'):
self.connection.action('CREATE INDEX idx_showid ON tv_episodes (showid);') self.connection.action('CREATE INDEX idx_showid ON tv_episodes (showid);')
if not self.has_table('idx_tvdb_id'): if not self.has_table('idx_tvdb_id'):
@ -732,7 +724,7 @@ class AddLastUpdateTVDB(db.SchemaUpgrade):
def execute(self): def execute(self):
if not self.has_column('tv_shows', 'last_update_tvdb'): if not self.has_column('tv_shows', 'last_update_tvdb'):
self.upgrade_log(u'Adding column last_update_tvdb to tv_shows') self.upgrade_log('Adding column last_update_tvdb to tv_shows')
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.add_column('tv_shows', 'last_update_tvdb', default=1) self.add_column('tv_shows', 'last_update_tvdb', default=1)
@ -745,7 +737,7 @@ class AddDBIncreaseTo15(db.SchemaUpgrade):
def execute(self): def execute(self):
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Bumping database version to v%s' % self.call_check_db_version()) self.upgrade_log(f'Bumping database version to v{self.call_check_db_version()}')
self.inc_db_version() self.inc_db_version()
return self.call_check_db_version() return self.call_check_db_version()
@ -756,7 +748,7 @@ class AddIMDbInfo(db.SchemaUpgrade):
db_backed_up = False db_backed_up = False
if not self.has_table('imdb_info'): if not self.has_table('imdb_info'):
self.upgrade_log(u'Creating IMDb table imdb_info') self.upgrade_log('Creating IMDb table imdb_info')
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
db_backed_up = True db_backed_up = True
@ -766,7 +758,7 @@ class AddIMDbInfo(db.SchemaUpgrade):
' rating TEXT, votes INTEGER, last_update NUMERIC)') ' rating TEXT, votes INTEGER, last_update NUMERIC)')
if not self.has_column('tv_shows', 'imdb_id'): if not self.has_column('tv_shows', 'imdb_id'):
self.upgrade_log(u'Adding IMDb column imdb_id to tv_shows') self.upgrade_log('Adding IMDb column imdb_id to tv_shows')
if not db_backed_up: if not db_backed_up:
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
@ -786,7 +778,7 @@ class AddProperNamingSupport(db.SchemaUpgrade):
return self.set_db_version(5816) return self.set_db_version(5816)
if not self.has_column('tv_episodes', 'is_proper'): if not self.has_column('tv_episodes', 'is_proper'):
self.upgrade_log(u'Adding column is_proper to tv_episodes') self.upgrade_log('Adding column is_proper to tv_episodes')
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.add_column('tv_episodes', 'is_proper') self.add_column('tv_episodes', 'is_proper')
@ -805,7 +797,7 @@ class AddEmailSubscriptionTable(db.SchemaUpgrade):
return self.set_db_version(5817) return self.set_db_version(5817)
if not self.has_column('tv_shows', 'notify_list'): if not self.has_column('tv_shows', 'notify_list'):
self.upgrade_log(u'Adding column notify_list to tv_shows') self.upgrade_log('Adding column notify_list to tv_shows')
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.add_column('tv_shows', 'notify_list', 'TEXT', None) self.add_column('tv_shows', 'notify_list', 'TEXT', None)
@ -827,7 +819,7 @@ class AddProperSearch(db.SchemaUpgrade):
return self.set_db_version(5818) return self.set_db_version(5818)
if not self.has_column('info', 'last_proper_search'): if not self.has_column('info', 'last_proper_search'):
self.upgrade_log(u'Adding column last_proper_search to info') self.upgrade_log('Adding column last_proper_search to info')
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.add_column('info', 'last_proper_search', default=1) self.add_column('info', 'last_proper_search', default=1)
@ -839,7 +831,7 @@ class AddProperSearch(db.SchemaUpgrade):
class AddDvdOrderOption(db.SchemaUpgrade): class AddDvdOrderOption(db.SchemaUpgrade):
def execute(self): def execute(self):
if not self.has_column('tv_shows', 'dvdorder'): if not self.has_column('tv_shows', 'dvdorder'):
self.upgrade_log(u'Adding column dvdorder to tv_shows') self.upgrade_log('Adding column dvdorder to tv_shows')
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.add_column('tv_shows', 'dvdorder', 'NUMERIC', '0') self.add_column('tv_shows', 'dvdorder', 'NUMERIC', '0')
@ -851,7 +843,7 @@ class AddDvdOrderOption(db.SchemaUpgrade):
class AddSubtitlesSupport(db.SchemaUpgrade): class AddSubtitlesSupport(db.SchemaUpgrade):
def execute(self): def execute(self):
if not self.has_column('tv_shows', 'subtitles'): if not self.has_column('tv_shows', 'subtitles'):
self.upgrade_log(u'Adding subtitles to tv_shows and tv_episodes') self.upgrade_log('Adding subtitles to tv_shows and tv_episodes')
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.add_column('tv_shows', 'subtitles') self.add_column('tv_shows', 'subtitles')
self.add_column('tv_episodes', 'subtitles', 'TEXT', '') self.add_column('tv_episodes', 'subtitles', 'TEXT', '')
@ -867,10 +859,10 @@ class ConvertTVShowsToIndexerScheme(db.SchemaUpgrade):
def execute(self): def execute(self):
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Converting TV Shows table to Indexer Scheme...') self.upgrade_log('Converting TV Shows table to Indexer Scheme...')
if self.has_table('tmp_tv_shows'): if self.has_table('tmp_tv_shows'):
self.upgrade_log(u'Removing temp tv show tables left behind from previous updates...') self.upgrade_log('Removing temp tv show tables left behind from previous updates...')
# noinspection SqlResolve # noinspection SqlResolve
self.connection.action('DROP TABLE tmp_tv_shows') self.connection.action('DROP TABLE tmp_tv_shows')
@ -908,10 +900,10 @@ class ConvertTVEpisodesToIndexerScheme(db.SchemaUpgrade):
def execute(self): def execute(self):
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Converting TV Episodes table to Indexer Scheme...') self.upgrade_log('Converting TV Episodes table to Indexer Scheme...')
if self.has_table('tmp_tv_episodes'): if self.has_table('tmp_tv_episodes'):
self.upgrade_log(u'Removing temp tv episode tables left behind from previous updates...') self.upgrade_log('Removing temp tv episode tables left behind from previous updates...')
# noinspection SqlResolve # noinspection SqlResolve
self.connection.action('DROP TABLE tmp_tv_episodes') self.connection.action('DROP TABLE tmp_tv_episodes')
@ -949,10 +941,10 @@ class ConvertIMDBInfoToIndexerScheme(db.SchemaUpgrade):
def execute(self): def execute(self):
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Converting IMDb Info table to Indexer Scheme...') self.upgrade_log('Converting IMDb Info table to Indexer Scheme...')
if self.has_table('tmp_imdb_info'): if self.has_table('tmp_imdb_info'):
self.upgrade_log(u'Removing temp imdb info tables left behind from previous updates...') self.upgrade_log('Removing temp imdb info tables left behind from previous updates...')
# noinspection SqlResolve # noinspection SqlResolve
self.connection.action('DROP TABLE tmp_imdb_info') self.connection.action('DROP TABLE tmp_imdb_info')
@ -978,10 +970,10 @@ class ConvertInfoToIndexerScheme(db.SchemaUpgrade):
def execute(self): def execute(self):
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Converting Info table to Indexer Scheme...') self.upgrade_log('Converting Info table to Indexer Scheme...')
if self.has_table('tmp_info'): if self.has_table('tmp_info'):
self.upgrade_log(u'Removing temp info tables left behind from previous updates...') self.upgrade_log('Removing temp info tables left behind from previous updates...')
# noinspection SqlResolve # noinspection SqlResolve
self.connection.action('DROP TABLE tmp_info') self.connection.action('DROP TABLE tmp_info')
@ -1005,7 +997,7 @@ class AddArchiveFirstMatchOption(db.SchemaUpgrade):
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
if not self.has_column('tv_shows', 'archive_firstmatch'): if not self.has_column('tv_shows', 'archive_firstmatch'):
self.upgrade_log(u'Adding column archive_firstmatch to tv_shows') self.upgrade_log('Adding column archive_firstmatch to tv_shows')
self.add_column('tv_shows', 'archive_firstmatch', 'NUMERIC', '0') self.add_column('tv_shows', 'archive_firstmatch', 'NUMERIC', '0')
self.inc_db_version() self.inc_db_version()
@ -1020,7 +1012,7 @@ class AddSceneNumbering(db.SchemaUpgrade):
if self.has_table('scene_numbering'): if self.has_table('scene_numbering'):
self.connection.action('DROP TABLE scene_numbering') self.connection.action('DROP TABLE scene_numbering')
self.upgrade_log(u'Upgrading table scene_numbering ...') self.upgrade_log('Upgrading table scene_numbering ...')
self.connection.action( self.connection.action(
'CREATE TABLE scene_numbering (indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER,' 'CREATE TABLE scene_numbering (indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER,'
' scene_season INTEGER, scene_episode INTEGER,' ' scene_season INTEGER, scene_episode INTEGER,'
@ -1036,7 +1028,7 @@ class ConvertIndexerToInteger(db.SchemaUpgrade):
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
cl = [] cl = []
self.upgrade_log(u'Converting Indexer to Integer ...') self.upgrade_log('Converting Indexer to Integer ...')
cl.append(['UPDATE tv_shows SET indexer = ? WHERE LOWER(indexer) = ?', ['1', 'tvdb']]) cl.append(['UPDATE tv_shows SET indexer = ? WHERE LOWER(indexer) = ?', ['1', 'tvdb']])
cl.append(['UPDATE tv_shows SET indexer = ? WHERE LOWER(indexer) = ?', ['2', 'tvrage']]) cl.append(['UPDATE tv_shows SET indexer = ? WHERE LOWER(indexer) = ?', ['2', 'tvrage']])
cl.append(['UPDATE tv_episodes SET indexer = ? WHERE LOWER(indexer) = ?', ['1', 'tvdb']]) cl.append(['UPDATE tv_episodes SET indexer = ? WHERE LOWER(indexer) = ?', ['1', 'tvdb']])
@ -1060,13 +1052,13 @@ class AddRequireAndIgnoreWords(db.SchemaUpgrade):
db_backed_up = False db_backed_up = False
if not self.has_column('tv_shows', 'rls_require_words'): if not self.has_column('tv_shows', 'rls_require_words'):
self.upgrade_log(u'Adding column rls_require_words to tv_shows') self.upgrade_log('Adding column rls_require_words to tv_shows')
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
db_backed_up = True db_backed_up = True
self.add_column('tv_shows', 'rls_require_words', 'TEXT', '') self.add_column('tv_shows', 'rls_require_words', 'TEXT', '')
if not self.has_column('tv_shows', 'rls_ignore_words'): if not self.has_column('tv_shows', 'rls_ignore_words'):
self.upgrade_log(u'Adding column rls_ignore_words to tv_shows') self.upgrade_log('Adding column rls_ignore_words to tv_shows')
if not db_backed_up: if not db_backed_up:
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.add_column('tv_shows', 'rls_ignore_words', 'TEXT', '') self.add_column('tv_shows', 'rls_ignore_words', 'TEXT', '')
@ -1080,14 +1072,14 @@ class AddSportsOption(db.SchemaUpgrade):
def execute(self): def execute(self):
db_backed_up = False db_backed_up = False
if not self.has_column('tv_shows', 'sports'): if not self.has_column('tv_shows', 'sports'):
self.upgrade_log(u'Adding column sports to tv_shows') self.upgrade_log('Adding column sports to tv_shows')
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
db_backed_up = True db_backed_up = True
self.add_column('tv_shows', 'sports', 'NUMERIC', '0') self.add_column('tv_shows', 'sports', 'NUMERIC', '0')
if self.has_column('tv_shows', 'air_by_date') and self.has_column('tv_shows', 'sports'): if self.has_column('tv_shows', 'air_by_date') and self.has_column('tv_shows', 'sports'):
# update sports column # update sports column
self.upgrade_log(u'[4/4] Updating tv_shows to reflect the correct sports value...') self.upgrade_log('[4/4] Updating tv_shows to reflect the correct sports value...')
if not db_backed_up: if not db_backed_up:
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
cl = [] cl = []
@ -1108,7 +1100,7 @@ class AddSceneNumberingToTvEpisodes(db.SchemaUpgrade):
def execute(self): def execute(self):
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Adding columns scene_season and scene_episode to tvepisodes') self.upgrade_log('Adding columns scene_season and scene_episode to tvepisodes')
self.add_column('tv_episodes', 'scene_season', 'NUMERIC', 'NULL') self.add_column('tv_episodes', 'scene_season', 'NUMERIC', 'NULL')
self.add_column('tv_episodes', 'scene_episode', 'NUMERIC', 'NULL') self.add_column('tv_episodes', 'scene_episode', 'NUMERIC', 'NULL')
@ -1121,7 +1113,7 @@ class AddAnimeTVShow(db.SchemaUpgrade):
def execute(self): def execute(self):
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Adding column anime to tv_episodes') self.upgrade_log('Adding column anime to tv_shows')
self.add_column('tv_shows', 'anime', 'NUMERIC', '0') self.add_column('tv_shows', 'anime', 'NUMERIC', '0')
self.inc_db_version() self.inc_db_version()
@ -1133,7 +1125,7 @@ class AddAbsoluteNumbering(db.SchemaUpgrade):
def execute(self): def execute(self):
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Adding column absolute_number to tv_episodes') self.upgrade_log('Adding column absolute_number to tv_episodes')
self.add_column('tv_episodes', 'absolute_number', 'NUMERIC', '0') self.add_column('tv_episodes', 'absolute_number', 'NUMERIC', '0')
self.inc_db_version() self.inc_db_version()
@ -1145,7 +1137,7 @@ class AddSceneAbsoluteNumbering(db.SchemaUpgrade):
def execute(self): def execute(self):
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Adding columns absolute_number and scene_absolute_number to scene_numbering') self.upgrade_log('Adding columns absolute_number and scene_absolute_number to scene_numbering')
self.add_column('scene_numbering', 'absolute_number', 'NUMERIC', '0') self.add_column('scene_numbering', 'absolute_number', 'NUMERIC', '0')
self.add_column('scene_numbering', 'scene_absolute_number', 'NUMERIC', '0') self.add_column('scene_numbering', 'scene_absolute_number', 'NUMERIC', '0')
@ -1160,7 +1152,7 @@ class AddAnimeAllowlistBlocklist(db.SchemaUpgrade):
cl = [['CREATE TABLE allowlist (show_id INTEGER, range TEXT, keyword TEXT, indexer NUMERIC)'], cl = [['CREATE TABLE allowlist (show_id INTEGER, range TEXT, keyword TEXT, indexer NUMERIC)'],
['CREATE TABLE blocklist (show_id INTEGER, range TEXT, keyword TEXT, indexer NUMERIC)']] ['CREATE TABLE blocklist (show_id INTEGER, range TEXT, keyword TEXT, indexer NUMERIC)']]
self.upgrade_log(u'Creating tables for anime allow and block lists') self.upgrade_log('Creating tables for anime allow and block lists')
self.connection.mass_action(cl) self.connection.mass_action(cl)
self.inc_db_version() self.inc_db_version()
@ -1172,7 +1164,7 @@ class AddSceneAbsoluteNumbering2(db.SchemaUpgrade):
def execute(self): def execute(self):
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Adding column scene_absolute_number to tv_episodes') self.upgrade_log('Adding column scene_absolute_number to tv_episodes')
self.add_column('tv_episodes', 'scene_absolute_number', 'NUMERIC', '0') self.add_column('tv_episodes', 'scene_absolute_number', 'NUMERIC', '0')
self.inc_db_version() self.inc_db_version()
@ -1184,7 +1176,7 @@ class AddXemRefresh(db.SchemaUpgrade):
def execute(self): def execute(self):
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Creating table xem_refresh') self.upgrade_log('Creating table xem_refresh')
self.connection.action( self.connection.action(
'CREATE TABLE xem_refresh (indexer TEXT, indexer_id INTEGER PRIMARY KEY, last_refreshed INTEGER)') 'CREATE TABLE xem_refresh (indexer TEXT, indexer_id INTEGER PRIMARY KEY, last_refreshed INTEGER)')
@ -1197,7 +1189,7 @@ class AddSceneToTvShows(db.SchemaUpgrade):
def execute(self): def execute(self):
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Adding column scene to tv_shows') self.upgrade_log('Adding column scene to tv_shows')
self.add_column('tv_shows', 'scene', 'NUMERIC', '0') self.add_column('tv_shows', 'scene', 'NUMERIC', '0')
self.inc_db_version() self.inc_db_version()
@ -1212,7 +1204,7 @@ class AddIndexerMapping(db.SchemaUpgrade):
if self.has_table('indexer_mapping'): if self.has_table('indexer_mapping'):
self.connection.action('DROP TABLE indexer_mapping') self.connection.action('DROP TABLE indexer_mapping')
self.upgrade_log(u'Adding table indexer_mapping') self.upgrade_log('Adding table indexer_mapping')
self.connection.action( self.connection.action(
'CREATE TABLE indexer_mapping (indexer_id INTEGER, indexer NUMERIC, mindexer_id INTEGER, mindexer NUMERIC,' 'CREATE TABLE indexer_mapping (indexer_id INTEGER, indexer NUMERIC, mindexer_id INTEGER, mindexer NUMERIC,'
' PRIMARY KEY (indexer_id, indexer))') ' PRIMARY KEY (indexer_id, indexer))')
@ -1226,11 +1218,11 @@ class AddVersionToTvEpisodes(db.SchemaUpgrade):
def execute(self): def execute(self):
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Adding columns release_group and version to tv_episodes') self.upgrade_log('Adding columns release_group and version to tv_episodes')
self.add_column('tv_episodes', 'release_group', 'TEXT', '') self.add_column('tv_episodes', 'release_group', 'TEXT', '')
self.add_column('tv_episodes', 'version', 'NUMERIC', '-1') self.add_column('tv_episodes', 'version', 'NUMERIC', '-1')
self.upgrade_log(u'Adding column version to history') self.upgrade_log('Adding column version to history')
self.add_column('history', 'version', 'NUMERIC', '-1') self.add_column('history', 'version', 'NUMERIC', '-1')
self.inc_db_version() self.inc_db_version()
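Nearly every migration above funnels through one helper. A minimal sketch of what add_column(table, column, type, default) plausibly does; the signature is taken from the calls in these hunks, the body is an assumption (SQLite's ALTER TABLE can only add columns, hence the add-then-backfill pattern):

    def add_column(self, table, column, data_type='NUMERIC', default=0):
        # SQLite cannot rewrite a table in place, so add the column then backfill the default
        self.connection.action('ALTER TABLE [%s] ADD %s %s' % (table, column, data_type))
        self.connection.action('UPDATE [%s] SET %s = ?' % (table, column), [default])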
@ -1242,7 +1234,7 @@ class BumpDatabaseVersion(db.SchemaUpgrade):
def execute(self): def execute(self):
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Bumping database version') self.upgrade_log('Bumping database version')
return self.set_db_version(10000) return self.set_db_version(10000)
@ -1252,7 +1244,7 @@ class Migrate41(db.SchemaUpgrade):
def execute(self): def execute(self):
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Bumping database version') self.upgrade_log('Bumping database version')
return self.set_db_version(10001) return self.set_db_version(10001)
@ -1267,7 +1259,7 @@ class Migrate43(db.SchemaUpgrade):
if self.has_table(table): if self.has_table(table):
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
db_backed_up = True db_backed_up = True
self.upgrade_log(u'Dropping redundant table tmdb_info') self.upgrade_log('Dropping redundant table tmdb_info')
# noinspection SqlResolve # noinspection SqlResolve
self.connection.action('DROP TABLE [%s]' % table) self.connection.action('DROP TABLE [%s]' % table)
db_chg = True db_chg = True
@ -1276,7 +1268,7 @@ class Migrate43(db.SchemaUpgrade):
if not db_backed_up: if not db_backed_up:
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
db_backed_up = True db_backed_up = True
self.upgrade_log(u'Dropping redundant tmdb_info refs') self.upgrade_log('Dropping redundant tmdb_info refs')
self.drop_columns('tv_shows', 'tmdb_id') self.drop_columns('tv_shows', 'tmdb_id')
db_chg = True db_chg = True
@ -1288,7 +1280,7 @@ class Migrate43(db.SchemaUpgrade):
self.connection.action('INSERT INTO db_version (db_version) VALUES (0);') self.connection.action('INSERT INTO db_version (db_version) VALUES (0);')
if not db_chg: if not db_chg:
self.upgrade_log(u'Bumping database version') self.upgrade_log('Bumping database version')
return self.set_db_version(10001) return self.set_db_version(10001)
@ -1298,7 +1290,7 @@ class Migrate4301(db.SchemaUpgrade):
def execute(self): def execute(self):
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Bumping database version') self.upgrade_log('Bumping database version')
return self.set_db_version(10002) return self.set_db_version(10002)
@ -1308,7 +1300,7 @@ class Migrate4302(db.SchemaUpgrade):
def execute(self): def execute(self):
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Bumping database version') self.upgrade_log('Bumping database version')
return self.set_db_version(10003) return self.set_db_version(10003)
@ -1318,7 +1310,7 @@ class MigrateUpstream(db.SchemaUpgrade):
def execute(self): def execute(self):
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Migrate SickBeard db v%s into v15' % str(self.call_check_db_version()).replace('58', '')) self.upgrade_log(f'Migrate SickBeard db v{str(self.call_check_db_version()).replace("58", "")} into v15')
return self.set_db_version(15) return self.set_db_version(15)
@ -1328,7 +1320,7 @@ class SickGearDatabaseVersion(db.SchemaUpgrade):
def execute(self): def execute(self):
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Bumping database version to new SickGear standards') self.upgrade_log('Bumping database version to new SickGear standards')
return self.set_db_version(20000) return self.set_db_version(20000)
@ -1338,7 +1330,7 @@ class RemoveDefaultEpStatusFromTvShows(db.SchemaUpgrade):
def execute(self): def execute(self):
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Dropping redundant column default_ep_status from tv_shows') self.upgrade_log('Dropping redundant column default_ep_status from tv_shows')
self.drop_columns('tv_shows', 'default_ep_status') self.drop_columns('tv_shows', 'default_ep_status')
return self.set_db_version(10000) return self.set_db_version(10000)
@ -1349,7 +1341,7 @@ class RemoveMinorDBVersion(db.SchemaUpgrade):
def execute(self): def execute(self):
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Dropping redundant column db_minor_version from db_version') self.upgrade_log('Dropping redundant column db_minor_version from db_version')
self.drop_columns('db_version', 'db_minor_version') self.drop_columns('db_version', 'db_minor_version')
return self.set_db_version(10001) return self.set_db_version(10001)
@ -1359,7 +1351,7 @@ class RemoveMinorDBVersion(db.SchemaUpgrade):
class RemoveMetadataSub(db.SchemaUpgrade): class RemoveMetadataSub(db.SchemaUpgrade):
def execute(self): def execute(self):
if self.has_column('tv_shows', 'sub_use_sr_metadata'): if self.has_column('tv_shows', 'sub_use_sr_metadata'):
self.upgrade_log(u'Dropping redundant column metadata sub') self.upgrade_log('Dropping redundant column metadata sub')
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.drop_columns('tv_shows', 'sub_use_sr_metadata') self.drop_columns('tv_shows', 'sub_use_sr_metadata')
@ -1371,10 +1363,10 @@ class DBIncreaseTo20001(db.SchemaUpgrade):
def execute(self): def execute(self):
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.upgrade_log(u'Bumping database version to force a backup before new database code') self.upgrade_log('Bumping database version to force a backup before new database code')
self.connection.action('VACUUM') self.connection.action('VACUUM')
self.upgrade_log(u'Performed a vacuum on the database', logger.DEBUG) self.upgrade_log('Performed a vacuum on the database', logger.DEBUG)
return self.set_db_version(20001) return self.set_db_version(20001)
@ -1383,7 +1375,7 @@ class DBIncreaseTo20001(db.SchemaUpgrade):
class AddTvShowOverview(db.SchemaUpgrade): class AddTvShowOverview(db.SchemaUpgrade):
def execute(self): def execute(self):
if not self.has_column('tv_shows', 'overview'): if not self.has_column('tv_shows', 'overview'):
self.upgrade_log(u'Adding column overview to tv_shows') self.upgrade_log('Adding column overview to tv_shows')
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.add_column('tv_shows', 'overview', 'TEXT', '') self.add_column('tv_shows', 'overview', 'TEXT', '')
@ -1394,7 +1386,7 @@ class AddTvShowOverview(db.SchemaUpgrade):
class AddTvShowTags(db.SchemaUpgrade): class AddTvShowTags(db.SchemaUpgrade):
def execute(self): def execute(self):
if not self.has_column('tv_shows', 'tag'): if not self.has_column('tv_shows', 'tag'):
self.upgrade_log(u'Adding tag to tv_shows') self.upgrade_log('Adding tag to tv_shows')
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.add_column('tv_shows', 'tag', 'TEXT', 'Show List') self.add_column('tv_shows', 'tag', 'TEXT', 'Show List')
@ -1410,7 +1402,7 @@ class ChangeMapIndexer(db.SchemaUpgrade):
if self.has_table('indexer_mapping'): if self.has_table('indexer_mapping'):
self.connection.action('DROP TABLE indexer_mapping') self.connection.action('DROP TABLE indexer_mapping')
self.upgrade_log(u'Changing table indexer_mapping') self.upgrade_log('Changing table indexer_mapping')
self.connection.action( self.connection.action(
'CREATE TABLE indexer_mapping (indexer_id INTEGER, indexer NUMERIC, mindexer_id INTEGER NOT NULL,' 'CREATE TABLE indexer_mapping (indexer_id INTEGER, indexer NUMERIC, mindexer_id INTEGER NOT NULL,'
' mindexer NUMERIC, date NUMERIC NOT NULL DEFAULT 0, status INTEGER NOT NULL DEFAULT 0,' ' mindexer NUMERIC, date NUMERIC NOT NULL DEFAULT 0, status INTEGER NOT NULL DEFAULT 0,'
@ -1422,7 +1414,7 @@ class ChangeMapIndexer(db.SchemaUpgrade):
self.upgrade_log('Adding last_run_backlog to info') self.upgrade_log('Adding last_run_backlog to info')
self.add_column('info', 'last_run_backlog', 'NUMERIC', 1) self.add_column('info', 'last_run_backlog', 'NUMERIC', 1)
self.upgrade_log(u'Moving table scene_exceptions from cache.db to sickbeard.db') self.upgrade_log('Moving table scene_exceptions from cache.db to sickbeard.db')
if self.has_table('scene_exceptions_refresh'): if self.has_table('scene_exceptions_refresh'):
self.connection.action('DROP TABLE scene_exceptions_refresh') self.connection.action('DROP TABLE scene_exceptions_refresh')
self.connection.action('CREATE TABLE scene_exceptions_refresh (list TEXT PRIMARY KEY, last_refreshed INTEGER)') self.connection.action('CREATE TABLE scene_exceptions_refresh (list TEXT PRIMARY KEY, last_refreshed INTEGER)')
@ -1467,7 +1459,7 @@ class ChangeMapIndexer(db.SchemaUpgrade):
class AddShowNotFoundCounter(db.SchemaUpgrade): class AddShowNotFoundCounter(db.SchemaUpgrade):
def execute(self): def execute(self):
if not self.has_table('tv_shows_not_found'): if not self.has_table('tv_shows_not_found'):
self.upgrade_log(u'Adding table tv_shows_not_found') self.upgrade_log('Adding table tv_shows_not_found')
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.connection.action( self.connection.action(
@ -1482,7 +1474,7 @@ class AddShowNotFoundCounter(db.SchemaUpgrade):
class AddFlagTable(db.SchemaUpgrade): class AddFlagTable(db.SchemaUpgrade):
def execute(self): def execute(self):
if not self.has_table('flags'): if not self.has_table('flags'):
self.upgrade_log(u'Adding table flags') self.upgrade_log('Adding table flags')
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.connection.action('CREATE TABLE flags (flag PRIMARY KEY NOT NULL )') self.connection.action('CREATE TABLE flags (flag PRIMARY KEY NOT NULL )')
@ -1494,7 +1486,7 @@ class AddFlagTable(db.SchemaUpgrade):
class DBIncreaseTo20007(db.SchemaUpgrade): class DBIncreaseTo20007(db.SchemaUpgrade):
def execute(self): def execute(self):
self.upgrade_log(u'Bumping database version') self.upgrade_log('Bumping database version')
return self.set_db_version(20007) return self.set_db_version(20007)
@ -1517,7 +1509,7 @@ class AddWatched(db.SchemaUpgrade):
self.connection.action('VACUUM') self.connection.action('VACUUM')
if not self.has_table('tv_episodes_watched'): if not self.has_table('tv_episodes_watched'):
self.upgrade_log(u'Adding table tv_episodes_watched') self.upgrade_log('Adding table tv_episodes_watched')
db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version()) db.backup_database(self.connection, 'sickbeard.db', self.call_check_db_version())
self.connection.action( self.connection.action(
@ -1561,7 +1553,7 @@ class AddIndexerToTables(db.SchemaUpgrade):
for t in [(allowtbl, 'show_id'), (blocktbl, 'show_id'), for t in [(allowtbl, 'show_id'), (blocktbl, 'show_id'),
('history', 'showid'), ('scene_exceptions', 'indexer_id')]: ('history', 'showid'), ('scene_exceptions', 'indexer_id')]:
if not self.has_column(t[0], 'indexer'): if not self.has_column(t[0], 'indexer'):
self.upgrade_log(u'Adding TV info support to %s table' % t[0]) self.upgrade_log(f'Adding TV info support to {t[0]} table')
self.add_column(t[0], 'indexer') self.add_column(t[0], 'indexer')
cl = [] cl = []
for s_id, i in iteritems(show_ids): for s_id, i in iteritems(show_ids):
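The loop that follows backfills the new indexer column per show. A sketch of the batch pattern it feeds into mass_action; the shape of show_ids (show id mapped to TV info source id) and the exact SQL are assumptions:

    cl = []
    for s_id, tvid in iteritems(show_ids):
        # one UPDATE per show, queued and executed as a single transaction
        cl.append(['UPDATE history SET indexer = ? WHERE showid = ?', [tvid, s_id]])
    self.connection.mass_action(cl)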

View file

@ -132,21 +132,21 @@ class DBConnection(object):
:return: success, message :return: success, message
""" """
if not db_supports_backup: if not db_supports_backup:
logger.log('this python sqlite3 version doesn\'t support backups', logger.DEBUG) logger.debug('this python sqlite3 version doesn\'t support backups')
return False, 'this python sqlite3 version doesn\'t support backups' return False, 'this python sqlite3 version doesn\'t support backups'
if not os.path.isdir(target): if not os.path.isdir(target):
logger.log('Backup target invalid', logger.ERROR) logger.error('Backup target invalid')
return False, 'Backup target invalid' return False, 'Backup target invalid'
target_db = os.path.join(target, (backup_filename, self.filename)[None is backup_filename]) target_db = os.path.join(target, (backup_filename, self.filename)[None is backup_filename])
if os.path.exists(target_db): if os.path.exists(target_db):
logger.log('Backup target file already exists', logger.ERROR) logger.error('Backup target file already exists')
return False, 'Backup target file already exists' return False, 'Backup target file already exists'
# noinspection PyUnusedLocal # noinspection PyUnusedLocal
def progress(status, remaining, total): def progress(status, remaining, total):
logger.log('Copied %s of %s pages...' % (total - remaining, total), logger.DEBUG) logger.debug('Copied %s of %s pages...' % (total - remaining, total))
backup_con = None backup_con = None
@ -156,9 +156,9 @@ class DBConnection(object):
with backup_con: with backup_con:
with db_lock: with db_lock:
self.connection.backup(backup_con, progress=progress) self.connection.backup(backup_con, progress=progress)
logger.log('%s backup successful' % self.filename, logger.DEBUG) logger.debug('%s backup successful' % self.filename)
except sqlite3.Error as error: except sqlite3.Error as error:
logger.log("Error while taking backup: %s" % ex(error), logger.ERROR) logger.error("Error while taking backup: %s" % ex(error))
return False, 'Backup failed' return False, 'Backup failed'
finally: finally:
if backup_con: if backup_con:
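For reference, the facility this hunk wraps is sqlite3.Connection.backup (stdlib, Python 3.7+), whose progress callback has exactly the (status, remaining, total) shape logged above. A self-contained sketch:

    import sqlite3

    src = sqlite3.connect('sickbeard.db')
    dest = sqlite3.connect('sickbeard.db.bak')

    def progress(status, remaining, total):
        # invoked after each batch of pages is copied
        print('Copied %s of %s pages...' % (total - remaining, total))

    try:
        src.backup(dest, pages=100, progress=progress)
    finally:
        dest.close()
        src.close()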
@ -226,8 +226,8 @@ class DBConnection(object):
self.connection.commit() self.connection.commit()
if 0 < affected: if 0 < affected:
logger.debug(u'Transaction with %s queries executed affected at least %i row%s' % ( logger.debug(f'Transaction with {len(queries)} queries executed affected at least {affected:d}'
len(queries), affected, helpers.maybe_plural(affected))) f' row{helpers.maybe_plural(affected)}')
return sql_result return sql_result
except sqlite3.OperationalError as e: except sqlite3.OperationalError as e:
sql_result = [] sql_result = []
@ -239,7 +239,7 @@ class DBConnection(object):
except sqlite3.DatabaseError as e: except sqlite3.DatabaseError as e:
if self.connection: if self.connection:
self.connection.rollback() self.connection.rollback()
logger.error(u'Fatal error executing query: ' + ex(e)) logger.error(f'Fatal error executing query: {ex(e)}')
raise raise
return sql_result return sql_result
@ -248,10 +248,10 @@ class DBConnection(object):
def action_error(e): def action_error(e):
if 'unable to open database file' in e.args[0] or 'database is locked' in e.args[0]: if 'unable to open database file' in e.args[0] or 'database is locked' in e.args[0]:
logger.log(u'DB error: ' + ex(e), logger.WARNING) logger.warning(f'DB error: {ex(e)}')
time.sleep(1) time.sleep(1)
return True return True
logger.log(u'DB error: ' + ex(e), logger.ERROR) logger.error(f'DB error: {ex(e)}')
def action(self, query, args=None): def action(self, query, args=None):
# type: (AnyStr, Optional[List, Tuple]) -> Optional[Union[List, sqlite3.Cursor]] # type: (AnyStr, Optional[List, Tuple]) -> Optional[Union[List, sqlite3.Cursor]]
@ -280,7 +280,7 @@ class DBConnection(object):
raise raise
attempt += 1 attempt += 1
except sqlite3.DatabaseError as e: except sqlite3.DatabaseError as e:
logger.log(u'Fatal error executing query: ' + ex(e), logger.ERROR) logger.error(f'Fatal error executing query: {ex(e)}')
raise raise
return sql_result return sql_result
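action_error returning True is the retry signal for transient sqlite states (locked database, unopenable file). The surrounding loop in action presumably looks something like this sketch; the attempt limit is an assumption:

    attempt = 0
    while True:
        try:
            sql_result = self.connection.execute(query, args or [])
            break
        except sqlite3.OperationalError as e:
            # action_error logs and sleeps, returning True only for retryable errors
            if not self.action_error(e) or 5 < attempt:
                raise
            attempt += 1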
@ -424,7 +424,7 @@ class DBSanityCheck(object):
def upgrade_database(connection, schema): def upgrade_database(connection, schema):
logger.log(u'Checking database structure...', logger.MESSAGE) logger.log('Checking database structure...', logger.MESSAGE)
connection.is_upgrading = False connection.is_upgrading = False
connection.new_db = 0 == connection.check_db_version() connection.new_db = 0 == connection.check_db_version()
_process_upgrade(connection, schema) _process_upgrade(connection, schema)
@ -438,16 +438,16 @@ def _pretty_name(class_name):
def _restore_database(filename, version): def _restore_database(filename, version):
logger.log(u'Restoring database before trying upgrade again') logger.log('Restoring database before trying upgrade again')
if not sickgear.helpers.restore_versioned_file(db_filename(filename=filename, suffix='v%s' % version), version): if not sickgear.helpers.restore_versioned_file(db_filename(filename=filename, suffix='v%s' % version), version):
logger.log_error_and_exit(u'Database restore failed, abort upgrading database') logger.log_error_and_exit('Database restore failed, abort upgrading database')
return False return False
return True return True
def _process_upgrade(connection, upgrade_class): def _process_upgrade(connection, upgrade_class):
instance = upgrade_class(connection) instance = upgrade_class(connection)
logger.log('Checking %s database upgrade' % _pretty_name(upgrade_class.__name__), logger.DEBUG) logger.debug('Checking %s database upgrade' % _pretty_name(upgrade_class.__name__))
if not instance.test(): if not instance.test():
connection.is_upgrading = True connection.is_upgrading = True
connection.upgrade_log(getattr(upgrade_class, 'pretty_name', None) or _pretty_name(upgrade_class.__name__)) connection.upgrade_log(getattr(upgrade_class, 'pretty_name', None) or _pretty_name(upgrade_class.__name__))
@ -471,9 +471,9 @@ def _process_upgrade(connection, upgrade_class):
else: else:
logger.log_error_and_exit('Database upgrade failed, can\'t determine old db version, not restoring.') logger.log_error_and_exit('Database upgrade failed, can\'t determine old db version, not restoring.')
logger.log('%s upgrade completed' % upgrade_class.__name__, logger.DEBUG) logger.debug('%s upgrade completed' % upgrade_class.__name__)
else: else:
logger.log('%s upgrade not required' % upgrade_class.__name__, logger.DEBUG) logger.debug('%s upgrade not required' % upgrade_class.__name__)
for upgradeSubClass in upgrade_class.__subclasses__(): for upgradeSubClass in upgrade_class.__subclasses__():
_process_upgrade(connection, upgradeSubClass) _process_upgrade(connection, upgradeSubClass)
@ -710,15 +710,15 @@ def migration_code(my_db):
db_version = my_db.check_db_version() db_version = my_db.check_db_version()
my_db.new_db = 0 == db_version my_db.new_db = 0 == db_version
logger.log(u'Detected database version: v%s' % db_version, logger.DEBUG) logger.debug(f'Detected database version: v{db_version}')
if not (db_version in schema): if not (db_version in schema):
if db_version == sickgear.mainDB.MAX_DB_VERSION: if db_version == sickgear.mainDB.MAX_DB_VERSION:
logger.log(u'Database schema is up-to-date, no upgrade required') logger.log('Database schema is up-to-date, no upgrade required')
elif 10000 > db_version: elif 10000 > db_version:
logger.log_error_and_exit(u'SickGear does not currently support upgrading from this database version') logger.log_error_and_exit('SickGear does not currently support upgrading from this database version')
else: else:
logger.log_error_and_exit(u'Invalid database version') logger.log_error_and_exit('Invalid database version')
else: else:
@ -733,13 +733,13 @@ def migration_code(my_db):
cleanup_old_db_backups(my_db.filename) cleanup_old_db_backups(my_db.filename)
except (BaseException, Exception) as e: except (BaseException, Exception) as e:
my_db.close() my_db.close()
logger.log(u'Failed to update database with error: %s attempting recovery...' % ex(e), logger.ERROR) logger.error(f'Failed to update database with error: {ex(e)} attempting recovery...')
if _restore_database(my_db.filename, db_version): if _restore_database(my_db.filename, db_version):
# initialize the main SB database # initialize the main SB database
logger.log_error_and_exit(u'Successfully restored database version: %s' % db_version) logger.log_error_and_exit(f'Successfully restored database version: {db_version}')
else: else:
logger.log_error_and_exit(u'Failed to restore database version: %s' % db_version) logger.log_error_and_exit(f'Failed to restore database version: {db_version}')
my_db.upgrade_log('Finished') my_db.upgrade_log('Finished')
@ -765,11 +765,11 @@ def backup_database(db_connection, filename, version):
logger.debug('new db, no backup required') logger.debug('new db, no backup required')
return return
logger.log(u'Backing up database before upgrade') logger.log('Backing up database before upgrade')
if not sickgear.helpers.backup_versioned_file(db_filename(filename), version): if not sickgear.helpers.backup_versioned_file(db_filename(filename), version):
logger.log_error_and_exit(u'Database backup failed, abort upgrading database') logger.log_error_and_exit('Database backup failed, abort upgrading database')
else: else:
logger.log(u'Proceeding with upgrade') logger.log('Proceeding with upgrade')
def get_rollback_module(): def get_rollback_module():
@ -836,7 +836,7 @@ def backup_all_dbs(target, compress=True, prefer_7z=True):
:return: success, message :return: success, message
""" """
if not make_path(target): if not make_path(target):
logger.log('Failed to create db backup dir', logger.ERROR) logger.error('Failed to create db backup dir')
return False, 'Failed to create db backup dir' return False, 'Failed to create db backup dir'
my_db = DBConnection('cache.db') my_db = DBConnection('cache.db')
last_backup = my_db.select('SELECT time FROM lastUpdate WHERE provider = ?', ['sickgear_db_backup']) last_backup = my_db.select('SELECT time FROM lastUpdate WHERE provider = ?', ['sickgear_db_backup'])

View file

@ -67,30 +67,33 @@ class FailedProcessor(LegacyFailedProcessor):
:return: success :return: success
:type: bool or None :type: bool or None
""" """
self._log(u'Failed download detected: (%s, %s)' % (self.nzb_name, self.dir_name)) self._log(f'Failed download detected: ({self.nzb_name}, {self.dir_name})')
release_name = show_name_helpers.determine_release_name(self.dir_name, self.nzb_name) release_name = show_name_helpers.determine_release_name(self.dir_name, self.nzb_name)
if None is release_name: if None is release_name:
self._log(u'Warning: unable to find a valid release name.', logger.WARNING) self._log('Warning: unable to find a valid release name.', logger.WARNING)
raise exceptions_helper.FailedProcessingFailed() raise exceptions_helper.FailedProcessingFailed()
try: try:
parser = NameParser(False, show_obj=self.show_obj, convert=True) parser = NameParser(False, show_obj=self.show_obj, convert=True)
parsed = parser.parse(release_name) parsed = parser.parse(release_name)
except InvalidNameException: except InvalidNameException:
self._log(u'Error: release name is invalid: ' + release_name, logger.DEBUG) self._log(f'Error: release name is invalid: {release_name}', logger.DEBUG)
raise exceptions_helper.FailedProcessingFailed() raise exceptions_helper.FailedProcessingFailed()
except InvalidShowException: except InvalidShowException:
self._log(u'Error: unable to parse release name %s into a valid show' % release_name, logger.DEBUG) self._log(f'Error: unable to parse release name {release_name} into a valid show', logger.DEBUG)
raise exceptions_helper.FailedProcessingFailed() raise exceptions_helper.FailedProcessingFailed()
logger.log(u"name_parser info: ", logger.DEBUG) for cur_msg in (
logger.log(u" - " + str(parsed.series_name), logger.DEBUG) 'name_parser info: ',
logger.log(u" - " + str(parsed.season_number), logger.DEBUG) f' - {parsed.series_name}',
logger.log(u" - " + str(parsed.episode_numbers), logger.DEBUG) f' - {parsed.season_number}',
logger.log(u" - " + str(parsed.extra_info), logger.DEBUG) f' - {parsed.episode_numbers}',
logger.log(u" - " + str(parsed.release_group), logger.DEBUG) f' - {parsed.extra_info}',
logger.log(u" - " + str(parsed.air_date), logger.DEBUG) f' - {parsed.release_group}',
f' - {parsed.air_date}'
):
logger.debug(cur_msg)
for episode in parsed.episode_numbers: for episode in parsed.episode_numbers:
segment = parsed.show_obj.get_episode(parsed.season_number, episode) segment = parsed.show_obj.get_episode(parsed.season_number, episode)

View file

@ -99,21 +99,20 @@ def add_failed(release):
sql_result = db_select('SELECT * FROM history t WHERE t.release=?', [release]) sql_result = db_select('SELECT * FROM history t WHERE t.release=?', [release])
if not any(sql_result): if not any(sql_result):
logger.log('Release not found in failed.db snatch history', logger.WARNING) logger.warning('Release not found in failed.db snatch history')
elif 1 < len(sql_result): elif 1 < len(sql_result):
logger.log('Multiple logged snatches found for release in failed.db', logger.WARNING) logger.warning('Multiple logged snatches found for release in failed.db')
sizes = len(set([x['size'] for x in sql_result])) sizes = len(set([x['size'] for x in sql_result]))
providers = len(set([x['provider'] for x in sql_result])) providers = len(set([x['provider'] for x in sql_result]))
if 1 == sizes: if 1 == sizes:
logger.log('However, they\'re all the same size. Continuing with found size', logger.WARNING) logger.warning('However, they\'re all the same size. Continuing with found size')
size = sql_result[0]['size'] size = sql_result[0]['size']
else: else:
logger.log( logger.warning(
'They also vary in size. Deleting logged snatches and recording this release with no size/provider', 'They also vary in size. Deleting logged snatches and recording this release with no size/provider')
logger.WARNING)
for cur_result in sql_result: for cur_result in sql_result:
remove_snatched(cur_result['release'], cur_result['size'], cur_result['provider']) remove_snatched(cur_result['release'], cur_result['size'], cur_result['provider'])
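Condensed, the disambiguation above is: one distinct size across the logged snatches means trust it, several mean none can be trusted. A sketch of that branch (the unknown-size sentinel is an assumption):

    sizes = {x['size'] for x in sql_result}
    if 1 == len(sizes):
        size = sql_result[0]['size']   # all snatches agree, keep the size
    else:
        for cur_result in sql_result:  # conflicting sizes, forget them all
            remove_snatched(cur_result['release'], cur_result['size'], cur_result['provider'])
        size = -1                      # assumed sentinel for 'unknown size'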
@ -165,7 +164,7 @@ def set_episode_failed(ep_obj):
ep_obj.save_to_db() ep_obj.save_to_db()
except EpisodeNotFoundException as e: except EpisodeNotFoundException as e:
logger.log('Unable to get episode, please set its status manually: %s' % ex(e), logger.WARNING) logger.warning('Unable to get episode, please set its status manually: %s' % ex(e))
def remove_failed(release): def remove_failed(release):
@ -237,13 +236,13 @@ def revert_episode(ep_obj):
else: else:
status_revert = WANTED status_revert = WANTED
logger.log('Episode not found in failed.db history. Setting it to WANTED', logger.WARNING) logger.warning('Episode not found in failed.db history. Setting it to WANTED')
ep_obj.status = status_revert ep_obj.status = status_revert
ep_obj.save_to_db() ep_obj.save_to_db()
except EpisodeNotFoundException as e: except EpisodeNotFoundException as e:
logger.log('Unable to create episode, please set its status manually: %s' % ex(e), logger.WARNING) logger.warning('Unable to create episode, please set its status manually: %s' % ex(e))
def find_old_status(ep_obj): def find_old_status(ep_obj):
@ -289,8 +288,7 @@ def find_release(ep_obj):
db_action('DELETE FROM history WHERE %s=? AND %s!=?' % ('`release`', '`date`'), [release, r['date']]) db_action('DELETE FROM history WHERE %s=? AND %s!=?' % ('`release`', '`date`'), [release, r['date']])
# Found a previously failed release # Found a previously failed release
logger.log('Found failed.db history release %sx%s: [%s]' % ( logger.debug(f'Found failed.db history release {ep_obj.season}x{ep_obj.episode}: [{release}]')
ep_obj.season, ep_obj.episode, release), logger.DEBUG)
else: else:
release = None release = None
provider = None provider = None

View file

@ -89,7 +89,7 @@ class GenericQueue(object):
my_db = db.DBConnection('cache.db') my_db = db.DBConnection('cache.db')
my_db.mass_action(cl) my_db.mass_action(cl)
except (BaseException, Exception) as e: except (BaseException, Exception) as e:
logger.log('Exception saving queue %s to db: %s' % (self.__class__.__name__, ex(e)), logger.ERROR) logger.error('Exception saving queue %s to db: %s' % (self.__class__.__name__, ex(e)))
def _clear_sql(self): def _clear_sql(self):
# type: (...) -> List[List] # type: (...) -> List[List]
@ -103,7 +103,7 @@ class GenericQueue(object):
my_db = db.DBConnection('cache.db') my_db = db.DBConnection('cache.db')
my_db.mass_action(item_sql) my_db.mass_action(item_sql)
except (BaseException, Exception) as e: except (BaseException, Exception) as e:
logger.log('Exception saving item %s to db: %s' % (item, ex(e)), logger.ERROR) logger.error('Exception saving item %s to db: %s' % (item, ex(e)))
def delete_item(self, item, finished_run=False): def delete_item(self, item, finished_run=False):
# type: (Union[QueueItem, CastQueueItem], bool) -> None # type: (Union[QueueItem, CastQueueItem], bool) -> None
@ -119,7 +119,7 @@ class GenericQueue(object):
my_db = db.DBConnection('cache.db') my_db = db.DBConnection('cache.db')
my_db.mass_action(item_sql) my_db.mass_action(item_sql)
except (BaseException, Exception) as e: except (BaseException, Exception) as e:
logger.log('Exception deleting item %s from db: %s' % (item, ex(e)), logger.ERROR) logger.error('Exception deleting item %s from db: %s' % (item, ex(e)))
def _get_item_sql(self, item): def _get_item_sql(self, item):
# type: (Union[QueueItem, CastQueueItem]) -> List[List] # type: (Union[QueueItem, CastQueueItem]) -> List[List]
@ -211,12 +211,12 @@ class GenericQueue(object):
my_db.mass_action(del_main_sql) my_db.mass_action(del_main_sql)
def pause(self): def pause(self):
logger.log(u'Pausing queue') logger.log('Pausing queue')
if self.lock: if self.lock:
self.min_priority = 999999999999 self.min_priority = 999999999999
def unpause(self): def unpause(self):
logger.log(u'Unpausing queue') logger.log('Unpausing queue')
with self.lock: with self.lock:
self.min_priority = 0 self.min_priority = 0
@ -258,7 +258,7 @@ class GenericQueue(object):
if 0 == len(self.events[event_type]): if 0 == len(self.events[event_type]):
del self.events[event_type] del self.events[event_type]
except (BaseException, Exception) as e: except (BaseException, Exception) as e:
logger.log('Error removing event method from queue: %s' % ex(e), logger.ERROR) logger.error('Error removing event method from queue: %s' % ex(e))
def execute_events(self, event_type, *args, **kwargs): def execute_events(self, event_type, *args, **kwargs):
# type: (int, Tuple, Dict) -> None # type: (int, Tuple, Dict) -> None
@ -267,7 +267,7 @@ class GenericQueue(object):
try: try:
event(*args, **kwargs) event(*args, **kwargs)
except (BaseException, Exception) as e: except (BaseException, Exception) as e:
logger.log('Error executing Event: %s' % ex(e), logger.ERROR) logger.error('Error executing Event: %s' % ex(e))
def run(self): def run(self):
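The event hooks in this class are a plain observer registry keyed by event type, which is why execute_events shields each callback. A standalone sketch of the same idea:

    events = {}

    def add_event(event_type, method):
        events.setdefault(event_type, []).append(method)

    def execute_events(event_type, *args, **kwargs):
        for event in events.get(event_type, []):
            try:
                event(*args, **kwargs)  # one failing listener must not stop the rest
            except Exception as e:
                print('Error executing Event: %s' % e)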

View file

@ -345,7 +345,7 @@ def list_media_files(path):
result = [] result = []
if path: if path:
if [direntry for direntry in scantree(path, include=[r'\.sickgearignore'], filter_kind=False, recurse=False)]: if [direntry for direntry in scantree(path, include=[r'\.sickgearignore'], filter_kind=False, recurse=False)]:
logger.log('Skipping folder "%s" because it contains ".sickgearignore"' % path, logger.DEBUG) logger.debug('Skipping folder "%s" because it contains ".sickgearignore"' % path)
else: else:
result = [direntry.path for direntry in scantree(path, exclude=['Extras'], filter_kind=False) result = [direntry.path for direntry in scantree(path, exclude=['Extras'], filter_kind=False)
if has_media_ext(direntry.name)] if has_media_ext(direntry.name)]
@ -405,8 +405,7 @@ def hardlink_file(src_file, dest_file):
link(src_file, dest_file) link(src_file, dest_file)
fix_set_group_id(dest_file) fix_set_group_id(dest_file)
except (BaseException, Exception) as e: except (BaseException, Exception) as e:
logger.log(u"Failed to create hardlink of %s at %s: %s. Copying instead." % (src_file, dest_file, ex(e)), logger.error(f'Failed to create hardlink of {src_file} at {dest_file}: {ex(e)}. Copying instead.')
logger.ERROR)
copy_file(src_file, dest_file) copy_file(src_file, dest_file)
@ -441,7 +440,7 @@ def move_and_symlink_file(src_file, dest_file):
fix_set_group_id(dest_file) fix_set_group_id(dest_file)
symlink(dest_file, src_file) symlink(dest_file, src_file)
except (BaseException, Exception): except (BaseException, Exception):
logger.log(u"Failed to create symlink of %s at %s. Copying instead" % (src_file, dest_file), logger.ERROR) logger.error(f'Failed to create symlink of {src_file} at {dest_file}. Copying instead')
copy_file(src_file, dest_file) copy_file(src_file, dest_file)
@ -488,10 +487,10 @@ def rename_ep_file(cur_path, new_path, old_path_length=0):
# move the file # move the file
try: try:
logger.log(u'Renaming file from %s to %s' % (cur_path, new_path)) logger.log(f'Renaming file from {cur_path} to {new_path}')
shutil.move(cur_path, new_path) shutil.move(cur_path, new_path)
except (OSError, IOError) as e: except (OSError, IOError) as e:
logger.log(u"Failed renaming " + cur_path + " to " + new_path + ": " + ex(e), logger.ERROR) logger.error(f'Failed renaming {cur_path} to {new_path}: {ex(e)}')
return False return False
# clean up any old folders that are empty # clean up any old folders that are empty
@ -513,7 +512,7 @@ def delete_empty_folders(check_empty_dir, keep_dir=None):
# treat check_empty_dir as empty when it only contains these items # treat check_empty_dir as empty when it only contains these items
ignore_items = [] ignore_items = []
logger.log(u"Trying to clean any empty folders under " + check_empty_dir) logger.log(f'Trying to clean any empty folders under {check_empty_dir}')
# as long as the folder exists and doesn't contain any files, delete it # as long as the folder exists and doesn't contain any files, delete it
while os.path.isdir(check_empty_dir) and check_empty_dir != keep_dir: while os.path.isdir(check_empty_dir) and check_empty_dir != keep_dir:
@ -523,13 +522,13 @@ def delete_empty_folders(check_empty_dir, keep_dir=None):
[check_file in ignore_items for check_file in check_files])): [check_file in ignore_items for check_file in check_files])):
# directory is empty or contains only ignore_items # directory is empty or contains only ignore_items
try: try:
logger.log(u"Deleting empty folder: " + check_empty_dir) logger.log(f"Deleting empty folder: {check_empty_dir}")
# need shutil.rmtree when ignore_items is really implemented # need shutil.rmtree when ignore_items is really implemented
os.rmdir(check_empty_dir) os.rmdir(check_empty_dir)
# do a Synology library update # do a Synology library update
notifiers.NotifierFactory().get('SYNOINDEX').deleteFolder(check_empty_dir) notifiers.NotifierFactory().get('SYNOINDEX').deleteFolder(check_empty_dir)
except OSError as e: except OSError as e:
logger.log(u"Unable to delete " + check_empty_dir + ": " + repr(e) + " / " + ex(e), logger.WARNING) logger.warning(f'Unable to delete {check_empty_dir}: {repr(e)} / {ex(e)}')
break break
check_empty_dir = os.path.dirname(check_empty_dir) check_empty_dir = os.path.dirname(check_empty_dir)
else: else:
@ -559,9 +558,7 @@ def get_absolute_number_from_season_and_episode(show_obj, season, episode):
if 1 == len(sql_result): if 1 == len(sql_result):
absolute_number = int(sql_result[0]["absolute_number"]) absolute_number = int(sql_result[0]["absolute_number"])
logger.log( logger.debug(f'Found absolute_number:{absolute_number} by {season}x{episode}')
"Found absolute_number:" + str(absolute_number) + " by " + str(season) + "x" + str(episode),
logger.DEBUG)
else: else:
logger.debug('No entries for absolute number in show: %s found using %sx%s' % logger.debug('No entries for absolute number in show: %s found using %sx%s' %
(show_obj.unique_name, str(season), str(episode))) (show_obj.unique_name, str(season), str(episode)))
@ -600,7 +597,7 @@ def sanitize_scene_name(name):
:rtype: AnyStr :rtype: AnyStr
""" """
if name: if name:
bad_chars = u',:()£\'!?\u2019' bad_chars = ',:()£\'!?\u2019'
# strip out any bad chars # strip out any bad chars
name = re.sub(r'[%s]' % bad_chars, '', name, flags=re.U) name = re.sub(r'[%s]' % bad_chars, '', name, flags=re.U)
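Concretely, the substitution builds a regex character class from bad_chars, so every listed character (including the curly apostrophe \u2019) is stripped in one pass. For example:

    import re
    bad_chars = ',:()£\'!?\u2019'
    print(re.sub(r'[%s]' % bad_chars, '', 'Who\'s Line: Is It? (UK)', flags=re.U))
    # -> 'Whos Line Is It UK'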
@ -654,7 +651,7 @@ def parse_xml(data, del_xmlns=False):
try: try:
parsed_xml = etree.fromstring(data) parsed_xml = etree.fromstring(data)
except (BaseException, Exception) as e: except (BaseException, Exception) as e:
logger.log(u"Error trying to parse xml data. Error: " + ex(e), logger.DEBUG) logger.debug(f"Error trying to parse xml data. Error: {ex(e)}")
parsed_xml = None parsed_xml = None
return parsed_xml return parsed_xml
@ -686,28 +683,28 @@ def backup_versioned_file(old_file, version):
except (BaseException, Exception): except (BaseException, Exception):
if os.path.isfile(new_file): if os.path.isfile(new_file):
continue continue
logger.log('could not rename old backup db file', logger.WARNING) logger.warning('could not rename old backup db file')
if not changed_old_db: if not changed_old_db:
raise Exception('can\'t create a backup of db') raise Exception('can\'t create a backup of db')
while not os.path.isfile(new_file): while not os.path.isfile(new_file):
if not os.path.isfile(old_file) or 0 == get_size(old_file): if not os.path.isfile(old_file) or 0 == get_size(old_file):
logger.log(u'No need to create backup', logger.DEBUG) logger.debug('No need to create backup')
break break
try: try:
logger.log(u'Trying to back up %s to %s' % (old_file, new_file), logger.DEBUG) logger.debug(f'Trying to back up {old_file} to {new_file}')
shutil.copy(old_file, new_file) shutil.copy(old_file, new_file)
logger.log(u'Backup done', logger.DEBUG) logger.debug('Backup done')
break break
except (BaseException, Exception) as e: except (BaseException, Exception) as e:
logger.log(u'Error while trying to back up %s to %s : %s' % (old_file, new_file, ex(e)), logger.WARNING) logger.warning(f'Error while trying to back up {old_file} to {new_file} : {ex(e)}')
num_tries += 1 num_tries += 1
time.sleep(3) time.sleep(3)
logger.log(u'Trying again.', logger.DEBUG) logger.debug('Trying again.')
if 3 <= num_tries: if 3 <= num_tries:
logger.log(u'Unable to back up %s to %s please do it manually.' % (old_file, new_file), logger.ERROR) logger.error(f'Unable to back up {old_file} to {new_file} please do it manually.')
return False return False
return True return True
@ -729,39 +726,34 @@ def restore_versioned_file(backup_file, version):
restore_file = new_file + '.' + 'v' + str(version) restore_file = new_file + '.' + 'v' + str(version)
if not os.path.isfile(new_file): if not os.path.isfile(new_file):
logger.log(u"Not restoring, " + new_file + " doesn't exist", logger.DEBUG) logger.debug(f'Not restoring, {new_file} doesn\'t exist')
return False return False
try: try:
logger.log( logger.debug(f'Trying to backup {new_file} to {new_file}.r{version} before restoring backup')
u"Trying to backup " + new_file + " to " + new_file + "." + "r" + str(version) + " before restoring backup",
logger.DEBUG)
shutil.move(new_file, new_file + '.' + 'r' + str(version)) shutil.move(new_file, new_file + '.' + 'r' + str(version))
except (BaseException, Exception) as e: except (BaseException, Exception) as e:
logger.log( logger.warning(f'Error while trying to backup DB file {restore_file} before proceeding with restore: {ex(e)}')
u"Error while trying to backup DB file " + restore_file + " before proceeding with restore: " + ex(e),
logger.WARNING)
return False return False
while not os.path.isfile(new_file): while not os.path.isfile(new_file):
if not os.path.isfile(restore_file): if not os.path.isfile(restore_file):
logger.log(u"Not restoring, " + restore_file + " doesn't exist", logger.DEBUG) logger.debug(f'Not restoring, {restore_file} doesn\'t exist')
break break
try: try:
logger.log(u"Trying to restore " + restore_file + " to " + new_file, logger.DEBUG) logger.debug(f'Trying to restore {restore_file} to {new_file}')
shutil.copy(restore_file, new_file) shutil.copy(restore_file, new_file)
logger.log(u"Restore done", logger.DEBUG) logger.debug('Restore done')
break break
except (BaseException, Exception) as e: except (BaseException, Exception) as e:
logger.log(u"Error while trying to restore " + restore_file + ": " + ex(e), logger.WARNING) logger.warning(f'Error while trying to restore {restore_file}: {ex(e)}')
num_tries += 1 num_tries += 1
time.sleep(1) time.sleep(1)
logger.log(u"Trying again.", logger.DEBUG) logger.debug('Trying again.')
if 10 <= num_tries: if 10 <= num_tries:
logger.log(u"Unable to restore " + restore_file + " to " + new_file + " please do it manually.", logger.error(f'Unable to restore {restore_file} to {new_file} please do it manually.')
logger.ERROR)
return False return False
return True return True
@ -963,7 +955,7 @@ def get_show(name, try_scene_exceptions=False):
if tvid and prodid: if tvid and prodid:
show_obj = find_show_by_id({tvid: prodid}) show_obj = find_show_by_id({tvid: prodid})
except (BaseException, Exception) as e: except (BaseException, Exception) as e:
logger.log(u'Error when attempting to find show: ' + name + ' in SickGear: ' + ex(e), logger.DEBUG) logger.debug(f'Error when attempting to find show: {name} in SickGear: {ex(e)}')
return show_obj return show_obj
@ -1051,8 +1043,9 @@ def clear_cache(force=False):
except OSError: except OSError:
dirty = True dirty = True
logger.log(u'%s from cache folder %s' % ((('Found items not removed', 'Found items removed')[not dirty], logger.log(
'No items found to remove')[None is dirty], sickgear.CACHE_DIR)) f'{(("Found items not removed", "Found items removed")[not dirty], "No items found to remove")[None is dirty]}'
f' from cache folder {sickgear.CACHE_DIR}')
def human(size): def human(size):
@ -1298,7 +1291,7 @@ def make_search_segment_html_string(segment, max_eps=5):
segment = [segment] segment = [segment]
if segment and len(segment) > max_eps: if segment and len(segment) > max_eps:
seasons = [x for x in set([x.season for x in segment])] seasons = [x for x in set([x.season for x in segment])]
seg_str = u'Season%s: ' % maybe_plural(len(seasons)) seg_str = f'Season{maybe_plural(len(seasons))}: '
divider = '' divider = ''
for x in seasons: for x in seasons:
eps = [str(s.episode) for s in segment if x == s.season] eps = [str(s.episode) for s in segment if x == s.season]
@ -1308,7 +1301,7 @@ def make_search_segment_html_string(segment, max_eps=5):
divider = ', ' divider = ', '
elif segment: elif segment:
episode_numbers = ['S%sE%s' % (str(x.season).zfill(2), str(x.episode).zfill(2)) for x in segment] episode_numbers = ['S%sE%s' % (str(x.season).zfill(2), str(x.episode).zfill(2)) for x in segment]
seg_str = u'Episode%s: %s' % (maybe_plural(len(episode_numbers)), ', '.join(episode_numbers)) seg_str = f'Episode{maybe_plural(len(episode_numbers))}: {", ".join(episode_numbers)}'
return seg_str return seg_str
@ -1394,7 +1387,7 @@ def should_delete_episode(status):
s = Quality.split_composite_status(status)[0] s = Quality.split_composite_status(status)[0]
if s not in SNATCHED_ANY + [DOWNLOADED, ARCHIVED, IGNORED]: if s not in SNATCHED_ANY + [DOWNLOADED, ARCHIVED, IGNORED]:
return True return True
logger.log('not safe to delete episode from db because of status: %s' % statusStrings[s], logger.DEBUG) logger.debug('not safe to delete episode from db because of status: %s' % statusStrings[s])
return False return False
@ -1573,7 +1566,7 @@ def count_files_dirs(base_dir):
try: try:
files = scandir(base_dir) files = scandir(base_dir)
except OSError as e: except OSError as e:
logger.log('Unable to count files %s / %s' % (repr(e), ex(e)), logger.WARNING) logger.warning('Unable to count files %s / %s' % (repr(e), ex(e)))
else: else:
for e in files: for e in files:
if e.is_file(): if e.is_file():
@ -1643,8 +1636,8 @@ def upgrade_new_naming():
try: try:
move_file(entry.path, new_name) move_file(entry.path, new_name)
except (BaseException, Exception) as e: except (BaseException, Exception) as e:
logger.log('Unable to rename %s to %s: %s / %s' logger.warning('Unable to rename %s to %s: %s / %s'
% (entry.path, new_name, repr(e), ex(e)), logger.WARNING) % (entry.path, new_name, repr(e), ex(e)))
else: else:
# clean up files without reference in db # clean up files without reference in db
try: try:
@ -1664,7 +1657,7 @@ def upgrade_new_naming():
try: try:
entries = scandir(entry.path) entries = scandir(entry.path)
except OSError as e: except OSError as e:
logger.log('Unable to stat dirs %s / %s' % (repr(e), ex(e)), logger.WARNING) logger.warning('Unable to stat dirs %s / %s' % (repr(e), ex(e)))
continue continue
for d_entry in entries: for d_entry in entries:
if d_entry.is_dir(): if d_entry.is_dir():
@ -1679,14 +1672,13 @@ def upgrade_new_naming():
try: try:
move_file(d_entry.path, new_dir_name) move_file(d_entry.path, new_dir_name)
except (BaseException, Exception) as e: except (BaseException, Exception) as e:
logger.log('Unable to rename %s to %s: %s / %s' % logger.warning(f'Unable to rename {d_entry.path} to {new_dir_name}:'
(d_entry.path, new_dir_name, repr(e), ex(e)), logger.WARNING) f' {repr(e)} / {ex(e)}')
if os.path.isdir(new_dir_name): if os.path.isdir(new_dir_name):
try: try:
f_n = filter(lambda fn: fn.is_file(), scandir(new_dir_name)) f_n = filter(lambda fn: fn.is_file(), scandir(new_dir_name))
except OSError as e: except OSError as e:
logger.log('Unable to rename %s / %s' % (repr(e), ex(e)), logger.warning('Unable to rename %s / %s' % (repr(e), ex(e)))
logger.WARNING)
else: else:
rename_args = [] rename_args = []
# noinspection PyTypeChecker # noinspection PyTypeChecker
@ -1697,8 +1689,8 @@ def upgrade_new_naming():
try: try:
move_file(*args) move_file(*args)
except (BaseException, Exception) as e: except (BaseException, Exception) as e:
logger.log('Unable to rename %s to %s: %s / %s' % logger.warning(f'Unable to rename {args[0]} to {args[1]}:'
(args[0], args[1], repr(e), ex(e)), logger.WARNING) f' {repr(e)} / {ex(e)}')
else: else:
try: try:
shutil.rmtree(d_entry.path) shutil.rmtree(d_entry.path)
@ -1754,11 +1746,11 @@ def normalise_chars(text):
:return: Text with entities replaced :return: Text with entities replaced
:rtype: AnyStr :rtype: AnyStr
""" """
result = text.replace(u'\u2010', u'-').replace(u'\u2011', u'-').replace(u'\u2012', u'-') \ result = text.replace('\u2010', '-').replace('\u2011', '-').replace('\u2012', '-') \
.replace(u'\u2013', u'-').replace(u'\u2014', u'-').replace(u'\u2015', u'-') \ .replace('\u2013', '-').replace('\u2014', '-').replace('\u2015', '-') \
.replace(u'\u2018', u"'").replace(u'\u2019', u"'") \ .replace('\u2018', "'").replace('\u2019', "'") \
.replace(u'\u201c', u'\"').replace(u'\u201d', u'\"') \ .replace('\u201c', '\"').replace('\u201d', '\"') \
.replace(u'\u0020', u' ').replace(u'\u00a0', u' ') .replace('\u0020', ' ').replace('\u00a0', ' ')
return result return result
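An equivalent single-pass form via str.translate, sketched here for comparison; note the original's '\u0020' to ' ' replace is a no-op (space to space):

    NORMALISE_MAP = str.maketrans({
        '\u2010': '-', '\u2011': '-', '\u2012': '-',  # hyphen variants
        '\u2013': '-', '\u2014': '-', '\u2015': '-',  # en/em/horizontal-bar dashes
        '\u2018': "'", '\u2019': "'",                 # curly single quotes
        '\u201c': '"', '\u201d': '"',                 # curly double quotes
        '\u00a0': ' ',                                # non-breaking space
    })

    def normalise_chars(text):
        return text.translate(NORMALISE_MAP)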

View file

@ -277,9 +277,9 @@ class ImageCache(object):
result = [] result = []
for filename in glob.glob(image_file): for filename in glob.glob(image_file):
result.append(os.path.isfile(filename) and filename) result.append(os.path.isfile(filename) and filename)
logger.log(u'Found cached %s' % filename, logger.DEBUG) logger.debug(f'Found cached {filename}')
not any(result) and logger.log(u'No cache for %s' % image_file, logger.DEBUG) not any(result) and logger.debug(f'No cache for {image_file}')
return any(result) return any(result)
def has_poster(self, tvid, prodid): def has_poster(self, tvid, prodid):
@ -365,7 +365,7 @@ class ImageCache(object):
:param is_binary: is data instead of path :param is_binary: is data instead of path
""" """
if not is_binary and not os.path.isfile(image): if not is_binary and not os.path.isfile(image):
logger.warning(u'File not found to determine image type of %s' % image) logger.warning(f'File not found to determine image type of {image}')
return return
if not image: if not image:
logger.warning('No Image Data to determine image type') logger.warning('No Image Data to determine image type')
@ -381,7 +381,7 @@ class ImageCache(object):
img_parser.parse_photoshop_content = False img_parser.parse_photoshop_content = False
img_metadata = extractMetadata(img_parser) img_metadata = extractMetadata(img_parser)
except (BaseException, Exception) as e: except (BaseException, Exception) as e:
logger.debug(u'Unable to extract metadata from %s, not using file. Error: %s' % (image, ex(e))) logger.debug(f'Unable to extract metadata from {image}, not using file. Error: {ex(e)}')
return return
if not img_metadata: if not img_metadata:
@ -389,7 +389,7 @@ class ImageCache(object):
msg = 'Image Data' msg = 'Image Data'
else: else:
msg = image msg = image
logger.debug(u'Unable to extract metadata from %s, not using file' % msg) logger.debug(f'Unable to extract metadata from {msg}, not using file')
return return
width = img_metadata.get('width') width = img_metadata.get('width')
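This method reads image dimensions with hachoir and then buckets by aspect ratio. A sketch of the extraction step; the ratio thresholds in the comment are assumptions:

    from hachoir.metadata import extractMetadata
    from hachoir.parser import createParser

    img_parser = createParser('poster.jpg')
    img_parser.parse_photoshop_content = False  # mirrors the flag set above
    img_metadata = extractMetadata(img_parser)
    img_ratio = img_metadata.get('width') / img_metadata.get('height')
    # roughly 0.68 (2:3) suggests poster, roughly 1.78 (16:9) suggests banner/fanart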
@ -441,9 +441,9 @@ class ImageCache(object):
logger.debug(msg_success % 'fanart') logger.debug(msg_success % 'fanart')
return self.FANART return self.FANART
logger.warning(u'Skipped image with fanart aspect ratio but less than 500 pixels wide') logger.warning('Skipped image with fanart aspect ratio but less than 500 pixels wide')
else: else:
logger.warning(u'Skipped image with useless ratio %s' % img_ratio) logger.warning(f'Skipped image with useless ratio {img_ratio}')
def should_refresh(self, image_type=None, provider='local'): def should_refresh(self, image_type=None, provider='local'):
# type: (int, Optional[AnyStr]) -> bool # type: (int, Optional[AnyStr]) -> bool
@ -522,13 +522,13 @@ class ImageCache(object):
dest_path = self.fanart_path(*id_args + (prefix,)).replace('.fanart.jpg', '.%s.fanart.jpg' % crc) dest_path = self.fanart_path(*id_args + (prefix,)).replace('.fanart.jpg', '.%s.fanart.jpg' % crc)
fanart_dir = [self._fanart_dir(*id_args)] fanart_dir = [self._fanart_dir(*id_args)]
else: else:
logger.log(u'Invalid cache image type: ' + str(img_type), logger.ERROR) logger.error(f'Invalid cache image type: {img_type}')
return False return False
for cache_dir in [self.shows_dir, self._thumbnails_dir(*id_args)] + fanart_dir: for cache_dir in [self.shows_dir, self._thumbnails_dir(*id_args)] + fanart_dir:
sg_helpers.make_path(cache_dir) sg_helpers.make_path(cache_dir)
logger.log(u'%sing from %s to %s' % (('Copy', 'Mov')[move_file], image_path, dest_path)) logger.log(f'{("Copy", "Mov")[move_file]}ing from {image_path} to {dest_path}')
# copy poster, banner as thumb, even if moved we need to duplicate the images # copy poster, banner as thumb, even if moved we need to duplicate the images
if img_type in (self.POSTER, self.BANNER) and dest_thumb_path: if img_type in (self.POSTER, self.BANNER) and dest_thumb_path:
sg_helpers.copy_file(image_path, dest_thumb_path) sg_helpers.copy_file(image_path, dest_thumb_path)
@ -574,7 +574,7 @@ class ImageCache(object):
img_type_name = 'banner_thumb' img_type_name = 'banner_thumb'
dest_path = self.banner_thumb_path(*arg_tvid_prodid) dest_path = self.banner_thumb_path(*arg_tvid_prodid)
else: else:
logger.log(u'Invalid cache image type: ' + str(img_type), logger.ERROR) logger.error(f'Invalid cache image type: {img_type}')
return False return False
# retrieve the image from TV info source using the generic metadata class # retrieve the image from TV info source using the generic metadata class
@ -625,10 +625,9 @@ class ImageCache(object):
if num_files > max_files: if num_files > max_files:
break break
total = len(glob.glob(dest_path)) total = len(glob.glob(dest_path))
logger.log(u'Saved %s fanart images%s. Cached %s of max %s fanart file%s' logger.log(f'Saved {success} fanart images'
% (success, f'{("", " from " + ", ".join([x for x in list(set(sources))]))[0 < len(sources)]}.'
('', ' from ' + ', '.join([x for x in list(set(sources))]))[0 < len(sources)], f' Cached {total} of max {sickgear.FANART_LIMIT} fanart file{sg_helpers.maybe_plural(total)}')
total, sickgear.FANART_LIMIT, sg_helpers.maybe_plural(total)))
return bool(success) return bool(success)
image_urls = metadata_generator.retrieve_show_image(img_type_name, show_obj, return_links=True, image_urls = metadata_generator.retrieve_show_image(img_type_name, show_obj, return_links=True,
@ -656,7 +655,7 @@ class ImageCache(object):
break break
if result: if result:
logger.log(u'Saved image type %s' % img_type_name) logger.log(f'Saved image type {img_type_name}')
return result return result
def fill_cache(self, show_obj, force=False): def fill_cache(self, show_obj, force=False):
@ -683,7 +682,7 @@ class ImageCache(object):
self.BANNER_THUMB: not self.has_banner_thumbnail(*arg_tvid_prodid) or force} self.BANNER_THUMB: not self.has_banner_thumbnail(*arg_tvid_prodid) or force}
if not any(itervalues(need_images)): if not any(itervalues(need_images)):
logger.log(u'%s: No new cache images needed. Done.' % show_obj.tvid_prodid) logger.log(f'{show_obj.tvid_prodid}: No new cache images needed. Done.')
return return
show_infos = GenericMetadata.gen_show_infos_dict(show_obj) show_infos = GenericMetadata.gen_show_infos_dict(show_obj)
@ -698,7 +697,7 @@ class ImageCache(object):
del (sickgear.FANART_RATINGS[show_obj.tvid_prodid]) del (sickgear.FANART_RATINGS[show_obj.tvid_prodid])
result = sg_helpers.remove_file(cache_dir, tree=True) result = sg_helpers.remove_file(cache_dir, tree=True)
if result: if result:
logger.log(u'%s cache file %s' % (result, cache_dir), logger.DEBUG) logger.debug(f'{result} cache file {cache_dir}')
try: try:
checked_files = [] checked_files = []
@ -718,7 +717,7 @@ class ImageCache(object):
if 0 == len(needed): if 0 == len(needed):
break break
logger.log(u'Checking for images from optional %s metadata' % cur_provider.name, logger.DEBUG) logger.debug(f'Checking for images from optional {cur_provider.name} metadata')
for all_meta_provs, path_file in needed: for all_meta_provs, path_file in needed:
checked_files += [path_file] checked_files += [path_file]
@ -735,9 +734,10 @@ class ImageCache(object):
if None is cur_file_type: if None is cur_file_type:
continue continue
logger.log(u'Checking if image %s (type %s needs metadata: %s)' logger.debug(f'Checking if image {cache_file_name} '
% (cache_file_name, str(cur_file_type), f'(type {str(cur_file_type)}'
('No', 'Yes')[True is need_images[cur_file_type]]), logger.DEBUG) f' needs metadata: {("No", "Yes")[True is need_images[cur_file_type]]}'
f')')
if need_images.get(cur_file_type): if need_images.get(cur_file_type):
need_images[cur_file_type] = ( need_images[cur_file_type] = (
@ -746,8 +746,8 @@ class ImageCache(object):
if self.FANART == cur_file_type and \ if self.FANART == cur_file_type and \
(not sickgear.FANART_LIMIT or sickgear.FANART_LIMIT < need_images[cur_file_type]): (not sickgear.FANART_LIMIT or sickgear.FANART_LIMIT < need_images[cur_file_type]):
continue continue
logger.log(u'Caching image found in the show directory to the image cache: %s, type %s' logger.debug(f'Caching image found in the show directory to the image cache: {cache_file_name},'
% (cache_file_name, cur_file_type), logger.DEBUG) f' type {cur_file_type}')
self._cache_image_from_file( self._cache_image_from_file(
cache_file_name, cur_file_type, cache_file_name, cur_file_type,
@ -755,7 +755,7 @@ class ImageCache(object):
isinstance(need_images[cur_file_type], bool)],)) isinstance(need_images[cur_file_type], bool)],))
except exceptions_helper.ShowDirNotFoundException: except exceptions_helper.ShowDirNotFoundException:
logger.log(u'Unable to search for images in show directory because it doesn\'t exist', logger.WARNING) logger.warning('Unable to search for images in show directory because it doesn\'t exist')
# download images from TV info sources # download images from TV info sources
for image_type, name_type in [[self.POSTER, 'Poster'], [self.BANNER, 'Banner'], [self.FANART, 'Fanart']]: for image_type, name_type in [[self.POSTER, 'Poster'], [self.BANNER, 'Banner'], [self.FANART, 'Fanart']]:
@ -763,12 +763,12 @@ class ImageCache(object):
if not max_files or max_files < need_images[image_type]: if not max_files or max_files < need_images[image_type]:
continue continue
logger.log(u'Seeing if we still need an image of type %s: %s' logger.debug(f'Seeing if we still need an image of type {name_type}:'
% (name_type, ('No', 'Yes')[True is need_images[image_type]]), logger.DEBUG) f' {("No", "Yes")[True is need_images[image_type]]}')
if need_images[image_type]: if need_images[image_type]:
file_num = (need_images[image_type] + 1, 1)[isinstance(need_images[image_type], bool)] file_num = (need_images[image_type] + 1, 1)[isinstance(need_images[image_type], bool)]
if file_num <= max_files: if file_num <= max_files:
self._cache_info_source_images(show_obj, image_type, file_num, max_files, force=force, self._cache_info_source_images(show_obj, image_type, file_num, max_files, force=force,
show_infos=show_infos) show_infos=show_infos)
logger.log(u'Done cache check') logger.log('Done cache check')
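
The conversions in this hunk follow one mechanical rule: logger.log(msg, logger.DEBUG) becomes logger.debug(msg), logger.log(msg, logger.WARNING) becomes logger.warning(msg), and u'...' % args becomes an f-string. A minimal sketch of the level-method shape the new calls assume (the wrapper internals here are illustrative, not SickGear's actual logger module):

    import logging

    _logger = logging.getLogger('sickgear')

    def log(msg, level=logging.INFO):
        # legacy entry point: the level rides along as a second argument
        _logger.log(level, msg)

    def debug(msg):
        # convenience wrappers let call sites drop the explicit level,
        # which is why the rewrite above is a one-to-one substitution
        log(msg, logging.DEBUG)

    def warning(msg):
        log(msg, logging.WARNING)

    def error(msg):
        log(msg, logging.ERROR)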

View file

@ -408,7 +408,7 @@ def load_mapped_ids(**kwargs):
cur_show_obj.ids = sickgear.indexermapper.map_indexers_to_show(cur_show_obj, **n_kargs) cur_show_obj.ids = sickgear.indexermapper.map_indexers_to_show(cur_show_obj, **n_kargs)
except (BaseException, Exception): except (BaseException, Exception):
logger.debug('Error loading mapped ids for show: %s' % cur_show_obj.unique_name) logger.debug('Error loading mapped ids for show: %s' % cur_show_obj.unique_name)
logger.log('Traceback: %s' % traceback.format_exc(), logger.ERROR) logger.error('Traceback: %s' % traceback.format_exc())
logger.log('TV info mappings loaded') logger.log('TV info mappings loaded')

View file

@ -51,7 +51,7 @@ MESSAGE = logging.INFO
DEBUG = logging.DEBUG DEBUG = logging.DEBUG
DB = 5 DB = 5
reverseNames = {u'ERROR': ERROR, u'WARNING': WARNING, u'INFO': MESSAGE, u'DEBUG': DEBUG, u'DB': DB} reverseNames = {'ERROR': ERROR, 'WARNING': WARNING, 'INFO': MESSAGE, 'DEBUG': DEBUG, 'DB': DB}
# suppress output with this handler # suppress output with this handler
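
A side note on why stripping the u prefix (as in reverseNames above) is safe: in Python 3 the prefix survives only for backward compatibility, and both literal forms produce the same str object. A one-line check:

    # u'' and '' literals are identical in Python 3; dropping the
    # prefix is a purely cosmetic change with no runtime effect
    assert u'DEBUG' == 'DEBUG' and type(u'DEBUG') is str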

View file

@ -150,31 +150,31 @@ class GenericMetadata(object):
def _has_show_metadata(self, show_obj): def _has_show_metadata(self, show_obj):
# type: (sickgear.tv.TVShow) -> bool # type: (sickgear.tv.TVShow) -> bool
result = os.path.isfile(self.get_show_file_path(show_obj)) result = os.path.isfile(self.get_show_file_path(show_obj))
logger.log(u"Checking if " + self.get_show_file_path(show_obj) + " exists: " + str(result), logger.DEBUG) logger.debug(f'Checking if {self.get_show_file_path(show_obj)} exists: {result}')
return result return result
def has_episode_metadata(self, ep_obj): def has_episode_metadata(self, ep_obj):
# type: (sickgear.tv.TVEpisode) -> bool # type: (sickgear.tv.TVEpisode) -> bool
result = os.path.isfile(self.get_episode_file_path(ep_obj)) result = os.path.isfile(self.get_episode_file_path(ep_obj))
logger.log(u"Checking if " + self.get_episode_file_path(ep_obj) + " exists: " + str(result), logger.DEBUG) logger.debug(f'Checking if {self.get_episode_file_path(ep_obj)} exists: {result}')
return result return result
def _has_fanart(self, show_obj): def _has_fanart(self, show_obj):
# type: (sickgear.tv.TVShow) -> bool # type: (sickgear.tv.TVShow) -> bool
result = os.path.isfile(self.get_fanart_path(show_obj)) result = os.path.isfile(self.get_fanart_path(show_obj))
logger.log(u"Checking if " + self.get_fanart_path(show_obj) + " exists: " + str(result), logger.DEBUG) logger.debug(f'Checking if {self.get_fanart_path(show_obj)} exists: {result}')
return result return result
def _has_poster(self, show_obj): def _has_poster(self, show_obj):
# type: (sickgear.tv.TVShow) -> bool # type: (sickgear.tv.TVShow) -> bool
result = os.path.isfile(self.get_poster_path(show_obj)) result = os.path.isfile(self.get_poster_path(show_obj))
logger.log(u"Checking if " + self.get_poster_path(show_obj) + " exists: " + str(result), logger.DEBUG) logger.debug(f'Checking if {self.get_poster_path(show_obj)} exists: {result}')
return result return result
def _has_banner(self, show_obj): def _has_banner(self, show_obj):
# type: (sickgear.tv.TVShow) -> bool # type: (sickgear.tv.TVShow) -> bool
result = os.path.isfile(self.get_banner_path(show_obj)) result = os.path.isfile(self.get_banner_path(show_obj))
logger.log(u"Checking if " + self.get_banner_path(show_obj) + " exists: " + str(result), logger.DEBUG) logger.debug(f'Checking if {self.get_banner_path(show_obj)} exists: {result}')
return result return result
def has_episode_thumb(self, ep_obj): def has_episode_thumb(self, ep_obj):
@ -182,7 +182,7 @@ class GenericMetadata(object):
location = self.get_episode_thumb_path(ep_obj) location = self.get_episode_thumb_path(ep_obj)
result = None is not location and os.path.isfile(location) result = None is not location and os.path.isfile(location)
if location: if location:
logger.log(u"Checking if " + location + " exists: " + str(result), logger.DEBUG) logger.debug(f'Checking if {location} exists: {result}')
return result return result
def _has_season_poster(self, show_obj, season): def _has_season_poster(self, show_obj, season):
@ -190,7 +190,7 @@ class GenericMetadata(object):
location = self.get_season_poster_path(show_obj, season) location = self.get_season_poster_path(show_obj, season)
result = None is not location and os.path.isfile(location) result = None is not location and os.path.isfile(location)
if location: if location:
logger.log(u"Checking if " + location + " exists: " + str(result), logger.DEBUG) logger.debug(f'Checking if {location} exists: {result}')
return result return result
def _has_season_banner(self, show_obj, season): def _has_season_banner(self, show_obj, season):
@ -198,21 +198,19 @@ class GenericMetadata(object):
location = self.get_season_banner_path(show_obj, season) location = self.get_season_banner_path(show_obj, season)
result = None is not location and os.path.isfile(location) result = None is not location and os.path.isfile(location)
if location: if location:
logger.log(u"Checking if " + location + " exists: " + str(result), logger.DEBUG) logger.debug(f'Checking if {location} exists: {result}')
return result return result
def _has_season_all_poster(self, show_obj): def _has_season_all_poster(self, show_obj):
# type: (sickgear.tv.TVShow) -> bool # type: (sickgear.tv.TVShow) -> bool
result = os.path.isfile(self.get_season_all_poster_path(show_obj)) result = os.path.isfile(self.get_season_all_poster_path(show_obj))
logger.log(u"Checking if " + self.get_season_all_poster_path(show_obj) + " exists: " + str(result), logger.debug(f'Checking if {self.get_season_all_poster_path(show_obj)} exists: {result}')
logger.DEBUG)
return result return result
def _has_season_all_banner(self, show_obj): def _has_season_all_banner(self, show_obj):
# type: (sickgear.tv.TVShow) -> bool # type: (sickgear.tv.TVShow) -> bool
result = os.path.isfile(self.get_season_all_banner_path(show_obj)) result = os.path.isfile(self.get_season_all_banner_path(show_obj))
logger.log(u"Checking if " + self.get_season_all_banner_path(show_obj) + " exists: " + str(result), logger.debug(f'Checking if {self.get_season_all_banner_path(show_obj)} exists: {result}')
logger.DEBUG)
return result return result
@staticmethod @staticmethod
@ -343,8 +341,7 @@ class GenericMetadata(object):
isinstance(getattr(fetched_show_info, 'data', None), (list, dict)) and isinstance(getattr(fetched_show_info, 'data', None), (list, dict)) and
'seriesname' in getattr(fetched_show_info, 'data', [])) and \ 'seriesname' in getattr(fetched_show_info, 'data', [])) and \
not hasattr(fetched_show_info, 'seriesname'): not hasattr(fetched_show_info, 'seriesname'):
logger.log(u'Show %s not found on %s ' % logger.warning(f'Show {show_obj.name} not found on {sickgear.TVInfoAPI(show_obj.tvid).name}')
(show_obj.name, sickgear.TVInfoAPI(show_obj.tvid).name), logger.WARNING)
return False return False
return True return True
@ -364,8 +361,8 @@ class GenericMetadata(object):
try: try:
result = self.write_show_file(show_obj) result = self.write_show_file(show_obj)
except BaseTVinfoError as e: except BaseTVinfoError as e:
logger.log('Unable to find useful show metadata for %s on %s: %s' % ( logger.warning(f'Unable to find useful show metadata for {self.name}'
self.name, sickgear.TVInfoAPI(show_obj.tvid).name, ex(e)), logger.WARNING) f' on {sickgear.TVInfoAPI(show_obj.tvid).name}: {ex(e)}')
return result return result
@ -373,21 +370,20 @@ class GenericMetadata(object):
# type: (sickgear.tv.TVEpisode, bool) -> bool # type: (sickgear.tv.TVEpisode, bool) -> bool
result = False result = False
if self.episode_metadata and ep_obj and (not self.has_episode_metadata(ep_obj) or force): if self.episode_metadata and ep_obj and (not self.has_episode_metadata(ep_obj) or force):
logger.log('Metadata provider %s creating episode metadata for %s' % (self.name, ep_obj.pretty_name()), logger.debug('Metadata provider %s creating episode metadata for %s' % (self.name, ep_obj.pretty_name()))
logger.DEBUG)
try: try:
result = self.write_ep_file(ep_obj) result = self.write_ep_file(ep_obj)
except BaseTVinfoError as e: except BaseTVinfoError as e:
logger.log('Unable to find useful episode metadata for %s on %s: %s' % ( logger.warning(f'Unable to find useful episode metadata for {self.name}'
self.name, sickgear.TVInfoAPI(ep_obj.show_obj.tvid).name, ex(e)), logger.WARNING) f' on {sickgear.TVInfoAPI(ep_obj.show_obj.tvid).name}: {ex(e)}')
return result return result
def update_show_indexer_metadata(self, show_obj): def update_show_indexer_metadata(self, show_obj):
# type: (sickgear.tv.TVShow) -> bool # type: (sickgear.tv.TVShow) -> bool
if self.show_metadata and show_obj and self._has_show_metadata(show_obj): if self.show_metadata and show_obj and self._has_show_metadata(show_obj):
logger.debug(u'Metadata provider %s updating show indexer metadata file for %s' % ( logger.debug(f'Metadata provider {self.name}'
self.name, show_obj.unique_name)) f' updating show indexer metadata file for {show_obj.unique_name}')
nfo_file_path = self.get_show_file_path(show_obj) nfo_file_path = self.get_show_file_path(show_obj)
with io.open(nfo_file_path, 'r', encoding='utf8') as xmlFileObj: with io.open(nfo_file_path, 'r', encoding='utf8') as xmlFileObj:
@ -419,29 +415,28 @@ class GenericMetadata(object):
def create_fanart(self, show_obj): def create_fanart(self, show_obj):
# type: (sickgear.tv.TVShow) -> bool # type: (sickgear.tv.TVShow) -> bool
if self.fanart and show_obj and not self._has_fanart(show_obj): if self.fanart and show_obj and not self._has_fanart(show_obj):
logger.debug(u'Metadata provider %s creating fanart for %s' % (self.name, show_obj.unique_name)) logger.debug(f'Metadata provider {self.name} creating fanart for {show_obj.unique_name}')
return self.save_fanart(show_obj) return self.save_fanart(show_obj)
return False return False
def create_poster(self, show_obj): def create_poster(self, show_obj):
# type: (sickgear.tv.TVShow) -> bool # type: (sickgear.tv.TVShow) -> bool
if self.poster and show_obj and not self._has_poster(show_obj): if self.poster and show_obj and not self._has_poster(show_obj):
logger.debug(u'Metadata provider %s creating poster for %s' % (self.name, show_obj.unique_name)) logger.debug(f'Metadata provider {self.name} creating poster for {show_obj.unique_name}')
return self.save_poster(show_obj) return self.save_poster(show_obj)
return False return False
def create_banner(self, show_obj): def create_banner(self, show_obj):
# type: (sickgear.tv.TVShow) -> bool # type: (sickgear.tv.TVShow) -> bool
if self.banner and show_obj and not self._has_banner(show_obj): if self.banner and show_obj and not self._has_banner(show_obj):
logger.debug(u'Metadata provider %s creating banner for %s' % (self.name, show_obj.unique_name)) logger.debug(f'Metadata provider {self.name} creating banner for {show_obj.unique_name}')
return self.save_banner(show_obj) return self.save_banner(show_obj)
return False return False
def create_episode_thumb(self, ep_obj): def create_episode_thumb(self, ep_obj):
# type: (sickgear.tv.TVEpisode) -> bool # type: (sickgear.tv.TVEpisode) -> bool
if self.episode_thumbnails and ep_obj and not self.has_episode_thumb(ep_obj): if self.episode_thumbnails and ep_obj and not self.has_episode_thumb(ep_obj):
logger.log(u"Metadata provider " + self.name + " creating episode thumbnail for " + ep_obj.pretty_name(), logger.debug(f'Metadata provider {self.name} creating episode thumbnail for {ep_obj.pretty_name()}')
logger.DEBUG)
return self.save_thumbnail(ep_obj) return self.save_thumbnail(ep_obj)
return False return False
@ -451,8 +446,7 @@ class GenericMetadata(object):
result = [] result = []
for season, _ in iteritems(show_obj.sxe_ep_obj): for season, _ in iteritems(show_obj.sxe_ep_obj):
if not self._has_season_poster(show_obj, season): if not self._has_season_poster(show_obj, season):
logger.debug(u'Metadata provider %s creating season posters for %s' % ( logger.debug(f'Metadata provider {self.name} creating season posters for {show_obj.unique_name}')
self.name, show_obj.unique_name))
result = result + [self.save_season_posters(show_obj, season)] result = result + [self.save_season_posters(show_obj, season)]
return all(result) return all(result)
return False return False
@ -463,8 +457,7 @@ class GenericMetadata(object):
result = [] result = []
for season, _ in iteritems(show_obj.sxe_ep_obj): for season, _ in iteritems(show_obj.sxe_ep_obj):
if not self._has_season_banner(show_obj, season): if not self._has_season_banner(show_obj, season):
logger.debug(u'Metadata provider %s creating season banners for %s' % ( logger.debug(f'Metadata provider {self.name} creating season banners for {show_obj.unique_name}')
self.name, show_obj.unique_name))
result = result + [self.save_season_banners(show_obj, season)] result = result + [self.save_season_banners(show_obj, season)]
return all(result) return all(result)
return False return False
@ -472,16 +465,14 @@ class GenericMetadata(object):
def create_season_all_poster(self, show_obj): def create_season_all_poster(self, show_obj):
# type: (sickgear.tv.TVShow) -> bool # type: (sickgear.tv.TVShow) -> bool
if self.season_all_poster and show_obj and not self._has_season_all_poster(show_obj): if self.season_all_poster and show_obj and not self._has_season_all_poster(show_obj):
logger.debug(u'Metadata provider %s creating season all posters for %s' % ( logger.debug(f'Metadata provider {self.name} creating season all posters for {show_obj.unique_name}')
self.name, show_obj.unique_name))
return self.save_season_all_poster(show_obj) return self.save_season_all_poster(show_obj)
return False return False
def create_season_all_banner(self, show_obj): def create_season_all_banner(self, show_obj):
# type: (sickgear.tv.TVShow) -> bool # type: (sickgear.tv.TVShow) -> bool
if self.season_all_banner and show_obj and not self._has_season_all_banner(show_obj): if self.season_all_banner and show_obj and not self._has_season_all_banner(show_obj):
logger.debug(u'Metadata provider %s creating season all banner for %s' % ( logger.debug(f'Metadata provider {self.name} creating season all banner for {show_obj.unique_name}')
self.name, show_obj.unique_name))
return self.save_season_all_banner(show_obj) return self.save_season_all_banner(show_obj)
return False return False
@ -557,7 +548,7 @@ class GenericMetadata(object):
nfo_file_path = self.get_show_file_path(show_obj) nfo_file_path = self.get_show_file_path(show_obj)
logger.log(u'Writing show metadata file: %s' % nfo_file_path, logger.DEBUG) logger.debug(f'Writing show metadata file: {nfo_file_path}')
return sg_helpers.write_file(nfo_file_path, data, xmltree=True, utf8=True) return sg_helpers.write_file(nfo_file_path, data, xmltree=True, utf8=True)
@ -586,7 +577,7 @@ class GenericMetadata(object):
nfo_file_path = self.get_episode_file_path(ep_obj) nfo_file_path = self.get_episode_file_path(ep_obj)
logger.log(u'Writing episode metadata file: %s' % nfo_file_path, logger.DEBUG) logger.debug(f'Writing episode metadata file: {nfo_file_path}')
return sg_helpers.write_file(nfo_file_path, data, xmltree=True, utf8=True) return sg_helpers.write_file(nfo_file_path, data, xmltree=True, utf8=True)
@ -603,14 +594,14 @@ class GenericMetadata(object):
file_path = self.get_episode_thumb_path(ep_obj) file_path = self.get_episode_thumb_path(ep_obj)
if not file_path: if not file_path:
logger.log(u"Unable to find a file path to use for this thumbnail, not generating it", logger.DEBUG) logger.debug('Unable to find a file path to use for this thumbnail, not generating it')
return False return False
thumb_url = self._get_episode_thumb_url(ep_obj) thumb_url = self._get_episode_thumb_url(ep_obj)
# if we can't find one then give up # if we can't find one then give up
if not thumb_url: if not thumb_url:
logger.log(u"No thumb is available for this episode, not creating a thumb", logger.DEBUG) logger.debug('No thumb is available for this episode, not creating a thumb')
return False return False
thumb_data = metadata_helpers.get_show_image(thumb_url, show_name=ep_obj.show_obj.name) thumb_data = metadata_helpers.get_show_image(thumb_url, show_name=ep_obj.show_obj.name)
@ -641,7 +632,7 @@ class GenericMetadata(object):
img_cache_type=sickgear.image_cache.ImageCache.FANART) img_cache_type=sickgear.image_cache.ImageCache.FANART)
if not fanart_data: if not fanart_data:
logger.log(u"No fanart image was retrieved, unable to write fanart", logger.DEBUG) logger.debug('No fanart image was retrieved, unable to write fanart')
return False return False
return self._write_image(fanart_data, fanart_path) return self._write_image(fanart_data, fanart_path)
@ -662,7 +653,7 @@ class GenericMetadata(object):
img_cache_type=sickgear.image_cache.ImageCache.POSTER) img_cache_type=sickgear.image_cache.ImageCache.POSTER)
if not poster_data: if not poster_data:
logger.log(u"No show poster image was retrieved, unable to write poster", logger.DEBUG) logger.debug('No show poster image was retrieved, unable to write poster')
return False return False
return self._write_image(poster_data, poster_path) return self._write_image(poster_data, poster_path)
@ -683,7 +674,7 @@ class GenericMetadata(object):
img_cache_type=sickgear.image_cache.ImageCache.BANNER) img_cache_type=sickgear.image_cache.ImageCache.BANNER)
if not banner_data: if not banner_data:
logger.log(u"No show banner image was retrieved, unable to write banner", logger.DEBUG) logger.debug('No show banner image was retrieved, unable to write banner')
return False return False
return self._write_image(banner_data, banner_path) return self._write_image(banner_data, banner_path)
@ -717,14 +708,13 @@ class GenericMetadata(object):
season_poster_file_path = self.get_season_poster_path(show_obj, cur_season) season_poster_file_path = self.get_season_poster_path(show_obj, cur_season)
if not season_poster_file_path: if not season_poster_file_path:
logger.log(u'Path for season ' + str(cur_season) + ' came back blank, skipping this season', logger.debug(f'Path for season {cur_season} came back blank, skipping this season')
logger.DEBUG)
continue continue
season_data = metadata_helpers.get_show_image(season_url, show_name=show_obj.name) season_data = metadata_helpers.get_show_image(season_url, show_name=show_obj.name)
if not season_data: if not season_data:
logger.log(u'No season poster data available, skipping this season', logger.DEBUG) logger.debug('No season poster data available, skipping this season')
continue continue
result = result + [self._write_image(season_data, season_poster_file_path)] result = result + [self._write_image(season_data, season_poster_file_path)]
@ -762,14 +752,13 @@ class GenericMetadata(object):
season_banner_file_path = self.get_season_banner_path(show_obj, cur_season) season_banner_file_path = self.get_season_banner_path(show_obj, cur_season)
if not season_banner_file_path: if not season_banner_file_path:
logger.log(u'Path for season ' + str(cur_season) + ' came back blank, skipping this season', logger.debug(f'Path for season {cur_season} came back blank, skipping this season')
logger.DEBUG)
continue continue
season_data = metadata_helpers.get_show_image(season_url, show_name=show_obj.name) season_data = metadata_helpers.get_show_image(season_url, show_name=show_obj.name)
if not season_data: if not season_data:
logger.log(u'No season banner data available, skipping this season', logger.DEBUG) logger.debug('No season banner data available, skipping this season')
continue continue
result = result + [self._write_image(season_data, season_banner_file_path)] result = result + [self._write_image(season_data, season_banner_file_path)]
@ -787,7 +776,7 @@ class GenericMetadata(object):
img_cache_type=sickgear.image_cache.ImageCache.POSTER) img_cache_type=sickgear.image_cache.ImageCache.POSTER)
if not poster_data: if not poster_data:
logger.log(u"No show poster image was retrieved, unable to write season all poster", logger.DEBUG) logger.debug('No show poster image was retrieved, unable to write season all poster')
return False return False
return self._write_image(poster_data, poster_path) return self._write_image(poster_data, poster_path)
@ -801,7 +790,7 @@ class GenericMetadata(object):
img_cache_type=sickgear.image_cache.ImageCache.BANNER) img_cache_type=sickgear.image_cache.ImageCache.BANNER)
if not banner_data: if not banner_data:
logger.log(u"No show banner image was retrieved, unable to write season all banner", logger.DEBUG) logger.debug('No show banner image was retrieved, unable to write season all banner')
return False return False
return self._write_image(banner_data, banner_path) return self._write_image(banner_data, banner_path)
@ -819,18 +808,18 @@ class GenericMetadata(object):
# don't bother overwriting it # don't bother overwriting it
if not force and os.path.isfile(image_path): if not force and os.path.isfile(image_path):
logger.log(u"Image already exists, not downloading", logger.DEBUG) logger.debug('Image already exists, not downloading')
return False return False
if not image_data: if not image_data:
logger.log(u"Unable to retrieve image, skipping", logger.WARNING) logger.warning('Unable to retrieve image, skipping')
return False return False
image_dir = os.path.dirname(image_path) image_dir = os.path.dirname(image_path)
try: try:
if not os.path.isdir(image_dir): if not os.path.isdir(image_dir):
logger.log(u"Metadata dir didn't exist, creating it at " + image_dir, logger.DEBUG) logger.debug(f'Metadata dir didn"t exist, creating it at {image_dir}')
os.makedirs(image_dir) os.makedirs(image_dir)
sg_helpers.chmod_as_parent(image_dir) sg_helpers.chmod_as_parent(image_dir)
@ -839,9 +828,7 @@ class GenericMetadata(object):
out_file.close() out_file.close()
sg_helpers.chmod_as_parent(image_path) sg_helpers.chmod_as_parent(image_path)
except IOError as e: except IOError as e:
logger.log( logger.error(f'Unable to write image to {image_path} - are you sure the show folder is writable? {ex(e)}')
u"Unable to write image to " + image_path + " - are you sure the show folder is writable? " + ex(e),
logger.ERROR)
return False return False
return True return True
@ -869,8 +856,8 @@ class GenericMetadata(object):
return t.get_show((show_obj.ids[tv_id]['id'], show_obj.prodid)[tv_src == show_obj.tvid], return t.get_show((show_obj.ids[tv_id]['id'], show_obj.prodid)[tv_src == show_obj.tvid],
load_episodes=False, banners=True, posters=True, fanart=True, language=show_obj.lang) load_episodes=False, banners=True, posters=True, fanart=True, language=show_obj.lang)
except (BaseTVinfoError, IOError) as e: except (BaseTVinfoError, IOError) as e:
logger.log(u"Unable to look up show on " + sickgear.TVInfoAPI( logger.warning(f'Unable to look up show on {sickgear.TVInfoAPI(tv_id).name},'
tv_id).name + ", not downloading images: " + ex(e), logger.WARNING) f' not downloading images: {ex(e)}')
# todo: when tmdb is added as tv source remove the hardcoded TVINFO_TMDB # todo: when tmdb is added as tv source remove the hardcoded TVINFO_TMDB
for tv_src in list(OrderedDict.fromkeys([show_obj.tvid] + list(sickgear.TVInfoAPI().search_sources) + for tv_src in list(OrderedDict.fromkeys([show_obj.tvid] + list(sickgear.TVInfoAPI().search_sources) +
@ -1042,8 +1029,8 @@ class GenericMetadata(object):
image_type = 'fanart' image_type = 'fanart'
if image_type not in ('poster', 'banner', 'fanart', 'poster_thumb', 'banner_thumb'): if image_type not in ('poster', 'banner', 'fanart', 'poster_thumb', 'banner_thumb'):
logger.log(u"Invalid image type " + str(image_type) + ", couldn't find it in the " + sickgear.TVInfoAPI( logger.error(f'Invalid image type {image_type}, couldn\'t find it in the'
show_obj.tvid).name + " object", logger.ERROR) f' {sickgear.TVInfoAPI(show_obj.tvid).name} object')
return return
image_urls = self._retrieve_image_urls(show_obj, image_type, show_infos) image_urls = self._retrieve_image_urls(show_obj, image_type, show_infos)
@ -1094,8 +1081,8 @@ class GenericMetadata(object):
t = sickgear.TVInfoAPI(show_obj.tvid).setup(**tvinfo_config) t = sickgear.TVInfoAPI(show_obj.tvid).setup(**tvinfo_config)
tvinfo_obj_show = t[show_obj.prodid] tvinfo_obj_show = t[show_obj.prodid]
except (BaseTVinfoError, IOError) as e: except (BaseTVinfoError, IOError) as e:
logger.log(u'Unable to look up show on ' + sickgear.TVInfoAPI( logger.warning(f'Unable to look up show on {sickgear.TVInfoAPI(show_obj.tvid).name},'
show_obj.tvid).name + ', not downloading images: ' + ex(e), logger.WARNING) f' not downloading images: {ex(e)}')
return result return result
if not self._valid_show(tvinfo_obj_show, show_obj): if not self._valid_show(tvinfo_obj_show, show_obj):
@ -1124,10 +1111,10 @@ class GenericMetadata(object):
metadata_path = os.path.join(folder, self._show_metadata_filename) metadata_path = os.path.join(folder, self._show_metadata_filename)
if not os.path.isdir(folder) or not os.path.isfile(metadata_path): if not os.path.isdir(folder) or not os.path.isfile(metadata_path):
logger.log(u"Can't load the metadata file from " + repr(metadata_path) + ", it doesn't exist", logger.DEBUG) logger.debug(f'Can\'t load the metadata file from {repr(metadata_path)}, it doesn\'t exist')
return empty_return return empty_return
logger.log(u"Loading show info from metadata file in " + folder, logger.DEBUG) logger.debug(f'Loading show info from metadata file in {folder}')
try: try:
with io.open(metadata_path, 'r', encoding='utf8') as xmlFileObj: with io.open(metadata_path, 'r', encoding='utf8') as xmlFileObj:
@ -1138,11 +1125,9 @@ class GenericMetadata(object):
show_xml.findtext('tvdbid'), show_xml.findtext('tvdbid'),
show_xml.findtext('id'), show_xml.findtext('id'),
show_xml.findtext('indexer'))): show_xml.findtext('indexer'))):
logger.log(u"Invalid info in tvshow.nfo (missing name or id):" logger.log(f'Invalid info in tvshow.nfo (missing name or id):'
+ str(show_xml.findtext('title')) + ' ' f'{show_xml.findtext("title")} {show_xml.findtext("indexer")} '
+ str(show_xml.findtext('indexer')) + ' ' f'{show_xml.findtext("tvdbid")} {show_xml.findtext("id")}')
+ str(show_xml.findtext('tvdbid')) + ' '
+ str(show_xml.findtext('id')))
return empty_return return empty_return
name = show_xml.findtext('title') name = show_xml.findtext('title')
@ -1178,17 +1163,15 @@ class GenericMetadata(object):
except (BaseException, Exception): except (BaseException, Exception):
pass pass
else: else:
logger.log(u"Empty <id> or <tvdbid> field in NFO, unable to find a ID", logger.WARNING) logger.warning('Empty <id> or <tvdbid> field in NFO, unable to find a ID')
return empty_return return empty_return
if None is prodid: if None is prodid:
logger.log(u"Invalid Show ID (%s), not using metadata file" % prodid, logger.WARNING) logger.warning(f'Invalid Show ID (%s), not using metadata file {prodid}')
return empty_return return empty_return
except (BaseException, Exception) as e: except (BaseException, Exception) as e:
logger.log( logger.warning(f'There was an error parsing your existing metadata file: "{metadata_path}" error: {ex(e)}')
u"There was an error parsing your existing metadata file: '" + metadata_path + "' error: " + ex(e),
logger.WARNING)
return empty_return return empty_return
return tvid, prodid, name return tvid, prodid, name
@ -1202,7 +1185,7 @@ class GenericMetadata(object):
except (BaseException, Exception): except (BaseException, Exception):
pass pass
logger.log(u'Could not find any %s images on Fanart.tv for %s' % (image_type, show_obj.name), logger.DEBUG) logger.debug(f'Could not find any {image_type} images on Fanart.tv for {show_obj.name}')
@staticmethod @staticmethod
def _fanart_urls(tvdb_id, image_type='banner', lang='en', thumb=False): def _fanart_urls(tvdb_id, image_type='banner', lang='en', thumb=False):
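
Many replacements in this file split one message across adjacent f-string literals, relying on Python's compile-time concatenation of neighbouring string literals. A small standalone check (names and values here are illustrative):

    name, err = 'TVDB', 'timeout'
    # adjacent literals are joined at compile time into a single str,
    # so a long f-string can be wrapped across lines without '+'
    msg = (f'Unable to look up show on {name},'
           f' not downloading images: {err}')
    assert msg == 'Unable to look up show on TVDB, not downloading images: timeout'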

View file

@ -42,7 +42,7 @@ def get_show_image(url, img_num=None, show_name=None, supress_log=False):
# if they provided a fanart number try to use it instead # if they provided a fanart number try to use it instead
temp_url = url if None is img_num else url.split('-')[0] + '-' + str(img_num) + '.jpg' temp_url = url if None is img_num else url.split('-')[0] + '-' + str(img_num) + '.jpg'
logger.log(u'Fetching image from ' + temp_url, logger.DEBUG) logger.debug(f'Fetching image from {temp_url}')
from sickgear import FLARESOLVERR_HOST, MEMCACHE from sickgear import FLARESOLVERR_HOST, MEMCACHE
MEMCACHE.setdefault('cookies', {}) MEMCACHE.setdefault('cookies', {})
@ -51,8 +51,8 @@ def get_show_image(url, img_num=None, show_name=None, supress_log=False):
if None is image_data: if None is image_data:
if supress_log: if supress_log:
return return
logger.log('There was an error trying to retrieve the image%s, aborting' % logger.warning(f'There was an error trying to retrieve the image'
('', ' for show: %s' % show_name)[None is not show_name], logger.WARNING) f'{("", " for show: %s" % show_name)[None is not show_name]}, aborting')
return return
return image_data return image_data
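
The ('', ' for show: %s' % show_name)[None is not show_name] expression above is the codebase's tuple-indexing conditional: a bool selects element 0 (False) or 1 (True) of a two-tuple. Unlike a true conditional expression, both elements are evaluated eagerly. A standalone illustration with a hypothetical value:

    show_name = 'Example Show'  # hypothetical value
    # False -> index 0 (no suffix), True -> index 1 (suffix included)
    suffix = ('', ' for show: %s' % show_name)[None is not show_name]
    assert suffix == ' for show: Example Show'

    show_name = None
    # both tuple elements are still built, but index 0 is selected
    assert ('', ' for show: %s' % show_name)[None is not show_name] == ''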

View file

@ -127,13 +127,11 @@ class KODIMetadata(generic.GenericMetadata):
try: try:
show_info = t[int(show_id)] show_info = t[int(show_id)]
except BaseTVinfoShownotfound as e: except BaseTVinfoShownotfound as e:
logger.log('Unable to find show with id %s on %s, skipping it' % (show_id, sickgear.TVInfoAPI( logger.error(f'Unable to find show with id {show_id} on {sickgear.TVInfoAPI(show_obj.tvid).name},'
show_obj.tvid).name), logger.ERROR) f' skipping it')
raise e raise e
except BaseTVinfoError as e: except BaseTVinfoError as e:
logger.log( logger.error(f'{sickgear.TVInfoAPI(show_obj.tvid).name} is down, can\'t use its data to add this show')
'%s is down, can\'t use its data to add this show' % sickgear.TVInfoAPI(show_obj.tvid).name,
logger.ERROR)
raise e raise e
if not self._valid_show(show_info, show_obj): if not self._valid_show(show_info, show_obj):
@ -141,8 +139,8 @@ class KODIMetadata(generic.GenericMetadata):
# check for title and id # check for title and id
if None is getattr(show_info, 'seriesname', None) or None is getattr(show_info, 'id', None): if None is getattr(show_info, 'seriesname', None) or None is getattr(show_info, 'id', None):
logger.log('Incomplete info for show with id %s on %s, skipping it' % (show_id, sickgear.TVInfoAPI( logger.error(f'Incomplete info for show with id {show_id} on {sickgear.TVInfoAPI(show_obj.tvid).name},'
show_obj.tvid).name), logger.ERROR) f' skipping it')
return False return False
title = etree.SubElement(tv_node, 'title') title = etree.SubElement(tv_node, 'title')
@ -171,8 +169,8 @@ class KODIMetadata(generic.GenericMetadata):
uniqueid = etree.SubElement(tv_node, 'uniqueid', **kwargs) uniqueid = etree.SubElement(tv_node, 'uniqueid', **kwargs)
uniqueid.text = '%s%s' % (('', 'tt')[TVINFO_IMDB == tvid], mid) uniqueid.text = '%s%s' % (('', 'tt')[TVINFO_IMDB == tvid], mid)
if not has_id: if not has_id:
logger.log('Incomplete info for show with id %s on %s, skipping it' % (show_id, sickgear.TVInfoAPI( logger.error(f'Incomplete info for show with id {show_id} on {sickgear.TVInfoAPI(show_obj.tvid).name},'
show_obj.tvid).name), logger.ERROR) f' skipping it')
return False return False
ratings = etree.SubElement(tv_node, 'ratings') ratings = etree.SubElement(tv_node, 'ratings')
@ -235,7 +233,7 @@ class KODIMetadata(generic.GenericMetadata):
nfo_file_path = self.get_show_file_path(show_obj) nfo_file_path = self.get_show_file_path(show_obj)
logger.log(u'Writing Kodi metadata file: %s' % nfo_file_path, logger.DEBUG) logger.debug(f'Writing Kodi metadata file: {nfo_file_path}')
data = '<?xml version="1.0" encoding="UTF-8"?>\n%s' % data data = '<?xml version="1.0" encoding="UTF-8"?>\n%s' % data
return sg_helpers.write_file(nfo_file_path, data, utf8=True) return sg_helpers.write_file(nfo_file_path, data, utf8=True)
@ -261,7 +259,7 @@ class KODIMetadata(generic.GenericMetadata):
nfo_file_path = self.get_episode_file_path(ep_obj) nfo_file_path = self.get_episode_file_path(ep_obj)
logger.log(u'Writing episode metadata file: %s' % nfo_file_path, logger.DEBUG) logger.debug(f'Writing episode metadata file: {nfo_file_path}')
return sg_helpers.write_file(nfo_file_path, data, xmltree=True, xml_header=True, utf8=True) return sg_helpers.write_file(nfo_file_path, data, xmltree=True, xml_header=True, utf8=True)
@ -292,8 +290,8 @@ class KODIMetadata(generic.GenericMetadata):
except BaseTVinfoShownotfound as e: except BaseTVinfoShownotfound as e:
raise exceptions_helper.ShowNotFoundException(ex(e)) raise exceptions_helper.ShowNotFoundException(ex(e))
except BaseTVinfoError as e: except BaseTVinfoError as e:
logger.log('Unable to connect to %s while creating meta files - skipping - %s' % (sickgear.TVInfoAPI( logger.error(f'Unable to connect to {sickgear.TVInfoAPI(ep_obj.show_obj.tvid).name}'
ep_obj.show_obj.tvid).name, ex(e)), logger.ERROR) f' while creating meta files - skipping - {ex(e)}')
return return
if not self._valid_show(show_info, ep_obj.show_obj): if not self._valid_show(show_info, ep_obj.show_obj):
@ -318,10 +316,10 @@ class KODIMetadata(generic.GenericMetadata):
ep_info['firstaired'] = str(datetime.date.fromordinal(1)) ep_info['firstaired'] = str(datetime.date.fromordinal(1))
if None is getattr(ep_info, 'episodename', None): if None is getattr(ep_info, 'episodename', None):
logger.log(u'Not generating nfo because the episode has no title', logger.DEBUG) logger.debug('Not generating nfo because the episode has no title')
return None return None
logger.log('Creating metadata for episode %sx%s' % (ep_obj.season, ep_obj.episode), logger.DEBUG) logger.debug('Creating metadata for episode %sx%s' % (ep_obj.season, ep_obj.episode))
if 1 < len(ep_obj_list_to_write): if 1 < len(ep_obj_list_to_write):
ep_node = etree.SubElement(root_node, 'episodedetails') ep_node = etree.SubElement(root_node, 'episodedetails')

View file

@ -127,10 +127,10 @@ class Mede8erMetadata(mediabrowser.MediaBrowserMetadata):
try: try:
show_info = t[int(show_obj.prodid)] show_info = t[int(show_obj.prodid)]
except BaseTVinfoShownotfound as e: except BaseTVinfoShownotfound as e:
logger.log(u'Unable to find show with id ' + str(show_obj.prodid) + ' on tvdb, skipping it', logger.ERROR) logger.error(f'Unable to find show with id {show_obj.prodid} on tvdb, skipping it')
raise e raise e
except BaseTVinfoError as e: except BaseTVinfoError as e:
logger.log(u'TVDB is down, can\'t use its data to make the NFO', logger.ERROR) logger.error('TVDB is down, can\'t use its data to make the NFO')
raise e raise e
if not self._valid_show(show_info, show_obj): if not self._valid_show(show_info, show_obj):
@ -142,12 +142,12 @@ class Mede8erMetadata(mediabrowser.MediaBrowserMetadata):
or '' == show_info['seriesname'] \ or '' == show_info['seriesname'] \
or None is show_info['id'] \ or None is show_info['id'] \
or '' == show_info['id']: or '' == show_info['id']:
logger.log('Incomplete info for show with id %s on %s, skipping it' % logger.error(f'Incomplete info for show with id {show_obj.prodid}'
(show_obj.prodid, sickgear.TVInfoAPI(show_obj.tvid).name), logger.ERROR) f' on {sickgear.TVInfoAPI(show_obj.tvid).name}, skipping it')
return False return False
except BaseTVinfoAttributenotfound: except BaseTVinfoAttributenotfound:
logger.log('Incomplete info for show with id %s on %s, skipping it' % logger.error(f'Incomplete info for show with id {show_obj.prodid}'
(show_obj.prodid, sickgear.TVInfoAPI(show_obj.tvid).name), logger.ERROR) f' on {sickgear.TVInfoAPI(show_obj.tvid).name}, skipping it')
return False return False
SeriesName = etree.SubElement(tv_node, 'title') SeriesName = etree.SubElement(tv_node, 'title')
@ -241,8 +241,8 @@ class Mede8erMetadata(mediabrowser.MediaBrowserMetadata):
except BaseTVinfoShownotfound as e: except BaseTVinfoShownotfound as e:
raise exceptions_helper.ShowNotFoundException(ex(e)) raise exceptions_helper.ShowNotFoundException(ex(e))
except BaseTVinfoError as e: except BaseTVinfoError as e:
logger.log('Unable to connect to %s while creating meta files - skipping - %s' % logger.error(f'Unable to connect to {sickgear.TVInfoAPI(ep_obj.show_obj.tvid).name}'
(sickgear.TVInfoAPI(ep_obj.show_obj.tvid).name, ex(e)), logger.ERROR) f' while creating meta files - skipping - {ex(e)}')
return False return False
if not self._valid_show(show_info, ep_obj.show_obj): if not self._valid_show(show_info, ep_obj.show_obj):
@ -261,8 +261,8 @@ class Mede8erMetadata(mediabrowser.MediaBrowserMetadata):
try: try:
ep_info = show_info[cur_ep_obj.season][cur_ep_obj.episode] ep_info = show_info[cur_ep_obj.season][cur_ep_obj.episode]
except (BaseException, Exception): except (BaseException, Exception):
logger.log(u'Unable to find episode %sx%s on tvdb... has it been removed? Should I delete from db?' % logger.log(f'Unable to find episode {cur_ep_obj.season}x{cur_ep_obj.episode} on tvdb...'
(cur_ep_obj.season, cur_ep_obj.episode)) f' has it been removed? Should it be deleted from the db?')
return None return None
if cur_ep_obj == ep_obj: if cur_ep_obj == ep_obj:

View file

@ -123,7 +123,7 @@ class MediaBrowserMetadata(generic.GenericMetadata):
metadata_dir_name = os.path.join(os.path.dirname(ep_obj.location), 'metadata') metadata_dir_name = os.path.join(os.path.dirname(ep_obj.location), 'metadata')
xml_file_path = os.path.join(metadata_dir_name, xml_file_name) xml_file_path = os.path.join(metadata_dir_name, xml_file_name)
else: else:
logger.log(u"Episode location doesn't exist: " + str(ep_obj.location), logger.DEBUG) logger.debug(f'Episode location doesn\'t exist: {ep_obj.location}')
return '' return ''
return xml_file_path return xml_file_path
@ -175,10 +175,10 @@ class MediaBrowserMetadata(generic.GenericMetadata):
break break
if not season_dir: if not season_dir:
logger.log(u"Unable to find a season dir for season " + str(season), logger.DEBUG) logger.debug(f'Unable to find a season dir for season {season}')
return None return None
logger.log(u"Using " + str(season_dir) + "/folder.jpg as season dir for season " + str(season), logger.DEBUG) logger.debug(f'Using {season_dir}/folder.jpg as season dir for season {season}')
return os.path.join(show_obj.location, season_dir, 'folder.jpg') return os.path.join(show_obj.location, season_dir, 'folder.jpg')
@ -215,10 +215,10 @@ class MediaBrowserMetadata(generic.GenericMetadata):
break break
if not season_dir: if not season_dir:
logger.log(u"Unable to find a season dir for season " + str(season), logger.DEBUG) logger.debug(f'Unable to find a season dir for season {season}')
return None return None
logger.log(u"Using " + str(season_dir) + "/banner.jpg as season dir for season " + str(season), logger.DEBUG) logger.debug(f'Using {season_dir}/banner.jpg as season dir for season {season}')
return os.path.join(show_obj.location, season_dir, 'banner.jpg') return os.path.join(show_obj.location, season_dir, 'banner.jpg')
@ -252,12 +252,11 @@ class MediaBrowserMetadata(generic.GenericMetadata):
try: try:
show_info = t[int(show_obj.prodid)] show_info = t[int(show_obj.prodid)]
except BaseTVinfoShownotfound as e: except BaseTVinfoShownotfound as e:
logger.log("Unable to find show with id %s on %s, skipping it" % logger.error(f'Unable to find show with id {show_obj.prodid} '
(show_obj.prodid, sickgear.TVInfoAPI(show_obj.tvid).name), logger.ERROR) f'on {sickgear.TVInfoAPI(show_obj.tvid).name}, skipping it')
raise e raise e
except BaseTVinfoError as e: except BaseTVinfoError as e:
logger.log("%s is down, can't use its data to make the NFO" % sickgear.TVInfoAPI(show_obj.tvid).name, logger.error('%s is down, can\'t use its data to make the NFO' % sickgear.TVInfoAPI(show_obj.tvid).name)
logger.ERROR)
raise e raise e
if not self._valid_show(show_info, show_obj): if not self._valid_show(show_info, show_obj):
@ -265,8 +264,8 @@ class MediaBrowserMetadata(generic.GenericMetadata):
# check for title and id # check for title and id
if None is getattr(show_info, 'seriesname', None) or None is getattr(show_info, 'id', None): if None is getattr(show_info, 'seriesname', None) or None is getattr(show_info, 'id', None):
logger.log("Incomplete info for show with id %s on %s, skipping it" % logger.error(f'Incomplete info for show with id {show_obj.prodid}'
(show_obj.prodid, sickgear.TVInfoAPI(show_obj.tvid).name), logger.ERROR) f' on {sickgear.TVInfoAPI(show_obj.tvid).name}, skipping it')
return False return False
prodid = etree.SubElement(tv_node, "id") prodid = etree.SubElement(tv_node, "id")
@ -415,8 +414,8 @@ class MediaBrowserMetadata(generic.GenericMetadata):
except BaseTVinfoShownotfound as e: except BaseTVinfoShownotfound as e:
raise exceptions_helper.ShowNotFoundException(ex(e)) raise exceptions_helper.ShowNotFoundException(ex(e))
except BaseTVinfoError as e: except BaseTVinfoError as e:
logger.log("Unable to connect to %s while creating meta files - skipping - %s" % logger.error(f'Unable to connect to {sickgear.TVInfoAPI(ep_obj.show_obj.tvid).name}'
(sickgear.TVInfoAPI(ep_obj.show_obj.tvid).name, ex(e)), logger.ERROR) f' while creating meta files - skipping - {ex(e)}')
return False return False
if not self._valid_show(show_info, ep_obj.show_obj): if not self._valid_show(show_info, ep_obj.show_obj):

View file

@ -158,7 +158,7 @@ class TIVOMetadata(generic.GenericMetadata):
metadata_dir_name = os.path.join(os.path.dirname(ep_obj.location), '.meta') metadata_dir_name = os.path.join(os.path.dirname(ep_obj.location), '.meta')
metadata_file_path = os.path.join(metadata_dir_name, metadata_file_name) metadata_file_path = os.path.join(metadata_dir_name, metadata_file_name)
else: else:
logger.log(u"Episode location doesn't exist: " + str(ep_obj.location), logger.DEBUG) logger.debug(f'Episode location doesn\'t exist: {ep_obj.location}')
return '' return ''
return metadata_file_path return metadata_file_path
@ -203,8 +203,8 @@ class TIVOMetadata(generic.GenericMetadata):
except BaseTVinfoShownotfound as e: except BaseTVinfoShownotfound as e:
raise exceptions_helper.ShowNotFoundException(ex(e)) raise exceptions_helper.ShowNotFoundException(ex(e))
except BaseTVinfoError as e: except BaseTVinfoError as e:
logger.log("Unable to connect to %s while creating meta files - skipping - %s" % logger.error(f'Unable to connect to {sickgear.TVInfoAPI(ep_obj.show_obj.tvid).name}'
(sickgear.TVInfoAPI(ep_obj.show_obj.tvid).name, ex(e)), logger.ERROR) f' while creating meta files - skipping - {ex(e)}')
return False return False
if not self._valid_show(show_info, ep_obj.show_obj): if not self._valid_show(show_info, ep_obj.show_obj):
@ -251,10 +251,10 @@ class TIVOMetadata(generic.GenericMetadata):
# Write the synopsis of the video here # Write the synopsis of the video here
sanitizedDescription = cur_ep_obj.description sanitizedDescription = cur_ep_obj.description
# Replace double curly quotes # Replace double curly quotes
sanitizedDescription = sanitizedDescription.replace(u"\u201c", "\"").replace(u"\u201d", "\"") sanitizedDescription = sanitizedDescription.replace('\u201c', '"').replace('\u201d', '"')
# Replace single curly quotes # Replace single curly quotes
sanitizedDescription = sanitizedDescription.replace(u"\u2018", "'").replace(u"\u2019", "'").replace( sanitizedDescription = sanitizedDescription.replace('\u2018', '\'').replace('\u2019', '\'').replace(
u"\u02BC", "'") '\u02BC', '\'')
data += ("description : " + sanitizedDescription + "\n") data += ("description : " + sanitizedDescription + "\n")
@ -337,11 +337,11 @@ class TIVOMetadata(generic.GenericMetadata):
try: try:
if not os.path.isdir(nfo_file_dir): if not os.path.isdir(nfo_file_dir):
logger.log(u"Metadata dir didn't exist, creating it at " + nfo_file_dir, logger.DEBUG) logger.debug(f'Metadata dir didn\'t exist, creating it at {nfo_file_dir}')
os.makedirs(nfo_file_dir) os.makedirs(nfo_file_dir)
sg_helpers.chmod_as_parent(nfo_file_dir) sg_helpers.chmod_as_parent(nfo_file_dir)
logger.log(u"Writing episode nfo file to " + nfo_file_path, logger.DEBUG) logger.debug(f'Writing episode nfo file to {nfo_file_path}')
with open(nfo_file_path, 'w') as nfo_file: with open(nfo_file_path, 'w') as nfo_file:
# Calling encode directly, b/c often descriptions have wonky characters. # Calling encode directly, b/c often descriptions have wonky characters.
@ -350,8 +350,7 @@ class TIVOMetadata(generic.GenericMetadata):
sg_helpers.chmod_as_parent(nfo_file_path) sg_helpers.chmod_as_parent(nfo_file_path)
except EnvironmentError as e: except EnvironmentError as e:
logger.log(u"Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e), logger.error(f'Unable to write file to {nfo_file_path} - are you sure the folder is writable? {ex(e)}')
logger.ERROR)
return False return False
return True return True
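
The sanitising above maps typographic quotes to plain ASCII before the description is written to the TiVo metadata file. A quick check of the escape sequences involved (the sample text is illustrative):

    desc = '\u201cCurly\u201d and \u2018single\u2019 and \u02bcokina'
    desc = desc.replace('\u201c', '"').replace('\u201d', '"')  # double curly quotes
    desc = desc.replace('\u2018', "'").replace('\u2019', "'")  # single curly quotes
    desc = desc.replace('\u02bc', "'")                         # modifier apostrophe
    assert desc == '"Curly" and \'single\' and \'okina'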

View file

@ -168,10 +168,10 @@ class WDTVMetadata(generic.GenericMetadata):
break break
if not season_dir: if not season_dir:
logger.log(u"Unable to find a season dir for season " + str(season), logger.DEBUG) logger.debug(f'Unable to find a season dir for season {season}')
return None return None
logger.log(u"Using " + str(season_dir) + "/folder.jpg as season dir for season " + str(season), logger.DEBUG) logger.debug(f'Using {season_dir}/folder.jpg as season dir for season {season}')
return os.path.join(show_obj.location, season_dir, 'folder.jpg') return os.path.join(show_obj.location, season_dir, 'folder.jpg')
@ -204,8 +204,8 @@ class WDTVMetadata(generic.GenericMetadata):
except BaseTVinfoShownotfound as e: except BaseTVinfoShownotfound as e:
raise exceptions_helper.ShowNotFoundException(ex(e)) raise exceptions_helper.ShowNotFoundException(ex(e))
except BaseTVinfoError as e: except BaseTVinfoError as e:
logger.log("Unable to connect to %s while creating meta files - skipping - %s" % logger.error(f'Unable to connect to {sickgear.TVInfoAPI(ep_obj.show_obj.tvid).name}'
(sickgear.TVInfoAPI(ep_obj.show_obj.tvid).name, ex(e)), logger.ERROR) f' while creating meta files - skipping - {ex(e)}')
return False return False
if not self._valid_show(show_info, ep_obj.show_obj): if not self._valid_show(show_info, ep_obj.show_obj):

View file

@ -123,12 +123,11 @@ class XBMC12PlusMetadata(generic.GenericMetadata):
try: try:
show_info = t[int(show_id)] show_info = t[int(show_id)]
except BaseTVinfoShownotfound as e: except BaseTVinfoShownotfound as e:
logger.log('Unable to find show with id %s on %s, skipping it' % logger.error(f'Unable to find show with id {show_id} on {sickgear.TVInfoAPI(show_obj.tvid).name},'
(show_id, sickgear.TVInfoAPI(show_obj.tvid).name), logger.ERROR) f' skipping it')
raise e raise e
except BaseTVinfoError as e: except BaseTVinfoError as e:
logger.log('%s is down, can\'t use its data to add this show' % sickgear.TVInfoAPI(show_obj.tvid).name, logger.error('%s is down, can\'t use its data to add this show' % sickgear.TVInfoAPI(show_obj.tvid).name)
logger.ERROR)
raise e raise e
if not self._valid_show(show_info, show_obj): if not self._valid_show(show_info, show_obj):
@ -136,8 +135,8 @@ class XBMC12PlusMetadata(generic.GenericMetadata):
# check for title and id # check for title and id
if None is getattr(show_info, 'seriesname', None) or None is getattr(show_info, 'id', None): if None is getattr(show_info, 'seriesname', None) or None is getattr(show_info, 'id', None):
logger.log('Incomplete info for show with id %s on %s, skipping it' % logger.error(f'Incomplete info for show with id {show_id} on {sickgear.TVInfoAPI(show_obj.tvid).name},'
(show_id, sickgear.TVInfoAPI(show_obj.tvid).name), logger.ERROR) f' skipping it')
return False return False
title = etree.SubElement(tv_node, 'title') title = etree.SubElement(tv_node, 'title')
@ -227,8 +226,9 @@ class XBMC12PlusMetadata(generic.GenericMetadata):
except BaseTVinfoShownotfound as e: except BaseTVinfoShownotfound as e:
raise exceptions_helper.ShowNotFoundException(ex(e)) raise exceptions_helper.ShowNotFoundException(ex(e))
except BaseTVinfoError as e: except BaseTVinfoError as e:
logger.log('Unable to connect to %s while creating meta files - skipping - %s' % logger.error(
(sickgear.TVInfoAPI(ep_obj.show_obj.tvid).name, ex(e)), logger.ERROR) f'Unable to connect to {sickgear.TVInfoAPI(ep_obj.show_obj.tvid).name} while creating meta files'
f' - skipping - {ex(e)}')
return return
if not self._valid_show(show_info, ep_obj.show_obj): if not self._valid_show(show_info, ep_obj.show_obj):
@ -249,17 +249,17 @@ class XBMC12PlusMetadata(generic.GenericMetadata):
(cur_ep_obj.season, cur_ep_obj.episode, sickgear.TVInfoAPI(ep_obj.show_obj.tvid).name)) (cur_ep_obj.season, cur_ep_obj.episode, sickgear.TVInfoAPI(ep_obj.show_obj.tvid).name))
return None return None
except (BaseException, Exception): except (BaseException, Exception):
logger.log(u'Not generating nfo because failed to fetched tv info data at this time', logger.DEBUG) logger.debug('Not generating nfo because tv info data could not be fetched at this time')
return None return None
if None is getattr(ep_info, 'firstaired', None): if None is getattr(ep_info, 'firstaired', None):
ep_info['firstaired'] = str(datetime.date.fromordinal(1)) ep_info['firstaired'] = str(datetime.date.fromordinal(1))
if None is getattr(ep_info, 'episodename', None): if None is getattr(ep_info, 'episodename', None):
logger.log(u'Not generating nfo because the ep has no title', logger.DEBUG) logger.debug('Not generating nfo because the ep has no title')
return None return None
logger.log(u'Creating metadata for episode ' + str(ep_obj.season) + 'x' + str(ep_obj.episode), logger.DEBUG) logger.debug(f'Creating metadata for episode {ep_obj.season}x{ep_obj.episode}')
if 1 < len(ep_obj_list_to_write): if 1 < len(ep_obj_list_to_write):
episode = etree.SubElement(rootNode, 'episodedetails') episode = etree.SubElement(rootNode, 'episodedetails')
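
For multi-episode files, the writer above adds one episodedetails node per episode under a shared root, as the 1 < len(ep_obj_list_to_write) branch shows. A minimal sketch of that ElementTree shape (the root tag and field names here are assumptions for illustration):

    import xml.etree.ElementTree as etree

    root_node = etree.Element('xbmcmultiepisode')  # assumed root tag
    for season, episode, title in [(1, 1, 'Pilot'), (1, 2, 'Part Two')]:
        ep_node = etree.SubElement(root_node, 'episodedetails')
        etree.SubElement(ep_node, 'title').text = title
        etree.SubElement(ep_node, 'season').text = str(season)
        etree.SubElement(ep_node, 'episode').text = str(episode)
    # one sibling episodedetails block per episode in the file
    assert 2 == len(root_node.findall('episodedetails'))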

View file

@ -98,7 +98,7 @@ class NameParser(object):
cur_pattern = strip_comment.sub('', cur_pattern) cur_pattern = strip_comment.sub('', cur_pattern)
cur_regex = re.compile('(?x)' + cur_pattern, re.VERBOSE | re.IGNORECASE) cur_regex = re.compile('(?x)' + cur_pattern, re.VERBOSE | re.IGNORECASE)
except re.error as errormsg: except re.error as errormsg:
logger.log(u'WARNING: Invalid episode_pattern, %s. %s' % (errormsg, cur_pattern)) logger.log(f'WARNING: Invalid episode_pattern, {errormsg}. {cur_pattern}')
else: else:
cls.compiled_regexes[index].append([cur_pattern_num, cur_pattern_name, cur_regex]) cls.compiled_regexes[index].append([cur_pattern_num, cur_pattern_name, cur_regex])
index += 1 index += 1
@ -380,12 +380,11 @@ class NameParser(object):
season_number = int(ep_obj['seasonnumber']) season_number = int(ep_obj['seasonnumber'])
episode_numbers = [int(ep_obj['episodenumber'])] episode_numbers = [int(ep_obj['episodenumber'])]
except BaseTVinfoEpisodenotfound: except BaseTVinfoEpisodenotfound:
logger.warning(u'Unable to find episode with date %s for show %s, skipping' % logger.warning(f'Unable to find episode with date {best_result.air_date}'
(best_result.air_date, show_obj.unique_name)) f' for show {show_obj.unique_name}, skipping')
episode_numbers = [] episode_numbers = []
except BaseTVinfoError as e: except BaseTVinfoError as e:
logger.log(u'Unable to contact ' + sickgear.TVInfoAPI(show_obj.tvid).name logger.warning(f'Unable to contact {sickgear.TVInfoAPI(show_obj.tvid).name}: {ex(e)}')
+ ': ' + ex(e), logger.WARNING)
episode_numbers = [] episode_numbers = []
for epNo in episode_numbers: for epNo in episode_numbers:
@ -468,9 +467,8 @@ class NameParser(object):
best_result.season_number = new_season_numbers[0] best_result.season_number = new_season_numbers[0]
if self.convert and show_obj.is_scene: if self.convert and show_obj.is_scene:
logger.log(u'Converted parsed result %s into %s' logger.debug(f'Converted parsed result {best_result.original_name}'
% (best_result.original_name, decode_str(str(best_result), errors='xmlcharrefreplace')), f' into {decode_str(str(best_result), errors="xmlcharrefreplace")}')
logger.DEBUG)
helpers.cpu_sleep() helpers.cpu_sleep()
@ -646,7 +644,7 @@ class NameParser(object):
and any('anime' in wr for wr in final_result.which_regex) == bool(final_result.show_obj.is_anime): and any('anime' in wr for wr in final_result.which_regex) == bool(final_result.show_obj.is_anime):
name_parser_cache.add(name, final_result) name_parser_cache.add(name, final_result)
logger.log(u'Parsed %s into %s' % (name, final_result), logger.DEBUG) logger.debug(f'Parsed {name} into {final_result}')
return final_result return final_result
@ -752,9 +750,9 @@ class ParseResult(LegacyParseResult):
def __unicode__(self): def __unicode__(self):
if None is not self.series_name: if None is not self.series_name:
to_return = self.series_name + u' - ' to_return = f'{self.series_name} - '
else: else:
to_return = u'' to_return = ''
if None is not self.season_number: if None is not self.season_number:
to_return += 'S' + str(self.season_number) to_return += 'S' + str(self.season_number)
if self.episode_numbers and len(self.episode_numbers): if self.episode_numbers and len(self.episode_numbers):
@ -863,7 +861,7 @@ class NameParserCache(object):
key = self._previous_parsed.first_key() key = self._previous_parsed.first_key()
del self._previous_parsed[key] del self._previous_parsed[key]
except KeyError: except KeyError:
logger.log('Could not remove old NameParserCache entry: %s' % key, logger.DEBUG) logger.debug('Could not remove old NameParserCache entry: %s' % key)
def get(self, name): def get(self, name):
# type: (AnyStr) -> ParseResult # type: (AnyStr) -> ParseResult
@ -876,7 +874,7 @@ class NameParserCache(object):
""" """
with self.lock: with self.lock:
if name in self._previous_parsed: if name in self._previous_parsed:
logger.log('Using cached parse result for: ' + name, logger.DEBUG) logger.debug('Using cached parse result for: ' + name)
self._previous_parsed.move_to_end(name) self._previous_parsed.move_to_end(name)
return self._previous_parsed[name] return self._previous_parsed[name]
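
NameParserCache, shown above, is a size-bounded most-recently-used store: a hit is promoted with move_to_end, and eviction removes the oldest key. A compact sketch of the same idiom on a plain OrderedDict (first_key is the project's own helper; next(iter(...)) stands in for it here):

    from collections import OrderedDict

    class LRUParseCache(object):
        def __init__(self, max_items=100):
            self._data = OrderedDict()
            self._max_items = max_items

        def add(self, name, result):
            self._data[name] = result
            self._data.move_to_end(name)  # newest entry moves to the tail
            while len(self._data) > self._max_items:
                oldest = next(iter(self._data))  # head = least recently used
                del self._data[oldest]

        def get(self, name):
            if name in self._data:
                self._data.move_to_end(name)  # a hit keeps the entry fresh
                return self._data[name]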

View file

@ -165,11 +165,11 @@ def check_valid_naming(pattern=None, multi=None, anime_type=None):
if None is anime_type: if None is anime_type:
anime_type = sickgear.NAMING_ANIME anime_type = sickgear.NAMING_ANIME
logger.log(u'Checking whether the pattern %s is valid for a single episode' % pattern, logger.DEBUG) logger.debug(f'Checking whether the pattern {pattern} is valid for a single episode')
valid = validate_name(pattern, None, anime_type) valid = validate_name(pattern, None, anime_type)
if None is not multi: if None is not multi:
logger.log(u'Checking whether the pattern %s is valid for a multi episode' % pattern, logger.DEBUG) logger.debug(f'Checking whether the pattern {pattern} is valid for a multi episode')
valid = valid and validate_name(pattern, multi, anime_type) valid = valid and validate_name(pattern, multi, anime_type)
return valid return valid
@ -188,7 +188,7 @@ def check_valid_abd_naming(pattern=None):
if None is pattern: if None is pattern:
pattern = sickgear.NAMING_PATTERN pattern = sickgear.NAMING_PATTERN
logger.log(u'Checking whether the pattern %s is valid for an air-by-date episode' % pattern, logger.DEBUG) logger.debug(f'Checking whether the pattern {pattern} is valid for an air-by-date episode')
valid = validate_name(pattern, abd=True) valid = validate_name(pattern, abd=True)
return valid return valid
@ -207,7 +207,7 @@ def check_valid_sports_naming(pattern=None):
if None is pattern: if None is pattern:
pattern = sickgear.NAMING_PATTERN pattern = sickgear.NAMING_PATTERN
logger.log(u'Checking whether the pattern %s is valid for an sports episode' % pattern, logger.DEBUG) logger.debug(f'Checking whether the pattern {pattern} is valid for a sports episode')
valid = validate_name(pattern, sports=True) valid = validate_name(pattern, sports=True)
return valid return valid
@ -233,43 +233,43 @@ def validate_name(pattern, multi=None, anime_type=None, file_only=False, abd=Fal
""" """
sample_ep_obj = generate_sample_ep(multi, abd, sports, anime_type=anime_type) sample_ep_obj = generate_sample_ep(multi, abd, sports, anime_type=anime_type)
new_name = u'%s.ext' % sample_ep_obj.formatted_filename(pattern, multi, anime_type) new_name = f'{sample_ep_obj.formatted_filename(pattern, multi, anime_type)}.ext'
new_path = sample_ep_obj.formatted_dir(pattern, multi) new_path = sample_ep_obj.formatted_dir(pattern, multi)
if not file_only: if not file_only:
new_name = os.path.join(new_path, new_name) new_name = os.path.join(new_path, new_name)
if not new_name: if not new_name:
logger.log(u'Unable to create a name out of %s' % pattern, logger.DEBUG) logger.debug(f'Unable to create a name out of {pattern}')
return False return False
logger.log(u'Trying to parse %s' % new_name, logger.DEBUG) logger.debug(f'Trying to parse {new_name}')
parser = NameParser(True, show_obj=sample_ep_obj.show_obj, naming_pattern=True) parser = NameParser(True, show_obj=sample_ep_obj.show_obj, naming_pattern=True)
try: try:
result = parser.parse(new_name) result = parser.parse(new_name)
except (BaseException, Exception): except (BaseException, Exception):
logger.log(u'Unable to parse %s, not valid' % new_name, logger.DEBUG) logger.debug(f'Unable to parse {new_name}, not valid')
return False return False
logger.log(u'The name %s parsed into %s' % (new_name, result), logger.DEBUG) logger.debug(f'The name {new_name} parsed into {result}')
if abd or sports: if abd or sports:
if result.air_date != sample_ep_obj.airdate: if result.air_date != sample_ep_obj.airdate:
logger.log(u'Air date incorrect in parsed episode, pattern isn\'t valid', logger.DEBUG) logger.debug('Air date incorrect in parsed episode, pattern isn\'t valid')
return False return False
elif 3 == anime_type: elif 3 == anime_type:
if result.season_number != sample_ep_obj.season: if result.season_number != sample_ep_obj.season:
logger.log(u'Season number incorrect in parsed episode, pattern isn\'t valid', logger.DEBUG) logger.debug('Season number incorrect in parsed episode, pattern isn\'t valid')
return False return False
if result.episode_numbers != [x.episode for x in [sample_ep_obj] + sample_ep_obj.related_ep_obj]: if result.episode_numbers != [x.episode for x in [sample_ep_obj] + sample_ep_obj.related_ep_obj]:
logger.log(u'Episode numbering incorrect in parsed episode, pattern isn\'t valid', logger.DEBUG) logger.debug('Episode numbering incorrect in parsed episode, pattern isn\'t valid')
return False return False
else: else:
if len(result.ab_episode_numbers) \ if len(result.ab_episode_numbers) \
and result.ab_episode_numbers != [x.absolute_number and result.ab_episode_numbers != [x.absolute_number
for x in [sample_ep_obj] + sample_ep_obj.related_ep_obj]: for x in [sample_ep_obj] + sample_ep_obj.related_ep_obj]:
logger.log(u'Absolute numbering incorrect in parsed episode, pattern isn\'t valid', logger.DEBUG) logger.debug('Absolute numbering incorrect in parsed episode, pattern isn\'t valid')
return False return False
return True return True


@ -156,9 +156,9 @@ def _remove_old_zoneinfo():
for _dir in (sickgear.ZONEINFO_DIR, )]): # type: DirEntry for _dir in (sickgear.ZONEINFO_DIR, )]): # type: DirEntry
if current_file != entry.path: if current_file != entry.path:
if remove_file_perm(entry.path, log_err=False): if remove_file_perm(entry.path, log_err=False):
logger.log(u'Delete unneeded old zoneinfo File: %s' % entry.path) logger.log(f'Delete unneeded old zoneinfo File: {entry.path}')
else: else:
logger.log(u'Unable to delete: %s' % entry.path, logger.ERROR) logger.error(f'Unable to delete: {entry.path}')
def _update_zoneinfo(): def _update_zoneinfo():
@ -175,16 +175,15 @@ def _update_zoneinfo():
if None is url_data: if None is url_data:
update_last_retry() update_last_retry()
# when None is urlData, trouble connecting to GitHub # when None is urlData, trouble connecting to GitHub
logger.log(u'Fetching zoneinfo.txt failed, this can happen from time to time. Unable to get URL: %s' % url, logger.warning(f'Fetching zoneinfo.txt failed, this can happen from time to time. Unable to get URL: {url}')
logger.WARNING)
return return
reset_last_retry() reset_last_retry()
try: try:
(new_zoneinfo, zoneinfo_md5) = url_data.strip().rsplit(u' ') (new_zoneinfo, zoneinfo_md5) = url_data.strip().rsplit(' ')
except (BaseException, Exception): except (BaseException, Exception):
logger.log('Fetching zoneinfo.txt failed, update contains unparsable data: %s' % url_data, logger.DEBUG) logger.debug('Fetching zoneinfo.txt failed, update contains unparsable data: %s' % url_data)
return return
current_file = zoneinfo.ZONEFILENAME current_file = zoneinfo.ZONEFILENAME
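
Review note: rsplit(' ') without a maxsplit splits on every space, so the two-name unpack above only succeeds while zoneinfo.txt carries exactly one space per line; anything else raises ValueError and lands in the "unparsable data" branch. A quick illustration (sample data made up):

    url_data = 'zoneinfo-2023a.tar.gz 1a2b3c4d5e6f7a8b9c0d1e2f3a4b5c6d\n'
    new_zoneinfo, zoneinfo_md5 = url_data.strip().rsplit(' ')
    assert new_zoneinfo == 'zoneinfo-2023a.tar.gz'

    try:
        # a space anywhere else yields three fields and the unpack fails
        new_zoneinfo, zoneinfo_md5 = 'bad name.tar.gz 1a2b3c4d'.rsplit(' ')
    except ValueError:
        pass  # treated above as unparsable data
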
@ -206,25 +205,25 @@ def _update_zoneinfo():
return return
# load the new zoneinfo # load the new zoneinfo
url_tar = u'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/%s' % new_zoneinfo url_tar = f'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/{new_zoneinfo}'
zonefile_tmp = re.sub(r'\.tar\.gz$', '.tmp', zonefile) zonefile_tmp = re.sub(r'\.tar\.gz$', '.tmp', zonefile)
if not remove_file_perm(zonefile_tmp, log_err=False): if not remove_file_perm(zonefile_tmp, log_err=False):
logger.log(u'Unable to delete: %s' % zonefile_tmp, logger.ERROR) logger.error(f'Unable to delete: {zonefile_tmp}')
return return
if not helpers.download_file(url_tar, zonefile_tmp): if not helpers.download_file(url_tar, zonefile_tmp):
return return
if not os.path.exists(zonefile_tmp): if not os.path.exists(zonefile_tmp):
logger.log(u'Download of %s failed.' % zonefile_tmp, logger.ERROR) logger.error(f'Download of {zonefile_tmp} failed.')
return return
new_hash = str(helpers.md5_for_file(zonefile_tmp)) new_hash = str(helpers.md5_for_file(zonefile_tmp))
if zoneinfo_md5.upper() == new_hash.upper(): if zoneinfo_md5.upper() == new_hash.upper():
logger.log(u'Updating timezone info with new one: %s' % new_zoneinfo, logger.MESSAGE) logger.log(f'Updating timezone info with new one: {new_zoneinfo}', logger.MESSAGE)
try: try:
# remove the old zoneinfo file # remove the old zoneinfo file
if None is not current_file: if None is not current_file:
@ -245,7 +244,7 @@ def _update_zoneinfo():
return return
else: else:
remove_file_perm(zonefile_tmp, log_err=False) remove_file_perm(zonefile_tmp, log_err=False)
logger.log(u'MD5 hash does not match: %s File: %s' % (zoneinfo_md5.upper(), new_hash.upper()), logger.ERROR) logger.error(f'MD5 hash does not match: {zoneinfo_md5.upper()} File: {new_hash.upper()}')
return return
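
Review note: the downloaded tarball is only swapped in after its checksum matches the published hash. helpers.md5_for_file is a SickGear helper not shown in this diff; such a helper typically computes a chunked hash along these lines (a sketch, not the project's code):

    import hashlib

    def md5_for_file(path, block_size=1 << 16):
        # hash in blocks so a large tarball is never read into memory at once
        md5 = hashlib.md5()
        with open(path, 'rb') as fh:
            for block in iter(lambda: fh.read(block_size), b''):
                md5.update(block)
        return md5.hexdigest()
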
@ -270,7 +269,7 @@ def update_network_dict():
if url_data in (None, ''): if url_data in (None, ''):
update_last_retry() update_last_retry()
# When None is urlData, trouble connecting to GitHub # When None is urlData, trouble connecting to GitHub
logger.debug(u'Updating network timezones failed, this can happen from time to time. URL: %s' % url) logger.debug(f'Updating network timezones failed, this can happen from time to time. URL: {url}')
load_network_dict(load=False) load_network_dict(load=False)
return return
@ -279,7 +278,7 @@ def update_network_dict():
try: try:
for line in url_data.splitlines(): for line in url_data.splitlines():
try: try:
(name, tzone) = line.strip().rsplit(u':', 1) (name, tzone) = line.strip().rsplit(':', 1)
except (BaseException, Exception): except (BaseException, Exception):
continue continue
if None is name or None is tzone: if None is name or None is tzone:
@ -512,14 +511,14 @@ def _load_network_conversions():
if url_data in (None, ''): if url_data in (None, ''):
update_last_retry() update_last_retry()
# when no url_data, trouble connecting to GitHub # when no url_data, trouble connecting to GitHub
logger.debug(u'Updating network conversions failed, this can happen from time to time. URL: %s' % url) logger.debug(f'Updating network conversions failed, this can happen from time to time. URL: {url}')
return return
reset_last_retry() reset_last_retry()
try: try:
for line in url_data.splitlines(): for line in url_data.splitlines():
(tvdb_network, tvrage_network, tvrage_country) = line.strip().rsplit(u'::', 2) (tvdb_network, tvrage_network, tvrage_country) = line.strip().rsplit('::', 2)
if not (tvdb_network and tvrage_network and tvrage_country): if not (tvdb_network and tvrage_network and tvrage_country):
continue continue
conversions_in.append( conversions_in.append(

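Review note: unlike the zoneinfo parse above, this one passes maxsplit=2, so rsplit('::', 2) cuts only at the two rightmost separators and any extra '::' stays inside the TVDB network name. For example (sample line made up):

    line = 'Name :: The Network::Name The Network::gb\n'
    tvdb_network, tvrage_network, tvrage_country = line.strip().rsplit('::', 2)
    assert tvdb_network == 'Name :: The Network'
    assert tvrage_country == 'gb'
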

@ -72,7 +72,7 @@ class Boxcar2Notifier(Notifier):
except urllib.error.HTTPError as e: except urllib.error.HTTPError as e:
if not hasattr(e, 'code'): if not hasattr(e, 'code'):
self._log_error(u'Notification failed: %s' % ex(e)) self._log_error(f'Notification failed: {ex(e)}')
else: else:
result = 'Notification failed. Error code: %s' % e.code result = 'Notification failed. Error code: %s' % e.code
self._log_error(result) self._log_error(result)
@ -91,7 +91,7 @@ class Boxcar2Notifier(Notifier):
result = 'Wrong data sent to Boxcar' result = 'Wrong data sent to Boxcar'
self._log_error(result) self._log_error(result)
except urllib.error.URLError as e: except urllib.error.URLError as e:
self._log_error(u'Notification failed: %s' % ex(e)) self._log_error(f'Notification failed: {ex(e)}')
return self._choose((True, 'Failed to send notification: %s' % result)[bool(result)], not bool(result)) return self._choose((True, 'Failed to send notification: %s' % result)[bool(result)], not bool(result))
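
Review note: the braces in the URLError handler above matter - this kind of conversion is prone to leaving them off, which logs the literal text ex(e) instead of the error. A two-line check with a plain exception:

    e = ValueError('connection refused')
    assert f'Notification failed: ex(e)' == 'Notification failed: ex(e)'  # no braces, no call
    assert f'Notification failed: {e}' == 'Notification failed: connection refused'
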


@ -44,8 +44,8 @@ class EmailNotifier(Notifier):
use_tls = 1 == sickgear.helpers.try_int(use_tls) use_tls = 1 == sickgear.helpers.try_int(use_tls)
login = any(user) and any(pwd) login = any(user) and any(pwd)
self._log_debug(u'Sendmail HOST: %s; PORT: %s; LOGIN: %s, TLS: %s, USER: %s, FROM: %s, TO: %s' % ( self._log_debug(f'Sendmail HOST: {host}; PORT: {port};'
host, port, login, use_tls, user, smtp_from, to)) f' LOGIN: {login}, TLS: {use_tls}, USER: {user}, FROM: {smtp_from}, TO: {to}')
try: try:
srv = smtplib.SMTP(host, int(port)) srv = smtplib.SMTP(host, int(port))
@ -54,16 +54,16 @@ class EmailNotifier(Notifier):
if use_tls or login: if use_tls or login:
srv.ehlo() srv.ehlo()
self._log_debug(u'Sent initial EHLO command') self._log_debug('Sent initial EHLO command')
if use_tls: if use_tls:
srv.starttls() srv.starttls()
srv.ehlo() srv.ehlo()
self._log_debug(u'Sent STARTTLS and EHLO command') self._log_debug('Sent STARTTLS and EHLO command')
if login: if login:
srv.login(user, pwd) srv.login(user, pwd)
self._log_debug(u'Sent LOGIN command') self._log_debug('Sent LOGIN command')
srv.sendmail(smtp_from, to, msg.as_string()) srv.sendmail(smtp_from, to, msg.as_string())
srv.quit() srv.quit()
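
Review note: the EHLO, STARTTLS, EHLO, LOGIN sequence above is the standard smtplib handshake. On Python 3 the same flow also works with the module's context manager, which guarantees the closing QUIT; a sketch under the same arguments:

    import smtplib

    def sendmail_sketch(host, port, smtp_from, to, msg, user=None, pwd=None, use_tls=False):
        # same handshake as above; the with-block issues QUIT on exit
        with smtplib.SMTP(host, int(port)) as srv:
            srv.ehlo()
            if use_tls:
                srv.starttls()
                srv.ehlo()
            if user and pwd:
                srv.login(user, pwd)
            srv.sendmail(smtp_from, to, msg.as_string())
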
@ -101,10 +101,10 @@ class EmailNotifier(Notifier):
show_name = body.split(' - ')[0] show_name = body.split(' - ')[0]
to = self._get_recipients(show_name) to = self._get_recipients(show_name)
if not any(to): if not any(to):
self._log_warning(u'No email recipients to notify, skipping') self._log_warning('No email recipients to notify, skipping')
return return
self._log_debug(u'Email recipients to notify: %s' % to) self._log_debug(f'Email recipients to notify: {to}')
try: try:
msg = MIMEMultipart('alternative') msg = MIMEMultipart('alternative')
@ -131,9 +131,9 @@ class EmailNotifier(Notifier):
msg['Date'] = formatdate(localtime=True) msg['Date'] = formatdate(localtime=True)
if self._sendmail(sickgear.EMAIL_HOST, sickgear.EMAIL_PORT, sickgear.EMAIL_FROM, sickgear.EMAIL_TLS, if self._sendmail(sickgear.EMAIL_HOST, sickgear.EMAIL_PORT, sickgear.EMAIL_FROM, sickgear.EMAIL_TLS,
sickgear.EMAIL_USER, sickgear.EMAIL_PASSWORD, to, msg): sickgear.EMAIL_USER, sickgear.EMAIL_PASSWORD, to, msg):
self._log_debug(u'%s notification sent to [%s] for "%s"' % (title, to, body)) self._log_debug(f'{title} notification sent to [{to}] for "{body}"')
else: else:
self._log_error(u'%s notification ERROR: %s' % (title, self.last_err)) self._log_error(f'{title} notification ERROR: {self.last_err}')
def test_notify(self, host, port, smtp_from, use_tls, user, pwd, to): def test_notify(self, host, port, smtp_from, use_tls, user, pwd, to):
self._testing = True self._testing = True


@ -61,7 +61,7 @@ class EmbyNotifier(Notifier):
""" """
hosts, keys, message = self._check_config() hosts, keys, message = self._check_config()
if not hosts: if not hosts:
self._log_warning(u'Issue with hosts or api keys, check your settings') self._log_warning('Issue with hosts or api keys, check your settings')
return False return False
from sickgear.indexers import indexer_config from sickgear.indexers import indexer_config
@ -98,10 +98,10 @@ class EmbyNotifier(Notifier):
timeout=20, hooks=dict(response=self._cb_response), **args) timeout=20, hooks=dict(response=self._cb_response), **args)
# Emby will initiate a LibraryMonitor path refresh one minute after this success # Emby will initiate a LibraryMonitor path refresh one minute after this success
if self.response and 204 == self.response.get('status_code') and self.response.get('ok'): if self.response and 204 == self.response.get('status_code') and self.response.get('ok'):
self._log(u'Success: update %s sent to host %s in a library updated call' % (mode_to_log, cur_host)) self._log(f'Success: update {mode_to_log} sent to host {cur_host} in a library updated call')
continue continue
elif self.response and 401 == self.response.get('status_code'): elif self.response and 401 == self.response.get('status_code'):
self._log_warning(u'Failed to authenticate with %s' % cur_host) self._log_warning(f'Failed to authenticate with {cur_host}')
elif self.response and 404 == self.response.get('status_code'): elif self.response and 404 == self.response.get('status_code'):
self.response = None self.response = None
sickgear.helpers.get_url( sickgear.helpers.get_url(
@ -109,16 +109,16 @@ class EmbyNotifier(Notifier):
headers={'Content-type': 'application/json', 'X-MediaBrowser-Token': keys[i]}, headers={'Content-type': 'application/json', 'X-MediaBrowser-Token': keys[i]},
timeout=20, hooks=dict(response=self._cb_response), post_json={'Path': '', 'UpdateType': ''}) timeout=20, hooks=dict(response=self._cb_response), post_json={'Path': '', 'UpdateType': ''})
if self.response and 204 == self.response.get('status_code') and self.response.get('ok'): if self.response and 204 == self.response.get('status_code') and self.response.get('ok'):
self._log(u'Success: fallback to sending Library/Media/Updated call' self._log(f'Success: fallback to sending Library/Media/Updated call'
u' to scan all shows at host %s' % cur_host) f' to scan all shows at host {cur_host}')
continue continue
self._log_debug(u'Warning, Library update responded 404 not found and' self._log_debug(f'Warning, Library update responded 404 not found and'
u' fallback to new /Library/Media/Updated api call failed at %s' % cur_host) f' fallback to new /Library/Media/Updated api call failed at {cur_host}')
elif not response and not self.response or not self.response.get('ok'): elif not response and not self.response or not self.response.get('ok'):
self._log_warning(u'Warning, could not connect with server at %s' % cur_host) self._log_warning(f'Warning, could not connect with server at {cur_host}')
else: else:
self._log_debug(u'Warning, unknown response %sfrom %s, can most likely be ignored' status = self.response and '%s ' % self.response.get('status_code') or ''
% (self.response and '%s ' % self.response.get('status_code') or '', cur_host)) self._log_debug(f'Warning, unknown response {status}from {cur_host}, can most likely be ignored')
total_success = False total_success = False
return total_success return total_success
@ -181,7 +181,7 @@ class EmbyNotifier(Notifier):
if len(hosts) != len(apikeys): if len(hosts) != len(apikeys):
message = ('Not enough Api keys for hosts', 'More Api keys than hosts')[len(apikeys) > len(hosts)] message = ('Not enough Api keys for hosts', 'More Api keys than hosts')[len(apikeys) > len(hosts)]
self._log_warning(u'%s, check your settings' % message) self._log_warning(f'{message}, check your settings')
return False, False, message return False, False, message
return hosts, apikeys, 'OK' return hosts, apikeys, 'OK'
@ -215,12 +215,12 @@ class EmbyNotifier(Notifier):
if self.response and 401 == self.response.get('status_code'): if self.response and 401 == self.response.get('status_code'):
success = False success = False
message += ['Fail: Cannot authenticate API key with %s' % cur_host] message += ['Fail: Cannot authenticate API key with %s' % cur_host]
self._log_warning(u'Failed to authenticate with %s' % cur_host) self._log_warning(f'Failed to authenticate with {cur_host}')
continue continue
elif not response and not self.response or not self.response.get('ok'): elif not response and not self.response or not self.response.get('ok'):
success = False success = False
message += ['Fail: No supported Emby server found at %s' % cur_host] message += ['Fail: No supported Emby server found at %s' % cur_host]
self._log_warning(u'Warning, could not connect with server at ' + cur_host) self._log_warning(f'Warning, could not connect with server at {cur_host}')
continue continue
message += ['OK: %s' % cur_host] message += ['OK: %s' % cur_host]


@ -25,7 +25,7 @@ notify_strings = dict(
git_updated='SickGear updated', git_updated='SickGear updated',
git_updated_text='SickGear updated to commit#: ', git_updated_text='SickGear updated to commit#: ',
test_title='SickGear notification test', test_title='SickGear notification test',
test_body=u'Success testing %s settings from SickGear ʕ•ᴥ•ʔ', test_body='Success testing %s settings from SickGear ʕ•ᴥ•ʔ',
) )
@ -40,7 +40,7 @@ class BaseNotifier(object):
return 'https://raw.githubusercontent.com/SickGear/SickGear/main/gui/slick/images/ico/' + self.sg_logo_file return 'https://raw.githubusercontent.com/SickGear/SickGear/main/gui/slick/images/ico/' + self.sg_logo_file
def _log(self, msg, level=logger.MESSAGE): def _log(self, msg, level=logger.MESSAGE):
logger.log(u'%s: %s' % (self.name, msg), level) logger.log(f'{self.name}: {msg}', level)
def _log_debug(self, msg): def _log_debug(self, msg):
self._log(msg, logger.DEBUG) self._log(msg, logger.DEBUG)
@ -108,7 +108,7 @@ class BaseNotifier(object):
@staticmethod @staticmethod
def _body_only(title, body): def _body_only(title, body):
# don't use title with updates or testing, as only one str is used # don't use title with updates or testing, as only one str is used
return body if 'SickGear' in title else u'%s: %s' % (title, body.replace('#: ', '# ')) return body if 'SickGear' in title else f'{title}: {body.replace("#: ", "# ")}'
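
Review note: the {body.replace("#: ", "# ")} expression above is double-quoted for a reason - before Python 3.12, an expression inside f-string braces must use a different quote style than the enclosing literal. A quick check:

    body = 'SickGear updated to commit#: deadbee'
    text = f'Update: {body.replace("#: ", "# ")}'
    assert text == 'Update: SickGear updated to commit# deadbee'
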
class Notifier(BaseNotifier): class Notifier(BaseNotifier):
@ -136,7 +136,7 @@ class Notifier(BaseNotifier):
self._pre_notify('git_updated', notify_strings['git_updated_text'] + new_version, **kwargs) self._pre_notify('git_updated', notify_strings['git_updated_text'] + new_version, **kwargs)
def _pre_notify(self, notify_string, message, *args, **kwargs): def _pre_notify(self, notify_string, message, *args, **kwargs):
self._log_debug(u'Sending notification "%s"' % (self._body_only(notify_strings[notify_string], message))) self._log_debug(f'Sending notification "{self._body_only(notify_strings[notify_string], message)}"')
try: try:
return self._notify(notify_strings[notify_string], message, *args, **kwargs) return self._notify(notify_strings[notify_string], message, *args, **kwargs)
except (BaseException, Exception): except (BaseException, Exception):


@ -94,7 +94,7 @@ class GrowlNotifier(Notifier):
success = True success = True
except (BaseException, Exception) as e: except (BaseException, Exception) as e:
self._log_warning(u'Unable to send growl to %s:%s - %s' % (opts['host'], opts['port'], ex(e))) self._log_warning(f'Unable to send growl to {opts["host"]}:{opts["port"]} - {ex(e)}')
return success return success


@ -94,7 +94,7 @@ class KodiNotifier(Notifier):
Returns: True if processing succeeded with no issues else False if any issues found Returns: True if processing succeeded with no issues else False if any issues found
""" """
if not sickgear.KODI_HOST: if not sickgear.KODI_HOST:
self._log_warning(u'No Kodi hosts specified, check your settings') self._log_warning('No Kodi hosts specified, check your settings')
return False return False
# either update each host, or only attempt to update until one successful result # either update each host, or only attempt to update until one successful result
@ -108,7 +108,7 @@ class KodiNotifier(Notifier):
response = self._send_json(cur_host, dict(method='Profiles.GetCurrentProfile')) response = self._send_json(cur_host, dict(method='Profiles.GetCurrentProfile'))
if self.response and 401 == self.response.get('status_code'): if self.response and 401 == self.response.get('status_code'):
self._log_debug(u'Failed to authenticate with %s' % cur_host) self._log_debug(f'Failed to authenticate with {cur_host}')
continue continue
if not response: if not response:
self._maybe_log_failed_detection(cur_host) self._maybe_log_failed_detection(cur_host)
@ -117,7 +117,7 @@ class KodiNotifier(Notifier):
if self._send_library_update(cur_host, show_name): if self._send_library_update(cur_host, show_name):
only_first.update(dict(profile=response.get('label') or 'Master', host=cur_host)) only_first.update(dict(profile=response.get('label') or 'Master', host=cur_host))
self._log('Success: profile;' + self._log('Success: profile;' +
u'"%(profile)s" at%(first)s host;%(host)s updated%(show)s%(first_note)s' % only_first) '"%(profile)s" at%(first)s host;%(host)s updated%(show)s%(first_note)s' % only_first)
else: else:
self._maybe_log_failed_detection(cur_host) self._maybe_log_failed_detection(cur_host)
result += 1 result += 1
@ -148,10 +148,10 @@ class KodiNotifier(Notifier):
failed_msg = 'Single show update failed,' failed_msg = 'Single show update failed,'
if sickgear.KODI_UPDATE_FULL: if sickgear.KODI_UPDATE_FULL:
self._log_debug(u'%s falling back to full update' % failed_msg) self._log_debug(f'{failed_msg} falling back to full update')
return __method_update(host) return __method_update(host)
self._log_debug(u'%s consider enabling "Perform full library update" in config/notifications' % failed_msg) self._log_debug(f'{failed_msg} consider enabling "Perform full library update" in config/notifications')
return False return False
############################################################################## ##############################################################################
@ -169,7 +169,7 @@ class KodiNotifier(Notifier):
""" """
if not host: if not host:
self._log_warning(u'No host specified, aborting update') self._log_warning('No host specified, aborting update')
return False return False
args = {} args = {}
@ -198,14 +198,14 @@ class KodiNotifier(Notifier):
""" """
if not host: if not host:
self._log_warning(u'No host specified, aborting update') self._log_warning('No host specified, aborting update')
return False return False
self._log_debug(u'Updating library via HTTP method for host: %s' % host) self._log_debug(f'Updating library via HTTP method for host: {host}')
# if we're doing per-show # if we're doing per-show
if show_name: if show_name:
self._log_debug(u'Updating library via HTTP method for show %s' % show_name) self._log_debug(f'Updating library via HTTP method for show {show_name}')
# noinspection SqlResolve # noinspection SqlResolve
path_sql = 'SELECT path.strPath' \ path_sql = 'SELECT path.strPath' \
@ -223,29 +223,28 @@ class KodiNotifier(Notifier):
# sql used to grab path(s) # sql used to grab path(s)
response = self._send(host, {'command': 'QueryVideoDatabase(%s)' % path_sql}) response = self._send(host, {'command': 'QueryVideoDatabase(%s)' % path_sql})
if not response: if not response:
self._log_debug(u'Invalid response for %s on %s' % (show_name, host)) self._log_debug(f'Invalid response for {show_name} on {host}')
return False return False
try: try:
et = etree.fromstring(quote(response, ':\\/<>')) et = etree.fromstring(quote(response, ':\\/<>'))
except SyntaxError as e: except SyntaxError as e:
self._log_error(u'Unable to parse XML in response: %s' % ex(e)) self._log_error(f'Unable to parse XML in response: {ex(e)}')
return False return False
paths = et.findall('.//field') paths = et.findall('.//field')
if not paths: if not paths:
self._log_debug(u'No valid path found for %s on %s' % (show_name, host)) self._log_debug(f'No valid path found for {show_name} on {host}')
return False return False
for path in paths: for path in paths:
# we do not need it double-encoded, gawd this is dumb # we do not need it double-encoded, gawd this is dumb
un_enc_path = decode_str(unquote(path.text), sickgear.SYS_ENCODING) un_enc_path = decode_str(unquote(path.text), sickgear.SYS_ENCODING)
self._log_debug(u'Updating %s on %s at %s' % (show_name, host, un_enc_path)) self._log_debug(f'Updating {show_name} on {host} at {un_enc_path}')
if not self._send( if not self._send(
host, dict(command='ExecBuiltIn', parameter='Kodi.updatelibrary(video, %s)' % un_enc_path)): host, dict(command='ExecBuiltIn', parameter='Kodi.updatelibrary(video, %s)' % un_enc_path)):
self._log_error(u'Update of show directory failed for %s on %s at %s' self._log_error(f'Update of show directory failed for {show_name} on {host} at {un_enc_path}')
% (show_name, host, un_enc_path))
return False return False
# sleep for a few seconds just to be sure kodi has a chance to finish each directory # sleep for a few seconds just to be sure kodi has a chance to finish each directory
@ -253,10 +252,10 @@ class KodiNotifier(Notifier):
time.sleep(5) time.sleep(5)
# do a full update if requested # do a full update if requested
else: else:
self._log_debug(u'Full library update on host: %s' % host) self._log_debug(f'Full library update on host: {host}')
if not self._send(host, dict(command='ExecBuiltIn', parameter='Kodi.updatelibrary(video)')): if not self._send(host, dict(command='ExecBuiltIn', parameter='Kodi.updatelibrary(video)')):
self._log_error(u'Failed full library update on: %s' % host) self._log_error(f'Failed full library update on: {host}')
return False return False
return True return True
@ -277,7 +276,7 @@ class KodiNotifier(Notifier):
result = {} result = {}
if not host: if not host:
self._log_warning(u'No host specified, aborting update') self._log_warning('No host specified, aborting update')
return result return result
if isinstance(command, dict): if isinstance(command, dict):
@ -300,8 +299,8 @@ class KodiNotifier(Notifier):
if not response.get('error'): if not response.get('error'):
return 'OK' == response.get('result') and {'OK': True} or response.get('result') return 'OK' == response.get('result') and {'OK': True} or response.get('result')
self._log_error(u'API error; %s from %s in response to command: %s' self._log_error(f'API error; {json_dumps(response["error"])} from {host}'
% (json_dumps(response['error']), host, json_dumps(command))) f' in response to command: {json_dumps(command)}')
return result return result
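
Review note: for context on the error handling above, Kodi's JSON-RPC API answers with either a result or an error object. _send_json is assumed to wrap the command dict in a standard JSON-RPC 2.0 envelope along these lines (a sketch, not the project's helper):

    import json

    def build_payload(command, request_id=0):
        # command is e.g. dict(method='VideoLibrary.Scan', params={...})
        envelope = dict(jsonrpc='2.0', id=request_id, **command)
        return json.dumps(envelope)

    build_payload(dict(method='Profiles.GetCurrentProfile'))
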
def _update_json(self, host=None, show_name=None): def _update_json(self, host=None, show_name=None):
@ -317,12 +316,12 @@ class KodiNotifier(Notifier):
""" """
if not host: if not host:
self._log_warning(u'No host specified, aborting update') self._log_warning('No host specified, aborting update')
return False return False
# if we're doing per-show # if we're doing per-show
if show_name: if show_name:
self._log_debug(u'JSON library update. Host: %s Show: %s' % (host, show_name)) self._log_debug(f'JSON library update. Host: {host} Show: {show_name}')
# try fetching tvshowid using show_name with a fallback to getting show list # try fetching tvshowid using show_name with a fallback to getting show list
show_name = unquote_plus(show_name) show_name = unquote_plus(show_name)
@ -339,7 +338,7 @@ class KodiNotifier(Notifier):
break break
if not shows: if not shows:
self._log_debug(u'No items in GetTVShows response') self._log_debug('No items in GetTVShows response')
return False return False
tvshowid = -1 tvshowid = -1
@ -354,7 +353,7 @@ class KodiNotifier(Notifier):
# we didn't find the show (exact match), thus revert to just doing a full update if enabled # we didn't find the show (exact match), thus revert to just doing a full update if enabled
if -1 == tvshowid: if -1 == tvshowid:
self._log_debug(u'Doesn\'t have "%s" in it\'s known shows, full library update required' % show_name) self._log_debug(f'Doesn\'t have "{show_name}" in its known shows, full library update required')
return False return False
# lookup tv-show path if we don't already know it # lookup tv-show path if we don't already know it
@ -365,24 +364,24 @@ class KodiNotifier(Notifier):
path = 'tvshowdetails' in response and response['tvshowdetails'].get('file', '') or '' path = 'tvshowdetails' in response and response['tvshowdetails'].get('file', '') or ''
if not len(path): if not len(path):
self._log_warning(u'No valid path found for %s with ID: %s on %s' % (show_name, tvshowid, host)) self._log_warning(f'No valid path found for {show_name} with ID: {tvshowid} on {host}')
return False return False
self._log_debug(u'Updating %s on %s at %s' % (show_name, host, path)) self._log_debug(f'Updating {show_name} on {host} at {path}')
command = dict(method='VideoLibrary.Scan', command = dict(method='VideoLibrary.Scan',
params={'directory': '%s' % json_dumps(path)[1:-1].replace('\\\\', '\\')}) params={'directory': '%s' % json_dumps(path)[1:-1].replace('\\\\', '\\')})
response_scan = self._send_json(host, command) response_scan = self._send_json(host, command)
if not response_scan.get('OK'): if not response_scan.get('OK'):
self._log_error(u'Update of show directory failed for %s on %s at %s response: %s' % self._log_error(f'Update of show directory failed for {show_name} on {host} at {path}'
(show_name, host, path, response_scan)) f' response: {response_scan}')
return False return False
# do a full update if requested # do a full update if requested
else: else:
self._log_debug(u'Full library update on host: %s' % host) self._log_debug(f'Full library update on host: {host}')
response_scan = self._send_json(host, dict(method='VideoLibrary.Scan')) response_scan = self._send_json(host, dict(method='VideoLibrary.Scan'))
if not response_scan.get('OK'): if not response_scan.get('OK'):
self._log_error(u'Failed full library update on: %s response: %s' % (host, response_scan)) self._log_error(f'Failed full library update on: {host} response: {response_scan}')
return False return False
return True return True
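
Review note: the directory parameter above uses a compact escaping trick - json_dumps(path)[1:-1] JSON-escapes the path and strips the surrounding quotes, then .replace('\\\\', '\\') collapses the doubled backslashes JSON introduces on Windows paths, leaving quotes and control characters escaped but backslashes literal. Roughly:

    import json

    path = 'C:\\TV\\Show Name'
    escaped = json.dumps(path)[1:-1]           # quotes stripped, backslashes doubled
    directory = escaped.replace('\\\\', '\\')  # collapse them back to single
    assert directory == path                   # round-trips for a plain path
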
@ -400,7 +399,7 @@ class KodiNotifier(Notifier):
def _maybe_log_failed_detection(self, host, msg='connect to'): def _maybe_log_failed_detection(self, host, msg='connect to'):
self._maybe_log(u'Failed to %s %s, check device(s) and config' % (msg, host), logger.ERROR) self._maybe_log(f'Failed to {msg} {host}, check device(s) and config', logger.ERROR)
def _notify(self, title, body, hosts=None, username=None, password=None, **kwargs): def _notify(self, title, body, hosts=None, username=None, password=None, **kwargs):
""" Internal wrapper for the notify_snatch and notify_download functions """ Internal wrapper for the notify_snatch and notify_download functions
@ -429,20 +428,20 @@ class KodiNotifier(Notifier):
if self.response and 401 == self.response.get('status_code'): if self.response and 401 == self.response.get('status_code'):
success = False success = False
message += ['Fail: Cannot authenticate with %s' % cur_host] message += ['Fail: Cannot authenticate with %s' % cur_host]
self._log_debug(u'Failed to authenticate with %s' % cur_host) self._log_debug(f'Failed to authenticate with {cur_host}')
elif not api_version: elif not api_version:
success = False success = False
message += ['Fail: No supported Kodi found at %s' % cur_host] message += ['Fail: No supported Kodi found at %s' % cur_host]
self._maybe_log_failed_detection(cur_host, 'connect and detect version for') self._maybe_log_failed_detection(cur_host, 'connect and detect version for')
else: else:
if 4 >= api_version: if 4 >= api_version:
self._log_debug(u'Detected %sversion <= 11, using HTTP API' self._log_debug(f'Detected {self.prefix and self.prefix.capitalize() + " " or ""}version <= 11,'
% self.prefix and ' ' + self.prefix.capitalize()) f' using HTTP API')
__method_send = self._send __method_send = self._send
command = dict(command='ExecBuiltIn', command = dict(command='ExecBuiltIn',
parameter='Notification(%s,%s)' % (title, body)) parameter='Notification(%s,%s)' % (title, body))
else: else:
self._log_debug(u'Detected version >= 12, using JSON API') self._log_debug('Detected version >= 12, using JSON API')
__method_send = self._send_json __method_send = self._send_json
command = dict(method='GUI.ShowNotification', params=dict( command = dict(method='GUI.ShowNotification', params=dict(
[('title', title), ('message', body), ('image', self._sg_logo_url)] [('title', title), ('message', body), ('image', self._sg_logo_url)]


@ -44,14 +44,14 @@ def diagnose():
try: try:
bus = dbus.SessionBus() bus = dbus.SessionBus()
except dbus.DBusException as e: except dbus.DBusException as e:
return (u'Error: unable to connect to D-Bus session bus: <code>%s</code>. ' return (f'Error: unable to connect to D-Bus session bus: <code>{cgi.escape(e)}</code>.'
u'Are you running SickGear in a desktop session?') % (cgi.escape(e),) f' Are you running SickGear in a desktop session?')
try: try:
bus.get_object('org.freedesktop.Notifications', bus.get_object('org.freedesktop.Notifications',
'/org/freedesktop/Notifications') '/org/freedesktop/Notifications')
except dbus.DBusException as e: except dbus.DBusException as e:
return (u'Error: there doesn\'t seem to be a notification daemon available: <code>%s</code> ' return (f'Error: there doesn\'t seem to be a notification daemon available: <code>{cgi.escape(e)}</code>.'
u'Try installing notification-daemon or notify-osd.') % (cgi.escape(e),) f' Try installing notification-daemon or notify-osd.')
return 'Error: Unable to send notification.' return 'Error: Unable to send notification.'
@ -71,18 +71,18 @@ class LibnotifyNotifier(Notifier):
# noinspection PyPackageRequirements # noinspection PyPackageRequirements
import pynotify import pynotify
except ImportError: except ImportError:
self._log_error(u'Unable to import pynotify. libnotify notifications won\'t work') self._log_error("Unable to import pynotify. libnotify notifications won't work")
return False return False
try: try:
# noinspection PyPackageRequirements # noinspection PyPackageRequirements
from gi.repository import GObject from gi.repository import GObject
except ImportError: except ImportError:
self._log_error(u'Unable to import GObject from gi.repository. Cannot catch a GError in display') self._log_error('Unable to import GObject from gi.repository. Cannot catch a GError in display')
return False return False
if not pynotify.init('SickGear'): if not pynotify.init('SickGear'):
self._log_error(u'Initialization of pynotify failed. libnotify notifications won\'t work') self._log_error('Initialization of pynotify failed. libnotify notifications won\'t work')
return False return False
self.pynotify = pynotify self.pynotify = pynotify


@ -43,11 +43,11 @@ class NMJNotifier(BaseNotifier):
try: try:
terminal = telnetlib.Telnet(host) terminal = telnetlib.Telnet(host)
except (BaseException, Exception): except (BaseException, Exception):
self._log_warning(u'Unable to get a telnet session to %s' % host) self._log_warning(f'Unable to get a telnet session to {host}')
if result: if result:
# tell the terminal to output the necessary info to the screen so we can search it later # tell the terminal to output the necessary info to the screen so we can search it later
self._log_debug(u'Connected to %s via telnet' % host) self._log_debug(f'Connected to {host} via telnet')
terminal.read_until('sh-3.00# ') terminal.read_until('sh-3.00# ')
terminal.write('cat /tmp/source\n') terminal.write('cat /tmp/source\n')
terminal.write('cat /tmp/netshare\n') terminal.write('cat /tmp/netshare\n')
@ -57,11 +57,11 @@ class NMJNotifier(BaseNotifier):
match = re.search(r'(.+\.db)\r\n?(.+)(?=sh-3.00# cat /tmp/netshare)', tnoutput) match = re.search(r'(.+\.db)\r\n?(.+)(?=sh-3.00# cat /tmp/netshare)', tnoutput)
# if we found the database in the terminal output then save that database to the config # if we found the database in the terminal output then save that database to the config
if not match: if not match:
self._log_warning(u'Could not get current NMJ database on %s, NMJ is probably not running!' % host) self._log_warning(f'Could not get current NMJ database on {host}, NMJ is probably not running!')
else: else:
database = match.group(1) database = match.group(1)
device = match.group(2) device = match.group(2)
self._log_debug(u'Found NMJ database %s on device %s' % (database, device)) self._log_debug(f'Found NMJ database {database} on device {device}')
sickgear.NMJ_DATABASE = database sickgear.NMJ_DATABASE = database
# if the device is a remote host then try to parse the mounting URL and save it to the config # if the device is a remote host then try to parse the mounting URL and save it to the config
if device.startswith('NETWORK_SHARE/'): if device.startswith('NETWORK_SHARE/'):
@ -72,7 +72,7 @@ class NMJNotifier(BaseNotifier):
'but could not get the mounting url') 'but could not get the mounting url')
else: else:
mount = match.group().replace('127.0.0.1', host) mount = match.group().replace('127.0.0.1', host)
self._log_debug(u'Found mounting url on the Popcorn Hour in configuration: %s' % mount) self._log_debug(f'Found mounting url on the Popcorn Hour in configuration: {mount}')
sickgear.NMJ_MOUNT = mount sickgear.NMJ_MOUNT = mount
result = True result = True
@ -96,23 +96,23 @@ class NMJNotifier(BaseNotifier):
database = self._choose(database, sickgear.NMJ_DATABASE) database = self._choose(database, sickgear.NMJ_DATABASE)
mount = self._choose(mount, sickgear.NMJ_MOUNT) mount = self._choose(mount, sickgear.NMJ_MOUNT)
self._log_debug(u'Sending scan command for NMJ ') self._log_debug('Sending scan command for NMJ')
# if a mount URL is provided then attempt to open a handle to that URL # if a mount URL is provided then attempt to open a handle to that URL
if mount: if mount:
try: try:
req = urllib.request.Request(mount) req = urllib.request.Request(mount)
self._log_debug(u'Try to mount network drive via url: %s' % mount) self._log_debug(f'Try to mount network drive via url: {mount}')
http_response_obj = urllib.request.urlopen(req) # PY2 http_response_obj has no `with` context manager http_response_obj = urllib.request.urlopen(req) # PY2 http_response_obj has no `with` context manager
http_response_obj.close() http_response_obj.close()
except IOError as e: except IOError as e:
if hasattr(e, 'reason'): if hasattr(e, 'reason'):
self._log_warning(u'Could not contact Popcorn Hour on host %s: %s' % (host, e.reason)) self._log_warning(f'Could not contact Popcorn Hour on host {host}: {e.reason}')
elif hasattr(e, 'code'): elif hasattr(e, 'code'):
self._log_warning(u'Problem with Popcorn Hour on host %s: %s' % (host, e.code)) self._log_warning(f'Problem with Popcorn Hour on host {host}: {e.code}')
return False return False
except (BaseException, Exception) as e: except (BaseException, Exception) as e:
self._log_error(u'Unknown exception: ' + ex(e)) self._log_error(f'Unknown exception: {ex(e)}')
return False return False
# build up the request URL and parameters # build up the request URL and parameters
@ -123,18 +123,18 @@ class NMJNotifier(BaseNotifier):
# send the request to the server # send the request to the server
try: try:
req = urllib.request.Request(update_url) req = urllib.request.Request(update_url)
self._log_debug(u'Sending scan update command via url: %s' % update_url) self._log_debug(f'Sending scan update command via url: {update_url}')
http_response_obj = urllib.request.urlopen(req) http_response_obj = urllib.request.urlopen(req)
response = http_response_obj.read() response = http_response_obj.read()
http_response_obj.close() http_response_obj.close()
except IOError as e: except IOError as e:
if hasattr(e, 'reason'): if hasattr(e, 'reason'):
self._log_warning(u'Could not contact Popcorn Hour on host %s: %s' % (host, e.reason)) self._log_warning(f'Could not contact Popcorn Hour on host {host}: {e.reason}')
elif hasattr(e, 'code'): elif hasattr(e, 'code'):
self._log_warning(u'Problem with Popcorn Hour on host %s: %s' % (host, e.code)) self._log_warning(f'Problem with Popcorn Hour on host {host}: {e.code}')
return False return False
except (BaseException, Exception) as e: except (BaseException, Exception) as e:
self._log_error(u'Unknown exception: ' + ex(e)) self._log_error(f'Unknown exception: {ex(e)}')
return False return False
# try to parse the resulting XML # try to parse the resulting XML
@ -142,15 +142,15 @@ class NMJNotifier(BaseNotifier):
et = etree.fromstring(response) et = etree.fromstring(response)
result = et.findtext('returnValue') result = et.findtext('returnValue')
except SyntaxError as e: except SyntaxError as e:
self._log_error(u'Unable to parse XML returned from the Popcorn Hour: %s' % ex(e)) self._log_error(f'Unable to parse XML returned from the Popcorn Hour: {ex(e)}')
return False return False
# if the result was a number then consider that an error # if the result was a number, then consider that an error
if 0 < int(result): if 0 < int(result):
self._log_error(u'Popcorn Hour returned an errorcode: %s' % result) self._log_error(f'Popcorn Hour returned an errorcode: {result}')
return False return False
self._log(u'NMJ started background scan') self._log('NMJ started background scan')
return True return True
def _notify(self, host=None, database=None, mount=None, **kwargs): def _notify(self, host=None, database=None, mount=None, **kwargs):


@ -78,7 +78,7 @@ class NMJv2Notifier(BaseNotifier):
result = True result = True
except IOError as e: except IOError as e:
self._log_warning(u'Couldn\'t contact popcorn hour on host %s: %s' % (host, ex(e))) self._log_warning(f'Couldn\'t contact popcorn hour on host {host}: {ex(e)}')
if result: if result:
return '{"message": "Success, NMJ Database found at: %(host)s", "database": "%(database)s"}' % { return '{"message": "Success, NMJ Database found at: %(host)s", "database": "%(database)s"}' % {
@ -100,7 +100,7 @@ class NMJv2Notifier(BaseNotifier):
host = self._choose(host, sickgear.NMJv2_HOST) host = self._choose(host, sickgear.NMJv2_HOST)
self._log_debug(u'Sending scan command for NMJ ') self._log_debug('Sending scan command for NMJ')
# if a host is provided then attempt to open a handle to that URL # if a host is provided then attempt to open a handle to that URL
try: try:
@ -108,11 +108,11 @@ class NMJv2Notifier(BaseNotifier):
url_scandir = '%s%s%s' % (base_url, 'metadata_database?', urlencode( url_scandir = '%s%s%s' % (base_url, 'metadata_database?', urlencode(
dict(arg0='update_scandir', arg1=sickgear.NMJv2_DATABASE, arg2='', arg3='update_all'))) dict(arg0='update_scandir', arg1=sickgear.NMJv2_DATABASE, arg2='', arg3='update_all')))
self._log_debug(u'Scan update command sent to host: %s' % host) self._log_debug(f'Scan update command sent to host: {host}')
url_updatedb = '%s%s%s' % (base_url, 'metadata_database?', urlencode( url_updatedb = '%s%s%s' % (base_url, 'metadata_database?', urlencode(
dict(arg0='scanner_start', arg1=sickgear.NMJv2_DATABASE, arg2='background', arg3=''))) dict(arg0='scanner_start', arg1=sickgear.NMJv2_DATABASE, arg2='background', arg3='')))
self._log_debug(u'Try to mount network drive via url: %s' % host) self._log_debug(f'Try to mount network drive via url: {host}')
prereq = urllib.request.Request(url_scandir) prereq = urllib.request.Request(url_scandir)
req = urllib.request.Request(url_updatedb) req = urllib.request.Request(url_updatedb)
@ -127,24 +127,24 @@ class NMJv2Notifier(BaseNotifier):
response2 = http_response_obj2.read() response2 = http_response_obj2.read()
http_response_obj2.close() http_response_obj2.close()
except IOError as e: except IOError as e:
self._log_warning(u'Couldn\'t contact popcorn hour on host %s: %s' % (host, ex(e))) self._log_warning(f'Couldn\'t contact popcorn hour on host {host}: {ex(e)}')
return False return False
try: try:
et = etree.fromstring(response1) et = etree.fromstring(response1)
result1 = et.findtext('returnValue') result1 = et.findtext('returnValue')
except SyntaxError as e: except SyntaxError as e:
self._log_error(u'Unable to parse XML returned from the Popcorn Hour: update_scandir, %s' % ex(e)) self._log_error(f'Unable to parse XML returned from the Popcorn Hour: update_scandir, {ex(e)}')
return False return False
try: try:
et = etree.fromstring(response2) et = etree.fromstring(response2)
result2 = et.findtext('returnValue') result2 = et.findtext('returnValue')
except SyntaxError as e: except SyntaxError as e:
self._log_error(u'Unable to parse XML returned from the Popcorn Hour: scanner_start, %s' % ex(e)) self._log_error(f'Unable to parse XML returned from the Popcorn Hour: scanner_start, {ex(e)}')
return False return False
# if the result was a number then consider that an error # if the result was a number, then consider that an error
error_codes = ['8', '11', '22', '49', '50', '51', '60'] error_codes = ['8', '11', '22', '49', '50', '51', '60']
error_messages = ['Invalid parameter(s)/argument(s)', error_messages = ['Invalid parameter(s)/argument(s)',
'Invalid database path', 'Invalid database path',
@ -155,15 +155,15 @@ class NMJv2Notifier(BaseNotifier):
'Read only file system'] 'Read only file system']
if 0 < int(result1): if 0 < int(result1):
index = error_codes.index(result1) index = error_codes.index(result1)
self._log_error(u'Popcorn Hour returned an error: %s' % (error_messages[index])) self._log_error(f'Popcorn Hour returned an error: {error_messages[index]}')
return False return False
elif 0 < int(result2): elif 0 < int(result2):
index = error_codes.index(result2) index = error_codes.index(result2)
self._log_error(u'Popcorn Hour returned an error: %s' % (error_messages[index])) self._log_error(f'Popcorn Hour returned an error: {error_messages[index]}')
return False return False
self._log(u'NMJv2 started background scan') self._log('NMJv2 started background scan')
return True return True
def _notify(self, host=None, **kwargs): def _notify(self, host=None, **kwargs):


@ -45,33 +45,33 @@ class PLEXNotifier(Notifier):
""" """
if not host: if not host:
self._log_error(u'No host specified, check your settings') self._log_error('No host specified, check your settings')
return False return False
for key in command: for key in command:
command[key] = command[key].encode('utf-8') command[key] = command[key].encode('utf-8')
enc_command = urlencode(command) enc_command = urlencode(command)
self._log_debug(u'Encoded API command: ' + enc_command) self._log_debug(f'Encoded API command: {enc_command}')
url = 'http://%s/xbmcCmds/xbmcHttp/?%s' % (host, enc_command) url = 'http://%s/xbmcCmds/xbmcHttp/?%s' % (host, enc_command)
try: try:
req = urllib.request.Request(url) req = urllib.request.Request(url)
if password: if password:
req.add_header('Authorization', 'Basic %s' % b64encodestring('%s:%s' % (username, password))) req.add_header('Authorization', 'Basic %s' % b64encodestring('%s:%s' % (username, password)))
self._log_debug(u'Contacting (with auth header) via url: ' + url) self._log_debug(f'Contacting (with auth header) via url: {url}')
else: else:
self._log_debug(u'Contacting via url: ' + url) self._log_debug(f'Contacting via url: {url}')
http_response_obj = urllib.request.urlopen(req) # PY2 http_response_obj has no `with` context manager http_response_obj = urllib.request.urlopen(req) # PY2 http_response_obj has no `with` context manager
result = decode_str(http_response_obj.read(), sickgear.SYS_ENCODING) result = decode_str(http_response_obj.read(), sickgear.SYS_ENCODING)
http_response_obj.close() http_response_obj.close()
self._log_debug(u'HTTP response: ' + result.replace('\n', '')) self._log_debug('HTTP response: ' + result.replace('\n', ''))
return True return True
except (urllib.error.URLError, IOError) as e: except (urllib.error.URLError, IOError) as e:
self._log_warning(u'Couldn\'t contact Plex at ' + url + ' ' + ex(e)) self._log_warning(f'Couldn\'t contact Plex at {url} {ex(e)}')
return False return False
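
Review note: b64encodestring is a SickGear helper; for the Authorization header built above it is assumed to base64-encode a user:password string in the usual HTTP Basic form, e.g.:

    import base64

    def b64encodestring(s):
        # assumed equivalent of the helper used above
        return base64.b64encode(s.encode('utf-8')).decode('ascii')

    header = 'Basic %s' % b64encodestring('user:secret')
    assert header == 'Basic dXNlcjpzZWNyZXQ='
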
@staticmethod @staticmethod
@ -113,7 +113,7 @@ class PLEXNotifier(Notifier):
results = [] results = []
for cur_host in [x.strip() for x in host.split(',')]: for cur_host in [x.strip() for x in host.split(',')]:
cur_host = unquote_plus(cur_host) cur_host = unquote_plus(cur_host)
self._log(u'Sending notification to \'%s\'' % cur_host) self._log(f'Sending notification to \'{cur_host}\'')
result = self._send_to_plex(command, cur_host, username, password) result = self._send_to_plex(command, cur_host, username, password)
results += [self._choose(('%s Plex client ... %s' % (('Successful test notice sent to', results += [self._choose(('%s Plex client ... %s' % (('Successful test notice sent to',
'Failed test for')[not result], cur_host)), result)] 'Failed test for')[not result], cur_host)), result)]
@ -148,7 +148,7 @@ class PLEXNotifier(Notifier):
""" """
host = self._choose(host, sickgear.PLEX_SERVER_HOST) host = self._choose(host, sickgear.PLEX_SERVER_HOST)
if not host: if not host:
msg = u'No Plex Media Server host specified, check your settings' msg = 'No Plex Media Server host specified, check your settings'
self._log_debug(msg) self._log_debug(msg)
return '%sFail: %s' % (('', '<br>')[self._testing], msg) return '%sFail: %s' % (('', '<br>')[self._testing], msg)
@ -159,7 +159,7 @@ class PLEXNotifier(Notifier):
token_arg = None token_arg = None
if username and password: if username and password:
self._log_debug(u'Fetching plex.tv credentials for user: ' + username) self._log_debug('Fetching plex.tv credentials for user: ' + username)
req = urllib.request.Request('https://plex.tv/users/sign_in.xml', data=b'') req = urllib.request.Request('https://plex.tv/users/sign_in.xml', data=b'')
req.add_header('Authorization', 'Basic %s' % b64encodestring('%s:%s' % (username, password))) req.add_header('Authorization', 'Basic %s' % b64encodestring('%s:%s' % (username, password)))
req.add_header('X-Plex-Device-Name', 'SickGear') req.add_header('X-Plex-Device-Name', 'SickGear')
@ -176,10 +176,10 @@ class PLEXNotifier(Notifier):
token_arg = '?X-Plex-Token=' + token token_arg = '?X-Plex-Token=' + token
except urllib.error.URLError as e: except urllib.error.URLError as e:
self._log(u'Error fetching credentials from plex.tv for user %s: %s' % (username, ex(e))) self._log(f'Error fetching credentials from plex.tv for user {username}: {ex(e)}')
except (ValueError, IndexError) as e: except (ValueError, IndexError) as e:
self._log(u'Error parsing plex.tv response: ' + ex(e)) self._log('Error parsing plex.tv response: ' + ex(e))
file_location = location if None is not location else '' if None is ep_obj else ep_obj.location file_location = location if None is not location else '' if None is ep_obj else ep_obj.location
host_validate = self._get_host_list(host, all([token_arg])) host_validate = self._get_host_list(host, all([token_arg]))
@ -198,7 +198,7 @@ class PLEXNotifier(Notifier):
sections = response.findall('.//Directory') sections = response.findall('.//Directory')
if not sections: if not sections:
self._log(u'Plex Media Server not running on: ' + cur_host) self._log('Plex Media Server not running on: ' + cur_host)
hosts_failed.append(cur_host) hosts_failed.append(cur_host)
continue continue
@ -232,17 +232,17 @@ class PLEXNotifier(Notifier):
host_list.append(cur_host) host_list.append(cur_host)
else: else:
hosts_failed.append(cur_host) hosts_failed.append(cur_host)
self._log_error(u'Error updating library section for Plex Media Server: %s' % cur_host) self._log_error(f'Error updating library section for Plex Media Server: {cur_host}')
if len(hosts_failed) == len(host_validate): if len(hosts_failed) == len(host_validate):
self._log(u'No successful Plex host updated') self._log('No successful Plex host updated')
return 'Fail no successful Plex host updated: %s' % ', '.join([host for host in hosts_failed]) return 'Fail no successful Plex host updated: %s' % ', '.join([host for host in hosts_failed])
else: else:
hosts = ', '.join(set(host_list)) hosts = ', '.join(set(host_list))
if len(hosts_match): if len(hosts_match):
self._log(u'Hosts updating where TV section paths match the downloaded show: %s' % hosts) self._log(f'Hosts updating where TV section paths match the downloaded show: {hosts}')
else: else:
self._log(u'Updating all hosts with TV sections: %s' % hosts) self._log(f'Updating all hosts with TV sections: {hosts}')
return '' return ''
hosts = [ hosts = [


@ -52,7 +52,7 @@ class ProwlNotifier(Notifier):
if 200 != response.status: if 200 != response.status:
if 401 == response.status: if 401 == response.status:
result = u'Authentication, %s (bad API key?)' % response.reason result = f'Authentication, {response.reason} (bad API key?)'
else: else:
result = 'Http response code "%s"' % response.status result = 'Http response code "%s"' % response.status


@ -30,7 +30,7 @@ class PushalotNotifier(Notifier):
pushalot_auth_token = self._choose(pushalot_auth_token, sickgear.PUSHALOT_AUTHORIZATIONTOKEN) pushalot_auth_token = self._choose(pushalot_auth_token, sickgear.PUSHALOT_AUTHORIZATIONTOKEN)
self._log_debug(u'Title: %s, Message: %s, API: %s' % (title, body, pushalot_auth_token)) self._log_debug(f'Title: {title}, Message: {body}, API: {pushalot_auth_token}')
http_handler = moves.http_client.HTTPSConnection('pushalot.com') http_handler = moves.http_client.HTTPSConnection('pushalot.com')
@ -49,7 +49,7 @@ class PushalotNotifier(Notifier):
if 200 != response.status: if 200 != response.status:
if 410 == response.status: if 410 == response.status:
result = u'Authentication, %s (bad API key?)' % response.reason result = f'Authentication, {response.reason} (bad API key?)'
else: else:
result = 'Http response code "%s"' % response.status result = 'Http response code "%s"' % response.status


@ -69,7 +69,7 @@ class PushbulletNotifier(Notifier):
result = resp.json()['error']['message'] result = resp.json()['error']['message']
except (BaseException, Exception): except (BaseException, Exception):
result = 'no response' result = 'no response'
self._log_warning(u'%s' % result) self._log_warning(f'{result}')
return self._choose((True, 'Failed to send notification: %s' % result)[bool(result)], not bool(result)) return self._choose((True, 'Failed to send notification: %s' % result)[bool(result)], not bool(result))


@ -66,7 +66,7 @@ class PyTivoNotifier(BaseNotifier):
request_url = 'http://%s/TiVoConnect?%s' % (host, urlencode( request_url = 'http://%s/TiVoConnect?%s' % (host, urlencode(
dict(Command='Push', Container=container, File=file_path, tsn=tsn))) dict(Command='Push', Container=container, File=file_path, tsn=tsn)))
self._log_debug(u'Requesting ' + request_url) self._log_debug(f'Requesting {request_url}')
request = urllib.request.Request(request_url) request = urllib.request.Request(request_url)
@ -76,17 +76,17 @@ class PyTivoNotifier(BaseNotifier):
except urllib.error.HTTPError as e: except urllib.error.HTTPError as e:
if hasattr(e, 'reason'): if hasattr(e, 'reason'):
self._log_error(u'Error, failed to reach a server - ' + e.reason) self._log_error(f'Error, failed to reach a server - {e.reason}')
return False return False
elif hasattr(e, 'code'): elif hasattr(e, 'code'):
self._log_error(u'Error, the server couldn\'t fulfill the request - ' + e.code) self._log_error(f'Error, the server couldn\'t fulfill the request - {e.code}')
return False return False
except (BaseException, Exception) as e: except (BaseException, Exception) as e:
self._log_error(u'Unknown exception: ' + ex(e)) self._log_error(f'Unknown exception: {ex(e)}')
return False return False
self._log(u'Successfully requested transfer of file') self._log('Successfully requested transfer of file')
return True return True


@ -32,11 +32,11 @@ class SynoIndexNotifier(BaseNotifier):
self._move_object(old_file, new_file) self._move_object(old_file, new_file)
def _cmdline_run(self, synoindex_cmd): def _cmdline_run(self, synoindex_cmd):
self._log_debug(u'Executing command ' + str(synoindex_cmd)) self._log_debug(f'Executing command {synoindex_cmd}')
self._log_debug(u'Absolute path to command: ' + os.path.abspath(synoindex_cmd[0])) self._log_debug(f'Absolute path to command: {os.path.abspath(synoindex_cmd[0])}')
try: try:
output, err, exit_status = cmdline_runner(synoindex_cmd) output, err, exit_status = cmdline_runner(synoindex_cmd)
self._log_debug(u'Script result: %s' % output) self._log_debug(f'Script result: {output}')
except (BaseException, Exception) as e: except (BaseException, Exception) as e:
self._log_error('Unable to run synoindex: %s' % ex(e)) self._log_error('Unable to run synoindex: %s' % ex(e))
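
Review note: cmdline_runner is a SickGear helper; the call above (and the Synology notifier below) assumes it returns an (output, err, exit_status) triple. A rough subprocess-based equivalent (a sketch, not the project's implementation):

    import subprocess

    def cmdline_runner(cmd):
        # run cmd, capture text output, return (stdout, stderr, exit status)
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                universal_newlines=True)
        out, err = proc.communicate()
        return out, err, proc.returncode
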


@ -27,11 +27,11 @@ class SynologyNotifier(Notifier):
def _notify(self, title, body, **kwargs): def _notify(self, title, body, **kwargs):
synodsmnotify_cmd = ['/usr/syno/bin/synodsmnotify', '@administrators', title, body] synodsmnotify_cmd = ['/usr/syno/bin/synodsmnotify', '@administrators', title, body]
self._log(u'Executing command ' + str(synodsmnotify_cmd)) self._log(f'Executing command {synodsmnotify_cmd}')
self._log_debug(u'Absolute path to command: ' + os.path.abspath(synodsmnotify_cmd[0])) self._log_debug(f'Absolute path to command: {os.path.abspath(synodsmnotify_cmd[0])}')
try: try:
output, err, exit_status = cmdline_runner(synodsmnotify_cmd) output, err, exit_status = cmdline_runner(synodsmnotify_cmd)
self._log_debug(u'Script result: %s' % output) self._log_debug(f'Script result: {output}')
except (BaseException, Exception) as e: except (BaseException, Exception) as e:
self._log('Unable to run synodsmnotify: %s' % ex(e)) self._log('Unable to run synodsmnotify: %s' % ex(e))

View file

@ -40,8 +40,8 @@ class TelegramNotifier(Notifier):
access_token = self._choose(access_token, sickgear.TELEGRAM_ACCESS_TOKEN) access_token = self._choose(access_token, sickgear.TELEGRAM_ACCESS_TOKEN)
cid = self._choose(chatid, sickgear.TELEGRAM_CHATID) cid = self._choose(chatid, sickgear.TELEGRAM_CHATID)
try: try:
msg = self._body_only(('' if not title else u'<b>%s</b>' % title), body) msg = self._body_only(('' if not title else f'<b>{title}</b>'), body)
msg = msg.replace(u'<b>%s</b>: ' % title, u'<b>%s:</b>\r\n' % ('SickGear ' + title, title)[use_icon]) msg = msg.replace(f'<b>{title}</b>: ', f'<b>{("SickGear " + title, title)[use_icon]}:</b>\r\n')
# HTML spaces (&nbsp;) and tabs (&emsp;) aren't supported # HTML spaces (&nbsp;) and tabs (&emsp;) aren't supported
# See https://core.telegram.org/bots/api#html-style # See https://core.telegram.org/bots/api#html-style
msg = re.sub('(?i)&nbsp;?', ' ', msg) msg = re.sub('(?i)&nbsp;?', ' ', msg)
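A small sketch of the entity scrub above, assuming the elided neighbouring line treats `&emsp;` the same way; the message text and the em-space replacement width are illustrative:

import re

msg = '<b>SickGear Show:</b>\r\n1x01&nbsp;Downloaded'
msg = re.sub('(?i)&nbsp;?', ' ', msg)     # Telegram HTML rejects &nbsp;
msg = re.sub('(?i)&emsp;?', '    ', msg)  # assumed replacement for &emsp;
print(msg)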

View file

@ -102,26 +102,26 @@ class XBMCNotifier(Notifier):
""" """
self._log(u'Sending request to update library for host: "%s"' % host) self._log(f'Sending request to update library for host: "{host}"')
xbmcapi = self._get_xbmc_version(host, sickgear.XBMC_USERNAME, sickgear.XBMC_PASSWORD) xbmcapi = self._get_xbmc_version(host, sickgear.XBMC_USERNAME, sickgear.XBMC_PASSWORD)
if xbmcapi: if xbmcapi:
if 4 >= xbmcapi: if 4 >= xbmcapi:
# try to update for just the show, if it fails, do full update if enabled # try to update for just the show, if it fails, do full update if enabled
if not self._update_library_http(host, show_name) and sickgear.XBMC_UPDATE_FULL: if not self._update_library_http(host, show_name) and sickgear.XBMC_UPDATE_FULL:
self._log_warning(u'Single show update failed, falling back to full update') self._log_warning('Single show update failed, falling back to full update')
return self._update_library_http(host) return self._update_library_http(host)
else: else:
return True return True
else: else:
# try to update for just the show, if it fails, do full update if enabled # try to update for just the show, if it fails, do full update if enabled
if not self._update_library_json(host, show_name) and sickgear.XBMC_UPDATE_FULL: if not self._update_library_json(host, show_name) and sickgear.XBMC_UPDATE_FULL:
self._log_warning(u'Single show update failed, falling back to full update') self._log_warning('Single show update failed, falling back to full update')
return self._update_library_json(host) return self._update_library_json(host)
else: else:
return True return True
self._log_debug(u'Failed to detect version for "%s", check configuration and try again' % host) self._log_debug(f'Failed to detect version for "{host}", check configuration and try again')
return False return False
# ############################################################################# # #############################################################################
@ -142,7 +142,7 @@ class XBMCNotifier(Notifier):
""" """
if not host: if not host:
self._log_debug(u'No host passed, aborting update') self._log_debug('No host passed, aborting update')
return False return False
username = self._choose(username, sickgear.XBMC_USERNAME) username = self._choose(username, sickgear.XBMC_USERNAME)
@ -152,7 +152,7 @@ class XBMCNotifier(Notifier):
command[key] = command[key].encode('utf-8') command[key] = command[key].encode('utf-8')
enc_command = urlencode(command) enc_command = urlencode(command)
self._log_debug(u'Encoded API command: ' + enc_command) self._log_debug('Encoded API command: ' + enc_command)
url = 'http://%s/xbmcCmds/xbmcHttp/?%s' % (host, enc_command) url = 'http://%s/xbmcCmds/xbmcHttp/?%s' % (host, enc_command)
try: try:
@ -160,19 +160,19 @@ class XBMCNotifier(Notifier):
# if we have a password, use authentication # if we have a password, use authentication
if password: if password:
req.add_header('Authorization', 'Basic %s' % b64encodestring('%s:%s' % (username, password))) req.add_header('Authorization', 'Basic %s' % b64encodestring('%s:%s' % (username, password)))
self._log_debug(u'Contacting (with auth header) via url: ' + url) self._log_debug(f'Contacting (with auth header) via url: {url}')
else: else:
self._log_debug(u'Contacting via url: ' + url) self._log_debug(f'Contacting via url: {url}')
http_response_obj = urllib.request.urlopen(req) # PY2 http_response_obj has no `with` context manager http_response_obj = urllib.request.urlopen(req) # PY2 http_response_obj has no `with` context manager
result = decode_str(http_response_obj.read(), sickgear.SYS_ENCODING) result = decode_str(http_response_obj.read(), sickgear.SYS_ENCODING)
http_response_obj.close() http_response_obj.close()
self._log_debug(u'HTTP response: ' + result.replace('\n', '')) self._log_debug('HTTP response: ' + result.replace('\n', ''))
return result return result
except (urllib.error.URLError, IOError) as e: except (urllib.error.URLError, IOError) as e:
self._log_warning(u'Couldn\'t contact HTTP at %s %s' % (url, ex(e))) self._log_warning(f'Couldn\'t contact HTTP at {url} {ex(e)}')
return False return False
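For reference, a sketch of the Basic auth header built above, assuming `b64encodestring` wraps standard base64 (the stdlib equivalent is shown, with placeholder credentials):

import base64
import urllib.request

username, password = 'xbmc', 'secret'  # placeholders
req = urllib.request.Request('http://localhost:8080/xbmcCmds/xbmcHttp/?command=Help')
token = base64.b64encode(('%s:%s' % (username, password)).encode('utf-8')).decode('ascii')
req.add_header('Authorization', 'Basic %s' % token)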
def _update_library_http(self, host=None, show_name=None): def _update_library_http(self, host=None, show_name=None):
@ -191,14 +191,14 @@ class XBMCNotifier(Notifier):
""" """
if not host: if not host:
self._log_debug(u'No host passed, aborting update') self._log_debug('No host passed, aborting update')
return False return False
self._log_debug(u'Updating XBMC library via HTTP method for host: ' + host) self._log_debug('Updating XBMC library via HTTP method for host: ' + host)
# if we're doing per-show # if we're doing per-show
if show_name: if show_name:
self._log_debug(u'Updating library via HTTP method for show ' + show_name) self._log_debug('Updating library via HTTP method for show ' + show_name)
# noinspection SqlResolve # noinspection SqlResolve
path_sql = 'select path.strPath' \ path_sql = 'select path.strPath' \
@ -224,30 +224,30 @@ class XBMCNotifier(Notifier):
self._send_to_xbmc(reset_command, host) self._send_to_xbmc(reset_command, host)
if not sql_xml: if not sql_xml:
self._log_debug(u'Invalid response for ' + show_name + ' on ' + host) self._log_debug('Invalid response for ' + show_name + ' on ' + host)
return False return False
enc_sql_xml = quote(sql_xml, ':\\/<>') enc_sql_xml = quote(sql_xml, ':\\/<>')
try: try:
et = etree.fromstring(enc_sql_xml) et = etree.fromstring(enc_sql_xml)
except SyntaxError as e: except SyntaxError as e:
self._log_error(u'Unable to parse XML response: ' + ex(e)) self._log_error(f'Unable to parse XML response: {ex(e)}')
return False return False
paths = et.findall('.//field') paths = et.findall('.//field')
if not paths: if not paths:
self._log_debug(u'No valid paths found for ' + show_name + ' on ' + host) self._log_debug('No valid paths found for ' + show_name + ' on ' + host)
return False return False
for path in paths: for path in paths:
# we do not need it double-encoded, gawd this is dumb # we do not need it double-encoded, gawd this is dumb
un_enc_path = decode_str(unquote(path.text), sickgear.SYS_ENCODING) un_enc_path = decode_str(unquote(path.text), sickgear.SYS_ENCODING)
self._log_debug(u'Updating ' + show_name + ' on ' + host + ' at ' + un_enc_path) self._log_debug('Updating ' + show_name + ' on ' + host + ' at ' + un_enc_path)
update_command = dict(command='ExecBuiltIn', parameter='XBMC.updatelibrary(video, %s)' % un_enc_path) update_command = dict(command='ExecBuiltIn', parameter='XBMC.updatelibrary(video, %s)' % un_enc_path)
request = self._send_to_xbmc(update_command, host) request = self._send_to_xbmc(update_command, host)
if not request: if not request:
self._log_error(u'Update of show directory failed on ' + show_name self._log_error('Update of show directory failed on ' + show_name
+ ' on ' + host + ' at ' + un_enc_path) + ' on ' + host + ' at ' + un_enc_path)
return False return False
# sleep for a few seconds just to be sure xbmc has a chance to finish each directory # sleep for a few seconds just to be sure xbmc has a chance to finish each directory
@ -255,12 +255,12 @@ class XBMCNotifier(Notifier):
time.sleep(5) time.sleep(5)
# do a full update if requested # do a full update if requested
else: else:
self._log(u'Doing full library update on host: ' + host) self._log('Doing full library update on host: ' + host)
update_command = {'command': 'ExecBuiltIn', 'parameter': 'XBMC.updatelibrary(video)'} update_command = {'command': 'ExecBuiltIn', 'parameter': 'XBMC.updatelibrary(video)'}
request = self._send_to_xbmc(update_command, host) request = self._send_to_xbmc(update_command, host)
if not request: if not request:
self._log_error(u'Full Library update failed on: ' + host) self._log_error('Full Library update failed on: ' + host)
return False return False
return True return True
@ -284,14 +284,14 @@ class XBMCNotifier(Notifier):
""" """
if not host: if not host:
self._log_debug(u'No host passed, aborting update') self._log_debug('No host passed, aborting update')
return False return False
username = self._choose(username, sickgear.XBMC_USERNAME) username = self._choose(username, sickgear.XBMC_USERNAME)
password = self._choose(password, sickgear.XBMC_PASSWORD) password = self._choose(password, sickgear.XBMC_PASSWORD)
command = command.encode('utf-8') command = command.encode('utf-8')
self._log_debug(u'JSON command: ' + command) self._log_debug('JSON command: ' + decode_str(command))
url = 'http://%s/jsonrpc' % host url = 'http://%s/jsonrpc' % host
try: try:
@ -300,28 +300,28 @@ class XBMCNotifier(Notifier):
# if we have a password, use authentication # if we have a password, use authentication
if password: if password:
req.add_header('Authorization', 'Basic %s' % b64encodestring('%s:%s' % (username, password))) req.add_header('Authorization', 'Basic %s' % b64encodestring('%s:%s' % (username, password)))
self._log_debug(u'Contacting (with auth header) via url: ' + url) self._log_debug(f'Contacting (with auth header) via url: {url}')
else: else:
self._log_debug(u'Contacting via url: ' + url) self._log_debug(f'Contacting via url: {url}')
try: try:
http_response_obj = urllib.request.urlopen(req) # PY2 http_response_obj has no `with` context manager http_response_obj = urllib.request.urlopen(req) # PY2 http_response_obj has no `with` context manager
except urllib.error.URLError as e: except urllib.error.URLError as e:
self._log_warning(u'Error while trying to retrieve API version for "%s": %s' % (host, ex(e))) self._log_warning(f'Error while trying to retrieve API version for "{host}": {ex(e)}')
return False return False
# parse the json result # parse the json result
try: try:
result = json_load(http_response_obj) result = json_load(http_response_obj)
http_response_obj.close() http_response_obj.close()
self._log_debug(u'JSON response: ' + str(result)) self._log_debug(f'JSON response: {result}')
return result # need to return response for parsing return result # need to return response for parsing
except ValueError: except ValueError:
self._log_warning(u'Unable to decode JSON: ' + http_response_obj) self._log_warning(f'Unable to decode JSON: {http_response_obj}')
return False return False
except IOError as e: except IOError as e:
self._log_warning(u'Couldn\'t contact JSON API at ' + url + ' ' + ex(e)) self._log_warning(f'Couldn\'t contact JSON API at {url} {ex(e)}')
return False return False
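A minimal stdlib sketch of the JSON-RPC round trip `_send_to_xbmc_json` performs; the host is a placeholder and `JSONRPC.Version` is used only as a harmless illustrative method:

import json
import urllib.request

command = '{"jsonrpc":"2.0","method":"JSONRPC.Version","id":1}'
req = urllib.request.Request('http://localhost:8080/jsonrpc',
                             data=command.encode('utf-8'),
                             headers={'Content-Type': 'application/json'})
with urllib.request.urlopen(req) as http_response_obj:
    result = json.load(http_response_obj)
print(result)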
def _update_library_json(self, host=None, show_name=None): def _update_library_json(self, host=None, show_name=None):
@ -340,15 +340,15 @@ class XBMCNotifier(Notifier):
""" """
if not host: if not host:
self._log_debug(u'No host passed, aborting update') self._log_debug('No host passed, aborting update')
return False return False
self._log(u'Updating XBMC library via JSON method for host: ' + host) self._log('Updating XBMC library via JSON method for host: ' + host)
# if we're doing per-show # if we're doing per-show
if show_name: if show_name:
tvshowid = -1 tvshowid = -1
self._log_debug(u'Updating library via JSON method for show ' + show_name) self._log_debug('Updating library via JSON method for show ' + show_name)
# get tvshowid by showName # get tvshowid by showName
shows_command = '{"jsonrpc":"2.0","method":"VideoLibrary.GetTVShows","id":1}' shows_command = '{"jsonrpc":"2.0","method":"VideoLibrary.GetTVShows","id":1}'
@ -357,7 +357,7 @@ class XBMCNotifier(Notifier):
if shows_response and 'result' in shows_response and 'tvshows' in shows_response['result']: if shows_response and 'result' in shows_response and 'tvshows' in shows_response['result']:
shows = shows_response['result']['tvshows'] shows = shows_response['result']['tvshows']
else: else:
self._log_debug(u'No tvshows in TV show list') self._log_debug('No tvshows in TV show list')
return False return False
for show in shows: for show in shows:
@ -370,7 +370,7 @@ class XBMCNotifier(Notifier):
# we didn't find the show (exact match), thus revert to just doing a full update if enabled # we didn't find the show (exact match), thus revert to just doing a full update if enabled
if -1 == tvshowid: if -1 == tvshowid:
self._log_debug(u'Exact show name not matched in TV show list') self._log_debug('Exact show name not matched in TV show list')
return False return False
# lookup tv-show path # lookup tv-show path
@ -379,19 +379,19 @@ class XBMCNotifier(Notifier):
path_response = self._send_to_xbmc_json(path_command, host) path_response = self._send_to_xbmc_json(path_command, host)
path = path_response['result']['tvshowdetails']['file'] path = path_response['result']['tvshowdetails']['file']
self._log_debug(u'Received Show: ' + show_name + ' with ID: ' + str(tvshowid) + ' Path: ' + path) self._log_debug('Received Show: ' + show_name + ' with ID: ' + str(tvshowid) + ' Path: ' + path)
if 1 > len(path): if 1 > len(path):
self._log_warning(u'No valid path found for ' + show_name + ' with ID: ' self._log_warning('No valid path found for ' + show_name + ' with ID: '
+ str(tvshowid) + ' on ' + host) + str(tvshowid) + ' on ' + host)
return False return False
self._log_debug(u'Updating ' + show_name + ' on ' + host + ' at ' + path) self._log_debug('Updating ' + show_name + ' on ' + host + ' at ' + path)
update_command = '{"jsonrpc":"2.0","method":"VideoLibrary.Scan","params":{"directory":%s},"id":1}' % ( update_command = '{"jsonrpc":"2.0","method":"VideoLibrary.Scan","params":{"directory":%s},"id":1}' % (
json_dumps(path)) json_dumps(path))
request = self._send_to_xbmc_json(update_command, host) request = self._send_to_xbmc_json(update_command, host)
if not request: if not request:
self._log_error(u'Update of show directory failed on ' + show_name + ' on ' + host + ' at ' + path) self._log_error('Update of show directory failed on ' + show_name + ' on ' + host + ' at ' + path)
return False return False
# catch if there was an error in the returned request # catch if there was an error in the returned request
@ -399,18 +399,18 @@ class XBMCNotifier(Notifier):
for r in request: for r in request:
if 'error' in r: if 'error' in r:
self._log_error( self._log_error(
u'Error while attempting to update show directory for ' + show_name 'Error while attempting to update show directory for ' + show_name
+ ' on ' + host + ' at ' + path) + ' on ' + host + ' at ' + path)
return False return False
# do a full update if requested # do a full update if requested
else: else:
self._log(u'Doing Full Library update on host: ' + host) self._log('Doing Full Library update on host: ' + host)
update_command = '{"jsonrpc":"2.0","method":"VideoLibrary.Scan","id":1}' update_command = '{"jsonrpc":"2.0","method":"VideoLibrary.Scan","id":1}'
request = self._send_to_xbmc_json(update_command, host, sickgear.XBMC_USERNAME, sickgear.XBMC_PASSWORD) request = self._send_to_xbmc_json(update_command, host, sickgear.XBMC_USERNAME, sickgear.XBMC_PASSWORD)
if not request: if not request:
self._log_error(u'Full Library update failed on: ' + host) self._log_error('Full Library update failed on: ' + host)
return False return False
return True return True
@ -441,12 +441,12 @@ class XBMCNotifier(Notifier):
for cur_host in [x.strip() for x in hosts.split(',')]: for cur_host in [x.strip() for x in hosts.split(',')]:
cur_host = unquote_plus(cur_host) cur_host = unquote_plus(cur_host)
self._log(u'Sending notification to "%s"' % cur_host) self._log(f'Sending notification to "{cur_host}"')
xbmcapi = self._get_xbmc_version(cur_host, username, password) xbmcapi = self._get_xbmc_version(cur_host, username, password)
if xbmcapi: if xbmcapi:
if 4 >= xbmcapi: if 4 >= xbmcapi:
self._log_debug(u'Detected version <= 11, using HTTP API') self._log_debug('Detected version <= 11, using HTTP API')
command = dict(command='ExecBuiltIn', command = dict(command='ExecBuiltIn',
parameter='Notification(' + title.encode('utf-8') + ',' + body.encode('utf-8') + ')') parameter='Notification(' + title + ',' + body + ')')
notify_result = self._send_to_xbmc(command, cur_host, username, password) notify_result = self._send_to_xbmc(command, cur_host, username, password)
@ -454,7 +454,7 @@ class XBMCNotifier(Notifier):
result += [cur_host + ':' + str(notify_result)] result += [cur_host + ':' + str(notify_result)]
success |= 'OK' in notify_result or success success |= 'OK' in notify_result or success
else: else:
self._log_debug(u'Detected version >= 12, using JSON API') self._log_debug('Detected version >= 12, using JSON API')
command = '{"jsonrpc":"2.0","method":"GUI.ShowNotification",' \ command = '{"jsonrpc":"2.0","method":"GUI.ShowNotification",' \
'"params":{"title":"%s","message":"%s", "image": "%s"},"id":1}' % \ '"params":{"title":"%s","message":"%s", "image": "%s"},"id":1}' % \
(title.encode('utf-8'), body.encode('utf-8'), self._sg_logo_url) (title, body, self._sg_logo_url)
@ -464,7 +464,7 @@ class XBMCNotifier(Notifier):
success |= 'OK' in notify_result or success success |= 'OK' in notify_result or success
else: else:
if sickgear.XBMC_ALWAYS_ON or self._testing: if sickgear.XBMC_ALWAYS_ON or self._testing:
self._log_error(u'Failed to detect version for "%s", check configuration and try again' % cur_host) self._log_error(f'Failed to detect version for "{cur_host}", check configuration and try again')
result += [cur_host + ':No response'] result += [cur_host + ':No response']
success = False success = False
@ -488,7 +488,7 @@ class XBMCNotifier(Notifier):
""" """
if not sickgear.XBMC_HOST: if not sickgear.XBMC_HOST:
self._log_debug(u'No hosts specified, check your settings') self._log_debug('No hosts specified, check your settings')
return False return False
# either update each host, or only attempt to update until one successful result # either update each host, or only attempt to update until one successful result
@ -496,11 +496,11 @@ class XBMCNotifier(Notifier):
for host in [x.strip() for x in sickgear.XBMC_HOST.split(',')]: for host in [x.strip() for x in sickgear.XBMC_HOST.split(',')]:
if self._send_update_library(host, show_name): if self._send_update_library(host, show_name):
if sickgear.XBMC_UPDATE_ONLYFIRST: if sickgear.XBMC_UPDATE_ONLYFIRST:
self._log_debug(u'Successfully updated "%s", stopped sending update library commands' % host) self._log_debug(f'Successfully updated "{host}", stopped sending update library commands')
return True return True
else: else:
if sickgear.XBMC_ALWAYS_ON: if sickgear.XBMC_ALWAYS_ON:
self._log_error(u'Failed to detect version for "%s", check configuration and try again' % host) self._log_error(f'Failed to detect version for "{host}", check configuration and try again')
result = result + 1 result = result + 1
# needed for the 'update xbmc' submenu command # needed for the 'update xbmc' submenu command

View file

@ -73,7 +73,7 @@ def _get_season_nzbs(name, url_data, season):
try: try:
show_xml = etree.ElementTree(etree.XML(url_data)) show_xml = etree.ElementTree(etree.XML(url_data))
except SyntaxError: except SyntaxError:
logger.log(u'Unable to parse the XML of %s, not splitting it' % name, logger.ERROR) logger.error(f'Unable to parse the XML of {name}, not splitting it')
return {}, '' return {}, ''
filename = name.replace('.nzb', '') filename = name.replace('.nzb', '')
@ -86,7 +86,7 @@ def _get_season_nzbs(name, url_data, season):
if scene_name_match: if scene_name_match:
show_name, quality_section = scene_name_match.groups() show_name, quality_section = scene_name_match.groups()
else: else:
logger.log('%s - Not a valid season pack scene name. If it\'s a valid one, log a bug.' % name, logger.ERROR) logger.error('%s - Not a valid season pack scene name. If it\'s a valid one, log a bug.' % name)
return {}, '' return {}, ''
regex = r'(%s[\._]S%02d(?:[E0-9]+)\.[\w\._]+)' % (re.escape(show_name), season) regex = r'(%s[\._]S%02d(?:[E0-9]+)\.[\w\._]+)' % (re.escape(show_name), season)
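A quick demonstration of the season-pack regex built above, with an illustrative scene name; `re.escape` keeps the dots in the show name literal:

import re

show_name, season = 'Some.Show', 2
regex = r'(%s[\._]S%02d(?:[E0-9]+)\.[\w\._]+)' % (re.escape(show_name), season)
m = re.search(regex, 'Some.Show.S02E03.720p.HDTV.x264.nzb')
print(m.group(1) if m else 'no match')  # -> Some.Show.S02E03.720p.HDTV.x264.nzb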
@ -116,7 +116,7 @@ def _get_season_nzbs(name, url_data, season):
if isinstance(ext, string_types) \ if isinstance(ext, string_types) \
and re.search(r'^\.(nzb|r\d{2}|rar|7z|zip|par2|vol\d+|nfo|srt|txt|bat|sh|mkv|mp4|avi|wmv)$', ext, and re.search(r'^\.(nzb|r\d{2}|rar|7z|zip|par2|vol\d+|nfo|srt|txt|bat|sh|mkv|mp4|avi|wmv)$', ext,
flags=re.I): flags=re.I):
logger.log('Unable to split %s into episode nzb\'s' % name, logger.WARNING) logger.warning('Unable to split %s into episode nzb\'s' % name)
return {}, '' return {}, ''
if cur_ep not in ep_files: if cur_ep not in ep_files:
ep_files[cur_ep] = [cur_file] ep_files[cur_ep] = [cur_file]
@ -157,7 +157,7 @@ def _save_nzb(nzb_name, nzb_string):
nzb_fh.write(nzb_string) nzb_fh.write(nzb_string)
except EnvironmentError as e: except EnvironmentError as e:
logger.log(u'Unable to save NZB: ' + ex(e), logger.ERROR) logger.error(f'Unable to save NZB: {ex(e)}')
def _strip_ns(element, ns): def _strip_ns(element, ns):
@ -178,7 +178,7 @@ def split_result(result):
""" """
resp = helpers.get_url(result.url, failure_monitor=False) resp = helpers.get_url(result.url, failure_monitor=False)
if None is resp: if None is resp:
logger.log(u'Unable to load url %s, can\'t download season NZB' % result.url, logger.ERROR) logger.error(f'Unable to load url {result.url}, can\'t download season NZB')
return False return False
# parse the season ep name # parse the season ep name
@ -186,10 +186,10 @@ def split_result(result):
np = NameParser(False, show_obj=result.show_obj) np = NameParser(False, show_obj=result.show_obj)
parse_result = np.parse(result.name) parse_result = np.parse(result.name)
except InvalidNameException: except InvalidNameException:
logger.log(u'Unable to parse the filename %s into a valid episode' % result.name, logger.DEBUG) logger.debug(f'Unable to parse the filename {result.name} into a valid episode')
return False return False
except InvalidShowException: except InvalidShowException:
logger.log(u'Unable to parse the filename %s into a valid show' % result.name, logger.DEBUG) logger.debug(f'Unable to parse the filename {result.name} into a valid show')
return False return False
# bust it up # bust it up
@ -201,35 +201,35 @@ def split_result(result):
for new_nzb in separate_nzbs: for new_nzb in separate_nzbs:
logger.log(u'Split out %s from %s' % (new_nzb, result.name), logger.DEBUG) logger.debug(f'Split out {new_nzb} from {result.name}')
# parse the name # parse the name
try: try:
np = NameParser(False, show_obj=result.show_obj) np = NameParser(False, show_obj=result.show_obj)
parse_result = np.parse(new_nzb) parse_result = np.parse(new_nzb)
except InvalidNameException: except InvalidNameException:
logger.log(u"Unable to parse the filename %s into a valid episode" % new_nzb, logger.DEBUG) logger.debug(f'Unable to parse the filename {new_nzb} into a valid episode')
return False return False
except InvalidShowException: except InvalidShowException:
logger.log(u"Unable to parse the filename %s into a valid show" % new_nzb, logger.DEBUG) logger.debug(f'Unable to parse the filename {new_nzb} into a valid show')
return False return False
# make sure the result is sane # make sure the result is sane
if (None is not parse_result.season_number and season != parse_result.season_number) \ if (None is not parse_result.season_number and season != parse_result.season_number) \
or (None is parse_result.season_number and 1 != season): or (None is parse_result.season_number and 1 != season):
logger.log(u'Found %s inside %s but it doesn\'t seem to belong to the same season, ignoring it' logger.warning(f'Found {new_nzb} inside {result.name} but it doesn\'t seem to belong to the same season,'
% (new_nzb, result.name), logger.WARNING) f' ignoring it')
continue continue
elif 0 == len(parse_result.episode_numbers): elif 0 == len(parse_result.episode_numbers):
logger.log(u'Found %s inside %s but it doesn\'t seem to be a valid episode NZB, ignoring it' logger.warning(f'Found {new_nzb} inside {result.name} but it doesn\'t seem to be a valid episode NZB,'
% (new_nzb, result.name), logger.WARNING) f' ignoring it')
continue continue
want_ep = True want_ep = True
for ep_no in parse_result.episode_numbers: for ep_no in parse_result.episode_numbers:
if not result.show_obj.want_episode(season, ep_no, result.quality): if not result.show_obj.want_episode(season, ep_no, result.quality):
logger.log(u'Ignoring result %s because we don\'t want an episode that is %s' logger.debug(f'Ignoring result {new_nzb} because we don\'t want an episode that is'
% (new_nzb, Quality.qualityStrings[result.quality]), logger.DEBUG) f' {Quality.qualityStrings[result.quality]}')
want_ep = False want_ep = False
break break
if not want_ep: if not want_ep:

View file

@ -34,7 +34,7 @@ def test_nzbget(host, use_https, username, password, timeout=300):
result = False result = False
if not host: if not host:
msg = 'No NZBGet host found. Please configure it' msg = 'No NZBGet host found. Please configure it'
logger.log(msg, logger.ERROR) logger.error(msg)
return result, msg, None return result, msg, None
url = 'http%(scheme)s://%(username)s:%(password)s@%(host)s/xmlrpc' % { url = 'http%(scheme)s://%(username)s:%(password)s@%(host)s/xmlrpc' % {
@ -44,24 +44,24 @@ def test_nzbget(host, use_https, username, password, timeout=300):
try: try:
msg = 'Success. Connected' msg = 'Success. Connected'
if rpc_client.writelog('INFO', 'SickGear connected as a test'): if rpc_client.writelog('INFO', 'SickGear connected as a test'):
logger.log(msg, logger.DEBUG) logger.debug(msg)
else: else:
msg += ', but unable to send a message' msg += ', but unable to send a message'
logger.log(msg, logger.ERROR) logger.error(msg)
result = True result = True
logger.log(u'NZBGet URL: %s' % url, logger.DEBUG) logger.debug(f'NZBGet URL: {url}')
except moves.http_client.socket.error: except moves.http_client.socket.error:
msg = 'Please check NZBGet host and port (if it is running). NZBGet is not responding to these values' msg = 'Please check NZBGet host and port (if it is running). NZBGet is not responding to these values'
logger.log(msg, logger.ERROR) logger.error(msg)
except moves.xmlrpc_client.ProtocolError as e: except moves.xmlrpc_client.ProtocolError as e:
if 'Unauthorized' == e.errmsg: if 'Unauthorized' == e.errmsg:
msg = 'NZBGet username or password is incorrect' msg = 'NZBGet username or password is incorrect'
logger.log(msg, logger.ERROR) logger.error(msg)
else: else:
msg = 'Protocol Error: %s' % e.errmsg msg = 'Protocol Error: %s' % e.errmsg
logger.log(msg, logger.ERROR) logger.error(msg)
return result, msg, rpc_client return result, msg, rpc_client
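A hedged sketch of the connection test above, using the Python 3 stdlib modules that `moves.http_client`/`moves.xmlrpc_client` resolve to; the URL embeds placeholder credentials, and `writelog` is the NZBGet API call already used by the code above:

import http.client
import xmlrpc.client

url = 'http://nzbget:tegbzn6789@localhost:6789/xmlrpc'  # placeholder credentials
rpc_client = xmlrpc.client.ServerProxy(url)
try:
    if rpc_client.writelog('INFO', 'SickGear connected as a test'):
        print('Success. Connected')
except http.client.socket.error:
    print('Please check NZBGet host and port (if it is running)')
except xmlrpc.client.ProtocolError as e:
    print('Protocol Error: %s' % e.errmsg)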
@ -114,7 +114,7 @@ def send_nzb(search_result):
return result return result
nzbcontent64 = b64encodestring(data, keep_eol=True) nzbcontent64 = b64encodestring(data, keep_eol=True)
logger.log(u'Sending NZB to NZBGet: %s' % search_result.name) logger.log(f'Sending NZB to NZBGet: {search_result.name}')
try: try:
# Find out if nzbget supports priority (Version 9.0+), old versions beginning with a 0.x will use the old cmd # Find out if nzbget supports priority (Version 9.0+), old versions beginning with a 0.x will use the old cmd
@ -161,11 +161,11 @@ def send_nzb(search_result):
nzbget_prio, False, search_result.url) nzbget_prio, False, search_result.url)
if nzbget_result: if nzbget_result:
logger.log(u'NZB sent to NZBGet successfully', logger.DEBUG) logger.debug('NZB sent to NZBGet successfully')
result = True result = True
else: else:
logger.log(u'NZBGet could not add %s.nzb to the queue' % search_result.name, logger.ERROR) logger.error(f'NZBGet could not add {search_result.name}.nzb to the queue')
except (BaseException, Exception): except (BaseException, Exception):
logger.log(u'Connect Error to NZBGet: could not add %s.nzb to the queue' % search_result.name, logger.ERROR) logger.error(f'Connect Error to NZBGet: could not add {search_result.name}.nzb to the queue')
return result return result

View file

@ -111,7 +111,7 @@ class PostProcessor(object):
""" """
logger_msg = re.sub(r'(?i)<br[\s/]+>\.*', '', message) logger_msg = re.sub(r'(?i)<br[\s/]+>\.*', '', message)
logger_msg = re.sub('(?i)<a[^>]+>([^<]+)</a>', r'\1', logger_msg) logger_msg = re.sub('(?i)<a[^>]+>([^<]+)</a>', r'\1', logger_msg)
logger.log(u'%s' % logger_msg, level) logger.log(f'{logger_msg}', level)
self.log += message + '\n' self.log += message + '\n'
def _check_for_existing_file(self, existing_file): def _check_for_existing_file(self, existing_file):
@ -129,25 +129,24 @@ class PostProcessor(object):
""" """
if not existing_file: if not existing_file:
self._log(u'There is no existing file', logger.DEBUG) self._log('There is no existing file', logger.DEBUG)
return PostProcessor.DOESNT_EXIST return PostProcessor.DOESNT_EXIST
# if the new file exists, return the appropriate code depending on the size # if the new file exists, return the appropriate code depending on the size
if os.path.isfile(existing_file): if os.path.isfile(existing_file):
new_file = u'New file %s<br />.. is ' % self.file_path new_file = f'New file {self.file_path}<br />.. is '
if os.path.getsize(self.file_path) == os.path.getsize(existing_file): if os.path.getsize(self.file_path) == os.path.getsize(existing_file):
self._log(u'%sthe same size as %s' % (new_file, existing_file), logger.DEBUG) self._log(f'{new_file}the same size as {existing_file}', logger.DEBUG)
return PostProcessor.EXISTS_SAME return PostProcessor.EXISTS_SAME
elif os.path.getsize(self.file_path) < os.path.getsize(existing_file): elif os.path.getsize(self.file_path) < os.path.getsize(existing_file):
self._log(u'%ssmaller than %s' % (new_file, existing_file), logger.DEBUG) self._log(f'{new_file}smaller than {existing_file}', logger.DEBUG)
return PostProcessor.EXISTS_LARGER return PostProcessor.EXISTS_LARGER
else: else:
self._log(u'%slarger than %s' % (new_file, existing_file), logger.DEBUG) self._log(f'{new_file}larger than {existing_file}', logger.DEBUG)
return PostProcessor.EXISTS_SMALLER return PostProcessor.EXISTS_SMALLER
else: else:
self._log(u'File doesn\'t exist %s' % existing_file, self._log(f'File doesn\'t exist {existing_file}', logger.DEBUG)
logger.DEBUG)
return PostProcessor.DOESNT_EXIST return PostProcessor.DOESNT_EXIST
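The decision above reduces to a size comparison; a standalone sketch with placeholder paths (both files must exist for `getsize` to succeed):

import os

file_path, existing_file = 'incoming/ep.mkv', 'library/ep.mkv'  # placeholders
if not os.path.isfile(existing_file):
    print('File doesn\'t exist %s' % existing_file)
elif os.path.getsize(file_path) == os.path.getsize(existing_file):
    print('New file is the same size as %s' % existing_file)
elif os.path.getsize(file_path) < os.path.getsize(existing_file):
    print('New file is smaller than %s' % existing_file)
else:
    print('New file is larger than %s' % existing_file)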
@staticmethod @staticmethod
@ -222,7 +221,7 @@ class PostProcessor(object):
file_list = file_list + self.list_associated_files(file_path) file_list = file_list + self.list_associated_files(file_path)
if not file_list: if not file_list:
self._log(u'Not deleting anything because there are no files associated with %s' % file_path, logger.DEBUG) self._log(f'Not deleting anything because there are no files associated with {file_path}', logger.DEBUG)
return return
# delete the file and any other files which we want to delete # delete the file and any other files which we want to delete
@ -234,16 +233,14 @@ class PostProcessor(object):
# File is read-only, so make it writeable # File is read-only, so make it writeable
try: try:
os.chmod(cur_file, stat.S_IWRITE) os.chmod(cur_file, stat.S_IWRITE)
self._log(u'Changed read only permissions to writeable to delete file %s' self._log(f'Changed read only permissions to writeable to delete file {cur_file}', logger.DEBUG)
% cur_file, logger.DEBUG)
except (BaseException, Exception): except (BaseException, Exception):
self._log(u'Cannot change permissions to writeable to delete file: %s' self._log(f'Cannot change permissions to writeable to delete file: {cur_file}', logger.WARNING)
% cur_file, logger.WARNING)
removal_type = helpers.remove_file(cur_file, log_level=logger.DEBUG) removal_type = helpers.remove_file(cur_file, log_level=logger.DEBUG)
if True is not os.path.isfile(cur_file): if True is not os.path.isfile(cur_file):
self._log(u'%s file %s' % (removal_type, cur_file), logger.DEBUG) self._log(f'{removal_type} file {cur_file}', logger.DEBUG)
# do the library update for synoindex # do the library update for synoindex
notifiers.NotifierFactory().get('SYNOINDEX').deleteFile(cur_file) notifiers.NotifierFactory().get('SYNOINDEX').deleteFile(cur_file)
@ -271,7 +268,7 @@ class PostProcessor(object):
""" """
if not action: if not action:
self._log(u'Must provide an action for the combined file operation', logger.ERROR) self._log('Must provide an action for the combined file operation', logger.ERROR)
return return
file_list = [file_path] file_list = [file_path]
@ -281,7 +278,7 @@ class PostProcessor(object):
file_list = file_list + self.list_associated_files(file_path, subtitles_only=True) file_list = file_list + self.list_associated_files(file_path, subtitles_only=True)
if not file_list: if not file_list:
self._log(u'Not moving anything because there are no files associated with %s' % file_path, logger.DEBUG) self._log(f'Not moving anything because there are no files associated with {file_path}', logger.DEBUG)
return return
# create base name with file_path (media_file without .extension) # create base name with file_path (media_file without .extension)
@ -317,7 +314,7 @@ class PostProcessor(object):
subs_new_path = os.path.join(new_path, sickgear.SUBTITLES_DIR) subs_new_path = os.path.join(new_path, sickgear.SUBTITLES_DIR)
dir_exists = helpers.make_dir(subs_new_path) dir_exists = helpers.make_dir(subs_new_path)
if not dir_exists: if not dir_exists:
logger.log(u'Unable to create subtitles folder ' + subs_new_path, logger.ERROR) logger.error(f'Unable to create subtitles folder {subs_new_path}')
else: else:
helpers.chmod_as_parent(subs_new_path) helpers.chmod_as_parent(subs_new_path)
new_file_path = os.path.join(subs_new_path, new_file_name) new_file_path = os.path.join(subs_new_path, new_file_name)
@ -345,15 +342,16 @@ class PostProcessor(object):
:type action_tmpl: :type action_tmpl:
""" """
def _int_move(cur_file_path, new_file_path, success_tmpl=u' %s to %s'): def _int_move(cur_file_path, new_file_path, success_tmpl=' %s to %s'):
try: try:
helpers.move_file(cur_file_path, new_file_path, raise_exceptions=True) helpers.move_file(cur_file_path, new_file_path, raise_exceptions=True)
helpers.chmod_as_parent(new_file_path) helpers.chmod_as_parent(new_file_path)
self._log(u'Moved file from' + (success_tmpl % (cur_file_path, new_file_path)), logger.DEBUG) self._log(f'Moved file from{(success_tmpl % (cur_file_path, new_file_path))}',
logger.DEBUG)
except (IOError, OSError) as e: except (IOError, OSError) as e:
self._log(u'Unable to move file %s<br />.. %s' self._log(f'Unable to move file {success_tmpl % (cur_file_path, new_file_path)}<br>.. {ex(e)}',
% (success_tmpl % (cur_file_path, new_file_path), ex(e)), logger.ERROR) logger.ERROR)
raise e raise e
self._combined_file_operation(file_path, new_path, new_base_name, associated_files, _int_move, self._combined_file_operation(file_path, new_path, new_base_name, associated_files, _int_move,
@ -375,15 +373,16 @@ class PostProcessor(object):
:type action_tmpl: :type action_tmpl:
""" """
def _int_copy(cur_file_path, new_file_path, success_tmpl=u' %s to %s'): def _int_copy(cur_file_path, new_file_path, success_tmpl=' %s to %s'):
try: try:
helpers.copy_file(cur_file_path, new_file_path) helpers.copy_file(cur_file_path, new_file_path)
helpers.chmod_as_parent(new_file_path) helpers.chmod_as_parent(new_file_path)
self._log(u'Copied file from' + (success_tmpl % (cur_file_path, new_file_path)), logger.DEBUG) self._log(f'Copied file from{(success_tmpl % (cur_file_path, new_file_path))}',
logger.DEBUG)
except (IOError, OSError) as e: except (IOError, OSError) as e:
self._log(u'Unable to copy %s<br />.. %s' self._log(f'Unable to copy {success_tmpl % (cur_file_path, new_file_path)}<br>.. {ex(e)}',
% (success_tmpl % (cur_file_path, new_file_path), ex(e)), logger.ERROR) logger.ERROR)
raise e raise e
self._combined_file_operation(file_path, new_path, new_base_name, associated_files, _int_copy, self._combined_file_operation(file_path, new_path, new_base_name, associated_files, _int_copy,
@ -403,15 +402,16 @@ class PostProcessor(object):
:type action_tmpl: :type action_tmpl:
""" """
def _int_hard_link(cur_file_path, new_file_path, success_tmpl=u' %s to %s'): def _int_hard_link(cur_file_path, new_file_path, success_tmpl=' %s to %s'):
try: try:
helpers.hardlink_file(cur_file_path, new_file_path) helpers.hardlink_file(cur_file_path, new_file_path)
helpers.chmod_as_parent(new_file_path) helpers.chmod_as_parent(new_file_path)
self._log(u'Hard linked file from' + (success_tmpl % (cur_file_path, new_file_path)), logger.DEBUG) self._log(f'Hard linked file from{(success_tmpl % (cur_file_path, new_file_path))}',
logger.DEBUG)
except (IOError, OSError) as e: except (IOError, OSError) as e:
self._log(u'Unable to link file %s<br />.. %s' self._log(f'Unable to link file {success_tmpl % (cur_file_path, new_file_path)}<br>.. {ex(e)}',
% (success_tmpl % (cur_file_path, new_file_path), ex(e)), logger.ERROR) logger.ERROR)
raise e raise e
self._combined_file_operation(file_path, new_path, new_base_name, associated_files, _int_hard_link, self._combined_file_operation(file_path, new_path, new_base_name, associated_files, _int_hard_link,
@ -431,16 +431,16 @@ class PostProcessor(object):
:type action_tmpl: :type action_tmpl:
""" """
def _int_move_and_sym_link(cur_file_path, new_file_path, success_tmpl=u' %s to %s'): def _int_move_and_sym_link(cur_file_path, new_file_path, success_tmpl=' %s to %s'):
try: try:
helpers.move_and_symlink_file(cur_file_path, new_file_path) helpers.move_and_symlink_file(cur_file_path, new_file_path)
helpers.chmod_as_parent(new_file_path) helpers.chmod_as_parent(new_file_path)
self._log(u'Moved then symbolic linked file from' + (success_tmpl % (cur_file_path, new_file_path)), self._log(f'Moved then symbolic linked file from{(success_tmpl % (cur_file_path, new_file_path))}',
logger.DEBUG) logger.DEBUG)
except (IOError, OSError) as e: except (IOError, OSError) as e:
self._log(u'Unable to link file %s<br />.. %s' self._log(f'Unable to link file {success_tmpl % (cur_file_path, new_file_path)}<br>.. {ex(e)}',
% (success_tmpl % (cur_file_path, new_file_path), ex(e)), logger.ERROR) logger.ERROR)
raise e raise e
self._combined_file_operation(file_path, new_path, new_base_name, associated_files, _int_move_and_sym_link, self._combined_file_operation(file_path, new_path, new_base_name, associated_files, _int_move_and_sym_link,
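Each `_int_*` helper above follows the same shape: a transfer callable handed to `_combined_file_operation`, which applies it to the media file and every associated file. A simplified sketch of that pattern; the trimmed-down operation loop and names are illustrative:

import os
import shutil

def _combined_file_operation(file_list, new_path, action):
    # apply the chosen transfer callable to every file in the set
    for cur_file in file_list:
        action(cur_file, os.path.join(new_path, os.path.basename(cur_file)))

def _int_copy(cur_file_path, new_file_path, success_tmpl=' %s to %s'):
    shutil.copy(cur_file_path, new_file_path)
    print('Copied file from' + success_tmpl % (cur_file_path, new_file_path))

# _combined_file_operation(['ep.mkv', 'ep.srt'], '/library/show', _int_copy)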
@ -515,9 +515,9 @@ class PostProcessor(object):
self.in_history = True self.in_history = True
to_return = (show_obj, season_number, episode_numbers, quality) to_return = (show_obj, season_number, episode_numbers, quality)
if not show_obj: if not show_obj:
self._log(u'Unknown show, check availability on ShowList page', logger.DEBUG) self._log('Unknown show, check availability on ShowList page', logger.DEBUG)
break break
self._log(u'Found a match in history for %s' % show_obj.name, logger.DEBUG) self._log(f'Found a match in history for {show_obj.name}', logger.DEBUG)
break break
return to_return return to_return
@ -546,7 +546,7 @@ class PostProcessor(object):
:rtype: Tuple[None, None, List, None] or Tuple[sickgear.tv.TVShow, int, List[int], int] :rtype: Tuple[None, None, List, None] or Tuple[sickgear.tv.TVShow, int, List[int], int]
""" """
logger.log(u'Analyzing name ' + repr(name)) logger.log(f'Analyzing name {repr(name)}')
to_return = (None, None, [], None) to_return = (None, None, [], None)
@ -556,8 +556,8 @@ class PostProcessor(object):
# parse the name to break it into show name, season, and episode # parse the name to break it into show name, season, and episode
np = NameParser(resource, convert=True, show_obj=self.show_obj or show_obj) np = NameParser(resource, convert=True, show_obj=self.show_obj or show_obj)
parse_result = np.parse(name) parse_result = np.parse(name)
self._log(u'Parsed %s<br />.. from %s' self._log(f'Parsed {decode_str(str(parse_result), errors="xmlcharrefreplace")}<br>'
% (decode_str(str(parse_result), errors='xmlcharrefreplace'), name), logger.DEBUG) f'.. from {name}', logger.DEBUG)
if parse_result.is_air_by_date and (None is parse_result.season_number or not parse_result.episode_numbers): if parse_result.is_air_by_date and (None is parse_result.season_number or not parse_result.episode_numbers):
season_number = -1 season_number = -1
@ -598,13 +598,16 @@ class PostProcessor(object):
self.release_name = helpers.remove_extension(os.path.basename(parse_result.original_name)) self.release_name = helpers.remove_extension(os.path.basename(parse_result.original_name))
else: else:
logger.log(u'Parse result not sufficient (all following have to be set). will not save release name', for cur_msg in (
logger.DEBUG) 'Parse result not sufficient (all following have to be set). will not save release name',
logger.log(u'Parse result(series_name): ' + str(parse_result.series_name), logger.DEBUG) f'Parse result(series_name): {parse_result.series_name}',
logger.log(u'Parse result(season_number): ' + str(parse_result.season_number), logger.DEBUG) f'Parse result(season_number): {parse_result.season_number}',
logger.log(u'Parse result(episode_numbers): ' + str(parse_result.episode_numbers), logger.DEBUG) f'Parse result(episode_numbers): {parse_result.episode_numbers}',
logger.log(u' or Parse result(air_date): ' + str(parse_result.air_date), logger.DEBUG) f' or Parse result(air_date): {parse_result.air_date}',
logger.log(u'Parse result(release_group): ' + str(parse_result.release_group), logger.DEBUG) f'Parse result(release_group): {parse_result.release_group}'
):
logger.debug(cur_msg)
def _find_info(self, history_only=False): def _find_info(self, history_only=False):
""" """
@ -632,7 +635,7 @@ class PostProcessor(object):
lambda: self._analyze_name(self.file_path), lambda: self._analyze_name(self.file_path),
# try to analyze the dir + file name together as one name # try to analyze the dir + file name together as one name
lambda: self._analyze_name(self.folder_name + u' ' + self.file_name), lambda: self._analyze_name(f'{self.folder_name} {self.file_name}'),
# try to analyze file name with previously parsed show_obj # try to analyze file name with previously parsed show_obj
lambda: self._analyze_name(self.file_name, show_obj=show_obj, rel_grp=rel_grp)], lambda: self._analyze_name(self.file_name, show_obj=show_obj, rel_grp=rel_grp)],
@ -645,7 +648,7 @@ class PostProcessor(object):
try: try:
(try_show_obj, try_season, try_episodes, try_quality) = cur_try() (try_show_obj, try_season, try_episodes, try_quality) = cur_try()
except (InvalidNameException, InvalidShowException) as e: except (InvalidNameException, InvalidShowException) as e:
logger.log(u'Unable to parse, skipping: ' + ex(e), logger.DEBUG) logger.debug(f'Unable to parse, skipping: {ex(e)}')
continue continue
if not try_show_obj: if not try_show_obj:
@ -667,8 +670,8 @@ class PostProcessor(object):
# for air-by-date shows we need to look up the season/episode from database # for air-by-date shows we need to look up the season/episode from database
if -1 == season_number and show_obj and episode_numbers: if -1 == season_number and show_obj and episode_numbers:
self._log(u'Looks like this is an air-by-date or sports show,' self._log('Looks like this is an air-by-date or sports show,'
u' attempting to convert the date to season/episode', logger.DEBUG) ' attempting to convert the date to season/episode', logger.DEBUG)
airdate = episode_numbers[0].toordinal() airdate = episode_numbers[0].toordinal()
my_db = db.DBConnection() my_db = db.DBConnection()
sql_result = my_db.select( sql_result = my_db.select(
@ -681,8 +684,8 @@ class PostProcessor(object):
season_number = int(sql_result[0][0]) season_number = int(sql_result[0][0])
episode_numbers = [int(sql_result[0][1])] episode_numbers = [int(sql_result[0][1])]
else: else:
self._log(u'Unable to find episode with date %s for show %s, skipping' % self._log(f'Unable to find episode with date {episode_numbers[0]} for show {show_obj.tvid_prodid},'
(episode_numbers[0], show_obj.tvid_prodid), logger.DEBUG) f' skipping', logger.DEBUG)
# don't leave dates in the episode list if we can't convert them to real episode numbers # don't leave dates in the episode list if we can't convert them to real episode numbers
episode_numbers = [] episode_numbers = []
continue continue
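A short sketch of the air-by-date conversion above: the parsed date becomes a proleptic Gregorian ordinal, which is then matched against the episode table (query shape and identifiers are illustrative):

import datetime

airdate = datetime.date(2023, 3, 8).toordinal()  # -> 738587
# illustrative query shape; the real lookup goes through db.DBConnection()
sql = ('SELECT season, episode FROM tv_episodes'
       ' WHERE indexer = ? AND showid = ? AND airdate = ?')
params = [1, 12345, airdate]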
@ -697,8 +700,8 @@ class PostProcessor(object):
[show_obj.tvid, show_obj.prodid]) [show_obj.tvid, show_obj.prodid])
if 1 == int(num_seasons_sql_result[0][0]) and None is season_number: if 1 == int(num_seasons_sql_result[0][0]) and None is season_number:
self._log( self._log(
u'No season number found, but this show appears to only have 1 season,' 'No season number found, but this show appears to only have 1 season,'
u' setting season number to 1...', logger.DEBUG) ' setting season number to 1...', logger.DEBUG)
season_number = 1 season_number = 1
if show_obj and season_number and episode_numbers: if show_obj and season_number and episode_numbers:
@ -731,13 +734,13 @@ class PostProcessor(object):
for cur_episode_number in episode_numbers: for cur_episode_number in episode_numbers:
cur_episode_number = int(cur_episode_number) cur_episode_number = int(cur_episode_number)
self._log(u'Retrieving episode object for %sx%s' % (season_number, cur_episode_number), logger.DEBUG) self._log(f'Retrieving episode object for {season_number}x{cur_episode_number}', logger.DEBUG)
# now that we've figured out which episode this file is just load it manually # now that we've figured out which episode this file is just load it manually
try: try:
ep_obj = show_obj.get_episode(season_number, cur_episode_number) ep_obj = show_obj.get_episode(season_number, cur_episode_number)
except exceptions_helper.EpisodeNotFoundException as e: except exceptions_helper.EpisodeNotFoundException as e:
self._log(u'Unable to create episode: ' + ex(e), logger.DEBUG) self._log(f'Unable to create episode: {ex(e)}', logger.DEBUG)
raise exceptions_helper.PostProcessingFailed() raise exceptions_helper.PostProcessingFailed()
# associate all the episodes together under a single root episode # associate all the episodes together under a single root episode
@ -764,9 +767,8 @@ class PostProcessor(object):
if ep_obj.status in common.Quality.SNATCHED_ANY: if ep_obj.status in common.Quality.SNATCHED_ANY:
old_status, ep_quality = common.Quality.split_composite_status(ep_obj.status) old_status, ep_quality = common.Quality.split_composite_status(ep_obj.status)
if common.Quality.UNKNOWN != ep_quality: if common.Quality.UNKNOWN != ep_quality:
self._log( self._log(f'Using "{common.Quality.qualityStrings[ep_quality]}" quality from the old status',
u'Using "%s" quality from the old status' % common.Quality.qualityStrings[ep_quality], logger.DEBUG)
logger.DEBUG)
return ep_quality return ep_quality
# search all possible names for our new quality, in case the file or dir doesn't have it # search all possible names for our new quality, in case the file or dir doesn't have it
@ -780,26 +782,25 @@ class PostProcessor(object):
continue continue
ep_quality = common.Quality.name_quality(cur_name, ep_obj.show_obj.is_anime) ep_quality = common.Quality.name_quality(cur_name, ep_obj.show_obj.is_anime)
quality_log = u' "%s" quality parsed from the %s %s'\ quality_log = f' "{common.Quality.qualityStrings[ep_quality]}" quality parsed from the {thing} {cur_name}'
% (common.Quality.qualityStrings[ep_quality], thing, cur_name)
# if we find a good one then use it # if we find a good one then use it
if common.Quality.UNKNOWN != ep_quality: if common.Quality.UNKNOWN != ep_quality:
self._log(u'Using' + quality_log, logger.DEBUG) self._log(f'Using{quality_log}', logger.DEBUG)
return ep_quality return ep_quality
else: else:
self._log(u'Found' + quality_log, logger.DEBUG) self._log(f'Found{quality_log}', logger.DEBUG)
ep_quality = common.Quality.file_quality(self.file_path) ep_quality = common.Quality.file_quality(self.file_path)
if common.Quality.UNKNOWN != ep_quality: if common.Quality.UNKNOWN != ep_quality:
self._log(u'Using "%s" quality parsed from the metadata file content of %s' self._log(f'Using "{common.Quality.qualityStrings[ep_quality]}" quality parsed'
% (common.Quality.qualityStrings[ep_quality], self.file_name), logger.DEBUG) f' from the metadata file content of {self.file_name}', logger.DEBUG)
return ep_quality return ep_quality
# Try guessing quality from the file name # Try guessing quality from the file name
ep_quality = common.Quality.assume_quality(self.file_name) ep_quality = common.Quality.assume_quality(self.file_name)
self._log(u'Using guessed "%s" quality from the file name %s' self._log(f'Using guessed "{common.Quality.qualityStrings[ep_quality]}" quality'
% (common.Quality.qualityStrings[ep_quality], self.file_name), logger.DEBUG) f' from the file name {self.file_name}', logger.DEBUG)
return ep_quality return ep_quality
@ -822,7 +823,7 @@ class PostProcessor(object):
try: try:
script_cmd = [piece for piece in re.split("( |\\\".*?\\\"|'.*?')", script_name) if piece.strip()] script_cmd = [piece for piece in re.split("( |\\\".*?\\\"|'.*?')", script_name) if piece.strip()]
script_cmd[0] = os.path.abspath(script_cmd[0]) script_cmd[0] = os.path.abspath(script_cmd[0])
self._log(u'Absolute path to script: ' + script_cmd[0], logger.DEBUG) self._log(f'Absolute path to script: {script_cmd[0]}', logger.DEBUG)
script_cmd += [ep_obj.location, self.file_path] script_cmd += [ep_obj.location, self.file_path]
@ -832,7 +833,7 @@ class PostProcessor(object):
str(ep_obj.episode), str(ep_obj.episode),
str(ep_obj.airdate)] str(ep_obj.airdate)]
self._log(u'Executing command ' + str(script_cmd)) self._log(f'Executing command {script_cmd}')
except (BaseException, Exception) as e: except (BaseException, Exception) as e:
self._log('Error creating extra script command: %s' % ex(e), logger.ERROR) self._log('Error creating extra script command: %s' % ex(e), logger.ERROR)
return return
@ -843,10 +844,10 @@ class PostProcessor(object):
self._log('Script result: %s' % output, logger.DEBUG) self._log('Script result: %s' % output, logger.DEBUG)
except OSError as e: except OSError as e:
self._log(u'Unable to run extra_script: ' + ex(e), logger.ERROR) self._log(f'Unable to run extra_script: {ex(e)}', logger.ERROR)
except (BaseException, Exception) as e: except (BaseException, Exception) as e:
self._log(u'Unable to run extra_script: ' + ex(e), logger.ERROR) self._log(f'Unable to run extra_script: {ex(e)}', logger.ERROR)
def _run_extra_scripts(self, ep_obj): def _run_extra_scripts(self, ep_obj):
""" """
@ -881,48 +882,48 @@ class PostProcessor(object):
if not existing_show_path and not sickgear.CREATE_MISSING_SHOW_DIRS: if not existing_show_path and not sickgear.CREATE_MISSING_SHOW_DIRS:
# Show location does not exist, and cannot be created, marking it unsafe to proceed # Show location does not exist, and cannot be created, marking it unsafe to proceed
self._log(u'.. marking it unsafe to proceed because show location does not exist', logger.DEBUG) self._log('.. marking it unsafe to proceed because show location does not exist', logger.DEBUG)
return False return False
# if SickGear snatched this then assume it's safe # if SickGear snatched this then assume it's safe
if ep_obj.status in common.Quality.SNATCHED_ANY: if ep_obj.status in common.Quality.SNATCHED_ANY:
self._log(u'SickGear snatched this episode, marking it safe to replace', logger.DEBUG) self._log('SickGear snatched this episode, marking it safe to replace', logger.DEBUG)
return True return True
old_ep_status, old_ep_quality = common.Quality.split_composite_status(ep_obj.status) old_ep_status, old_ep_quality = common.Quality.split_composite_status(ep_obj.status)
# if old episode is not downloaded/archived then it's safe # if old episode is not downloaded/archived then it's safe
if common.DOWNLOADED != old_ep_status and common.ARCHIVED != old_ep_status: if common.DOWNLOADED != old_ep_status and common.ARCHIVED != old_ep_status:
self._log(u'Existing episode status is not downloaded/archived, marking it safe to replace', logger.DEBUG) self._log('Existing episode status is not downloaded/archived, marking it safe to replace', logger.DEBUG)
return True return True
if common.ARCHIVED == old_ep_status and common.Quality.NONE == old_ep_quality: if common.ARCHIVED == old_ep_status and common.Quality.NONE == old_ep_quality:
self._log(u'Marking it unsafe to replace because the existing episode status is archived', logger.DEBUG) self._log('Marking it unsafe to replace because the existing episode status is archived', logger.DEBUG)
return False return False
# Status downloaded. Quality/ size checks # Status downloaded. Quality/ size checks
# if manual post process option is set to force_replace then it's safe # if manual post process option is set to force_replace then it's safe
if self.force_replace: if self.force_replace:
self._log(u'Force replace existing episode option is enabled, marking it safe to replace', logger.DEBUG) self._log('Force replace existing episode option is enabled, marking it safe to replace', logger.DEBUG)
return True return True
# if the file processed is higher quality than the existing episode then it's safe # if the file processed is higher quality than the existing episode then it's safe
if new_ep_quality > old_ep_quality: if new_ep_quality > old_ep_quality:
if common.Quality.UNKNOWN != new_ep_quality: if common.Quality.UNKNOWN != new_ep_quality:
self._log(u'Existing episode status is not snatched but the episode to process appears to be better' self._log('Existing episode status is not snatched but the episode to process appears to be better'
u' quality than existing episode, marking it safe to replace', logger.DEBUG) ' quality than existing episode, marking it safe to replace', logger.DEBUG)
return True return True
else: else:
self._log(u'Marking it unsafe to replace because an existing episode exists in the database and' self._log('Marking it unsafe to replace because an existing episode exists in the database and'
u' the episode to process has unknown quality', logger.DEBUG) ' the episode to process has unknown quality', logger.DEBUG)
return False return False
existing_file_status = self._check_for_existing_file(ep_obj.location) existing_file_status = self._check_for_existing_file(ep_obj.location)
if PostProcessor.DOESNT_EXIST == existing_file_status \ if PostProcessor.DOESNT_EXIST == existing_file_status \
and (existing_show_path or sickgear.CREATE_MISSING_SHOW_DIRS): and (existing_show_path or sickgear.CREATE_MISSING_SHOW_DIRS):
self._log(u'.. there is no file to replace, marking it safe to continue', logger.DEBUG) self._log('.. there is no file to replace, marking it safe to continue', logger.DEBUG)
return True return True
# if there's an existing downloaded file with same quality, check filesize to decide # if there's an existing downloaded file with same quality, check filesize to decide
@ -946,48 +947,47 @@ class PostProcessor(object):
npr.is_anime, check_is_repack=True) npr.is_anime, check_is_repack=True)
if new_proper_level > cur_proper_level and \ if new_proper_level > cur_proper_level and \
(not is_repack or npr.release_group == ep_obj.release_group): (not is_repack or npr.release_group == ep_obj.release_group):
self._log(u'Proper or repack with same quality, marking it safe to replace', logger.DEBUG) self._log('Proper or repack with same quality, marking it safe to replace', logger.DEBUG)
return True return True
self._log(u'An episode exists in the database with the same quality as the episode to process', self._log('An episode exists in the database with the same quality as the episode to process', logger.DEBUG)
logger.DEBUG)
self._log(u'Checking size of existing file ' + ep_obj.location, logger.DEBUG) self._log(f'Checking size of existing file {ep_obj.location}', logger.DEBUG)
if PostProcessor.EXISTS_SMALLER == existing_file_status: if PostProcessor.EXISTS_SMALLER == existing_file_status:
# File exists and new file is larger, marking it safe to replace # File exists and new file is larger, marking it safe to replace
self._log(u'.. the existing smaller file will be replaced', logger.DEBUG) self._log('.. the existing smaller file will be replaced', logger.DEBUG)
return True return True
elif PostProcessor.EXISTS_LARGER == existing_file_status: elif PostProcessor.EXISTS_LARGER == existing_file_status:
# File exists and new file is smaller, marking it unsafe to replace # File exists and new file is smaller, marking it unsafe to replace
self._log(u'.. marking it unsafe to replace the existing larger file', logger.DEBUG) self._log('.. marking it unsafe to replace the existing larger file', logger.DEBUG)
return False return False
elif PostProcessor.EXISTS_SAME == existing_file_status: elif PostProcessor.EXISTS_SAME == existing_file_status:
# File exists and new file is same size, marking it unsafe to replace # File exists and new file is same size, marking it unsafe to replace
self._log(u'.. marking it unsafe to replace the existing same size file', logger.DEBUG) self._log('.. marking it unsafe to replace the existing same size file', logger.DEBUG)
return False return False
else: else:
self._log(u'Unknown file status for: %s This should never happen, please log this as a bug.' self._log(f'Unknown file status for: {ep_obj.location}'
% ep_obj.location, logger.ERROR) f' This should never happen, please log this as a bug.', logger.ERROR)
return False return False
# if there's an existing file with better quality # if there's an existing file with better quality
if old_ep_quality > new_ep_quality and old_ep_quality != common.Quality.UNKNOWN: if old_ep_quality > new_ep_quality and old_ep_quality != common.Quality.UNKNOWN:
# Episode already exists in database and processed episode has lower quality, marking it unsafe to replace # Episode already exists in database and processed episode has lower quality, marking it unsafe to replace
self._log(u'Marking it unsafe to replace the episode that already exists in database with a file of lower' self._log('Marking it unsafe to replace the episode that already exists in database with a file of lower'
u' quality', logger.DEBUG) ' quality', logger.DEBUG)
return False return False
if self.in_history: if self.in_history:
self._log(u'SickGear snatched this episode, marking it safe to replace', logger.DEBUG) self._log('SickGear snatched this episode, marking it safe to replace', logger.DEBUG)
return True return True
# None of the conditions were met, marking it unsafe to replace # None of the conditions were met, marking it unsafe to replace
self._log(u'Marking it unsafe to replace because no positive condition is met, you may force replace but it' self._log('Marking it unsafe to replace because no positive condition is met, you may force replace but it'
u' would be better to examine the files', logger.DEBUG) ' would be better to examine the files', logger.DEBUG)
return False return False
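A note on the string pattern above: the replacements drop the Python 2 u'' prefix and keep implicit concatenation of adjacent literals for messages that span two source lines. A minimal standalone sketch (illustrative, not repo code) showing both forms yield the same Python 3 str:

old = (u'Marking it unsafe to replace because no positive condition is met, you may force replace but it'
       u' would be better to examine the files')
new = ('Marking it unsafe to replace because no positive condition is met, you may force replace but it'
       ' would be better to examine the files')
# Python 3 still accepts the u'' prefix, but it is redundant: both literals are str
assert old == new and isinstance(old, str)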
def _change_ep_objs(self, show_obj, season_number, episode_numbers, quality): def _change_ep_objs(self, show_obj, season_number, episode_numbers, quality):
@@ -998,7 +998,7 @@ class PostProcessor(object):
for cur_ep_obj in [ep_obj] + ep_obj.related_ep_obj: for cur_ep_obj in [ep_obj] + ep_obj.related_ep_obj:
with cur_ep_obj.lock: with cur_ep_obj.lock:
if self.release_name: if self.release_name:
self._log(u'Found release name ' + self.release_name, logger.DEBUG) self._log(f'Found release name {self.release_name}', logger.DEBUG)
cur_ep_obj.release_name = self.release_name or '' cur_ep_obj.release_name = self.release_name or ''
@@ -1044,7 +1044,7 @@ class PostProcessor(object):
self._log('Successfully processed.', logger.MESSAGE) self._log('Successfully processed.', logger.MESSAGE)
else: else:
self._log('Can\'t figure out what show/episode to process', logger.WARNING) self._log("Can't figure out what show/episode to process", logger.WARNING)
raise exceptions_helper.PostProcessingFailed() raise exceptions_helper.PostProcessingFailed()
def process(self): def process(self):
@@ -1054,16 +1054,16 @@ class PostProcessor(object):
:rtype: bool :rtype: bool
""" """
self._log(u'Processing... %s%s' % (os.path.relpath(self.file_path, self.folder_path), self._log(f'Processing... {os.path.relpath(self.file_path, self.folder_path)}'
(u'<br />.. from nzb %s' % self.nzb_name, u'')[None is self.nzb_name])) f'{(f"<br />.. from nzb {self.nzb_name}", "")[None is self.nzb_name]}')
if os.path.isdir(self.file_path): if os.path.isdir(self.file_path):
self._log(u'Expecting file %s<br />.. is actually a directory, skipping' % self.file_path) self._log(f'Expecting file {self.file_path}<br />.. is actually a directory, skipping')
return False return False
for ignore_file in self.IGNORED_FILESTRINGS: for ignore_file in self.IGNORED_FILESTRINGS:
if ignore_file in self.file_path: if ignore_file in self.file_path:
self._log(u'File %s<br />.. is ignored type, skipping' % self.file_path) self._log(f'File {self.file_path}<br />.. is ignored type, skipping')
return False return False
# reset per-file stuff # reset per-file stuff
@@ -1075,10 +1075,10 @@ class PostProcessor(object):
# if we don't have it then give up # if we don't have it then give up
if not show_obj: if not show_obj:
self._log(u'Must add show to SickGear before trying to post process an episode', logger.WARNING) self._log('Must add show to SickGear before trying to post process an episode', logger.WARNING)
raise exceptions_helper.PostProcessingFailed() raise exceptions_helper.PostProcessingFailed()
elif None is season_number or not episode_numbers: elif None is season_number or not episode_numbers:
self._log(u'Quitting this post process, could not determine what episode this is', logger.DEBUG) self._log('Quitting this post process, could not determine what episode this is', logger.DEBUG)
return False return False
# retrieve/create the corresponding TVEpisode objects # retrieve/create the corresponding TVEpisode objects
@@ -1089,12 +1089,12 @@ class PostProcessor(object):
new_ep_quality = self._get_quality(ep_obj) new_ep_quality = self._get_quality(ep_obj)
else: else:
new_ep_quality = quality new_ep_quality = quality
self._log(u'Using "%s" quality' % common.Quality.qualityStrings[new_ep_quality], logger.DEBUG) self._log(f'Using "{common.Quality.qualityStrings[new_ep_quality]}" quality', logger.DEBUG)
# see if it's safe to replace existing episode (is download snatched, PROPER, better quality) # see if it's safe to replace existing episode (is download snatched, PROPER, better quality)
if not self._safe_replace(ep_obj, new_ep_quality): if not self._safe_replace(ep_obj, new_ep_quality):
# if it's not safe to replace, stop here # if it's not safe to replace, stop here
self._log(u'Quitting this post process', logger.DEBUG) self._log('Quitting this post process', logger.DEBUG)
return False return False
# delete the existing file (and company) # delete the existing file (and company)
@@ -1107,7 +1107,7 @@ class PostProcessor(object):
helpers.delete_empty_folders(os.path.dirname(cur_ep_obj.location), helpers.delete_empty_folders(os.path.dirname(cur_ep_obj.location),
keep_dir=ep_obj.show_obj.location) keep_dir=ep_obj.show_obj.location)
except (OSError, IOError): except (OSError, IOError):
raise exceptions_helper.PostProcessingFailed(u'Unable to delete existing files') raise exceptions_helper.PostProcessingFailed('Unable to delete existing files')
# set the status of the episodes # set the status of the episodes
# for cur_ep_obj in [ep_obj] + ep_obj.related_ep_obj: # for cur_ep_obj in [ep_obj] + ep_obj.related_ep_obj:
@@ -1115,14 +1115,14 @@ class PostProcessor(object):
# if the show directory doesn't exist then make it if allowed # if the show directory doesn't exist then make it if allowed
if not os.path.isdir(ep_obj.show_obj.location) and sickgear.CREATE_MISSING_SHOW_DIRS: if not os.path.isdir(ep_obj.show_obj.location) and sickgear.CREATE_MISSING_SHOW_DIRS:
self._log(u'Show directory does not exist, creating it', logger.DEBUG) self._log('Show directory does not exist, creating it', logger.DEBUG)
try: try:
os.mkdir(ep_obj.show_obj.location) os.mkdir(ep_obj.show_obj.location)
# do the library update for synoindex # do the library update for synoindex
notifiers.NotifierFactory().get('SYNOINDEX').addFolder(ep_obj.show_obj.location) notifiers.NotifierFactory().get('SYNOINDEX').addFolder(ep_obj.show_obj.location)
except (OSError, IOError): except (OSError, IOError):
raise exceptions_helper.PostProcessingFailed(u'Unable to create show directory: ' raise exceptions_helper.PostProcessingFailed(f'Unable to create show directory:'
+ ep_obj.show_obj.location) f' {ep_obj.show_obj.location}')
# get metadata for the show (but not episode because it hasn't been fully processed) # get metadata for the show (but not episode because it hasn't been fully processed)
ep_obj.show_obj.write_metadata(True) ep_obj.show_obj.write_metadata(True)
@@ -1132,7 +1132,7 @@ class PostProcessor(object):
# Just want to keep this consistent for failed handling right now # Just want to keep this consistent for failed handling right now
release_name = show_name_helpers.determine_release_name(self.folder_path, self.nzb_name) release_name = show_name_helpers.determine_release_name(self.folder_path, self.nzb_name)
if None is release_name: if None is release_name:
self._log(u'No snatched release found in history', logger.WARNING) self._log('No snatched release found in history', logger.WARNING)
elif sickgear.USE_FAILED_DOWNLOADS: elif sickgear.USE_FAILED_DOWNLOADS:
failed_history.remove_failed(release_name) failed_history.remove_failed(release_name)
@@ -1144,13 +1144,13 @@ class PostProcessor(object):
except exceptions_helper.ShowDirNotFoundException: except exceptions_helper.ShowDirNotFoundException:
raise exceptions_helper.PostProcessingFailed( raise exceptions_helper.PostProcessingFailed(
u'Unable to post process an episode because the show dir does not exist, quitting') 'Unable to post process an episode because the show dir does not exist, quitting')
self._log(u'Destination folder for this episode is ' + dest_path, logger.DEBUG) self._log(f'Destination folder for this episode is {dest_path}', logger.DEBUG)
# create any folders we need # create any folders we need
if not helpers.make_path(dest_path, syno=True): if not helpers.make_path(dest_path, syno=True):
raise exceptions_helper.PostProcessingFailed(u'Unable to create destination folder: ' + dest_path) raise exceptions_helper.PostProcessingFailed(f'Unable to create destination folder: {dest_path}')
# figure out the base name of the resulting episode file # figure out the base name of the resulting episode file
if sickgear.RENAME_EPISODES: if sickgear.RENAME_EPISODES:
@@ -1174,7 +1174,7 @@ class PostProcessor(object):
while not stop_event.is_set(): while not stop_event.is_set():
stop_event.wait(60) stop_event.wait(60)
webh('.') webh('.')
webh(u'\n') webh('\n')
keepalive_stop = threading.Event() keepalive_stop = threading.Event()
keepalive = threading.Thread(target=keep_alive, args=(self.webhandler, keepalive_stop)) keepalive = threading.Thread(target=keep_alive, args=(self.webhandler, keepalive_stop))
@@ -1185,7 +1185,7 @@ class PostProcessor(object):
'new_base_name': new_base_name, 'new_base_name': new_base_name,
'associated_files': sickgear.MOVE_ASSOCIATED_FILES} 'associated_files': sickgear.MOVE_ASSOCIATED_FILES}
args_cpmv = {'subtitles': sickgear.USE_SUBTITLES and ep_obj.show_obj.subtitles, args_cpmv = {'subtitles': sickgear.USE_SUBTITLES and ep_obj.show_obj.subtitles,
'action_tmpl': u' %s<br />.. to %s'} 'action_tmpl': ' %s<br />.. to %s'}
args_cpmv.update(args_link) args_cpmv.update(args_link)
if self.webhandler: if self.webhandler:
self.webhandler('Processing method is "%s"' % self.process_method) self.webhandler('Processing method is "%s"' % self.process_method)
@@ -1199,10 +1199,10 @@ class PostProcessor(object):
elif 'symlink' == self.process_method: elif 'symlink' == self.process_method:
self._move_and_symlink(**args_link) self._move_and_symlink(**args_link)
else: else:
logger.log(u'Unknown process method: ' + str(self.process_method), logger.ERROR) logger.error(f'Unknown process method: {self.process_method}')
raise exceptions_helper.PostProcessingFailed(u'Unable to move the files to the new location') raise exceptions_helper.PostProcessingFailed('Unable to move the files to the new location')
except (OSError, IOError): except (OSError, IOError):
raise exceptions_helper.PostProcessingFailed(u'Unable to move the files to the new location') raise exceptions_helper.PostProcessingFailed('Unable to move the files to the new location')
finally: finally:
if self.webhandler: if self.webhandler:
# stop the keep_alive # stop the keep_alive
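Several of the converted messages above, e.g. the 'Processing...' log, rely on the codebase's tuple-index conditional, where a boolean selects one of two values. A standalone sketch of the idiom (the nzb_name value is illustrative):

nzb_name = None
# False indexes item 0, True indexes item 1, so a None nzb_name selects the empty string
suffix = (f'<br />.. from nzb {nzb_name}', '')[None is nzb_name]
assert '' == suffix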

View file

@@ -70,7 +70,7 @@ class ProcessTVShow(object):
@property @property
def result(self, pre=True): def result(self, pre=True):
# type: (bool) -> AnyStr # type: (bool) -> AnyStr
return (('<br />', u'\n')[pre]).join(self._output) return (('<br>', '\n')[pre]).join(self._output)
def _buffer(self, text=None): def _buffer(self, text=None):
if None is not text: if None is not text:
@@ -78,7 +78,7 @@ class ProcessTVShow(object):
if self.webhandler: if self.webhandler:
logger_msg = re.sub(r'(?i)<br[\s/]+>', '\n', text) logger_msg = re.sub(r'(?i)<br[\s/]+>', '\n', text)
logger_msg = re.sub('(?i)<a[^>]+>([^<]+)</a>', r'\1', logger_msg) logger_msg = re.sub('(?i)<a[^>]+>([^<]+)</a>', r'\1', logger_msg)
self.webhandler('%s%s' % (logger_msg, u'\n')) self.webhandler('%s%s' % (logger_msg, '\n'))
def _log_helper(self, message, log_level=logger.DEBUG): def _log_helper(self, message, log_level=logger.DEBUG):
""" """
@@ -90,7 +90,7 @@ class ProcessTVShow(object):
""" """
logger_msg = re.sub(r'(?i)<br[\s/]+>\.*', '', message) logger_msg = re.sub(r'(?i)<br[\s/]+>\.*', '', message)
logger_msg = re.sub('(?i)<a[^>]+>([^<]+)</a>', r'\1', logger_msg) logger_msg = re.sub('(?i)<a[^>]+>([^<]+)</a>', r'\1', logger_msg)
logger.log(u'%s' % logger_msg, log_level) logger.log(f'{logger_msg}', log_level)
self._buffer(message) self._buffer(message)
return return
@@ -136,14 +136,14 @@ class ProcessTVShow(object):
try: try:
shutil.rmtree(folder) shutil.rmtree(folder)
except (OSError, IOError) as e: except (OSError, IOError) as e:
logger.log(u'Warning: unable to delete folder: %s: %s' % (folder, ex(e)), logger.WARNING) logger.warning(f'Warning: unable to delete folder: {folder}: {ex(e)}')
return False return False
if os.path.isdir(folder): if os.path.isdir(folder):
logger.log(u'Warning: unable to delete folder: %s' % folder, logger.WARNING) logger.warning(f'Warning: unable to delete folder: {folder}')
return False return False
self._log_helper(u'Deleted folder ' + folder, logger.MESSAGE) self._log_helper(f'Deleted folder {folder}', logger.MESSAGE)
return True return True
def _delete_files(self, process_path, notwanted_files, force=False): def _delete_files(self, process_path, notwanted_files, force=False):
@@ -170,18 +170,18 @@ class ProcessTVShow(object):
file_attribute = os.stat(cur_file_path)[0] file_attribute = os.stat(cur_file_path)[0]
if not file_attribute & stat.S_IWRITE: if not file_attribute & stat.S_IWRITE:
# File is read-only, so make it writeable # File is read-only, so make it writeable
self._log_helper(u'Changing ReadOnly flag for file ' + cur_file) self._log_helper(f'Changing ReadOnly flag for file {cur_file}')
try: try:
os.chmod(cur_file_path, stat.S_IWRITE) os.chmod(cur_file_path, stat.S_IWRITE)
except OSError as e: except OSError as e:
self._log_helper(u'Cannot change permissions of %s: %s' % (cur_file_path, ex(e))) self._log_helper(f'Cannot change permissions of {cur_file_path}: {ex(e)}')
removal_type = helpers.remove_file(cur_file_path) removal_type = helpers.remove_file(cur_file_path)
if os.path.isfile(cur_file_path): if os.path.isfile(cur_file_path):
result = False result = False
else: else:
self._log_helper(u'%s file %s' % (removal_type, cur_file)) self._log_helper(f'{removal_type} file {cur_file}')
return result return result
@@ -209,7 +209,7 @@ class ProcessTVShow(object):
show_obj = helpers.find_show_by_id({int(sql_result[-1]['indexer']): int(sql_result[-1]['showid'])}, show_obj = helpers.find_show_by_id({int(sql_result[-1]['indexer']): int(sql_result[-1]['showid'])},
check_multishow=True) check_multishow=True)
if hasattr(show_obj, 'name'): if hasattr(show_obj, 'name'):
logger.log('Found Show: %s in snatch history for: %s' % (show_obj.name, name), logger.DEBUG) logger.debug('Found Show: %s in snatch history for: %s' % (show_obj.name, name))
except MultipleShowObjectsException: except MultipleShowObjectsException:
show_obj = None show_obj = None
return show_obj return show_obj
@@ -319,19 +319,19 @@ class ProcessTVShow(object):
elif dir_name and sickgear.TV_DOWNLOAD_DIR and os.path.isdir(sickgear.TV_DOWNLOAD_DIR)\ elif dir_name and sickgear.TV_DOWNLOAD_DIR and os.path.isdir(sickgear.TV_DOWNLOAD_DIR)\
and os.path.normpath(dir_name) != os.path.normpath(sickgear.TV_DOWNLOAD_DIR): and os.path.normpath(dir_name) != os.path.normpath(sickgear.TV_DOWNLOAD_DIR):
dir_name = os.path.join(sickgear.TV_DOWNLOAD_DIR, os.path.abspath(dir_name).split(os.path.sep)[-1]) dir_name = os.path.join(sickgear.TV_DOWNLOAD_DIR, os.path.abspath(dir_name).split(os.path.sep)[-1])
self._log_helper(u'SickGear PP Config, completed TV downloads folder: ' + sickgear.TV_DOWNLOAD_DIR) self._log_helper(f'SickGear PP Config, completed TV downloads folder: {sickgear.TV_DOWNLOAD_DIR}')
if dir_name: if dir_name:
self._log_helper(u'Checking folder... ' + dir_name) self._log_helper(f'Checking folder... {dir_name}')
# if we didn't find a real directory then process "failed" or just quit # if we didn't find a real directory then process "failed" or just quit
if not dir_name or not os.path.isdir(dir_name): if not dir_name or not os.path.isdir(dir_name):
if nzb_name and failed: if nzb_name and failed:
self._process_failed(dir_name, nzb_name, show_obj=show_obj) self._process_failed(dir_name, nzb_name, show_obj=show_obj)
else: else:
self._log_helper(u'Unable to figure out what folder to process. ' + self._log_helper('Unable to figure out what folder to process. '
u'If your downloader and SickGear aren\'t on the same PC then make sure ' + 'If your downloader and SickGear aren\'t on the same PC then make sure '
u'you fill out your completed TV download folder in the PP config.') 'you fill out your completed TV download folder in the PP config.')
return self.result return self.result
parent = self.find_parent(dir_name) parent = self.find_parent(dir_name)
@@ -352,13 +352,13 @@ class ProcessTVShow(object):
path, dirs, files = self._get_path_dir_files(dir_name, nzb_name, pp_type) path, dirs, files = self._get_path_dir_files(dir_name, nzb_name, pp_type)
if sickgear.POSTPONE_IF_SYNC_FILES and any(filter(helpers.is_sync_file, files)): if sickgear.POSTPONE_IF_SYNC_FILES and any(filter(helpers.is_sync_file, files)):
self._log_helper(u'Found temporary sync files, skipping post process', logger.ERROR) self._log_helper('Found temporary sync files, skipping post process', logger.ERROR)
return self.result return self.result
if not process_method: if not process_method:
process_method = sickgear.PROCESS_METHOD process_method = sickgear.PROCESS_METHOD
self._log_helper(u'Processing folder... %s' % path) self._log_helper(f'Processing folder... {path}')
work_files = [] work_files = []
joined = self.join(path) joined = self.join(path)
@@ -380,13 +380,13 @@ class ProcessTVShow(object):
work_files += [os.path.join(path, item) for item in rar_content] work_files += [os.path.join(path, item) for item in rar_content]
if 0 < len(files): if 0 < len(files):
self._log_helper(u'Process file%s: %s' % (helpers.maybe_plural(files), str(files))) self._log_helper(f'Process file{helpers.maybe_plural(files)}: {str(files)}')
if 0 < len(video_files): if 0 < len(video_files):
self._log_helper(u'Process video file%s: %s' % (helpers.maybe_plural(video_files), str(video_files))) self._log_helper(f'Process video file{helpers.maybe_plural(video_files)}: {str(video_files)}')
if 0 < len(rar_content): if 0 < len(rar_content):
self._log_helper(u'Process rar content: ' + str(rar_content)) self._log_helper(f'Process rar content: {rar_content}')
if 0 < len(video_in_rar): if 0 < len(video_in_rar):
self._log_helper(u'Process video%s in rar: %s' % (helpers.maybe_plural(video_in_rar), str(video_in_rar))) self._log_helper(f'Process video{helpers.maybe_plural(video_in_rar)} in rar: {str(video_in_rar)}')
# If nzb_name is set and there's more than one videofile in the folder, files will be lost (overwritten). # If nzb_name is set and there's more than one videofile in the folder, files will be lost (overwritten).
nzb_name_original = nzb_name nzb_name_original = nzb_name
@@ -425,8 +425,7 @@ class ProcessTVShow(object):
force, force_replace, use_trash=cleanup, show_obj=show_obj) force, force_replace, use_trash=cleanup, show_obj=show_obj)
except OSError as e: except OSError as e:
logger.log('Batch skipped, %s%s' % logger.warning('Batch skipped, %s%s' % (ex(e), e.filename and (' (file %s)' % e.filename) or ''))
(ex(e), e.filename and (' (file %s)' % e.filename) or ''), logger.WARNING)
# Process video files in TV subdirectories # Process video files in TV subdirectories
for directory in [x for x in dirs if self._validate_dir( for directory in [x for x in dirs if self._validate_dir(
@@ -438,7 +437,7 @@ class ProcessTVShow(object):
for walk_path, walk_dir, files in os.walk(os.path.join(path, directory), topdown=False): for walk_path, walk_dir, files in os.walk(os.path.join(path, directory), topdown=False):
if sickgear.POSTPONE_IF_SYNC_FILES and any(filter(helpers.is_sync_file, files)): if sickgear.POSTPONE_IF_SYNC_FILES and any(filter(helpers.is_sync_file, files)):
self._log_helper(u'Found temporary sync files, skipping post process', logger.ERROR) self._log_helper('Found temporary sync files, skipping post process', logger.ERROR)
return self.result return self.result
parent = self.find_parent(walk_path) parent = self.find_parent(walk_path)
@@ -493,8 +492,7 @@ class ProcessTVShow(object):
self.check_video_filenames(walk_dir, video_pick))) self.check_video_filenames(walk_dir, video_pick)))
except OSError as e: except OSError as e:
logger.log('Batch skipped, %s%s' % logger.warning(f'Batch skipped, {ex(e)}{e.filename and (" (file %s)" % e.filename) or ""}')
(ex(e), e.filename and (' (file %s)' % e.filename) or ''), logger.WARNING)
if process_method in ('hardlink', 'symlink') and video_in_rar: if process_method in ('hardlink', 'symlink') and video_in_rar:
self._delete_files(walk_path, rar_content) self._delete_files(walk_path, rar_content)
@@ -526,12 +524,13 @@ class ProcessTVShow(object):
if self.any_vid_processed: if self.any_vid_processed:
if not self.files_failed: if not self.files_failed:
_bottom_line(u'Successfully processed.', logger.MESSAGE) _bottom_line('Successfully processed.', logger.MESSAGE)
else: else:
_bottom_line(u'Successfully processed at least one video file%s.' % _bottom_line(f'Successfully processed at least one video file'
(', others were skipped', ' and skipped another')[1 == self.files_failed], logger.MESSAGE) f'{(", others were skipped", " and skipped another")[1 == self.files_failed]}.',
logger.MESSAGE)
else: else:
_bottom_line(u'Failed! Did not process any files.', logger.WARNING) _bottom_line('Failed! Did not process any files.', logger.WARNING)
return self.result return self.result
@@ -599,16 +598,16 @@ class ProcessTVShow(object):
:return: success :return: success
:rtype: bool :rtype: bool
""" """
self._log_helper(u'Processing sub dir: ' + dir_name) self._log_helper(f'Processing sub dir: {dir_name}')
if os.path.basename(dir_name).startswith('_FAILED_'): if os.path.basename(dir_name).startswith('_FAILED_'):
self._log_helper(u'The directory name indicates it failed to extract.') self._log_helper('The directory name indicates it failed to extract.')
failed = True failed = True
elif os.path.basename(dir_name).startswith('_UNDERSIZED_'): elif os.path.basename(dir_name).startswith('_UNDERSIZED_'):
self._log_helper(u'The directory name indicates that it was previously rejected for being undersized.') self._log_helper('The directory name indicates that it was previously rejected for being undersized.')
failed = True failed = True
elif os.path.basename(dir_name).upper().startswith('_UNPACK'): elif os.path.basename(dir_name).upper().startswith('_UNPACK'):
self._log_helper(u'The directory name indicates that this release is in the process of being unpacked.') self._log_helper('The directory name indicates that this release is in the process of being unpacked.')
return False return False
if failed: if failed:
@@ -616,7 +615,7 @@ class ProcessTVShow(object):
return False return False
if helpers.is_hidden_folder(dir_name): if helpers.is_hidden_folder(dir_name):
self._log_helper(u'Ignoring hidden folder: ' + dir_name) self._log_helper(f'Ignoring hidden folder: {dir_name}')
return False return False
# make sure the directory isn't inside a show directory # make sure the directory isn't inside a show directory
@@ -626,9 +625,7 @@ class ProcessTVShow(object):
for cur_result in sql_result: for cur_result in sql_result:
if dir_name.lower().startswith(os.path.realpath(cur_result['location']).lower() + os.sep) \ if dir_name.lower().startswith(os.path.realpath(cur_result['location']).lower() + os.sep) \
or dir_name.lower() == os.path.realpath(cur_result['location']).lower(): or dir_name.lower() == os.path.realpath(cur_result['location']).lower():
self._log_helper( self._log_helper('Found an episode that has already been moved to its show dir, skipping', logger.ERROR)
u'Found an episode that has already been moved to its show dir, skipping',
logger.ERROR)
return False return False
# Get the videofile list for the next checks # Get the videofile list for the next checks
@@ -686,16 +683,16 @@ class ProcessTVShow(object):
if sickgear.UNPACK and rar_files: if sickgear.UNPACK and rar_files:
self._log_helper(u'Packed releases detected: ' + str(rar_files)) self._log_helper(f'Packed releases detected: {rar_files}')
for archive in rar_files: for archive in rar_files:
self._log_helper(u'Unpacking archive: ' + archive) self._log_helper(f'Unpacking archive: {archive}')
try: try:
rar_handle = rarfile.RarFile(os.path.join(path, archive)) rar_handle = rarfile.RarFile(os.path.join(path, archive))
except (BaseException, Exception): except (BaseException, Exception):
self._log_helper(u'Failed to open archive: %s' % archive, logger.ERROR) self._log_helper(f'Failed to open archive: {archive}', logger.ERROR)
self._set_process_success(False) self._set_process_success(False)
continue continue
try: try:
@@ -704,8 +701,7 @@ class ProcessTVShow(object):
for file_in_archive in [os.path.basename(x.filename) for file_in_archive in [os.path.basename(x.filename)
for x in rar_handle.infolist() if not x.is_dir()]: for x in rar_handle.infolist() if not x.is_dir()]:
if self._already_postprocessed(path, file_in_archive, force): if self._already_postprocessed(path, file_in_archive, force):
self._log_helper( self._log_helper(f'Archive file already processed, extraction skipped: {file_in_archive}')
u'Archive file already processed, extraction skipped: ' + file_in_archive)
skip_file = True skip_file = True
break break
@@ -719,14 +715,14 @@ class ProcessTVShow(object):
renamed = self.cleanup_names(path, rar_content) renamed = self.cleanup_names(path, rar_content)
cur_unpacked = rar_content if not renamed else \ cur_unpacked = rar_content if not renamed else \
(list(set(rar_content) - set(iterkeys(renamed))) + list(renamed.values())) (list(set(rar_content) - set(iterkeys(renamed))) + list(renamed.values()))
self._log_helper(u'Unpacked content: [u\'%s\']' % '\', u\''.join(map(text_type, cur_unpacked))) self._log_helper('Unpacked content: ["%s"]' % '", "'.join(map(text_type, cur_unpacked)))
unpacked_files += cur_unpacked unpacked_files += cur_unpacked
except (rarfile.PasswordRequired, rarfile.RarWrongPassword): except (rarfile.PasswordRequired, rarfile.RarWrongPassword):
self._log_helper(u'Failed to unpack archive PasswordRequired: %s' % archive, logger.ERROR) self._log_helper(f'Failed to unpack archive PasswordRequired: {archive}', logger.ERROR)
self._set_process_success(False) self._set_process_success(False)
self.fail_detected = True self.fail_detected = True
except (BaseException, Exception): except (BaseException, Exception):
self._log_helper(u'Failed to unpack archive: %s' % archive, logger.ERROR) self._log_helper(f'Failed to unpack archive: {archive}', logger.ERROR)
self._set_process_success(False) self._set_process_success(False)
finally: finally:
rar_handle.close() rar_handle.close()
@@ -738,11 +734,11 @@ class ProcessTVShow(object):
try: try:
rar_handle = rarfile.RarFile(os.path.join(path, archive)) rar_handle = rarfile.RarFile(os.path.join(path, archive))
except (BaseException, Exception): except (BaseException, Exception):
self._log_helper(u'Failed to open archive: %s' % archive, logger.ERROR) self._log_helper(f'Failed to open archive: {archive}', logger.ERROR)
continue continue
try: try:
if rar_handle.needs_password(): if rar_handle.needs_password():
self._log_helper(u'Failed to unpack archive PasswordRequired: %s' % archive, logger.ERROR) self._log_helper(f'Failed to unpack archive PasswordRequired: {archive}', logger.ERROR)
self._set_process_success(False) self._set_process_success(False)
self.failure_detected = True self.failure_detected = True
rar_handle.close() rar_handle.close()
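The unpack hunks above open each archive via rarfile and treat a required password as a failure. A condensed standalone sketch of that pre-check, using only the rarfile calls already present in the code (the helper name and path handling are illustrative):

import rarfile

def archive_needs_password(path):
    # None mirrors the 'Failed to open archive' branch; True/False is the password check
    try:
        rar_handle = rarfile.RarFile(path)
    except (BaseException, Exception):
        return None
    try:
        return rar_handle.needs_password()
    finally:
        rar_handle.close()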
@@ -813,7 +809,7 @@ class ProcessTVShow(object):
is_renamed[os.path.relpath(file_path, directory)] = \ is_renamed[os.path.relpath(file_path, directory)] = \
os.path.relpath(new_filename + file_extension, directory) os.path.relpath(new_filename + file_extension, directory)
except OSError as _e: except OSError as _e:
logger.log('Error unable to rename file "%s" because %s' % (cur_filename, ex(_e)), logger.ERROR) logger.error('Error unable to rename file "%s" because %s' % (cur_filename, ex(_e)))
elif helpers.has_media_ext(cur_filename) and \ elif helpers.has_media_ext(cur_filename) and \
None is not garbage_name.search(file_name) and None is not media_pattern.search(base_name): None is not garbage_name.search(file_name) and None is not media_pattern.search(base_name):
_num_videos += 1 _num_videos += 1
@@ -836,7 +832,7 @@ class ProcessTVShow(object):
os.rename(old_name, new_name) os.rename(old_name, new_name)
is_renamed[os.path.relpath(old_name, directory)] = os.path.relpath(new_name, directory) is_renamed[os.path.relpath(old_name, directory)] = os.path.relpath(new_name, directory)
except OSError as e: except OSError as e:
logger.log('Error unable to rename file "%s" because %s' % (old_name, ex(e)), logger.ERROR) logger.error('Error unable to rename file "%s" because %s' % (old_name, ex(e)))
return is_renamed return is_renamed
@@ -876,7 +872,7 @@ class ProcessTVShow(object):
try: try:
os.rename(base_filepath, outfile) os.rename(base_filepath, outfile)
except OSError: except OSError:
logger.log('Error unable to rename file %s' % base_filepath, logger.ERROR) logger.error('Error unable to rename file %s' % base_filepath)
return result return result
chunk_set.append(outfile) chunk_set.append(outfile)
chunk_set.sort() chunk_set.sort()
@@ -957,8 +953,8 @@ class ProcessTVShow(object):
my_db = db.DBConnection() my_db = db.DBConnection()
sql_result = my_db.select('SELECT * FROM tv_episodes WHERE release_name = ?', [dir_name]) sql_result = my_db.select('SELECT * FROM tv_episodes WHERE release_name = ?', [dir_name])
if sql_result: if sql_result:
self._log_helper(u'Found a release directory %s that has already been processed,<br />.. skipping: %s' self._log_helper(f'Found a release directory {showlink} that has already been processed,<br>'
% (showlink, dir_name)) f'.. skipping: {dir_name}')
if ep_detail_sql: if ep_detail_sql:
reset_status(parse_result.show_obj.tvid, reset_status(parse_result.show_obj.tvid,
parse_result.show_obj.prodid, parse_result.show_obj.prodid,
@@ -972,8 +968,8 @@ class ProcessTVShow(object):
sql_result = my_db.select( sql_result = my_db.select(
'SELECT * FROM tv_episodes WHERE release_name = ?', [videofile.rpartition('.')[0]]) 'SELECT * FROM tv_episodes WHERE release_name = ?', [videofile.rpartition('.')[0]])
if sql_result: if sql_result:
self._log_helper(u'Found a video, but that release %s was already processed,<br />.. skipping: %s' self._log_helper(f'Found a video, but that release {showlink} was already processed,<br>'
% (showlink, videofile)) f'.. skipping: {videofile}')
if ep_detail_sql: if ep_detail_sql:
reset_status(parse_result.show_obj.tvid, reset_status(parse_result.show_obj.tvid,
parse_result.show_obj.prodid, parse_result.show_obj.prodid,
@@ -991,10 +987,10 @@ class ProcessTVShow(object):
+ ' and tv_episodes.status IN (%s)' % ','.join([str(x) for x in common.Quality.DOWNLOADED])\ + ' and tv_episodes.status IN (%s)' % ','.join([str(x) for x in common.Quality.DOWNLOADED])\
+ ' and history.resource LIKE ?' + ' and history.resource LIKE ?'
sql_result = my_db.select(search_sql, [u'%' + videofile]) sql_result = my_db.select(search_sql, [f'%{videofile}'])
if sql_result: if sql_result:
self._log_helper(u'Found a video, but the episode %s is already processed,<br />.. skipping: %s' self._log_helper(f'Found a video, but the episode {showlink} is already processed,<br>'
% (showlink, videofile)) f'.. skipping: {videofile}')
if ep_detail_sql: if ep_detail_sql:
reset_status(parse_result.show_obj.tvid, reset_status(parse_result.show_obj.tvid,
parse_result.show_obj.prodid, parse_result.show_obj.prodid,
@@ -1051,7 +1047,7 @@ class ProcessTVShow(object):
process_fail_message = '' process_fail_message = ''
except exceptions_helper.PostProcessingFailed: except exceptions_helper.PostProcessingFailed:
file_success = False file_success = False
process_fail_message = '<br />.. Post Processing Failed' process_fail_message = '<br>.. Post Processing Failed'
self._set_process_success(file_success) self._set_process_success(file_success)
@@ -1059,13 +1055,11 @@ class ProcessTVShow(object):
self._buffer(processor.log.strip('\n')) self._buffer(processor.log.strip('\n'))
if file_success: if file_success:
self._log_helper(u'Successfully processed ' + cur_video_file, logger.MESSAGE) self._log_helper(f'Successfully processed {cur_video_file}', logger.MESSAGE)
elif self.any_vid_processed: elif self.any_vid_processed:
self._log_helper(u'Warning fail for %s%s' % (cur_video_file_path, process_fail_message), self._log_helper(f'Warning fail for {cur_video_file_path}{process_fail_message}', logger.WARNING)
logger.WARNING)
else: else:
self._log_helper(u'Did not use file %s%s' % (cur_video_file_path, process_fail_message), self._log_helper(f'Did not use file {cur_video_file_path}{process_fail_message}', logger.WARNING)
logger.WARNING)
@staticmethod @staticmethod
def _get_path_dir_files(dir_name, nzb_name, pp_type): def _get_path_dir_files(dir_name, nzb_name, pp_type):
@@ -1131,13 +1125,12 @@ class ProcessTVShow(object):
if sickgear.DELETE_FAILED and self.any_vid_processed: if sickgear.DELETE_FAILED and self.any_vid_processed:
self._delete_folder(dir_name, check_empty=False) self._delete_folder(dir_name, check_empty=False)
task = u'Failed download processing' task = 'Failed download processing'
if self.any_vid_processed: if self.any_vid_processed:
self._log_helper(u'Successful %s: (%s, %s)' self._log_helper(f'Successful {task.lower()}: ({str(nzb_name)}, {dir_name})', logger.MESSAGE)
% (task.lower(), str(nzb_name), dir_name), logger.MESSAGE)
else: else:
self._log_helper(u'%s failed: (%s, %s): %s' self._log_helper(f'{task} failed: ({str(nzb_name)}, {dir_name}): {process_fail_message}',
% (task, str(nzb_name), dir_name, process_fail_message), logger.WARNING) logger.WARNING)
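Throughout these hunks, logger.log(msg, logger.LEVEL) becomes the level-named helper logger.warning/debug/error(msg). A standalone stdlib sketch of the equivalence; note that stdlib logging.log takes (level, msg) while the sickgear wrapper shown in the old lines takes (msg, level):

import logging

logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger('sickgear')
# generic call with an explicit level constant
log.log(logging.WARNING, 'No snatched release found in history')
# equivalent level-named helper, the form this commit migrates to
log.warning('No snatched release found in history')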
def process_minimal(self, nzb_name, show_obj, failed, webhandler): def process_minimal(self, nzb_name, show_obj, failed, webhandler):
if failed: if failed:

View file

@@ -185,7 +185,7 @@ def load_webdl_types():
try: try:
for line in url_data.splitlines(): for line in url_data.splitlines():
try: try:
(key, val) = line.strip().split(u'::', 1) (key, val) = line.strip().split('::', 1)
except (BaseException, Exception): except (BaseException, Exception):
continue continue
if None is key or None is val: if None is key or None is val:
@@ -218,10 +218,10 @@ def _search_provider(cur_provider, provider_propers, aired_since_shows, recent_s
provider_propers.extend(cur_provider.find_propers(search_date=aired_since_shows, shows=recent_shows, provider_propers.extend(cur_provider.find_propers(search_date=aired_since_shows, shows=recent_shows,
anime=recent_anime)) anime=recent_anime))
except AuthException as e: except AuthException as e:
logger.log('Authentication error: %s' % ex(e), logger.ERROR) logger.error('Authentication error: %s' % ex(e))
except (BaseException, Exception) as e: except (BaseException, Exception) as e:
logger.log('Error while searching %s, skipping: %s' % (cur_provider.name, ex(e)), logger.ERROR) logger.error('Error while searching %s, skipping: %s' % (cur_provider.name, ex(e)))
logger.log(traceback.format_exc(), logger.ERROR) logger.error(traceback.format_exc())
if not provider_propers: if not provider_propers:
logger.log('No Proper releases found at [%s]' % cur_provider.name) logger.log('No Proper releases found at [%s]' % cur_provider.name)
@@ -306,8 +306,8 @@ def _get_proper_list(aired_since_shows,  # type: datetime.datetime
cur_proper.parsed_show_obj = (cur_proper.parsed_show_obj cur_proper.parsed_show_obj = (cur_proper.parsed_show_obj
or helpers.find_show_by_id(parse_result.show_obj.tvid_prodid)) or helpers.find_show_by_id(parse_result.show_obj.tvid_prodid))
if None is cur_proper.parsed_show_obj: if None is cur_proper.parsed_show_obj:
logger.log('Skip download; cannot find show with ID [%s] at %s' % logger.error('Skip download; cannot find show with ID [%s] at %s' %
(cur_proper.prodid, sickgear.TVInfoAPI(cur_proper.tvid).name), logger.ERROR) (cur_proper.prodid, sickgear.TVInfoAPI(cur_proper.tvid).name))
continue continue
cur_proper.tvid = cur_proper.parsed_show_obj.tvid cur_proper.tvid = cur_proper.parsed_show_obj.tvid
@@ -319,26 +319,25 @@ def _get_proper_list(aired_since_shows,  # type: datetime.datetime
# only get anime Proper if it has release group and version # only get anime Proper if it has release group and version
if parse_result.is_anime and not parse_result.release_group and -1 == parse_result.version: if parse_result.is_anime and not parse_result.release_group and -1 == parse_result.version:
logger.log('Ignored Proper with no release group and version in name [%s]' % cur_proper.name, logger.debug('Ignored Proper with no release group and version in name [%s]' % cur_proper.name)
logger.DEBUG)
continue continue
if not show_name_helpers.pass_wordlist_checks(cur_proper.name, parse=False, indexer_lookup=False, if not show_name_helpers.pass_wordlist_checks(cur_proper.name, parse=False, indexer_lookup=False,
show_obj=cur_proper.parsed_show_obj): show_obj=cur_proper.parsed_show_obj):
logger.log('Ignored unwanted Proper [%s]' % cur_proper.name, logger.DEBUG) logger.debug('Ignored unwanted Proper [%s]' % cur_proper.name)
continue continue
re_x = dict(re_prefix='.*', re_suffix='.*') re_x = dict(re_prefix='.*', re_suffix='.*')
result = show_name_helpers.contains_any(cur_proper.name, cur_proper.parsed_show_obj.rls_ignore_words, result = show_name_helpers.contains_any(cur_proper.name, cur_proper.parsed_show_obj.rls_ignore_words,
rx=cur_proper.parsed_show_obj.rls_ignore_words_regex, **re_x) rx=cur_proper.parsed_show_obj.rls_ignore_words_regex, **re_x)
if None is not result and result: if None is not result and result:
logger.log('Ignored Proper containing ignore word [%s]' % cur_proper.name, logger.DEBUG) logger.debug('Ignored Proper containing ignore word [%s]' % cur_proper.name)
continue continue
result = show_name_helpers.contains_any(cur_proper.name, cur_proper.parsed_show_obj.rls_require_words, result = show_name_helpers.contains_any(cur_proper.name, cur_proper.parsed_show_obj.rls_require_words,
rx=cur_proper.parsed_show_obj.rls_require_words_regex, **re_x) rx=cur_proper.parsed_show_obj.rls_require_words_regex, **re_x)
if None is not result and not result: if None is not result and not result:
logger.log('Ignored Proper for not containing any required word [%s]' % cur_proper.name, logger.DEBUG) logger.debug('Ignored Proper for not containing any required word [%s]' % cur_proper.name)
continue continue
cur_size = getattr(cur_proper, 'size', None) cur_size = getattr(cur_proper, 'size', None)
@@ -419,15 +418,15 @@ def _get_proper_list(aired_since_shows,  # type: datetime.datetime
old_webdl_type = get_webdl_type(old_extra_no_name, old_name) old_webdl_type = get_webdl_type(old_extra_no_name, old_name)
new_webdl_type = get_webdl_type(parse_result.extra_info_no_name(), cur_proper.name) new_webdl_type = get_webdl_type(parse_result.extra_info_no_name(), cur_proper.name)
if old_webdl_type != new_webdl_type: if old_webdl_type != new_webdl_type:
logger.log('Ignored Proper webdl source [%s], does not match existing webdl source [%s] for [%s]' logger.debug(f'Ignored Proper webdl source [{old_webdl_type}], does not match existing webdl source'
% (old_webdl_type, new_webdl_type, cur_proper.name), logger.DEBUG) f' [{new_webdl_type}] for [{cur_proper.name}]')
continue continue
# for webdls, prevent Propers from different groups # for webdls, prevent Propers from different groups
log_same_grp = 'Ignored Proper from release group [%s] does not match existing group [%s] for [%s]' \ log_same_grp = 'Ignored Proper from release group [%s] does not match existing group [%s] for [%s]' \
% (parse_result.release_group, old_release_group, cur_proper.name) % (parse_result.release_group, old_release_group, cur_proper.name)
if sickgear.PROPERS_WEBDL_ONEGRP and is_web and not same_release_group: if sickgear.PROPERS_WEBDL_ONEGRP and is_web and not same_release_group:
logger.log(log_same_grp, logger.DEBUG) logger.debug(log_same_grp)
continue continue
# check if we actually want this Proper (if it's the right release group and a higher version) # check if we actually want this Proper (if it's the right release group and a higher version)
@@ -436,7 +435,7 @@ def _get_proper_list(aired_since_shows,  # type: datetime.datetime
if not (-1 < old_version < parse_result.version): if not (-1 < old_version < parse_result.version):
continue continue
if not same_release_group: if not same_release_group:
logger.log(log_same_grp, logger.DEBUG) logger.debug(log_same_grp)
continue continue
found_msg = 'Found anime Proper v%s to replace v%s' % (parse_result.version, old_version) found_msg = 'Found anime Proper v%s to replace v%s' % (parse_result.version, old_version)
else: else:
@@ -454,7 +453,7 @@ def _get_proper_list(aired_since_shows,  # type: datetime.datetime
# skip if the episode has never downloaded, because a previous quality is required to match the Proper # skip if the episode has never downloaded, because a previous quality is required to match the Proper
if not len(history_results): if not len(history_results):
logger.log('Ignored Proper cannot find a recent history item for [%s]' % cur_proper.name, logger.DEBUG) logger.debug('Ignored Proper cannot find a recent history item for [%s]' % cur_proper.name)
continue continue
# make sure that none of the existing history downloads are the same Proper as the download candidate # make sure that none of the existing history downloads are the same Proper as the download candidate
@@ -471,7 +470,7 @@ def _get_proper_list(aired_since_shows,  # type: datetime.datetime
logger.log('Ignored Proper already in history [%s]' % cur_proper.name) logger.log('Ignored Proper already in history [%s]' % cur_proper.name)
continue continue
logger.log(found_msg, logger.DEBUG) logger.debug(found_msg)
# finish populating the Proper instance # finish populating the Proper instance
# cur_proper.show_obj = cur_proper.parsed_show_obj.prodid # cur_proper.show_obj = cur_proper.parsed_show_obj.prodid
@@ -557,16 +556,14 @@ def _download_propers(proper_list):
if reject: if reject:
if isinstance(reject, string_types): if isinstance(reject, string_types):
if scene_rej_nuked and not scene_nuked_active: if scene_rej_nuked and not scene_nuked_active:
logger.log('Rejecting nuked release. Nuke reason [%s] source [%s]' % (reject, url), logger.debug('Rejecting nuked release. Nuke reason [%s] source [%s]' % (reject, url))
logger.DEBUG)
else: else:
logger.log('Considering nuked release. Nuke reason [%s] source [%s]' % (reject, url), logger.debug('Considering nuked release. Nuke reason [%s] source [%s]' % (reject, url))
logger.DEBUG)
reject = False reject = False
elif scene_contains or non_scene_fallback: elif scene_contains or non_scene_fallback:
reject = False reject = False
else: else:
logger.log('Rejecting as not scene release listed at any [%s]' % url, logger.DEBUG) logger.debug('Rejecting as not scene release listed at any [%s]' % url)
if reject: if reject:
continue continue
@@ -685,7 +682,7 @@ def _generic_name(name):
def _set_last_proper_search(when): def _set_last_proper_search(when):
logger.log(u'Setting the last Proper search in the DB to %s' % when, logger.DEBUG) logger.debug(f'Setting the last Proper search in the DB to {when}')
my_db = db.DBConnection() my_db = db.DBConnection()
sql_result = my_db.select('SELECT * FROM info') sql_result = my_db.select('SELECT * FROM info')

View file

@@ -177,7 +177,7 @@ def _create_newznab_source(config_string):
except IndexError: except IndexError:
params.update({k: d}) params.update({k: d})
else: else:
logger.log(u'Skipping Newznab provider string: \'%s\', incorrect format' % config_string, logger.ERROR) logger.error(f'Skipping Newznab provider string: \'{config_string}\', incorrect format')
return None return None
newznab_module = sys.modules['sickgear.providers.newznab'] newznab_module = sys.modules['sickgear.providers.newznab']
@@ -213,8 +213,7 @@ def _create_torrent_rss_source(config_string):
url = values[1] url = values[1]
enabled = values[3] enabled = values[3]
except ValueError: except ValueError:
logger.log(u"Skipping RSS Torrent provider string: '" + config_string + "', incorrect format", logger.error(f'Skipping RSS Torrent provider string: \'{config_string}\', incorrect format')
logger.ERROR)
return None return None
try: try:

View file

@@ -105,7 +105,7 @@ class AlphaRatioProvider(generic.TorrentProvider):
except generic.HaltParseException: except generic.HaltParseException:
pass pass
except (BaseException, Exception): except (BaseException, Exception):
logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}')
self._log_search(mode, len(items[mode]) - cnt, search_url) self._log_search(mode, len(items[mode]) - cnt, search_url)
results = self._sort_seeding(mode, results + items[mode]) results = self._sort_seeding(mode, results + items[mode])

View file

@@ -48,7 +48,7 @@ class BitHDTVProvider(generic.TorrentProvider):
[(None is y or re.search(r'(?i)rss\slink', y)), [(None is y or re.search(r'(?i)rss\slink', y)),
self.has_all_cookies(['su', 'sp', 'sl'], 'h_'), 'search' in self.urls] + self.has_all_cookies(['su', 'sp', 'sl'], 'h_'), 'search' in self.urls] +
[(self.session.cookies.get('h_' + x) or 'sg!no!pw') in self.digest for x in ('su', 'sp', 'sl')])), [(self.session.cookies.get('h_' + x) or 'sg!no!pw') in self.digest for x in ('su', 'sp', 'sl')])),
failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings')) failed_msg=(lambda y=None: 'Invalid cookie details for %s. Check settings'))
@staticmethod @staticmethod
def _has_signature(data=None): def _has_signature(data=None):
@@ -110,7 +110,7 @@ class BitHDTVProvider(generic.TorrentProvider):
except generic.HaltParseException: except generic.HaltParseException:
pass pass
except (BaseException, Exception): except (BaseException, Exception):
logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}')
self._log_search(mode, len(items[mode]) - cnt, search_url) self._log_search(mode, len(items[mode]) - cnt, search_url)

View file

@@ -54,7 +54,7 @@ class BlutopiaProvider(generic.TorrentProvider):
def _authorised(self, **kwargs): def _authorised(self, **kwargs):
return super(BlutopiaProvider, self)._authorised( return super(BlutopiaProvider, self)._authorised(
logged_in=self.logged_in, failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings')) logged_in=self.logged_in, failed_msg=(lambda y=None: 'Invalid cookie details for %s. Check settings'))
def logged_in(self, resp=None): def logged_in(self, resp=None):
@@ -102,7 +102,7 @@ class BlutopiaProvider(generic.TorrentProvider):
show_type = self.show_obj.air_by_date and 'Air By Date' \ show_type = self.show_obj.air_by_date and 'Air By Date' \
or self.show_obj.is_sports and 'Sports' or None or self.show_obj.is_sports and 'Sports' or None
if show_type: if show_type:
logger.log(u'Provider does not carry shows of type: [%s], skipping' % show_type, logger.DEBUG) logger.debug(f'Provider does not carry shows of type: [{show_type}], skipping')
return results return results
for search_string in search_params[mode]: for search_string in search_params[mode]:
@@ -159,7 +159,7 @@ class BlutopiaProvider(generic.TorrentProvider):
except generic.HaltParseException: except generic.HaltParseException:
pass pass
except (BaseException, Exception): except (BaseException, Exception):
logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}')
self._log_search(mode, len(items[mode]) - cnt, log + search_url) self._log_search(mode, len(items[mode]) - cnt, log + search_url)

View file

@@ -75,8 +75,7 @@ class BTNProvider(generic.TorrentProvider):
self.tmr_limit_update('1', 'h', '150/hr %s' % data) self.tmr_limit_update('1', 'h', '150/hr %s' % data)
self.log_failure_url(url, post_data, post_json) self.log_failure_url(url, post_data, post_json)
else: else:
logger.log(u'Action prematurely ended. %(prov)s server error response = %(desc)s' % logger.warning(f'Action prematurely ended. {self.name} server error response = {data}')
{'prov': self.name, 'desc': data}, logger.WARNING)
def _search_provider(self, search_params, age=0, **kwargs): def _search_provider(self, search_params, age=0, **kwargs):
@@ -118,7 +117,7 @@ class BTNProvider(generic.TorrentProvider):
self._check_response(error_text, self.url_api, post_data=json_rpc(params)) self._check_response(error_text, self.url_api, post_data=json_rpc(params))
return results return results
except AuthException: except AuthException:
logger.log('API looks to be down, add un/pw config detail to be used as a fallback', logger.WARNING) logger.warning('API looks to be down, add un/pw config detail to be used as a fallback')
except (KeyError, Exception): except (KeyError, Exception):
pass pass
@@ -247,7 +246,7 @@ class BTNProvider(generic.TorrentProvider):
except generic.HaltParseException: except generic.HaltParseException:
pass pass
except (BaseException, Exception): except (BaseException, Exception):
logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}')
self._log_search(mode, len(results) - cnt, search_url) self._log_search(mode, len(results) - cnt, search_url)
@@ -267,7 +266,7 @@ class BTNProvider(generic.TorrentProvider):
else: else:
# If we don't have a release name we need to get creative # If we don't have a release name we need to get creative
title = u'' title = ''
keys = ['Series', 'GroupName', 'Resolution', 'Source', 'Codec'] keys = ['Series', 'GroupName', 'Resolution', 'Source', 'Codec']
for key in keys: for key in keys:
if key in data_json: if key in data_json:
@@ -353,8 +352,8 @@ class BTNProvider(generic.TorrentProvider):
# Set maximum to 24 hours (24 * 60 * 60 = 86400 seconds) of "RSS" data search, # Set maximum to 24 hours (24 * 60 * 60 = 86400 seconds) of "RSS" data search,
# older items will be done through backlog # older items will be done through backlog
if 86400 < seconds_since_last_update: if 86400 < seconds_since_last_update:
logger.log(u'Only trying to fetch the last 24 hours even though the last known successful update on ' + logger.warning(f'Only trying to fetch the last 24 hours even though the last known successful update on'
'%s was over 24 hours' % self.name, logger.WARNING) f' {self.name} was over 24 hours')
seconds_since_last_update = 86400 seconds_since_last_update = 86400
return self._search_provider(dict(Cache=['']), age=seconds_since_last_update) return self._search_provider(dict(Cache=['']), age=seconds_since_last_update)
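The comment above documents capping "RSS" fetches at 24 hours and leaving anything older to the backlog search. A standalone sketch of that clamp (the function name is illustrative):

DAY_SECONDS = 24 * 60 * 60  # 86400, the cap described in the comment above

def clamp_update_window(seconds_since_last_update):
    # anything beyond 24 hours is handled by the backlog search instead
    return min(seconds_since_last_update, DAY_SECONDS)

assert 86400 == clamp_update_window(90000)
assert 3600 == clamp_update_window(3600)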

View file

@@ -106,7 +106,7 @@ class EztvProvider(generic.TorrentProvider):
except (generic.HaltParseException, IndexError): except (generic.HaltParseException, IndexError):
pass pass
except (BaseException, Exception): except (BaseException, Exception):
logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}')
self._log_search(mode, len(items[mode]) - cnt, search_url) self._log_search(mode, len(items[mode]) - cnt, search_url)

View file

@@ -122,7 +122,7 @@ class FanoProvider(generic.TorrentProvider):
except generic.HaltParseException: except generic.HaltParseException:
pass pass
except (BaseException, Exception): except (BaseException, Exception):
logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}')
self._log_search(mode, len(items[mode]) - cnt, log + search_url) self._log_search(mode, len(items[mode]) - cnt, log + search_url)

View file

@@ -96,7 +96,7 @@ class FLProvider(generic.TorrentProvider):
except generic.HaltParseException: except generic.HaltParseException:
pass pass
except (BaseException, Exception): except (BaseException, Exception):
logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}')
self._log_search(mode, len(items[mode]) - cnt, self.session.response.get('url')) self._log_search(mode, len(items[mode]) - cnt, self.session.response.get('url'))

View file

@@ -80,7 +80,7 @@ class FSTProvider(generic.NZBProvider):
success, msg = self._check_cookie() success, msg = self._check_cookie()
if success: if success:
return False return False
logger.warning(u'%s: %s' % (msg, self.cookies)) logger.warning(f'{msg}: {self.cookies}')
self.cookies = None self.cookies = None
return None return None
@@ -166,7 +166,7 @@ class FSTProvider(generic.NZBProvider):
time.sleep(1.1) time.sleep(1.1)
pass pass
except (BaseException, Exception): except (BaseException, Exception):
logger.error(u'Failed to parse. Traceback: %s' % traceback.format_exc()) logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}')
self._log_search((mode, search_mode)['Propers' == search_mode], len(results) - cnt, search_url) self._log_search((mode, search_mode)['Propers' == search_mode], len(results) - cnt, search_url)
return results return results

View file

@@ -106,7 +106,7 @@ class FunFileProvider(generic.TorrentProvider):
except (generic.HaltParseException, AttributeError): except (generic.HaltParseException, AttributeError):
pass pass
except (BaseException, Exception): except (BaseException, Exception):
logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}')
self._log_search(mode, len(items[mode]) - cnt, search_url) self._log_search(mode, len(items[mode]) - cnt, search_url)

View file

@@ -166,9 +166,8 @@ class ProviderFailList(object):
with self.lock: with self.lock:
self.dirty = True self.dirty = True
self._fails.append(fail) self._fails.append(fail)
logger.log('Adding fail.%s for %s' % (ProviderFailTypes.names.get( logger.debug('Adding fail.%s for %s' % (ProviderFailTypes.names.get(
fail.fail_type, ProviderFailTypes.names[ProviderFailTypes.other]), self.provider_name()), fail.fail_type, ProviderFailTypes.names[ProviderFailTypes.other]), self.provider_name()))
logger.DEBUG)
self.save_list() self.save_list()
def save_list(self): def save_list(self):
@@ -426,8 +425,8 @@ class GenericProvider(object):
if not limit_set: if not limit_set:
time_index = self.fail_time_index(base_limit=0) time_index = self.fail_time_index(base_limit=0)
self.tmr_limit_wait = self.wait_time(time_index) self.tmr_limit_wait = self.wait_time(time_index)
logger.log('Request limit reached. Waiting for %s until next retry. Message: %s' % logger.warning(f'Request limit reached. Waiting for {self.tmr_limit_wait} until next retry.'
(self.tmr_limit_wait, desc or 'none found'), logger.WARNING) f' Message: {desc or "none found"}')
def wait_time(self, time_index=None): def wait_time(self, time_index=None):
# type: (Optional[int]) -> datetime.timedelta # type: (Optional[int]) -> datetime.timedelta
@@ -503,8 +502,8 @@ class GenericProvider(object):
             # Ensure provider name output (e.g. when displaying config/provs) instead of e.g. thread "Tornado"
             prepend = ('[%s] :: ' % self.name, '')[any(x.name in threading.current_thread().name
                                                        for x in sickgear.providers.sorted_sources())]
-            logger.log('%sToo many requests reached at %s, waiting for %s' % (
-                prepend, self.fmt_delta(self.tmr_limit_time), self.fmt_delta(time_left)), logger.WARNING)
+            logger.warning(f'{prepend}Too many requests reached at {self.fmt_delta(self.tmr_limit_time)},'
+                           f' waiting for {self.fmt_delta(time_left)}')
             return use_tmr_limit
         else:
             self.tmr_limit_time = None
@@ -515,10 +514,9 @@ class GenericProvider(object):
         if self.is_waiting():
             if log_warning:
                 time_left = self.wait_time() - self.fail_newest_delta()
-                logger.log('Failed %s times, skipping provider for %s, last failure at %s with fail type: %s' % (
-                    self.failure_count, self.fmt_delta(time_left), self.fmt_delta(self.failure_time),
-                    ProviderFailTypes.names.get(
-                        self.last_fail, ProviderFailTypes.names[ProviderFailTypes.other])), logger.WARNING)
+                logger.warning('Failed %s times, skipping provider for %s, last failure at %s with fail type: %s' % (
+                    self.failure_count, self.fmt_delta(time_left), self.fmt_delta(self.failure_time),
+                    ProviderFailTypes.names.get(self.last_fail, ProviderFailTypes.names[ProviderFailTypes.other])))
             return True
         return False
@@ -533,7 +531,7 @@ class GenericProvider(object):
                 self._last_fail_type = fail_type
                 self.fails.add_fail(*args, **kwargs)
             else:
-                logger.log('%s: Not logging same failure within 3 seconds' % self.name, logger.DEBUG)
+                logger.debug('%s: Not logging same failure within 3 seconds' % self.name)

     def get_url(self, url, skip_auth=False, use_tmr_limit=True, *args, **kwargs):
         # type: (AnyStr, bool, bool, Any, Any) -> Optional[AnyStr, Dict]
@@ -580,7 +578,7 @@ class GenericProvider(object):
             if data and not isinstance(data, tuple) \
                     or isinstance(data, tuple) and data[0]:
                 if 0 != self.failure_count:
-                    logger.log('Unblocking provider: %s' % self.get_id(), logger.DEBUG)
+                    logger.debug('Unblocking provider: %s' % self.get_id())
                 self.failure_count = 0
                 self.failure_time = None
             else:
@@ -628,7 +626,7 @@ class GenericProvider(object):
                 post += [' .. Post params: [%s]' % '&'.join([post_data])]
             if post_json:
                 post += [' .. Json params: [%s]' % '&'.join([post_json])]
-            logger.log('Failure URL: %s%s' % (url, ''.join(post)), logger.WARNING)
+            logger.warning('Failure URL: %s%s' % (url, ''.join(post)))

     def get_id(self):
         # type: (...) -> AnyStr
@@ -812,7 +810,7 @@ class GenericProvider(object):
             if not btih or not re.search('(?i)[0-9a-f]{32,40}', btih):
                 assert not result.url.startswith('http')
-                logger.log('Unable to extract torrent hash from link: ' + ex(result.url), logger.ERROR)
+                logger.error('Unable to extract torrent hash from link: ' + ex(result.url))
                 return False

             urls = ['http%s://%s/torrent/%s.torrent' % (u + (btih.upper(),))
@@ -846,14 +844,14 @@ class GenericProvider(object):
                                     failure_monitor=False):
                 if self._verify_download(cache_file):
-                    logger.log(u'Downloaded %s result from %s' % (self.name, url))
+                    logger.log(f'Downloaded {self.name} result from {url}')
                     try:
                         helpers.move_file(cache_file, final_file)
                         msg = 'moved'
                     except (OSError, Exception):
                         msg = 'copied cached file'
-                    logger.log(u'Saved .%s data and %s to %s' % (
-                        (link_type, 'torrent cache')['magnet' == link_type], msg, final_file))
+                    logger.log(f'Saved .{(link_type, "torrent cache")["magnet" == link_type]} data'
+                               f' and {msg} to {final_file}')
                     saved = True
                     break
@@ -866,7 +864,7 @@ class GenericProvider(object):
                 del(self.session.headers['Referer'])

         if not saved and 'magnet' == link_type:
-            logger.log(u'All torrent cache servers failed to return a downloadable result', logger.DEBUG)
+            logger.debug('All torrent cache servers failed to return a downloadable result')
             final_file = os.path.join(final_dir, '%s.%s' % (helpers.sanitize_filename(result.name), link_type))
             try:
                 with open(final_file, 'wb') as fp:
@@ -874,12 +872,12 @@ class GenericProvider(object):
                     fp.flush()
                     os.fsync(fp.fileno())
                 saved = True
-                logger.log(u'Saved magnet link to file as some clients (or plugins) support this, %s' % final_file)
+                logger.log(f'Saved magnet link to file as some clients (or plugins) support this, {final_file}')
                 if 'blackhole' == sickgear.TORRENT_METHOD:
                     logger.log('Tip: If your client fails to load magnet in files, ' +
                                'change blackhole to a client connection method in search settings')
             except (BaseException, Exception):
-                logger.log(u'Failed to save magnet link to file, %s' % final_file)
+                logger.log(f'Failed to save magnet link to file, {final_file}')
         elif not saved:
             if 'torrent' == link_type and result.provider.get_id() in sickgear.PROVIDER_HOMES:
                 t_result = result  # type: TorrentSearchResult
@@ -895,7 +893,7 @@ class GenericProvider(object):
                     t_result.provider._valid_home(url_exclude=urls)
                     setattr(sickgear, 'PROVIDER_EXCLUDE', ([], urls)[any([t_result.provider.url])])
-            logger.log(u'Server failed to return anything useful', logger.ERROR)
+            logger.error('Server failed to return anything useful')

         return saved
@@ -969,7 +967,7 @@ class GenericProvider(object):
             except (BaseException, Exception):
                 pass

-        title = title and re.sub(r'\s+', '.', u'%s' % title)
+        title = title and re.sub(r'\s+', '.', f'{title}')
         if url and not re.match('(?i)magnet:', url):
             url = str(url).replace('&amp;', '&')
@@ -1193,10 +1191,10 @@ class GenericProvider(object):
             try:
                 parse_result = parser.parse(title, release_group=self.get_id())
             except InvalidNameException:
-                logger.log(u'Unable to parse the filename %s into a valid episode' % title, logger.DEBUG)
+                logger.debug(f'Unable to parse the filename {title} into a valid episode')
                 continue
             except InvalidShowException:
-                logger.log(u'No match for search criteria in the parsed filename ' + title, logger.DEBUG)
+                logger.debug(f'No match for search criteria in the parsed filename {title}')
                 continue

             if parse_result.show_obj.is_anime:
@@ -1208,8 +1206,8 @@ class GenericProvider(object):
                     continue

             if not (parse_result.show_obj.tvid == show_obj.tvid and parse_result.show_obj.prodid == show_obj.prodid):
-                logger.debug(u'Parsed show [%s] is not show [%s] we are searching for' % (
-                    parse_result.show_obj.unique_name, show_obj.unique_name))
+                logger.debug(f'Parsed show [{parse_result.show_obj.unique_name}] is not show [{show_obj.unique_name}]'
+                             f' we are searching for')
                 continue

             parsed_show_obj = parse_result.show_obj
@@ -1223,15 +1221,15 @@ class GenericProvider(object):
             if not (parsed_show_obj.air_by_date or parsed_show_obj.is_sports):
                 if 'sponly' == search_mode:
                     if len(parse_result.episode_numbers):
-                        logger.log(u'This is supposed to be a season pack search but the result ' + title +
-                                   u' is not a valid season pack, skipping it', logger.DEBUG)
+                        logger.debug(f'This is supposed to be a season pack search but the result {title}'
+                                     f' is not a valid season pack, skipping it')
                         add_cache_entry = True
                     if len(parse_result.episode_numbers) \
                             and (parse_result.season_number not in set([ep_obj.season for ep_obj in ep_obj_list])
                                  or not [ep_obj for ep_obj in ep_obj_list
                                          if ep_obj.scene_episode in parse_result.episode_numbers]):
-                        logger.log(u'The result ' + title + u' doesn\'t seem to be a valid episode that we are trying' +
-                                   u' to snatch, ignoring', logger.DEBUG)
+                        logger.debug(f'The result {title} doesn\'t seem to be a valid episode that we are trying'
+                                     f' to snatch, ignoring')
                         add_cache_entry = True
                 else:
                     if not len(parse_result.episode_numbers)\
@@ -1239,14 +1237,14 @@ class GenericProvider(object):
                             and not [ep_obj for ep_obj in ep_obj_list
                                      if ep_obj.season == parse_result.season_number and
                                      ep_obj.episode in parse_result.episode_numbers]:
-                        logger.log(u'The result ' + title + u' doesn\'t seem to be a valid season that we are trying' +
-                                   u' to snatch, ignoring', logger.DEBUG)
+                        logger.debug(f'The result {title} doesn\'t seem to be a valid season that we are trying'
+                                     f' to snatch, ignoring')
                         add_cache_entry = True
                     elif len(parse_result.episode_numbers) and not [
                             ep_obj for ep_obj in ep_obj_list if ep_obj.season == parse_result.season_number
                             and ep_obj.episode in parse_result.episode_numbers]:
-                        logger.log(u'The result ' + title + ' doesn\'t seem to be a valid episode that we are trying' +
-                                   u' to snatch, ignoring', logger.DEBUG)
+                        logger.debug(f'The result {title} doesn\'t seem to be a valid episode that we are trying'
+                                     f' to snatch, ignoring')
                         add_cache_entry = True

             if not add_cache_entry:
@@ -1255,8 +1253,8 @@ class GenericProvider(object):
                     episode_numbers = parse_result.episode_numbers
             else:
                 if not parse_result.is_air_by_date:
-                    logger.log(u'This is supposed to be a date search but the result ' + title +
-                               u' didn\'t parse as one, skipping it', logger.DEBUG)
+                    logger.debug(f'This is supposed to be a date search but the result {title}'
+                                 f' didn\'t parse as one, skipping it')
                     add_cache_entry = True
                 else:
                     season_number = parse_result.season_number
@@ -1265,13 +1263,13 @@ class GenericProvider(object):
                     if not episode_numbers or \
                             not [ep_obj for ep_obj in ep_obj_list
                                  if ep_obj.season == season_number and ep_obj.episode in episode_numbers]:
-                        logger.log(u'The result ' + title + ' doesn\'t seem to be a valid episode that we are trying' +
-                                   u' to snatch, ignoring', logger.DEBUG)
+                        logger.debug(f'The result {title} doesn\'t seem to be a valid episode that we are trying'
+                                     f' to snatch, ignoring')
                         add_cache_entry = True

             # add parsed result to cache for usage later on
             if add_cache_entry:
-                logger.log(u'Adding item from search to cache: ' + title, logger.DEBUG)
+                logger.debug(f'Adding item from search to cache: {title}')
                 ci = self.cache.add_cache_entry(title, url, parse_result=parse_result)
                 if None is not ci:
                     cl.append(ci)
@@ -1288,11 +1286,11 @@ class GenericProvider(object):
             multi_ep = 1 < len(episode_numbers)

             if not want_ep:
-                logger.log(u'Ignoring result %s because we don\'t want an episode that is %s'
-                           % (title, Quality.qualityStrings[quality]), logger.DEBUG)
+                logger.debug(f'Ignoring result {title} because we don\'t want an episode that is'
+                             f' {Quality.qualityStrings[quality]}')
                 continue

-            logger.log(u'Found result %s at %s' % (title, url), logger.DEBUG)
+            logger.debug(f'Found result {title} at {url}')

             # make a result object
             ep_obj_results = []  # type: List[TVEpisode]
@@ -1317,14 +1315,14 @@ class GenericProvider(object):
             ep_num = None
             if 1 == len(ep_obj_results):
                 ep_num = ep_obj_results[0].episode
-                logger.log(u'Single episode result.', logger.DEBUG)
+                logger.debug('Single episode result.')
             elif 1 < len(ep_obj_results):
                 ep_num = MULTI_EP_RESULT
-                logger.log(u'Separating multi-episode result to check for later - result contains episodes: ' +
-                           str(parse_result.episode_numbers), logger.DEBUG)
+                logger.debug(f'Separating multi-episode result to check for later - result contains episodes:'
+                             f' {parse_result.episode_numbers}')
             elif 0 == len(ep_obj_results):
                 ep_num = SEASON_RESULT
-                logger.log(u'Separating full season result to check for later', logger.DEBUG)
+                logger.debug('Separating full season result to check for later')

             if ep_num not in results:
                 # noinspection PyTypeChecker
@@ -1390,7 +1388,7 @@ class GenericProvider(object):
         if not self.should_skip():
             str1, thing, str3 = (('', '%s item' % mode.lower(), ''), (' usable', 'proper', ' found'))['Propers' == mode]
-            logger.log((u'%s %s in response%s from %s' % (('No' + str1, count)[0 < count], (
+            logger.log(('%s %s in response%s from %s' % (('No' + str1, count)[0 < count], (
                 '%s%s%s%s' % (('', 'freeleech ')[getattr(self, 'freeleech', False)], thing, maybe_plural(count), str3)),
                 ('', ' (rejects: %s)' % rejects)[bool(rejects)], re.sub(r'(\s)\s+', r'\1', url))).replace('%%', '%'))
@@ -1412,9 +1410,9 @@ class GenericProvider(object):
                 reqd = 'cf_clearance'
                 if reqd in ui_string_method(key) and reqd not in cookies:
                     return False, \
-                        u'%(p)s Cookies setting require %(r)s. If %(r)s not found in browser, log out,' \
-                        u' delete site cookies, refresh browser, %(r)s should be created' % \
+                        '%(p)s Cookies setting require %(r)s. If %(r)s not found in browser, log out,' \
+                        ' delete site cookies, refresh browser, %(r)s should be created' % \
                         dict(p=self.name, r='\'%s\'' % reqd)

         cj = requests.utils.add_dict_to_cookiejar(self.session.cookies,
                                                   dict([x.strip().split('=', 1) for x in cookies.split(';')
@@ -1586,7 +1584,7 @@ class NZBProvider(GenericProvider):
             if result_date:
                 result_date = datetime.datetime(*result_date[0:6])
             else:
-                logger.log(u'Unable to figure out the date for entry %s, skipping it' % title)
+                logger.log(f'Unable to figure out the date for entry {title}, skipping it')
                 continue

             if not search_date or search_date < result_date:
@@ -1918,7 +1916,7 @@ class TorrentProvider(GenericProvider):
             success, msg = self._check_cookie()
             if not success:
                 self.cookies = None
-                logger.log(u'%s' % msg, logger.WARNING)
+                logger.warning(f'{msg}')
                 return

         url_base = getattr(self, 'url_base', None)
@@ -1998,12 +1996,12 @@ class TorrentProvider(GenericProvider):
             r'(?i)([1-3]((<[^>]+>)|\W)*(attempts|tries|remain)[\W\w]{,40}?(remain|left|attempt)|last[^<]+?attempt)', y))
         logged_in, failed_msg = [None is not a and a or b for (a, b) in (
             (logged_in, (lambda y=None: self.has_all_cookies())),
-            (failed_msg, (lambda y='': maxed_out(y) and u'Urgent abort, running low on login attempts. ' +
-                          u'Password flushed to prevent service disruption to %s.' or
+            (failed_msg, (lambda y='': maxed_out(y) and 'Urgent abort, running low on login attempts. ' +
+                          'Password flushed to prevent service disruption to %s.' or
                           (re.search(r'(?i)(username|password)((<[^>]+>)|\W)*' +
                                      r'(or|and|/|\s)((<[^>]+>)|\W)*(password|incorrect)', y) and
-                           u'Invalid username or password for %s. Check settings' or
-                           u'Failed to authenticate or parse a response from %s, abort provider')))
+                           'Invalid username or password for %s. Check settings' or
+                           'Failed to authenticate or parse a response from %s, abort provider')))
         )]

         if logged_in() and (not hasattr(self, 'urls') or bool(len(getattr(self, 'urls')))):
@@ -2017,7 +2015,7 @@ class TorrentProvider(GenericProvider):
                 if not self._check_auth():
                     return False
             except AuthException as e:
-                logger.log('%s' % ex(e), logger.ERROR)
+                logger.error('%s' % ex(e))
                 return False

         if isinstance(url, type([])):
@@ -2094,7 +2092,7 @@ class TorrentProvider(GenericProvider):
             sickgear.save_config()
         msg = failed_msg(response)
         if msg:
-            logger.error(msg % self.name)
+            logger.error(msg % self.name)

         return False

@@ -49,7 +49,7 @@ class HDBitsProvider(generic.TorrentProvider):
     def _check_auth_from_data(self, parsed_json):
         if 'status' in parsed_json and 5 == parsed_json.get('status') and 'message' in parsed_json:
-            logger.log(u'Incorrect username or password for %s: %s' % (self.name, parsed_json['message']), logger.DEBUG)
+            logger.debug(f'Incorrect username or password for {self.name}: {parsed_json["message"]}')
             raise AuthException('Your username or password for %s is incorrect, check your config.' % self.name)
         return True

@@ -115,10 +115,10 @@ class HDBitsProvider(generic.TorrentProvider):
         try:
             if not (json_resp and self._check_auth_from_data(json_resp) and 'data' in json_resp):
-                logger.log(u'Response from %s does not contain any json data, abort' % self.name, logger.ERROR)
+                logger.error(f'Response from {self.name} does not contain any json data, abort')
                 return results
         except AuthException as e:
-            logger.log(u'Authentication error: %s' % (ex(e)), logger.ERROR)
+            logger.error(f'Authentication error: {ex(e)}')
             return results

         cnt = len(items[mode])

@@ -128,7 +128,7 @@ class HDSpaceProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
             except (BaseException, Exception):
-                logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
+                logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}')
             self._log_search(mode, len(items[mode]) - cnt, log + search_url)

         results = self._sort_seeding(mode, results + items[mode])

@@ -131,7 +131,7 @@ class HDTorrentsProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
             except (BaseException, Exception):
-                logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
+                logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}')
             self._log_search(mode, len(items[mode]) - cnt, log + search_url)

@@ -58,7 +58,7 @@ class IPTorrentsProvider(generic.TorrentProvider):
                 ['IPTorrents' in y, 'type="password"' not in y[0:2048], self.has_all_cookies()] +
                 [(self.session.cookies.get(c, domain='') or 'sg!no!pw') in self.digest
                  for c in ('uid', 'pass')])),
-            failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings'))
+            failed_msg=(lambda y=None: 'Invalid cookie details for %s. Check settings'))

     @staticmethod
     def _has_signature(data=None):

@@ -154,7 +154,7 @@ class IPTorrentsProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
             except (BaseException, Exception):
-                logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
+                logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}')
             self._log_search(mode, len(items[mode]) - cnt, search_url, log_settings_hint)

             if self.is_search_finished(mode, items, cnt_search, rc['id'], last_recent_search, lrs_new, lrs_found):

@@ -114,7 +114,7 @@ class LimeTorrentsProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
             except (BaseException, Exception):
-                logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
+                logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}')
             self._log_search(mode, len(items[mode]) - cnt, search_url)

@@ -131,7 +131,7 @@ class LimeTorrentsProvider(generic.TorrentProvider):
         try:
             result = re.findall('(?i)"(magnet:[^"]+?)"', html)[0]
         except IndexError:
-            logger.log('Failed no magnet in response', logger.DEBUG)
+            logger.debug('Failed no magnet in response')
         return result

@@ -99,7 +99,7 @@ class MagnetDLProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
             except (BaseException, Exception):
-                logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
+                logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}')
             self._log_search(mode, len(items[mode]) - cnt, search_url)

@@ -112,7 +112,7 @@ class MoreThanProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
             except (BaseException, Exception):
-                logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
+                logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}')
             self._log_search(mode, len(items[mode]) - cnt, search_url)

@@ -105,7 +105,7 @@ class NcoreProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
             except (BaseException, Exception):
-                logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
+                logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}')
             self._log_search(mode, len(items[mode]) - cnt, search_url)

@@ -119,7 +119,7 @@ class NebulanceProvider(generic.TorrentProvider):
                         items[mode].append((title, download_url, seeders, self._bytesizer(size)))
             except (BaseException, Exception):
-                logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
+                logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}')
             self._log_search(mode, len(items[mode]) - cnt, search_url)

         results = self._sort_seeding(mode, results + items[mode])

@@ -331,7 +331,7 @@ class NewznabProvider(generic.NZBProvider):
                     except (BaseException, Exception):
                         continue
         except (BaseException, Exception):
-            logger.log('Error parsing result for [%s]' % self.name, logger.DEBUG)
+            logger.debug('Error parsing result for [%s]' % self.name)

         if not caps and self._caps and not all_cats and self._caps_all_cats and not cats and self._caps_cats:
             self._check_excludes(cats)
@@ -644,14 +644,14 @@ class NewznabProvider(generic.NZBProvider):
             if not s.show_obj.is_anime and not s.show_obj.is_sports:
                 if not getattr(s, 'wanted_quality', None):
                     # this should not happen, the creation is missing for the search in this case
-                    logger.log('wanted_quality property was missing for search, creating it', logger.WARNING)
+                    logger.warning('wanted_quality property was missing for search, creating it')
                     ep_status, ep_quality = Quality.split_composite_status(ep_obj.status)
                     s.wanted_quality = get_wanted_qualities(ep_obj, ep_status, ep_quality, unaired=True)
                 needed.check_needed_qualities(s.wanted_quality)

         if not hasattr(ep_obj, 'eps_aired_in_season'):
             # this should not happen, the creation is missing for the search in this case
-            logger.log('eps_aired_in_season property was missing for search, creating it', logger.WARNING)
+            logger.warning('eps_aired_in_season property was missing for search, creating it')
             ep_count, ep_count_scene = get_aired_in_season(ep_obj.show_obj)
             ep_obj.eps_aired_in_season = ep_count.get(ep_obj.season, 0)
             ep_obj.eps_aired_in_scene_season = ep_count_scene.get(ep_obj.scene_season, 0) if ep_obj.show_obj.is_scene \
@@ -978,14 +978,14 @@ class NewznabProvider(generic.NZBProvider):
                     parsed_xml, n_spaces = self.cache.parse_and_get_ns(data)
                     items = parsed_xml.findall('channel/item')
                 except (BaseException, Exception):
-                    logger.log('Error trying to load %s RSS feed' % self.name, logger.WARNING)
+                    logger.warning('Error trying to load %s RSS feed' % self.name)
                     break

                 if not self._check_auth_from_data(parsed_xml, search_url):
                     break

                 if 'rss' != parsed_xml.tag:
-                    logger.log('Resulting XML from %s isn\'t RSS, not parsing it' % self.name, logger.WARNING)
+                    logger.warning('Resulting XML from %s isn\'t RSS, not parsing it' % self.name)
                     break

                 i and time.sleep(2.1)
@@ -996,8 +996,7 @@ class NewznabProvider(generic.NZBProvider):
                     if title and url:
                         results.append(item)
                     else:
-                        logger.log('The data returned from %s is incomplete, this result is unusable' % self.name,
-                                   logger.DEBUG)
+                        logger.debug('The data returned from %s is incomplete, this result is unusable' % self.name)

                 # get total and offset attributes
                 try:
@@ -1036,8 +1035,8 @@ class NewznabProvider(generic.NZBProvider):
                     # there are more items available than the amount given in one call, grab some more
                     items = total - request_params['offset']
-                    logger.log('%s more item%s to fetch from a batch of up to %s items.'
-                               % (items, helpers.maybe_plural(items), request_params['limit']), logger.DEBUG)
+                    logger.debug(f'{items} more item{helpers.maybe_plural(items)} to fetch from a batch of up to'
+                                 f' {request_params["limit"]} items.')

                 batch_count = self._log_result(results, mode, cnt, search_url)
                 exit_log = False
@@ -1125,7 +1124,7 @@ class NewznabProvider(generic.NZBProvider):
             result_date = self._parse_pub_date(item)
             if not result_date:
-                logger.log(u'Unable to figure out the date for entry %s, skipping it' % title)
+                logger.log(f'Unable to figure out the date for entry {title}, skipping it')
                 continue

             result_size, result_uid = self._parse_size_uid(item, ns=n_space)
@@ -1201,7 +1200,7 @@ class NewznabCache(tvcache.TVCache):
             else:
                 (items, n_spaces) = self.provider.cache_data(needed=needed)
         except (BaseException, Exception) as e:
-            logger.log('Error updating Cache: %s' % ex(e), logger.ERROR)
+            logger.error('Error updating Cache: %s' % ex(e))
             items = None

         if items:
@@ -1257,5 +1256,4 @@ class NewznabCache(tvcache.TVCache):
         if title and url:
             return self.add_cache_entry(title, url, tvid_prodid=ids)

-        logger.log('Data returned from the %s feed is incomplete, this result is unusable' % self.provider.name,
-                   logger.DEBUG)
+        logger.debug('Data returned from the %s feed is incomplete, this result is unusable' % self.provider.name)

@@ -91,7 +91,7 @@ class NyaaProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
             except (BaseException, Exception):
-                logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
+                logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}')
             self._log_search(mode, len(items[mode]) - cnt, search_url)

@@ -87,8 +87,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
             if re.search('(?i)(information is incorrect|in(?:valid|correct).*?(?:username|api))',
                          data_json.get('notice')):
-                logger.log(u'Incorrect authentication credentials for ' + self.name + ' : ' + str(description_text),
-                           logger.DEBUG)
+                logger.debug(f'Incorrect authentication credentials for {self.name} : {description_text}')
                 raise AuthException(
                     'Your authentication credentials for ' + self.name + ' are incorrect, check your config.')
@@ -96,7 +95,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
                 return True
             else:
-                logger.log(u'Unknown error given from ' + self.name + ' : ' + str(description_text), logger.DEBUG)
+                logger.debug(f'Unknown error given from {self.name} : {str(description_text)}')
                 return False

         return True
@@ -149,7 +148,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
                 self.tmr_limit_update('1', 'h', 'Your 24 hour limit of 10 NZBs has been reached')
                 self.log_failure_url(url)
             elif '</nzb>' not in data or 'seem to be logged in' in data:
-                logger.log('Failed nzb data response: %s' % data, logger.DEBUG)
+                logger.debug('Failed nzb data response: %s' % data)
             else:
                 result = data
         return result
@@ -345,7 +344,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
                 time.sleep(1.1)
                 pass
             except (BaseException, Exception):
-                logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
+                logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}')

         mode = (mode, search_mode)['Propers' == search_mode]
         self._log_search(mode, len(results) - cnt, search_url)
@@ -400,7 +399,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
             if success and self.nn:
                 success, msg = None, 'pm dev in irc about this feature'
             if not success:
-                logger.log(u'%s: %s' % (msg, self.cookies), logger.WARNING)
+                logger.warning(f'{msg}: {self.cookies}')
                 self.cookies = None
                 return None
         return False

@@ -100,7 +100,7 @@ class PreToMeProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
             except (BaseException, Exception):
-                logger.error(u'Failed to parse. Traceback: %s' % traceback.format_exc())
+                logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}')
             self._log_search(mode, len(items[mode]) - cnt, search_url)

@@ -56,7 +56,7 @@ class PrivateHDProvider(generic.TorrentProvider):
         return super(PrivateHDProvider, self)._authorised(
             logged_in=(lambda y='': 'English' in y and 'auth/login' not in y and all(
                 [(self.session.cookies.get('privatehdx_session', domain='') or 'sg!no!pw') in self.digest])),
-            failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings'))
+            failed_msg=(lambda y=None: 'Invalid cookie details for %s. Check settings'))

     def _search_provider(self, search_params, **kwargs):

@@ -88,7 +88,7 @@ class PrivateHDProvider(generic.TorrentProvider):
                 show_type = self.show_obj.air_by_date and 'Air By Date' \
                     or self.show_obj.is_sports and 'Sports' or self.show_obj.is_anime and 'Anime' or None
                 if show_type:
-                    logger.log(u'Provider does not carry shows of type: [%s], skipping' % show_type, logger.DEBUG)
+                    logger.debug(f'Provider does not carry shows of type: [{show_type}], skipping')
                     return results

             for search_string in search_params[mode]:

@@ -141,7 +141,7 @@ class PrivateHDProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
             except (BaseException, Exception):
-                logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
+                logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}')
             self._log_search(mode, len(items[mode]) - cnt, log + search_url)

@@ -56,7 +56,7 @@ class PTFProvider(generic.TorrentProvider):
             logged_in=(lambda y='': all(
                 ['RSS Feed' in y, self.has_all_cookies('session_key')] +
                 [(self.session.cookies.get(x) or 'sg!no!pw') in self.digest for x in ['session_key']])),
-            failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings'))
+            failed_msg=(lambda y=None: 'Invalid cookie details for %s. Check settings'))

     def _search_provider(self, search_params, **kwargs):

@@ -144,7 +144,7 @@ class PTFProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
             except (BaseException, Exception):
-                logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
+                logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}')
             self._log_search(mode, len(items[mode]) - cnt, log + self.session.response.get('url'))

@@ -68,7 +68,7 @@ class RarbgProvider(generic.TorrentProvider):
                 return True
             time.sleep(2)

-        logger.log(u'No usable API token returned from: %s' % self.urls['api_token'], logger.ERROR)
+        logger.error(f'No usable API token returned from: {self.urls["api_token"]}')
         return False

     @staticmethod

@@ -102,7 +102,7 @@ class RevTTProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
             except (BaseException, Exception):
-                logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
+                logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}')
             self._log_search(mode, len(items[mode]) - cnt, self.session.response.get('url'))

@@ -59,7 +59,7 @@ class TorrentRssProvider(generic.TorrentProvider):
             title, url = None, None
             if item.title:
-                title = re.sub(r'\s+', '.', u'' + item.title)
+                title = re.sub(r'\s+', '.', '' + item.title)

             attempt_list = [lambda: item.torrent_magneturi,
                             lambda: item.enclosures[0].href,

@@ -47,7 +47,7 @@ class SceneHDProvider(generic.TorrentProvider):
         return super(SceneHDProvider, self)._authorised(
             logged_in=(lambda y='': ['RSS links' in y] and all(
                 [(self.session.cookies.get(c, domain='') or 'sg!no!pw') in self.digest for c in ('uid', 'pass')])),
-            failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings'))
+            failed_msg=(lambda y=None: 'Invalid cookie details for %s. Check settings'))

     def _search_provider(self, search_params, **kwargs):

@@ -109,7 +109,7 @@ class SceneHDProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
             except (BaseException, Exception):
-                logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
+                logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}')
             self._log_search(mode, len(items[mode]) - cnt, search_url)

@@ -50,7 +50,7 @@ class SceneTimeProvider(generic.TorrentProvider):
                 ['staff-support' in y, self.has_all_cookies()] +
                 [(self.session.cookies.get(x, domain='') or 'sg!no!pw') in self.digest
                  for x in ('uid', 'pass')])),
-            failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings'))
+            failed_msg=(lambda y=None: 'Invalid cookie details for %s. Check settings'))

     @staticmethod
     def _has_signature(data=None):

@@ -146,7 +146,7 @@ class SceneTimeProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
             except (BaseException, Exception):
-                logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
+                logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}')
             self._log_search(mode, len(items[mode]) - cnt, search_url, log_settings_hint)

@@ -134,7 +134,7 @@ class ShazbatProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
             except (BaseException, Exception):
-                logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
+                logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}')
             self._log_search(mode, len(items[mode]) - cnt, search_url)

         results = self._sort_seeding(mode, results + items[mode])

@@ -114,7 +114,7 @@ class ShowRSSProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
             except (BaseException, Exception):
-                logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
+                logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}')
             self._log_search(mode, len(items[mode]) - cnt, search_url)

         results = self._sort_seeding(mode, results + items[mode])

@@ -117,7 +117,7 @@ class SnowflProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
             except (BaseException, Exception):
-                logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
+                logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}')
             self._log_search(mode, len(items[mode]) - cnt, search_url)

@@ -46,7 +46,7 @@ class SpeedAppProvider(generic.TorrentProvider):
         return super(SpeedAppProvider, self)._authorised(
             logged_in=self.logged_in, parse_json=True, headers=self.auth_header(),
-            failed_msg=(lambda y=None: u'Invalid token or permissions for %s. Check settings'))
+            failed_msg=(lambda y=None: 'Invalid token or permissions for %s. Check settings'))

     def logged_in(self, resp=None):

@@ -94,9 +94,9 @@ class SpeedCDProvider(generic.TorrentProvider):
                 self.digest = 'inSpeed_speedian=%s' % self.session.cookies.get('inSpeed_speedian')
                 sickgear.save_config()
                 result = True
-                logger.log('Cookie details for %s updated.' % self.name, logger.DEBUG)
+                logger.debug('Cookie details for %s updated.' % self.name)
             elif not self.failure_count:
-                logger.log('Invalid cookie details for %s and login failed. Check settings' % self.name, logger.ERROR)
+                logger.error('Invalid cookie details for %s and login failed. Check settings' % self.name)
         return result

     @staticmethod

@@ -113,7 +113,7 @@ class ThePirateBayProvider(generic.TorrentProvider):
                 if not self._reject_item(seeders, leechers):
                     status, info_hash = [cur_item.get(k) for k in ('status', 'info_hash')]
                     if self.confirmed and not rc['verify'].search(status):
-                        logger.log(u'Skipping untrusted non-verified result: ' + title, logger.DEBUG)
+                        logger.debug('Skipping untrusted non-verified result: ' + title)
                         continue
                     download_magnet = info_hash if '&tr=' in info_hash \
                         else self._dhtless_magnet(info_hash, title)

@@ -236,7 +236,7 @@ class ThePirateBayProvider(generic.TorrentProvider):
                         if self.confirmed and not (
                                 tr.find('img', title=rc['verify']) or tr.find('img', alt=rc['verify'])
                                 or tr.find('img', src=rc['verify'])):
-                            logger.log(u'Skipping untrusted non-verified result: ' + title, logger.DEBUG)
+                            logger.debug('Skipping untrusted non-verified result: ' + title)
                             continue

                         if title and download_magnet:

@@ -245,7 +245,7 @@ class ThePirateBayProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
             except (BaseException, Exception):
-                logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
+                logger.error(f'Failed to parse. Traceback: {traceback.format_exc()}')
             self._log_search(mode, len(items[mode]) - cnt, search_url)

         results = self._sort_seeding(mode, results + items[mode])
