Mirror of https://github.com/SickGear/SickGear.git, synced 2025-01-05 17:43:37 +00:00
Change common and config code to PEP8 standards

parent eb08119ac2
commit d479984e49

3 changed files with 198 additions and 221 deletions
@@ -22,6 +22,7 @@
 * Add BET network logo
 * Remove unused force variable from code and PEP8
 * Change browser, bs4 parser and classes code to PEP8 standards
+* Change common and config code to PEP8 standards

 [develop changelog]
 * Fix traceback error when using the menu item Manage/Update Kodi

@@ -22,44 +22,39 @@ import platform
 import re
 import uuid

-import logger
 import sickbeard
+import logger


 INSTANCE_ID = str(uuid.uuid1())

-USER_AGENT = ('SickGear/(' + platform.system() + '; ' + platform.release() + '; ' + INSTANCE_ID + ')')
-
-mediaExtensions = ['avi', 'mkv', 'mpg', 'mpeg', 'wmv',
-                   'ogm', 'mp4', 'iso', 'img', 'divx',
-                   'm2ts', 'm4v', 'ts', 'flv', 'f4v',
-                   'mov', 'rmvb', 'vob', 'dvr-ms', 'wtv',
-                   'ogv', '3gp', 'webm']
+USER_AGENT = ('SickGear/(%s; %s; %s)' % (platform.system(), platform.release(), INSTANCE_ID))
+
+mediaExtensions = ['avi', 'mkv', 'mpg', 'mpeg', 'wmv', 'ogm', 'mp4', 'iso', 'img', 'divx', 'm2ts', 'm4v', 'ts', 'flv',
+                   'f4v', 'mov', 'rmvb', 'vob', 'dvr-ms', 'wtv', 'ogv', '3gp', 'webm']

 subtitleExtensions = ['srt', 'sub', 'ass', 'idx', 'ssa']

-cpu_presets = {'HIGH': 0.1,
-               'NORMAL': 0.05,
-               'LOW': 0.01
-               }
+cpu_presets = {'LOW': 0.01, 'NORMAL': 0.05, 'HIGH': 0.1}

-### Other constants
+# Other constants
 MULTI_EP_RESULT = -1
 SEASON_RESULT = -2

-### Notification Types
+# Notification Types
 NOTIFY_SNATCH = 1
 NOTIFY_DOWNLOAD = 2
 NOTIFY_SUBTITLE_DOWNLOAD = 3
 NOTIFY_GIT_UPDATE = 4
 NOTIFY_GIT_UPDATE_TEXT = 5

-notifyStrings = {}
-notifyStrings[NOTIFY_SNATCH] = "Started Download"
-notifyStrings[NOTIFY_DOWNLOAD] = "Download Finished"
-notifyStrings[NOTIFY_SUBTITLE_DOWNLOAD] = "Subtitle Download Finished"
-notifyStrings[NOTIFY_GIT_UPDATE] = "SickGear Updated"
-notifyStrings[NOTIFY_GIT_UPDATE_TEXT] = "SickGear Updated To Commit#: "
+notifyStrings = {NOTIFY_SNATCH: 'Started Download',
+                 NOTIFY_DOWNLOAD: 'Download Finished',
+                 NOTIFY_SUBTITLE_DOWNLOAD: 'Subtitle Download Finished',
+                 NOTIFY_GIT_UPDATE: 'SickGear Updated',
+                 NOTIFY_GIT_UPDATE_TEXT: 'SickGear Updated To Commit#: '}

-### Episode statuses
+# Episode statuses
 UNKNOWN = -1  # should never happen
 UNAIRED = 1  # episodes that haven't aired yet
 SNATCHED = 2  # qualified with quality

@@ -70,7 +65,7 @@ ARCHIVED = 6  # episodes that you don't have locally (counts toward download com
 IGNORED = 7  # episodes that you don't want included in your download stats
 SNATCHED_PROPER = 9  # qualified with quality
 SUBTITLED = 10  # qualified with quality
-FAILED = 11  #episode downloaded or snatched we don't want
+FAILED = 11  # episode downloaded or snatched we don't want
 SNATCHED_BEST = 12  # episode redownloaded using best quality

 NAMING_REPEAT = 1

@@ -80,13 +75,12 @@ NAMING_LIMITED_EXTEND = 8
 NAMING_SEPARATED_REPEAT = 16
 NAMING_LIMITED_EXTEND_E_PREFIXED = 32

-multiEpStrings = {}
-multiEpStrings[NAMING_REPEAT] = "Repeat"
-multiEpStrings[NAMING_SEPARATED_REPEAT] = "Repeat (Separated)"
-multiEpStrings[NAMING_DUPLICATE] = "Duplicate"
-multiEpStrings[NAMING_EXTEND] = "Extend"
-multiEpStrings[NAMING_LIMITED_EXTEND] = "Extend (Limited)"
-multiEpStrings[NAMING_LIMITED_EXTEND_E_PREFIXED] = "Extend (Limited, E-prefixed)"
+multiEpStrings = {NAMING_REPEAT: 'Repeat',
+                  NAMING_SEPARATED_REPEAT: 'Repeat (Separated)',
+                  NAMING_DUPLICATE: 'Duplicate',
+                  NAMING_EXTEND: 'Extend',
+                  NAMING_LIMITED_EXTEND: 'Extend (Limited)',
+                  NAMING_LIMITED_EXTEND_E_PREFIXED: 'Extend (Limited, E-prefixed)'}


 class Quality:

@ -104,30 +98,30 @@ class Quality:
|
|||
# put these bits at the other end of the spectrum, far enough out that they shouldn't interfere
|
||||
UNKNOWN = 1 << 15 # 32768
|
||||
|
||||
qualityStrings = {NONE: "N/A",
|
||||
UNKNOWN: "Unknown",
|
||||
SDTV: "SD TV",
|
||||
SDDVD: "SD DVD",
|
||||
HDTV: "HD TV",
|
||||
RAWHDTV: "RawHD TV",
|
||||
FULLHDTV: "1080p HD TV",
|
||||
HDWEBDL: "720p WEB-DL",
|
||||
FULLHDWEBDL: "1080p WEB-DL",
|
||||
HDBLURAY: "720p BluRay",
|
||||
FULLHDBLURAY: "1080p BluRay"}
|
||||
qualityStrings = {NONE: 'N/A',
|
||||
UNKNOWN: 'Unknown',
|
||||
SDTV: 'SD TV',
|
||||
SDDVD: 'SD DVD',
|
||||
HDTV: 'HD TV',
|
||||
RAWHDTV: 'RawHD TV',
|
||||
FULLHDTV: '1080p HD TV',
|
||||
HDWEBDL: '720p WEB-DL',
|
||||
FULLHDWEBDL: '1080p WEB-DL',
|
||||
HDBLURAY: '720p BluRay',
|
||||
FULLHDBLURAY: '1080p BluRay'}
|
||||
|
||||
statusPrefixes = {DOWNLOADED: "Downloaded",
|
||||
SNATCHED: "Snatched",
|
||||
SNATCHED_PROPER: "Snatched (Proper)",
|
||||
FAILED: "Failed",
|
||||
SNATCHED_BEST: "Snatched (Best)"}
|
||||
statusPrefixes = {DOWNLOADED: 'Downloaded',
|
||||
SNATCHED: 'Snatched',
|
||||
SNATCHED_PROPER: 'Snatched (Proper)',
|
||||
FAILED: 'Failed',
|
||||
SNATCHED_BEST: 'Snatched (Best)'}
|
||||
|
||||
@staticmethod
|
||||
def _getStatusStrings(status):
|
||||
toReturn = {}
|
||||
for x in Quality.qualityStrings.keys():
|
||||
toReturn[Quality.compositeStatus(status, x)] = Quality.statusPrefixes[status] + " (" + \
|
||||
Quality.qualityStrings[x] + ")"
|
||||
toReturn[Quality.compositeStatus(status, x)] = '%s (%s)' % (
|
||||
Quality.statusPrefixes[status], Quality.qualityStrings[x])
|
||||
return toReturn
|
||||
|
||||
@staticmethod
|
||||
|
@@ -150,7 +144,7 @@ class Quality:
             if curQual << 16 & quality:
                 bestQualities.append(curQual)

-        return (sorted(anyQualities), sorted(bestQualities))
+        return sorted(anyQualities), sorted(bestQualities)

     @staticmethod
     def nameQuality(name, anime=False):

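For context: the `curQual << 16 & quality` test above works because "any" (acceptable) qualities occupy the low 16 bits of the combined mask and "best" (upgrade-to) qualities are the same flags shifted up 16 bits. A minimal, self-contained sketch of that scheme; the flag values here are an illustrative subset, not SickGear's full list:

# Sketch: "any" qualities in the low word, "best" in the high word.
SDTV, HDTV, HDWEBDL = 1 << 0, 1 << 2, 1 << 3
ALL_FLAGS = (SDTV, HDTV, HDWEBDL)

def combine_qualities(any_qualities, best_qualities):
    combined = 0
    for q in any_qualities:
        combined |= q        # low 16 bits: acceptable qualities
    for q in best_qualities:
        combined |= q << 16  # high 16 bits: qualities to upgrade to
    return combined

def split_quality(quality):
    any_q = [q for q in ALL_FLAGS if q & quality]
    best_q = [q for q in ALL_FLAGS if q << 16 & quality]
    return sorted(any_q), sorted(best_q)

combined = combine_qualities([SDTV, HDTV], [HDTV])
assert split_quality(combined) == ([SDTV, HDTV], [HDTV])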
@@ -166,7 +160,7 @@ class Quality:
         if x == Quality.UNKNOWN:
             continue

-        if x == Quality.NONE:  #Last chance
+        if x == Quality.NONE:  # Last chance
             return Quality.sceneQuality(name, anime)

         regex = '\W' + Quality.qualityStrings[x].replace(' ', '\W') + '\W'

@@ -182,14 +176,14 @@ class Quality:

         name = os.path.basename(name)

-        checkName = lambda list, func: func([re.search(x, name, re.I) for x in list])
+        checkName = lambda quality_list, func: func([re.search(x, name, re.I) for x in quality_list])

         if anime:
-            dvdOptions = checkName(["dvd", "dvdrip"], any)
-            blueRayOptions = checkName(["bluray", "blu-ray", "BD"], any)
-            sdOptions = checkName(["360p", "480p", "848x480", "XviD"], any)
-            hdOptions = checkName(["720p", "1280x720", "960x720"], any)
-            fullHD = checkName(["1080p", "1920x1080"], any)
+            dvdOptions = checkName(['dvd', 'dvdrip'], any)
+            blueRayOptions = checkName(['bluray', 'blu-ray', 'BD'], any)
+            sdOptions = checkName(['360p', '480p', '848x480', 'XviD'], any)
+            hdOptions = checkName(['720p', '1280x720', '960x720'], any)
+            fullHD = checkName(['1080p', '1920x1080'], any)

             if sdOptions and not blueRayOptions and not dvdOptions:
                 return Quality.SDTV

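The renamed `checkName` helper is just a regex fan-out: it runs every pattern in the list against the file name and folds the match results with `any` or `all`. A standalone sketch of the same shape; the file name is illustrative:

import re

name = 'Show.Name.S01E02.720p.HDTV.x264-GRP.mkv'  # illustrative input

# Apply each pattern case-insensitively, then fold with any()/all().
check_name = lambda quality_list, func: func(
    [re.search(x, name, re.I) for x in quality_list])

assert check_name(['720p', 'hdtv', 'x264'], all)  # all three tokens match
assert not check_name(['dvd', 'dvdrip'], any)     # no DVD marker present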
@@ -206,44 +200,41 @@ class Quality:
             elif blueRayOptions and fullHD and not hdOptions:
                 return Quality.FULLHDBLURAY
             elif sickbeard.ANIME_TREAT_AS_HDTV:
-                logger.log(u'Treating file: ' + name + ' with "unknown" quality as HDTV per user settings',
-                           logger.DEBUG)
+                logger.log(u'Treating file: %s with "unknown" quality as HDTV per user settings' % name, logger.DEBUG)
                 return Quality.HDTV
             else:
                 return Quality.UNKNOWN

-        if checkName(["(pdtv|hdtv|dsr|tvrip).(xvid|x264|h.?264)"], all) and not checkName(["(720|1080)[pi]"], all) and\
-                not checkName(["hr.ws.pdtv.x264"], any):
+        if checkName(['(pdtv|hdtv|dsr|tvrip).(xvid|x264|h.?264)'], all) and not checkName(['(720|1080)[pi]'], all) \
+                and not checkName(['hr.ws.pdtv.x264'], any):
             return Quality.SDTV
-        elif checkName(["web.dl|webrip", "xvid|x264|h.?264"], all) and not checkName(["(720|1080)[pi]"], all):
+        elif checkName(['web.dl|webrip', 'xvid|x264|h.?264'], all) and not checkName(['(720|1080)[pi]'], all):
            return Quality.SDTV
-        elif checkName(["(dvdrip|b[r|d]rip)(.ws)?.(xvid|divx|x264)"], any) and not checkName(["(720|1080)[pi]"], all):
+        elif checkName(['(dvdrip|b[r|d]rip)(.ws)?.(xvid|divx|x264)'], any) and not checkName(['(720|1080)[pi]'], all):
             return Quality.SDDVD
-        elif checkName(["720p", "hdtv", "x264"], all) or checkName(["hr.ws.pdtv.x264"], any) and not checkName(
-                ["(1080)[pi]"], all):
+        elif checkName(['720p', 'hdtv', 'x264'], all) or checkName(['hr.ws.pdtv.x264'], any) \
+                and not checkName(['(1080)[pi]'], all):
             return Quality.HDTV
-        elif checkName(["720p|1080i", "hdtv", "mpeg-?2"], all) or checkName(["1080[pi].hdtv", "h.?264"], all):
+        elif checkName(['720p|1080i', 'hdtv', 'mpeg-?2'], all) or checkName(['1080[pi].hdtv', 'h.?264'], all):
             return Quality.RAWHDTV
-        elif checkName(["1080p", "hdtv", "x264"], all):
+        elif checkName(['1080p', 'hdtv', 'x264'], all):
             return Quality.FULLHDTV
-        elif checkName(["720p", "web.dl|webrip"], all) or checkName(["720p", "itunes", "h.?264"], all):
+        elif checkName(['720p', 'web.dl|webrip'], all) or checkName(['720p', 'itunes', 'h.?264'], all):
             return Quality.HDWEBDL
-        elif checkName(["1080p", "web.dl|webrip"], all) or checkName(["1080p", "itunes", "h.?264"], all):
+        elif checkName(['1080p', 'web.dl|webrip'], all) or checkName(['1080p', 'itunes', 'h.?264'], all):
             return Quality.FULLHDWEBDL
-        elif checkName(["720p", "bluray|hddvd|b[r|d]rip", "x264"], all):
+        elif checkName(['720p', 'bluray|hddvd|b[r|d]rip', 'x264'], all):
             return Quality.HDBLURAY
-        elif checkName(["1080p", "bluray|hddvd|b[r|d]rip", "x264"], all):
+        elif checkName(['1080p', 'bluray|hddvd|b[r|d]rip', 'x264'], all):
             return Quality.FULLHDBLURAY
         else:
             return Quality.UNKNOWN

     @staticmethod
     def assumeQuality(name):
-        if name.lower().endswith((".avi", ".mp4")):
+        if name.lower().endswith(('.avi', '.mp4')):
             return Quality.SDTV
         # elif name.lower().endswith(".mkv"):
         #     return Quality.HDTV
-        elif name.lower().endswith(".ts"):
+        elif name.lower().endswith('.ts'):
             return Quality.RAWHDTV
         else:
             return Quality.UNKNOWN

@@ -260,13 +251,13 @@ class Quality:
     def splitCompositeStatus(status):
         """Returns a tuple containing (status, quality)"""
         if status == UNKNOWN:
-            return (UNKNOWN, Quality.UNKNOWN)
+            return UNKNOWN, Quality.UNKNOWN

         for x in sorted(Quality.qualityStrings.keys(), reverse=True):
             if status > x * 100:
-                return (status - x * 100, x)
+                return status - x * 100, x

-        return (status, Quality.NONE)
+        return status, Quality.NONE

     @staticmethod
     def statusFromName(name, assume=True, anime=False):

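`splitCompositeStatus` depends on the encoding used by `compositeStatus`: the quality flag is multiplied by 100 and added to the plain status, so the status component always stays below the smallest scaled quality. A self-contained sketch of the round trip, using a small illustrative subset of the constants:

SNATCHED = 2                    # plain status, always < 100
NONE, SDTV, HDTV = 0, 1 << 0, 1 << 2
QUALITY_UNKNOWN = 1 << 15       # parked far out, as in the class above

def composite_status(status, quality):
    return status + quality * 100

def split_composite_status(composite):
    # Scan qualities from highest to lowest, as splitCompositeStatus does.
    for q in sorted((QUALITY_UNKNOWN, HDTV, SDTV, NONE), reverse=True):
        if composite > q * 100:
            return composite - q * 100, q
    return composite, NONE

assert split_composite_status(composite_status(SNATCHED, HDTV)) == (SNATCHED, HDTV)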
@@ -302,27 +293,28 @@ ANY = Quality.combineQualities(
 BEST = Quality.combineQualities([Quality.SDTV, Quality.HDTV, Quality.HDWEBDL], [Quality.HDTV])

 qualityPresets = (SD, HD, HD720p, HD1080p, ANY)
-qualityPresetStrings = {SD: "SD",
-                        HD: "HD",
-                        HD720p: "HD720p",
-                        HD1080p: "HD1080p",
-                        ANY: "Any"}
+
+qualityPresetStrings = {SD: 'SD',
+                        HD: 'HD',
+                        HD720p: 'HD720p',
+                        HD1080p: 'HD1080p',
+                        ANY: 'Any'}


 class StatusStrings:
     def __init__(self):
-        self.statusStrings = {UNKNOWN: "Unknown",
-                              UNAIRED: "Unaired",
-                              SNATCHED: "Snatched",
-                              DOWNLOADED: "Downloaded",
-                              SKIPPED: "Skipped",
-                              SNATCHED_PROPER: "Snatched (Proper)",
-                              WANTED: "Wanted",
-                              ARCHIVED: "Archived",
-                              IGNORED: "Ignored",
-                              SUBTITLED: "Subtitled",
-                              FAILED: "Failed",
-                              SNATCHED_BEST: "Snatched (Best)"}
+        self.statusStrings = {UNKNOWN: 'Unknown',
+                              UNAIRED: 'Unaired',
+                              SNATCHED: 'Snatched',
+                              DOWNLOADED: 'Downloaded',
+                              SKIPPED: 'Skipped',
+                              SNATCHED_PROPER: 'Snatched (Proper)',
+                              WANTED: 'Wanted',
+                              ARCHIVED: 'Archived',
+                              IGNORED: 'Ignored',
+                              SUBTITLED: 'Subtitled',
+                              FAILED: 'Failed',
+                              SNATCHED_BEST: 'Snatched (Best)'}

     def __getitem__(self, name):
         if name in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST:

@@ -330,12 +322,13 @@ class StatusStrings:
             if quality == Quality.NONE:
                 return self.statusStrings[status]
             else:
-                return self.statusStrings[status] + " (" + Quality.qualityStrings[quality] + ")"
+                return '%s (%s)' % (self.statusStrings[status], Quality.qualityStrings[quality])
         else:
             return self.statusStrings[name] if self.statusStrings.has_key(name) else ''

     def has_key(self, name):
-        return name in self.statusStrings or name in Quality.DOWNLOADED or name in Quality.SNATCHED or name in Quality.SNATCHED_PROPER or name in Quality.SNATCHED_BEST
+        return name in self.statusStrings or name in Quality.DOWNLOADED or name in Quality.SNATCHED \
+            or name in Quality.SNATCHED_PROPER or name in Quality.SNATCHED_BEST


 statusStrings = StatusStrings()

@@ -351,19 +344,13 @@ class Overview:
     # For both snatched statuses. Note: SNATCHED/QUAL have same value and break dict.
     SNATCHED = SNATCHED_PROPER = SNATCHED_BEST  # 9

-    overviewStrings = {SKIPPED: "skipped",
-                       WANTED: "wanted",
-                       QUAL: "qual",
-                       GOOD: "good",
-                       UNAIRED: "unaired",
-                       SNATCHED: "snatched"}
-
-# Get our xml namespaces correct for lxml
-XML_NSMAP = {'xsi': 'http://www.w3.org/2001/XMLSchema-instance',
-             'xsd': 'http://www.w3.org/2001/XMLSchema'}
+    overviewStrings = {SKIPPED: 'skipped',
+                       WANTED: 'wanted',
+                       QUAL: 'qual',
+                       GOOD: 'good',
+                       UNAIRED: 'unaired',
+                       SNATCHED: 'snatched'}

 countryList = {'Australia': 'AU',
                'Canada': 'CA',
-               'USA': 'US'
-               }
+               'USA': 'US'}

@@ -20,35 +20,31 @@ import os.path
 import datetime
 import re
 import urlparse

 import sickbeard

 from sickbeard import encodingKludge as ek
-from sickbeard import helpers
-from sickbeard import logger
-from sickbeard import naming
-from sickbeard import db
-from sickbeard import providers
-from sickbeard.providers.generic import GenericProvider
+from sickbeard import helpers, logger, naming, db, providers

-naming_ep_type = ("%(seasonnumber)dx%(episodenumber)02d",
-                  "s%(seasonnumber)02de%(episodenumber)02d",
-                  "S%(seasonnumber)02dE%(episodenumber)02d",
-                  "%(seasonnumber)02dx%(episodenumber)02d")
-
-sports_ep_type = ("%(seasonnumber)dx%(episodenumber)02d",
-                  "s%(seasonnumber)02de%(episodenumber)02d",
-                  "S%(seasonnumber)02dE%(episodenumber)02d",
-                  "%(seasonnumber)02dx%(episodenumber)02d")
+naming_ep_type = ('%(seasonnumber)dx%(episodenumber)02d',
+                  's%(seasonnumber)02de%(episodenumber)02d',
+                  'S%(seasonnumber)02dE%(episodenumber)02d',
+                  '%(seasonnumber)02dx%(episodenumber)02d')

-naming_ep_type_text = ("1x02", "s01e02", "S01E02", "01x02")
+sports_ep_type = ('%(seasonnumber)dx%(episodenumber)02d',
+                  's%(seasonnumber)02de%(episodenumber)02d',
+                  'S%(seasonnumber)02dE%(episodenumber)02d',
+                  '%(seasonnumber)02dx%(episodenumber)02d')

-naming_multi_ep_type = {0: ["-%(episodenumber)02d"] * len(naming_ep_type),
-                        1: [" - " + x for x in naming_ep_type],
-                        2: [x + "%(episodenumber)02d" for x in ("x", "e", "E", "x")]}
-naming_multi_ep_type_text = ("extend", "duplicate", "repeat")
+naming_ep_type_text = ('1x02', 's01e02', 'S01E02', '01x02')

-naming_sep_type = (" - ", " ")
-naming_sep_type_text = (" - ", "space")
+naming_multi_ep_type = {0: ['-%(episodenumber)02d'] * len(naming_ep_type),
+                        1: [' - %s' % x for x in naming_ep_type],
+                        2: [x + '%(episodenumber)02d' for x in ('x', 'e', 'E', 'x')]}
+naming_multi_ep_type_text = ('extend', 'duplicate', 'repeat')
+
+naming_sep_type = (' - ', ' ')
+naming_sep_type_text = (' - ', 'space')


 def change_HTTPS_CERT(https_cert):

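The `naming_ep_type` entries above are ordinary %-format templates, so filling one is plain dict interpolation. A quick illustration of how one tuple entry expands:

pattern = 'S%(seasonnumber)02dE%(episodenumber)02d'
print(pattern % {'seasonnumber': 1, 'episodenumber': 2})  # -> S01E02

# The multi-episode suffixes compose the same way; naming_multi_ep_type[2]
# appends another zero-padded episode number, giving S01E02E03.
suffix = 'E%(episodenumber)02d'
print(pattern % {'seasonnumber': 1, 'episodenumber': 2}
      + suffix % {'episodenumber': 3})  # -> S01E02E03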
@@ -59,7 +55,7 @@ def change_HTTPS_CERT(https_cert):
     if os.path.normpath(sickbeard.HTTPS_CERT) != os.path.normpath(https_cert):
         if helpers.makeDir(os.path.dirname(os.path.abspath(https_cert))):
             sickbeard.HTTPS_CERT = os.path.normpath(https_cert)
-            logger.log(u"Changed https cert path to " + https_cert)
+            logger.log(u'Changed https cert path to %s' % https_cert)
         else:
             return False

@@ -74,7 +70,7 @@ def change_HTTPS_KEY(https_key):
     if os.path.normpath(sickbeard.HTTPS_KEY) != os.path.normpath(https_key):
         if helpers.makeDir(os.path.dirname(os.path.abspath(https_key))):
             sickbeard.HTTPS_KEY = os.path.normpath(https_key)
-            logger.log(u"Changed https key path to " + https_key)
+            logger.log(u'Changed https key path to %s' % https_key)
         else:
             return False

@@ -92,13 +88,13 @@ def change_LOG_DIR(log_dir, web_log):
         sickbeard.LOG_DIR = abs_log_dir

         logger.sb_log_instance.initLogging()
-        logger.log(u"Initialized new log file in " + sickbeard.LOG_DIR)
+        logger.log(u'Initialized new log file in %s' % sickbeard.LOG_DIR)
         log_dir_changed = True

     else:
         return False

-    if sickbeard.WEB_LOG != web_log_value or log_dir_changed == True:
+    if sickbeard.WEB_LOG != web_log_value or log_dir_changed:
         sickbeard.WEB_LOG = web_log_value

     return True

@@ -112,7 +108,7 @@ def change_NZB_DIR(nzb_dir):
     if os.path.normpath(sickbeard.NZB_DIR) != os.path.normpath(nzb_dir):
         if helpers.makeDir(nzb_dir):
             sickbeard.NZB_DIR = os.path.normpath(nzb_dir)
-            logger.log(u"Changed NZB folder to " + nzb_dir)
+            logger.log(u'Changed NZB folder to %s' % nzb_dir)
         else:
             return False

@@ -127,7 +123,7 @@ def change_TORRENT_DIR(torrent_dir):
     if os.path.normpath(sickbeard.TORRENT_DIR) != os.path.normpath(torrent_dir):
         if helpers.makeDir(torrent_dir):
             sickbeard.TORRENT_DIR = os.path.normpath(torrent_dir)
-            logger.log(u"Changed torrent folder to " + torrent_dir)
+            logger.log(u'Changed torrent folder to %s' % torrent_dir)
         else:
             return False

@@ -142,7 +138,7 @@ def change_TV_DOWNLOAD_DIR(tv_download_dir):
     if os.path.normpath(sickbeard.TV_DOWNLOAD_DIR) != os.path.normpath(tv_download_dir):
         if helpers.makeDir(tv_download_dir):
             sickbeard.TV_DOWNLOAD_DIR = os.path.normpath(tv_download_dir)
-            logger.log(u"Changed TV download folder to " + tv_download_dir)
+            logger.log(u'Changed TV download folder to %s' % tv_download_dir)
         else:
             return False

@@ -157,6 +153,7 @@ def change_AUTOPOSTPROCESSER_FREQUENCY(freq):

     sickbeard.autoPostProcesserScheduler.cycleTime = datetime.timedelta(minutes=sickbeard.AUTOPOSTPROCESSER_FREQUENCY)

+
 def change_RECENTSEARCH_FREQUENCY(freq):
     sickbeard.RECENTSEARCH_FREQUENCY = to_int(freq, default=sickbeard.DEFAULT_RECENTSEARCH_FREQUENCY)

@@ -165,6 +162,7 @@ def change_RECENTSEARCH_FREQUENCY(freq):

     sickbeard.recentSearchScheduler.cycleTime = datetime.timedelta(minutes=sickbeard.RECENTSEARCH_FREQUENCY)

+
 def change_BACKLOG_FREQUENCY(freq):
     sickbeard.BACKLOG_FREQUENCY = to_int(freq, default=sickbeard.DEFAULT_BACKLOG_FREQUENCY)

@@ -174,6 +172,7 @@ def change_BACKLOG_FREQUENCY(freq):

     sickbeard.backlogSearchScheduler.cycleTime = datetime.timedelta(minutes=sickbeard.BACKLOG_FREQUENCY)

+
 def change_UPDATE_FREQUENCY(freq):
     sickbeard.UPDATE_FREQUENCY = to_int(freq, default=sickbeard.DEFAULT_UPDATE_FREQUENCY)

@@ -182,6 +181,7 @@ def change_UPDATE_FREQUENCY(freq):

     sickbeard.versionCheckScheduler.cycleTime = datetime.timedelta(hours=sickbeard.UPDATE_FREQUENCY)

+
 def change_VERSION_NOTIFY(version_notify):
     oldSetting = sickbeard.VERSION_NOTIFY

@@ -190,8 +190,9 @@ def change_VERSION_NOTIFY(version_notify):
     if not version_notify:
         sickbeard.NEWEST_VERSION_STRING = None

-    if oldSetting == False and version_notify == True:
-        sickbeard.versionCheckScheduler.action.run()  # @UndefinedVariable
+    if not oldSetting and version_notify:
+        sickbeard.versionCheckScheduler.action.run()
+

 def change_DOWNLOAD_PROPERS(download_propers):
     if sickbeard.DOWNLOAD_PROPERS == download_propers:

@@ -202,12 +203,13 @@ def change_DOWNLOAD_PROPERS(download_propers):
         sickbeard.properFinderScheduler.start()
     else:
         sickbeard.properFinderScheduler.stop.set()
-        logger.log(u"Waiting for the PROPERFINDER thread to exit")
+        logger.log(u'Waiting for the PROPERFINDER thread to exit')
         try:
             sickbeard.properFinderScheduler.join(10)
         except:
             pass

+
 def change_USE_TRAKT(use_trakt):
     if sickbeard.USE_TRAKT == use_trakt:
         return

@@ -217,12 +219,13 @@ def change_USE_TRAKT(use_trakt):
         sickbeard.traktCheckerScheduler.start()
     else:
         sickbeard.traktCheckerScheduler.stop.set()
-        logger.log(u"Waiting for the TRAKTCHECKER thread to exit")
+        logger.log(u'Waiting for the TRAKTCHECKER thread to exit')
         try:
             sickbeard.traktCheckerScheduler.join(10)
         except:
             pass

+
 def change_USE_SUBTITLES(use_subtitles):
     if sickbeard.USE_SUBTITLES == use_subtitles:
         return

@@ -232,12 +235,13 @@ def change_USE_SUBTITLES(use_subtitles):
         sickbeard.subtitlesFinderScheduler.start()
     else:
         sickbeard.subtitlesFinderScheduler.stop.set()
-        logger.log(u"Waiting for the SUBTITLESFINDER thread to exit")
+        logger.log(u'Waiting for the SUBTITLESFINDER thread to exit')
         try:
             sickbeard.subtitlesFinderScheduler.join(10)
         except:
             pass

+
 def CheckSection(CFG, sec):
     """ Check if INI section exists, if not create it """
     try:

@@ -281,10 +285,10 @@ def clean_host(host, default_port=None):
     if cleaned_host:

         if cleaned_port:
-            host = cleaned_host + ':' + cleaned_port
+            host = '%s:%s' % (cleaned_host, cleaned_port)

         elif default_port:
-            host = cleaned_host + ':' + str(default_port)
+            host = '%s:%s' % (cleaned_host, default_port)

         else:
             host = cleaned_host

@@ -298,14 +302,14 @@ def clean_host(host, default_port=None):
 def clean_hosts(hosts, default_port=None):
     cleaned_hosts = []

-    for cur_host in [x.strip() for x in hosts.split(",")]:
+    for cur_host in [x.strip() for x in hosts.split(',')]:
         if cur_host:
             cleaned_host = clean_host(cur_host, default_port)
             if cleaned_host:
                 cleaned_hosts.append(cleaned_host)

     if cleaned_hosts:
-        cleaned_hosts = ",".join(cleaned_hosts)
+        cleaned_hosts = ','.join(cleaned_hosts)

     else:
         cleaned_hosts = ''

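The two `'%s:%s'` joins above are the whole of the port handling: an explicit port on the host wins, otherwise `default_port` is appended when given. A hedged, self-contained approximation of that behavior; the real `clean_host` also validates and strips the input, which is elided here:

def clean_host(host, default_port=None):
    """Approximation only: join a host with an explicit or default port."""
    cleaned_host, _, cleaned_port = host.strip().partition(':')
    if not cleaned_host:
        return ''
    if cleaned_port:
        return '%s:%s' % (cleaned_host, cleaned_port)
    if default_port:
        return '%s:%s' % (cleaned_host, default_port)
    return cleaned_host

assert clean_host('example.com', 8081) == 'example.com:8081'
assert clean_host('example.com:9090', 8081) == 'example.com:9090'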
@@ -314,10 +318,7 @@ def clean_hosts(hosts, default_port=None):


 def clean_url(url, add_slash=True):
-    """
-    Returns an cleaned url starting with a scheme and folder with trailing /
-    or an empty string
-    """
+    """ Returns an cleaned url starting with a scheme and folder with trailing '/' or an empty string """

     if url and url.strip():

@@ -329,9 +330,9 @@ def clean_url(url, add_slash=True):
         scheme, netloc, path, query, fragment = urlparse.urlsplit(url, 'http')

         if not path.endswith('/'):
-            basename, ext = ek.ek(os.path.splitext, ek.ek(os.path.basename, path))  # @UnusedVariable
+            basename, ext = ek.ek(os.path.splitext, ek.ek(os.path.basename, path))
             if not ext and add_slash:
-                path = path + '/'
+                path += '/'

         cleaned_url = urlparse.urlunsplit((scheme, netloc, path, query, fragment))

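The touched branch decides whether the path gets a trailing slash: only when the last path segment has no file extension (and `add_slash` is set) is it treated as a folder. A rough standalone sketch of the flow using `urlparse` as imported at the top of the module (the `ek.ek` encoding wrapper is elided):

import os.path
import urlparse  # Python 2, matching the module's imports above

def clean_url(url, add_slash=True):
    """Rough sketch: default the scheme, add '/' to folder-like paths."""
    if not (url and url.strip()):
        return ''
    scheme, netloc, path, query, fragment = urlparse.urlsplit(url.strip(), 'http')
    if not path.endswith('/'):
        basename, ext = os.path.splitext(os.path.basename(path))
        if not ext and add_slash:
            path += '/'  # no extension -> treat as a folder
    return urlparse.urlunsplit((scheme, netloc, path, query, fragment))

# e.g. clean_url('http://example.com/api') -> 'http://example.com/api/'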
@@ -352,9 +353,6 @@ def to_int(val, default=0):
     return val


-################################################################################
-# Check_setting_int #
-################################################################################
 def minimax(val, default, low, high):
     """ Return value forced within range """

@@ -368,9 +366,6 @@ def minimax(val, default, low, high):
     return val


-################################################################################
-# Check_setting_int #
-################################################################################
 def check_setting_int(config, cfg_name, item_name, def_val):
     try:
         my_val = int(config[cfg_name][item_name])

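The body of `minimax` is elided by the diff context, but its docstring states the contract: coerce the value, fall back to the default on failure, then clamp into [low, high]. A hedged reconstruction of that contract, not the project's exact code:

def minimax(val, default, low, high):
    """Sketch of 'return value forced within range' per the docstring."""
    try:
        val = int(val)
    except (TypeError, ValueError):
        val = default
    return max(low, min(high, val))

assert minimax('42', 10, 0, 50) == 42
assert minimax('oops', 10, 0, 50) == 10
assert minimax(999, 10, 0, 50) == 50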
@@ -381,13 +376,10 @@ def check_setting_int(config, cfg_name, item_name, def_val):
     except:
         config[cfg_name] = {}
         config[cfg_name][item_name] = my_val
-    logger.log(item_name + " -> " + str(my_val), logger.DEBUG)
+    logger.log('%s -> %s' % (item_name, my_val), logger.DEBUG)
     return my_val


-################################################################################
-# Check_setting_float #
-################################################################################
 def check_setting_float(config, cfg_name, item_name, def_val):
     try:
         my_val = float(config[cfg_name][item_name])

@@ -399,15 +391,16 @@ def check_setting_float(config, cfg_name, item_name, def_val):
         config[cfg_name] = {}
         config[cfg_name][item_name] = my_val

-    logger.log(item_name + " -> " + str(my_val), logger.DEBUG)
+    logger.log('%s -> %s' % (item_name, my_val), logger.DEBUG)
     return my_val


-################################################################################
-# Check_setting_str #
-################################################################################
 def check_setting_str(config, cfg_name, item_name, def_val, log=True):
-    # For passwords you must include the word `password` in the item_name and add `helpers.encrypt(ITEM_NAME, ENCRYPTION_VERSION)` in save_config()
+    """
+    For passwords you must include the word `password` in the item_name and
+    add `helpers.encrypt(ITEM_NAME, ENCRYPTION_VERSION)` in save_config()
+    """

     if bool(item_name.find('password') + 1):
         log = False
         encryption_version = sickbeard.ENCRYPTION_VERSION

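All three `check_setting_*` helpers follow the same pattern: read `config[section][item]`, coerce to the wanted type, and on any failure write the default back so the key exists afterwards. A compact sketch of that read-or-create behavior using plain dicts (the real code operates on a ConfigObj instance):

def check_setting_int(config, cfg_name, item_name, def_val):
    """Sketch: fetch an int setting, creating it with def_val on failure."""
    try:
        my_val = int(config[cfg_name][item_name])
    except (KeyError, TypeError, ValueError):
        my_val = def_val
        config.setdefault(cfg_name, {})[item_name] = my_val
    return my_val

config = {}
assert check_setting_int(config, 'General', 'web_port', 8081) == 8081
assert config['General']['web_port'] == 8081  # default was written back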
@@ -425,17 +418,18 @@ def check_setting_str(config, cfg_name, item_name, def_val, log=True):
         config[cfg_name][item_name] = helpers.encrypt(my_val, encryption_version)

     if log:
-        logger.log(item_name + " -> " + str(my_val), logger.DEBUG)
+        logger.log('%s -> %s' % (item_name, my_val), logger.DEBUG)
     else:
-        logger.log(item_name + " -> ******", logger.DEBUG)
+        logger.log('%s -> ******' % item_name, logger.DEBUG)

     return my_val


 class ConfigMigrator():
     def __init__(self, config_obj):
         """
         Initializes a config migrator that can take the config from the version indicated in the config
-        file up to the version required by SB
+        file up to the version required by SG
         """

         self.config_obj = config_obj

@@ -451,18 +445,16 @@ class ConfigMigrator():
                                 6: 'Rename daily search to recent search',
                                 7: 'Rename coming episodes to episode view',
                                 8: 'Disable searches on start',
-                                9: 'Rename pushbullet variables',}
+                                9: 'Rename pushbullet variables'}

     def migrate_config(self):
-        """
-        Calls each successive migration until the config is the same version as SB expects
-        """
+        """ Calls each successive migration until the config is the same version as SG expects """

         if self.config_version > self.expected_config_version:
-            logger.log_error_and_exit(u"Your config version (" + str(
-                self.config_version) + ") has been incremented past what this version of SickGear supports (" + str(
-                self.expected_config_version) + ").\n" + \
-                "If you have used other forks or a newer version of SickGear, your config file may be unusable due to their modifications.")
+            logger.log_error_and_exit(
+                u'Your config version (%s) has been incremented past what this version of SickGear supports (%s).\n'
+                'If you have used other forks or a newer version of SickGear, your config file may be unusable due to '
+                'their modifications.' % (self.config_version, self.expected_config_version))

         sickbeard.CONFIG_VERSION = self.config_version

@@ -470,24 +462,24 @@ class ConfigMigrator():
             next_version = self.config_version + 1

             if next_version in self.migration_names:
-                migration_name = ': ' + self.migration_names[next_version]
+                migration_name = ': %s' % self.migration_names[next_version]
             else:
                 migration_name = ''

-            logger.log(u"Backing up config before upgrade")
+            logger.log(u'Backing up config before upgrade')
             if not helpers.backupVersionedFile(sickbeard.CONFIG_FILE, self.config_version):
-                logger.log_error_and_exit(u"Config backup failed, abort upgrading config")
+                logger.log_error_and_exit(u'Config backup failed, abort upgrading config')
             else:
-                logger.log(u"Proceeding with upgrade")
+                logger.log(u'Proceeding with upgrade')

-            # do the migration, expect a method named _migrate_v<num>
-            logger.log(u"Migrating config up to version " + str(next_version) + migration_name)
-            getattr(self, '_migrate_v' + str(next_version))()
+            # do the migration, expect a method named _migrate_v<num>
+            logger.log(u'Migrating config up to version %s %s' % (next_version, migration_name))
+            getattr(self, '_migrate_v%s' % next_version)()
             self.config_version = next_version

             # save new config after migration
             sickbeard.CONFIG_VERSION = self.config_version
-            logger.log(u"Saving config file to disk")
+            logger.log(u'Saving config file to disk')
             sickbeard.save_config()

     # Migration v1: Custom naming

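The `getattr(self, '_migrate_v%s' % next_version)()` line is the heart of the migrator: each version bump is a method discovered by name, applied in sequence until the stored version presumably catches up with the expected one. A stripped-down sketch of that dispatch loop, illustrative names only:

class MigratorSketch(object):
    """Illustrative only: version-numbered methods applied in sequence."""
    def __init__(self, version, expected):
        self.config_version = version
        self.expected_config_version = expected

    def migrate_config(self):
        while self.config_version < self.expected_config_version:
            next_version = self.config_version + 1
            getattr(self, '_migrate_v%s' % next_version)()  # e.g. _migrate_v2
            self.config_version = next_version

    def _migrate_v2(self):
        print('upgrading config to v2')

m = MigratorSketch(1, 2)
m.migrate_config()  # runs _migrate_v2 exactly once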
@@ -497,13 +489,13 @@ class ConfigMigrator():
         """

         sickbeard.NAMING_PATTERN = self._name_to_pattern()
-        logger.log("Based on your old settings I'm setting your new naming pattern to: " + sickbeard.NAMING_PATTERN)
+        logger.log('Based on your old settings I am setting your new naming pattern to: %s' % sickbeard.NAMING_PATTERN)

         sickbeard.NAMING_CUSTOM_ABD = bool(check_setting_int(self.config_obj, 'General', 'naming_dates', 0))

         if sickbeard.NAMING_CUSTOM_ABD:
             sickbeard.NAMING_ABD_PATTERN = self._name_to_pattern(True)
-            logger.log("Adding a custom air-by-date naming pattern to your config: " + sickbeard.NAMING_ABD_PATTERN)
+            logger.log('Adding a custom air-by-date naming pattern to your config: %s' % sickbeard.NAMING_ABD_PATTERN)
         else:
             sickbeard.NAMING_ABD_PATTERN = naming.name_abd_presets[0]

@@ -511,7 +503,7 @@ class ConfigMigrator():

         # see if any of their shows used season folders
         myDB = db.DBConnection()
-        season_folder_shows = myDB.select("SELECT * FROM tv_shows WHERE flatten_folders = 0")
+        season_folder_shows = myDB.select('SELECT * FROM tv_shows WHERE flatten_folders = 0')

         # if any shows had season folders on then prepend season folder to the pattern
         if season_folder_shows:

@@ -524,20 +516,20 @@ class ConfigMigrator():
                 new_season_format = str(new_season_format).replace('09', '%0S')
                 new_season_format = new_season_format.replace('9', '%S')

-                logger.log(
-                    u"Changed season folder format from " + old_season_format + " to " + new_season_format + ", prepending it to your naming config")
+                logger.log(u'Changed season folder format from %s to %s, prepending it to your naming config' %
+                           (old_season_format, new_season_format))
                 sickbeard.NAMING_PATTERN = new_season_format + os.sep + sickbeard.NAMING_PATTERN

             except (TypeError, ValueError):
-                logger.log(u"Can't change " + old_season_format + " to new season format", logger.ERROR)
+                logger.log(u'Can not change %s to new season format' % old_season_format, logger.ERROR)

         # if no shows had it on then don't flatten any shows and don't put season folders in the config
         else:

-            logger.log(u"No shows were using season folders before so I'm disabling flattening on all shows")
+            logger.log(u'No shows were using season folders before so I am disabling flattening on all shows')

             # don't flatten any shows at all
-            myDB.action("UPDATE tv_shows SET flatten_folders = 0")
+            myDB.action('UPDATE tv_shows SET flatten_folders = 0')

         sickbeard.NAMING_FORCE_FOLDERS = naming.check_force_season_folders()

@@ -553,11 +545,11 @@ class ConfigMigrator():
         use_ep_name = bool(check_setting_int(self.config_obj, 'General', 'naming_ep_name', 1))

         # make the presets into templates
-        naming_ep_type = ("%Sx%0E",
-                          "s%0Se%0E",
-                          "S%0SE%0E",
-                          "%0Sx%0E")
-        naming_sep_type = (" - ", " ")
+        naming_ep_type = ('%Sx%0E',
+                          's%0Se%0E',
+                          'S%0SE%0E',
+                          '%0Sx%0E')
+        naming_sep_type = (' - ', ' ')

         # set up our data to use
         if use_periods:

@@ -576,7 +568,7 @@ class ConfigMigrator():
         else:
             ep_string = naming_ep_type[ep_type]

-        finalName = ""
+        finalName = ''

         # start with the show name
         if use_show_name:

@@ -594,7 +586,7 @@ class ConfigMigrator():
             finalName += naming_sep_type[sep_type] + ep_quality

         if use_periods:
-            finalName = re.sub("\s+", ".", finalName)
+            finalName = re.sub('\s+', '.', finalName)

         return finalName

@@ -619,13 +611,13 @@ class ConfigMigrator():
         old_newznab_data = check_setting_str(self.config_obj, 'Newznab', 'newznab_data', '')

         if old_newznab_data:
-            old_newznab_data_list = old_newznab_data.split("!!!")
+            old_newznab_data_list = old_newznab_data.split('!!!')

             for cur_provider_data in old_newznab_data_list:
                 try:
-                    name, url, key, enabled = cur_provider_data.split("|")
+                    name, url, key, enabled = cur_provider_data.split('|')
                 except ValueError:
-                    logger.log(u"Skipping Newznab provider string: '" + cur_provider_data + "', incorrect format",
+                    logger.log(u'Skipping Newznab provider string: "%s", incorrect format' % cur_provider_data,
                                logger.ERROR)
                     continue

@@ -638,15 +630,15 @@ class ConfigMigrator():
                     catIDs = '5030,5040,5060'

                 cur_provider_data_list = [name, url, key, catIDs, enabled]
-                new_newznab_data.append("|".join(cur_provider_data_list))
+                new_newznab_data.append('|'.join(cur_provider_data_list))

-            sickbeard.NEWZNAB_DATA = "!!!".join(new_newznab_data)
+            sickbeard.NEWZNAB_DATA = '!!!'.join(new_newznab_data)

     # Migration v5: Metadata upgrade
     def _migrate_v5(self):
-        """ Updates metadata values to the new format """
+        """ Updates metadata values to the new format

-        """ Quick overview of what the upgrade does:
+        Quick overview of what the upgrade does:

         new | old | description (new)
         ----+-----+--------------------

@@ -684,30 +676,28 @@ class ConfigMigrator():
             cur_metadata = metadata.split('|')
             # if target has the old number of values, do upgrade
             if len(cur_metadata) == 6:
-                logger.log(u"Upgrading " + metadata_name + " metadata, old value: " + metadata)
+                logger.log(u'Upgrading ' + metadata_name + ' metadata, old value: ' + metadata)
                 cur_metadata.insert(4, '0')
                 cur_metadata.append('0')
                 cur_metadata.append('0')
                 cur_metadata.append('0')
                 # swap show fanart, show poster
                 cur_metadata[3], cur_metadata[2] = cur_metadata[2], cur_metadata[3]
-                # if user was using use_banner to override the poster, instead enable the banner option and deactivate poster
+                # if user was using use_banner to override the poster,
+                # instead enable the banner option and deactivate poster
                 if metadata_name == 'XBMC' and use_banner:
                     cur_metadata[4], cur_metadata[3] = cur_metadata[3], '0'
                 # write new format
                 metadata = '|'.join(cur_metadata)
-                logger.log(u"Upgrading " + metadata_name + " metadata, new value: " + metadata)
+                logger.log(u'Upgrading %s metadata, new value: %s' % (metadata_name, metadata))

             elif len(cur_metadata) == 10:

                 metadata = '|'.join(cur_metadata)
-                logger.log(u"Keeping " + metadata_name + " metadata, value: " + metadata)
-
+                logger.log(u'Keeping %s metadata, value: %s' % (metadata_name, metadata))
             else:
-                logger.log(u"Skipping " + metadata_name + " metadata: '" + metadata + "', incorrect format",
-                           logger.ERROR)
+                logger.log(u'Skipping %s: "%s", incorrect format' % (metadata_name, metadata), logger.ERROR)
                 metadata = '0|0|0|0|0|0|0|0|0|0'
-                logger.log(u"Setting " + metadata_name + " metadata, new value: " + metadata)
+                logger.log(u'Setting %s metadata, new value: %s' % (metadata_name, metadata))

             return metadata

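The six-to-ten upgrade above is pure list surgery on the pipe-separated metadata string: insert a new slot at index 4, pad three flags on the end, then swap positions 2 and 3. A tiny sketch of the same transformation; the '1'/'0' values are illustrative:

metadata = '1|1|1|1|1|1'         # illustrative old 6-value config string
cur = metadata.split('|')

cur.insert(4, '0')               # new slot at index 4
cur += ['0', '0', '0']           # three new trailing flags
cur[3], cur[2] = cur[2], cur[3]  # swap show fanart / show poster

metadata = '|'.join(cur)
assert len(metadata.split('|')) == 10
print(metadata)  # -> 1|1|1|1|0|1|1|0|0|0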
@@ -722,7 +712,6 @@ class ConfigMigrator():

     # Migration v6: Rename daily search to recent search
     def _migrate_v6(self):
-
         sickbeard.RECENTSEARCH_FREQUENCY = check_setting_int(self.config_obj, 'General', 'dailysearch_frequency',
                                                              sickbeard.DEFAULT_RECENTSEARCH_FREQUENCY)

@@ -732,16 +721,16 @@ class ConfigMigrator():

         for curProvider in providers.sortedProviderList():
             if hasattr(curProvider, 'enable_recentsearch'):
-                curProvider.enable_recentsearch = bool(check_setting_int(self.config_obj, curProvider.getID().upper(),
-                                                                         curProvider.getID() + '_enable_dailysearch', 1))
+                curProvider.enable_recentsearch = bool(check_setting_int(
+                    self.config_obj, curProvider.getID().upper(), curProvider.getID() + '_enable_dailysearch', 1))

     def _migrate_v7(self):

         sickbeard.EPISODE_VIEW_LAYOUT = check_setting_str(self.config_obj, 'GUI', 'coming_eps_layout', 'banner')
         sickbeard.EPISODE_VIEW_SORT = check_setting_str(self.config_obj, 'GUI', 'coming_eps_sort', 'time')
         if 'date' == sickbeard.EPISODE_VIEW_SORT:
             sickbeard.EPISODE_VIEW_SORT = 'time'
-        sickbeard.EPISODE_VIEW_DISPLAY_PAUSED = bool(check_setting_int(self.config_obj, 'GUI', 'coming_eps_display_paused', 0))
+        sickbeard.EPISODE_VIEW_DISPLAY_PAUSED = bool(
+            check_setting_int(self.config_obj, 'GUI', 'coming_eps_display_paused', 0))
         sickbeard.EPISODE_VIEW_MISSED_RANGE = check_setting_int(self.config_obj, 'GUI', 'coming_eps_missed_range', 7)

     def _migrate_v8(self):