Merge pull request #794 from echel0n/dev

Dev
adam111316 2014-09-13 00:04:53 +08:00
commit 3814d833ad
16 changed files with 305 additions and 55 deletions

New binary image added (977 B); preview not shown.

View file

@@ -112,6 +112,14 @@
</label>
</div>
<div class="field-pair">
<input type="checkbox" name="postpone_if_sync_files" id="postpone_if_sync_files" #if $sickbeard.POSTPONE_IF_SYNC_FILES == True then "checked=\"checked\"" else ""# />
<label class="clearfix" for="postpone_if_sync_files">
<span class="component-title">Postpone post processing</span>
<span class="component-desc">if !sync files are present in the TV download dir</span>
</label>
</div>
<div class="field-pair">
<input type="checkbox" name="nfo_rename" id="nfo_rename" #if $sickbeard.NFO_RENAME == True then "checked=\"checked\"" else ""# />
<label class="clearfix" for="nfo_rename">

View file

@@ -47,6 +47,15 @@ In Progress<br />
<h3>Version Check:</h3>
<a class="btn" href="$sbRoot/manage/manageSearches/forceVersionCheck"><i class="icon-check"></i> Force Check</a>
<br />
<br /> <br />
<h3>Search Queue:</h3>
Backlog: <i>$queueLength['backlog'] pending items</i><br />
Daily: <i>$queueLength['daily'] pending items</i><br />
Manual: <i>$queueLength['manual'] pending items</i><br />
Failed: <i>$queueLength['failed'] pending items</i><br />
#include $os.path.join($sickbeard.PROG_DIR, "gui/slick/interfaces/default/inc_bottom.tmpl")
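The $queueLength values rendered above come from the new SearchQueue.queue_length() method added later in this commit. For reference, the dict the template receives has this shape (counts are illustrative):

# Shape of the value handed to the template as $queueLength (numbers made up):
queueLength = {'backlog': 3, 'daily': 0, 'manual': 1, 'failed': 0}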

View file

@@ -13,8 +13,18 @@ $(document).ready(function(){
});
}
$.fn.getCategories = function (isDefault, name, url, key) {
/**
* Gets the category capabilities for the provided newznab provider.
* @param {String} isDefault
* @param {Array} selectedProvider
* @return no return value; updateNewznabCaps() is run from the AJAX callback
*/
$.fn.getCategories = function (isDefault, selectedProvider) {
var name = selectedProvider[0];
var url = selectedProvider[1];
var key = selectedProvider[2];
if (!name)
return;
@@ -28,23 +38,11 @@ $(document).ready(function(){
var params = {url: url, name: name, key: key};
var returnData;
$.ajaxSetup( { "async": false } );
$.getJSON(sbRoot + '/config/providers/getNewznabCategories', params,
function(data){
if (data.error != "") {
alert(data.error);
return false;
}
if (data.success == false) {
return false;
}
updateNewznabCaps( data, selectedProvider );
console.debug(data.tv_categories);
returnData = data;
});
$.ajaxSetup( { "async": true } );
return returnData;
}
$.fn.addProvider = function (id, name, url, key, cat, isDefault, showProvider) {
@@ -202,30 +200,13 @@ $(document).ready(function(){
//Get Categories Capabilities
if (data[0] && data[1] && data[2] && !ifExists($.fn.newznabProvidersCapabilities, data[0])) {
var categoryresult = $(this).getCategories(isDefault, data[0], data[1], data[2]);
if (categoryresult && categoryresult.success && categoryresult.tv_categories) {
$.fn.newznabProvidersCapabilities.push({'name' : data[0], 'categories' : categoryresult.tv_categories});
}
$(this).getCategories(isDefault, data);
}
else {
updateNewznabCaps( null, data );
}
//Loop through the array and if currently selected newznab provider name matches one in the array, use it to
//update the capabilities select box (on the left).
if (data[0]) {
$.fn.newznabProvidersCapabilities.forEach(function(newzNabCap) {
if (newzNabCap.name && newzNabCap.name == data[0] && newzNabCap.categories instanceof Array) {
var newCapOptions = [];
newzNabCap.categories.forEach(function(category_set) {
if (category_set.id && category_set.name) {
newCapOptions.push({value : category_set.id, text : category_set.name + "(" + category_set.id + ")"});
};
});
$("#newznab_cap").replaceOptions(newCapOptions);
}
});
};
}
}
@@ -244,6 +225,41 @@ $(document).ready(function(){
return found;
};
/**
* Updates the global array $.fn.newznabProvidersCapabilities with a combination of the newznab
* provider name and its category capabilities.
* @param {Array} newzNabCaps the returned object with the newznab provider name and capabilities
* @param {Array} selectedProvider
* @return no return value; the multiselect input $("#newznab_cap") is updated as a result
*/
updateNewznabCaps = function( newzNabCaps, selectedProvider ) {
if (newzNabCaps && !ifExists($.fn.newznabProvidersCapabilities, selectedProvider[0])) {
$.fn.newznabProvidersCapabilities.push({'name' : selectedProvider[0], 'categories' : newzNabCaps.tv_categories});
}
//Loop through the array and if currently selected newznab provider name matches one in the array, use it to
//update the capabilities select box (on the left).
if (selectedProvider[0]) {
$.fn.newznabProvidersCapabilities.forEach(function(newzNabCap) {
if (newzNabCap.name && newzNabCap.name == selectedProvider[0] && newzNabCap.categories instanceof Array) {
var newCapOptions = [];
newzNabCap.categories.forEach(function(category_set) {
if (category_set.id && category_set.name) {
newCapOptions.push({value : category_set.id, text : category_set.name + "(" + category_set.id + ")"});
};
});
$("#newznab_cap").replaceOptions(newCapOptions);
}
});
};
}
$.fn.makeNewznabProviderString = function() {
var provStrings = new Array();

View file

@@ -32,7 +32,7 @@ from sickbeard import providers, metadata, config, webserveInit
from sickbeard.providers.generic import GenericProvider
from providers import ezrss, tvtorrents, btn, newznab, womble, thepiratebay, torrentleech, kat, iptorrents, \
omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, nextgen, speedcd, nyaatorrents, fanzub, torrentbytes, animezb, \
freshontv, bitsoup, t411
freshontv, bitsoup, t411, tokyotoshokan
from sickbeard.config import CheckSection, check_setting_int, check_setting_str, check_setting_float, ConfigMigrator, \
naming_ep_type
from sickbeard import searchBacklog, showUpdater, versionChecker, properFinder, autoPostProcesser, \
@@ -211,6 +211,7 @@ PROCESS_AUTOMATICALLY = False
KEEP_PROCESSED_DIR = False
PROCESS_METHOD = None
MOVE_ASSOCIATED_FILES = False
POSTPONE_IF_SYNC_FILES = True
NFO_RENAME = True
TV_DOWNLOAD_DIR = None
UNPACK = False
@@ -478,7 +479,7 @@ def initialize(consoleLogging=True):
USE_SYNOLOGYNOTIFIER, SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH, SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD, SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD, \
USE_EMAIL, EMAIL_HOST, EMAIL_PORT, EMAIL_TLS, EMAIL_USER, EMAIL_PASSWORD, EMAIL_FROM, EMAIL_NOTIFY_ONSNATCH, EMAIL_NOTIFY_ONDOWNLOAD, EMAIL_NOTIFY_ONSUBTITLEDOWNLOAD, EMAIL_LIST, \
USE_LISTVIEW, METADATA_XBMC, METADATA_XBMC_12PLUS, METADATA_MEDIABROWSER, METADATA_PS3, metadata_provider_dict, \
NEWZBIN, NEWZBIN_USERNAME, NEWZBIN_PASSWORD, GIT_PATH, MOVE_ASSOCIATED_FILES, dailySearchScheduler, NFO_RENAME, \
NEWZBIN, NEWZBIN_USERNAME, NEWZBIN_PASSWORD, GIT_PATH, MOVE_ASSOCIATED_FILES, POSTPONE_IF_SYNC_FILES, dailySearchScheduler, NFO_RENAME, \
GUI_NAME, HOME_LAYOUT, HISTORY_LAYOUT, DISPLAY_SHOW_SPECIALS, COMING_EPS_LAYOUT, COMING_EPS_SORT, COMING_EPS_DISPLAY_PAUSED, COMING_EPS_MISSED_RANGE, FUZZY_DATING, TRIM_ZERO, DATE_PRESET, TIME_PRESET, TIME_PRESET_W_SECONDS, \
METADATA_WDTV, METADATA_TIVO, METADATA_MEDE8ER, IGNORE_WORDS, CALENDAR_UNPROTECTED, CREATE_MISSING_SHOW_DIRS, \
ADD_SHOWS_WO_DIR, USE_SUBTITLES, SUBTITLES_LANGUAGES, SUBTITLES_DIR, SUBTITLES_SERVICES_LIST, SUBTITLES_SERVICES_ENABLED, SUBTITLES_HISTORY, SUBTITLES_FINDER_FREQUENCY, subtitlesFinderScheduler, \
@@ -677,6 +678,7 @@ def initialize(consoleLogging=True):
KEEP_PROCESSED_DIR = bool(check_setting_int(CFG, 'General', 'keep_processed_dir', 1))
PROCESS_METHOD = check_setting_str(CFG, 'General', 'process_method', 'copy' if KEEP_PROCESSED_DIR else 'move')
MOVE_ASSOCIATED_FILES = bool(check_setting_int(CFG, 'General', 'move_associated_files', 0))
POSTPONE_IF_SYNC_FILES = bool(check_setting_int(CFG, 'General', 'postpone_if_sync_files', 1))
NFO_RENAME = bool(check_setting_int(CFG, 'General', 'nfo_rename', 1))
CREATE_MISSING_SHOW_DIRS = bool(check_setting_int(CFG, 'General', 'create_missing_show_dirs', 0))
ADD_SHOWS_WO_DIR = bool(check_setting_int(CFG, 'General', 'add_shows_wo_dir', 0))
@@ -1405,6 +1407,7 @@ def save_config():
new_config['General']['keep_processed_dir'] = int(KEEP_PROCESSED_DIR)
new_config['General']['process_method'] = PROCESS_METHOD
new_config['General']['move_associated_files'] = int(MOVE_ASSOCIATED_FILES)
new_config['General']['postpone_if_sync_files'] = int(POSTPONE_IF_SYNC_FILES)
new_config['General']['nfo_rename'] = int(NFO_RENAME)
new_config['General']['process_automatically'] = int(PROCESS_AUTOMATICALLY)
new_config['General']['unpack'] = int(UNPACK)

View file

@@ -140,8 +140,8 @@ def processDir(dirName, nzbName=None, process_method=None, force=False, is_prior
SyncFiles = filter(helpers.isSyncFile, files)
# Don't post process if files are still being synced
if SyncFiles:
# Don't post process if files are still being synced and option is activated
if SyncFiles and sickbeard.POSTPONE_IF_SYNC_FILES:
returnStr += logHelper(u"Found temporary sync files, skipping post processing", logger.ERROR)
return returnStr
@@ -188,8 +188,8 @@ def processDir(dirName, nzbName=None, process_method=None, force=False, is_prior
SyncFiles = filter(helpers.isSyncFile, fileList)
# Don't post process if files are still being synced
if SyncFiles:
# Don't post process if files are still being synced and option is activated
if SyncFiles and sickbeard.POSTPONE_IF_SYNC_FILES:
returnStr += logHelper(u"Found temporary sync files, skipping post processing", logger.ERROR)
return returnStr

View file

@@ -37,7 +37,8 @@ __all__ = ['ezrss',
'animezb',
'freshontv',
'bitsoup',
't411'
't411',
'tokyotoshokan',
]
import sickbeard

View file

@@ -36,7 +36,7 @@ from sickbeard.common import Quality
from sickbeard import clients
from hachoir_parser import createParser
from base64 import b16encode, b32decode
class GenericProvider:
NZB = "nzb"
@@ -140,6 +140,10 @@ class GenericProvider:
if self.providerType == GenericProvider.TORRENT:
try:
torrent_hash = re.findall('urn:btih:([\w]{32,40})', result.url)[0].upper()
if len(torrent_hash) == 32:
torrent_hash = b16encode(b32decode(torrent_hash)).lower()
if not torrent_hash:
logger.log("Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR)
return False
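The added hash handling above covers results whose URLs carry the 32-character base32 form of the infohash rather than the 40-character hex form; the standard-library base64 helpers convert between the two. A quick self-contained check of that conversion (the hash value is made up):

from base64 import b16encode, b32decode

b32_hash = 'MFRGGZDFMZTWQ2LKNNWG23TPOBYXE43U'  # 32 base32 characters = 20 bytes
hex_hash = b16encode(b32decode(b32_hash)).lower()

assert len(hex_hash) == 40  # the usual hex infohash length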

View file

@@ -176,7 +176,7 @@ class KATProvider(generic.TorrentProvider):
ep_string = show_name + ' Season ' + str(ep_obj.airdate).split('-')[0]
search_string['Season'].append(ep_string)
elif ep_obj.show.anime:
ep_string = show_name + ' ' + "%d" % ep_obj.scene_absolute_number
ep_string = show_name + ' ' + "%02d" % ep_obj.scene_absolute_number
search_string['Season'].append(ep_string)
else:
ep_string = show_name + ' S%02d' % int(ep_obj.scene_season) + ' -S%02d' % int(
@@ -205,7 +205,7 @@
elif self.show.anime:
for show_name in set(allPossibleShowNames(self.show)):
ep_string = sanitizeSceneName(show_name) + ' ' + \
"%i" % int(ep_obj.scene_absolute_number)
"%02i" % int(ep_obj.scene_absolute_number)
search_string['Episode'].append(ep_string)
else:
for show_name in set(allPossibleShowNames(self.show)):
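The "%d" to "%02d" and "%i" to "%02i" changes here (and in the Pirate Bay and show_name_helpers hunks below) zero-pad anime absolute episode numbers, so a single-digit episode searches as, say, "Show 05" rather than "Show 5". A two-line sanity check of the formatting:

assert "%02d" % 5 == "05"   # single digits gain a leading zero
assert "%02i" % 12 == "12"  # two-digit numbers are unchanged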

View file

@@ -118,11 +118,19 @@ class NewznabProvider(generic.NZBProvider):
if self.needs_auth and self.key:
params['apikey'] = self.key
categories = self.getURL("%s/api" % (self.url), params=params)
try:
categories = self.getURL("%s/api" % (self.url), params=params, timeout=10)
except:
logger.log(u"Error getting html for [%s]" %
("%s/api?%s" % (self.url, '&'.join("%s=%s" % (x,y) for x,y in params.items())) ), logger.DEBUG)
return (False, return_categories, "Error getting html for [%s]" %
("%s/api?%s" % (self.url, '&'.join("%s=%s" % (x,y) for x,y in params.items()) )))
xml_categories = helpers.parse_xml(categories)
if not xml_categories:
logger.log(u"Error parsing xml for [%s]" % (self.name),
logger.DEBUG)
return (False, return_categories, "Error parsing xml for [%s]" % (self.name))
try:
@@ -131,6 +139,8 @@ class NewznabProvider(generic.NZBProvider):
for subcat in category.findall('subcat'):
return_categories.append(subcat.attrib)
except:
logger.log(u"Error parsing result for [%s]" % (self.name),
logger.DEBUG)
return (False, return_categories, "Error parsing result for [%s]" % (self.name))
return (True, return_categories, "")
@@ -259,13 +269,17 @@ class NewznabProvider(generic.NZBProvider):
if search_params:
params.update(search_params)
if 'rid' not in search_params and 'q' not in search_params:
logger.log("Error no rid or search term given. Report to forums with a full debug log")
return []
if self.needs_auth and self.key:
params['apikey'] = self.key
results = []
offset = total = 0
while total >= (offset or 1000):
while (total >= offset) and (offset < 1000):
search_url = self.url + 'api?' + urllib.urlencode(params)
logger.log(u"Search url: " + search_url, logger.DEBUG)
data = self.cache.getRSSFeed(search_url)
@@ -289,6 +303,10 @@ class NewznabProvider(generic.NZBProvider):
total = int(data.feed.newznab_response['total'] or 0)
offset = int(data.feed.newznab_response['offset'] or 0)
if offset != params['offset']:
logger.log("Tell your newznab provider to fix their bloody newznab responses")
break
# if there are more items available then the amount given in one call, grab some more
params['offset'] += params['limit']
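Taken together, the search hunks above rework the paging loop so it keeps requesting pages only while the indexer still reports at least offset items and the offset stays below 1000, and it bails out if the server echoes back an offset other than the one requested. A rough, self-contained sketch of that paging shape — fetch_page and the response fields are placeholders, not the provider's real API:

def fetch_all(fetch_page, limit=100):
    # Page through results the way the new loop condition implies.
    params = {'offset': 0, 'limit': limit}
    results = []
    total = offset = 0
    while (total >= offset) and (offset < 1000):
        page = fetch_page(params)              # hypothetical callable
        results.extend(page['items'])
        total = int(page.get('total', 0))
        offset = int(page.get('offset', 0))
        if offset != params['offset']:         # server returned the wrong page
            break
        params['offset'] += params['limit']
    return results

# Example: a fake feed of 150 items served in pages of 100.
def fake_fetch(params):
    total, start = 150, params['offset']
    items = list(range(start, min(start + params['limit'], total)))
    return {'items': items, 'total': total, 'offset': start}

assert len(fetch_all(fake_fetch)) == 150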

View file

@@ -29,6 +29,7 @@ from sickbeard import logger
from sickbeard import tvcache
from sickbeard import show_name_helpers
from sickbeard.bs4_parser import BS4Parser
from sickbeard import db
class T411Provider(generic.TorrentProvider):
urls = {'base_url': 'http://www.t411.me/',

View file

@@ -177,7 +177,7 @@ class ThePirateBayProvider(generic.TorrentProvider):
ep_string = show_name + ' Season ' + str(ep_obj.airdate).split('-')[0]
search_string['Season'].append(ep_string)
elif ep_obj.show.anime:
ep_string = show_name + ' ' + "%d" % ep_obj.scene_absolute_number
ep_string = show_name + ' ' + "%02d" % ep_obj.scene_absolute_number
search_string['Season'].append(ep_string)
else:
ep_string = show_name + ' S%02d' % int(ep_obj.scene_season)
@@ -207,7 +207,7 @@ class ThePirateBayProvider(generic.TorrentProvider):
elif self.show.anime:
for show_name in set(allPossibleShowNames(self.show)):
ep_string = sanitizeSceneName(show_name) + ' ' + \
"%i" % int(ep_obj.scene_absolute_number)
"%02i" % int(ep_obj.scene_absolute_number)
search_string['Episode'].append(ep_string)
else:
for show_name in set(allPossibleShowNames(self.show)):

View file

@@ -0,0 +1,174 @@
# Author: Mr_Orange
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import urllib
import re
import traceback
import sickbeard
import generic
from sickbeard import show_name_helpers
from sickbeard import logger
from sickbeard.common import Quality
from sickbeard import tvcache
from sickbeard import show_name_helpers, helpers
from sickbeard.bs4_parser import BS4Parser
class TokyoToshokanProvider(generic.TorrentProvider):
def __init__(self):
generic.TorrentProvider.__init__(self, "TokyoToshokan")
self.supportsBacklog = True
self.supportsAbsoluteNumbering = True
self.anime_only = True
self.enabled = False
self.ratio = None
self.cache = TokyoToshokanCache(self)
self.url = 'http://tokyotosho.info/'
def isEnabled(self):
return self.enabled
def imageName(self):
return 'tokyotoshokan.png'
def _get_title_and_url(self, item):
title, url = item
if title:
title = u'' + title
title = title.replace(' ', '.')
if url:
url = url.replace('&amp;', '&')
return (title, url)
def seedRatio(self):
return self.ratio
def getQuality(self, item, anime=False):
quality = Quality.sceneQuality(item[0], anime)
return quality
def findSearchResults(self, show, episodes, search_mode, manualSearch=False):
return generic.TorrentProvider.findSearchResults(self, show, episodes, search_mode, manualSearch)
def _get_season_search_strings(self, ep_obj):
return [x.replace('.', ' ') for x in show_name_helpers.makeSceneSeasonSearchString(self.show, ep_obj)]
def _get_episode_search_strings(self, ep_obj, add_string=''):
return [x.replace('.', ' ') for x in show_name_helpers.makeSceneSearchString(self.show, ep_obj)]
def _doSearch(self, search_string, search_mode='eponly', epcount=0, age=0):
if self.show and not self.show.is_anime:
logger.log(u"" + str(self.show.name) + " is not an anime skiping " + str(self.name))
return []
params = {
"terms": search_string.encode('utf-8'),
"type": 1, # get anime types
}
searchURL = self.url + 'search.php?' + urllib.urlencode(params)
data = self.getURL(searchURL)
logger.log(u"Search string: " + searchURL, logger.DEBUG)
if not data:
return []
results = []
try:
with BS4Parser(data, features=["html5lib", "permissive"]) as soup:
torrent_table = soup.find('table', attrs={'class': 'listing'})
torrent_rows = torrent_table.find_all('tr') if torrent_table else []
if torrent_rows[0].find('td', attrs={'class': 'centertext'}):
a = 1
else:
a = 0
for top, bottom in zip(torrent_rows[a::2], torrent_rows[a::2]):
title = top.find('td', attrs={'class': 'desc-top'}).text
url = top.find('td', attrs={'class': 'desc-top'}).find('a')['href']
if not title or not url:
continue
item = title.lstrip(), url
results.append(item)
except Exception, e:
logger.log(u"Failed to parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
return results
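One detail reviewers may want to double-check in the new parser: the loop above zips torrent_rows[a::2] with itself, so bottom is always the same row as top and is never read in the loop body. If the intent was to pair each 'desc-top' row with the stats row that follows it, the usual slicing idiom is the offset-by-one form — shown here as a suggestion only, not as what the commit does:

rows = ['desc0', 'stats0', 'desc1', 'stats1']
a = 0

# What the code above does: each row is paired with itself.
assert list(zip(rows[a::2], rows[a::2])) == [('desc0', 'desc0'), ('desc1', 'desc1')]

# Offset-by-one pairing, if description and stats rows alternate (assumption):
assert list(zip(rows[a::2], rows[a + 1::2])) == [('desc0', 'stats0'), ('desc1', 'stats1')]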
class TokyoToshokanCache(tvcache.TVCache):
def __init__(self, provider):
tvcache.TVCache.__init__(self, provider)
# only poll TokyoToshokan every 15 minutes max
self.minTime = 15
def _get_title_and_url(self, item):
"""
Retrieves the title and URL data from the item XML node
item: An elementtree.ElementTree element representing the <item> tag of the RSS feed
Returns: A tuple containing two strings representing title and URL respectively
"""
title = item.title if item.title else None
if title:
title = u'' + title
title = title.replace(' ', '.')
url = item.link if item.link else None
if url:
url = url.replace('&amp;', '&')
return (title, url)
def _getRSSData(self):
params = {
"filter": '1',
}
url = self.provider.url + 'rss.php?' + urllib.urlencode(params)
logger.log(u"TokyoToshokan cache update URL: " + url, logger.DEBUG)
data = self.getRSSFeed(url)
if data and 'entries' in data:
return data.entries
else:
return []
provider = TokyoToshokanProvider()

View file

@@ -77,6 +77,20 @@ class SearchQueue(generic_queue.GenericQueue):
return True
return False
def queue_length(self):
length = {'backlog': 0, 'daily': 0, 'manual': 0, 'failed': 0}
for cur_item in self.queue:
if isinstance(cur_item, DailySearchQueueItem):
length['daily'] += 1
elif isinstance(cur_item, BacklogQueueItem):
length['backlog'] += 1
elif isinstance(cur_item, ManualSearchQueueItem):
length['manual'] += 1
elif isinstance(cur_item, FailedQueueItem):
length['failed'] += 1
return length
def add_item(self, item):
if isinstance(item, DailySearchQueueItem):
# daily searches

View file

@@ -143,7 +143,7 @@ def makeSceneSeasonSearchString(show, ep_obj, extraSearchType=None):
common.SNATCHED) and curQuality < highestBestQuality) or curStatus == common.WANTED:
ab_number = episode.scene_absolute_number
if ab_number > 0:
seasonStrings.append("%d" % ab_number)
seasonStrings.append("%02d" % ab_number)
else:
myDB = db.DBConnection()
@@ -190,7 +190,7 @@ def makeSceneSearchString(show, ep_obj):
if (show.air_by_date or show.sports) and ep_obj.airdate != datetime.date.fromordinal(1):
epStrings = [str(ep_obj.airdate)]
elif show.is_anime:
epStrings = ["%i" % int(ep_obj.scene_absolute_number)]
epStrings = ["%02i" % int(ep_obj.scene_absolute_number)]
else:
epStrings = ["S%02iE%02i" % (int(ep_obj.scene_season), int(ep_obj.scene_episode)),
"%ix%02i" % (int(ep_obj.scene_season), int(ep_obj.scene_episode))]

View file

@@ -596,6 +596,7 @@ class ManageSearches(MainHandler):
t.backlogRunning = sickbeard.searchQueueScheduler.action.is_backlog_in_progress() # @UndefinedVariable
t.dailySearchStatus = sickbeard.dailySearchScheduler.action.amActive # @UndefinedVariable
t.findPropersStatus = sickbeard.properFinderScheduler.action.amActive # @UndefinedVariable
t.queueLength = sickbeard.searchQueueScheduler.action.queue_length()
t.submenu = ManageMenu()
@@ -1701,7 +1702,7 @@ class ConfigPostProcessing(MainHandler):
wdtv_data=None, tivo_data=None, mede8er_data=None,
keep_processed_dir=None, process_method=None, process_automatically=None,
rename_episodes=None, airdate_episodes=None, unpack=None,
move_associated_files=None, nfo_rename=None, tv_download_dir=None, naming_custom_abd=None,
move_associated_files=None, postpone_if_sync_files=None, nfo_rename=None, tv_download_dir=None, naming_custom_abd=None,
naming_anime=None,
naming_abd_pattern=None, naming_strip_year=None, use_failed_downloads=None,
delete_failed=None, extra_scripts=None, skip_removed_files=None,
@@ -1735,6 +1736,7 @@ class ConfigPostProcessing(MainHandler):
sickbeard.RENAME_EPISODES = config.checkbox_to_value(rename_episodes)
sickbeard.AIRDATE_EPISODES = config.checkbox_to_value(airdate_episodes)
sickbeard.MOVE_ASSOCIATED_FILES = config.checkbox_to_value(move_associated_files)
sickbeard.POSTPONE_IF_SYNC_FILES = config.checkbox_to_value(postpone_if_sync_files)
sickbeard.NAMING_CUSTOM_ABD = config.checkbox_to_value(naming_custom_abd)
sickbeard.NAMING_CUSTOM_SPORTS = config.checkbox_to_value(naming_custom_sports)
sickbeard.NAMING_STRIP_YEAR = config.checkbox_to_value(naming_strip_year)