Fixes for post-processing issues.

Improved caches for an overall performance boost and more accurate show-name matches.

Miscellaneous bug fixes.
echel0n 2014-04-30 15:07:18 -07:00
parent 9f2a6dad5d
commit 9d191f6999
10 changed files with 215 additions and 187 deletions
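
In short, the cache work replaces per-call helpers.searchDBForShow() database scans with helpers.get_show_by_name(), which consults the name cache first, then the in-memory show list, then the database, and optionally the indexers, back-filling the cache on a miss. A minimal caller-side sketch of the migration (names mirror the diff below; release_name is an illustrative variable and error handling is elided):

from sickbeard import helpers

# old pattern, removed in this commit: a DB scan on every call, returning an
# (indexer, indexer_id, show_name) tuple
# found_info = helpers.searchDBForShow(release_name)
# indexer_id = found_info[1] if found_info else None

# new pattern: one cached lookup returning the TVShow object itself
showObj = helpers.get_show_by_name(release_name)
if showObj:
    indexer_id = showObj.indexerid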

View file

@@ -38,7 +38,7 @@ from sickbeard import searchCurrent, searchBacklog, showUpdater, versionChecker,
from sickbeard import helpers, db, exceptions, show_queue, search_queue, scheduler, show_name_helpers
from sickbeard import logger
from sickbeard import naming
from sickbeard import scene_numbering
from sickbeard import scene_numbering, scene_exceptions, name_cache
from indexers.indexer_api import indexerApi
from indexers.indexer_exceptions import indexer_shownotfound, indexer_exception, indexer_error, indexer_episodenotfound, \
indexer_attributenotfound, indexer_seasonnotfound, indexer_userabort, indexerExcepts

View file

@@ -105,9 +105,9 @@ class FailedProcessor(object):
return exception
for show_name in show_names:
found_info = helpers.searchDBForShow(show_name)
found_info = helpers.get_show_by_name(show_name)
if found_info is not None:
return (found_info[1])
return (found_info.indexerid)
return None

View file

@@ -56,13 +56,13 @@ from sickbeard.common import USER_AGENT, mediaExtensions, subtitleExtensions, XM
from sickbeard import db
from sickbeard import encodingKludge as ek
from sickbeard import notifiers
from sickbeard import exceptions
from lib import subliminal
urllib._urlopener = classes.SickBeardURLopener()
session = requests.Session()
def indentXML(elem, level=0):
'''
Does our pretty printing, makes Matt very happy
@@ -191,11 +191,13 @@ def getURL(url, post_data=None, headers=None, params=None, timeout=30, json=Fals
proxies = {
"http": sickbeard.PROXY_SETTING,
"https": sickbeard.PROXY_SETTING,
}
}
r = session.get(url, params=params, data=post_data, headers=dict(zip(it, it)), proxies=proxies, timeout=timeout, verify=False)
r = session.get(url, params=params, data=post_data, headers=dict(zip(it, it)), proxies=proxies,
timeout=timeout, verify=False)
else:
r = session.get(url, params=params, data=post_data, headers=dict(zip(it, it)), timeout=timeout, verify=False)
r = session.get(url, params=params, data=post_data, headers=dict(zip(it, it)), timeout=timeout,
verify=False)
except requests.HTTPError, e:
logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
return None
@@ -288,28 +290,14 @@ def searchDBForShow(regShowName):
yearRegex = "([^()]+?)\s*(\()?(\d{4})(?(2)\))$"
for showName in showNames:
show = get_show_by_name(showName, sickbeard.showList)
if show:
sqlResults = myDB.select("SELECT * FROM tv_shows WHERE show_name LIKE ? OR show_name LIKE ?",
[show.name, show.name])
else:
sqlResults = myDB.select("SELECT * FROM tv_shows WHERE show_name LIKE ? OR show_name LIKE ?",
[showName, showName])
if len(sqlResults) == 1:
return (int(sqlResults[0]["indexer"]), int(sqlResults[0]["indexer_id"]), sqlResults[0]["show_name"])
else:
# if we didn't get exactly one result then try again with the year stripped off if possible
match = re.match(yearRegex, showName)
if match and match.group(1):
logger.log(u"Unable to match original name but trying to manually strip and specify show year",
logger.DEBUG)
sqlResults = myDB.select(
"SELECT * FROM tv_shows WHERE (show_name LIKE ? OR show_name LIKE ?) AND startyear = ?",
[match.group(1) + '%', match.group(1) + '%', match.group(3)])
# if we didn't get exactly one result then try again with the year stripped off if possible
match = re.match(yearRegex, showName)
if match and match.group(1):
logger.log(u"Unable to match original name but trying to manually strip and specify show year",
logger.DEBUG)
sqlResults = myDB.select(
"SELECT * FROM tv_shows WHERE (show_name LIKE ? OR show_name LIKE ?) AND startyear = ?",
[match.group(1) + '%', match.group(1) + '%', match.group(3)])
if len(sqlResults) == 0:
logger.log(u"Unable to match a record in the DB for " + showName, logger.DEBUG)
@@ -320,9 +308,6 @@ def searchDBForShow(regShowName):
else:
return (int(sqlResults[0]["indexer"]), int(sqlResults[0]["indexer_id"]), sqlResults[0]["show_name"])
return None
def searchIndexerForShowID(regShowName, indexer=None, indexer_id=None, ui=None):
showNames = list(set([re.sub('[. -]', ' ', regShowName), regShowName]))
@@ -330,7 +315,7 @@ def searchIndexerForShowID(regShowName, indexer=None, indexer_id=None, ui=None):
for indexer in sickbeard.indexerApi().indexers if not indexer else [int(indexer)]:
# Query Indexers for each search term and build the list of results
lINDEXER_API_PARMS = sickbeard.indexerApi(indexer).api_params.copy()
if ui:lINDEXER_API_PARMS['custom_ui'] = ui
if ui: lINDEXER_API_PARMS['custom_ui'] = ui
t = sickbeard.indexerApi(indexer).indexer(**lINDEXER_API_PARMS)
for name in showNames:
@@ -354,6 +339,7 @@ def searchIndexerForShowID(regShowName, indexer=None, indexer_id=None, ui=None):
except Exception:
continue
def sizeof_fmt(num):
'''
>>> sizeof_fmt(2)
@@ -430,11 +416,13 @@ def symlink(src, dst):
if os.name == 'nt':
import ctypes
if ctypes.windll.kernel32.CreateSymbolicLinkW(unicode(dst), unicode(src), 1 if os.path.isdir(src) else 0) in [0,1280]:
if ctypes.windll.kernel32.CreateSymbolicLinkW(unicode(dst), unicode(src), 1 if os.path.isdir(src) else 0) in [0,
1280]:
raise ctypes.WinError()
else:
os.symlink(src, dst)
def moveAndSymlinkFile(srcFile, destFile):
try:
ek.ek(os.rename, srcFile, destFile)
@@ -652,7 +640,7 @@ def fixSetGroupID(childPath):
except OSError:
logger.log(
u"Failed to respect the set-group-ID bit on the parent directory for %s (setting group ID %i)" % (
childPath, parentGID), logger.ERROR)
childPath, parentGID), logger.ERROR)
def sanitizeSceneName(name, ezrss=False):
@@ -944,21 +932,58 @@ def _check_against_names(name, show):
for showName in showNames:
nameFromList = full_sanitizeSceneName(showName)
#logger.log(u"Comparing names: '"+nameFromList+"' vs '"+nameInQuestion+"'", logger.DEBUG)
if nameFromList == nameInQuestion:
return True
return False
def get_show_by_name(name, showList, useIndexer=False):
if showList:
for show in showList:
if _check_against_names(name, show):
logger.log(u"Matched " + name + " in the showlist to the show " + show.name, logger.DEBUG)
return show
def get_show_by_name(name, useIndexer=False):
in_cache = False
foundResult = None
logger.log(
u"Checking the cahe for:" + str(name),
logger.DEBUG)
cacheResult = sickbeard.name_cache.retrieveNameFromCache(name)
if cacheResult:
in_cache = True
foundResult = findCertainShow(sickbeard.showList, cacheResult)
logger.log(
u"Cache lookup found Indexer ID:" + repr(
foundResult.indexerid) + ", using that for " + name,
logger.DEBUG)
if not foundResult:
logger.log(
u"Checking the showlist for:" + str(name),
logger.DEBUG)
for show in sickbeard.showList:
if _check_against_names(name, show):
logger.log(
u"Showlist lookup found Indexer ID:" + str(show.indexerid) + ", using that for " + name,
logger.DEBUG)
foundResult = show
if not foundResult:
logger.log(
u"Checking the database for show:" + str(name),
logger.DEBUG)
dbResult = searchDBForShow(name)
if dbResult:
foundResult = findCertainShow(sickbeard.showList, dbResult[1])
logger.log(
u"Database lookup found Indexer ID:" + str(
foundResult.indexerid) + ", using that for " + name, logger.DEBUG)
if not foundResult and useIndexer:
logger.log(
u"Checking the Indexers for:" + str(name),
logger.DEBUG)
if useIndexer:
for indexer in sickbeard.indexerApi().indexers:
try:
lINDEXER_API_PARMS = sickbeard.indexerApi(indexer).api_params.copy()
@@ -967,14 +992,21 @@ def get_show_by_name(name, showList, useIndexer=False):
t = sickbeard.indexerApi(indexer).indexer(**lINDEXER_API_PARMS)
showObj = t[name]
except:continue
except:
continue
if showObj:
showResult = findCertainShow(sickbeard.showList, int(showObj["id"]))
if showResult is not None:
return showResult
foundResult = findCertainShow(sickbeard.showList, int(showObj["id"]))
if foundResult:
logger.log(
u"Indexer lookup found Indexer ID:" + str(
foundResult.indexerid) + ", using that for " + name, logger.DEBUG)
return None
# add to name cache if we didn't get it from the cache
if foundResult and not in_cache:
sickbeard.name_cache.addNameToCache(name, foundResult.indexerid)
return foundResult
def is_hidden_folder(folder):
"""
@@ -995,6 +1027,7 @@ def real_path(path):
"""
return ek.ek(os.path.normpath, ek.ek(os.path.normcase, ek.ek(os.path.realpath, path)))
def validateShow(show, season=None, episode=None):
indexer_lang = show.lang

View file

@@ -21,21 +21,21 @@ import os.path
import re
import regexes
import sickbeard
import calendar
from sickbeard import logger, helpers, scene_numbering
from dateutil import parser
class NameParser(object):
ALL_REGEX = 0
NORMAL_REGEX = 1
SPORTS_REGEX = 2
def __init__(self, file_name=True, regexMode=0):
def __init__(self, file_name=True, regexMode=1):
self.file_name = file_name
self.regexMode = regexMode
self.compiled_regexes = []
self._compile_regexes(regexMode)
self._compile_regexes(self.regexMode)
def clean_series_name(self, series_name):
"""Cleans up series name by removing any . and _
@@ -125,6 +125,14 @@ class NameParser(object):
else:
result.episode_numbers = [ep_num]
if 'sports_event_date' in named_groups:
sports_event_date = match.group('sports_event_date')
if sports_event_date:
try:
result.sports_event_date = parser.parse(sports_event_date, fuzzy=True).date()
except ValueError, e:
raise InvalidNameException(e.message)
if 'air_year' in named_groups and 'air_month' in named_groups and 'air_day' in named_groups:
year = int(match.group('air_year'))
month = int(match.group('air_month'))
@@ -232,6 +240,7 @@ class NameParser(object):
# sports event title
final_result.sports_event_title = self._combine_results(file_name_result, dir_name_result, 'sports_event_title')
final_result.sports_event_date = self._combine_results(file_name_result, dir_name_result, 'sports_event_date')
if not final_result.air_date:
final_result.season_number = self._combine_results(file_name_result, dir_name_result, 'season_number')
@@ -265,6 +274,7 @@ class ParseResult(object):
original_name,
series_name=None,
sports_event_title=None,
sports_event_date=None,
season_number=None,
episode_numbers=None,
extra_info=None,
@@ -287,6 +297,7 @@ class ParseResult(object):
self.air_date = air_date
self.sports_event_title = sports_event_title
self.sports_event_date = sports_event_date
self.which_regex = None
@@ -308,6 +319,8 @@ class ParseResult(object):
return False
if self.sports_event_title != other.sports_event_title:
return False
if self.sports_event_date != other.sports_event_date:
return False
return True
@@ -326,6 +339,7 @@ class ParseResult(object):
to_return += str(self.air_date)
if self.sports:
to_return += str(self.sports_event_title)
to_return += str(self.sports_event_date)
if self.extra_info:
to_return += ' - ' + self.extra_info
@@ -344,12 +358,12 @@ class ParseResult(object):
if len(self.episode_numbers) == 0: return self # need at least one episode
# convert scene numbered releases before storing to cache
indexer_id = helpers.searchDBForShow(self.series_name)
if indexer_id:
showObj = helpers.get_show_by_name(self.series_name)
if showObj:
new_episode_numbers = []
new_season_numbers = []
for epNo in self.episode_numbers:
(s, e) = scene_numbering.get_indexer_numbering(indexer_id, self.season_number, epNo)
(s, e) = scene_numbering.get_indexer_numbering(showObj.indexerid, self.season_number, epNo)
new_episode_numbers.append(e)
new_season_numbers.append(s)
@@ -380,7 +394,7 @@ class ParseResult(object):
air_by_date = property(_is_air_by_date)
def _is_sports(self):
if self.sports_event_title:
if self.sports_event_title or self.sports_event_date:
return True
return False
sports = property(_is_sports)

View file

@@ -187,14 +187,15 @@ ep_regexes = [
]
sports_regexs = [
('sports_event_mma',
# Show.Name.123.Event.23rd.Nov.2010.Source.Quality.Etc-Group
'''
^(?P<series_name>.+?)[. _-]+
(?P<sports_event_title>\d{3}.+[. _-]vs[. _-].+?)[. _-]+
((?![. _-]+\d{2})(.*?)(?:\d{4}[. _-]+))?
([. _-]*(?P<extra_info>.+?)((?<![. _-])
(?<!WEB)-(?P<release_group>[^- ]+))?)?$
'''
('sports_standard',
# Sports.Name.2010.11.23.Source.Quality.Etc-Group
# Sports.Name.23rd.Nov.2010.Source.Quality.Etc-Group
'''
^(?P<series_name>.+?)[. _-]+
(?P<sports_event_date>(\d{4}[. _-]+\d{2}[. _-]+\d{2})|(\d{2}\w{2}[. _-]+\w+[. _-]+\d{4}))
[. _-]*((?P<extra_info>.+?)((?<![. _-])(?<!WEB)
-(?P<release_group>[^- ]+))?)?$
'''
),
]
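
For reference, the new sports_standard pattern accepts either a numeric date (2010.11.23) or an ordinal date (23rd.Nov.2010) between the event name and the release tags, and parser.py feeds the captured fragment to dateutil with fuzzy parsing. A self-contained sanity check of that behavior, assuming only the pattern body copied from this hunk and the sample release used by the XEM test at the end of this diff:

import re
from dateutil import parser

# pattern body copied verbatim from the sports_standard entry above
sports_standard = re.compile(r'''
^(?P<series_name>.+?)[. _-]+
(?P<sports_event_date>(\d{4}[. _-]+\d{2}[. _-]+\d{2})|(\d{2}\w{2}[. _-]+\w+[. _-]+\d{4}))
[. _-]*((?P<extra_info>.+?)((?<![. _-])(?<!WEB)
-(?P<release_group>[^- ]+))?)?$
''', re.VERBOSE)

match = sports_standard.match('UFC.168.Weidman.vs.Silva.II.28th.Dec.2013.HDTV.x264-Sir.Paul')
assert match.group('series_name') == 'UFC.168.Weidman.vs.Silva.II'
# fuzzy=True lets dateutil skip the separators and the ordinal suffix,
# mirroring what NameParser does with sports_event_date
assert parser.parse(match.group('sports_event_date'), fuzzy=True).date().isoformat() == '2013-12-28'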

View file

@@ -506,43 +506,10 @@ class PostProcessor(object):
# for each possible interpretation of that scene name
for cur_name in name_list:
self._log(u"Checking cache for " + cur_name, logger.DEBUG)
cache_id = name_cache.retrieveNameFromCache(parse_result.series_name)
if cache_id:
self._log(u"Cache lookup got a Indexer ID " + str(cache_id) + ", using that", logger.DEBUG)
showObj = helpers.get_show_by_name(parse_result.series_name)
if showObj:
_finalize(parse_result)
return (cache_id, season, episodes)
# for each possible interpretation of that scene name
for cur_name in name_list:
self._log(u"Checking scene exceptions for a match on " + cur_name, logger.DEBUG)
scene_id = scene_exceptions.get_scene_exception_by_name(cur_name)
if scene_id:
self._log(u"Scene exception lookup got a Indexer ID " + str(scene_id) + ", using that", logger.DEBUG)
_finalize(parse_result)
return (scene_id, season, episodes)
# see if we can find the name directly in the DB, if so use it
for cur_name in name_list:
self._log(u"Looking up " + cur_name + u" in the DB", logger.DEBUG)
db_result = helpers.searchDBForShow(cur_name)
if db_result:
self._log(u"Lookup successful, using " + sickbeard.indexerApi(db_result[0]).name + " id " + str(
db_result[1]),
logger.DEBUG)
_finalize(parse_result)
return (int(db_result[1]), season, episodes)
# see if we can find the name on the Indexer
for cur_name in name_list:
foundInfo = helpers.searchIndexerForShowID(cur_name, ui=classes.ShowListUI)
if foundInfo:
indexer_id = foundInfo[1]
self._log(
u"Lookup successful, using " + sickbeard.indexerApi(self.indexer).name + " id " + str(indexer_id),
logger.DEBUG)
_finalize(parse_result)
return (indexer_id, season, episodes)
return (showObj.indexerid, season, episodes)
_finalize(parse_result)
return to_return

View file

@@ -59,9 +59,11 @@ class EZRSSProvider(generic.TorrentProvider):
def getSearchResults(self, show, season, ep_objs, seasonSearch=False, manualSearch=False):
self.show = show
results = {}
if self.show.air_by_date or self.show.sports:
if show.air_by_date or show.sports:
logger.log(self.name + u" doesn't support air-by-date or sports backloging because of limitations on their RSS search.",
logger.WARNING)
return results

View file

@@ -38,6 +38,7 @@ from lib.hachoir_parser import createParser
from sickbeard.name_parser.parser import NameParser, InvalidNameException
from sickbeard.common import Quality
class GenericProvider:
NZB = "nzb"
TORRENT = "torrent"
@@ -56,7 +57,8 @@ class GenericProvider:
self.session = requests.session()
self.session.verify = False
self.session.headers.update({'user-agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36'})
self.session.headers.update({
'user-agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36'})
def getID(self):
@@ -132,7 +134,8 @@ class GenericProvider:
logger.log(u"Error loading " + self.name + " URL: " + url, logger.ERROR)
return None
elif 'error' in f.feed:
logger.log(u"Newznab ERROR:[%s] CODE:[%s]" % (f.feed['error']['description'], f.feed['error']['code']), logger.DEBUG)
logger.log(u"Newznab ERROR:[%s] CODE:[%s]" % (f.feed['error']['description'], f.feed['error']['code']),
logger.DEBUG)
return None
elif not f.entries:
logger.log(u"No items found on " + self.name + " using URL: " + url, logger.WARNING)
@@ -253,7 +256,7 @@ class GenericProvider:
self.show = show
regexMode = 0
if show.sports:
if self.show.sports:
regexMode = 2
for ep_obj in ep_objs:
@@ -280,7 +283,7 @@ class GenericProvider:
logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.WARNING)
continue
if not show.air_by_date:
if not (self.show.air_by_date or self.show.sports):
# this check is meaningless for non-season searches
if (parse_result.season_number is not None and parse_result.season_number != season) or (
parse_result.season_number is None and season != 1):
@@ -293,16 +296,27 @@ class GenericProvider:
actual_episodes = parse_result.episode_numbers
else:
if show.air_by_date and not parse_result.air_by_date:
if self.show.air_by_date and not parse_result.air_by_date:
logger.log(
u"This is supposed to be an air-by-date search but the result " + title + " didn't parse as one, skipping it",
logger.DEBUG)
continue
if self.show.sports and not parse_result.sports_event_date:
logger.log(
u"This is supposed to be an sports-event-date search but the result " + title + " didn't parse as one, skipping it",
logger.DEBUG)
continue
myDB = db.DBConnection()
if parse_result.air_by_date:
sql_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
[show.indexerid, parse_result.air_date.toordinal()])
sql_results = myDB.select(
"SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
[self.show.indexerid, parse_result.air_date.toordinal()])
elif parse_result.sports:
sql_results = myDB.select(
"SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
[self.show.indexerid, parse_result.sports_event_date.toordinal()])
if len(sql_results) != 1:
logger.log(
@@ -316,8 +330,8 @@ class GenericProvider:
# make sure we want the episode
wantEp = True
for epNo in actual_episodes:
epObj = show.getEpisode(actual_season, epNo)
if not epObj or not show.wantEpisode(epObj.season, epObj.episode, quality, manualSearch=manualSearch):
epObj = self.show.getEpisode(actual_season, epNo)
if not epObj or not self.show.wantEpisode(epObj.season, epObj.episode, quality, manualSearch=manualSearch):
wantEp = False
break
@@ -351,13 +365,13 @@ class GenericProvider:
parse_result.episode_numbers), logger.DEBUG)
elif len(epObjs) == 0:
epNum = SEASON_RESULT
result.extraInfo = [show]
result.extraInfo = [self.show]
logger.log(u"Separating full season result to check for later", logger.DEBUG)
if epNum in results:
results[epNum].append(result)
else:
results = {epNum:[result]}
results = {epNum: [result]}
return results

View file

@@ -193,92 +193,67 @@ class TVCache():
def _addCacheEntry(self, name, url, quality=None):
cacheDB = self._getDB()
parse_result = None
indexer_id = None
season = None
episodes = None
from_cache = False
# if we don't have complete info then parse the filename to get it
while(True):
try:
myParser = NameParser()
parse_result = myParser.parse(name).convert()
except InvalidNameException:
logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG)
try:
myParser = NameParser()
parse_result = myParser.parse(name).convert()
except InvalidNameException:
logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG)
return None
if not parse_result:
logger.log(u"Giving up because I'm unable to parse this name: " + name, logger.DEBUG)
return None
if not parse_result.series_name:
logger.log(u"No series name retrieved from " + name + ", unable to cache it", logger.DEBUG)
return None
try:
showObj = helpers.get_show_by_name(parse_result.series_name)
if showObj is None:
return None
if not parse_result:
logger.log(u"Giving up because I'm unable to parse this name: " + name, logger.DEBUG)
return None
myDB = db.DBConnection()
if showObj.air_by_date:
sql_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
[showObj.indexerid, parse_result.air_date.toordinal()])
if sql_results > 0:
season = int(sql_results[0]["season"])
episodes = [int(sql_results[0]["episode"])]
elif showObj.sports:
sql_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
[showObj.indexerid, parse_result.sports_event_date.toordinal()])
if not parse_result.series_name:
logger.log(u"No series name retrieved from " + name + ", unable to cache it", logger.DEBUG)
return None
if sql_results > 0:
season = int(sql_results[0]["season"])
episodes = [int(sql_results[0]["episode"])]
else:
season = parse_result.season_number
episodes = parse_result.episode_numbers
logger.log(
u"Checking the cache for show:" + str(parse_result.series_name),
logger.DEBUG)
if season and episodes:
# store episodes as a pipe-separated string
episodeText = "|" + "|".join(map(str, episodes)) + "|"
# remember if the cache lookup worked or not so we know whether we should bother updating it later
cache_id = name_cache.retrieveNameFromCache(parse_result.series_name)
if cache_id:
logger.log(u"Cache lookup found Indexer ID:" + repr(indexer_id) + ", using that for " + parse_result.series_name, logger.DEBUG)
from_cache = True
indexer_id = cache_id
break
# get the current timestamp
curTimestamp = int(time.mktime(datetime.datetime.today().timetuple()))
# if the cache failed, try looking up the show name in the database
logger.log(
u"Checking the database for show:" + str(parse_result.series_name),
logger.DEBUG)
# get quality of release
if quality is None:
quality = Quality.sceneQuality(name)
showResult = helpers.searchDBForShow(parse_result.series_name)
if showResult:
logger.log(
u"Database lookup found Indexer ID:" + str(showResult[1]) + ", using that for " + parse_result.series_name, logger.DEBUG)
indexer_id = showResult[1]
break
if not isinstance(name, unicode):
name = unicode(name, 'utf-8')
# if we didn't find a Indexer ID return None
if indexer_id:
# add to name cache if we didn't get it from the cache
if not from_cache:
name_cache.addNameToCache(parse_result.series_name, indexer_id)
# if the show isn't in out database then return None
try:
showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)
myDB = db.DBConnection()
if parse_result.air_by_date:
sql_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
[showObj.indexerid, parse_result.air_date.toordinal()])
if sql_results > 0:
season = int(sql_results[0]["season"])
episodes = [int(sql_results[0]["episode"])]
else:
season = parse_result.season_number
episodes = parse_result.episode_numbers
if season and episodes:
# store episodes as a pipe-separated string
episodeText = "|" + "|".join(map(str, episodes)) + "|"
# get the current timestamp
curTimestamp = int(time.mktime(datetime.datetime.today().timetuple()))
# get quality of release
if quality is None:
quality = Quality.sceneQuality(name)
if not isinstance(name, unicode):
name = unicode(name, 'utf-8')
cacheDB.action(
"INSERT INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality) VALUES (?,?,?,?,?,?,?)",
[name, season, episodeText, indexer_id, url, curTimestamp, quality])
except:
return
cacheDB.action(
"INSERT INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality) VALUES (?,?,?,?,?,?,?)",
[name, season, episodeText, showObj.indexerid, url, curTimestamp, quality])
except:
return
def searchCache(self, episode, manualSearch=False):
neededEps = self.findNeededEpisodes(episode, manualSearch)

View file

@@ -28,7 +28,7 @@ sys.path.append(os.path.abspath('../lib'))
import test_lib as test
import sickbeard
from sickbeard.helpers import sanitizeSceneName
from sickbeard.helpers import get_show_by_name
from sickbeard.tv import TVShow
from sickbeard.name_parser.parser import NameParser, InvalidNameException
@@ -65,6 +65,28 @@ class XEMBasicTests(test.SickbeardTestDBCase):
print scene_parsse_results1
print scene_parsse_results2
sports_release = 'UFC.168.Weidman.vs.Silva.II.28th.Dec.2013.HDTV.x264-Sir.Paul'
try:
myParser = NameParser(False, 2)
parse_result = myParser.parse(sports_release)
show_names = sickbeard.show_name_helpers.allPossibleShowNames(parse_result.series_name)
show = get_show_by_name(parse_result.series_name)
if show:
sql_results = test.db.DBConnection().select(
"SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
[show.indexerid, parse_result.sports_event_date.toordinal()])
actual_season = int(sql_results[0]["season"])
actual_episodes = [int(sql_results[0]["episode"])]
print actual_season
print actual_episodes
except InvalidNameException:
print(u"Unable to parse the filename " + scene_release + " into a valid episode")
print scene_parsse_results1
if __name__ == "__main__":
print "=================="
print "STARTING - XEM Scene Numbering TESTS"