Fixes issues with findpropers and airdate.

Fixes issues with unhashable dict types.
Fixes issues with 100% CPU usage.
Fixes issues with RSS feed cache.
Fixes issues with scene numbering and converting back to indexer numbering.
Fixes issues with backlog searches.
Fixes issues with season pack searches.
Fixes issues with BTN.
This commit is contained in:
echel0n 2014-05-04 20:04:46 -07:00
parent 7f862a4a74
commit a5b72dea84
19 changed files with 104 additions and 121 deletions

View file

@ -991,9 +991,6 @@ def initialize(consoleLogging=True):
newznabProviderList = providers.getNewznabProviderList(NEWZNAB_DATA)
providerList = providers.makeProviderList()
# fix scene numbering in mainDB
scene_numbering.fix_scene_numbering()
# initialize newznab providers
currentSearchScheduler = scheduler.Scheduler(searchCurrent.CurrentSearcher(),
cycleTime=datetime.timedelta(minutes=SEARCH_FREQUENCY),

View file

@ -946,33 +946,25 @@ def _check_against_names(name, show):
def get_show_by_name(name):
showObj = None
in_cache = False
if not sickbeard.showList:
return
indexerid = sickbeard.name_cache.retrieveNameFromCache(name)
if indexerid:
in_cache = True
showNames = list(set(sickbeard.show_name_helpers.sceneToNormalShowNames(name)))
for showName in showNames if not in_cache else []:
try:
showObj = [x for x in sickbeard.showList if _check_against_names(showName, x)][0]
indexerid = showObj.indexerid
except:
indexerid = 0
if indexerid:
for showName in showNames if not indexerid else []:
sceneResults = [x for x in sickbeard.showList if _check_against_names(showName, x)]
showObj = sceneResults[0] if len(sceneResults) else None
if showObj:
break
if indexerid:
if showObj or indexerid:
logger.log(u"Found Indexer ID:[" + repr(indexerid) + "], using that for [" + str(name) + "}",logger.DEBUG)
if not showObj:
showObjList = [x for x in sickbeard.showList if x.indexerid == indexerid]
if len(showObjList):
showObj = showObjList[0]
return showObj
showObj = findCertainShow(sickbeard.showList, int(indexerid))
return showObj
def is_hidden_folder(folder):
"""

View file

@ -16,6 +16,8 @@
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
import sickbeard
from sickbeard import db
from sickbeard.helpers import sanitizeSceneName
@ -52,6 +54,11 @@ def retrieveNameFromCache(name):
if cache_results:
return int(cache_results[0]["indexer_id"])
def retrieveShowFromCache(name):
    """Resolve a cached release/show name to a show object.

    Looks *name* up in the name cache via retrieveNameFromCache(); when a
    non-zero indexer id comes back, returns the matching show from
    sickbeard.showList, otherwise returns None.
    """
    cached_id = retrieveNameFromCache(name)
    if not cached_id:
        return None
    return sickbeard.helpers.findCertainShow(sickbeard.showList, int(cached_id))
def clearCache():
"""
Deletes all "unknown" entries from the cache (names with indexer_id of 0).

View file

@ -623,10 +623,13 @@ class PostProcessor(object):
for cur_episode in episodes:
self._log(u"Retrieving episode object for " + str(season) + "x" + str(cur_episode), logger.DEBUG)
# detect and convert scene numbered releases
season, cur_episode = sickbeard.scene_numbering.get_indexer_numbering(indexer_id,indexer,season,cur_episode)
self._log(u"Episode object has been scene converted to " + str(season) + "x" + str(cur_episode), logger.DEBUG)
# now that we've figured out which episode this file is just load it manually
try:
# convert scene numbered release and load episode from database
curEp = show_obj.getEpisode(season, cur_episode, scene=True)
curEp = show_obj.getEpisode(season, cur_episode)
except exceptions.EpisodeNotFoundException, e:
self._log(u"Unable to create episode: " + ex(e), logger.DEBUG)
raise exceptions.PostProcessingFailed()

View file

@ -112,9 +112,9 @@ class ProperFinder():
continue
# populate our Proper instance
if parse_result.air_by_date:
if parse_result.air_by_date or parse_result.sports:
curProper.season = -1
curProper.episode = parse_result.air_date
curProper.episode = parse_result.air_date or parse_result.sports_event_date
else:
curProper.scene_season = parse_result.season_number if parse_result.season_number != None else 1
curProper.scene_episode = parse_result.episode_numbers[0]
@ -178,15 +178,11 @@ class ProperFinder():
continue
# if we have an air-by-date show then get the real season/episode numbers
if curProper.season == -1 and curProper.indexerid and curProper.indexer:
if (parse_result.air_by_date or parse_result.sports_event_date) and curProper.indexerid:
logger.log(
u"Looks like this is an air-by-date or sports show, attempting to convert the date to season/episode",
logger.DEBUG)
if curProper.airdate:
airdate = curProper.airdate.toordinal()
else:
airdate = None
airdate = curProper.episode.toordinal()
myDB = db.DBConnection()
sql_result = myDB.select(
"SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? and airdate = ?",
@ -263,7 +259,7 @@ class ProperFinder():
showObj = helpers.findCertainShow(sickbeard.showList, curProper.indexerid)
if showObj == None:
logger.log(u"Unable to find the show with indexerid " + str(
curProper.indexerid) + " so unable to download the proper", logger.ERROR)
curProper .indexerid) + " so unable to download the proper", logger.ERROR)
continue
epObj = showObj.getEpisode(curProper.season, curProper.episode)

View file

@ -117,7 +117,6 @@ class BTNProvider(generic.TorrentProvider):
found_torrents.update(parsedJSON['torrents'])
results = []
for torrentid, torrent_info in found_torrents.iteritems():
(title, url) = self._get_title_and_url(torrent_info)

View file

@ -130,7 +130,7 @@ class DTTCache(tvcache.TVCache):
def _parseItem(self, item):
    """Parse one RSS feed item and add it to this provider's cache.

    Extracts (title, url) from *item* via the provider and hands them to
    _addCacheEntry; returns whatever _addCacheEntry returns.

    Fix: the span contained both the old un-tagged log line and its
    provider-tagged replacement (a leftover of the diff render), so every
    item was logged twice; only the provider-tagged message is kept.
    """
    title, url = self.provider._get_title_and_url(item)
    # Tag the message with the provider name so cache updates from
    # several feeds running together can be told apart in the log.
    logger.log(u"Adding item from [" + self.provider.name + "] RSS feed to cache: " + title, logger.DEBUG)
    return self._addCacheEntry(title, url)

View file

@ -130,7 +130,7 @@ class EZRSSProvider(generic.TorrentProvider):
(title, url) = self._get_title_and_url(curItem)
if title and url:
logger.log(u"Adding item from RSS to results: " + title, logger.DEBUG)
logger.log(u"Adding item from [" + self.name + "] RSS feed to cache: " + title, logger.DEBUG)
results.append(curItem)
else:
logger.log(
@ -180,7 +180,7 @@ class EZRSSCache(tvcache.TVCache):
(title, url) = self.provider._get_title_and_url(item)
if title and url:
logger.log(u"Adding item from RSS to cache: " + title, logger.DEBUG)
logger.log(u"Adding item from [" + self.provider.name + "] RSS feed to cache: " + title, logger.DEBUG)
url = self._translateLinkURL(url)
return self._addCacheEntry(title, url)

View file

@ -250,15 +250,13 @@ class GenericProvider:
searchStrings += self._get_episode_search_strings(epObj)
# remove duplicate search strings
if len(searchStrings):
searchStrings = [i for n, i in enumerate(searchStrings) if i not in searchStrings[n + 1:]]
searchStrings = [i for n, i in enumerate(searchStrings) if i not in searchStrings[n + 1:]]
for curString in sorted(searchStrings):
itemList += self._doSearch(curString)
# remove duplicate items
if len(itemList):
itemList = list(set(itemList))
itemList = [i for n, i in enumerate(itemList) if i not in itemList[n + 1:]]
for item in itemList:

View file

@ -83,7 +83,9 @@ class HDBitsProvider(generic.TorrentProvider):
season = ep_obj.scene_season
episode = ep_obj.scene_episode
self.cache.updateCache()
if manualSearch:
self.cache.updateCache()
results = self.cache.searchCache(episode, manualSearch)
logger.log(u"Cache results: " + str(results), logger.DEBUG)

View file

@ -356,7 +356,7 @@ class NewzbinCache(tvcache.TVCache):
logger.log("Found quality " + str(quality), logger.DEBUG)
logger.log("Adding item from RSS to cache: " + title, logger.DEBUG)
logger.log(u"Adding item from [" + self.provider.name + "] RSS feed to cache: " + title, logger.DEBUG)
self._addCacheEntry(title, url, quality=quality)

View file

@ -209,7 +209,7 @@ class NewznabProvider(generic.NZBProvider):
(title, url) = self._get_title_and_url(curItem)
if title and url:
logger.log(u"Adding item from RSS to results: " + title, logger.DEBUG)
logger.log(u"Adding item from [" + self.name + "] RSS feed to cache: " + title,logger.DEBUG)
results.append(curItem)
else:
logger.log(

View file

@ -143,7 +143,7 @@ class NyaaCache(tvcache.TVCache):
logger.ERROR)
return None
logger.log(u"Adding item from RSS to cache: " + title, logger.DEBUG)
logger.log(u"Adding item from [" + self.provider.name + "] RSS feed to cache: " + title, logger.DEBUG)
return self._addCacheEntry(title, url)

View file

@ -164,5 +164,5 @@ class TorrentRssCache(tvcache.TVCache):
logger.log(u"The XML returned from the RSS feed is incomplete, this result is unusable", logger.ERROR)
return None
logger.log(u"Adding item from RSS to cache: " + title, logger.DEBUG)
logger.log(u"Adding item from [" + self.provider.name + "] RSS feed to cache: " + title, logger.DEBUG)
return self._addCacheEntry(title, url)

View file

@ -373,6 +373,9 @@ def searchProviders(show, season, episodes, seasonSearch=False, manualSearch=Fal
if not curProvider.isActive():
continue
if manualSearch:
curProvider.cache.updateCache()
try:
curResults = curProvider.findSearchResults(show, season, episodes, seasonSearch, manualSearch)
except exceptions.AuthException, e:

View file

@ -29,6 +29,7 @@ from sickbeard import ui
BACKLOG_SEARCH = 10
RSS_SEARCH = 20
FAILED_SEARCH = 30
MANUAL_SEARCH = 30
@ -47,8 +48,6 @@ class SearchQueue(generic_queue.GenericQueue):
for cur_item in self.queue:
if isinstance(cur_item, ManualSearchQueueItem) and cur_item.ep_obj == ep_obj:
return True
if isinstance(cur_item, BacklogQueueItem) and cur_item.ep_obj == ep_obj:
return True
return False
def pause_backlog(self):
@ -92,7 +91,11 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
def execute(self):
generic_queue.QueueItem.execute(self)
logger.log("Beginning manual search for " + self.ep_obj.prettyName())
# convert indexer numbering to scene numbering for searches
(self.ep_obj.scene_season, self.ep_obj.scene_episode) = sickbeard.scene_numbering.get_scene_numbering(
self.ep_obj.show.indexerid, self.ep_obj.show.indexer, self.ep_obj.season, self.ep_obj.episode)
logger.log("Beginning manual search for " + self.ep_obj.prettyName() + ' as ' + self.ep_obj.prettySceneName())
foundResults = search.searchProviders(self.ep_obj.show, self.ep_obj.season, [self.ep_obj], manualSearch=True)
result = False
@ -134,7 +137,7 @@ class RSSSearchQueueItem(generic_queue.QueueItem):
self._changeMissingEpisodes()
logger.log(u"Beginning search for new episodes on RSS")
logger.log(u"Beginning search for new episodes on RSS feeds and in cache")
foundResults = search.searchForNeededEpisodes()
@ -188,7 +191,6 @@ class BacklogQueueItem(generic_queue.QueueItem):
self.show = show
self.segment = segment
self.wantedEpisodes = []
self.seasonSearch = False
logger.log(u"Seeing if we need any episodes from " + self.show.name + " season " + str(self.segment))
@ -215,16 +217,29 @@ class BacklogQueueItem(generic_queue.QueueItem):
anyQualities, bestQualities = common.Quality.splitQuality(self.show.quality) #@UnusedVariable
self.wantedEpisodes = self._need_any_episodes(statusResults, bestQualities)
# check if we want to search for season packs instead of just season/episode
seasonEps = show.getAllEpisodes(self.segment)
if len(seasonEps) == len(self.wantedEpisodes):
self.seasonSearch = True
def execute(self):
generic_queue.QueueItem.execute(self)
results = search.searchProviders(self.show, self.segment, self.wantedEpisodes, seasonSearch=self.seasonSearch)
# check if we want to search for season packs instead of just season/episode
seasonSearch = False
seasonEps = self.show.getAllEpisodes(self.segment)
if len(seasonEps) == len(self.wantedEpisodes):
seasonSearch = True
# convert indexer numbering to scene numbering for searches
for i, epObj in enumerate(self.wantedEpisodes):
(self.wantedEpisodes[i].scene_season,
self.wantedEpisodes[i].scene_episode) = sickbeard.scene_numbering.get_scene_numbering(self.show.indexerid,
self.show.indexer,
epObj.season,
epObj.episode)
logger.log(
"Beginning backlog search for " + self.wantedEpisodes[i].prettyName() + ' as ' + self.wantedEpisodes[
i].prettySceneName())
# search for our wanted items and return the results
results = search.searchProviders(self.show, self.segment, self.wantedEpisodes, seasonSearch=seasonSearch)
# download whatever we find
for curResult in results:
@ -250,7 +265,7 @@ class BacklogQueueItem(generic_queue.QueueItem):
# if we need a better one then say yes
if (curStatus in (common.DOWNLOADED, common.SNATCHED, common.SNATCHED_PROPER,
common.SNATCHED_BEST) and curQuality < highestBestQuality) or curStatus == common.WANTED:
epObj = self.show.getEpisode(self.segment,episode)
epObj = self.show.getEpisode(self.segment, episode)
wantedEpisodes.append(epObj)
return wantedEpisodes
@ -258,7 +273,7 @@ class BacklogQueueItem(generic_queue.QueueItem):
class FailedQueueItem(generic_queue.QueueItem):
def __init__(self, show, episodes):
generic_queue.QueueItem.__init__(self, 'Retry', MANUAL_SEARCH)
generic_queue.QueueItem.__init__(self, 'Retry', FAILED_SEARCH)
self.priority = generic_queue.QueuePriorities.HIGH
self.thread_name = 'RETRY-' + str(show.indexerid)
@ -272,7 +287,14 @@ class FailedQueueItem(generic_queue.QueueItem):
episodes = []
for epObj in episodes:
for i, epObj in enumerate(episodes):
# convert indexer numbering to scene numbering for searches
(episodes[i].scene_season, self.episodes[i].scene_episode) = sickbeard.scene_numbering.get_scene_numbering(
self.show.indexerid, self.show.indexer, epObj.season, epObj.episode)
logger.log(
"Beginning failed download search for " + epObj.prettyName() + ' as ' + epObj.prettySceneName())
(release, provider) = failed_history.findRelease(self.show, epObj.season, epObj.episode)
if release:
logger.log(u"Marking release as bad: " + release)

View file

@ -182,7 +182,7 @@ class TVShow(object):
return ep_list
def getEpisode(self, season, episode, file=None, noCreate=False, scene=False):
def getEpisode(self, season, episode, file=None, noCreate=False):
if not season in self.episodes:
self.episodes[season] = {}
@ -197,15 +197,9 @@ class TVShow(object):
episode) + " didn't exist in the cache, trying to create it", logger.DEBUG)
if file != None:
if scene:
ep = TVEpisode(self, scene_season=season, scene_episode=episode, file=file)
else:
ep = TVEpisode(self, season, episode, file)
ep = TVEpisode(self, season, episode, file)
else:
if scene:
ep = TVEpisode(self, scene_season=season, scene_episode=episode)
else:
ep = TVEpisode(self, season, episode, file)
ep = TVEpisode(self, season, episode, file)
if ep != None:
self.episodes[season][episode] = ep
@ -529,11 +523,11 @@ class TVShow(object):
rootEp = None
# if we have an air-by-date show then get the real season/episode numbers
if parse_result.air_by_date:
if parse_result.air_by_date or parse_result.sports:
logger.log(
u"Looks like this is an air-by-date or sports show, attempting to convert the date to season/episode",
logger.DEBUG)
airdate = parse_result.air_date.toordinal()
airdate = parse_result.air_date.toordinal() or parse_result.sports_event_date.toordinal()
myDB = db.DBConnection()
sql_result = myDB.select(
"SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? and airdate = ?",
@ -1134,12 +1128,12 @@ def dirty_setter(attr_name):
class TVEpisode(object):
def __init__(self, show, season=None, episode=None, scene_season=None, scene_episode=None, file=""):
def __init__(self, show, season, episode, file=""):
self._name = ""
self._season = season
self._episode = episode
self._scene_season = scene_season
self._scene_episode = scene_episode
self._scene_season = -1
self._scene_episode = -1
self._description = ""
self._subtitles = list()
self._subtitles_searchcount = 0
@ -1164,7 +1158,7 @@ class TVEpisode(object):
self.lock = threading.Lock()
self.specifyEpisode(self.season, self.episode, self.scene_season, self.scene_episode)
self.specifyEpisode(self.season, self.episode)
self.relatedEps = []
@ -1282,7 +1276,7 @@ class TVEpisode(object):
cur_tbn = False
# check for nfo and tbn
if ek.ek(os.path.isfile, self.location):
if self.location and ek.ek(os.path.isfile, self.location):
for cur_provider in sickbeard.metadata_provider_dict.values():
if cur_provider.episode_metadata:
new_result = cur_provider._has_episode_metadata(self)
@ -1302,11 +1296,11 @@ class TVEpisode(object):
# if either setting has changed return true, if not return false
return oldhasnfo != self.hasnfo or oldhastbn != self.hastbn
def specifyEpisode(self, season, episode, scene_season=None, scene_episode=None):
def specifyEpisode(self, season, episode):
sqlResult = self.loadFromDB(season, episode)
if not sqlResult:
if not sqlResult and self.location:
# only load from NFO if we didn't load from DB
if ek.ek(os.path.isfile, self.location):
try:
@ -1319,7 +1313,7 @@ class TVEpisode(object):
# if we tried loading it from NFO and didn't find the NFO, try the Indexers
if not self.hasnfo:
try:
result = self.loadFromIndexer(season, episode, scene_season, scene_episode)
result = self.loadFromIndexer(season, episode)
except exceptions.EpisodeDeletedException:
result = False
@ -1328,25 +1322,6 @@ class TVEpisode(object):
raise exceptions.EpisodeNotFoundException(
"Couldn't find episode " + str(season) + "x" + str(episode))
# convert from indexer numbering <-> scene numbering and back again once we have correct season and episode numbers
if self.season and self.episode:
self.scene_season, self.scene_episode = sickbeard.scene_numbering.get_scene_numbering(self.show.indexerid,
self.show.indexer,
self.season, self.episode)
self.season, self.episode = sickbeard.scene_numbering.get_indexer_numbering(self.show.indexerid, self.show.indexer,
self.scene_season,
self.scene_episode)
# convert from scene numbering <-> indexer numbering and back again once we have correct season and episode numbers
elif self.scene_season and self.scene_episode:
self.season, self.episode = sickbeard.scene_numbering.get_indexer_numbering(self.show.indexerid, self.show.indexer,
self.scene_season,
self.scene_episode)
self.scene_season, self.scene_episode = sickbeard.scene_numbering.get_scene_numbering(self.show.indexerid,
self.show.indexer,
self.season,
self.episode)
def loadFromDB(self, season, episode):
logger.log(
@ -1370,8 +1345,6 @@ class TVEpisode(object):
self.season = season
self.episode = episode
self.scene_season = sqlResults[0]["scene_season"]
self.scene_episode = sqlResults[0]["scene_episode"]
self.description = sqlResults[0]["description"]
if not self.description:
self.description = ""
@ -1403,18 +1376,13 @@ class TVEpisode(object):
self.dirty = False
return True
def loadFromIndexer(self, season=None, episode=None, scene_season=None, scene_episode=None, cache=True, tvapi=None, cachedSeason=None):
def loadFromIndexer(self, season=None, episode=None, cache=True, tvapi=None, cachedSeason=None):
if season is None:
season = self.season
if episode is None:
episode = self.episode
if scene_season is None:
scene_season = self.scene_season
if scene_episode is None:
scene_episode = self.scene_episode
logger.log(str(self.show.indexerid) + u": Loading episode details from " + sickbeard.indexerApi(
self.show.indexer).name + " for episode " + str(season) + "x" + str(episode), logger.DEBUG)
@ -1705,13 +1673,12 @@ class TVEpisode(object):
# use a custom update/insert method to get the data into the DB
return [
"INSERT OR REPLACE INTO tv_episodes (episode_id, indexerid, indexer, name, description, subtitles, subtitles_searchcount, subtitles_lastsearch, airdate, hasnfo, hastbn, status, location, file_size, release_name, is_proper, showid, season, episode, scene_season, scene_episode) VALUES "
"((SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?),?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);",
"INSERT OR REPLACE INTO tv_episodes (episode_id, indexerid, indexer, name, description, subtitles, subtitles_searchcount, subtitles_lastsearch, airdate, hasnfo, hastbn, status, location, file_size, release_name, is_proper, showid, season, episode) VALUES "
"((SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?),?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);",
[self.show.indexerid, self.season, self.episode, self.indexerid, self.indexer, self.name, self.description,
",".join([sub for sub in self.subtitles]), self.subtitles_searchcount, self.subtitles_lastsearch,
self.airdate.toordinal(), self.hasnfo, self.hastbn, self.status, self.location, self.file_size,
self.release_name, self.is_proper, self.show.indexerid, self.season, self.episode, self.scene_season,
self.scene_episode]]
self.release_name, self.is_proper, self.show.indexerid, self.season, self.episode]]
def saveToDB(self, forceSave=False):
"""
@ -1745,9 +1712,7 @@ class TVEpisode(object):
"location": self.location,
"file_size": self.file_size,
"release_name": self.release_name,
"is_proper": self.is_proper,
"scene_season": self.scene_season,
"scene_episode": self.scene_episode}
"is_proper": self.is_proper}
controlValueDict = {"showid": self.show.indexerid,
"season": self.season,
"episode": self.episode}

View file

@ -63,6 +63,7 @@ class CacheDBConnection(db.DBConnection):
if str(e) != "table lastUpdate already exists":
raise
class TVCache():
def __init__(self, provider):
@ -94,7 +95,7 @@ class TVCache():
def getRSSFeed(self, url, post_data=None):
# create provider storage cache
storage = Shove('file://' + ek.ek(os.path.join, sickbeard.CACHE_DIR, self.providerID))
storage = Shove('sqlite:///' + ek.ek(os.path.join, sickbeard.CACHE_DIR, self.provider.name) + '.db')
fc = cache.Cache(storage)
parsed = list(urlparse.urlparse(url))
@ -209,7 +210,6 @@ class TVCache():
lastUpdate = property(_getLastUpdate)
def shouldUpdate(self):
return True
# if we've updated recently then skip the update
if datetime.datetime.today() - self.lastUpdate < datetime.timedelta(minutes=self.minTime):
logger.log(u"Last update was too soon, using old cache: today()-" + str(self.lastUpdate) + "<" + str(
@ -220,10 +220,10 @@ class TVCache():
def _addCacheEntry(self, name, url, quality=None):
cacheResult = sickbeard.name_cache.retrieveNameFromCache(name)
if cacheResult:
logger.log(u"Found Indexer ID:[" + repr(cacheResult) + "], using that for [" + str(name) + "}", logger.DEBUG)
logger.log(u"Found Indexer ID:[" + repr(cacheResult) + "], using that for [" + str(name) + "}",
logger.DEBUG)
return
# if we don't have complete info then parse the filename to get it
@ -242,19 +242,16 @@ class TVCache():
logger.log(u"No series name retrieved from " + name + ", unable to cache it", logger.DEBUG)
return None
showObj = helpers.get_show_by_name(parse_result.series_name)
showObj = sickbeard.name_cache.retrieveShowFromCache(parse_result.series_name)
if not showObj:
logger.log(u"Could not find a show matching " + parse_result.series_name + " in the database, skipping ...", logger.DEBUG)
logger.log(u"Cache lookup failed for [" + parse_result.series_name + "], skipping ...", logger.DEBUG)
return None
logger.log(u"Added RSS item: [" + name + "] to cache: [" + self.providerID + "]", logger.DEBUG)
sickbeard.name_cache.addNameToCache(name, showObj.indexerid)
season = episodes = None
if parse_result.air_by_date:
if parse_result.air_by_date or parse_result.sports:
myDB = db.DBConnection()
airdate = parse_result.air_date.toordinal()
airdate = parse_result.air_date.toordinal() or parse_result.sports_event_date.toordinal()
sql_results = myDB.select(
"SELECT season, episode FROM tv_episodes WHERE showid = ? AND indexer = ? AND airdate = ?",
[showObj.indexerid, showObj.indexer, airdate])
@ -280,6 +277,7 @@ class TVCache():
name = unicode(name, 'utf-8')
logger.log(u"Added RSS item: [" + name + "] to cache: [" + self.providerID + "]", logger.DEBUG)
sickbeard.name_cache.addNameToCache(name, showObj.indexerid)
return [
"INSERT INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality) VALUES (?,?,?,?,?,?,?)",
[name, season, episodeText, showObj.indexerid, url, curTimestamp, quality]]

View file

@ -68,6 +68,7 @@ class XEMBasicTests(test.SickbeardTestDBCase):
# parse the name to break it into show name, season, and episode
np = NameParser(file)
parse_result = np.parse(release).convert()
airdate = parse_result.sports_event_date.toordinal()
print(parse_result)