diff --git a/gui/slick/interfaces/default/config_general.tmpl b/gui/slick/interfaces/default/config_general.tmpl
index 7a62ef12..8b15b25e 100644
--- a/gui/slick/interfaces/default/config_general.tmpl
+++ b/gui/slick/interfaces/default/config_general.tmpl
@@ -26,6 +26,7 @@
#end if
+
@@ -167,6 +168,14 @@
+
+
+
+
diff --git a/gui/slick/interfaces/default/manage_massEdit.tmpl b/gui/slick/interfaces/default/manage_massEdit.tmpl
index ea7bfef7..86c6166e 100644
--- a/gui/slick/interfaces/default/manage_massEdit.tmpl
+++ b/gui/slick/interfaces/default/manage_massEdit.tmpl
@@ -27,6 +27,7 @@
#set $cur_index = $root_dir_list.index($cur_dir)
+
$cur_dir => $cur_dir
diff --git a/gui/slick/js/massEdit.js b/gui/slick/js/massEdit.js
index 1ddc541b..b5755224 100644
--- a/gui/slick/js/massEdit.js
+++ b/gui/slick/js/massEdit.js
@@ -22,4 +22,10 @@ $(document).ready(function(){
});
+ $('.delete_root_dir').click(function(){
+ var cur_id = find_dir_index($(this).attr('id'));
+
+ $('#new_root_dir_'+cur_id).val(null);
+ $('#display_new_root_dir_'+cur_id).html('DELETED');
+ });
});
\ No newline at end of file
diff --git a/sickbeard/dailysearcher.py b/sickbeard/dailysearcher.py
index 02460084..221b6b8d 100644
--- a/sickbeard/dailysearcher.py
+++ b/sickbeard/dailysearcher.py
@@ -40,67 +40,37 @@ class DailySearcher():
self.amActive = True
- didSearch = False
+ logger.log(u"Searching for coming episodes and 1 weeks worth of previously WANTED episodes ...")
- providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive() and x.enable_daily]
- for curProviderCount, curProvider in enumerate(providers):
+ curDate = datetime.date.today().toordinal()
- logger.log(u"Updating [" + curProvider.name + "] RSS cache ...")
+ myDB = db.DBConnection()
+ sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE status = ? AND season > 0 AND airdate <= ?",
+ [common.UNAIRED, curDate])
+ sql_l = []
+ for sqlEp in sqlResults:
try:
- curProvider.cache.updateCache()
- except exceptions.AuthException, e:
- logger.log(u"Authentication error: " + ex(e), logger.ERROR)
- continue
- except Exception, e:
- logger.log(u"Error while updating cache for " + curProvider.name + ", skipping: " + ex(e), logger.ERROR)
- logger.log(traceback.format_exc(), logger.DEBUG)
+ show = helpers.findCertainShow(sickbeard.showList, int(sqlEp["showid"]))
+ except exceptions.MultipleShowObjectsException:
+ logger.log(u"ERROR: expected to find a single show matching " + sqlEp["showid"])
continue
- didSearch = True
+ ep = show.getEpisode(int(sqlEp["season"]), int(sqlEp["episode"]))
+ with ep.lock:
+ if ep.show.paused:
+ ep.status = common.SKIPPED
+ else:
+ ep.status = common.WANTED
- if didSearch:
- logger.log(u"Searching for coming episodes and 1 weeks worth of previously WANTED episodes ...")
-
- fromDate = datetime.date.today() - datetime.timedelta(weeks=1)
- curDate = datetime.date.today()
+ sql_l.append(ep.get_sql())
+ if len(sql_l) > 0:
myDB = db.DBConnection()
- sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE status in (?,?) AND airdate >= ? AND airdate <= ?",
- [common.UNAIRED, common.WANTED, fromDate.toordinal(), curDate.toordinal()])
+ myDB.mass_action(sql_l)
- sql_l = []
- for sqlEp in sqlResults:
- try:
- show = helpers.findCertainShow(sickbeard.showList, int(sqlEp["showid"]))
- except exceptions.MultipleShowObjectsException:
- logger.log(u"ERROR: expected to find a single show matching " + sqlEp["showid"])
- continue
-
- ep = show.getEpisode(int(sqlEp["season"]), int(sqlEp["episode"]))
- with ep.lock:
- if ep.show.paused:
- ep.status = common.SKIPPED
-
- if ep.status == common.UNAIRED:
- logger.log(u"New episode " + ep.prettyName() + " airs today, setting status to WANTED")
- ep.status = common.WANTED
-
- sql_l.append(ep.get_sql())
-
- if ep.status == common.WANTED:
- dailysearch_queue_item = sickbeard.search_queue.DailySearchQueueItem(show, [ep])
- sickbeard.searchQueueScheduler.action.add_item(dailysearch_queue_item)
- else:
- logger.log(u"Could not find any wanted episodes for the last 7 days to search for")
-
- if len(sql_l) > 0:
- myDB = db.DBConnection()
- myDB.mass_action(sql_l)
-
- else:
- logger.log(
- u"No NZB/Torrent providers found or enabled in the sickrage config. Please check your settings.",
- logger.ERROR)
+ # queue a daily search (now show-agnostic, no episode segment needed)
+ dailysearch_queue_item = sickbeard.search_queue.DailySearchQueueItem()
+ sickbeard.searchQueueScheduler.action.add_item(dailysearch_queue_item)
self.amActive = False
\ No newline at end of file
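Net effect of the dailysearcher.py rewrite: provider cache refreshing moves out of this loop (into the search itself, see search.py below), and the searcher's only jobs are to flip newly aired UNAIRED episodes to WANTED and enqueue one global daily-search item. A condensed sketch of the new flow (error handling and the MultipleShowObjectsException guard trimmed):

```python
import datetime

import sickbeard
from sickbeard import common, db, helpers, search_queue


def run_daily_search():
    # condensed sketch of DailySearcher.run() after this change
    curDate = datetime.date.today().toordinal()

    myDB = db.DBConnection()
    sqlResults = myDB.select(
        "SELECT * FROM tv_episodes WHERE status = ? AND season > 0 AND airdate <= ?",
        [common.UNAIRED, curDate])

    sql_l = []
    for sqlEp in sqlResults:
        show = helpers.findCertainShow(sickbeard.showList, int(sqlEp["showid"]))
        if not show:
            continue
        ep = show.getEpisode(int(sqlEp["season"]), int(sqlEp["episode"]))
        with ep.lock:
            # paused shows are skipped instead of searched
            ep.status = common.SKIPPED if ep.show.paused else common.WANTED
            sql_l.append(ep.get_sql())

    # flush all status changes in one transaction
    if sql_l:
        db.DBConnection().mass_action(sql_l)

    # one show-agnostic queue item replaces the per-show items queued before
    sickbeard.searchQueueScheduler.action.add_item(
        search_queue.DailySearchQueueItem())
```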
diff --git a/sickbeard/helpers.py b/sickbeard/helpers.py
index d95ce049..a1c87a63 100644
--- a/sickbeard/helpers.py
+++ b/sickbeard/helpers.py
@@ -210,21 +210,17 @@ def _remove_file_failed(file):
except:
pass
-
def findCertainShow(showList, indexerid):
- if not showList:
- return None
results = []
- if indexerid:
+ if showList and indexerid:
results = filter(lambda x: int(x.indexerid) == int(indexerid), showList)
- if len(results):
+ if len(results) == 1:
return results[0]
elif len(results) > 1:
raise MultipleShowObjectsException()
-
def makeDir(path):
if not ek.ek(os.path.isdir, path):
try:
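Tightening findCertainShow() to return a show only on exactly one match means callers get None for an empty show list, a falsy indexerid, or an unknown id, so the result must be guarded, which is what the scene_numbering.py hunks below add. A minimal usage sketch (the indexer id is hypothetical):

```python
import sickbeard
from sickbeard import helpers

showObj = helpers.findCertainShow(sickbeard.showList, 70522)  # hypothetical indexer id
if showObj and not showObj.is_scene:
    # showObj can be None, so test it before touching attributes
    pass
```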
diff --git a/sickbeard/providers/animezb.py b/sickbeard/providers/animezb.py
index 69b0db91..2260227b 100644
--- a/sickbeard/providers/animezb.py
+++ b/sickbeard/providers/animezb.py
@@ -133,7 +133,7 @@ class AnimezbCache(tvcache.TVCache):
# only poll Animezb every 20 minutes max
self.minTime = 20
- def _getDailyData(self):
+ def _getRSSData(self):
params = {
"cat": "anime".encode('utf-8'),
diff --git a/sickbeard/providers/bitsoup.py b/sickbeard/providers/bitsoup.py
index 2500353a..1dce5355 100644
--- a/sickbeard/providers/bitsoup.py
+++ b/sickbeard/providers/bitsoup.py
@@ -274,7 +274,7 @@ class BitSoupCache(tvcache.TVCache):
# only poll Bitsoup every 20 minutes max
self.minTime = 20
- def _getDailyData(self):
+ def _getRSSData(self):
search_params = {'RSS': ['']}
return self.provider._doSearch(search_params)
diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py
index 4786ff91..6f16ea65 100644
--- a/sickbeard/providers/btn.py
+++ b/sickbeard/providers/btn.py
@@ -297,7 +297,7 @@ class BTNCache(tvcache.TVCache):
# At least 15 minutes between queries
self.minTime = 15
- def _getDailyData(self):
+ def _getRSSData(self):
# Get the torrents uploaded since last check.
seconds_since_last_update = math.ceil(time.time() - time.mktime(self._getLastUpdate().timetuple()))
diff --git a/sickbeard/providers/ezrss.py b/sickbeard/providers/ezrss.py
index feb1765e..6249c7ec 100644
--- a/sickbeard/providers/ezrss.py
+++ b/sickbeard/providers/ezrss.py
@@ -174,7 +174,7 @@ class EZRSSCache(tvcache.TVCache):
# only poll EZRSS every 15 minutes max
self.minTime = 15
- def _getDailyData(self):
+ def _getRSSData(self):
rss_url = self.provider.url + 'feed/'
logger.log(self.provider.name + " cache update URL: " + rss_url, logger.DEBUG)
diff --git a/sickbeard/providers/fanzub.py b/sickbeard/providers/fanzub.py
index 0ad73ccb..3f590778 100644
--- a/sickbeard/providers/fanzub.py
+++ b/sickbeard/providers/fanzub.py
@@ -128,7 +128,7 @@ class FanzubCache(tvcache.TVCache):
# only poll Fanzub every 20 minutes max
self.minTime = 20
- def _getDailyData(self):
+ def _getRSSData(self):
params = {
"cat": "anime".encode('utf-8'),
diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py
index bab6ea7d..874a934f 100755
--- a/sickbeard/providers/freshontv.py
+++ b/sickbeard/providers/freshontv.py
@@ -306,7 +306,7 @@ class FreshOnTVCache(tvcache.TVCache):
# poll delay in minutes
self.minTime = 20
- def _getDailyData(self):
+ def _getRSSData(self):
search_params = {'RSS': ['']}
return self.provider._doSearch(search_params)
diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py
index c638d939..95bdc169 100644
--- a/sickbeard/providers/generic.py
+++ b/sickbeard/providers/generic.py
@@ -195,8 +195,8 @@ class GenericProvider:
return True
- def searchRSS(self, episodes):
- return self.cache.findNeededEpisodes(episodes)
+ def searchRSS(self):
+ return self.cache.findNeededEpisodes()
def getQuality(self, item, anime=False):
"""
@@ -255,10 +255,15 @@ class GenericProvider:
searched_scene_season = None
for epObj in episodes:
- # check cache for results
- cacheResult = self.cache.searchCache([epObj], manualSearch)
- if len(cacheResult):
- results.update({epObj.episode: cacheResult[epObj]})
+ # search cache for episode result
+ cacheResult = self.cache.searchCache(epObj, manualSearch)
+ if cacheResult:
+ if epObj.episode not in results:
+ results[epObj.episode] = cacheResult
+ else:
+ results[epObj.episode].extend(cacheResult)
+
+ # found result, search next episode
continue
# skip if season already searched
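searchCache() now takes one episode object and returns just that episode's result list (see the tvcache.py hunk at the end), so the accumulation above keeps results keyed by episode number, matching what the rest of findSearchResults() consumes. Roughly, for a hypothetical caller:

```python
# hypothetical caller: epObj is a TVEpisode, provider.cache a TVCache
cacheResult = provider.cache.searchCache(epObj, manualSearch=False)
if cacheResult:
    # cacheResult is a plain list of results for this single episode
    results.setdefault(epObj.episode, []).extend(cacheResult)
```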
diff --git a/sickbeard/providers/hdbits.py b/sickbeard/providers/hdbits.py
index 85cd6017..19a31cc5 100644
--- a/sickbeard/providers/hdbits.py
+++ b/sickbeard/providers/hdbits.py
@@ -204,7 +204,7 @@ class HDBitsCache(tvcache.TVCache):
# only poll HDBits every 15 minutes max
self.minTime = 15
- def _getDailyData(self):
+ def _getRSSData(self):
parsedJSON = self.provider.getURL(self.provider.rss_url, post_data=self.provider._make_post_data_JSON(), json=True)
if not self.provider._checkAuthFromData(parsedJSON):
diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py
index a02afbad..e011d010 100644
--- a/sickbeard/providers/hdtorrents.py
+++ b/sickbeard/providers/hdtorrents.py
@@ -336,7 +336,7 @@ class HDTorrentsCache(tvcache.TVCache):
# only poll HDTorrents every 20 minutes max
self.minTime = 20
- def _getDailyData(self):
+ def _getRSSData(self):
search_params = {'RSS': []}
return self.provider._doSearch(search_params)
diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py
index e24bb328..2313efb9 100644
--- a/sickbeard/providers/iptorrents.py
+++ b/sickbeard/providers/iptorrents.py
@@ -276,7 +276,7 @@ class IPTorrentsCache(tvcache.TVCache):
# Only poll IPTorrents every 10 minutes max
self.minTime = 10
- def _getDailyData(self):
+ def _getRSSData(self):
search_params = {'RSS': ['']}
return self.provider._doSearch(search_params)
diff --git a/sickbeard/providers/kat.py b/sickbeard/providers/kat.py
index a24eead5..5bea6248 100644
--- a/sickbeard/providers/kat.py
+++ b/sickbeard/providers/kat.py
@@ -355,7 +355,7 @@ class KATCache(tvcache.TVCache):
# only poll KAT every 20 minutes max
self.minTime = 20
- def _getDailyData(self):
+ def _getRSSData(self):
search_params = {'RSS': ['rss']}
return self.provider._doSearch(search_params)
diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py
index c751517d..d4686b52 100755
--- a/sickbeard/providers/newznab.py
+++ b/sickbeard/providers/newznab.py
@@ -337,21 +337,18 @@ class NewznabCache(tvcache.TVCache):
def updateCache(self):
- # delete anything older then 7 days
- self._clearCache()
-
- if not self.shouldUpdate():
- return
-
- if self._checkAuth(None):
+ if self.shouldUpdate() and self._checkAuth(None):
data = self._getRSSData()
# as long as the http request worked we count this as an update
- if data:
- self.setLastUpdate()
- else:
+ if not data:
return []
+ self.setLastUpdate()
+
+ # clear cache
+ self._clearCache()
+
if self._checkAuth(data):
items = data.entries
cl = []
@@ -370,7 +367,6 @@ class NewznabCache(tvcache.TVCache):
return []
-
# overwrite method that parses the rageid from the newznab feed
def _parseItem(self, item):
title = item.title
diff --git a/sickbeard/providers/nextgen.py b/sickbeard/providers/nextgen.py
index 09cca9b9..de5ab0c9 100644
--- a/sickbeard/providers/nextgen.py
+++ b/sickbeard/providers/nextgen.py
@@ -318,7 +318,7 @@ class NextGenCache(tvcache.TVCache):
# Only poll NextGen every 10 minutes max
self.minTime = 10
- def _getDailyData(self):
+ def _getRSSData(self):
search_params = {'RSS': ['']}
return self.provider._doSearch(search_params)
diff --git a/sickbeard/providers/nyaatorrents.py b/sickbeard/providers/nyaatorrents.py
index 2ac991cb..d7f9e3b7 100644
--- a/sickbeard/providers/nyaatorrents.py
+++ b/sickbeard/providers/nyaatorrents.py
@@ -126,7 +126,7 @@ class NyaaCache(tvcache.TVCache):
# only poll NyaaTorrents every 15 minutes max
self.minTime = 15
- def _getDailyData(self):
+ def _getRSSData(self):
params = {
"page": 'rss', # Use RSS page
"order": '1', # Sort Descending By Date
diff --git a/sickbeard/providers/omgwtfnzbs.py b/sickbeard/providers/omgwtfnzbs.py
index 2e0728c2..59985055 100644
--- a/sickbeard/providers/omgwtfnzbs.py
+++ b/sickbeard/providers/omgwtfnzbs.py
@@ -174,7 +174,7 @@ class OmgwtfnzbsCache(tvcache.TVCache):
return (title, url)
- def _getDailyData(self):
+ def _getRSSData(self):
params = {'user': provider.username,
'api': provider.api_key,
'eng': 1,
diff --git a/sickbeard/providers/rsstorrent.py b/sickbeard/providers/rsstorrent.py
index b793c228..3cf27c8a 100644
--- a/sickbeard/providers/rsstorrent.py
+++ b/sickbeard/providers/rsstorrent.py
@@ -102,7 +102,7 @@ class TorrentRssProvider(generic.TorrentProvider):
if not cookie_validator.match(self.cookies):
return (False, 'Cookie is not correctly formatted: ' + self.cookies)
- items = self.cache._getDailyData()
+ items = self.cache._getRSSData()
if not len(items) > 0:
return (False, 'No items found in the RSS feed ' + self.url)
@@ -157,7 +157,7 @@ class TorrentRssCache(tvcache.TVCache):
tvcache.TVCache.__init__(self, provider)
self.minTime = 15
- def _getDailyData(self):
+ def _getRSSData(self):
logger.log(u"TorrentRssCache cache update URL: " + self.provider.url, logger.DEBUG)
request_headers = None
diff --git a/sickbeard/providers/scc.py b/sickbeard/providers/scc.py
index 2dc0c31f..36439489 100644
--- a/sickbeard/providers/scc.py
+++ b/sickbeard/providers/scc.py
@@ -312,7 +312,7 @@ class SCCCache(tvcache.TVCache):
# only poll SCC every 20 minutes max
self.minTime = 20
- def _getDailyData(self):
+ def _getRSSData(self):
search_params = {'RSS': ['']}
return self.provider._doSearch(search_params)
diff --git a/sickbeard/providers/speedcd.py b/sickbeard/providers/speedcd.py
index a2ee3bb9..9839b880 100644
--- a/sickbeard/providers/speedcd.py
+++ b/sickbeard/providers/speedcd.py
@@ -252,7 +252,7 @@ class SpeedCDCache(tvcache.TVCache):
# only poll Speedcd every 20 minutes max
self.minTime = 20
- def _getDailyData(self):
+ def _getRSSData(self):
search_params = {'RSS': ['']}
return self.provider._doSearch(search_params)
diff --git a/sickbeard/providers/thepiratebay.py b/sickbeard/providers/thepiratebay.py
index dd7d841d..eaf970f7 100644
--- a/sickbeard/providers/thepiratebay.py
+++ b/sickbeard/providers/thepiratebay.py
@@ -338,7 +338,7 @@ class ThePirateBayCache(tvcache.TVCache):
# only poll ThePirateBay every 20 minutes max
self.minTime = 20
- def _getDailyData(self):
+ def _getRSSData(self):
search_params = {'RSS': ['rss']}
return self.provider._doSearch(search_params)
diff --git a/sickbeard/providers/torrentbytes.py b/sickbeard/providers/torrentbytes.py
index d8f646cb..dd7f39d9 100644
--- a/sickbeard/providers/torrentbytes.py
+++ b/sickbeard/providers/torrentbytes.py
@@ -274,7 +274,7 @@ class TorrentBytesCache(tvcache.TVCache):
# only poll TorrentBytes every 20 minutes max
self.minTime = 20
- def _getDailyData(self):
+ def _getRSSData(self):
search_params = {'RSS': ['']}
return self.provider._doSearch(search_params)
diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py
index e1d78d8d..eacaa21e 100644
--- a/sickbeard/providers/torrentday.py
+++ b/sickbeard/providers/torrentday.py
@@ -277,7 +277,7 @@ class TorrentDayCache(tvcache.TVCache):
# Only poll TorrentDay every 10 minutes max
self.minTime = 10
- def _getDailyData(self):
+ def _getRSSData(self):
search_params = {'RSS': ['']}
return self.provider._doSearch(search_params)
diff --git a/sickbeard/providers/torrentleech.py b/sickbeard/providers/torrentleech.py
index 69401a55..8dfb1731 100644
--- a/sickbeard/providers/torrentleech.py
+++ b/sickbeard/providers/torrentleech.py
@@ -275,7 +275,7 @@ class TorrentLeechCache(tvcache.TVCache):
# only poll TorrentLeech every 20 minutes max
self.minTime = 20
- def _getDailyData(self):
+ def _getRSSData(self):
search_params = {'RSS': ['']}
return self.provider._doSearch(search_params)
diff --git a/sickbeard/providers/tvtorrents.py b/sickbeard/providers/tvtorrents.py
index b9364a49..227c1941 100644
--- a/sickbeard/providers/tvtorrents.py
+++ b/sickbeard/providers/tvtorrents.py
@@ -83,7 +83,7 @@ class TvTorrentsCache(tvcache.TVCache):
# only poll TvTorrents every 15 minutes max
self.minTime = 15
- def _getDailyData(self):
+ def _getRSSData(self):
# These will be ignored on the serverside.
ignore_regex = "all.month|month.of|season[\s\d]*complete"
diff --git a/sickbeard/scene_numbering.py b/sickbeard/scene_numbering.py
index 49e25dd8..79786a53 100644
--- a/sickbeard/scene_numbering.py
+++ b/sickbeard/scene_numbering.py
@@ -53,7 +53,7 @@ def get_scene_numbering(indexer_id, indexer, season, episode, fallback_to_xem=Tr
return (season, episode)
showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(indexer_id))
- if not showObj.is_scene:
+ if showObj and not showObj.is_scene:
return (season, episode)
result = find_scene_numbering(int(indexer_id), int(indexer), season, episode)
@@ -105,7 +105,7 @@ def get_scene_absolute_numbering(indexer_id, indexer, absolute_number, fallback_
indexer = int(indexer)
showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, indexer_id)
- if not showObj.is_scene:
+ if showObj and not showObj.is_scene:
return absolute_number
result = find_scene_absolute_numbering(indexer_id, indexer, absolute_number)
diff --git a/sickbeard/search.py b/sickbeard/search.py
index f3355fb4..f9b651a4 100644
--- a/sickbeard/search.py
+++ b/sickbeard/search.py
@@ -336,7 +336,7 @@ def filterSearchResults(show, season, results):
return foundResults
-def searchForNeededEpisodes(show, episodes):
+def searchForNeededEpisodes():
foundResults = {}
didSearch = False
@@ -344,15 +344,13 @@ def searchForNeededEpisodes(show, episodes):
origThreadName = threading.currentThread().name
providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive() and x.enable_daily]
- for curProviderCount, curProvider in enumerate(providers):
- if curProvider.anime_only and not show.is_anime:
- logger.log(u"" + str(show.name) + " is not an anime, skiping", logger.DEBUG)
- continue
+ for curProvider in providers:
threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"
try:
- curFoundResults = curProvider.searchRSS(episodes)
+ curProvider.cache.updateCache()
+ curFoundResults = curProvider.searchRSS()
except exceptions.AuthException, e:
logger.log(u"Authentication error: " + ex(e), logger.ERROR)
continue
@@ -374,6 +372,12 @@ def searchForNeededEpisodes(show, episodes):
logger.DEBUG)
continue
+ # pick the best result for the current episode
bestResult = pickBestResult(curFoundResults[curEp], curEp.show)
# if all results were rejected move on to the next episode
@@ -400,7 +404,7 @@ def searchForNeededEpisodes(show, episodes):
u"No NZB/Torrent providers found or enabled in the sickrage config for daily searches. Please check your settings.",
logger.ERROR)
- return foundResults.values() if len(foundResults) else {}
+ return foundResults.values()
def searchProviders(show, season, episodes, manualSearch=False):
@@ -409,6 +413,9 @@ def searchProviders(show, season, episodes, manualSearch=False):
didSearch = False
+ # build name cache for show
+ sickbeard.name_cache.buildNameCache(show)
+
# check if we want to search for season packs instead of just season/episode
seasonSearch = False
if not manualSearch:
@@ -442,6 +449,7 @@ def searchProviders(show, season, episodes, manualSearch=False):
logger.log(u"Searching for episodes we need from " + show.name + " Season " + str(season))
try:
+ curProvider.cache.updateCache()
searchResults = curProvider.findSearchResults(show, season, episodes, search_mode, manualSearch)
except exceptions.AuthException, e:
logger.log(u"Authentication error: " + ex(e), logger.ERROR)
diff --git a/sickbeard/searchBacklog.py b/sickbeard/searchBacklog.py
index 4bccad8f..1abe44c7 100644
--- a/sickbeard/searchBacklog.py
+++ b/sickbeard/searchBacklog.py
@@ -29,6 +29,7 @@ from sickbeard import logger
from sickbeard import ui
from sickbeard import common
+
class BacklogSearchScheduler(scheduler.Scheduler):
def forceSearch(self):
self.action._set_lastBacklog(1)
@@ -40,11 +41,12 @@ class BacklogSearchScheduler(scheduler.Scheduler):
else:
return datetime.date.fromordinal(self.action._lastBacklog + self.action.cycleTime)
+
class BacklogSearcher:
def __init__(self):
self._lastBacklog = self._get_lastBacklog()
- self.cycleTime = sickbeard.BACKLOG_FREQUENCY/60/24
+ self.cycleTime = sickbeard.BACKLOG_FREQUENCY / 60 / 24
self.lock = threading.Lock()
self.amActive = False
self.amPaused = False
@@ -99,7 +101,7 @@ class BacklogSearcher:
if len(segments):
backlog_queue_item = search_queue.BacklogQueueItem(curShow, segments)
- sickbeard.searchQueueScheduler.action.add_item(backlog_queue_item) #@UndefinedVariable
+ sickbeard.searchQueueScheduler.action.add_item(backlog_queue_item) # @UndefinedVariable
else:
logger.log(u"Nothing needs to be downloaded for " + str(curShow.name) + ", skipping this season",
logger.DEBUG)
@@ -132,14 +134,14 @@ class BacklogSearcher:
return self._lastBacklog
def _get_segments(self, show, fromDate):
- anyQualities, bestQualities = common.Quality.splitQuality(show.quality) #@UnusedVariable
+ anyQualities, bestQualities = common.Quality.splitQuality(show.quality) # @UnusedVariable
logger.log(u"Seeing if we need anything from " + show.name)
myDB = db.DBConnection()
if show.air_by_date:
sqlResults = myDB.select(
- "SELECT ep.status, ep.season, ep.episode FROM tv_episodes ep, tv_shows show WHERE season != 0 AND ep.showid = show.indexer_id AND show.paused = 0 ANd ep.airdate > ? AND ep.showid = ? AND show.air_by_date = 1",
+ "SELECT ep.status, ep.season, ep.episode FROM tv_episodes ep, tv_shows show WHERE season != 0 AND ep.showid = show.indexer_id AND show.paused = 0 AND ep.airdate > ? AND ep.showid = ? AND show.air_by_date = 1",
[fromDate.toordinal(), show.indexerid])
else:
sqlResults = myDB.select(
diff --git a/sickbeard/search_queue.py b/sickbeard/search_queue.py
index bbda755a..5616fc15 100644
--- a/sickbeard/search_queue.py
+++ b/sickbeard/search_queue.py
@@ -72,34 +72,27 @@ class SearchQueue(generic_queue.GenericQueue):
return False
def add_item(self, item):
-
- if isinstance(item, (DailySearchQueueItem, BacklogQueueItem, ManualSearchQueueItem, FailedQueueItem)) \
- and not self.is_in_queue(item.show, item.segment):
- sickbeard.name_cache.buildNameCache(item.show)
+ if isinstance(item, DailySearchQueueItem) or (
+ isinstance(item, (BacklogQueueItem, ManualSearchQueueItem, FailedQueueItem)) and not self.is_in_queue(
+ item.show, item.segment)):
generic_queue.GenericQueue.add_item(self, item)
else:
logger.log(u"Not adding item, it's already in the queue", logger.DEBUG)
class DailySearchQueueItem(generic_queue.QueueItem):
- def __init__(self, show, segment):
+ def __init__(self):
generic_queue.QueueItem.__init__(self, 'Daily Search', DAILY_SEARCH)
- self.priority = generic_queue.QueuePriorities.HIGH
- self.name = 'DAILYSEARCH-' + str(show.indexerid)
- self.show = show
- self.segment = segment
def run(self):
-
generic_queue.QueueItem.run(self)
try:
-
- logger.log("Beginning daily search for [" + self.show.name + "]")
- foundResults = search.searchForNeededEpisodes(self.show, self.segment)
+ logger.log("Beginning daily search for new episodes")
+ foundResults = search.searchForNeededEpisodes()
if not len(foundResults):
- logger.log(u"No needed episodes found during daily search for [" + self.show.name + "]")
+ logger.log(u"No needed episodes found")
else:
for result in foundResults:
# just use the first result for now
@@ -115,6 +108,7 @@ class DailySearchQueueItem(generic_queue.QueueItem):
self.finish()
+
class ManualSearchQueueItem(generic_queue.QueueItem):
def __init__(self, show, segment):
generic_queue.QueueItem.__init__(self, 'Manual Search', MANUAL_SEARCH)
@@ -169,7 +163,7 @@ class BacklogQueueItem(generic_queue.QueueItem):
try:
for season in self.segment:
sickbeard.searchBacklog.BacklogSearcher.currentSearchInfo = {
- 'title': self.show.name + " Season " + str(season)}
+ 'title': self.show.name + " Season " + str(season)}
wantedEps = self.segment[season]
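With DailySearchQueueItem now taking no arguments, add_item() can no longer dedupe it by show/segment, so the rewritten check applies the is_in_queue() test only to the show-scoped item types; buildNameCache() moves out of add_item() and into the search paths (searchProviders() above, and per-show inside findNeededEpisodes() below). Enqueueing reduces to:

```python
import sickbeard
from sickbeard import search_queue

# daily searches are global now: no show, no segment to pass
sickbeard.searchQueueScheduler.action.add_item(
    search_queue.DailySearchQueueItem())
```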
diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py
index 96b79499..ac554f72 100644
--- a/sickbeard/tvcache.py
+++ b/sickbeard/tvcache.py
@@ -89,26 +89,17 @@ class TVCache():
def _clearCache(self):
if self.shouldClearCache():
- logger.log(u"Clearing items older than 1 week from " + self.provider.name + " cache")
-
- curDate = datetime.date.today() - datetime.timedelta(weeks=1)
-
myDB = self._getDB()
- myDB.action("DELETE FROM [" + self.providerID + "] WHERE time < ?", [int(time.mktime(curDate.timetuple()))])
+ myDB.action("DELETE FROM [" + self.providerID + "] WHERE 1")
def _get_title_and_url(self, item):
# override this in the provider if daily search has a different data layout to backlog searches
return self.provider._get_title_and_url(item)
def _getRSSData(self):
-
data = None
-
return data
- def _getDailyData(self):
- return None
-
def _checkAuth(self):
return self.provider._checkAuth()
@@ -116,10 +107,9 @@ class TVCache():
return True
def updateCache(self):
-
if self.shouldUpdate() and self._checkAuth():
# as long as the http request worked we count this as an update
- data = self._getDailyData()
+ data = self._getRSSData()
if not data:
return []
@@ -289,9 +279,9 @@ class TVCache():
[name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group, version]]
- def searchCache(self, episodes, manualSearch=False):
- neededEps = self.findNeededEpisodes(episodes, manualSearch)
- return neededEps
+ def searchCache(self, episode, manualSearch=False):
+ neededEps = self.findNeededEpisodes(episode, manualSearch)
+ return neededEps[episode]
def listPropers(self, date=None, delimiter="."):
myDB = self._getDB()
@@ -303,69 +293,84 @@ class TVCache():
return filter(lambda x: x['indexerid'] != 0, myDB.select(sql))
- def findNeededEpisodes(self, episodes, manualSearch=False):
+ def findNeededEpisodes(self, episode=None, manualSearch=False):
neededEps = {}
- for epObj in episodes:
- myDB = self._getDB()
+ if episode:
+ neededEps[episode] = []
+
+ myDB = self._getDB()
+ if not episode:
+ sqlResults = myDB.select("SELECT * FROM [" + self.providerID + "]")
+ else:
sqlResults = myDB.select(
"SELECT * FROM [" + self.providerID + "] WHERE indexerid = ? AND season = ? AND episodes LIKE ?",
- [epObj.show.indexerid, epObj.season, "%|" + str(epObj.episode) + "|%"])
+ [episode.show.indexerid, episode.season, "%|" + str(episode.episode) + "|%"])
- # for each cache entry
- for curResult in sqlResults:
+ # for each cache entry
+ for curResult in sqlResults:
- # skip non-tv crap (but allow them for Newzbin cause we assume it's filtered well)
- if self.providerID != 'newzbin' and not show_name_helpers.filterBadReleases(curResult["name"]):
- continue
+ # skip non-tv crap
+ if not show_name_helpers.filterBadReleases(curResult["name"]):
+ continue
- # get the show object, or if it's not one of our shows then ignore it
- try:
- showObj = helpers.findCertainShow(sickbeard.showList, int(curResult["indexerid"]))
- except MultipleShowObjectsException:
- showObj = None
+ # get the show object, or if it's not one of our shows then ignore it
+ showObj = helpers.findCertainShow(sickbeard.showList, int(curResult["indexerid"]))
+ if not showObj:
+ continue
- if not showObj:
- continue
+ # skip if provider is anime only and show is not anime
+ if self.provider.anime_only and not showObj.is_anime:
+ logger.log(u"" + str(showObj.name) + " is not an anime, skiping", logger.DEBUG)
+ continue
- # get season and ep data (ignoring multi-eps for now)
- curSeason = int(curResult["season"])
- if curSeason == -1:
- continue
- curEp = curResult["episodes"].split("|")[1]
- if not curEp:
- continue
- curEp = int(curEp)
- curQuality = int(curResult["quality"])
- curReleaseGroup = curResult["release_group"]
- curVersion = curResult["version"]
+ # get season and ep data (ignoring multi-eps for now)
+ curSeason = int(curResult["season"])
+ if curSeason == -1:
+ continue
+ curEp = curResult["episodes"].split("|")[1]
+ if not curEp:
+ continue
+ curEp = int(curEp)
- # if the show says we want that episode then add it to the list
- if not showObj.wantEpisode(curSeason, curEp, curQuality, manualSearch):
- logger.log(u"Skipping " + curResult["name"] + " because we don't want an episode that's " +
- Quality.qualityStrings[curQuality], logger.DEBUG)
- continue
+ curQuality = int(curResult["quality"])
+ curReleaseGroup = curResult["release_group"]
+ curVersion = curResult["version"]
- # build a result object
- title = curResult["name"]
- url = curResult["url"]
+ # if the show says we want that episode then add it to the list
+ if not showObj.wantEpisode(curSeason, curEp, curQuality, manualSearch):
+ logger.log(u"Skipping " + curResult["name"] + " because we don't want an episode that's " +
+ Quality.qualityStrings[curQuality], logger.DEBUG)
+ continue
- logger.log(u"Found result " + title + " at " + url)
+ # build name cache for show
+ sickbeard.name_cache.buildNameCache(showObj)
- result = self.provider.getResult([epObj])
- result.show = showObj
- result.url = url
- result.name = title
- result.quality = curQuality
- result.release_group = curReleaseGroup
- result.version = curVersion
- result.content = None
+ if episode:
+ epObj = episode
+ else:
+ epObj = showObj.getEpisode(curSeason, curEp)
- # add it to the list
- if epObj not in neededEps:
- neededEps[epObj] = [result]
- else:
- neededEps[epObj].append(result)
+ # build a result object
+ title = curResult["name"]
+ url = curResult["url"]
+
+ logger.log(u"Found result " + title + " at " + url)
+
+ result = self.provider.getResult([epObj])
+ result.show = showObj
+ result.url = url
+ result.name = title
+ result.quality = curQuality
+ result.release_group = curReleaseGroup
+ result.version = curVersion
+ result.content = None
+
+ # add it to the list
+ if epObj not in neededEps:
+ neededEps[epObj] = [result]
+ else:
+ neededEps[epObj].append(result)
# datetime stamp this search so cache gets cleared
self.setLastSearch()
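findNeededEpisodes() keeps its dict-of-lists return shape but now serves both paths: with no argument it scans the provider's whole cache table (the searchRSS() path), and with a single episode it filters to that episode (the searchCache() path, which unwraps the dict). Expected shapes, assuming cache is a TVCache instance and epObj a TVEpisode:

```python
# daily-search path: every cached result that any show still wants
all_needed = cache.findNeededEpisodes()   # {TVEpisode: [SearchResult, ...]}

# backlog/manual path: results for one episode only
ep_results = cache.searchCache(epObj)     # [SearchResult, ...]
```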